CSvmTutorial.cpp
#include <shark/Algorithms/Trainers/CSvmTrainer.h> // the C-SVM trainer
#include <shark/Models/Kernels/GaussianRbfKernel.h> // the kernel used by the SVM
#include <shark/ObjectiveFunctions/Loss/ZeroOneLoss.h> // used for evaluation of the classifier
#include <shark/Data/DataDistribution.h> // includes small toy distributions

using namespace shark;
using namespace std;

int main(int argc, char** argv)
{
	// experiment settings
	unsigned int ell = 500; // number of training data points
	unsigned int tests = 10000; // number of test data points
	double gamma = 0.5; // kernel bandwidth parameter
	double C = 1000.0; // regularization parameter
	bool bias = true; // use bias/offset parameter

	GaussianRbfKernel<> kernel(gamma); // Gaussian kernel
	KernelClassifier<RealVector> kc; // (affine) linear function in kernel-induced feature space

	// generate dataset
	Chessboard problem; // artificial benchmark data
	ClassificationDataset training = problem.generateDataset(ell);
	ClassificationDataset test = problem.generateDataset(tests);
	// define the machine
	CSvmTrainer<RealVector> trainer(&kernel, C, bias);

// // ADDITIONAL/ADVANCED SVM SOLVER OPTIONS:
// // to use "double" as kernel matrix cache type internally instead of float:
// CSvmTrainer<RealVector, double> trainer(&kernel, C, bias);
// // to keep non-support vectors after training:
// trainer.sparsify() = false;
// // to relax or tighten the stopping criterion from 1e-3 (here, tightened to 1e-6):
// trainer.stoppingCondition().minAccuracy = 1e-6;
// // to set the kernel cache size to 128 MB for double (0x1000000 = 16^6 entries times sizeof(double), when double was selected as cache type above),
// // or to 64 MB for float (16^6 entries times sizeof(float), when the CSvmTrainer is declared without the second template argument):
// trainer.setCacheSize( 0x1000000 );

	// train the machine
	cout << "Algorithm: " << trainer.name() << "\ntraining ..." << flush; // Shark algorithms know their names
	trainer.train(kc, training);
	cout << "\n number of iterations: " << trainer.solutionProperties().iterations;
	cout << "\n dual value: " << trainer.solutionProperties().value;
	cout << "\n training time: " << trainer.solutionProperties().seconds << " seconds\ndone." << endl;

	// evaluate
	ZeroOneLoss<unsigned int> loss; // 0-1 loss
	Data<unsigned int> output = kc(training.inputs()); // evaluate on training set
	double train_error = loss.eval(training.labels(), output);
	cout << "training error:\t" << train_error << endl;
	output = kc(test.inputs()); // evaluate on test set
	double test_error = loss.eval(test.labels(), output);
	cout << "test error:\t" << test_error << endl;
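
	// OPTIONAL (not part of the original tutorial): a minimal model-selection sketch that
	// retrains the SVM for a few candidate values of C and reports each resulting test error.
	// It reuses only the calls already shown above; the names candidates, candidateTrainer,
	// candidateKc and candidateError are illustrative. In practice, C would be selected on
	// separate validation data or by cross validation rather than on the test set.
// double candidates[] = {1.0, 10.0, 100.0, 1000.0};
// for (unsigned int i = 0; i != 4; ++i){
// 	CSvmTrainer<RealVector> candidateTrainer(&kernel, candidates[i], bias);
// 	KernelClassifier<RealVector> candidateKc;
// 	candidateTrainer.train(candidateKc, training);
// 	double candidateError = loss.eval(test.labels(), candidateKc(test.inputs()));
// 	cout << "C = " << candidates[i] << "\ttest error: " << candidateError << endl;
// }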
}