Shark machine learning library
Installation
Tutorials
Benchmarks
Documentation
Quick references
Class list
Global functions
Examples
Supervised
KernelRegression.cpp
Go to the documentation of this file.
1
//===========================================================================
2
/*!
3
*
4
*
5
* \brief Kernel-based regression methods example program.
6
*
7
*
8
*
9
* \author T. Glasmachers
10
* \date -
11
*
12
*
13
* \par Copyright 1995-2017 Shark Development Team
14
*
15
* <BR><HR>
16
* This file is part of Shark.
17
* <https://shark-ml.github.io/Shark/>
18
*
19
* Shark is free software: you can redistribute it and/or modify
20
* it under the terms of the GNU Lesser General Public License as published
21
* by the Free Software Foundation, either version 3 of the License, or
22
* (at your option) any later version.
23
*
24
* Shark is distributed in the hope that it will be useful,
25
* but WITHOUT ANY WARRANTY; without even the implied warranty of
26
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
27
* GNU Lesser General Public License for more details.
28
*
29
* You should have received a copy of the GNU Lesser General Public License
30
* along with Shark. If not, see <http://www.gnu.org/licenses/>.
31
*
32
*/
33
//===========================================================================
34
35
#include <iostream>
#include <memory>

#include <shark/LinAlg/Base.h>
#include <shark/Core/Random.h>
#include <shark/Models/Kernels/GaussianRbfKernel.h>
#include <shark/Algorithms/Trainers/EpsilonSvmTrainer.h>
#include <shark/Algorithms/Trainers/RegularizationNetworkTrainer.h>
#include <shark/ObjectiveFunctions/Loss/SquaredLoss.h>
#include <shark/Data/Dataset.h>
#include <shark/Data/DataDistribution.h>
using namespace
shark
;
46
47
48
int
main
()
49
{
50
// experiment settings
51
unsigned
int
ell = 200;
52
unsigned
int
tests = 10000;
53
double
C = 10.0;
54
double
gamma = 1.0 / C;
55
double
epsilon = 0.03;
56
57
GaussianRbfKernel<>
kernel(0.1);
58
SquaredLoss<>
loss;
59
60
// generate dataset
61
Wave
problem;
62
RegressionDataset
training = problem.
generateDataset
(ell);
63
RegressionDataset
test = problem.
generateDataset
(tests);
64
65
// define the machines
66
KernelExpansion<RealVector>
svm[2] = {
67
KernelExpansion<RealVector>
(),
68
KernelExpansion<RealVector>
()
69
};
70
71
// define the corresponding trainers
72
AbstractTrainer<KernelExpansion<RealVector>
>* trainer[2];
73
trainer[0] =
new
EpsilonSvmTrainer<RealVector>
(&kernel, C, epsilon);
74
trainer[1] =
new
RegularizationNetworkTrainer<RealVector>
(&kernel, gamma);
75
76
for
(
unsigned
int
i=0; i<2; i++)
77
{
78
std::cout<<
"METHOD"
<<(i+1) <<
" "
<< trainer[i]->name().c_str()<<std::endl;
79
std::cout<<
"training ..."
<<std::flush;
80
trainer[i]->
train
(svm[i], training);
81
std::cout<<
"done"
<<std::endl;
82
83
Data<RealVector>
output = svm[i](training.
inputs
());
84
double
train_error = loss.
eval
(training.
labels
(), output);
85
std::cout<<
"training error: "
<<train_error<<std::endl;
86
output = svm[i](test.
inputs
());
87
double
test_error = loss.
eval
(test.
labels
(), output);
88
std::cout<<
" test error: "
<<test_error<<
"\n\n"
;
89
}
90
91
delete
trainer[0];
92
delete
trainer[1];
93
}