BinaryRBM.cpp
//used for training the RBM
#include <shark/Unsupervised/RBM/BinaryRBM.h>
#include <shark/Algorithms/GradientDescent/SteepestDescent.h>

//the problem
#include <shark/Unsupervised/RBM/Problems/BarsAndStripes.h>

//for evaluation
#include <shark/Unsupervised/RBM/analytics.h>
#include <iostream>

using namespace shark;
using namespace std;

int main(){

    //we first create the problem. in this tutorial, we use BarsAndStripes
    BarsAndStripes problem;
    UnlabeledData<RealVector> data = problem.data();
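    //BarsAndStripes is the classic small benchmark of 4x4 binary images showing
    //either horizontal bars or vertical stripes; its 16 visible inputs are small
    //enough to keep the exact evaluation at the end of this example tractable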

    //some constants needed for training
    size_t numberOfHidden = 32;//number of hidden units of the rbm
    size_t numberOfVisible = problem.inputDimension();//number of visible units, given by the input dimension

    //create rbm with simple binary units, using the global random number generator
    BinaryRBM rbm(random::globalRng);
    rbm.setStructure(numberOfVisible,numberOfHidden);
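    //setStructure allocates the parameters of the model: one weight per
    //visible-hidden pair plus a bias term for every unit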

    //create derivative to optimize the rbm
    //we want a simple vanilla CD-1
    BinaryCD cd(&rbm);
    cd.setK(1);
    cd.setData(data);
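    //contrastive divergence (CD-k) approximates the log-likelihood gradient by
    //running k steps of Gibbs sampling starting from the training points;
    //setK(1) selects the classic vanilla CD-1 update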

    //generate optimizer
    SteepestDescent<> optimizer;
    optimizer.setMomentum(0);
    optimizer.setLearningRate(0.1);
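    //plain gradient steps without momentum; the learning rate of 0.1 scales
    //every CD gradient estimate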

    //now we train the rbm and evaluate the mean negative log-likelihood at the end
    unsigned int numIterations = 1000;//gradient steps per trial
    unsigned int numTrials = 10;//number of independent training trials
    double meanResult = 0;
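    //each trial restarts from fresh uniform random weights and resets the
    //gradient object and the optimizer, so the mean below averages the result
    //over random initializations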
    for(unsigned int trial = 0; trial != numTrials; ++trial) {
        initRandomUniform(rbm, -0.1,0.1);
        cd.init();
        optimizer.init(cd);

        for(unsigned int iteration = 0; iteration != numIterations; ++iteration) {
            optimizer.step(cd);
        }
        //evaluate the exact negative log-likelihood after training. computing the
        //partition function sums over all states of one layer, so this is only
        //feasible for small models!
        double likelihood = negativeLogLikelihood(rbm,data);
        std::cout<<trial<<" "<<likelihood<<std::endl;
        meanResult += likelihood;
    }
    meanResult /= numTrials;

    //print the mean performance
    cout << "RESULTS: " << std::endl;
    cout << "======== " << std::endl;
    cout << "mean negative log likelihood: " << meanResult << std::endl;

}