LooError.h
/*!
 *
 *
 * \brief Leave-one-out error
 *
 *
 *
 * \author T.Glasmachers
 * \date 2011
 *
 *
 * \par Copyright 1995-2017 Shark Development Team
 *
 * <BR><HR>
 * This file is part of Shark.
 * <https://shark-ml.github.io/Shark/>
 *
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark. If not, see <http://www.gnu.org/licenses/>.
 *
 */
#ifndef SHARK_OBJECTIVEFUNCTIONS_LOOERROR_H
#define SHARK_OBJECTIVEFUNCTIONS_LOOERROR_H


#include <shark/ObjectiveFunctions/AbstractObjectiveFunction.h>
#include <shark/Models/AbstractModel.h>
#include <shark/ObjectiveFunctions/Loss/AbstractLoss.h>
#include <shark/Algorithms/Trainers/AbstractTrainer.h>
#include <shark/Data/DataView.h>
#include <boost/range/algorithm_ext/iota.hpp>


namespace shark {


///
/// \brief Leave-one-out error objective function.
///
/// \par
/// The leave-one-out measure is the average prediction performance of
/// a learning machine on a dataset, where each sample is predicted by
/// a machine trained on all but the sample to be predicted. This is an
/// extreme form of cross-validation, with a fold size of one.
///
/// \par
/// In general the leave-one-out error is costly to compute, since it
/// requires training of a large number of learning machines. However,
/// certain machines allow for a more efficient implementation. Refer
/// to LooErrorCSvm for an example.
/// \ingroup objfunctions
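///
/// \par
/// A minimal usage sketch, assuming the Shark classes GaussianRbfKernel,
/// KernelClassifier, CSvmTrainer and ZeroOneLoss and a classification
/// dataset named data (none of which are defined in this header); the
/// trainer doubles as the meta object so that its regularization
/// parameter can later be tuned through eval(parameters):
/// \code
/// GaussianRbfKernel<> kernel(0.5);                       // kernel with an assumed bandwidth
/// KernelClassifier<RealVector> model;                    // re-trained on every leave-one-out subset
/// CSvmTrainer<RealVector> trainer(&kernel, 1.0, true);   // C-SVM trainer, C = 1, with offset
/// ZeroOneLoss<unsigned int> loss;                        // classification error as validation loss
/// LooError<KernelClassifier<RealVector> > loo(data, &model, &trainer, &loss, &trainer);
/// double error = loo.eval();                             // average leave-one-out error
/// \endcode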
template<class ModelTypeT, class LabelType = typename ModelTypeT::OutputType>
class LooError : public AbstractObjectiveFunction< RealVector, double >
{
public:
	typedef ModelTypeT ModelType;
	typedef typename ModelType::InputType InputType;
	typedef typename ModelType::OutputType OutputType;
	typedef LabeledData<InputType, LabelType> DatasetType;
	typedef AbstractTrainer<ModelType, LabelType> TrainerType;
	typedef AbstractLoss<LabelType, OutputType> LossType;

	///
	/// \brief Constructor.
	///
	/// \param dataset Full data set for leave-one-out.
	/// \param model Model built on subsets of the data.
	/// \param trainer Trainer for learning on each subset.
	/// \param loss Loss function for judging the validation output.
	/// \param meta Meta object with parameters that influence the process, typically a trainer.
	///
	LooError(
		DatasetType const& dataset,
		ModelType* model,
		TrainerType* trainer,
		LossType* loss,
		IParameterizable<>* meta = NULL)
	: m_dataset(dataset)
	, mep_meta(meta)
	, mep_model(model)
	, mep_trainer(trainer)
	, mep_loss(loss)
	{
		m_features |= HAS_VALUE;
	}


	/// \brief From INameable: return the class name.
	std::string name() const
	{
		return "LooError<"
			+ mep_model->name() + ","
			+ mep_trainer->name() + ","
			+ mep_loss->name() + ">";
	}

	std::size_t numberOfVariables()const{
		return mep_meta->numberOfParameters();
	}

	/// Evaluate the leave-one-out error:
	/// train sub-models, evaluate objective,
	/// return the average.
	double eval() const {
		this->m_evaluationCounter++;

		std::size_t ell = m_dataset.size();
		double sum = 0.0;
		std::vector<std::size_t> indices(ell - 1);
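		// start the training index set as {1, ..., ell-1}: every sample
		// except sample 0, which is the first one to be left out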
		boost::iota(indices,0);
		for (std::size_t i=0; i<ell-1; i++) indices[i] = i+1;
		for (std::size_t i=0; i<ell; i++)
		{
			DatasetType train = toDataset(subset(m_dataset,indices));
			mep_trainer->train(*mep_model, train);
			OutputType validation = (*mep_model)(m_dataset[i].input);
			sum += mep_loss->eval(m_dataset[i].label, validation);
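			// re-insert sample i into the training indices (replacing i+1),
			// so that the next iteration leaves out sample i+1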
			if (i < ell - 1) indices[i] = i;
		}
		return sum / ell;
	}

	/// Evaluate the leave-one-out error for the given
	/// parameters passed to the meta object (typically
	/// these parameters need to be optimized in a model
	/// selection procedure).
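	///
	/// \par
	/// A minimal model-selection sketch, assuming a fully constructed LooError
	/// instance named loo whose meta object exposes exactly one parameter (for
	/// example a regularization constant); the names loo, best and bestError
	/// are illustrative only:
	/// \code
	/// RealVector best(1);
	/// double bestError = 1e300;
	/// for (double c = 0.125; c <= 8.0; c *= 2.0) {
	///     RealVector params(1);
	///     params(0) = c;                 // candidate value of the single parameter
	///     double e = loo.eval(params);   // leave-one-out error under this candidate
	///     if (e < bestError) { bestError = e; best = params; }
	/// }
	/// \endcode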
	double eval(const RealVector& parameters) const {
		SHARK_ASSERT(mep_meta != NULL);
		mep_meta->setParameterVector(parameters);
		return eval();
	}
protected:
	DataView<DatasetType const> m_dataset;
	IParameterizable<>* mep_meta;
	ModelType* mep_model;
	TrainerType* mep_trainer;
	LossType* mep_loss;
};


}
#endif