ZeroOneLoss.h
//===========================================================================
/*!
 *
 *
 * \brief Error measure for classification tasks, typically used for evaluation of results
 *
 *
 *
 * \author T. Glasmachers
 * \date 2010-2011
 *
 *
 * \par Copyright 1995-2017 Shark Development Team
 *
 * <BR><HR>
 * This file is part of Shark.
 * <https://shark-ml.github.io/Shark/>
 *
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark. If not, see <http://www.gnu.org/licenses/>.
 *
 */

#ifndef SHARK_OBJECTIVEFUNCTIONS_LOSS_ZEROONELOSS_H
#define SHARK_OBJECTIVEFUNCTIONS_LOSS_ZEROONELOSS_H

#include <shark/ObjectiveFunctions/Loss/AbstractLoss.h>

namespace shark {

///
/// \brief 0-1-loss for classification.
///
/// The ZeroOneLoss requires the existence of the comparison
/// operator == for its LabelType template parameter. The
/// loss function returns zero if the prediction exactly
/// matches the label, and one otherwise.
/// \ingroup lossfunctions
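///
/// \par Example
/// A minimal usage sketch (not part of the original header), assuming the
/// default template arguments and Shark's UIntVector batch type for
/// unsigned-integer labels; variable names are illustrative only:
/// \code
/// ZeroOneLoss<unsigned int> loss;
/// UIntVector labels(3);       // ground-truth class indices
/// labels(0) = 0; labels(1) = 1; labels(2) = 1;
/// UIntVector predictions(3);  // predicted class indices
/// predictions(0) = 0; predictions(1) = 0; predictions(2) = 1;
/// double errorSum = loss.eval(labels, predictions); // one mismatch -> 1.0
/// double errorRate = errorSum / labels.size();      // -> 1.0 / 3.0
/// \endcode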
template<class LabelType = unsigned int, class OutputType = LabelType>
class ZeroOneLoss : public AbstractLoss<LabelType, LabelType>
{
public:
	typedef AbstractLoss<LabelType, LabelType> base_type;
	typedef typename base_type::BatchLabelType BatchLabelType;
	typedef typename base_type::BatchOutputType BatchOutputType;

	/// constructor
	ZeroOneLoss()
	{ }


	/// \brief From INameable: return the class name.
	std::string name() const
	{ return "ZeroOneLoss"; }

	using base_type::eval;

	///\brief Return zero for each prediction that matches its label and one otherwise, summed over the batch.
	double eval(BatchLabelType const& labels, BatchOutputType const& predictions) const{
		std::size_t numInputs = labels.size();
		SIZE_CHECK(numInputs == predictions.size());

		double error = 0;
		for(std::size_t i = 0; i != numInputs; ++i){
			error += (predictions(i) != labels(i)) ? 1.0 : 0.0;
		}
		return error;
	}
};


/// \brief 0-1-loss for classification, specialized for real-valued prediction vectors.
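///
/// The component with the highest prediction value determines the predicted
/// class; one-dimensional predictions are thresholded instead.
///
/// \par Example
/// A minimal usage sketch (not part of the original header), assuming
/// Float = double so that prediction batches are RealMatrix rows and labels
/// form a UIntVector; names and values are illustrative only:
/// \code
/// ZeroOneLoss<unsigned int, RealVector> loss;  // multi-class, arg-max rule
/// UIntVector labels(2);  labels(0) = 1; labels(1) = 2;
/// RealMatrix predictions(2, 3);                // one row per sample
/// predictions(0,0) = 0.1; predictions(0,1) = 0.7; predictions(0,2) = 0.2; // arg max = 1, correct
/// predictions(1,0) = 0.6; predictions(1,1) = 0.3; predictions(1,2) = 0.1; // arg max = 0, wrong
/// double errorSum = loss.eval(labels, predictions); // -> 1.0
/// \endcode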
template <class Float>
class ZeroOneLoss<unsigned int, blas::vector<Float> > : public AbstractLoss<unsigned int, blas::vector<Float> >
{
public:
	typedef AbstractLoss<unsigned int, blas::vector<Float> > base_type;
	typedef typename base_type::BatchLabelType BatchLabelType;
	typedef typename base_type::BatchOutputType BatchOutputType;

	/// constructor
	///
	/// \param threshold in the case dim(predictions) == 1, predictions strictly larger than this parameter are regarded as belonging to the positive class
	ZeroOneLoss(double threshold = 0.0)
	{
		m_threshold = threshold;
	}

	/// \brief From INameable: return the class name.
	std::string name() const
	{ return "ZeroOneLoss"; }


	// annoyingness of C++ templates
	using base_type::eval;

	/// Return zero for every sample whose label equals arg max { predictions_i }
	/// and one otherwise, summed over the batch; the index i runs over the
	/// components of the prediction vector. A special case for
	/// dim(predictions) == 1 computes the predicted label by thresholding:
	/// a prediction strictly larger than the threshold (zero by default)
	/// encodes class 1, any other value encodes class 0.
	double eval(BatchLabelType const& labels, BatchOutputType const& predictions) const{
		std::size_t numInputs = labels.size();
		SIZE_CHECK(numInputs == predictions.size1());

		double error = 0;
		for(std::size_t i = 0; i != numInputs; ++i){
			error += evalSingle(labels(i), row(predictions, i));
		}
		return error;
	}
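
	// Worked illustration (not part of the original header) of the binary
	// special case, assuming Float = double and the default threshold of 0;
	// names and values are purely illustrative:
	//
	//   ZeroOneLoss<unsigned int, RealVector> loss;
	//   UIntVector labels(2);  labels(0) = 1; labels(1) = 0;
	//   RealMatrix predictions(2, 1);  // one-dimensional predictions
	//   predictions(0,0) =  0.3;       // > 0  -> predicted class 1, matches label 1
	//   predictions(1,0) = -0.3;       // <= 0 -> predicted class 0, matches label 0
	//   // loss.eval(labels, predictions) == 0.0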

	/// \brief Weighted version of the loss: each element of the data set is weighted individually.
	double eval(Data<unsigned int> const& targets, Data<blas::vector<Float> > const& predictions, RealVector const& weights) const{
		SIZE_CHECK(predictions.numberOfElements() == weights.size());
		SIZE_CHECK(targets.numberOfElements() == weights.size());
		SIZE_CHECK(predictions.numberOfBatches() == targets.numberOfBatches());
		double error = 0;
		std::size_t elem = 0; // running element index into the per-element weight vector
		for(std::size_t i = 0; i != predictions.numberOfBatches(); ++i){
			for(std::size_t j = 0; j != targets.batch(i).size(); ++j, ++elem){
				error += weights(elem) * evalSingle(targets.batch(i)(j), row(predictions.batch(i), j));
			}
		}
		return error / weights.size();
	}
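
	// Usage sketch (not part of the original header), assuming Float = double
	// and one weight per element; the data containers are illustrative only:
	//
	//   Data<unsigned int> targets     = ...;  // n elements in total
	//   Data<RealVector>   predictions = ...;  // same batch structure as targets
	//   RealVector weights(n, 1.0);            // uniform per-element weights
	//   double weightedError = loss.eval(targets, predictions, weights);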

private:
	template<class VectorType>
	double evalSingle(unsigned int label, VectorType const& predictions) const{
		std::size_t size = predictions.size();
		if (size == 1){
			// binary case, single real-valued prediction
			unsigned int t = (predictions(0) > m_threshold);
			if (t == label) return 0.0;
			else return 1.0;
		}
		else{
			// multi-class case, one prediction component per class
			RANGE_CHECK(label < size);
			double p = predictions(label);
			for (std::size_t i = 0; i < size; i++)
			{
				if (i == label) continue;
				if (predictions(i) >= p) return 1.0;
			}
			return 0.0;
		}
	}

	double m_threshold; ///< in the case dim(predictions) == 1, predictions strictly larger than this parameter are regarded as belonging to the positive class
};


}
#endif