Shark machine learning library
include/shark/ObjectiveFunctions/Regularizer.h
//===========================================================================
/*!
 *
 * \brief       Regularizer
 *
 * \author      T. Glasmachers
 * \date        2010-2011
 *
 * \par Copyright 1995-2017 Shark Development Team
 *
 * <BR><HR>
 * This file is part of Shark.
 * <https://shark-ml.github.io/Shark/>
 *
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark. If not, see <http://www.gnu.org/licenses/>.
 *
 */
//===========================================================================
#ifndef SHARK_OBJECTIVEFUNCTIONS_REGULARIZER_H
#define SHARK_OBJECTIVEFUNCTIONS_REGULARIZER_H

#include <shark/ObjectiveFunctions/AbstractObjectiveFunction.h>

namespace shark {
///
/// \brief One-norm of the input as an objective function
///
/// \par
/// The OneNormRegularizer is intended to be used together with other
/// objective functions within a CombinedObjectiveFunction, in order to
/// obtain a smoother and sparser solution.
/// \ingroup objfunctions
template<class SearchPointType = RealVector>
class OneNormRegularizer : public AbstractObjectiveFunction<SearchPointType, double>
{
public:

    /// Constructor
    OneNormRegularizer(std::size_t numVariables = 0):m_numberOfVariables(numVariables){
        this->m_features |= this->HAS_FIRST_DERIVATIVE;
    }

    /// \brief From INameable: return the class name.
    std::string name() const
    { return "OneNormRegularizer"; }

    std::size_t numberOfVariables()const{
        return m_numberOfVariables;
    }

    bool hasScalableDimensionality()const{
        return true;
    }

    void setNumberOfVariables(std::size_t numberOfVariables){
        m_numberOfVariables = numberOfVariables;
    }

    void setMask(SearchPointType const& mask){
        m_mask = mask;
    }
    SearchPointType const& mask()const{
        return m_mask;
    }
    /// Evaluates the objective function.
    double eval(SearchPointType const& input) const{
        if(m_mask.empty()){
            return norm_1(input);
        }
        else{
            return norm_1(input * m_mask);
        }
    }

    /// Evaluates the objective function
    /// and calculates its gradient.
    double evalDerivative(SearchPointType const& input, SearchPointType& derivative) const{
        SIZE_CHECK(m_mask.empty() || m_mask.size() == input.size());
        std::size_t ic = input.size();
        derivative.resize(ic);

        for(std::size_t i = 0; i != ic; i++){
            derivative(i) = boost::math::sign(input(i));
        }
        if(!m_mask.empty()){
            derivative *= m_mask;
        }
        return eval(input);
    }
private:
    SearchPointType m_mask;
    std::size_t m_numberOfVariables;
};
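The class above only defines the penalty term itself. The short standalone sketch below is added here for illustration and is not part of Regularizer.h; it assumes the Shark headers and their Boost dependency are available on the include path, and simply shows what eval and evalDerivative return for a small vector.

#include <shark/ObjectiveFunctions/Regularizer.h>
#include <iostream>

int main(){
    // A one-norm penalty on a 3-dimensional search point.
    shark::OneNormRegularizer<> regularizer(3);

    shark::RealVector point(3);
    point(0) = 1.5; point(1) = -2.0; point(2) = 0.5;

    // eval computes |1.5| + |-2.0| + |0.5| = 4.0
    std::cout << "one-norm penalty: " << regularizer.eval(point) << "\n";

    // evalDerivative fills the sign vector (1, -1, 1) and returns the same value.
    shark::RealVector gradient;
    double value = regularizer.evalDerivative(point, gradient);
    std::cout << "value: " << value << ", subgradient:";
    for(std::size_t i = 0; i != gradient.size(); ++i) std::cout << " " << gradient(i);
    std::cout << "\n";
}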
///
/// \brief Two-norm of the input as an objective function
///
/// \par
/// The TwoNormRegularizer is intended to be used together with other
/// objective functions within a CombinedObjectiveFunction, in order to
/// obtain a smoother solution.
/// \ingroup objfunctions
template<class SearchPointType = RealVector>
class TwoNormRegularizer : public AbstractObjectiveFunction<SearchPointType, double>
{
public:
    typedef AbstractObjectiveFunction<SearchPointType, double> base_type;

    /// Constructor
    TwoNormRegularizer(std::size_t numVariables = 0):m_numberOfVariables(numVariables){
        this->m_features |= base_type::HAS_FIRST_DERIVATIVE;
    }

    /// \brief From INameable: return the class name.
    std::string name() const
    { return "TwoNormRegularizer"; }

    std::size_t numberOfVariables()const{
        return m_numberOfVariables;
    }

    bool hasScalableDimensionality()const{
        return true;
    }

    void setNumberOfVariables(std::size_t numberOfVariables){
        m_numberOfVariables = numberOfVariables;
    }

    void setMask(SearchPointType const& mask){
        m_mask = mask;
    }
    SearchPointType const& mask()const{
        return m_mask;
    }

    /// Evaluates the objective function.
    double eval(SearchPointType const& input) const{
        if(m_mask.empty()){
            return 0.5*norm_sqr(input);
        }
        else{
            return 0.5 * sum(m_mask*sqr(input));
        }
    }

    /// Evaluates the objective function
    /// and calculates its gradient.
    double evalDerivative(SearchPointType const& input, SearchPointType& derivative) const{
        if(m_mask.empty()){
            derivative = input;
        }
        else{
            derivative = m_mask * input;
        }
        return eval(input);
    }
private:
    std::size_t m_numberOfVariables;
    SearchPointType m_mask;
};
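As with the one-norm case, the brief standalone sketch below is an illustration added here rather than part of the header, under the same assumption that the Shark headers are on the include path. It shows the quadratic penalty and the effect of a mask that restricts regularization to selected coordinates.

#include <shark/ObjectiveFunctions/Regularizer.h>
#include <iostream>

int main(){
    // A two-norm penalty on a 2-dimensional search point.
    shark::TwoNormRegularizer<> regularizer(2);

    shark::RealVector point(2);
    point(0) = 3.0; point(1) = 4.0;

    // Without a mask, eval computes 0.5 * (3^2 + 4^2) = 12.5
    std::cout << "penalty: " << regularizer.eval(point) << "\n";

    // With a mask, only the selected coordinates are penalized:
    // 0.5 * (0 * 3^2 + 1 * 4^2) = 8
    shark::RealVector mask(2);
    mask(0) = 0.0; mask(1) = 1.0;
    regularizer.setMask(mask);
    std::cout << "masked penalty: " << regularizer.eval(point) << "\n";

    // The gradient of the masked penalty is mask * input = (0, 4).
    shark::RealVector gradient;
    regularizer.evalDerivative(point, gradient);
    std::cout << "gradient: (" << gradient(0) << ", " << gradient(1) << ")\n";
}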
}
#endif // SHARK_OBJECTIVEFUNCTIONS_REGULARIZER_H