AbstractObjectiveFunction.h
//===========================================================================
/*!
 *
 *
 * \brief AbstractObjectiveFunction
 * \file
 *
 *
 * \author T.Voss, T. Glasmachers, O.Krause
 * \date 2010-2011
 *
 *
 * \par Copyright 1995-2017 Shark Development Team
 *
 * <BR><HR>
 * This file is part of Shark.
 * <https://shark-ml.github.io/Shark/>
 *
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark. If not, see <http://www.gnu.org/licenses/>.
 *
 */
//===========================================================================
#ifndef SHARK_OBJECTIVEFUNCTIONS_ABSTRACTOBJECTIVEFUNCTION_H
#define SHARK_OBJECTIVEFUNCTIONS_ABSTRACTOBJECTIVEFUNCTION_H

#include <shark/Core/INameable.h>
#include <shark/Core/Exception.h>
#include <shark/Core/Flags.h>
#include <shark/Core/Random.h>
#include <shark/LinAlg/Base.h>
#include <shark/ObjectiveFunctions/AbstractConstraintHandler.h>

namespace shark {

/// \defgroup objfunctions Objective functions
/// \brief Objective functions for optimization.
///
/// In Shark, the learning problem is phrased as an objective function, which is then optimized using \ref optimizers .
/// This makes it possible to test and develop algorithms on \ref benchmarks independently of the problem to be solved.

/// \brief Super class of all objective functions for optimization and learning.
///
/// \par
/// The AbstractObjectiveFunction template class is the most general
/// interface for a function to be minimized by an
/// optimizer. It subsumes many more specialized classes,
/// ranging from classical test problems in evolutionary algorithms to
/// data-dependent objective functions in supervised learning. This
/// interface allows all general purpose optimization procedures to be
/// used as model training algorithms in a learning task, with
/// applications ranging from training of neural networks to direct
/// policy search in reinforcement learning.
///
/// AbstractObjectiveFunction offers a rich interface to support
/// different types of optimizers. Since not every objective function meets
/// every requirement, a flag system exists which tells the optimizer
/// which features are available. These are:
/// - HAS_VALUE: The function can be evaluated. If not set, eval returns a meaningless
///   value (for example std::numeric_limits<double>::quiet_nan()).
/// - HAS_FIRST_DERIVATIVE: evalDerivative can be called for the FirstOrderDerivative.
///   The derivative is defined and as exact as possible.
/// - HAS_SECOND_DERIVATIVE: evalDerivative can be called for the second derivative.
/// - IS_CONSTRAINED_FEATURE: The function has constraints and isFeasible might return false.
/// - CAN_PROPOSE_STARTING_POINT: The function can return a possibly randomized starting point.
/// - CAN_PROVIDE_CLOSEST_FEASIBLE: If the function is constrained, closestFeasible can be
///   called to construct a feasible point from an infeasible one.
///
/// In the single-objective case, the Shark convention is to return a double value, while in
/// multi-objective optimization a RealVector is returned with an entry for every objective.
/// Moreover, derivatives in the single-objective case are RealVectors, while they are
/// RealMatrix in the multi-objective case (i.e. the Jacobian of the function).
///
/// Calling the derivatives, proposeStartingPoint or closestFeasible when the corresponding
/// flags are not set will throw an exception.
/// The features can be queried using the method features(), as in the example below.
///
88/// \ingroup objfunctions
89/// \tparam PointType The search space the function is defined upon.
90/// \tparam ResultT The objective space the function is defined upon.
template <typename PointType, typename ResultT>
class AbstractObjectiveFunction : public INameable{
public:
    typedef PointType SearchPointType;
    typedef ResultT ResultType;

    // If the result type is not an arithmetic type, we assume it is a vector type -> multi-objective
    // optimization. In that case the first derivative is the Jacobian matrix of the function.
    typedef typename boost::mpl::if_<
        std::is_arithmetic<ResultT>,
        SearchPointType,
        RealMatrix
    >::type FirstOrderDerivative;

    struct SecondOrderDerivative {
        FirstOrderDerivative gradient;
        RealMatrix hessian;
    };

    /// \brief List of features that are supported by an implementation.
    enum Feature {
        HAS_VALUE = 1, ///< The function can be evaluated. If not set, eval returns a meaningless value (for example std::numeric_limits<double>::quiet_nan()).
        HAS_FIRST_DERIVATIVE = 2, ///< The method evalDerivative is implemented for the first derivative and returns a sensible value.
        HAS_SECOND_DERIVATIVE = 4, ///< The method evalDerivative is implemented for the second derivative and returns a sensible value.
        CAN_PROPOSE_STARTING_POINT = 8, ///< The function can propose a sensible starting point to search algorithms.
        IS_CONSTRAINED_FEATURE = 16, ///< The objective function is constrained.
        HAS_CONSTRAINT_HANDLER = 32, ///< The constraints are governed by a constraint handler which can be queried by getConstraintHandler().
        CAN_PROVIDE_CLOSEST_FEASIBLE = 64, ///< If the function is constrained, the method closestFeasible is implemented and returns a "repaired" solution.
        IS_THREAD_SAFE = 128, ///< eval and evalDerivative can be called in parallel from several threads.
        IS_NOISY = 256 ///< The function value is perturbed by some kind of noise.
    };

    /// This statement declares the member m_features. See Core/Flags.h for details.
    SHARK_FEATURE_INTERFACE;

    /// \brief Returns whether this function can calculate its function value.
    bool hasValue()const{
        return m_features & HAS_VALUE;
    }

    /// \brief Returns whether this function can calculate the first derivative.
    bool hasFirstDerivative()const{
        return m_features & HAS_FIRST_DERIVATIVE;
    }

    /// \brief Returns whether this function can calculate the second derivative.
    bool hasSecondDerivative()const{
        return m_features & HAS_SECOND_DERIVATIVE;
    }

    /// \brief Returns whether this function can propose a starting point.
    bool canProposeStartingPoint()const{
        return m_features & CAN_PROPOSE_STARTING_POINT;
    }

    /// \brief Returns whether this function is constrained.
    bool isConstrained()const{
        return m_features & IS_CONSTRAINED_FEATURE;
    }

    /// \brief Returns whether this function has a constraint handler.
    bool hasConstraintHandler()const{
        return m_features & HAS_CONSTRAINT_HANDLER;
    }

    /// \brief Returns whether this function can calculate the closest feasible point to an infeasible one.
    bool canProvideClosestFeasible()const{
        return m_features & CAN_PROVIDE_CLOSEST_FEASIBLE;
    }

    /// \brief Returns true when the function can be used in parallel threads.
    bool isThreadSafe()const{
        return m_features & IS_THREAD_SAFE;
    }

    /// \brief Returns true when the function value is perturbed by noise.
    bool isNoisy()const{
        return m_features & IS_NOISY;
    }

    /// \brief Default ctor.
    AbstractObjectiveFunction(): m_evaluationCounter(0), m_constraintHandler(nullptr), mep_rng(&random::globalRng){
        m_features |= HAS_VALUE;
    }
    /// \brief Virtual destructor
    virtual ~AbstractObjectiveFunction() {}

    virtual void init() {
        m_evaluationCounter = 0;
    }

    /// \brief Sets the Rng used by the objective function.
    ///
    /// Objective functions need random numbers for different tasks,
    /// e.g. to provide a first starting point or to choose
    /// mini-batches randomly in mini-batch learning.
    /// By default, shark::random::globalRng is used.
    /// In a multi-threaded environment this might not be safe as
    /// the Rng is not thread safe. In this case, every thread should use its
    /// own Rng, as sketched below.
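    ///
    /// \code
    /// // Illustrative sketch (not part of Shark): give a worker thread its own generator.
    /// // 'seed' is an assumed, thread-specific seed value.
    /// shark::random::rng_type threadRng(seed);
    /// f.setRng(&threadRng); // f must not be used after threadRng goes out of scope
    /// \endcode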
    void setRng(random::rng_type* rng){
        mep_rng = rng;
    }


    /// \brief Accesses the number of variables
    virtual std::size_t numberOfVariables() const=0;

    virtual bool hasScalableDimensionality()const{
        return false;
    }

    /// \brief Adjusts the number of variables if the function is scalable.
    /// \param [in] numberOfVariables The new dimension.
    virtual void setNumberOfVariables( std::size_t numberOfVariables ){
        throw SHARKEXCEPTION("dimensionality of function is not scalable");
    }

    virtual std::size_t numberOfObjectives() const{
        return 1;
    }
    virtual bool hasScalableObjectives()const{
        return false;
    }

    /// \brief Adjusts the number of objectives if the function is scalable.
    /// \param numberOfObjectives The new number of objectives to optimize for.
    virtual void setNumberOfObjectives( std::size_t numberOfObjectives ){
        throw SHARKEXCEPTION("number of objectives of the function is not scalable");
    }


    /// \brief Accesses the evaluation counter of the function.
    std::size_t evaluationCounter() const {
        return m_evaluationCounter;
    }

    /// \brief Returns the constraint handler of the function if it has one.
    ///
    /// If the function does not offer a constraint handler, an exception is thrown.
    AbstractConstraintHandler<SearchPointType> const& getConstraintHandler()const{
        SHARK_RUNTIME_CHECK(m_constraintHandler, "Objective Function does not have a constraint handler!");
        return *m_constraintHandler;
    }

    /// \brief Tests whether a point in the search space is feasible, i.e., whether the constraints are fulfilled.
    /// \param [in] input The point to be tested for feasibility.
    /// \return true if the point is feasible, false otherwise.
    virtual bool isFeasible( const SearchPointType & input) const {
        if(hasConstraintHandler()) return getConstraintHandler().isFeasible(input);
        SHARK_RUNTIME_CHECK(!isConstrained(), "Not overwritten, even though function is constrained");
        return true;
    }

    /// \brief If supported, the supplied point is repaired such that it satisfies all of the function's constraints.
    ///
    /// \param [in,out] input The point to be repaired.
    ///
    /// \throws FeatureNotAvailableException in the default implementation.
    virtual void closestFeasible( SearchPointType & input ) const {
        if(!isConstrained()) return;
        if(hasConstraintHandler()) return getConstraintHandler().closestFeasible(input);
        SHARK_FEATURE_EXCEPTION(CAN_PROVIDE_CLOSEST_FEASIBLE);
    }

    /// \brief Proposes a starting point in the feasible search space of the function.
    ///
    /// \return The generated starting point.
    /// \throws FeatureNotAvailableException in the default implementation
    /// and if a function does not support this feature.
    virtual SearchPointType proposeStartingPoint()const {
        if(hasConstraintHandler() && getConstraintHandler().canGenerateRandomPoint()){
            SearchPointType startingPoint;
            getConstraintHandler().generateRandomPoint(*mep_rng, startingPoint);
            return startingPoint;
        }
        else{
            SHARK_FEATURE_EXCEPTION(CAN_PROPOSE_STARTING_POINT);
        }
    }
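
    // Illustrative sketch (not part of Shark): a typical optimizer start-up sequence,
    // where 'userSuppliedPoint' is an assumed fallback provided by the caller:
    //
    //   f.init();
    //   SearchPointType start = f.canProposeStartingPoint()
    //       ? f.proposeStartingPoint()
    //       : userSuppliedPoint;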

    /// \brief Evaluates the objective function for the supplied argument.
    /// \param [in] input The argument for which the function shall be evaluated.
    /// \return The result of evaluating the function for the supplied argument.
    /// \throws FeatureNotAvailableException in the default implementation
    /// and if a function does not support this feature.
    virtual ResultType eval( SearchPointType const& input )const {
        SHARK_FEATURE_EXCEPTION(HAS_VALUE);
    }

    /// \brief Evaluates the function. Useful together with STL algorithms like std::transform; see the example below.
    ResultType operator()( SearchPointType const& input ) const {
        return eval(input);
    }
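
    // Illustrative sketch (not part of Shark): evaluating a batch of points with std::transform.
    // 'points' is an assumed std::vector<SearchPointType> and 'values' a std::vector<ResultType>
    // of the same size; 'f' is a reference to this objective function.
    //
    //   std::transform(
    //       points.begin(), points.end(), values.begin(),
    //       [&f](SearchPointType const& x){ return f(x); }
    //   );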

    /// \brief Evaluates the objective function and calculates its gradient.
    /// \param [in] input The argument to eval the function for.
    /// \param [out] derivative The derivative is placed here.
    /// \return The result of evaluating the function for the supplied argument.
    /// \throws FeatureNotAvailableException in the default implementation
    /// and if a function does not support this feature.
    virtual ResultType evalDerivative( SearchPointType const& input, FirstOrderDerivative & derivative )const {
        SHARK_FEATURE_EXCEPTION(HAS_FIRST_DERIVATIVE);
    }

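    // Illustrative sketch (not part of Shark): one step of plain gradient descent on a
    // single-objective function f; 'x' (a RealVector) and 'learningRate' are assumed to exist.
    //
    //   SingleObjectiveFunction::FirstOrderDerivative gradient;
    //   double value = f.evalDerivative(x, gradient);
    //   x -= learningRate * gradient;
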
    /// \brief Evaluates the objective function and calculates its gradient and Hessian.
    /// \param [in] input The argument to eval the function for.
    /// \param [out] derivative The derivative and the Hessian are placed here.
    /// \return The result of evaluating the function for the supplied argument.
    /// \throws FeatureNotAvailableException in the default implementation
    /// and if a function does not support this feature.
    virtual ResultType evalDerivative( SearchPointType const& input, SecondOrderDerivative & derivative )const {
        SHARK_FEATURE_EXCEPTION(HAS_SECOND_DERIVATIVE);
    }

protected:
    mutable std::size_t m_evaluationCounter; ///< Evaluation counter, default value: 0.
    AbstractConstraintHandler<SearchPointType> const* m_constraintHandler;
    random::rng_type* mep_rng;

    /// \brief Helper function which is called to announce the presence of a constraint handler.
    ///
    /// This function queries the capabilities of the handler and sets up the flags accordingly.
    void announceConstraintHandler(AbstractConstraintHandler<SearchPointType> const* handler){
        SHARK_RUNTIME_CHECK(handler, "[AbstractObjectiveFunction::AnnounceConstraintHandler] Handler is not allowed to be NULL");
        m_constraintHandler = handler;
        m_features |= IS_CONSTRAINED_FEATURE;
        m_features |= HAS_CONSTRAINT_HANDLER;
        if(handler->canGenerateRandomPoint())
            m_features |= CAN_PROPOSE_STARTING_POINT;
        if(handler->canProvideClosestFeasible())
            m_features |= CAN_PROVIDE_CLOSEST_FEASIBLE;
    }
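
    // Illustrative sketch (not part of Shark): a constrained subclass typically stores a handler
    // and announces it in its constructor; 'm_handler' is an assumed member, e.g. of type
    // BoxConstraintHandler<SearchPointType>:
    //
    //   announceConstraintHandler(&m_handler);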
};

typedef AbstractObjectiveFunction< RealVector, double > SingleObjectiveFunction;
typedef AbstractObjectiveFunction< RealVector, RealVector > MultiObjectiveFunction;

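// Illustrative sketch (not part of Shark): a minimal unconstrained single-objective function
// implementing this interface for f(x) = ||x||^2. It announces its capabilities via the flag
// system and uses the protected members of the base class.
class ExampleSphere : public SingleObjectiveFunction{
public:
    ExampleSphere(std::size_t dimensions = 10): m_dimensions(dimensions){
        m_features |= HAS_FIRST_DERIVATIVE;
        m_features |= CAN_PROPOSE_STARTING_POINT;
    }

    std::string name() const{ return "ExampleSphere"; }

    std::size_t numberOfVariables() const{
        return m_dimensions;
    }

    SearchPointType proposeStartingPoint() const{
        SearchPointType x(m_dimensions);
        for(std::size_t i = 0; i != m_dimensions; ++i)
            x(i) = random::uni(*mep_rng, -1.0, 1.0);
        return x;
    }

    double eval(SearchPointType const& x) const{
        ++m_evaluationCounter; // keep the evaluation counter of the base class up to date
        return norm_sqr(x);
    }

    double evalDerivative(SearchPointType const& x, FirstOrderDerivative& derivative) const{
        ++m_evaluationCounter;
        derivative = 2.0 * x; // gradient of ||x||^2
        return norm_sqr(x);
    }
private:
    std::size_t m_dimensions;
};
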
}

#endif