Shark machine learning library
include/shark/Algorithms/GradientDescent/CG.h
//===========================================================================
/*!
 *
 * \brief CG
 *
 * Conjugate-gradient method for unconstrained optimization.
 *
 * \author O. Krause
 * \date 2010
 *
 * \par Copyright 1995-2017 Shark Development Team
 *
 * <BR><HR>
 * This file is part of Shark.
 * <https://shark-ml.github.io/Shark/>
 *
 * Shark is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Shark is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Shark. If not, see <http://www.gnu.org/licenses/>.
 *
 */
//===========================================================================
#ifndef SHARK_ML_OPTIMIZER_CG_H
#define SHARK_ML_OPTIMIZER_CG_H

#include <shark/Algorithms/GradientDescent/AbstractLineSearchOptimizer.h>

namespace shark{
/// \brief Conjugate-gradient method for unconstrained optimization
///
/// The next CG search direction p_{k+1} is computed from the current gradient g_k as
/// \f$ p_{k+1} = \beta p_k - g_k \f$,
/// where \f$ \beta \f$ can be computed by different formulas.
/// A well-known choice is the Fletcher-Reeves method:
/// \f$ \beta = ||g_k||^2 / ||g_{k-1}||^2 \f$.
/// We use
/// \f$ \beta = ||g_k||^2 / <p_k, g_k - g_{k-1}> \f$,
/// which is formula 5.49 in Nocedal, Wright - Numerical Optimization.
/// This formula has better numerical properties than Fletcher-Reeves for non-quadratic functions
/// while still ensuring a descent direction.
///
/// We implement restarting to ensure quadratic convergence near the optimum as well as numerical stability.
/// \ingroup gradientopt
template<class SearchPointType = RealVector>
class CG : public AbstractLineSearchOptimizer<SearchPointType>{
public:
	typedef typename AbstractLineSearchOptimizer<SearchPointType>::ObjectiveFunctionType ObjectiveFunctionType;
protected:
	void initModel();
	void computeSearchDirection(ObjectiveFunctionType const& objectiveFunction);
public:
	std::string name() const
	{ return "CG"; }

	//from ISerializable
	void read( InArchive & archive );
	void write( OutArchive & archive ) const;
protected:
	unsigned m_count;
};

//implementation is included in the library
extern template class CG<RealVector>;
extern template class CG<FloatVector>;
#ifdef SHARK_USE_OPENCL
extern template class CG<RealGPUVector>;
extern template class CG<FloatGPUVector>;
#endif
}
#endif
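
The header above only declares the CG interface; the implementation is compiled into the library, as the extern template declarations indicate. As a rough, hedged usage sketch (not taken from this file), the optimizer is driven through Shark's usual init/step/solution protocol inherited from its single-objective optimizer base class. The Rosenbrock benchmark, its header path, the shark::benchmarks namespace, the objective.init() call, and the iteration budget below are assumptions based on the Shark tutorials and may differ between Shark versions.

// Minimal usage sketch (assumptions: Rosenbrock benchmark header and namespace,
// init/step/solution optimizer interface; details may differ by Shark version).
#include <shark/Algorithms/GradientDescent/CG.h>
#include <shark/ObjectiveFunctions/Benchmarks/Rosenbrock.h>
#include <iostream>

int main(){
	shark::benchmarks::Rosenbrock objective(10); // 10-dimensional test function
	objective.init();                            // prepare the objective (e.g. its RNG)

	shark::CG<> cg;                              // SearchPointType defaults to RealVector
	cg.init(objective);                          // starting point proposed by the objective

	for(std::size_t i = 0; i != 100; ++i)        // fixed iteration budget for this sketch
		cg.step(objective);

	std::cout << "best value found: " << cg.solution().value << std::endl;
	return 0;
}

Under these assumptions, each step() performs one line search along the current conjugate search direction, and solution() exposes the best point and objective value found so far.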