maintest.cpp
#include <vector>
#include <iostream>
#include <stdexcept>
#include "opt.h"
// Single-variable f(x) = x^2
double x2(double x)
{
    return x * x;
}
// Multi-variate f(x) = |x|^2
double x2_Xd(std::vector<double> x)
{
    size_t N = x.size();
    double ans = 0.0;
    for (size_t i = 0; i < N; i++)
        ans += x[i] * x[i];
    return ans;
}
// Rosenbrock function: f(x) = (1 - x0)^2 + 100*(x1 - x0^2)^2; global minimum f = 0 at (1, 1)
double rosenbrock(std::vector<double> x)
{
    if (x.size() != 2)
        throw std::runtime_error("rosenbrock function only takes 2D vectors as input");
    return (1 - x[0])*(1 - x[0]) + 100.0*(x[1] - x[0] * x[0])*(x[1] - x[0] * x[0]);
}
int main()
{
    // Single-variable problem parameters:
    double x0 = 8.;
    double x0_init = x0;

    /*** Call the single-variable gradient descent optimizer on the x^2 function: ***/
    std::cout << "Solution for min val of f(x) = x^2 with initial guess x0 = " << x0_init << ":" << std::endl;
    double gdval = opt::gradientDescent(x2, x0);
    std::cout << std::endl;

    // Multi-variable problem parameters:
    std::vector<double> X0 = { 4., 4. };
    std::vector<double> X0_init = X0;

    /*** Call the multi-variable gradient descent optimizer on the |x|^2 function: ***/
    std::cout << "Solution for min val of f(x) = |x|^2 with initial guess X0 = [" << X0_init[0] << ", " << X0_init[1] << "]:" << std::endl;
    gdval = opt::gradientDescent_Xd(x2_Xd, X0);
    std::cout << std::endl;

    // Re-initialize parameters for the Rosenbrock optimization problem
    X0 = { -0.5, 0.5 };
    X0_init = X0;

    /*** Call the multi-variable gradient descent optimizer on the Rosenbrock function: ***/
    std::cout << "Solution for min val of rosenbrock(x) with initial guess X0 = [" << X0_init[0] << ", " << X0_init[1] << "]:" << std::endl;
    gdval = opt::gradientDescent_Xd(rosenbrock, X0, 1.e-10);    // third argument presumed to be a tighter convergence tolerance
    std::cout << std::endl;

    // Re-initialize parameters for the Rosenbrock problem with the nonlinear conjugate gradient method
    X0 = { -0.5, 0.5 };
    X0_init = X0;

    /*** Call the nonlinear conjugate gradient optimizer on the Rosenbrock function: ***/
    std::cout << "Solution for min val of rosenbrock(x) with initial guess X0 = [" << X0_init[0] << ", " << X0_init[1] << "]:" << std::endl;
    gdval = opt::ncgd_Xd(rosenbrock, X0);

    std::cin.get();
    return 0;
}