diff --git a/Cargo.toml b/Cargo.toml
index cfb85b940..955360261 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -8,3 +8,6 @@ members = [
 exclude = [
     "media/book/tests",
 ]
+
+[profile.bench]
+debug = true
diff --git a/argmin/Cargo.toml b/argmin/Cargo.toml
index addfef2ec..ed66f7803 100644
--- a/argmin/Cargo.toml
+++ b/argmin/Cargo.toml
@@ -45,6 +45,7 @@ ndarray = { version = "0.15", features = ["serde-1"] }
 ndarray-linalg = { version = "0.16", features = ["netlib"] }
 argmin-math = { path = "../argmin-math" }
 serde = { version = "1.0", features = ["derive", "rc"] }
+criterion = { version = "0.4", features = ["html_reports"] }
 
 [features]
 default = ["slog-logger", "serde1"]
@@ -184,3 +185,128 @@ required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
 [[example]]
 name = "writers"
 required-features = ["argmin-math/ndarray_latest-serde", "slog-logger", "serde1"]
+
+[[bench]]
+name = "backtracking"
+harness = false
+required-features = []
+
+[[bench]]
+name = "bfgs"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "brentroot"
+harness = false
+required-features = []
+
+[[bench]]
+name = "brentopt"
+harness = false
+required-features = []
+
+[[bench]]
+name = "conjugategradient"
+harness = false
+required-features = []
+
+[[bench]]
+name = "dfp"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "gaussnewton"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"]
+
+[[bench]]
+name = "gaussnewton_linesearch"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "goldensectionsearch"
+harness = false
+required-features = []
+
+[[bench]]
+name = "hagerzhang"
+harness = false
+required-features = []
+
+[[bench]]
+name = "landweber"
+harness = false
+required-features = []
+
+[[bench]]
+name = "lbfgs"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "lbfgs2d"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"]
+
+[[bench]]
+name = "morethuente"
+harness = false
+required-features = []
+
+[[bench]]
+name = "neldermead"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "newton"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "newton_cg"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "nonlinear_cg"
+harness = false
+required-features = []
+
+[[bench]]
+name = "owl_qn"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "particleswarm"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"]
+
+[[bench]]
+name = "simulatedannealing"
+harness = false
+required-features = []
+
+[[bench]]
+name = "sr1"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "sr1_trustregion"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
+
+[[bench]]
+name = "steepestdescent"
+harness = false
+required-features = []
+
+[[bench]]
+name = "trustregion_nd"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde"]
diff --git a/argmin/benches/backtracking.rs b/argmin/benches/backtracking.rs
new file mode 100644
index 000000000..64b98a3bf
--- /dev/null
+++ b/argmin/benches/backtracking.rs
@@ -0,0 +1,72 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
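+
+// Note: `cargo bench --bench backtracking` runs this benchmark on its own.
+// Benches that declare `required-features` in Cargo.toml need those features
+// enabled on the command line, e.g.
+// `cargo bench --bench bfgs --features argmin-math/ndarray_latest-serde`.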
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch};
+use argmin::solver::linesearch::{condition::ArmijoCondition, BacktrackingLineSearch};
+use argmin_testfunctions::{sphere, sphere_derivative};
+
+struct Sphere {}
+
+impl CostFunction for Sphere {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(sphere(param))
+    }
+}
+
+impl Gradient for Sphere {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, param: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(sphere_derivative(param))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![0.7, 0.0];
+    // Define problem
+    let operator = Sphere {};
+    // Set condition
+    let cond = ArmijoCondition::new(0.5)?;
+    // Set up line search method
+    let mut solver = BacktrackingLineSearch::new(cond).rho(0.9)?;
+    // Set search direction
+    solver.search_direction(vec![-1.0, 0.0]);
+    // Set initial step length
+    solver.initial_step_length(1.0)?;
+
+    let init_cost = operator.cost(&init_param)?;
+    let init_grad = operator.gradient(&init_param)?;
+
+    // Run solver
+    let _res = Executor::new(operator, solver)
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .gradient(init_grad)
+                .cost(init_cost)
+                .max_iters(10)
+        })
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("Backtracking", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs
new file mode 100644
index 000000000..e0e5e8405
--- /dev/null
+++ b/argmin/benches/bfgs.rs
@@ -0,0 +1,176 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
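+
+// BFGS maintains an approximation `H` of the *inverse* Hessian and refines it
+// each iteration with the standard rank-two update
+//
+//     H_{k+1} = (I - rho * s * y^T) * H_k * (I - rho * y * s^T) + rho * s * s^T,
+//
+// where s = x_{k+1} - x_k, y = grad_{k+1} - grad_k and rho = 1 / (y^T * s).
+// This is why the runs below seed the solver with an identity `inv_hessian`
+// of the problem dimension.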
+
+use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::BFGS;
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::{Array1, Array2};
+
+struct RosenbrockVec {
+    a: f64,
+    b: f64,
+}
+
+struct RosenbrockNdarray {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for RosenbrockVec {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(p, self.a, self.b))
+    }
+}
+
+impl Gradient for RosenbrockVec {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x, self.a, self.b)))
+    }
+}
+
+impl CostFunction for RosenbrockNdarray {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for RosenbrockNdarray {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run_vec(
+    a: f64,
+    b: f64,
+    init_param: &[f64],
+    c1: f64,
+    c2: f64,
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    let cost = RosenbrockVec { a, b };
+    // Define initial parameter vector
+    let init_param: Vec<f64> = Vec::from(init_param);
+    let mut init_hessian = Vec::<Vec<f64>>::new();
+    for i in 0..init_param.len() {
+        let mut row = Vec::new();
+        for j in 0..init_param.len() {
+            if i == j {
+                row.push(1.0);
+            } else {
+                row.push(0.0);
+            }
+        }
+        init_hessian.push(row);
+    }
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
+    // Set up solver
+    let solver = BFGS::new(linesearch);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .inv_hessian(init_hessian)
+                .max_iters(iterations)
+        })
+        .run()?;
+    Ok(())
+}
+
+fn run_ndarray(
+    a: f64,
+    b: f64,
+    init_param: &[f64],
+    c1: f64,
+    c2: f64,
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    let cost = RosenbrockNdarray { a, b };
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from_vec(Vec::from(init_param));
+    let init_hessian: Array2<f64> = Array2::eye(init_param.len());
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
+    // Set up solver
+    let solver = BFGS::new(linesearch);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .inv_hessian(init_hessian)
+                .max_iters(iterations)
+        })
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    let a = 1.0;
+    let b = 100.0;
+    let init_param = vec![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0];
+    let c1 = 1e-4;
+    let c2 = 0.9;
+    let iterations: u64 = 60;
+    let mut group = c.benchmark_group("BFGS");
+    for i in 2..init_param.len() {
+        // WARN: Vec version immediately fails with
+        // Condition violated: `MoreThuenteLineSearch`: Search direction must be a descent direction.
+        //
+        // group.bench_with_input(BenchmarkId::new("Vec", i), &i, |bencher, i| {
+        //     bencher.iter(|| {
+        //         run_vec(
+        //             black_box(a),
+        //             black_box(b),
+        //             black_box(&init_param[0..*i]),
+        //             black_box(c1),
+        //             black_box(c2),
+        //             black_box(iterations),
+        //         )
+        //         .expect("Benchmark should run without errors")
+        //     })
+        // });
+        group.bench_with_input(BenchmarkId::new("ndarray", i), &i, |bencher, i| {
+            bencher.iter(|| {
+                run_ndarray(
+                    black_box(a),
+                    black_box(b),
+                    black_box(&init_param[0..*i]),
+                    black_box(c1),
+                    black_box(c2),
+                    black_box(iterations),
+                )
+                .expect("Benchmark should run without errors")
+            })
+        });
+    }
+    group.finish();
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/brentopt.rs b/argmin/benches/brentopt.rs
new file mode 100644
index 000000000..7afa7af86
--- /dev/null
+++ b/argmin/benches/brentopt.rs
@@ -0,0 +1,49 @@
+// Copyright 2018-2020 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::brent::BrentOpt;
+
+/// Test function: `f(x) = exp(-x) - exp(5 - x/2)`
+///
+/// xmin == 2 log(2 exp(-5))
+/// xmin ~= -8.6137056388801093812
+/// f(xmin) == -exp(10)/4
+/// f(xmin) ~= -5506.6164487016791292
+struct TestFunc {}
+
+impl CostFunction for TestFunc {
+    // one-dimensional problem, no vector needed
+    type Param = f64;
+    type Output = f64;
+
+    fn cost(&self, x: &Self::Param) -> Result<Self::Output, Error> {
+        Ok((-x).exp() - (5. - x / 2.).exp())
+    }
+}
+
+fn run() -> Result<(), Error> {
+    let cost = TestFunc {};
+    let solver = BrentOpt::new(-10., 10.);
+
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("BrentOpt", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/brentroot.rs b/argmin/benches/brentroot.rs
new file mode 100644
index 000000000..7fcc7e030
--- /dev/null
+++ b/argmin/benches/brentroot.rs
@@ -0,0 +1,53 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
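+
+// Brent's root-finding method combines bisection with secant and inverse
+// quadratic interpolation steps. The test function below,
+// f(p) = (p + 3) * (p + 1)^2, changes sign only at the simple root p = -3
+// (p = -1 is a double root), so the bracket [-4, 0.5] converges to -3.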
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::brent::BrentRoot;
+
+/// Test function generalised from the Wikipedia example
+struct TestFunc {
+    zero1: f64,
+    zero2: f64,
+}
+
+impl CostFunction for TestFunc {
+    // one-dimensional problem, no vector needed
+    type Param = f64;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok((p + self.zero1) * (p - self.zero2) * (p - self.zero2))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    let cost = TestFunc {
+        zero1: 3.,
+        zero2: -1.,
+    };
+    let init_param = 0.5;
+    let solver = BrentRoot::new(-4., 0.5, 1e-11);
+
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("BrentRoot", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/conjugategradient.rs b/argmin/benches/conjugategradient.rs
new file mode 100644
index 000000000..0ee9f7f32
--- /dev/null
+++ b/argmin/benches/conjugategradient.rs
@@ -0,0 +1,52 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{Error, Executor, Operator};
+use argmin::solver::conjugategradient::ConjugateGradient;
+
+struct MyProblem {}
+
+impl Operator for MyProblem {
+    type Param = Vec<f64>;
+    type Output = Vec<f64>;
+
+    fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(vec![4.0 * p[0] + 1.0 * p[1], 1.0 * p[0] + 3.0 * p[1]])
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![2.0, 1.0];
+
+    // Define the right hand side `b` of `A * x = b`
+    let b = vec![1.0, 2.0];
+
+    // Set up operator
+    let operator = MyProblem {};
+
+    // Set up the solver
+    let solver: ConjugateGradient<_, f64> = ConjugateGradient::new(b);
+
+    // Run solver
+    let _res = Executor::new(operator, solver)
+        .configure(|state| state.param(init_param).max_iters(2))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("ConjugateGradient", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/dfp.rs b/argmin/benches/dfp.rs
new file mode 100644
index 000000000..8c96c5d26
--- /dev/null
+++ b/argmin/benches/dfp.rs
@@ -0,0 +1,74 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
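+
+// DFP is the rank-two quasi-Newton update that preceded BFGS; the inverse
+// Hessian approximation is updated as
+//
+//     H_{k+1} = H_k + (s * s^T) / (y^T * s) - (H_k * y * y^T * H_k) / (y^T * H_k * y),
+//
+// with s = x_{k+1} - x_k and y = grad_{k+1} - grad_k.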
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::DFP;
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::{array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    let init_param: Array1<f64> = array![-1.2, 1.0];
+    let init_hessian: Array2<f64> = Array2::eye(2);
+    // let init_param: Array1<f64> = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+    // let init_hessian: Array2<f64> = Array2::eye(8);
+
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+    // Set up solver
+    let solver = DFP::new(linesearch);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .inv_hessian(init_hessian)
+                .max_iters(1000)
+        })
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("DFP", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/gaussnewton.rs b/argmin/benches/gaussnewton.rs
new file mode 100644
index 000000000..fd080d06b
--- /dev/null
+++ b/argmin/benches/gaussnewton.rs
@@ -0,0 +1,167 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+
+use argmin::core::{Error, Executor, Jacobian, Operator};
+use argmin::solver::gaussnewton::GaussNewton;
+use nalgebra::{DMatrix, DVector};
+use ndarray::{Array1, Array2};
+
+type Rate = f64;
+type S = f64;
+type Measurement = (S, Rate);
+
+// Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+// Model used in this example:
+// `rate = (V_{max} * [S]) / (K_M + [S])`
+// where `V_{max}` and `K_M` are the sought parameters and `[S]` and `rate` are the measured data.
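+//
+// The residual for measurement i is r_i(p) = rate_i - p[0] * s_i / (p[1] + s_i)
+// with p[0] = V_{max} and p[1] = K_M, so the Jacobian entries implemented
+// below follow directly:
+//
+//     dr_i/dp[0] = -s_i / (p[1] + s_i)
+//     dr_i/dp[1] =  p[0] * s_i / (p[1] + s_i)^2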
+
+struct ProblemNG {
+    data: Vec<Measurement>,
+}
+
+struct ProblemNd {
+    data: Vec<Measurement>,
+}
+
+impl Operator for ProblemNd {
+    type Param = Array1<f64>;
+    type Output = Array1<f64>;
+
+    fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(self
+            .data
+            .iter()
+            .map(|(s, rate)| rate - (p[0] * s) / (p[1] + s))
+            .collect::<Array1<f64>>())
+    }
+}
+
+impl Operator for ProblemNG {
+    type Param = DVector<f64>;
+    type Output = DVector<f64>;
+
+    fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(DVector::from_vec(
+            self.data
+                .iter()
+                .map(|(s, rate)| rate - (p[0] * s) / (p[1] + s))
+                .collect(),
+        ))
+    }
+}
+
+impl Jacobian for ProblemNd {
+    type Param = Array1<f64>;
+    type Jacobian = Array2<f64>;
+
+    fn jacobian(&self, p: &Self::Param) -> Result<Self::Jacobian, Error> {
+        Ok(Array2::from_shape_fn((self.data.len(), 2), |(si, i)| {
+            if i == 0 {
+                -self.data[si].0 / (p[1] + self.data[si].0)
+            } else {
+                p[0] * self.data[si].0 / (p[1] + self.data[si].0).powi(2)
+            }
+        }))
+    }
+}
+
+impl Jacobian for ProblemNG {
+    type Param = DVector<f64>;
+    type Jacobian = DMatrix<f64>;
+
+    fn jacobian(&self, p: &Self::Param) -> Result<Self::Jacobian, Error> {
+        Ok(DMatrix::from_fn(self.data.len(), 2, |si, i| {
+            if i == 0 {
+                -self.data[si].0 / (p[1] + self.data[si].0)
+            } else {
+                p[0] * self.data[si].0 / (p[1] + self.data[si].0).powi(2)
+            }
+        }))
+    }
+}
+
+fn run_nalgebra(
+    data: &Vec<(f64, f64)>,
+    init_param: (f64, f64),
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+    let cost = ProblemNG { data: data.clone() };
+
+    // Define initial parameter vector
+    let init_param: DVector<f64> = DVector::from_vec(vec![init_param.0, init_param.1]);
+
+    // Set up solver
+    let solver: GaussNewton<f64> = GaussNewton::new();
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn run_ndarray(
+    data: &Vec<(f64, f64)>,
+    init_param: (f64, f64),
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+    let cost = ProblemNd { data: data.clone() };
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from(vec![init_param.0, init_param.1]);
+    // Set up solver
+    let solver: GaussNewton<f64> = GaussNewton::new();
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    let data = vec![
+        (0.038, 0.050),
+        (0.194, 0.127),
+        (0.425, 0.094),
+        (0.626, 0.2122),
+        (1.253, 0.2729),
+        (2.5, 0.2665),
+        (3.74, 0.3317),
+    ];
+    let init_param = (0.9, 0.2);
+    let iterations = 10;
+    let mut group = c.benchmark_group("GaussNewton");
+    group.bench_function("GaussNewton_nalgebra", |b| {
+        b.iter(|| {
+            run_nalgebra(
+                black_box(&data),
+                black_box(init_param),
+                black_box(iterations),
+            )
+            .expect("Benchmark should run without errors")
+        })
+    });
+    group.bench_function("GaussNewton_ndarray", |b| {
+        b.iter(|| {
+            run_ndarray(
+                black_box(&data),
+                black_box(init_param),
+                black_box(iterations),
+            )
+            .expect("Benchmark should run without errors")
+        })
+    });
+    group.finish();
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/gaussnewton_linesearch.rs b/argmin/benches/gaussnewton_linesearch.rs
new file mode 100644
index 000000000..1d2bc31ea
--- /dev/null
+++ b/argmin/benches/gaussnewton_linesearch.rs
@@ -0,0 +1,91 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{Error, Executor, Jacobian, Operator};
+use argmin::solver::gaussnewton::GaussNewtonLS;
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use ndarray::{Array1, Array2};
+
+type Rate = f64;
+type S = f64;
+type Measurement = (S, Rate);
+
+// Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+// Model used in this example:
+// `rate = (V_{max} * [S]) / (K_M + [S])`
+// where `V_{max}` and `K_M` are the sought parameters and `[S]` and `rate` are the measured data.
+struct Problem {
+    data: Vec<Measurement>,
+}
+
+impl Operator for Problem {
+    type Param = Array1<f64>;
+    type Output = Array1<f64>;
+
+    fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(self
+            .data
+            .iter()
+            .map(|(s, rate)| rate - (p[0] * s) / (p[1] + s))
+            .collect::<Array1<f64>>())
+    }
+}
+
+impl Jacobian for Problem {
+    type Param = Array1<f64>;
+    type Jacobian = Array2<f64>;
+
+    fn jacobian(&self, p: &Self::Param) -> Result<Self::Jacobian, Error> {
+        Ok(Array2::from_shape_fn((self.data.len(), 2), |(si, i)| {
+            if i == 0 {
+                -self.data[si].0 / (p[1] + self.data[si].0)
+            } else {
+                p[0] * self.data[si].0 / (p[1] + self.data[si].0).powi(2)
+            }
+        }))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+    let cost = Problem {
+        data: vec![
+            (0.038, 0.050),
+            (0.194, 0.127),
+            (0.425, 0.094),
+            (0.626, 0.2122),
+            (1.253, 0.2729),
+            (2.5, 0.2665),
+            (3.74, 0.3317),
+        ],
+    };
+
+    let linesearch = MoreThuenteLineSearch::new().with_bounds(0.0, 1.0)?;
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from(vec![0.9, 0.2]);
+    // Set up solver
+    let solver = GaussNewtonLS::new(linesearch);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(10))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("GaussNewtonLineSearch", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/goldensectionsearch.rs b/argmin/benches/goldensectionsearch.rs
new file mode 100644
index 000000000..b7e01bc18
--- /dev/null
+++ b/argmin/benches/goldensectionsearch.rs
@@ -0,0 +1,48 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
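+
+// Golden-section search shrinks the bracketing interval by the golden ratio at
+// every step: each iteration reuses one interior evaluation and discards a
+// fraction 1 - 1/phi ~= 0.382 of the interval (phi ~= 1.618), so the bracket
+// width after n steps is (1/phi)^n times the initial width.
+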
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::goldensectionsearch::GoldenSectionSearch;
+
+/// Test function from Wikipedia example
+struct TestFunc {}
+
+impl CostFunction for TestFunc {
+    // one-dimensional problem, no vector needed
+    type Param = f32;
+    type Output = f32;
+
+    fn cost(&self, x: &Self::Param) -> Result<Self::Output, Error> {
+        // In interval [-2.5, 3.0]
+        // Min at 1.0
+        // Max at -1.666 (multiply by -1.0 to test)
+        Ok((x + 3.0) * (x - 1.0).powi(2))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    let cost = TestFunc {};
+    let init_param = -0.5;
+    let solver = GoldenSectionSearch::new(-2.5, 3.0)?.with_tolerance(0.0001)?;
+
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("GoldenSectionSearch", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/hagerzhang.rs b/argmin/benches/hagerzhang.rs
new file mode 100644
index 000000000..697ce58a3
--- /dev/null
+++ b/argmin/benches/hagerzhang.rs
@@ -0,0 +1,73 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch};
+use argmin::solver::linesearch::HagerZhangLineSearch;
+use argmin_testfunctions::{sphere, sphere_derivative};
+
+struct Sphere {}
+
+impl CostFunction for Sphere {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(sphere(param))
+    }
+}
+
+impl Gradient for Sphere {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, param: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(sphere_derivative(param))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![1.0, 0.0];
+
+    // Problem definition
+    let operator = Sphere {};
+
+    // Set up line search method
+    let mut solver = HagerZhangLineSearch::new();
+
+    // Set search direction
+    solver.search_direction(vec![-1.5, -0.5]);
+    // Set initial step length
+    solver.initial_step_length(10.0)?;
+
+    let init_cost = operator.cost(&init_param)?;
+    let init_grad = operator.gradient(&init_param)?;
+
+    // Run solver
+    let _res = Executor::new(operator, solver)
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        // Gradient and cost are optional. If they are not provided, they will be computed
+        .configure(|state| {
+            state
+                .param(init_param)
+                .gradient(init_grad)
+                .cost(init_cost)
+                .max_iters(100)
+        })
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("HagerZhangLineSearch", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/landweber.rs b/argmin/benches/landweber.rs
new file mode 100644
index 000000000..ae38f218c
--- /dev/null
+++ b/argmin/benches/landweber.rs
@@ -0,0 +1,46 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{Error, Executor, Gradient};
+use argmin::solver::landweber::Landweber;
+use argmin_testfunctions::rosenbrock_2d_derivative;
+
+struct Rosenbrock {}
+
+impl Gradient for Rosenbrock {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(rosenbrock_2d_derivative(p, 1.0, 100.0))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // define initial parameter vector
+    let init_param: Vec<f64> = vec![1.2, 1.2];
+    let operator = Rosenbrock {};
+
+    let iters = 10;
+    let solver = Landweber::new(0.001);
+
+    let _res = Executor::new(operator, solver)
+        .configure(|state| state.param(init_param).max_iters(iters))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("Landweber", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/lbfgs.rs b/argmin/benches/lbfgs.rs
new file mode 100644
index 000000000..e5a8351e7
--- /dev/null
+++ b/argmin/benches/lbfgs.rs
@@ -0,0 +1,159 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
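+
+// L-BFGS never forms the dense inverse Hessian: it keeps only the last m
+// (s, y) update pairs (m = 7 below) and reconstructs the product of the
+// inverse Hessian with the gradient via the two-loop recursion, costing
+// O(m * n) time and memory per iteration instead of the O(n^2) of BFGS.
+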
+use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::LBFGS;
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::Array1;
+
+struct RosenbrockVec {
+    a: f64,
+    b: f64,
+}
+
+struct RosenbrockNdarray {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for RosenbrockVec {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for RosenbrockVec {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x, self.a, self.b)))
+    }
+}
+
+impl CostFunction for RosenbrockNdarray {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for RosenbrockNdarray {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run_vec(
+    a: f64,
+    b: f64,
+    init_param: &[f64],
+    c1: f64,
+    c2: f64,
+    m: usize,
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    let cost = RosenbrockVec { a, b };
+
+    // Define initial parameter vector
+    let init_param: Vec<f64> = Vec::from(init_param);
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
+    // Set up solver
+    let solver = LBFGS::new(linesearch, m);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn run_ndarray(
+    a: f64,
+    b: f64,
+    init_param: &[f64],
+    c1: f64,
+    c2: f64,
+    m: usize,
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    let cost = RosenbrockNdarray { a, b };
+
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from_vec(Vec::from(init_param));
+
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
+    // Set up solver
+    let solver = LBFGS::new(linesearch, m);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    let a = 1.0;
+    let b = 100.0;
+    let init_param = vec![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+    let c1 = 1e-4;
+    let c2 = 0.9;
+    let m = 7;
+    let iterations: u64 = 100;
+    let mut group = c.benchmark_group("LBFGS");
+    for i in 2..init_param.len() {
+        group.bench_with_input(BenchmarkId::new("Vec", i), &i, |bencher, i| {
+            bencher.iter(|| {
+                run_vec(
+                    black_box(a),
+                    black_box(b),
+                    black_box(&init_param[0..*i]),
+                    black_box(c1),
+                    black_box(c2),
+                    black_box(m),
+                    black_box(iterations),
+                )
+                .expect("Benchmark should run without errors")
+            })
+        });
+        group.bench_with_input(BenchmarkId::new("ndarray", i), &i, |bencher, i| {
+            bencher.iter(|| {
+                run_ndarray(
+                    black_box(a),
+                    black_box(b),
+                    black_box(&init_param[0..*i]),
+                    black_box(c1),
+                    black_box(c2),
+                    black_box(m),
+                    black_box(iterations),
+                )
+                .expect("Benchmark should run without errors")
+            })
+        });
+    }
+    group.finish();
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/lbfgs2d.rs b/argmin/benches/lbfgs2d.rs
new file mode 100644
index 000000000..a934a294a
--- /dev/null
+++ b/argmin/benches/lbfgs2d.rs
@@ -0,0 +1,267 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::LBFGS;
+use argmin_testfunctions::{rosenbrock, rosenbrock_2d, rosenbrock_2d_derivative};
+use finitediff::FiniteDiff;
+use nalgebra::DVector;
+use ndarray::Array1;
+
+struct RosenbrockVec {
+    a: f64,
+    b: f64,
+}
+
+struct RosenbrockNdarray {
+    a: f64,
+    b: f64,
+}
+
+struct Rosenbrock2DVec {
+    a: f64,
+    b: f64,
+}
+
+struct Rosenbrock2DNalgebra {
+    a: f64,
+    b: f64,
+}
+
+struct Rosenbrock2DNdarray {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock2DVec {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(p, self.a, self.b))
+    }
+}
+
+impl CostFunction for Rosenbrock2DNalgebra {
+    type Param = DVector<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(p.data.as_vec(), self.a, self.b))
+    }
+}
+
+impl CostFunction for Rosenbrock2DNdarray {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock2DVec {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(rosenbrock_2d_derivative(p, self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock2DNalgebra {
+    type Param = DVector<f64>;
+    type Gradient = DVector<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(rosenbrock_2d_derivative(p.data.as_vec(), self.a, self.b).into())
+    }
+}
+
+impl Gradient for Rosenbrock2DNdarray {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(rosenbrock_2d_derivative(p.as_slice().unwrap(), self.a, self.b).into())
+    }
+}
+
+// Multidimensional version
+impl CostFunction for RosenbrockVec {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for RosenbrockVec {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x, self.a, self.b)))
+    }
+}
+
+impl CostFunction for RosenbrockNdarray {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for RosenbrockNdarray {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run_2d_vec(
+    a: f64,
+    b: f64,
+    init_param: &Vec<f64>,
+    c1: f64,
+    c2: f64,
+    m: usize,
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock2DVec { a, b };
+
+    // Define initial parameter vector
+    // The clone is here to account for the same clone in the ndarray and
+    // nalgebra versions
+    let init_param = (*init_param).clone();
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
+    // Set up solver
+    let solver = LBFGS::new(linesearch, m);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn run_2d_nalgebra(
+    a: f64,
+    b: f64,
+    init_param: &Vec<f64>,
+    c1: f64,
+    c2: f64,
+    m: usize,
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock2DNalgebra { a, b };
+    // Define initial parameter vector
+    let init_param: DVector<f64> = DVector::from((*init_param).clone());
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
+    // Set up solver
+    let solver = LBFGS::new(linesearch, m);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn run_2d_ndarray(
+    a: f64,
+    b: f64,
+    init_param: &Vec<f64>,
+    c1: f64,
+    c2: f64,
+    m: usize,
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock2DNdarray { a, b };
+
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from_vec((*init_param).clone());
+
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
+    // Set up solver
+    let solver = LBFGS::new(linesearch, m);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    let a = 1.0;
+    let b = 100.0;
+    let init_param = vec![-1.2, 1.0];
+    let c1 = 1e-4;
+    let c2 = 0.9;
+    let m = 7;
+    let iterations: u64 = 100;
+    let mut group = c.benchmark_group("LBFGS_2D");
+    group.bench_function("Vec", |bencher| {
+        bencher.iter(|| {
+            run_2d_vec(
+                black_box(a),
+                black_box(b),
+                black_box(&init_param),
+                black_box(c1),
+                black_box(c2),
+                black_box(m),
+                black_box(iterations),
+            )
+            .expect("Benchmark should run without errors")
+        })
+    });
+    group.bench_function("nalgebra", |bencher| {
+        bencher.iter(|| {
+            run_2d_nalgebra(
+                black_box(a),
+                black_box(b),
+                black_box(&init_param),
+                black_box(c1),
+                black_box(c2),
+                black_box(m),
+                black_box(iterations),
+            )
+            .expect("Benchmark should run without errors")
+        })
+    });
+    group.bench_function("ndarray", |bencher| {
+        bencher.iter(|| {
+            run_2d_ndarray(
+                black_box(a),
+                black_box(b),
+                black_box(&init_param),
+                black_box(c1),
+                black_box(c2),
+                black_box(m),
+                black_box(iterations),
+            )
+            .expect("Benchmark should run without errors")
+        })
+    });
+    group.finish();
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/morethuente.rs b/argmin/benches/morethuente.rs
new file mode 100644
index 000000000..ad46b9d0a
--- /dev/null
+++ b/argmin/benches/morethuente.rs
@@ -0,0 +1,70 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
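+
+// The More-Thuente line search finds a step length alpha satisfying the
+// strong Wolfe conditions
+//
+//     f(x + alpha * p) <= f(x) + c1 * alpha * grad^T * p    (sufficient decrease)
+//     |grad(x + alpha * p)^T * p| <= c2 * |grad^T * p|      (curvature)
+//
+// with 0 < c1 < c2 < 1; the `with_c(c1, c2)` calls in the other benches set
+// exactly these two constants.
+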
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin_testfunctions::{sphere, sphere_derivative};
+
+struct Sphere {}
+
+impl CostFunction for Sphere {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(sphere(param))
+    }
+}
+
+impl Gradient for Sphere {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, param: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(sphere_derivative(param))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![1.0, 0.0];
+    // Problem definition
+    let operator = Sphere {};
+    // Set up line search method
+    let mut solver = MoreThuenteLineSearch::new();
+    // Set search direction
+    solver.search_direction(vec![-2.0, 0.0]);
+    // Set initial step length
+    solver.initial_step_length(1.0)?;
+
+    let init_cost = operator.cost(&init_param)?;
+    let init_grad = operator.gradient(&init_param)?;
+
+    // Run solver
+    let _res = Executor::new(operator, solver)
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        // Gradient and cost are optional. If they are not provided, they will be computed
+        .configure(|state| {
+            state
+                .param(init_param)
+                .gradient(init_grad)
+                .cost(init_cost)
+                .max_iters(10)
+        })
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("MoreThuenteLineSearch", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/neldermead.rs b/argmin/benches/neldermead.rs
new file mode 100644
index 000000000..20c7a4440
--- /dev/null
+++ b/argmin/benches/neldermead.rs
@@ -0,0 +1,58 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::neldermead::NelderMead;
+use argmin_testfunctions::rosenbrock;
+use ndarray::{array, Array1};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Set up solver -- note that the proper choice of the vertices is very important!
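+    // For an n-dimensional problem, Nelder-Mead needs n + 1 vertices that span
+    // the space (here: three points in 2D that are not collinear); a degenerate
+    // simplex collapses onto a subspace and the search stalls.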
+    let solver = NelderMead::new(vec![
+        // array![-2.0, 3.0],
+        // array![-2.0, -1.0],
+        // array![2.0, -1.0],
+        array![-1.0, 3.0],
+        array![2.0, 1.5],
+        array![2.0, -1.0],
+    ])
+    .with_sd_tolerance(0.0001)?;
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("NelderMead", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/newton.rs b/argmin/benches/newton.rs
new file mode 100644
index 000000000..7f5a3678b
--- /dev/null
+++ b/argmin/benches/newton.rs
@@ -0,0 +1,67 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{Error, Executor, Gradient, Hessian};
+use argmin::solver::newton::Newton;
+use argmin_testfunctions::{rosenbrock_2d_derivative, rosenbrock_2d_hessian};
+use ndarray::{Array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(Array1::from(rosenbrock_2d_derivative(
+            &p.to_vec(),
+            self.a,
+            self.b,
+        )))
+    }
+}
+
+impl Hessian for Rosenbrock {
+    type Param = Array1<f64>;
+    type Hessian = Array2<f64>;
+
+    fn hessian(&self, p: &Self::Param) -> Result<Self::Hessian, Error> {
+        let h = rosenbrock_2d_hessian(&p.to_vec(), self.a, self.b);
+        Ok(Array::from_shape_vec((2, 2), h)?)
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    // let init_param: Array1<f64> = Array1::from(vec![1.2, 1.2]);
+    let init_param: Array1<f64> = Array1::from(vec![-1.2, 1.0]);
+
+    // Set up solver
+    let solver: Newton<f64> = Newton::new();
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(8))
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("Newton", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/newton_cg.rs b/argmin/benches/newton_cg.rs
new file mode 100644
index 000000000..bfd09bf35
--- /dev/null
+++ b/argmin/benches/newton_cg.rs
@@ -0,0 +1,81 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
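+
+// Newton-CG (truncated Newton) avoids factorizing the Hessian: each outer
+// iteration solves the Newton system H * d = -grad only approximately with a
+// few conjugate-gradient steps and then hands the direction d to the line
+// search.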
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::newton::NewtonCG;
+use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative, rosenbrock_2d_hessian};
+use ndarray::{Array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(Array1::from(rosenbrock_2d_derivative(
+            &p.to_vec(),
+            self.a,
+            self.b,
+        )))
+    }
+}
+
+impl Hessian for Rosenbrock {
+    type Param = Array1<f64>;
+    type Hessian = Array2<f64>;
+
+    fn hessian(&self, p: &Self::Param) -> Result<Self::Hessian, Error> {
+        let h = rosenbrock_2d_hessian(&p.to_vec(), self.a, self.b);
+        Ok(Array::from_shape_vec((2, 2), h)?)
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    // let init_param: Array1<f64> = Array1::from(vec![1.2, 1.2]);
+    let init_param: Array1<f64> = Array1::from(vec![-1.2, 1.0]);
+
+    // Set up line search
+    let linesearch = MoreThuenteLineSearch::new();
+
+    // Set up solver
+    let solver = NewtonCG::new(linesearch);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("NewtonCG", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/nonlinear_cg.rs b/argmin/benches/nonlinear_cg.rs
new file mode 100644
index 000000000..6b4640846
--- /dev/null
+++ b/argmin/benches/nonlinear_cg.rs
@@ -0,0 +1,75 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
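+
+// Nonlinear CG builds search directions d_k = -grad_k + beta_k * d_{k-1}; the
+// Polak-Ribiere choice used below is
+//
+//     beta_k = grad_k^T * (grad_k - grad_{k-1}) / ||grad_{k-1}||^2,
+//
+// often clipped at zero (PR+) to guarantee a descent direction.
+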
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::conjugategradient::{beta::PolakRibiere, NonlinearConjugateGradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative};
+
+struct Rosenbrock {}
+
+impl CostFunction for Rosenbrock {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(p, 1.0, 100.0))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(rosenbrock_2d_derivative(p, 1.0, 100.0))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Set up cost function
+    let operator = Rosenbrock {};
+
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![1.2, 1.2];
+
+    // Set up line search
+    let linesearch = MoreThuenteLineSearch::new();
+    let beta_method = PolakRibiere::new();
+
+    // Set up nonlinear conjugate gradient method
+    let solver = NonlinearConjugateGradient::new(linesearch, beta_method)
+        // Set the number of iterations after which a restart should be performed.
+        // This allows the algorithm to "forget" previous information which may
+        // not be helpful anymore.
+        .restart_iters(10)
+        // Set the value for the orthogonality measure.
+        // Setting this parameter leads to a restart of the algorithm (setting
+        // beta = 0) when two consecutive gradients are no longer sufficiently
+        // orthogonal, i.e. when this condition is met:
+        //
+        // `|\nabla f_k^T * \nabla f_{k-1}| / ||\nabla f_k||^2 >= v`
+        //
+        // A typical value for `v` is 0.1.
+        .restart_orthogonality(0.1);
+
+    // Run solver
+    let _res = Executor::new(operator, solver)
+        .configure(|state| state.param(init_param).max_iters(20).target_cost(0.0))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("NonlinearConjugateGradient", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/owl_qn.rs b/argmin/benches/owl_qn.rs
new file mode 100644
index 000000000..5f17157a6
--- /dev/null
+++ b/argmin/benches/owl_qn.rs
@@ -0,0 +1,69 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
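+
+// OWL-QN is L-BFGS adapted to L1-regularized objectives f(x) + c * ||x||_1:
+// it replaces the gradient by a pseudo-gradient at points where the objective
+// is not differentiable and restricts each step to the orthant of the current
+// iterate, which is what `with_l1_regularization` enables below.
+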
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::LBFGS;
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::{array, Array1};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    let init_param: Array1<f64> = array![-1.2, 1.0];
+    // let init_param: Array1<f64> = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+
+    // Set up solver
+    let solver = LBFGS::new(linesearch, 7)
+        .with_l1_regularization(1.0)?
+        .with_tolerance_cost(1e-6)?;
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("owl_qn", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/particleswarm.rs b/argmin/benches/particleswarm.rs
new file mode 100644
index 000000000..00d22132f
--- /dev/null
+++ b/argmin/benches/particleswarm.rs
@@ -0,0 +1,126 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
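+
+// Canonical particle swarm update: each particle carries a velocity
+//
+//     v <- w * v + c1 * r1 * (p_best - x) + c2 * r2 * (g_best - x),   x <- x + v,
+//
+// with r1, r2 uniform in [0, 1], pulling it towards its own best position
+// (p_best) and the swarm's best (g_best). The benches below only vary the
+// math backend used for the position and velocity vectors.
+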
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::particleswarm::ParticleSwarm;
+use argmin_testfunctions::himmelblau;
+use nalgebra::{dvector, DVector};
+use ndarray::{array, Array1};
+
+struct HimmelblauVec {}
+
+impl CostFunction for HimmelblauVec {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(himmelblau(param))
+    }
+}
+
+struct HimmelblauNalgebra {}
+
+impl CostFunction for HimmelblauNalgebra {
+    type Param = DVector<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(himmelblau(param.as_slice()))
+    }
+}
+
+struct HimmelblauNdarray {}
+
+impl CostFunction for HimmelblauNdarray {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(himmelblau(param.as_slice().unwrap()))
+    }
+}
+
+fn run_vec(bound: f64, num_particles: usize, iterations: u64) -> Result<(), Error> {
+    let cost_function = HimmelblauVec {};
+
+    let solver = ParticleSwarm::new((vec![-bound, -bound], vec![bound, bound]), num_particles);
+
+    let _res = Executor::new(cost_function, solver)
+        .configure(|state| state.max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn run_nalgebra(bound: f64, num_particles: usize, iterations: u64) -> Result<(), Error> {
+    let cost_function = HimmelblauNalgebra {};
+
+    let solver = ParticleSwarm::new(
+        (dvector![-bound, -bound], dvector![bound, bound]),
+        num_particles,
+    );
+
+    let _res = Executor::new(cost_function, solver)
+        .configure(|state| state.max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn run_ndarray(bound: f64, num_particles: usize, iterations: u64) -> Result<(), Error> {
+    let cost_function = HimmelblauNdarray {};
+
+    let solver = ParticleSwarm::new(
+        (array![-bound, -bound], array![bound, bound]),
+        num_particles,
+    );
+
+    let _res = Executor::new(cost_function, solver)
+        .configure(|state| state.max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    let bound = 4.0;
+    let num_particles = 40;
+    let iterations = 100;
+    let mut group = c.benchmark_group("ParticleSwarm");
+    group.bench_function("Vec", |b| {
+        b.iter(|| {
+            run_vec(
+                black_box(bound),
+                black_box(num_particles),
+                black_box(iterations),
+            )
+            .expect("Benchmark should run without errors")
+        })
+    });
+    group.bench_function("nalgebra", |b| {
+        b.iter(|| {
+            run_nalgebra(
+                black_box(bound),
+                black_box(num_particles),
+                black_box(iterations),
+            )
+            .expect("Benchmark should run without errors")
+        })
+    });
+    group.bench_function("ndarray", |b| {
+        b.iter(|| {
+            run_ndarray(
+                black_box(bound),
+                black_box(num_particles),
+                black_box(iterations),
+            )
+            .expect("Benchmark should run without errors")
+        })
+    });
+    group.finish();
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/simulatedannealing.rs b/argmin/benches/simulatedannealing.rs
new file mode 100644
index 000000000..2668b74bd
--- /dev/null
+++ b/argmin/benches/simulatedannealing.rs
@@ -0,0 +1,145 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
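+
+// Simulated annealing accepts a candidate that is worse by delta_f with the
+// Metropolis probability exp(-delta_f / T); the temperature T decreases over
+// time (here following `SATempFunc::Boltzmann`), so uphill moves become rarer
+// as the search settles.
+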
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::simulatedannealing::{Anneal, SATempFunc, SimulatedAnnealing};
+use argmin_testfunctions::rosenbrock;
+use rand::distributions::Uniform;
+use rand::prelude::*;
+use rand_xoshiro::Xoshiro256PlusPlus;
+use std::sync::{Arc, Mutex};
+
+struct Rosenbrock {
+    /// Parameter a, usually 1.0
+    a: f64,
+    /// Parameter b, usually 100.0
+    b: f64,
+    /// lower bound
+    lower_bound: Vec<f64>,
+    /// upper bound
+    upper_bound: Vec<f64>,
+    /// Random number generator. We use an `Arc<Mutex<Xoshiro256PlusPlus>>` here
+    /// because `ArgminOperator` requires `self` to be passed as an immutable
+    /// reference. This gives us thread safe interior mutability.
+    rng: Arc<Mutex<Xoshiro256PlusPlus>>,
+}
+
+impl Rosenbrock {
+    /// Constructor
+    pub fn new(a: f64, b: f64, lower_bound: Vec<f64>, upper_bound: Vec<f64>) -> Self {
+        Rosenbrock {
+            a,
+            b,
+            lower_bound,
+            upper_bound,
+            rng: Arc::new(Mutex::new(Xoshiro256PlusPlus::from_entropy())),
+        }
+    }
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(param, self.a, self.b))
+    }
+}
+
+impl Anneal for Rosenbrock {
+    type Param = Vec<f64>;
+    type Output = Vec<f64>;
+    type Float = f64;
+
+    /// Anneal a parameter vector
+    fn anneal(&self, param: &Vec<f64>, temp: f64) -> Result<Vec<f64>, Error> {
+        let mut param_n = param.clone();
+        let mut rng = self.rng.lock().unwrap();
+        let distr = Uniform::from(0..param.len());
+        // Perform modifications to a degree proportional to the current temperature `temp`.
+        for _ in 0..(temp.floor() as u64 + 1) {
+            // Compute random index of the parameter vector using the supplied random number
+            // generator.
+            let idx = rng.sample(distr);
+
+            // Compute random number in [-0.1, 0.1].
+            let val = rng.sample(Uniform::new_inclusive(-0.1, 0.1));
+
+            // Modify previous parameter value at random position `idx` by `val`
+            param_n[idx] += val;
+
+            // Check if bounds are violated. If yes, project onto bound.
+            param_n[idx] = param_n[idx].clamp(self.lower_bound[idx], self.upper_bound[idx]);
+        }
+        Ok(param_n)
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define bounds
+    let lower_bound: Vec<f64> = vec![-5.0, -5.0];
+    let upper_bound: Vec<f64> = vec![5.0, 5.0];
+
+    // Define cost function
+    let operator = Rosenbrock::new(1.0, 100.0, lower_bound, upper_bound);
+
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![1.0, 1.2];
+
+    // Define initial temperature
+    let temp = 15.0;
+
+    // Set up simulated annealing solver
+    // An alternative random number generator (RNG) can be provided to `new_with_rng`:
+    // SimulatedAnnealing::new_with_rng(temp, Xoshiro256PlusPlus::from_entropy())?
+    let solver = SimulatedAnnealing::new(temp)?
+        // Optional: Define temperature function (defaults to `SATempFunc::TemperatureFast`)
+        .with_temp_func(SATempFunc::Boltzmann)
+        /////////////////////////
+        // Stopping criteria   //
+        /////////////////////////
+        // Optional: stop if there was no new best solution after 1000 iterations
+        .with_stall_best(1000)
+        // Optional: stop if there was no accepted solution after 1000 iterations
+        .with_stall_accepted(1000)
+        /////////////////////////
+        // Reannealing         //
+        /////////////////////////
+        // Optional: Reanneal after 1000 iterations (resets temperature to initial temperature)
+        .with_reannealing_fixed(1000)
+        // Optional: Reanneal after no accepted solution has been found for `iter` iterations
+        .with_reannealing_accepted(500)
+        // Optional: Start reannealing after no new best solution has been found for 800 iterations
+        .with_reannealing_best(800);
+
+    /////////////////////////
+    // Run solver          //
+    /////////////////////////
+    let _res = Executor::new(operator, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                // Optional: Set maximum number of iterations (defaults to `std::u64::MAX`)
+                .max_iters(10_000)
+                // Optional: Set target cost function value (defaults to `std::f64::NEG_INFINITY`)
+                .target_cost(0.0)
+        })
+        // Optional: Attach an observer
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("SimulatedAnnealing", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/sr1.rs b/argmin/benches/sr1.rs
new file mode 100644
index 000000000..04209327c
--- /dev/null
+++ b/argmin/benches/sr1.rs
@@ -0,0 +1,77 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
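+
+// Benchmarks the SR1 quasi-Newton method with a More-Thuente line search on
+// the 8-dimensional Styblinski-Tang function. Gradients are obtained by
+// forward finite differences (`finitediff`), so timings include the cost of
+// numerical differentiation, not just the solver itself.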
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+// use argmin::solver::linesearch::HagerZhangLineSearch;
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::SR1;
+use argmin_testfunctions::styblinski_tang;
+use finitediff::FiniteDiff;
+use ndarray::{array, Array1, Array2};
+
+struct StyblinskiTang {}
+
+impl CostFunction for StyblinskiTang {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(styblinski_tang(&p.to_vec()))
+    }
+}
+impl Gradient for StyblinskiTang {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| styblinski_tang(&x.to_vec())))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = StyblinskiTang {};
+
+    // Define initial parameter vector
+    // let init_param: Array1<f64> = array![-1.2, 1.0, -5.0, 2.0, 3.0, 2.0, 4.0, 5.0];
+    let init_param: Array1<f64> = array![5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0];
+    let init_hessian: Array2<f64> = Array2::eye(8);
+
+    // Set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+    // let linesearch = HagerZhangLineSearch::new();
+
+    // Set up solver
+    let solver = SR1::new(linesearch);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .inv_hessian(init_hessian)
+                .max_iters(1000)
+        })
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("SR1", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/sr1_trustregion.rs b/argmin/benches/sr1_trustregion.rs
new file mode 100644
index 000000000..7c49832a3
--- /dev/null
+++ b/argmin/benches/sr1_trustregion.rs
@@ -0,0 +1,93 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
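+
+// Benchmarks SR1 with a trust region on the 2D Rosenbrock function, using
+// the Steihaug method for the trust-region subproblem (CauchyPoint and
+// Dogleg are kept as commented-out alternatives). Gradient and Hessian are
+// both computed by forward finite differences, which likely dominates the
+// per-iteration cost for this small problem.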
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian};
+use argmin::solver::quasinewton::SR1TrustRegion;
+#[allow(unused_imports)]
+use argmin::solver::trustregion::{CauchyPoint, Dogleg, Steihaug, TrustRegion};
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::{array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+impl Hessian for Rosenbrock {
+    type Param = Array1<f64>;
+    type Hessian = Array2<f64>;
+
+    fn hessian(&self, p: &Self::Param) -> Result<Self::Hessian, Error> {
+        Ok((*p).forward_hessian(&|x| self.gradient(x).unwrap()))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    let init_param: Array1<f64> = array![-1.2, 1.0];
+    // let init_param: Array1<f64> = array![1.2, 1.0];
+    let init_hessian: Array2<f64> = Array2::eye(2);
+    // let init_param: Array1<f64> = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+    // let init_hessian: Array2<f64> = Array2::eye(8);
+
+    // Set up the subproblem
+    let subproblem = Steihaug::new().with_max_iters(20);
+    // let subproblem = CauchyPoint::new();
+    // let subproblem = Dogleg::new();
+
+    // Set up solver
+    let solver = SR1TrustRegion::new(subproblem);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .hessian(init_hessian)
+                .max_iters(1000)
+        })
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("SR1TrustRegion", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/steepestdescent.rs b/argmin/benches/steepestdescent.rs
new file mode 100644
index 000000000..e6bcc596b
--- /dev/null
+++ b/argmin/benches/steepestdescent.rs
@@ -0,0 +1,74 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
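+
+// Benchmarks steepest descent with a More-Thuente line search on the 2D
+// Rosenbrock function, starting from the "easy" initial point [1.2, 1.2]
+// and capped at 10 iterations to keep each Criterion sample cheap.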
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::gradientdescent::SteepestDescent;
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(p, self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(rosenbrock_2d_derivative(p, self.a, self.b))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function (must implement `CostFunction` and `Gradient`)
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    // easy case
+    let init_param: Vec<f64> = vec![1.2, 1.2];
+    // tough case
+    // let init_param: Vec<f64> = vec![-1.2, 1.0];
+
+    // Pick a line search.
+    // let linesearch = HagerZhangLineSearch::new();
+    let linesearch = MoreThuenteLineSearch::new();
+
+    // Set up solver
+    let solver = SteepestDescent::new(linesearch);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(10))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("SteepestDescent", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/argmin/benches/trustregion_nd.rs b/argmin/benches/trustregion_nd.rs
new file mode 100644
index 000000000..062d87719
--- /dev/null
+++ b/argmin/benches/trustregion_nd.rs
@@ -0,0 +1,90 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian};
+#[allow(unused_imports)]
+use argmin::solver::trustregion::{CauchyPoint, Dogleg, Steihaug, TrustRegion};
+use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative, rosenbrock_2d_hessian};
+use ndarray::{Array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(Array1::from(rosenbrock_2d_derivative(
+            &p.to_vec(),
+            self.a,
+            self.b,
+        )))
+    }
+}
+
+impl Hessian for Rosenbrock {
+    type Param = Array1<f64>;
+    type Hessian = Array2<f64>;
+
+    fn hessian(&self, p: &Self::Param) -> Result<Self::Hessian, Error> {
+        let h = rosenbrock_2d_hessian(&p.to_vec(), self.a, self.b);
+        Ok(Array::from_shape_vec((2, 2), h)?)
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    // easy case
+    // let init_param: Array1<f64> = Array1::from_vec(vec![1.2, 1.2]);
+    // tough case
+    let init_param: Array1<f64> = Array1::from(vec![-1.2, 1.0]);
+
+    // Set up the subproblem
+    // let subproblem = Steihaug::new().with_max_iters(2);
+    let subproblem = CauchyPoint::new();
+    // let subproblem = Dogleg::new();
+
+    // Set up solver
+    let solver = TrustRegion::new(subproblem);
+
+    // Run solver
+    let _res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(50))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("TrustRegion_nd", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
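+
+// Note: benchmarks must be run with the features declared for them in
+// Cargo.toml; for this one, for example (from within the `argmin` directory):
+//   cargo bench --bench trustregion_nd --features argmin-math/ndarray_latest-serde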