From 90aace18cecb83011b04709398e184f1ee32696f Mon Sep 17 00:00:00 2001
From: jonboh
Date: Thu, 11 May 2023 17:43:33 +0200
Subject: [PATCH 01/16] add backtracking benchmark

---
 argmin/Cargo.toml              |  5 ++
 argmin/benches/backtracking.rs | 88 ++++++++++++++++++++++++++++++++++
 2 files changed, 93 insertions(+)
 create mode 100644 argmin/benches/backtracking.rs

diff --git a/argmin/Cargo.toml b/argmin/Cargo.toml
index addfef2ec..2c03ede4b 100644
--- a/argmin/Cargo.toml
+++ b/argmin/Cargo.toml
@@ -45,6 +45,11 @@ ndarray = { version = "0.15", features = ["serde-1"] }
 ndarray-linalg = { version = "0.16", features = ["netlib"] }
 argmin-math = { path = "../argmin-math" }
 serde = { version = "1.0", features = ["derive", "rc"] }
+criterion = { version = "0.4", features = ["html_reports"] }
+
+[[bench]]
+name = "backtracking"
+harness = false
 
 [features]
 default = ["slog-logger", "serde1"]

diff --git a/argmin/benches/backtracking.rs b/argmin/benches/backtracking.rs
new file mode 100644
index 000000000..1e13d06fe
--- /dev/null
+++ b/argmin/benches/backtracking.rs
@@ -0,0 +1,88 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch};
+use argmin::solver::linesearch::{condition::ArmijoCondition, BacktrackingLineSearch};
+use argmin_testfunctions::{sphere, sphere_derivative};
+
+struct Sphere {}
+
+impl CostFunction for Sphere {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(sphere(param))
+    }
+}
+
+impl Gradient for Sphere {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, param: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(sphere_derivative(param))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // define initial parameter vector
+    let init_param: Vec<f64> = vec![0.7, 0.0];
+
+    // Define problem
+    let operator = Sphere {};
+
+    // Set condition
+    let cond = ArmijoCondition::new(0.5)?;
+
+    // Set up Line Search method
+    let mut solver = BacktrackingLineSearch::new(cond).rho(0.9)?;
+
+    // The following parameters do not follow the builder pattern because they are part of the
+    // ArgminLineSearch trait which needs to be object safe.
+
+    // Set search direction
+    solver.search_direction(vec![-1.0, 0.0]);
+
+    // Set initial position
+    solver.initial_step_length(1.0)?;
+
+    let init_cost = operator.cost(&init_param)?;
+    let init_grad = operator.gradient(&init_param)?;
+
+    // Run solver
+    let res = Executor::new(operator, solver)
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        // Gradient and cost are optional. If they are not provided, they will be computed
+        .configure(|state| {
+            state
+                .param(init_param)
+                .gradient(init_grad)
+                .cost(init_cost)
+                .max_iters(10)
+        })
+        .run()?;
+
+    // Wait a second (lets the logger flush everything before printing again)
+    // std::thread::sleep(std::time::Duration::from_secs(1));
+
+    // Print result
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("backtracking", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

From 0ea142c28c7fcdcfea90ab6a57954c198c7fefc9 Mon Sep 17 00:00:00 2001
From: jonboh
Date: Fri, 12 May 2023 22:03:38 +0200
Subject: [PATCH 02/16] make benchmarks from examples

---
 argmin/Cargo.toml                        | 139 ++++++++++++++++++++-
 argmin/benches/backtracking.rs           |  21 +---
 argmin/benches/bfgs.rs                   |  71 +++++++++++
 argmin/benches/brentopt.rs               |  49 ++++++++
 argmin/benches/brentroot.rs              |  53 ++++++++
 argmin/benches/conjugategradient.rs      |  52 ++++++++
 argmin/benches/dfp.rs                    |  74 ++++++++++++
 argmin/benches/gaussnewton.rs            |  88 ++++++++++++++
 argmin/benches/gaussnewton_linesearch.rs |  89 ++++++++++++++
 argmin/benches/gaussnewton_nalgebra.rs   |  92 ++++++++++++++
 argmin/benches/goldensectionsearch.rs    |  48 ++++++++
 argmin/benches/hagerzhang.rs             |  73 ++++++++++++
 argmin/benches/landweber.rs              |  46 +++++++
 argmin/benches/lbfgs.rs                  |  67 +++++++++++
 argmin/benches/lbfgs_nalgebra.rs         |  68 +++++++++++
 argmin/benches/morethuente.rs            |  71 +++++++++++
 argmin/benches/neldermead.rs             |  59 +++++++++
 argmin/benches/newton.rs                 |  68 +++++++++++
 argmin/benches/newton_cg.rs              |  82 +++++++++++++
 argmin/benches/nonlinear_cg.rs           |  76 ++++++++++++
 argmin/benches/owl_qn.rs                 |  70 +++++++++++
 argmin/benches/particleswarm.rs          |  42 +++++++
 argmin/benches/particleswarm_nalgebra.rs |  43 +++++++
 argmin/benches/simulatedannealing.rs     | 146 +++++++++++++++++++++++
 argmin/benches/sr1.rs                    |  73 ++++++++++++
 argmin/benches/sr1_trustregion.rs        |  88 ++++++++++++++
 argmin/benches/steepestdescent.rs        |  73 ++++++++++++
 argmin/benches/trustregion_nd.rs         |  87 ++++++++++++++
 28 files changed, 1986 insertions(+), 22 deletions(-)
 create mode 100644 argmin/benches/bfgs.rs
 create mode 100644 argmin/benches/brentopt.rs
 create mode 100644 argmin/benches/brentroot.rs
 create mode 100644 argmin/benches/conjugategradient.rs
 create mode 100644 argmin/benches/dfp.rs
 create mode 100644 argmin/benches/gaussnewton.rs
 create mode 100644 argmin/benches/gaussnewton_linesearch.rs
 create mode 100644 argmin/benches/gaussnewton_nalgebra.rs
 create mode 100644 argmin/benches/goldensectionsearch.rs
 create mode 100644 argmin/benches/hagerzhang.rs
 create mode 100644 argmin/benches/landweber.rs
 create mode 100644 argmin/benches/lbfgs.rs
 create mode 100644 argmin/benches/lbfgs_nalgebra.rs
 create mode 100644 argmin/benches/morethuente.rs
 create mode 100644 argmin/benches/neldermead.rs
 create mode 100644 argmin/benches/newton.rs
 create mode 100644 argmin/benches/newton_cg.rs
 create mode 100644 argmin/benches/nonlinear_cg.rs
 create mode 100644 argmin/benches/owl_qn.rs
 create mode 100644 argmin/benches/particleswarm.rs
 create mode 100644 argmin/benches/particleswarm_nalgebra.rs
 create mode 100644 argmin/benches/simulatedannealing.rs
 create mode 100644 argmin/benches/sr1.rs
 create mode 100644 argmin/benches/sr1_trustregion.rs
 create mode 100644 argmin/benches/steepestdescent.rs
 create mode 100644 argmin/benches/trustregion_nd.rs

diff --git a/argmin/Cargo.toml b/argmin/Cargo.toml
index 2c03ede4b..7f1057ef4 100644
--- a/argmin/Cargo.toml
+++ b/argmin/Cargo.toml
@@ -47,10 +47,6 @@
 argmin-math = { path = "../argmin-math" }
 serde = { version = "1.0", features = ["derive", "rc"] }
 criterion = { version = "0.4", features = ["html_reports"] }
 
-[[bench]]
-name = "backtracking"
-harness = false
-
 [features]
 default = ["slog-logger", "serde1"]
 wasm-bindgen = ["instant/wasm-bindgen", "getrandom/js"]
@@ -189,3 +185,138 @@ required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
 [[example]]
 name = "writers"
 required-features = ["argmin-math/ndarray_latest-serde", "slog-logger", "serde1"]
+
+[[bench]]
+name = "backtracking"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "bfgs"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "brentroot"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "brentopt"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "conjugategradient"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "dfp"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "gaussnewton"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "gaussnewton_linesearch"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "gaussnewton_nalgebra"
+harness = false
+required-features = ["_nalgebral", "argmin-math/nalgebra_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "goldensectionsearch"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "hagerzhang"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "landweber"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "lbfgs"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "lbfgs_nalgebra"
+harness = false
+required-features = ["argmin-math/nalgebra_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "morethuente"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "neldermead"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "newton"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "newton_cg"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "nonlinear_cg"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "owl_qn"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "particleswarm"
+harness = false
+required-features = []
+
+[[bench]]
+name = "particleswarm_nalgebra"
+harness = false
+required-features = ["argmin-math/nalgebra_latest-serde"]
+
+[[bench]]
+name = "simulatedannealing"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "sr1"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "sr1_trustregion"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]
+
+[[bench]]
+name = "steepestdescent"
+harness = false
+required-features = ["slog-logger"]
+
+[[bench]]
+name = "trustregion_nd"
+harness = false
+required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"]

diff --git a/argmin/benches/backtracking.rs b/argmin/benches/backtracking.rs
index 1e13d06fe..0f0141630 100644
--- a/argmin/benches/backtracking.rs
+++ b/argmin/benches/backtracking.rs
@@ -5,8 +5,7 @@
 // http://opensource.org/licenses/MIT>, at your option. This file may not be
 // copied, modified, or distributed except according to those terms.
 
-use criterion::{black_box, criterion_group, criterion_main, Criterion};
-
+use criterion::{criterion_group, criterion_main, Criterion};
 
 use argmin::core::observers::{ObserverMode, SlogLogger};
 use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch};
@@ -36,22 +35,14 @@ impl Gradient for Sphere {
 fn run() -> Result<(), Error> {
     // define initial parameter vector
     let init_param: Vec<f64> = vec![0.7, 0.0];
-
     // Define problem
     let operator = Sphere {};
-
     // Set condition
     let cond = ArmijoCondition::new(0.5)?;
-
     // Set up Line Search method
     let mut solver = BacktrackingLineSearch::new(cond).rho(0.9)?;
-
-    // The following parameters do not follow the builder pattern because they are part of the
-    // ArgminLineSearch trait which needs to be object safe.
-
     // Set search direction
     solver.search_direction(vec![-1.0, 0.0]);
-
     // Set initial position
     solver.initial_step_length(1.0)?;
 
@@ -59,9 +50,8 @@ fn run() -> Result<(), Error> {
     let init_grad = operator.gradient(&init_param)?;
 
     // Run solver
-    let res = Executor::new(operator, solver)
+    let _res = Executor::new(operator, solver)
         // .add_observer(SlogLogger::term(), ObserverMode::Always)
-        // Gradient and cost are optional. If they are not provided, they will be computed
         .configure(|state| {
             state
                 .param(init_param)
@@ -70,17 +60,12 @@ fn run() -> Result<(), Error> {
                 .max_iters(10)
         })
         .run()?;
-
-    // Wait a second (lets the logger flush everything before printing again)
-    // std::thread::sleep(std::time::Duration::from_secs(1));
-
-    // Print result
     Ok(())
 }
 
 
 fn criterion_benchmark(c: &mut Criterion) {
-    c.bench_function("backtracking", |b| b.iter(|| run()));
+    c.bench_function("Backtracking", |b| b.iter(|| run()));
 }
 
 criterion_group!(benches, criterion_benchmark);

diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs
new file mode 100644
index 000000000..ff0c9f90e
--- /dev/null
+++ b/argmin/benches/bfgs.rs
@@ -0,0 +1,71 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
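+//
+// A note on what is measured here: each Criterion iteration below runs a full
+// BFGS minimization of an 8-dimensional Rosenbrock function, using a
+// More-Thuente line search and a forward finite-difference gradient, so the
+// timing includes the cost of the gradient evaluations.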
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::BFGS;
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::{array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+    // Define initial parameter vector
+    // TODO: parametrize dimension
+    let init_param: Array1<f64> = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+    let init_hessian: Array2<f64> = Array2::eye(8);
+    // set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+    // Set up solver
+    let solver = BFGS::new(linesearch);
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .inv_hessian(init_hessian)
+                .max_iters(60)
+        })
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("BFGS", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/brentopt.rs b/argmin/benches/brentopt.rs
new file mode 100644
index 000000000..735ac5e34
--- /dev/null
+++ b/argmin/benches/brentopt.rs
@@ -0,0 +1,49 @@
+// Copyright 2018-2020 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::brent::BrentOpt;
+
+/// Test function: `f(x) = exp(-x) - exp(5-x/2)`
+/// xmin == 2 log(2 exp(-5))
+/// xmin ~= -8.6137056388801093812
+/// f(xmin) == -exp(10)/4
+/// f(xmin) ~= -5506.6164487016791292
+struct TestFunc {}
+
+impl CostFunction for TestFunc {
+    // one dimensional problem, no vector needed
+    type Param = f64;
+    type Output = f64;
+
+    fn cost(&self, x: &Self::Param) -> Result<Self::Output, Error> {
+        Ok((-x).exp() - (5. - x / 2.).exp())
+    }
+}
+
+fn run() -> Result<(), Error> {
+    let cost = TestFunc {};
+    let solver = BrentOpt::new(-10., 10.);
+
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()
+        .unwrap();
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("BrentOpt", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/brentroot.rs b/argmin/benches/brentroot.rs
new file mode 100644
index 000000000..36e587102
--- /dev/null
+++ b/argmin/benches/brentroot.rs
@@ -0,0 +1,53 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::brent::BrentRoot;
+
+/// Test function generalised from the Wikipedia example
+struct TestFunc {
+    zero1: f64,
+    zero2: f64,
+}
+
+impl CostFunction for TestFunc {
+    // one dimensional problem, no vector needed
+    type Param = f64;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok((p + self.zero1) * (p - self.zero2) * (p - self.zero2))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    let cost = TestFunc {
+        zero1: 3.,
+        zero2: -1.,
+    };
+    let init_param = 0.5;
+    let solver = BrentRoot::new(-4., 0.5, 1e-11);
+
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()
+        .unwrap();
+
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("BrentRoot", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/conjugategradient.rs b/argmin/benches/conjugategradient.rs
new file mode 100644
index 000000000..4eed4367c
--- /dev/null
+++ b/argmin/benches/conjugategradient.rs
@@ -0,0 +1,52 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{Error, Executor, Operator};
+use argmin::solver::conjugategradient::ConjugateGradient;
+
+struct MyProblem {}
+
+impl Operator for MyProblem {
+    type Param = Vec<f64>;
+    type Output = Vec<f64>;
+
+    fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(vec![4.0 * p[0] + 1.0 * p[1], 1.0 * p[0] + 3.0 * p[1]])
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![2.0, 1.0];
+
+    // Define the right hand side `b` of `A * x = b`
+    let b = vec![1.0, 2.0];
+
+    // Set up operator
+    let operator = MyProblem {};
+
+    // Set up the solver
+    let solver: ConjugateGradient<_, f64> = ConjugateGradient::new(b);
+
+    // Run solver
+    let res = Executor::new(operator, solver)
+        .configure(|state| state.param(init_param).max_iters(2))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("ConjugateGradient", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/dfp.rs b/argmin/benches/dfp.rs
new file mode 100644
index 000000000..b1eba4bc7
--- /dev/null
+++ b/argmin/benches/dfp.rs
@@ -0,0 +1,74 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
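+//
+// Same setup as the BFGS benchmark, but with the DFP update and a
+// 2-dimensional Rosenbrock function; the 8-dimensional starting point is
+// left commented out below.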
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::DFP;
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::{array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    let init_param: Array1<f64> = array![-1.2, 1.0];
+    let init_hessian: Array2<f64> = Array2::eye(2);
+    // let init_param: Array1<f64> = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+    // let init_hessian: Array2<f64> = Array2::eye(8);
+
+    // set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+    // Set up solver
+    let solver = DFP::new(linesearch);
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .inv_hessian(init_hessian)
+                .max_iters(1000)
+        })
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("DFP", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/gaussnewton.rs b/argmin/benches/gaussnewton.rs
new file mode 100644
index 000000000..2f100b879
--- /dev/null
+++ b/argmin/benches/gaussnewton.rs
@@ -0,0 +1,88 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{Error, Executor, Jacobian, Operator};
+use argmin::solver::gaussnewton::GaussNewton;
+use ndarray::{Array1, Array2};
+
+type Rate = f64;
+type S = f64;
+type Measurement = (S, Rate);
+
+// Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+// Model used in this example:
+// `rate = (V_{max} * [S]) / (K_M + [S])`
+// where `V_{max}` and `K_M` are the sought parameters and `[S]` and `rate` is the measured data.
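+//
+// The residuals are `r_i = rate_i - (V_{max} * [S]_i) / (K_M + [S]_i)`, so the
+// Jacobian assembled below has the entries
+// `dr_i/dV_{max} = -[S]_i / (K_M + [S]_i)` and
+// `dr_i/dK_M = V_{max} * [S]_i / (K_M + [S]_i)^2`.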
+struct Problem {
+    data: Vec<Measurement>,
+}
+
+impl Operator for Problem {
+    type Param = Array1<f64>;
+    type Output = Array1<f64>;
+
+    fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(self
+            .data
+            .iter()
+            .map(|(s, rate)| rate - (p[0] * s) / (p[1] + s))
+            .collect::<Array1<f64>>())
+    }
+}
+
+impl Jacobian for Problem {
+    type Param = Array1<f64>;
+    type Jacobian = Array2<f64>;
+
+    fn jacobian(&self, p: &Self::Param) -> Result<Self::Jacobian, Error> {
+        Ok(Array2::from_shape_fn((self.data.len(), 2), |(si, i)| {
+            if i == 0 {
+                -self.data[si].0 / (p[1] + self.data[si].0)
+            } else {
+                p[0] * self.data[si].0 / (p[1] + self.data[si].0).powi(2)
+            }
+        }))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+    let cost = Problem {
+        data: vec![
+            (0.038, 0.050),
+            (0.194, 0.127),
+            (0.425, 0.094),
+            (0.626, 0.2122),
+            (1.253, 0.2729),
+            (2.5, 0.2665),
+            (3.74, 0.3317),
+        ],
+    };
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from(vec![0.9, 0.2]);
+    // Set up solver
+    let solver: GaussNewton<f64> = GaussNewton::new();
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(10))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("GaussNewton", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/gaussnewton_linesearch.rs b/argmin/benches/gaussnewton_linesearch.rs
new file mode 100644
index 000000000..bdfc800c8
--- /dev/null
+++ b/argmin/benches/gaussnewton_linesearch.rs
@@ -0,0 +1,89 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{Error, Executor, Jacobian, Operator};
+use argmin::solver::gaussnewton::GaussNewtonLS;
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use ndarray::{Array1, Array2};
+
+type Rate = f64;
+type S = f64;
+type Measurement = (S, Rate);
+
+// Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+// Model used in this example:
+// `rate = (V_{max} * [S]) / (K_M + [S])`
+// where `V_{max}` and `K_M` are the sought parameters and `[S]` and `rate` is the measured data.
+struct Problem {
+    data: Vec<Measurement>,
+}
+
+impl Operator for Problem {
+    type Param = Array1<f64>;
+    type Output = Array1<f64>;
+
+    fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(self
+            .data
+            .iter()
+            .map(|(s, rate)| rate - (p[0] * s) / (p[1] + s))
+            .collect::<Array1<f64>>())
+    }
+}
+
+impl Jacobian for Problem {
+    type Param = Array1<f64>;
+    type Jacobian = Array2<f64>;
+
+    fn jacobian(&self, p: &Self::Param) -> Result<Self::Jacobian, Error> {
+        Ok(Array2::from_shape_fn((self.data.len(), 2), |(si, i)| {
+            if i == 0 {
+                -self.data[si].0 / (p[1] + self.data[si].0)
+            } else {
+                p[0] * self.data[si].0 / (p[1] + self.data[si].0).powi(2)
+            }
+        }))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+    let cost = Problem {
+        data: vec![
+            (0.038, 0.050),
+            (0.194, 0.127),
+            (0.425, 0.094),
+            (0.626, 0.2122),
+            (1.253, 0.2729),
+            (2.5, 0.2665),
+            (3.74, 0.3317),
+        ],
+    };
+
+    let linesearch = MoreThuenteLineSearch::new().with_bounds(0.0, 1.0)?;
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from(vec![0.9, 0.2]);
+    // Set up solver
+    let solver = GaussNewtonLS::new(linesearch);
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(10))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("GaussNewtonLineSearch", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/gaussnewton_nalgebra.rs b/argmin/benches/gaussnewton_nalgebra.rs
new file mode 100644
index 000000000..6cda1ea0b
--- /dev/null
+++ b/argmin/benches/gaussnewton_nalgebra.rs
@@ -0,0 +1,92 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{Error, Executor, Jacobian, Operator};
+use argmin::solver::gaussnewton::GaussNewton;
+
+use nalgebra::{DMatrix, DVector};
+
+type Rate = f64;
+type S = f64;
+type Measurement = (S, Rate);
+
+// Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+// Model used in this example:
+// `rate = (V_{max} * [S]) / (K_M + [S])`
+// where `V_{max}` and `K_M` are the sought parameters and `[S]` and `rate` is the measured data.
+struct Problem {
+    data: Vec<Measurement>,
+}
+
+impl Operator for Problem {
+    type Param = DVector<f64>;
+    type Output = DVector<f64>;
+
+    fn apply(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(DVector::from_vec(
+            self.data
+                .iter()
+                .map(|(s, rate)| rate - (p[0] * s) / (p[1] + s))
+                .collect(),
+        ))
+    }
+}
+
+impl Jacobian for Problem {
+    type Param = DVector<f64>;
+    type Jacobian = DMatrix<f64>;
+
+    fn jacobian(&self, p: &Self::Param) -> Result<Self::Jacobian, Error> {
+        Ok(DMatrix::from_fn(7, 2, |si, i| {
+            if i == 0 {
+                -self.data[si].0 / (p[1] + self.data[si].0)
+            } else {
+                p[0] * self.data[si].0 / (p[1] + self.data[si].0).powi(2)
+            }
+        }))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+    let cost = Problem {
+        data: vec![
+            (0.038, 0.050),
+            (0.194, 0.127),
+            (0.425, 0.094),
+            (0.626, 0.2122),
+            (1.253, 0.2729),
+            (2.5, 0.2665),
+            (3.74, 0.3317),
+        ],
+    };
+
+    // Define initial parameter vector
+    let init_param: DVector<f64> = DVector::from_vec(vec![0.9, 0.2]);
+
+    // Set up solver
+    let solver: GaussNewton<f64> = GaussNewton::new();
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(10))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("GaussNewton_nalgebra", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/goldensectionsearch.rs b/argmin/benches/goldensectionsearch.rs
new file mode 100644
index 000000000..7ebe551fd
--- /dev/null
+++ b/argmin/benches/goldensectionsearch.rs
@@ -0,0 +1,48 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::goldensectionsearch::GoldenSectionSearch;
+
+/// Test function from Wikipedia example
+struct TestFunc {}
+
+impl CostFunction for TestFunc {
+    // one dimensional problem, no vector needed
+    type Param = f32;
+    type Output = f32;
+
+    fn cost(&self, x: &Self::Param) -> Result<Self::Output, Error> {
+        // In interval [-2.5, 3.0]
+        // Min at 1.0
+        // Max at -1.666 (multiply by -1.0 to test)
+        Ok((x + 3.0) * (x - 1.0).powi(2))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    let cost = TestFunc {};
+    let init_param = -0.5;
+    let solver = GoldenSectionSearch::new(-2.5, 3.0)?.with_tolerance(0.0001)?;
+
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()
+        .unwrap();
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("GoldenSectionSearch", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/hagerzhang.rs b/argmin/benches/hagerzhang.rs
new file mode 100644
index 000000000..93b10ff08
--- /dev/null
+++ b/argmin/benches/hagerzhang.rs
@@ -0,0 +1,73 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
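+//
+// This benchmarks a single Hager-Zhang line search along the fixed direction
+// `[-1.5, -0.5]` on the sphere function, starting from an initial step
+// length of 10.0.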
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch};
+use argmin::solver::linesearch::HagerZhangLineSearch;
+use argmin_testfunctions::{sphere, sphere_derivative};
+
+struct Sphere {}
+
+impl CostFunction for Sphere {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(sphere(param))
+    }
+}
+
+impl Gradient for Sphere {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, param: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(sphere_derivative(param))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![1.0, 0.0];
+
+    // Problem definition
+    let operator = Sphere {};
+
+    // Set up line search method
+    let mut solver = HagerZhangLineSearch::new();
+
+    // Set search direction
+    solver.search_direction(vec![-1.5, -0.5]);
+    // Set initial step length
+    solver.initial_step_length(10.0)?;
+
+    let init_cost = operator.cost(&init_param)?;
+    let init_grad = operator.gradient(&init_param)?;
+
+    // Run solver
+    let _res = Executor::new(operator, solver)
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        // Gradient and cost are optional. If they are not provided, they will be computed
+        .configure(|state| {
+            state
+                .param(init_param)
+                .gradient(init_grad)
+                .cost(init_cost)
+                .max_iters(100)
+        })
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("HagerZhangLineSearch", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/landweber.rs b/argmin/benches/landweber.rs
new file mode 100644
index 000000000..a978f79e1
--- /dev/null
+++ b/argmin/benches/landweber.rs
@@ -0,0 +1,46 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{Error, Executor, Gradient};
+use argmin::solver::landweber::Landweber;
+use argmin_testfunctions::rosenbrock_2d_derivative;
+
+struct Rosenbrock {}
+
+impl Gradient for Rosenbrock {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(rosenbrock_2d_derivative(p, 1.0, 100.0))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // define initial parameter vector
+    let init_param: Vec<f64> = vec![1.2, 1.2];
+    let operator = Rosenbrock {};
+
+    let iters = 10;
+    let solver = Landweber::new(0.001);
+
+    let res = Executor::new(operator, solver)
+        .configure(|state| state.param(init_param).max_iters(iters))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("Landweber", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/lbfgs.rs b/argmin/benches/lbfgs.rs
new file mode 100644
index 000000000..6c5013897
--- /dev/null
+++ b/argmin/benches/lbfgs.rs
@@ -0,0 +1,67 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
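+//
+// Each Criterion iteration runs L-BFGS with a memory of 7 previous gradient
+// pairs on the 2-dimensional Rosenbrock function, with the gradient obtained
+// by forward finite differences.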
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::LBFGS;
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::{array, Array1};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    let init_param: Array1<f64> = array![-1.2, 1.0];
+    // let init_param: Array1<f64> = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+
+    // set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+    // Set up solver
+    let solver = LBFGS::new(linesearch, 7);
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("LBFGS", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/lbfgs_nalgebra.rs b/argmin/benches/lbfgs_nalgebra.rs
new file mode 100644
index 000000000..3bb7936f7
--- /dev/null
+++ b/argmin/benches/lbfgs_nalgebra.rs
@@ -0,0 +1,68 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::LBFGS;
+use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative};
+use nalgebra::DVector;
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = DVector<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(p.data.as_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = DVector<f64>;
+    type Gradient = DVector<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(DVector::from(rosenbrock_2d_derivative(
+            p.data.as_vec(),
+            self.a,
+            self.b,
+        )))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+    // Define initial parameter vector
+    let init_param: DVector<f64> = DVector::from(vec![-1.2, 1.0]);
+    // set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+    // Set up solver
+    let solver = LBFGS::new(linesearch, 7);
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("LBFGS_nalgebra", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/morethuente.rs b/argmin/benches/morethuente.rs
new file mode 100644
index 000000000..5d96290ae
--- /dev/null
+++ b/argmin/benches/morethuente.rs
@@ -0,0 +1,71 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin_testfunctions::{sphere, sphere_derivative};
+
+struct Sphere {}
+
+impl CostFunction for Sphere {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(sphere(param))
+    }
+}
+
+impl Gradient for Sphere {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, param: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(sphere_derivative(param))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![1.0, 0.0];
+    // Problem definition
+    let operator = Sphere {};
+    // Set up line search method
+    let mut solver = MoreThuenteLineSearch::new();
+    // Set search direction
+    solver.search_direction(vec![-2.0, 0.0]);
+    // Set initial step length
+    solver.initial_step_length(1.0)?;
+
+    let init_cost = operator.cost(&init_param)?;
+    let init_grad = operator.gradient(&init_param)?;
+
+    // Run solver
+    let res = Executor::new(operator, solver)
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        // Gradient and cost are optional. If they are not provided, they will be computed
+        .configure(|state| {
+            state
+                .param(init_param)
+                .gradient(init_grad)
+                .cost(init_cost)
+                .max_iters(10)
+        })
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("MoreThuenteLineSearch", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/neldermead.rs b/argmin/benches/neldermead.rs
new file mode 100644
index 000000000..e8bcb5b77
--- /dev/null
+++ b/argmin/benches/neldermead.rs
@@ -0,0 +1,59 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::neldermead::NelderMead;
+use argmin_testfunctions::rosenbrock;
+use ndarray::{array, Array1};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Set up solver -- note that the proper choice of the vertices is very important!
+    let solver = NelderMead::new(vec![
+        // array![-2.0, 3.0],
+        // array![-2.0, -1.0],
+        // array![2.0, -1.0],
+        array![-1.0, 3.0],
+        array![2.0, 1.5],
+        array![2.0, -1.0],
+    ])
+    .with_sd_tolerance(0.0001)?;
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("NelderMead", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/newton.rs b/argmin/benches/newton.rs
new file mode 100644
index 000000000..c700ad7b1
--- /dev/null
+++ b/argmin/benches/newton.rs
@@ -0,0 +1,68 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{Error, Executor, Gradient, Hessian};
+use argmin::solver::newton::Newton;
+use argmin_testfunctions::{rosenbrock_2d_derivative, rosenbrock_2d_hessian};
+use ndarray::{Array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(Array1::from(rosenbrock_2d_derivative(
+            &p.to_vec(),
+            self.a,
+            self.b,
+        )))
+    }
+}
+
+impl Hessian for Rosenbrock {
+    type Param = Array1<f64>;
+    type Hessian = Array2<f64>;
+
+    fn hessian(&self, p: &Self::Param) -> Result<Self::Hessian, Error> {
+        let h = rosenbrock_2d_hessian(&p.to_vec(), self.a, self.b);
+        Ok(Array::from_shape_vec((2, 2), h)?)
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    // let init_param: Array1<f64> = Array1::from(vec![1.2, 1.2]);
+    let init_param: Array1<f64> = Array1::from(vec![-1.2, 1.0]);
+
+    // Set up solver
+    let solver: Newton<f64> = Newton::new();
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(8))
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("Newton", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/newton_cg.rs b/argmin/benches/newton_cg.rs
new file mode 100644
index 000000000..82bc29dcf
--- /dev/null
+++ b/argmin/benches/newton_cg.rs
@@ -0,0 +1,82 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::newton::NewtonCG;
+use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative, rosenbrock_2d_hessian};
+use ndarray::{Array, Array1, Array2};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(Array1::from(rosenbrock_2d_derivative(
+            &p.to_vec(),
+            self.a,
+            self.b,
+        )))
+    }
+}
+
+impl Hessian for Rosenbrock {
+    type Param = Array1<f64>;
+    type Hessian = Array2<f64>;
+
+    fn hessian(&self, p: &Self::Param) -> Result<Self::Hessian, Error> {
+        let h = rosenbrock_2d_hessian(&p.to_vec(), self.a, self.b);
+        Ok(Array::from_shape_vec((2, 2), h)?)
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    // let init_param: Array1<f64> = Array1::from(vec![1.2, 1.2]);
+    let init_param: Array1<f64> = Array1::from(vec![-1.2, 1.0]);
+
+    // set up line search
+    let linesearch = MoreThuenteLineSearch::new();
+
+    // Set up solver
+    let solver = NewtonCG::new(linesearch);
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("NewtonCG", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/nonlinear_cg.rs b/argmin/benches/nonlinear_cg.rs
new file mode 100644
index 000000000..1fc4cfe79
--- /dev/null
+++ b/argmin/benches/nonlinear_cg.rs
@@ -0,0 +1,76 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
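+//
+// Nonlinear conjugate gradient with the Polak-Ribiere beta update; the solver
+// restarts every 10 iterations and whenever successive gradients lose
+// orthogonality (see the `restart_orthogonality` comment below).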
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::conjugategradient::{beta::PolakRibiere, NonlinearConjugateGradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative};
+
+struct Rosenbrock {}
+
+impl CostFunction for Rosenbrock {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock_2d(p, 1.0, 100.0))
+    }
+}
+
+impl Gradient for Rosenbrock {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok(rosenbrock_2d_derivative(p, 1.0, 100.0))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Set up cost function
+    let operator = Rosenbrock {};
+
+    // define initial parameter vector
+    let init_param: Vec<f64> = vec![1.2, 1.2];
+
+    // set up line search
+    let linesearch = MoreThuenteLineSearch::new();
+    let beta_method = PolakRibiere::new();
+
+    // Set up nonlinear conjugate gradient method
+    let solver = NonlinearConjugateGradient::new(linesearch, beta_method)
+        // Set the number of iterations when a restart should be performed
+        // This allows the algorithm to "forget" previous information which may not be helpful anymore.
+        .restart_iters(10)
+        // Set the value for the orthogonality measure.
+        // Setting this parameter leads to a restart of the algorithm (setting beta = 0) after two
+        // consecutive search directions are not orthogonal anymore. In other words, if this condition
+        // is met:
+        //
+        // `|\nabla f_k^T * \nabla f_{k-1}| / ||\nabla f_k||^2 >= v`
+        //
+        // A typical value for `v` is 0.1.
+        .restart_orthogonality(0.1);
+
+    // Run solver
+    let res = Executor::new(operator, solver)
+        .configure(|state| state.param(init_param).max_iters(20).target_cost(0.0))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("NonlinearConjugateGradient", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/owl_qn.rs b/argmin/benches/owl_qn.rs
new file mode 100644
index 000000000..17dce89e3
--- /dev/null
+++ b/argmin/benches/owl_qn.rs
@@ -0,0 +1,70 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
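+//
+// OWL-QN is L-BFGS with L1 regularization: this file reuses the `LBFGS`
+// solver from the lbfgs benchmark, but enables `with_l1_regularization(1.0)`.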
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor, Gradient};
+use argmin::solver::linesearch::MoreThuenteLineSearch;
+use argmin::solver::quasinewton::LBFGS;
+use argmin_testfunctions::rosenbrock;
+use finitediff::FiniteDiff;
+use ndarray::{array, Array1};
+
+struct Rosenbrock {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+impl Gradient for Rosenbrock {
+    type Param = Array1<f64>;
+    type Gradient = Array1<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b)))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define cost function
+    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+
+    // Define initial parameter vector
+    let init_param: Array1<f64> = array![-1.2, 1.0];
+    // let init_param: Array1<f64> = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+
+    // set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+
+    // Set up solver
+    let solver = LBFGS::new(linesearch, 7)
+        .with_l1_regularization(1.0)?
+        .with_tolerance_cost(1e-6)?;
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| state.param(init_param).max_iters(100))
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("owl_qn", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/particleswarm.rs b/argmin/benches/particleswarm.rs
new file mode 100644
index 000000000..89d049ca2
--- /dev/null
+++ b/argmin/benches/particleswarm.rs
@@ -0,0 +1,42 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::particleswarm::ParticleSwarm;
+use argmin_testfunctions::himmelblau;
+
+struct Himmelblau {}
+
+impl CostFunction for Himmelblau {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(himmelblau(param))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    let cost_function = Himmelblau {};
+
+    let solver = ParticleSwarm::new((vec![-4.0, -4.0], vec![4.0, 4.0]), 40);
+
+    let res = Executor::new(cost_function, solver)
+        .configure(|state| state.max_iters(100))
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("ParticleSwarm", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/particleswarm_nalgebra.rs b/argmin/benches/particleswarm_nalgebra.rs
new file mode 100644
index 000000000..ea4eab592
--- /dev/null
+++ b/argmin/benches/particleswarm_nalgebra.rs
@@ -0,0 +1,43 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::particleswarm::ParticleSwarm;
+use argmin_testfunctions::himmelblau;
+use nalgebra::{dvector, DVector};
+
+struct Himmelblau {}
+
+impl CostFunction for Himmelblau {
+    type Param = DVector<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(himmelblau(param.data.as_vec()))
+    }
+}
+
+fn run() -> Result<(), Error> {
+    let cost_function = Himmelblau {};
+
+    let solver = ParticleSwarm::new((dvector![-4.0, -4.0], dvector![4.0, 4.0]), 40);
+
+    let res = Executor::new(cost_function, solver)
+        .configure(|state| state.max_iters(100))
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("ParticleSwarm_nalgebra", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/simulatedannealing.rs b/argmin/benches/simulatedannealing.rs
new file mode 100644
index 000000000..0a5b5a4ef
--- /dev/null
+++ b/argmin/benches/simulatedannealing.rs
@@ -0,0 +1,146 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
+
+use criterion::{criterion_group, criterion_main, Criterion};
+
+use argmin::core::observers::{ObserverMode, SlogLogger};
+use argmin::core::{CostFunction, Error, Executor};
+use argmin::solver::simulatedannealing::{Anneal, SATempFunc, SimulatedAnnealing};
+use argmin_testfunctions::rosenbrock;
+use rand::distributions::Uniform;
+use rand::prelude::*;
+use rand_xoshiro::Xoshiro256PlusPlus;
+use std::sync::{Arc, Mutex};
+
+struct Rosenbrock {
+    /// Parameter a, usually 1.0
+    a: f64,
+    /// Parameter b, usually 100.0
+    b: f64,
+    /// lower bound
+    lower_bound: Vec<f64>,
+    /// upper bound
+    upper_bound: Vec<f64>,
+    /// Random number generator. We use a `Arc<Mutex<Xoshiro256PlusPlus>>` here because
+    /// `ArgminOperator` requires `self` to be passed as an immutable reference. This gives us
+    /// thread safe interior mutability.
+    rng: Arc<Mutex<Xoshiro256PlusPlus>>,
+}
+
+impl Rosenbrock {
+    /// Constructor
+    pub fn new(a: f64, b: f64, lower_bound: Vec<f64>, upper_bound: Vec<f64>) -> Self {
+        Rosenbrock {
+            a,
+            b,
+            lower_bound,
+            upper_bound,
+            rng: Arc::new(Mutex::new(Xoshiro256PlusPlus::from_entropy())),
+        }
+    }
+}
+
+impl CostFunction for Rosenbrock {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(param, self.a, self.b))
+    }
+}
+
+impl Anneal for Rosenbrock {
+    type Param = Vec<f64>;
+    type Output = Vec<f64>;
+    type Float = f64;
+
+    /// Anneal a parameter vector
+    fn anneal(&self, param: &Vec<f64>, temp: f64) -> Result<Vec<f64>, Error> {
+        let mut param_n = param.clone();
+        let mut rng = self.rng.lock().unwrap();
+        let distr = Uniform::from(0..param.len());
+        // Perform modifications to a degree proportional to the current temperature `temp`.
+        for _ in 0..(temp.floor() as u64 + 1) {
+            // Compute random index of the parameter vector using the supplied random number
+            // generator.
+            let idx = rng.sample(distr);
+
+            // Compute random number in [-0.1, 0.1].
+            let val = rng.sample(Uniform::new_inclusive(-0.1, 0.1));
+
+            // modify previous parameter value at random position `idx` by `val`
+            param_n[idx] += val;
+
+            // check if bounds are violated. If yes, project onto bound.
+            param_n[idx] = param_n[idx].clamp(self.lower_bound[idx], self.upper_bound[idx]);
+        }
+        Ok(param_n)
+    }
+}
+
+fn run() -> Result<(), Error> {
+    // Define bounds
+    let lower_bound: Vec<f64> = vec![-5.0, -5.0];
+    let upper_bound: Vec<f64> = vec![5.0, 5.0];
+
+    // Define cost function
+    let operator = Rosenbrock::new(1.0, 100.0, lower_bound, upper_bound);
+
+    // Define initial parameter vector
+    let init_param: Vec<f64> = vec![1.0, 1.2];
+
+    // Define initial temperature
+    let temp = 15.0;
+
+    // Set up simulated annealing solver
+    // An alternative random number generator (RNG) can be provided to `new_with_rng`:
+    // SimulatedAnnealing::new_with_rng(temp, Xoshiro256PlusPlus::from_entropy())?
+    let solver = SimulatedAnnealing::new(temp)?
+        // Optional: Define temperature function (defaults to `SATempFunc::TemperatureFast`)
+        .with_temp_func(SATempFunc::Boltzmann)
+        /////////////////////////
+        // Stopping criteria   //
+        /////////////////////////
+        // Optional: stop if there was no new best solution after 1000 iterations
+        .with_stall_best(1000)
+        // Optional: stop if there was no accepted solution after 1000 iterations
+        .with_stall_accepted(1000)
+        /////////////////////////
+        // Reannealing         //
+        /////////////////////////
+        // Optional: Reanneal after 1000 iterations (resets temperature to initial temperature)
+        .with_reannealing_fixed(1000)
+        // Optional: Reanneal after no accepted solution has been found for `iter` iterations
+        .with_reannealing_accepted(500)
+        // Optional: Start reannealing after no new best solution has been found for 800 iterations
+        .with_reannealing_best(800);
+
+    /////////////////////////
+    // Run solver          //
+    /////////////////////////
+    let res = Executor::new(operator, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                // Optional: Set maximum number of iterations (defaults to `std::u64::MAX`)
+                .max_iters(10_000)
+                // Optional: Set target cost function value (defaults to `std::f64::NEG_INFINITY`)
+                .target_cost(0.0)
+        })
+        // Optional: Attach an observer
+        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .run()?;
+    Ok(())
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    c.bench_function("SimulatedAnnealing", |b| b.iter(|| run()));
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);

diff --git a/argmin/benches/sr1.rs b/argmin/benches/sr1.rs
new file mode 100644
index 000000000..ac0ec8760
--- /dev/null
+++ b/argmin/benches/sr1.rs
@@ -0,0 +1,73 @@
+// Copyright 2018-2022 argmin developers
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
+// or http://opensource.org/licenses/MIT>, at your option. This file may not be
+// copied, modified, or distributed except according to those terms.
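+//
+// SR1 (symmetric rank-one) quasi-Newton update, benchmarked on the
+// 8-dimensional Styblinski-Tang function with a More-Thuente line search.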
+use criterion::{criterion_group, criterion_main, Criterion}; + +use argmin::core::observers::{ObserverMode, SlogLogger}; +use argmin::core::{CostFunction, Error, Executor, Gradient}; +// use argmin::solver::linesearch::HagerZhangLineSearch; +use argmin::solver::linesearch::MoreThuenteLineSearch; +use argmin::solver::quasinewton::SR1; +use argmin_testfunctions::styblinski_tang; +use finitediff::FiniteDiff; +use ndarray::{array, Array1, Array2}; + +struct StyblinskiTang {} + +impl CostFunction for StyblinskiTang { + type Param = Array1; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(styblinski_tang(&p.to_vec())) + } +} +impl Gradient for StyblinskiTang { + type Param = Array1; + type Gradient = Array1; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok((*p).forward_diff(&|x| styblinski_tang(&x.to_vec()))) + } +} + +fn run() -> Result<(), Error> { + // Define cost function + let cost = StyblinskiTang {}; + + // Define initial parameter vector + // let init_param: Array1 = array![-1.2, 1.0, -5.0, 2.0, 3.0, 2.0, 4.0, 5.0]; + let init_param: Array1 = array![5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0]; + let init_hessian: Array2 = Array2::eye(8); + + // set up a line search + let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?; + // let linesearch = HagerZhangLineSearch::new(); + + // Set up solver + let solver = SR1::new(linesearch); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| { + state + .param(init_param) + .inv_hessian(init_hessian) + .max_iters(1000) + }) + // .add_observer(SlogLogger::term(), ObserverMode::Always) + .run()?; + Ok(()) +} + + +fn criterion_benchmark(c: &mut Criterion) { + c.bench_function("SR1", |b| b.iter(|| run())); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); + diff --git a/argmin/benches/sr1_trustregion.rs b/argmin/benches/sr1_trustregion.rs new file mode 100644 index 000000000..834de84ae --- /dev/null +++ b/argmin/benches/sr1_trustregion.rs @@ -0,0 +1,88 @@ +// Copyright 2018-2022 argmin developers +// +// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. This file may not be +// copied, modified, or distributed except according to those terms. 
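+//
+// This benchmark measures a complete SR1 trust region run on the 2-dimensional
+// Rosenbrock function, with the Steihaug method as subproblem solver and
+// finite-difference gradients and Hessians. Run it with
+// `cargo bench --bench sr1_trustregion` and the required features enabled.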
+use criterion::{criterion_group, criterion_main, Criterion}; + +use argmin::core::observers::{ObserverMode, SlogLogger}; +use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian}; +use argmin::solver::quasinewton::SR1TrustRegion; +#[allow(unused_imports)] +use argmin::solver::trustregion::{CauchyPoint, Dogleg, Steihaug, TrustRegion}; +use argmin_testfunctions::rosenbrock; +use finitediff::FiniteDiff; +use ndarray::{array, Array1, Array2}; + +struct Rosenbrock { + a: f64, + b: f64, +} + +impl CostFunction for Rosenbrock { + type Param = Array1; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock(&p.to_vec(), self.a, self.b)) + } +} +impl Gradient for Rosenbrock { + type Param = Array1; + type Gradient = Array1; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b))) + } +} + +impl Hessian for Rosenbrock { + type Param = Array1; + type Hessian = Array2; + + fn hessian(&self, p: &Self::Param) -> Result { + Ok((*p).forward_hessian(&|x| self.gradient(x).unwrap())) + } +} + +fn run() -> Result<(), Error> { + // Define cost function + let cost = Rosenbrock { a: 1.0, b: 100.0 }; + + // Define initial parameter vector + let init_param: Array1 = array![-1.2, 1.0]; + // let init_param: Array1 = array![1.2, 1.0]; + let init_hessian: Array2 = Array2::eye(2); + // let init_param: Array1 = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0]; + // let init_hessian: Array2 = Array2::eye(8); + + // Set up the subproblem + let subproblem = Steihaug::new().with_max_iters(20); + // let subproblem = CauchyPoint::new(); + // let subproblem = Dogleg::new(); + + // Set up solver + let solver = SR1TrustRegion::new(subproblem); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| { + state + .param(init_param) + .hessian(init_hessian) + .max_iters(1000) + }) + // .add_observer(SlogLogger::term(), ObserverMode::Always) + .run()?; + Ok(()) +} + + +fn criterion_benchmark(c: &mut Criterion) { + c.bench_function("SR1TrustRegion", |b| b.iter(|| run())); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); + diff --git a/argmin/benches/steepestdescent.rs b/argmin/benches/steepestdescent.rs new file mode 100644 index 000000000..0e56bd44c --- /dev/null +++ b/argmin/benches/steepestdescent.rs @@ -0,0 +1,73 @@ +// Copyright 2018-2022 argmin developers +// +// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. This file may not be +// copied, modified, or distributed except according to those terms. 
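+//
+// This benchmark measures steepest descent with a More-Thuente line search on
+// the 2-dimensional Rosenbrock function (10 iterations from the "easy"
+// starting point [1.2, 1.2]). Run it with `cargo bench --bench steepestdescent`.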
+ + +use criterion::{criterion_group, criterion_main, Criterion}; + +use argmin::core::observers::{ObserverMode, SlogLogger}; +use argmin::core::{CostFunction, Error, Executor, Gradient}; +use argmin::solver::gradientdescent::SteepestDescent; +use argmin::solver::linesearch::HagerZhangLineSearch; +use argmin::solver::linesearch::MoreThuenteLineSearch; +use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative}; + +struct Rosenbrock { + a: f64, + b: f64, +} + +impl CostFunction for Rosenbrock { + type Param = Vec; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d(p, self.a, self.b)) + } +} + +impl Gradient for Rosenbrock { + type Param = Vec; + type Gradient = Vec; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d_derivative(p, self.a, self.b)) + } +} + +fn run() -> Result<(), Error> { + // Define cost function (must implement `ArgminOperator`) + let cost = Rosenbrock { a: 1.0, b: 100.0 }; + + // Define initial parameter vector + // easy case + let init_param: Vec = vec![1.2, 1.2]; + // tough case + // let init_param: Vec = vec![-1.2, 1.0]; + + // Pick a line search. + // let linesearch = HagerZhangLineSearch::new(); + let linesearch = MoreThuenteLineSearch::new(); + + // Set up solver + let solver = SteepestDescent::new(linesearch); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| state.param(init_param).max_iters(10)) + // .add_observer(SlogLogger::term(), ObserverMode::Always) + .run()?; + Ok(()) +} + + +fn criterion_benchmark(c: &mut Criterion) { + c.bench_function("SteepestDescent", |b| b.iter(|| run())); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); + diff --git a/argmin/benches/trustregion_nd.rs b/argmin/benches/trustregion_nd.rs new file mode 100644 index 000000000..49a014ea2 --- /dev/null +++ b/argmin/benches/trustregion_nd.rs @@ -0,0 +1,87 @@ +// Copyright 2018-2022 argmin developers +// +// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. This file may not be +// copied, modified, or distributed except according to those terms. + +use criterion::{criterion_group, criterion_main, Criterion}; + +use argmin::core::observers::{ObserverMode, SlogLogger}; +use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian}; +#[allow(unused_imports)] +use argmin::solver::trustregion::{CauchyPoint, Dogleg, Steihaug, TrustRegion}; +use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative, rosenbrock_2d_hessian}; +use ndarray::{Array, Array1, Array2}; + +struct Rosenbrock { + a: f64, + b: f64, +} + +impl CostFunction for Rosenbrock { + type Param = Array1; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d(&p.to_vec(), self.a, self.b)) + } +} + +impl Gradient for Rosenbrock { + type Param = Array1; + type Gradient = Array1; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok(Array1::from(rosenbrock_2d_derivative( + &p.to_vec(), + self.a, + self.b, + ))) + } +} + +impl Hessian for Rosenbrock { + type Param = Array1; + type Hessian = Array2; + + fn hessian(&self, p: &Self::Param) -> Result { + let h = rosenbrock_2d_hessian(&p.to_vec(), self.a, self.b); + Ok(Array::from_shape_vec((2, 2), h)?) 
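+        // (`rosenbrock_2d_hessian` returns the four second derivatives as a
+        // flat Vec in row-major order; `from_shape_vec` turns them into the
+        // 2x2 Hessian matrix.)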
+ } +} + +fn run() -> Result<(), Error> { + // Define cost function + let cost = Rosenbrock { a: 1.0, b: 100.0 }; + + // Define initial parameter vector + // easy case + // let init_param: Array1 = Array1::from_vec(vec![1.2, 1.2]); + // tough case + let init_param: Array1 = Array1::from(vec![-1.2, 1.0]); + + // Set up the subproblem + // let subproblem = Steihaug::new().with_max_iters(2); + let subproblem = CauchyPoint::new(); + // let subproblem = Dogleg::new(); + + // Set up solver + let solver = TrustRegion::new(subproblem); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| state.param(init_param).max_iters(50)) + // .add_observer(SlogLogger::term(), ObserverMode::Always) + .run()?; + Ok(()) +} + + +fn criterion_benchmark(c: &mut Criterion) { + c.bench_function("TrustRegion_nd", |b| b.iter(|| run())); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); + From 20b64ffd3b66961c10ed717ae69bf4425bbf5734 Mon Sep 17 00:00:00 2001 From: jonboh Date: Fri, 12 May 2023 22:47:04 +0200 Subject: [PATCH 03/16] benchmark ParticleSwarm backends Vec, nalgebra, ndarray --- argmin/Cargo.toml | 7 +-- argmin/benches/particleswarm.rs | 81 +++++++++++++++++++++++++++++---- 2 files changed, 73 insertions(+), 15 deletions(-) diff --git a/argmin/Cargo.toml b/argmin/Cargo.toml index 7f1057ef4..e3582d0d6 100644 --- a/argmin/Cargo.toml +++ b/argmin/Cargo.toml @@ -289,12 +289,7 @@ required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] [[bench]] name = "particleswarm" harness = false -required-features = [] - -[[bench]] -name = "particleswarm_nalgebra" -harness = false -required-features = ["argmin-math/nalgebra_latest-serde"] +required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"] [[bench]] name = "simulatedannealing" diff --git a/argmin/benches/particleswarm.rs b/argmin/benches/particleswarm.rs index 89d049ca2..f3539fea9 100644 --- a/argmin/benches/particleswarm.rs +++ b/argmin/benches/particleswarm.rs @@ -4,15 +4,17 @@ // http://apache.org/licenses/LICENSE-2.0> or the MIT license , at your option. This file may not be // copied, modified, or distributed except according to those terms. 
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
 
 use argmin::core::{CostFunction, Error, Executor};
 use argmin::solver::particleswarm::ParticleSwarm;
 use argmin_testfunctions::himmelblau;
+use nalgebra::{dvector, DVector};
+use ndarray::{array, Array1};
 
-struct Himmelblau {}
+struct HimmelblauVec {}
 
-impl CostFunction for Himmelblau {
+impl CostFunction for HimmelblauVec {
     type Param = Vec<f64>;
     type Output = f64;
 
@@ -21,22 +23,83 @@ impl CostFunction for Himmelblau {
     }
 }
 
-fn run() -> Result<(), Error> {
-    let cost_function = Himmelblau {};
+struct HimmelblauNG {}
 
-    let solver = ParticleSwarm::new((vec![-4.0, -4.0], vec![4.0, 4.0]), 40);
+impl CostFunction for HimmelblauNG {
+    type Param = DVector<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(himmelblau(param.into()))
+    }
+}
+
+struct HimmelblauNdarray {}
+
+impl CostFunction for HimmelblauNdarray {
+    type Param = Array1<f64>;
+    type Output = f64;
+
+    fn cost(&self, param: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(himmelblau(param.as_slice().unwrap()))
+    }
+}
+
+fn run_vec(bound: f64, num_particles: usize, iterations: u64) -> Result<(), Error> {
+    let cost_function = HimmelblauVec {};
+
+    let solver = ParticleSwarm::new((vec![-bound, -bound], vec![bound, bound]), num_particles);
 
     let res = Executor::new(cost_function, solver)
-        .configure(|state| state.max_iters(100))
+        .configure(|state| state.max_iters(iterations))
         .run()?;
     Ok(())
 }
 
+fn run_nalgebra(bound: f64, num_particles: usize, iterations: u64) -> Result<(), Error> {
+    let cost_function = HimmelblauNG {};
+
+    let solver = ParticleSwarm::new(
+        (dvector![-bound, -bound], dvector![bound, bound]),
+        num_particles,
+    );
+
+    let res = Executor::new(cost_function, solver)
+        .configure(|state| state.max_iters(iterations))
+        .run()?;
+    Ok(())
+}
+
+fn run_ndarray(bound: f64, num_particles: usize, iterations: u64) -> Result<(), Error> {
+    let cost_function = HimmelblauNdarray {};
+
+    let solver = ParticleSwarm::new(
+        (array![-bound, -bound], array![bound, bound]),
+        num_particles,
+    );
+
+    let res = Executor::new(cost_function, solver)
+        .configure(|state| state.max_iters(iterations))
+        .run()?;
+    Ok(())
+}
 
 fn criterion_benchmark(c: &mut Criterion) {
-    c.bench_function("ParticleSwarm", |b| b.iter(|| run()));
+    let bound = 4.0;
+    let num_particles = 40;
+    let iterations = 100;
+    let mut group = c.benchmark_group("ParticleSwarm");
+    group.bench_function("ParticleSwarm_Vec", |b| {
+        b.iter(|| run_vec(black_box(bound), black_box(num_particles), black_box(iterations)))
+    });
+    group.bench_function("ParticleSwarm_nalgebra", |b| {
+        b.iter(|| run_nalgebra(black_box(bound), black_box(num_particles), black_box(iterations)))
+    });
+    group.bench_function("ParticleSwarm_ndarray", |b| {
+        b.iter(|| run_ndarray(black_box(bound), black_box(num_particles), black_box(iterations)))
+    });
+    group.finish();
 }
 
 criterion_group!(benches, criterion_benchmark);
 criterion_main!(benches);
-

From 84940bbca051520fbcc5fd19e191d37b1180871d Mon Sep 17 00:00:00 2001
From: jonboh
Date: Fri, 12 May 2023 23:00:09 +0200
Subject: [PATCH 04/16] remove particleswarm_nalgebra file

this benchmark has been moved to particleswarm in order to benchmark
the backends as a group
---
 argmin/benches/particleswarm_nalgebra.rs | 43 ------------------------
 1 file changed, 43 deletions(-)
 delete mode 100644 argmin/benches/particleswarm_nalgebra.rs

diff --git a/argmin/benches/particleswarm_nalgebra.rs b/argmin/benches/particleswarm_nalgebra.rs
deleted
file mode 100644 index ea4eab592..000000000 --- a/argmin/benches/particleswarm_nalgebra.rs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2018-2022 argmin developers -// -// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. This file may not be -// copied, modified, or distributed except according to those terms. -use criterion::{criterion_group, criterion_main, Criterion}; - -use argmin::core::{CostFunction, Error, Executor}; -use argmin::solver::particleswarm::ParticleSwarm; -use argmin_testfunctions::himmelblau; -use nalgebra::{dvector, DVector}; - -struct Himmelblau {} - -impl CostFunction for Himmelblau { - type Param = DVector; - type Output = f64; - - fn cost(&self, param: &Self::Param) -> Result { - Ok(himmelblau(param.data.as_vec())) - } -} - -fn run() -> Result<(), Error> { - let cost_function = Himmelblau {}; - - let solver = ParticleSwarm::new((dvector![-4.0, -4.0], dvector![4.0, 4.0]), 40); - - let res = Executor::new(cost_function, solver) - .configure(|state| state.max_iters(100)) - .run()?; - Ok(()) -} - - -fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("ParticleSwarm_ngalgebra", |b| b.iter(|| run())); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); - From 46a3034b4b7b9ee2257f4227c7bf5aa929e84bd4 Mon Sep 17 00:00:00 2001 From: jonboh Date: Fri, 12 May 2023 23:31:23 +0200 Subject: [PATCH 05/16] benchmark lbfgs on the three backends --- argmin/benches/lbfgs.rs | 189 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 172 insertions(+), 17 deletions(-) diff --git a/argmin/benches/lbfgs.rs b/argmin/benches/lbfgs.rs index 6c5013897..23186c2ba 100644 --- a/argmin/benches/lbfgs.rs +++ b/argmin/benches/lbfgs.rs @@ -4,64 +4,219 @@ // http://apache.org/licenses/LICENSE-2.0> or the MIT license , at your option. This file may not be // copied, modified, or distributed except according to those terms. 
-use criterion::{criterion_group, criterion_main, Criterion}; +use criterion::{black_box, criterion_group, criterion_main, Criterion}; use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::linesearch::MoreThuenteLineSearch; use argmin::solver::quasinewton::LBFGS; -use argmin_testfunctions::rosenbrock; +use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative}; use finitediff::FiniteDiff; +use nalgebra::DVector; use ndarray::{array, Array1}; -struct Rosenbrock { +struct RosenbrockVec { a: f64, b: f64, } -impl CostFunction for Rosenbrock { +struct RosenbrockNG { + a: f64, + b: f64, +} + +struct RosenbrockNd { + a: f64, + b: f64, +} + +impl CostFunction for RosenbrockVec { + type Param = Vec; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d(p, self.a, self.b)) + } +} + +impl CostFunction for RosenbrockNG { + type Param = DVector; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d(p.data.as_vec(), self.a, self.b)) + } +} + +impl CostFunction for RosenbrockNd { type Param = Array1; type Output = f64; fn cost(&self, p: &Self::Param) -> Result { - Ok(rosenbrock(&p.to_vec(), self.a, self.b)) + Ok(rosenbrock_2d(&p.to_vec(), self.a, self.b)) } } -impl Gradient for Rosenbrock { + +impl Gradient for RosenbrockVec { + type Param = Vec; + type Gradient = Vec; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d_derivative(p, self.a, self.b)) + } +} + +impl Gradient for RosenbrockNG { + type Param = DVector; + type Gradient = DVector; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d_derivative( + p.data.as_vec(), + self.a, + self.b, + ).into()) + } +} + +impl Gradient for RosenbrockNd { type Param = Array1; type Gradient = Array1; fn gradient(&self, p: &Self::Param) -> Result { - Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b))) + Ok(rosenbrock_2d_derivative(p.as_slice().unwrap(), self.a, self.b).into()) } } -fn run() -> Result<(), Error> { +fn run_vec( + a: f64, + b: f64, + init_param: &Vec, + c1: f64, + c2: f64, + m: usize, + iterations: u64, +) -> Result<(), Error> { // Define cost function - let cost = Rosenbrock { a: 1.0, b: 100.0 }; + let cost = RosenbrockVec { a, b }; // Define initial parameter vector - let init_param: Array1 = array![-1.2, 1.0]; - // let init_param: Array1 = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0]; + let init_param = (*init_param).clone(); + // set up a line search + let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; + // Set up solver + let solver = LBFGS::new(linesearch, m); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| state.param(init_param).max_iters(iterations)) + .run()?; + Ok(()) +} +fn run_ngalgebra( + a: f64, + b: f64, + init_param: &Vec, + c1: f64, + c2: f64, + m: usize, + iterations: u64, +) -> Result<(), Error> { + // Define cost function + let cost = RosenbrockNG { a, b }; + // Define initial parameter vector + let init_param: DVector = DVector::from((*init_param).clone()); // set up a line search - let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?; + let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; // Set up solver - let solver = LBFGS::new(linesearch, 7); + let solver = LBFGS::new(linesearch, m); // Run solver let res = Executor::new(cost, solver) - .configure(|state| state.param(init_param).max_iters(100)) - // .add_observer(SlogLogger::term(), 
ObserverMode::Always) + .configure(|state| state.param(init_param).max_iters(iterations)) .run()?; Ok(()) } +fn run_ndarray( + a: f64, + b: f64, + init_param: &Vec, + c1: f64, + c2: f64, + m: usize, + iterations: u64, +) -> Result<(), Error> { + // Define cost function + let cost = RosenbrockNd { a, b }; + + // Define initial parameter vector + let init_param: Array1 = Array1::from_vec((*init_param).clone()); + // let init_param: Array1 = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0]; + + // set up a line search + let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; + // Set up solver + let solver = LBFGS::new(linesearch, m); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| state.param(init_param).max_iters(iterations)) + .run()?; + Ok(()) +} fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("LBFGS", |b| b.iter(|| run())); + let a = 1.0; + let b = 100.0; + let init_param = vec![-1.2, 1.0]; + let c1 = 1e-4; + let c2 = 0.9; + let m = 7; + let iterations: u64 = 100; + let mut group = c.benchmark_group("LBFGS"); + group.bench_function("LBFGS_Vec", |bencher| { + bencher.iter(|| { + run_vec( + black_box(a), + black_box(b), + black_box(&init_param), + black_box(c1), + black_box(c2), + black_box(m), + black_box(iterations), + ) + }) + }); + group.bench_function("LBFGS_ngalgebra", |bencher| { + bencher.iter(|| { + run_ngalgebra( + black_box(a), + black_box(b), + black_box(&init_param), + black_box(c1), + black_box(c2), + black_box(m), + black_box(iterations), + ) + }) + }); + group.bench_function("LBFGS_ndarray", |bencher| { + bencher.iter(|| { + run_ndarray( + black_box(a), + black_box(b), + black_box(&init_param), + black_box(c1), + black_box(c2), + black_box(m), + black_box(iterations), + ) + }) + }); + group.finish(); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - From 1273a50cd006eab4048bb76c6d9a5dfc2458448d Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 09:06:16 +0200 Subject: [PATCH 06/16] remove lbfgs nalgebra bench which is already included in lbfgs bench --- argmin/Cargo.toml | 7 +--- argmin/benches/lbfgs.rs | 3 +- argmin/benches/lbfgs_nalgebra.rs | 68 -------------------------------- 3 files changed, 3 insertions(+), 75 deletions(-) delete mode 100644 argmin/benches/lbfgs_nalgebra.rs diff --git a/argmin/Cargo.toml b/argmin/Cargo.toml index e3582d0d6..7679d7a55 100644 --- a/argmin/Cargo.toml +++ b/argmin/Cargo.toml @@ -249,12 +249,7 @@ required-features = ["slog-logger"] [[bench]] name = "lbfgs" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] - -[[bench]] -name = "lbfgs_nalgebra" -harness = false -required-features = ["argmin-math/nalgebra_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde", "slog-logger"] [[bench]] name = "morethuente" diff --git a/argmin/benches/lbfgs.rs b/argmin/benches/lbfgs.rs index 23186c2ba..b233ebc12 100644 --- a/argmin/benches/lbfgs.rs +++ b/argmin/benches/lbfgs.rs @@ -101,7 +101,8 @@ fn run_vec( let cost = RosenbrockVec { a, b }; // Define initial parameter vector - let init_param = (*init_param).clone(); + let init_param = (*init_param).clone(); // This is here to account for the same clone on + // ndarray and ngalgebra // set up a line search let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; // Set up solver diff --git a/argmin/benches/lbfgs_nalgebra.rs b/argmin/benches/lbfgs_nalgebra.rs deleted file mode 100644 index 
3bb7936f7..000000000 --- a/argmin/benches/lbfgs_nalgebra.rs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2018-2022 argmin developers -// -// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. This file may not be -// copied, modified, or distributed except according to those terms. -use criterion::{criterion_group, criterion_main, Criterion}; - -use argmin::core::observers::{ObserverMode, SlogLogger}; -use argmin::core::{CostFunction, Error, Executor, Gradient}; -use argmin::solver::linesearch::MoreThuenteLineSearch; -use argmin::solver::quasinewton::LBFGS; -use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative}; -use nalgebra::DVector; - -struct Rosenbrock { - a: f64, - b: f64, -} - -impl CostFunction for Rosenbrock { - type Param = DVector; - type Output = f64; - - fn cost(&self, p: &Self::Param) -> Result { - Ok(rosenbrock_2d(p.data.as_vec(), self.a, self.b)) - } -} - -impl Gradient for Rosenbrock { - type Param = DVector; - type Gradient = DVector; - - fn gradient(&self, p: &Self::Param) -> Result { - Ok(DVector::from(rosenbrock_2d_derivative( - p.data.as_vec(), - self.a, - self.b, - ))) - } -} - -fn run() -> Result<(), Error> { - // Define cost function - let cost = Rosenbrock { a: 1.0, b: 100.0 }; - // Define initial parameter vector - let init_param: DVector = DVector::from(vec![-1.2, 1.0]); - // set up a line search - let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?; - // Set up solver - let solver = LBFGS::new(linesearch, 7); - - // Run solver - let res = Executor::new(cost, solver) - .configure(|state| state.param(init_param).max_iters(100)) - // .add_observer(SlogLogger::term(), ObserverMode::Always) - .run()?; - Ok(()) -} - - -fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("LBFGS_nalgebra", |b| b.iter(|| run())); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); - From 016e6fc4e249e6fd52098a97f6840f1abb95761f Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 11:47:48 +0200 Subject: [PATCH 07/16] remove slog from benchmarks --- argmin/Cargo.toml | 56 +++++++++++++-------------- argmin/benches/backtracking.rs | 3 +- argmin/benches/bfgs.rs | 1 - argmin/benches/brentopt.rs | 5 +-- argmin/benches/brentroot.rs | 5 +-- argmin/benches/conjugategradient.rs | 5 +-- argmin/benches/dfp.rs | 1 - argmin/benches/gaussnewton.rs | 1 - argmin/benches/goldensectionsearch.rs | 5 +-- argmin/benches/hagerzhang.rs | 3 +- argmin/benches/landweber.rs | 5 +-- argmin/benches/morethuente.rs | 5 +-- argmin/benches/neldermead.rs | 1 - argmin/benches/newton.rs | 1 - argmin/benches/newton_cg.rs | 1 - argmin/benches/nonlinear_cg.rs | 5 +-- argmin/benches/owl_qn.rs | 1 - argmin/benches/simulatedannealing.rs | 5 +-- argmin/benches/sr1.rs | 1 - argmin/benches/sr1_trustregion.rs | 1 - argmin/benches/steepestdescent.rs | 6 +-- argmin/benches/trustregion_nd.rs | 1 - 22 files changed, 48 insertions(+), 70 deletions(-) diff --git a/argmin/Cargo.toml b/argmin/Cargo.toml index 7679d7a55..ed66f7803 100644 --- a/argmin/Cargo.toml +++ b/argmin/Cargo.toml @@ -189,97 +189,97 @@ required-features = ["argmin-math/ndarray_latest-serde", "slog-logger", "serde1" [[bench]] name = "backtracking" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "bfgs" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "brentroot" harness = false -required-features = 
["slog-logger"] +required-features = [] [[bench]] name = "brentopt" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "conjugategradient" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "dfp" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "gaussnewton" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"] [[bench]] name = "gaussnewton_linesearch" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] - -[[bench]] -name = "gaussnewton_nalgebra" -harness = false -required-features = ["_nalgebral", "argmin-math/nalgebra_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "goldensectionsearch" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "hagerzhang" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "landweber" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "lbfgs" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] + +[[bench]] +name = "lbfgs2d" +harness = false +required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_latest-serde"] [[bench]] name = "morethuente" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "neldermead" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "newton" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "newton_cg" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "nonlinear_cg" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "owl_qn" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "particleswarm" @@ -289,24 +289,24 @@ required-features = ["argmin-math/ndarray_latest-serde", "argmin-math/nalgebra_l [[bench]] name = "simulatedannealing" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "sr1" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "sr1_trustregion" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-loger"] +required-features = ["argmin-math/ndarray_latest-serde"] [[bench]] name = "steepestdescent" harness = false -required-features = ["slog-logger"] +required-features = [] [[bench]] name = "trustregion_nd" harness = false -required-features = ["argmin-math/ndarray_latest-serde", "slog-logger"] +required-features = ["argmin-math/ndarray_latest-serde"] diff --git a/argmin/benches/backtracking.rs b/argmin/benches/backtracking.rs 
index 0f0141630..72b133687 100644 --- a/argmin/benches/backtracking.rs +++ b/argmin/benches/backtracking.rs @@ -7,7 +7,6 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch}; use argmin::solver::linesearch::{condition::ArmijoCondition, BacktrackingLineSearch}; use argmin_testfunctions::{sphere, sphere_derivative}; @@ -65,7 +64,7 @@ fn run() -> Result<(), Error> { fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("Backtracking", |b| b.iter(|| run())); + c.bench_function("Backtracking", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs index ff0c9f90e..a7551faee 100644 --- a/argmin/benches/bfgs.rs +++ b/argmin/benches/bfgs.rs @@ -7,7 +7,6 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::linesearch::MoreThuenteLineSearch; use argmin::solver::quasinewton::BFGS; diff --git a/argmin/benches/brentopt.rs b/argmin/benches/brentopt.rs index 735ac5e34..576b3e781 100644 --- a/argmin/benches/brentopt.rs +++ b/argmin/benches/brentopt.rs @@ -7,7 +7,6 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor}; use argmin::solver::brent::BrentOpt; @@ -32,7 +31,7 @@ fn run() -> Result<(), Error> { let cost = TestFunc {}; let solver = BrentOpt::new(-10., 10.); - let res = Executor::new(cost, solver) + let _res = Executor::new(cost, solver) .configure(|state| state.max_iters(100)) // .add_observer(SlogLogger::term(), ObserverMode::Always) .run() @@ -42,7 +41,7 @@ fn run() -> Result<(), Error> { fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("BrentOpt", |b| b.iter(|| run())); + c.bench_function("BrentOpt", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/brentroot.rs b/argmin/benches/brentroot.rs index 36e587102..20dc33693 100644 --- a/argmin/benches/brentroot.rs +++ b/argmin/benches/brentroot.rs @@ -7,7 +7,6 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor}; use argmin::solver::brent::BrentRoot; @@ -35,7 +34,7 @@ fn run() -> Result<(), Error> { let init_param = 0.5; let solver = BrentRoot::new(-4., 0.5, 1e-11); - let res = Executor::new(cost, solver) + let _res = Executor::new(cost, solver) .configure(|state| state.param(init_param).max_iters(100)) // .add_observer(SlogLogger::term(), ObserverMode::Always) .run() @@ -45,7 +44,7 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("BrentRoot", |b| b.iter(|| run())); + c.bench_function("BrentRoot", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/conjugategradient.rs b/argmin/benches/conjugategradient.rs index 4eed4367c..ec80fecdf 100644 --- a/argmin/benches/conjugategradient.rs +++ b/argmin/benches/conjugategradient.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. 
use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{Error, Executor, Operator}; use argmin::solver::conjugategradient::ConjugateGradient; @@ -35,7 +34,7 @@ fn run() -> Result<(), Error> { let solver: ConjugateGradient<_, f64> = ConjugateGradient::new(b); // Run solver - let res = Executor::new(operator, solver) + let _res = Executor::new(operator, solver) .configure(|state| state.param(init_param).max_iters(2)) // .add_observer(SlogLogger::term(), ObserverMode::Always) .run()?; @@ -44,7 +43,7 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("ConjugateGradient", |b| b.iter(|| run())); + c.bench_function("ConjugateGradient", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/dfp.rs b/argmin/benches/dfp.rs index b1eba4bc7..78b6f0621 100644 --- a/argmin/benches/dfp.rs +++ b/argmin/benches/dfp.rs @@ -7,7 +7,6 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::linesearch::MoreThuenteLineSearch; use argmin::solver::quasinewton::DFP; diff --git a/argmin/benches/gaussnewton.rs b/argmin/benches/gaussnewton.rs index 2f100b879..c4963fb66 100644 --- a/argmin/benches/gaussnewton.rs +++ b/argmin/benches/gaussnewton.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{Error, Executor, Jacobian, Operator}; use argmin::solver::gaussnewton::GaussNewton; use ndarray::{Array1, Array2}; diff --git a/argmin/benches/goldensectionsearch.rs b/argmin/benches/goldensectionsearch.rs index 7ebe551fd..9ce8f4094 100644 --- a/argmin/benches/goldensectionsearch.rs +++ b/argmin/benches/goldensectionsearch.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor}; use argmin::solver::goldensectionsearch::GoldenSectionSearch; @@ -31,7 +30,7 @@ fn run() -> Result<(), Error> { let init_param = -0.5; let solver = GoldenSectionSearch::new(-2.5, 3.0)?.with_tolerance(0.0001)?; - let res = Executor::new(cost, solver) + let _res = Executor::new(cost, solver) .configure(|state| state.param(init_param).max_iters(100)) // .add_observer(SlogLogger::term(), ObserverMode::Always) .run() @@ -40,7 +39,7 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("GoldenSectionSearch", |b| b.iter(|| run())); + c.bench_function("GoldenSectionSearch", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/hagerzhang.rs b/argmin/benches/hagerzhang.rs index 93b10ff08..01b0bbbf2 100644 --- a/argmin/benches/hagerzhang.rs +++ b/argmin/benches/hagerzhang.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. 
use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch}; use argmin::solver::linesearch::HagerZhangLineSearch; use argmin_testfunctions::{sphere, sphere_derivative}; @@ -65,7 +64,7 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("HagerZhangLineSearch", |b| b.iter(|| run())); + c.bench_function("HagerZhangLineSearch", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/landweber.rs b/argmin/benches/landweber.rs index a978f79e1..faed3da37 100644 --- a/argmin/benches/landweber.rs +++ b/argmin/benches/landweber.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{Error, Executor, Gradient}; use argmin::solver::landweber::Landweber; use argmin_testfunctions::rosenbrock_2d_derivative; @@ -30,7 +29,7 @@ fn run() -> Result<(), Error> { let iters = 10; let solver = Landweber::new(0.001); - let res = Executor::new(operator, solver) + let _res = Executor::new(operator, solver) .configure(|state| state.param(init_param).max_iters(iters)) // .add_observer(SlogLogger::term(), ObserverMode::Always) .run()?; @@ -38,7 +37,7 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("Landweber", |b| b.iter(|| run())); + c.bench_function("Landweber", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/morethuente.rs b/argmin/benches/morethuente.rs index 5d96290ae..cd49eb85e 100644 --- a/argmin/benches/morethuente.rs +++ b/argmin/benches/morethuente.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient, LineSearch}; use argmin::solver::linesearch::MoreThuenteLineSearch; use argmin_testfunctions::{sphere, sphere_derivative}; @@ -47,7 +46,7 @@ fn run() -> Result<(), Error> { let init_grad = operator.gradient(&init_param)?; // Run solver - let res = Executor::new(operator, solver) + let _res = Executor::new(operator, solver) // .add_observer(SlogLogger::term(), ObserverMode::Always) // Gradient and cost are optional. If they are not provided, they will be computed .configure(|state| { @@ -63,7 +62,7 @@ fn run() -> Result<(), Error> { fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("MoreThuenteLineSearch", |b| b.iter(|| run())); + c.bench_function("MoreThuenteLineSearch", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/neldermead.rs b/argmin/benches/neldermead.rs index e8bcb5b77..0e71973dc 100644 --- a/argmin/benches/neldermead.rs +++ b/argmin/benches/neldermead.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. 
use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor}; use argmin::solver::neldermead::NelderMead; use argmin_testfunctions::rosenbrock; diff --git a/argmin/benches/newton.rs b/argmin/benches/newton.rs index c700ad7b1..29e70306d 100644 --- a/argmin/benches/newton.rs +++ b/argmin/benches/newton.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{Error, Executor, Gradient, Hessian}; use argmin::solver::newton::Newton; use argmin_testfunctions::{rosenbrock_2d_derivative, rosenbrock_2d_hessian}; diff --git a/argmin/benches/newton_cg.rs b/argmin/benches/newton_cg.rs index 82bc29dcf..91c1005b7 100644 --- a/argmin/benches/newton_cg.rs +++ b/argmin/benches/newton_cg.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian}; use argmin::solver::linesearch::MoreThuenteLineSearch; use argmin::solver::newton::NewtonCG; diff --git a/argmin/benches/nonlinear_cg.rs b/argmin/benches/nonlinear_cg.rs index 1fc4cfe79..11622e882 100644 --- a/argmin/benches/nonlinear_cg.rs +++ b/argmin/benches/nonlinear_cg.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::conjugategradient::{beta::PolakRibiere, NonlinearConjugateGradient}; use argmin::solver::linesearch::MoreThuenteLineSearch; @@ -59,7 +58,7 @@ fn run() -> Result<(), Error> { .restart_orthogonality(0.1); // Run solver - let res = Executor::new(operator, solver) + let _res = Executor::new(operator, solver) .configure(|state| state.param(init_param).max_iters(20).target_cost(0.0)) // .add_observer(SlogLogger::term(), ObserverMode::Always) .run()?; @@ -68,7 +67,7 @@ fn run() -> Result<(), Error> { fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("NonlinearConjugateGradient", |b| b.iter(|| run())); + c.bench_function("NonlinearConjugateGradient", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/owl_qn.rs b/argmin/benches/owl_qn.rs index 17dce89e3..3e866b0dd 100644 --- a/argmin/benches/owl_qn.rs +++ b/argmin/benches/owl_qn.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::linesearch::MoreThuenteLineSearch; use argmin::solver::quasinewton::LBFGS; diff --git a/argmin/benches/simulatedannealing.rs b/argmin/benches/simulatedannealing.rs index 0a5b5a4ef..3b6bfa078 100644 --- a/argmin/benches/simulatedannealing.rs +++ b/argmin/benches/simulatedannealing.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. 
use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor}; use argmin::solver::simulatedannealing::{Anneal, SATempFunc, SimulatedAnnealing}; use argmin_testfunctions::rosenbrock; @@ -121,7 +120,7 @@ fn run() -> Result<(), Error> { ///////////////////////// // Run solver // ///////////////////////// - let res = Executor::new(operator, solver) + let _res = Executor::new(operator, solver) .configure(|state| { state .param(init_param) @@ -138,7 +137,7 @@ fn run() -> Result<(), Error> { fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("SimulatedAnnealing", |b| b.iter(|| run())); + c.bench_function("SimulatedAnnealing", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/sr1.rs b/argmin/benches/sr1.rs index ac0ec8760..5302be45a 100644 --- a/argmin/benches/sr1.rs +++ b/argmin/benches/sr1.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient}; // use argmin::solver::linesearch::HagerZhangLineSearch; use argmin::solver::linesearch::MoreThuenteLineSearch; diff --git a/argmin/benches/sr1_trustregion.rs b/argmin/benches/sr1_trustregion.rs index 834de84ae..007b899d6 100644 --- a/argmin/benches/sr1_trustregion.rs +++ b/argmin/benches/sr1_trustregion.rs @@ -6,7 +6,6 @@ // copied, modified, or distributed except according to those terms. use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian}; use argmin::solver::quasinewton::SR1TrustRegion; #[allow(unused_imports)] diff --git a/argmin/benches/steepestdescent.rs b/argmin/benches/steepestdescent.rs index 0e56bd44c..08d9102c7 100644 --- a/argmin/benches/steepestdescent.rs +++ b/argmin/benches/steepestdescent.rs @@ -8,10 +8,8 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::gradientdescent::SteepestDescent; -use argmin::solver::linesearch::HagerZhangLineSearch; use argmin::solver::linesearch::MoreThuenteLineSearch; use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative}; @@ -56,7 +54,7 @@ fn run() -> Result<(), Error> { let solver = SteepestDescent::new(linesearch); // Run solver - let res = Executor::new(cost, solver) + let _res = Executor::new(cost, solver) .configure(|state| state.param(init_param).max_iters(10)) // .add_observer(SlogLogger::term(), ObserverMode::Always) .run()?; @@ -65,7 +63,7 @@ fn run() -> Result<(), Error> { fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("SteepestDescent", |b| b.iter(|| run())); + c.bench_function("SteepestDescent", |b| b.iter(run)); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/trustregion_nd.rs b/argmin/benches/trustregion_nd.rs index 49a014ea2..2bebb6e91 100644 --- a/argmin/benches/trustregion_nd.rs +++ b/argmin/benches/trustregion_nd.rs @@ -7,7 +7,6 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient, Hessian}; #[allow(unused_imports)] use 
argmin::solver::trustregion::{CauchyPoint, Dogleg, Steihaug, TrustRegion}; From f2dd0d49cfe01d6fc34a2c875734fde892748aa8 Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 11:49:36 +0200 Subject: [PATCH 08/16] benchmark lbfgs in 2d and with parametrized dimension --- argmin/benches/gaussnewton.rs | 112 ++++++++-- argmin/benches/gaussnewton_nalgebra.rs | 92 --------- argmin/benches/lbfgs.rs | 151 ++++---------- argmin/benches/lbfgs2d.rs | 269 +++++++++++++++++++++++++ 4 files changed, 403 insertions(+), 221 deletions(-) delete mode 100644 argmin/benches/gaussnewton_nalgebra.rs create mode 100644 argmin/benches/lbfgs2d.rs diff --git a/argmin/benches/gaussnewton.rs b/argmin/benches/gaussnewton.rs index c4963fb66..e5d5856fc 100644 --- a/argmin/benches/gaussnewton.rs +++ b/argmin/benches/gaussnewton.rs @@ -4,11 +4,12 @@ // http://apache.org/licenses/LICENSE-2.0> or the MIT license , at your option. This file may not be // copied, modified, or distributed except according to those terms. -use criterion::{criterion_group, criterion_main, Criterion}; +use criterion::{black_box, criterion_group, criterion_main, Criterion}; use argmin::core::{Error, Executor, Jacobian, Operator}; use argmin::solver::gaussnewton::GaussNewton; use ndarray::{Array1, Array2}; +use nalgebra::{DVector, DMatrix}; type Rate = f64; type S = f64; @@ -18,11 +19,18 @@ type Measurement = (S, Rate); // Model used in this example: // `rate = (V_{max} * [S]) / (K_M + [S]) ` // where `V_{max}` and `K_M` are the sought parameters and `[S]` and `rate` is the measured data. -struct Problem { + +struct ProblemNG { + data: Vec, +} + +struct ProblemNd { data: Vec, } -impl Operator for Problem { + + +impl Operator for ProblemNd { type Param = Array1; type Output = Array1; @@ -35,7 +43,22 @@ impl Operator for Problem { } } -impl Jacobian for Problem { +impl Operator for ProblemNG { + type Param = DVector; + type Output = DVector; + + fn apply(&self, p: &Self::Param) -> Result { + Ok(DVector::from_vec( + self.data + .iter() + .map(|(s, rate)| rate - (p[0] * s) / (p[1] + s)) + .collect(), + )) + } +} + + +impl Jacobian for ProblemNd { type Param = Array1; type Jacobian = Array2; @@ -50,36 +73,83 @@ impl Jacobian for Problem { } } -fn run() -> Result<(), Error> { + +impl Jacobian for ProblemNG { + type Param = DVector; + type Jacobian = DMatrix; + + fn jacobian(&self, p: &Self::Param) -> Result { + Ok(DMatrix::from_fn(7, 2, |si, i| { + if i == 0 { + -self.data[si].0 / (p[1] + self.data[si].0) + } else { + p[0] * self.data[si].0 / (p[1] + self.data[si].0).powi(2) + } + })) + } +} + + +fn run_ngalgebra(data: &Vec<(f64, f64)>, init_param: (f64,f64), iterations: u64) -> Result<(), Error> { // Define cost function // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm - let cost = Problem { - data: vec![ - (0.038, 0.050), - (0.194, 0.127), - (0.425, 0.094), - (0.626, 0.2122), - (1.253, 0.2729), - (2.5, 0.2665), - (3.74, 0.3317), - ], + let cost = ProblemNG { + data: data.clone() + }; + + // Define initial parameter vector + let init_param: DVector = DVector::from_vec(vec![init_param.0, init_param.1]); + + // Set up solver + let solver: GaussNewton = GaussNewton::new(); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| state.param(init_param).max_iters(iterations)) + .run()?; + Ok(()) +} + + +fn run_ndarray(data: &Vec<(f64, f64)>, init_param: (f64,f64), iterations: u64) -> Result<(), Error> { + // Define cost function + // Example taken from Wikipedia: 
https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
+    let cost = ProblemNd {
+        data: data.clone(),
+    };
 
-// Define initial parameter vector
-    let init_param: Array1<f64> = Array1::from(vec![0.9, 0.2]);
-// Set up solver
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from(vec![init_param.0, init_param.1]);
+    // Set up solver
     let solver: GaussNewton<f64> = GaussNewton::new();
 
     // Run solver
     let res = Executor::new(cost, solver)
-        .configure(|state| state.param(init_param).max_iters(10))
-        // .add_observer(SlogLogger::term(), ObserverMode::Always)
+        .configure(|state| state.param(init_param).max_iters(iterations))
         .run()?;
     Ok(())
 }
 
 fn criterion_benchmark(c: &mut Criterion) {
-    c.bench_function("GaussNewton", |b| b.iter(|| run()));
+    let data = vec![
+        (0.038, 0.050),
+        (0.194, 0.127),
+        (0.425, 0.094),
+        (0.626, 0.2122),
+        (1.253, 0.2729),
+        (2.5, 0.2665),
+        (3.74, 0.3317),
+    ];
+    let init_param = (0.9, 0.2);
+    let iterations = 10;
+    let mut group = c.benchmark_group("GaussNewton");
+    group.bench_function("GaussNewton_nalgebra", |b| {
+        b.iter(|| run_ngalgebra(black_box(&data), black_box(init_param), black_box(iterations)))
+    });
+    group.bench_function("GaussNewton_ndarray", |b| {
+        b.iter(|| run_ndarray(black_box(&data), black_box(init_param), black_box(iterations)))
+    });
+    group.finish();
 }
 
 criterion_group!(benches, criterion_benchmark);
diff --git a/argmin/benches/gaussnewton_nalgebra.rs b/argmin/benches/gaussnewton_nalgebra.rs
deleted file mode 100644
index 6cda1ea0b..000000000
--- a/argmin/benches/gaussnewton_nalgebra.rs
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright 2018-2022 argmin developers
-//
-// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. This file may not be
-// copied, modified, or distributed except according to those terms.
-use criterion::{criterion_group, criterion_main, Criterion};
-
-use argmin::core::observers::{ObserverMode, SlogLogger};
-use argmin::core::{Error, Executor, Jacobian, Operator};
-use argmin::solver::gaussnewton::GaussNewton;
-
-use nalgebra::{DMatrix, DVector};
-
-type Rate = f64;
-type S = f64;
-type Measurement = (S, Rate);
-
-// Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
-// Model used in this example:
-// `rate = (V_{max} * [S]) / (K_M + [S]) `
-// where `V_{max}` and `K_M` are the sought parameters and `[S]` and `rate` is the measured data.
-struct Problem { - data: Vec, -} - -impl Operator for Problem { - type Param = DVector; - type Output = DVector; - - fn apply(&self, p: &Self::Param) -> Result { - Ok(DVector::from_vec( - self.data - .iter() - .map(|(s, rate)| rate - (p[0] * s) / (p[1] + s)) - .collect(), - )) - } -} - -impl Jacobian for Problem { - type Param = DVector; - type Jacobian = DMatrix; - - fn jacobian(&self, p: &Self::Param) -> Result { - Ok(DMatrix::from_fn(7, 2, |si, i| { - if i == 0 { - -self.data[si].0 / (p[1] + self.data[si].0) - } else { - p[0] * self.data[si].0 / (p[1] + self.data[si].0).powi(2) - } - })) - } -} - -fn run() -> Result<(), Error> { - // Define cost function - // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm - let cost = Problem { - data: vec![ - (0.038, 0.050), - (0.194, 0.127), - (0.425, 0.094), - (0.626, 0.2122), - (1.253, 0.2729), - (2.5, 0.2665), - (3.74, 0.3317), - ], - }; - - // Define initial parameter vector - let init_param: DVector = DVector::from_vec(vec![0.9, 0.2]); - - // Set up solver - let solver: GaussNewton = GaussNewton::new(); - - // Run solver - let res = Executor::new(cost, solver) - .configure(|state| state.param(init_param).max_iters(10)) - // .add_observer(SlogLogger::term(), ObserverMode::Always) - .run()?; - Ok(()) -} - - -fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("GaussNewton_nalgebra", |b| b.iter(|| run())); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); - diff --git a/argmin/benches/lbfgs.rs b/argmin/benches/lbfgs.rs index b233ebc12..7a6399861 100644 --- a/argmin/benches/lbfgs.rs +++ b/argmin/benches/lbfgs.rs @@ -4,13 +4,12 @@ // http://apache.org/licenses/LICENSE-2.0> or the MIT license , at your option. This file may not be // copied, modified, or distributed except according to those terms. 
-use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{black_box, criterion_group, criterion_main, Criterion, BenchmarkId}; -use argmin::core::observers::{ObserverMode, SlogLogger}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::linesearch::MoreThuenteLineSearch; use argmin::solver::quasinewton::LBFGS; -use argmin_testfunctions::{rosenbrock_2d, rosenbrock_2d_derivative}; +use argmin_testfunctions::{rosenbrock, rosenbrock_2d, rosenbrock_2d_derivative}; use finitediff::FiniteDiff; use nalgebra::DVector; use ndarray::{array, Array1}; @@ -20,11 +19,6 @@ struct RosenbrockVec { b: f64, } -struct RosenbrockNG { - a: f64, - b: f64, -} - struct RosenbrockNd { a: f64, b: f64, @@ -35,25 +29,7 @@ impl CostFunction for RosenbrockVec { type Output = f64; fn cost(&self, p: &Self::Param) -> Result { - Ok(rosenbrock_2d(p, self.a, self.b)) - } -} - -impl CostFunction for RosenbrockNG { - type Param = DVector; - type Output = f64; - - fn cost(&self, p: &Self::Param) -> Result { - Ok(rosenbrock_2d(p.data.as_vec(), self.a, self.b)) - } -} - -impl CostFunction for RosenbrockNd { - type Param = Array1; - type Output = f64; - - fn cost(&self, p: &Self::Param) -> Result { - Ok(rosenbrock_2d(&p.to_vec(), self.a, self.b)) + Ok(rosenbrock(&p.to_vec(), self.a, self.b)) } } @@ -62,20 +38,16 @@ impl Gradient for RosenbrockVec { type Gradient = Vec; fn gradient(&self, p: &Self::Param) -> Result { - Ok(rosenbrock_2d_derivative(p, self.a, self.b)) + Ok((*p).forward_diff(&|x| rosenbrock(&x, self.a, self.b))) } } -impl Gradient for RosenbrockNG { - type Param = DVector; - type Gradient = DVector; +impl CostFunction for RosenbrockNd { + type Param = Array1; + type Output = f64; - fn gradient(&self, p: &Self::Param) -> Result { - Ok(rosenbrock_2d_derivative( - p.data.as_vec(), - self.a, - self.b, - ).into()) + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock(&p.to_vec(), self.a, self.b)) } } @@ -84,14 +56,14 @@ impl Gradient for RosenbrockNd { type Gradient = Array1; fn gradient(&self, p: &Self::Param) -> Result { - Ok(rosenbrock_2d_derivative(p.as_slice().unwrap(), self.a, self.b).into()) + Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b))) } } fn run_vec( a: f64, b: f64, - init_param: &Vec, + init_param: &[f64], c1: f64, c2: f64, m: usize, @@ -101,8 +73,7 @@ fn run_vec( let cost = RosenbrockVec { a, b }; // Define initial parameter vector - let init_param = (*init_param).clone(); // This is here to account for the same clone on - // ndarray and ngalgebra + let init_param: Vec = Vec::from(init_param); // set up a line search let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; // Set up solver @@ -115,35 +86,11 @@ fn run_vec( Ok(()) } -fn run_ngalgebra( - a: f64, - b: f64, - init_param: &Vec, - c1: f64, - c2: f64, - m: usize, - iterations: u64, -) -> Result<(), Error> { - // Define cost function - let cost = RosenbrockNG { a, b }; - // Define initial parameter vector - let init_param: DVector = DVector::from((*init_param).clone()); - // set up a line search - let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; - // Set up solver - let solver = LBFGS::new(linesearch, m); - - // Run solver - let res = Executor::new(cost, solver) - .configure(|state| state.param(init_param).max_iters(iterations)) - .run()?; - Ok(()) -} fn run_ndarray( a: f64, b: f64, - init_param: &Vec, + init_param: &[f64], c1: f64, c2: f64, m: usize, @@ -153,8 +100,7 @@ fn run_ndarray( let cost = RosenbrockNd { a, b }; // Define initial 
parameter vector - let init_param: Array1 = Array1::from_vec((*init_param).clone()); - // let init_param: Array1 = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0]; + let init_param: Array1 = Array1::from_vec(Vec::from(init_param)); // set up a line search let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; @@ -171,51 +117,40 @@ fn run_ndarray( fn criterion_benchmark(c: &mut Criterion) { let a = 1.0; let b = 100.0; - let init_param = vec![-1.2, 1.0]; + let init_param = vec![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0]; let c1 = 1e-4; let c2 = 0.9; let m = 7; let iterations: u64 = 100; let mut group = c.benchmark_group("LBFGS"); - group.bench_function("LBFGS_Vec", |bencher| { - bencher.iter(|| { - run_vec( - black_box(a), - black_box(b), - black_box(&init_param), - black_box(c1), - black_box(c2), - black_box(m), - black_box(iterations), - ) - }) - }); - group.bench_function("LBFGS_ngalgebra", |bencher| { - bencher.iter(|| { - run_ngalgebra( - black_box(a), - black_box(b), - black_box(&init_param), - black_box(c1), - black_box(c2), - black_box(m), - black_box(iterations), - ) - }) - }); - group.bench_function("LBFGS_ndarray", |bencher| { - bencher.iter(|| { - run_ndarray( - black_box(a), - black_box(b), - black_box(&init_param), - black_box(c1), - black_box(c2), - black_box(m), - black_box(iterations), - ) - }) - }); + for i in 2..init_param.len() { + group.bench_with_input(BenchmarkId::new("Vec", i), &i, |bencher, i| { + bencher.iter(|| { + run_vec( + black_box(a), + black_box(b), + black_box(&init_param[0..*i]), + black_box(c1), + black_box(c2), + black_box(m), + black_box(iterations), + ) + }) + }); + group.bench_with_input(BenchmarkId::new("ndarray", i), &i, |bencher, i| { + bencher.iter(|| { + run_ndarray( + black_box(a), + black_box(b), + black_box(&init_param[0..*i]), + black_box(c1), + black_box(c2), + black_box(m), + black_box(iterations), + ) + }) + }); + } group.finish(); } diff --git a/argmin/benches/lbfgs2d.rs b/argmin/benches/lbfgs2d.rs new file mode 100644 index 000000000..83b141f16 --- /dev/null +++ b/argmin/benches/lbfgs2d.rs @@ -0,0 +1,269 @@ +// Copyright 2018-2022 argmin developers +// +// Licensed under the Apache License, Version 2.0 or the MIT license , at your option. This file may not be +// copied, modified, or distributed except according to those terms. 
+use criterion::{black_box, criterion_group, criterion_main, Criterion}; + +use argmin::core::{CostFunction, Error, Executor, Gradient}; +use argmin::solver::linesearch::MoreThuenteLineSearch; +use argmin::solver::quasinewton::LBFGS; +use argmin_testfunctions::{rosenbrock, rosenbrock_2d, rosenbrock_2d_derivative}; +use finitediff::FiniteDiff; +use nalgebra::DVector; +use ndarray::{array, Array1}; + +struct RosenbrockVec { + a: f64, + b: f64, +} + +struct RosenbrockNd { + a: f64, + b: f64, +} + +struct Rosenbrock2DVec { + a: f64, + b: f64, +} + +struct Rosenbrock2DNG { + a: f64, + b: f64, +} + +struct Rosenbrock2DNd { + a: f64, + b: f64, +} + +impl CostFunction for Rosenbrock2DVec { + type Param = Vec; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d(p, self.a, self.b)) + } +} + +impl CostFunction for Rosenbrock2DNG { + type Param = DVector; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d(p.data.as_vec(), self.a, self.b)) + } +} + +impl CostFunction for Rosenbrock2DNd { + type Param = Array1; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d(&p.to_vec(), self.a, self.b)) + } +} + +impl Gradient for Rosenbrock2DVec { + type Param = Vec; + type Gradient = Vec; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d_derivative(p, self.a, self.b)) + } +} + +impl Gradient for Rosenbrock2DNG { + type Param = DVector; + type Gradient = DVector; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d_derivative( + p.data.as_vec(), + self.a, + self.b, + ).into()) + } +} + +impl Gradient for Rosenbrock2DNd { + type Param = Array1; + type Gradient = Array1; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok(rosenbrock_2d_derivative(p.as_slice().unwrap(), self.a, self.b).into()) + } +} + +// Multidimensional version +impl CostFunction for RosenbrockVec { + type Param = Vec; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock(&p.to_vec(), self.a, self.b)) + } +} + +impl Gradient for RosenbrockVec { + type Param = Vec; + type Gradient = Vec; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok((*p).forward_diff(&|x| rosenbrock(&x, self.a, self.b))) + } +} + +impl CostFunction for RosenbrockNd { + type Param = Array1; + type Output = f64; + + fn cost(&self, p: &Self::Param) -> Result { + Ok(rosenbrock(&p.to_vec(), self.a, self.b)) + } +} + +impl Gradient for RosenbrockNd { + type Param = Array1; + type Gradient = Array1; + + fn gradient(&self, p: &Self::Param) -> Result { + Ok((*p).forward_diff(&|x| rosenbrock(&x.to_vec(), self.a, self.b))) + } +} + +fn run_2d_vec( + a: f64, + b: f64, + init_param: &Vec, + c1: f64, + c2: f64, + m: usize, + iterations: u64, +) -> Result<(), Error> { + // Define cost function + let cost = Rosenbrock2DVec { a, b }; + + // Define initial parameter vector + let init_param = (*init_param).clone(); // This is here to account for the same clone on + // ndarray and ngalgebra + // set up a line search + let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; + // Set up solver + let solver = LBFGS::new(linesearch, m); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| state.param(init_param).max_iters(iterations)) + .run()?; + Ok(()) +} + +fn run_2d_ngalgebra( + a: f64, + b: f64, + init_param: &Vec, + c1: f64, + c2: f64, + m: usize, + iterations: u64, +) -> Result<(), Error> { + // Define cost function + let cost = Rosenbrock2DNG { a, b }; + // Define initial 
parameter vector + let init_param: DVector = DVector::from((*init_param).clone()); + // set up a line search + let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; + // Set up solver + let solver = LBFGS::new(linesearch, m); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| state.param(init_param).max_iters(iterations)) + .run()?; + Ok(()) +} + +fn run_2d_ndarray( + a: f64, + b: f64, + init_param: &Vec, + c1: f64, + c2: f64, + m: usize, + iterations: u64, +) -> Result<(), Error> { + // Define cost function + let cost = Rosenbrock2DNd { a, b }; + + // Define initial parameter vector + let init_param: Array1 = Array1::from_vec((*init_param).clone()); + + // set up a line search + let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; + // Set up solver + let solver = LBFGS::new(linesearch, m); + + // Run solver + let res = Executor::new(cost, solver) + .configure(|state| state.param(init_param).max_iters(iterations)) + .run()?; + Ok(()) +} + +fn criterion_benchmark(c: &mut Criterion) { + let a = 1.0; + let b = 100.0; + let init_param = vec![-1.2, 1.0]; + let c1 = 1e-4; + let c2 = 0.9; + let m = 7; + let iterations: u64 = 100; + let mut group = c.benchmark_group("LBFGS_2D"); + group.bench_function("Vec", |bencher| { + bencher.iter(|| { + run_2d_vec( + black_box(a), + black_box(b), + black_box(&init_param), + black_box(c1), + black_box(c2), + black_box(m), + black_box(iterations), + ) + }) + }); + group.bench_function("nalgebra", |bencher| { + bencher.iter(|| { + run_2d_ngalgebra( + black_box(a), + black_box(b), + black_box(&init_param), + black_box(c1), + black_box(c2), + black_box(m), + black_box(iterations), + ) + }) + }); + group.bench_function("ndarray", |bencher| { + bencher.iter(|| { + run_2d_ndarray( + black_box(a), + black_box(b), + black_box(&init_param), + black_box(c1), + black_box(c2), + black_box(m), + black_box(iterations), + ) + }) + }); + group.finish(); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); + From 5d51808d33cf870501b353681a954fb28c1043bf Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 12:13:50 +0200 Subject: [PATCH 09/16] benchmark BFGS on ndarray and Vec backends I'm not sure the Vec benchmark is doing work, it runs on 500 nanoseconds vs ndarray which runs on ~100 microseconds --- argmin/benches/bfgs.rs | 132 ++++++++++++++++++++++++++++++++++++----- 1 file changed, 118 insertions(+), 14 deletions(-) diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs index a7551faee..096ee32d1 100644 --- a/argmin/benches/bfgs.rs +++ b/argmin/benches/bfgs.rs @@ -5,21 +5,45 @@ // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. 
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{black_box, BenchmarkId, criterion_group, criterion_main, Criterion};

 use argmin::core::{CostFunction, Error, Executor, Gradient};
 use argmin::solver::linesearch::MoreThuenteLineSearch;
 use argmin::solver::quasinewton::BFGS;
 use argmin_testfunctions::rosenbrock;
 use finitediff::FiniteDiff;
-use ndarray::{array, Array1, Array2};
+use nalgebra::uninit::InitStatus;
+use ndarray::{array, Array1, Array2, FixedInitializer};

-struct Rosenbrock {
+struct RosenbrockVec {
     a: f64,
     b: f64,
 }

-impl CostFunction for Rosenbrock {
+struct RosenbrockNd {
+    a: f64,
+    b: f64,
+}
+
+impl CostFunction for RosenbrockVec {
+    type Param = Vec<f64>;
+    type Output = f64;
+
+    fn cost(&self, p: &Self::Param) -> Result<Self::Output, Error> {
+        Ok(rosenbrock(&p.to_vec(), self.a, self.b))
+    }
+}
+
+impl Gradient for RosenbrockVec {
+    type Param = Vec<f64>;
+    type Gradient = Vec<f64>;
+
+    fn gradient(&self, p: &Self::Param) -> Result<Self::Gradient, Error> {
+        Ok((*p).forward_diff(&|x| rosenbrock(&x, self.a, self.b)))
+    }
+}
+
+impl CostFunction for RosenbrockNd {
     type Param = Array1<f64>;
     type Output = f64;

@@ -27,7 +51,8 @@
         Ok(rosenbrock(&p.to_vec(), self.a, self.b))
     }
 }
-impl Gradient for Rosenbrock {
+
+impl Gradient for RosenbrockNd {
     type Param = Array1<f64>;
     type Gradient = Array1<f64>;

@@ -36,15 +61,33 @@
     }
 }

-fn run() -> Result<(), Error> {
+fn run_vec(
+    a: f64,
+    b: f64,
+    init_param: &[f64],
+    c1: f64,
+    c2: f64,
+    iterations: u64,
+) -> Result<(), Error> {
     // Define cost function
-    let cost = Rosenbrock { a: 1.0, b: 100.0 };
+    let cost = RosenbrockVec { a, b};

     // Define initial parameter vector
-    // TODO: parametrize dimension
-    let init_param: Array1<f64> = array![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
-    let init_hessian: Array2<f64> = Array2::eye(8);
+    let init_param: Vec<f64> = Vec::from(init_param);
+    let mut init_hessian = Vec::<Vec<f64>>::new();
+    for i in 0..init_hessian.len() {
+        let mut row = Vec::new();
+        for j in 0..init_hessian.len() {
+            if i==j {
+                row.push(1.0);
+            }
+            else {
+                row.push(0.0);
+            }
+        }
+        init_hessian.push(row);
+    }
     // set up a line search
-    let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?;
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
     // Set up solver
     let solver = BFGS::new(linesearch);

@@ -54,15 +97,76 @@
             state
                 .param(init_param)
                 .inv_hessian(init_hessian)
-                .max_iters(60)
+                .max_iters(iterations)
+        })
+        .run()?;
+    Ok(())
+}
+fn run_ndarray(
+    a: f64,
+    b: f64,
+    init_param: &[f64],
+    c1: f64,
+    c2: f64,
+    iterations: u64,
+) -> Result<(), Error> {
+    // Define cost function
+    let cost = RosenbrockNd { a, b};
+    // Define initial parameter vector
+    let init_param: Array1<f64> = Array1::from_vec(Vec::from(init_param));
+    let init_hessian: Array2<f64> = Array2::eye(init_param.len());
+    // set up a line search
+    let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?;
+    // Set up solver
+    let solver = BFGS::new(linesearch);
+
+    // Run solver
+    let res = Executor::new(cost, solver)
+        .configure(|state| {
+            state
+                .param(init_param)
+                .inv_hessian(init_hessian)
+                .max_iters(iterations)
         })
-        // .add_observer(SlogLogger::term(), ObserverMode::Always)
         .run()?;
     Ok(())
 }

 fn criterion_benchmark(c: &mut Criterion) {
-    c.bench_function("BFGS", |b| b.iter(|| run()));
+    let a = 1.0;
+    let b = 100.0;
+    let init_param = vec![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0];
+    let c1 = 1e-4;
+    let c2 = 0.9;
+    let iterations: u64 = 60;
+    let mut group = c.benchmark_group("BFGS");
+    for i in 2..init_param.len() {
+        group.bench_with_input(BenchmarkId::new("Vec", i), &i, |bencher, i| {
+            bencher.iter(|| {
+                run_vec(
+                    black_box(a),
+                    black_box(b),
+                    black_box(&init_param[0..*i]),
+                    black_box(c1),
+                    black_box(c2),
+                    black_box(iterations),
+                )
+            })
+        });
+        group.bench_with_input(BenchmarkId::new("ndarray", i), &i, |bencher, i| {
+            bencher.iter(|| {
+                run_ndarray(
+                    black_box(a),
+                    black_box(b),
+                    black_box(&init_param[0..*i]),
+                    black_box(c1),
+                    black_box(c2),
+                    black_box(iterations),
+                )
+            })
+        });
+    }
+    group.finish();
 }

 criterion_group!(benches, criterion_benchmark);

From e78b5bff371a7de9b5a833269c3f8d3a8f2b5dcc Mon Sep 17 00:00:00 2001
From: jonboh
Date: Sat, 13 May 2023 14:55:45 +0200
Subject: [PATCH 10/16] fail benchmark when the solver (or anything else) fails

This was the reason why the Vec version of LBFGS was running in 500 ns
(it just aborted)
---
 argmin/benches/backtracking.rs           |  6 ++--
 argmin/benches/bfgs.rs                   | 28 ++++++++++--------
 argmin/benches/brentopt.rs               |  5 ++--
 argmin/benches/brentroot.rs              |  5 ++--
 argmin/benches/conjugategradient.rs      |  5 ++--
 argmin/benches/dfp.rs                    |  5 ++--
 argmin/benches/gaussnewton.rs            | 36 ++++++++++++++++--------
 argmin/benches/gaussnewton_linesearch.rs |  4 ++-
 argmin/benches/goldensectionsearch.rs    |  5 ++--
 argmin/benches/hagerzhang.rs             |  5 ++--
 argmin/benches/landweber.rs              |  5 ++--
 argmin/benches/lbfgs.rs                  |  2 ++
 argmin/benches/lbfgs2d.rs                |  3 ++
 argmin/benches/morethuente.rs            |  6 ++--
 argmin/benches/neldermead.rs             |  6 ++--
 argmin/benches/newton.rs                 |  6 ++--
 argmin/benches/newton_cg.rs              |  6 ++--
 argmin/benches/nonlinear_cg.rs           |  6 ++--
 argmin/benches/owl_qn.rs                 |  6 ++--
 argmin/benches/particleswarm.rs          | 27 ++++++++++++++++--
 argmin/benches/simulatedannealing.rs     |  6 ++--
 argmin/benches/sr1.rs                    |  6 ++--
 argmin/benches/sr1_trustregion.rs        |  6 ++--
 argmin/benches/steepestdescent.rs        |  6 ++--
 argmin/benches/trustregion_nd.rs         |  6 ++--
 25 files changed, 129 insertions(+), 78 deletions(-)

diff --git a/argmin/benches/backtracking.rs b/argmin/benches/backtracking.rs
index 72b133687..64b98a3bf 100644
--- a/argmin/benches/backtracking.rs
+++ b/argmin/benches/backtracking.rs
@@ -62,11 +62,11 @@ fn run() -> Result<(), Error> {
     Ok(())
 }

-
 fn criterion_benchmark(c: &mut Criterion) {
-    c.bench_function("Backtracking", |b| b.iter(run));
+    c.bench_function("Backtracking", |b| {
+        b.iter(|| run().expect("Benchmark should run without errors"))
+    });
 }

 criterion_group!(benches, criterion_benchmark);
 criterion_main!(benches);
-
diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs
index 096ee32d1..1bdb5f786 100644
--- a/argmin/benches/bfgs.rs
+++ b/argmin/benches/bfgs.rs
@@ -141,18 +141,21 @@ fn criterion_benchmark(c: &mut Criterion) {
     let iterations: u64 = 60;
     let mut group = c.benchmark_group("BFGS");
     for i in 2..init_param.len() {
-        group.bench_with_input(BenchmarkId::new("Vec", i), &i, |bencher, i| {
-            bencher.iter(|| {
-                run_vec(
-                    black_box(a),
-                    black_box(b),
-                    black_box(&init_param[0..*i]),
-                    black_box(c1),
-                    black_box(c2),
-                    black_box(iterations),
-                )
-            })
-        });
+        // WARN: Vec version immediately fails with
+        // Condition violated: `MoreThuenteLineSearch`: Search direction must be a descent direction.
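+        // (Likely cause, judging from the code above: `run_vec` sizes `init_hessian`
+        // by looping over `0..init_hessian.len()` on a freshly created empty Vec, so
+        // the initial inverse Hessian has no entries, the first search direction
+        // comes out empty, and its dot product with the gradient is 0 rather than
+        // strictly negative.)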
+ // + // group.bench_with_input(BenchmarkId::new("Vec", i), &i, |bencher, i| { + // bencher.iter(|| { + // run_vec( + // black_box(a), + // black_box(b), + // black_box(&init_param[0..*i]), + // black_box(c1), + // black_box(c2), + // black_box(iterations), + // ).expect("Benchmark should run without errors") + // }) + // }); group.bench_with_input(BenchmarkId::new("ndarray", i), &i, |bencher, i| { bencher.iter(|| { run_ndarray( @@ -163,6 +166,7 @@ fn criterion_benchmark(c: &mut Criterion) { black_box(c2), black_box(iterations), ) + .expect("Benchmark should run without errors") }) }); } diff --git a/argmin/benches/brentopt.rs b/argmin/benches/brentopt.rs index 576b3e781..7afa7af86 100644 --- a/argmin/benches/brentopt.rs +++ b/argmin/benches/brentopt.rs @@ -39,9 +39,10 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("BrentOpt", |b| b.iter(run)); + c.bench_function("BrentOpt", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/brentroot.rs b/argmin/benches/brentroot.rs index 20dc33693..7fcc7e030 100644 --- a/argmin/benches/brentroot.rs +++ b/argmin/benches/brentroot.rs @@ -44,9 +44,10 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("BrentRoot", |b| b.iter(run)); + c.bench_function("BrentRoot", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/conjugategradient.rs b/argmin/benches/conjugategradient.rs index ec80fecdf..0ee9f7f32 100644 --- a/argmin/benches/conjugategradient.rs +++ b/argmin/benches/conjugategradient.rs @@ -43,9 +43,10 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("ConjugateGradient", |b| b.iter(run)); + c.bench_function("ConjugateGradient", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/dfp.rs b/argmin/benches/dfp.rs index 78b6f0621..141f03c14 100644 --- a/argmin/benches/dfp.rs +++ b/argmin/benches/dfp.rs @@ -65,9 +65,10 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("DFP", |b| b.iter(|| run())); + c.bench_function("DFP", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/gaussnewton.rs b/argmin/benches/gaussnewton.rs index e5d5856fc..8cea9d3ca 100644 --- a/argmin/benches/gaussnewton.rs +++ b/argmin/benches/gaussnewton.rs @@ -129,29 +129,41 @@ fn run_ndarray(data: &Vec<(f64, f64)>, init_param: (f64,f64), iterations: u64) - Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { let data = vec![ - (0.038, 0.050), - (0.194, 0.127), - (0.425, 0.094), - (0.626, 0.2122), - (1.253, 0.2729), - (2.5, 0.2665), - (3.74, 0.3317), - ]; + (0.038, 0.050), + (0.194, 0.127), + (0.425, 0.094), + (0.626, 0.2122), + (1.253, 0.2729), + (2.5, 0.2665), + (3.74, 0.3317), + ]; let init_param = (0.9, 0.2); let iterations = 10; let mut group = c.benchmark_group("GaussNewton"); group.bench_function("GaussNewton_ngalgebra", |b| { - b.iter(|| run_ngalgebra(black_box(&data), black_box(init_param), black_box(iterations))) + b.iter(|| { + run_ngalgebra( + black_box(&data), + black_box(init_param), 
+ black_box(iterations), + ) + .expect("Benchmark should run without errors") + }) }); group.bench_function("GaussNewton_ndarry", |b| { - b.iter(|| run_ndarray(black_box(&data), black_box(init_param), black_box(iterations))) + b.iter(|| { + run_ndarray( + black_box(&data), + black_box(init_param), + black_box(iterations), + ) + .expect("Benchmark should run without errors") + }) }); group.finish(); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/gaussnewton_linesearch.rs b/argmin/benches/gaussnewton_linesearch.rs index bdfc800c8..1d2bc31ea 100644 --- a/argmin/benches/gaussnewton_linesearch.rs +++ b/argmin/benches/gaussnewton_linesearch.rs @@ -82,7 +82,9 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("GaussNewtonLineSearch", |b| b.iter(|| run())); + c.bench_function("GaussNewtonLineSearch", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); diff --git a/argmin/benches/goldensectionsearch.rs b/argmin/benches/goldensectionsearch.rs index 9ce8f4094..b7e01bc18 100644 --- a/argmin/benches/goldensectionsearch.rs +++ b/argmin/benches/goldensectionsearch.rs @@ -39,9 +39,10 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("GoldenSectionSearch", |b| b.iter(run)); + c.bench_function("GoldenSectionSearch", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/hagerzhang.rs b/argmin/benches/hagerzhang.rs index 01b0bbbf2..7e4d56e92 100644 --- a/argmin/benches/hagerzhang.rs +++ b/argmin/benches/hagerzhang.rs @@ -64,9 +64,10 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("HagerZhangLineSearch", |b| b.iter(run)); + c.bench_function("HagerZhangLineSearch", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/landweber.rs b/argmin/benches/landweber.rs index faed3da37..ae38f218c 100644 --- a/argmin/benches/landweber.rs +++ b/argmin/benches/landweber.rs @@ -37,9 +37,10 @@ fn run() -> Result<(), Error> { } fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("Landweber", |b| b.iter(run)); + c.bench_function("Landweber", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/lbfgs.rs b/argmin/benches/lbfgs.rs index 7a6399861..d86d0ff64 100644 --- a/argmin/benches/lbfgs.rs +++ b/argmin/benches/lbfgs.rs @@ -135,6 +135,7 @@ fn criterion_benchmark(c: &mut Criterion) { black_box(m), black_box(iterations), ) + .expect("Benchmark should run without errors") }) }); group.bench_with_input(BenchmarkId::new("ndarray", i), &i, |bencher, i| { @@ -148,6 +149,7 @@ fn criterion_benchmark(c: &mut Criterion) { black_box(m), black_box(iterations), ) + .expect("Benchmark should run without errors") }) }); } diff --git a/argmin/benches/lbfgs2d.rs b/argmin/benches/lbfgs2d.rs index 83b141f16..00f93f0f3 100644 --- a/argmin/benches/lbfgs2d.rs +++ b/argmin/benches/lbfgs2d.rs @@ -233,6 +233,7 @@ fn criterion_benchmark(c: &mut Criterion) { black_box(m), black_box(iterations), ) + .expect("Benchmark should run without errors") }) }); group.bench_function("nalgebra", 
|bencher| { @@ -246,6 +247,7 @@ fn criterion_benchmark(c: &mut Criterion) { black_box(m), black_box(iterations), ) + .expect("Benchmark should run without errors") }) }); group.bench_function("ndarray", |bencher| { @@ -259,6 +261,7 @@ fn criterion_benchmark(c: &mut Criterion) { black_box(m), black_box(iterations), ) + .expect("Benchmark should run without errors") }) }); group.finish(); diff --git a/argmin/benches/morethuente.rs b/argmin/benches/morethuente.rs index cd49eb85e..ad46b9d0a 100644 --- a/argmin/benches/morethuente.rs +++ b/argmin/benches/morethuente.rs @@ -60,11 +60,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("MoreThuenteLineSearch", |b| b.iter(run)); + c.bench_function("MoreThuenteLineSearch", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/neldermead.rs b/argmin/benches/neldermead.rs index 0e71973dc..20c7a4440 100644 --- a/argmin/benches/neldermead.rs +++ b/argmin/benches/neldermead.rs @@ -48,11 +48,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("NelderMead", |b| b.iter(|| run())); + c.bench_function("NelderMead", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/newton.rs b/argmin/benches/newton.rs index 29e70306d..7f5a3678b 100644 --- a/argmin/benches/newton.rs +++ b/argmin/benches/newton.rs @@ -57,11 +57,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("Newton", |b| b.iter(|| run())); + c.bench_function("Newton", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/newton_cg.rs b/argmin/benches/newton_cg.rs index 91c1005b7..bfd09bf35 100644 --- a/argmin/benches/newton_cg.rs +++ b/argmin/benches/newton_cg.rs @@ -71,11 +71,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("NewtonCG", |b| b.iter(|| run())); + c.bench_function("NewtonCG", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/nonlinear_cg.rs b/argmin/benches/nonlinear_cg.rs index 11622e882..6b4640846 100644 --- a/argmin/benches/nonlinear_cg.rs +++ b/argmin/benches/nonlinear_cg.rs @@ -65,11 +65,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("NonlinearConjugateGradient", |b| b.iter(run)); + c.bench_function("NonlinearConjugateGradient", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/owl_qn.rs b/argmin/benches/owl_qn.rs index 3e866b0dd..5f17157a6 100644 --- a/argmin/benches/owl_qn.rs +++ b/argmin/benches/owl_qn.rs @@ -59,11 +59,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("owl_qn", |b| b.iter(|| run())); + c.bench_function("owl_qn", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); 
criterion_main!(benches); - diff --git a/argmin/benches/particleswarm.rs b/argmin/benches/particleswarm.rs index f3539fea9..c3cc9a7d2 100644 --- a/argmin/benches/particleswarm.rs +++ b/argmin/benches/particleswarm.rs @@ -90,13 +90,34 @@ fn criterion_benchmark(c: &mut Criterion) { let iterations = 100; let mut group = c.benchmark_group("ParticleSwarm"); group.bench_function("ParticleSwarm_Vec", |b| { - b.iter(|| run_vec(black_box(bound), black_box(num_particles), black_box(iterations))) + b.iter(|| { + run_vec( + black_box(bound), + black_box(num_particles), + black_box(iterations), + ) + .expect("Benchmark should run without errors") + }) }); group.bench_function("ParticleSwarm_ngalgebra", |b| { - b.iter(|| run_ngalgebra(black_box(bound), black_box(num_particles), black_box(iterations))) + b.iter(|| { + run_ngalgebra( + black_box(bound), + black_box(num_particles), + black_box(iterations), + ) + .expect("Benchmark should run without errors") + }) }); group.bench_function("ParticleSwarm_ndarry", |b| { - b.iter(|| run_ndarray(black_box(bound), black_box(num_particles), black_box(iterations))) + b.iter(|| { + run_ndarray( + black_box(bound), + black_box(num_particles), + black_box(iterations), + ) + .expect("Benchmark should run without errors") + }) }); group.finish(); } diff --git a/argmin/benches/simulatedannealing.rs b/argmin/benches/simulatedannealing.rs index 3b6bfa078..2668b74bd 100644 --- a/argmin/benches/simulatedannealing.rs +++ b/argmin/benches/simulatedannealing.rs @@ -135,11 +135,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("SimulatedAnnealing", |b| b.iter(run)); + c.bench_function("SimulatedAnnealing", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/sr1.rs b/argmin/benches/sr1.rs index 5302be45a..04209327c 100644 --- a/argmin/benches/sr1.rs +++ b/argmin/benches/sr1.rs @@ -62,11 +62,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("SR1", |b| b.iter(|| run())); + c.bench_function("SR1", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/sr1_trustregion.rs b/argmin/benches/sr1_trustregion.rs index 007b899d6..7c49832a3 100644 --- a/argmin/benches/sr1_trustregion.rs +++ b/argmin/benches/sr1_trustregion.rs @@ -77,11 +77,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("SR1TrustRegion", |b| b.iter(|| run())); + c.bench_function("SR1TrustRegion", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/steepestdescent.rs b/argmin/benches/steepestdescent.rs index 08d9102c7..7ca50710c 100644 --- a/argmin/benches/steepestdescent.rs +++ b/argmin/benches/steepestdescent.rs @@ -61,11 +61,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("SteepestDescent", |b| b.iter(run)); + c.bench_function("SteepestDescent", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/trustregion_nd.rs b/argmin/benches/trustregion_nd.rs index 
2bebb6e91..062d87719 100644 --- a/argmin/benches/trustregion_nd.rs +++ b/argmin/benches/trustregion_nd.rs @@ -76,11 +76,11 @@ fn run() -> Result<(), Error> { Ok(()) } - fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("TrustRegion_nd", |b| b.iter(|| run())); + c.bench_function("TrustRegion_nd", |b| { + b.iter(|| run().expect("Benchmark should run without errors")) + }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - From 8e1a839ed8264ebe5859aa66b4a6e3fddb484da6 Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 14:57:08 +0200 Subject: [PATCH 11/16] cargo fmt --- argmin/benches/bfgs.rs | 12 +++++------- argmin/benches/dfp.rs | 2 +- argmin/benches/gaussnewton.rs | 28 +++++++++++++--------------- argmin/benches/hagerzhang.rs | 2 +- argmin/benches/lbfgs.rs | 3 +-- argmin/benches/lbfgs2d.rs | 9 ++------- argmin/benches/steepestdescent.rs | 1 - 7 files changed, 23 insertions(+), 34 deletions(-) diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs index 1bdb5f786..ecb943e43 100644 --- a/argmin/benches/bfgs.rs +++ b/argmin/benches/bfgs.rs @@ -5,7 +5,7 @@ // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. -use criterion::{black_box, BenchmarkId, criterion_group, criterion_main, Criterion}; +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::linesearch::MoreThuenteLineSearch; @@ -70,17 +70,16 @@ fn run_vec( iterations: u64, ) -> Result<(), Error> { // Define cost function - let cost = RosenbrockVec { a, b}; + let cost = RosenbrockVec { a, b }; // Define initial parameter vector let init_param: Vec = Vec::from(init_param); let mut init_hessian = Vec::>::new(); for i in 0..init_hessian.len() { let mut row = Vec::new(); for j in 0..init_hessian.len() { - if i==j { + if i == j { row.push(1.0); - } - else { + } else { row.push(0.0); } } @@ -111,7 +110,7 @@ fn run_ndarray( iterations: u64, ) -> Result<(), Error> { // Define cost function - let cost = RosenbrockNd { a, b}; + let cost = RosenbrockNd { a, b }; // Define initial parameter vector let init_param: Array1 = Array1::from_vec(Vec::from(init_param)); let init_hessian: Array2 = Array2::eye(init_param.len()); @@ -175,4 +174,3 @@ fn criterion_benchmark(c: &mut Criterion) { criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/dfp.rs b/argmin/benches/dfp.rs index 141f03c14..8c96c5d26 100644 --- a/argmin/benches/dfp.rs +++ b/argmin/benches/dfp.rs @@ -48,7 +48,7 @@ fn run() -> Result<(), Error> { // set up a line search let linesearch = MoreThuenteLineSearch::new().with_c(1e-4, 0.9)?; -// Set up solver + // Set up solver let solver = DFP::new(linesearch); // Run solver diff --git a/argmin/benches/gaussnewton.rs b/argmin/benches/gaussnewton.rs index 8cea9d3ca..fd080d06b 100644 --- a/argmin/benches/gaussnewton.rs +++ b/argmin/benches/gaussnewton.rs @@ -8,8 +8,8 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; use argmin::core::{Error, Executor, Jacobian, Operator}; use argmin::solver::gaussnewton::GaussNewton; +use nalgebra::{DMatrix, DVector}; use ndarray::{Array1, Array2}; -use nalgebra::{DVector, DMatrix}; type Rate = f64; type S = f64; @@ -28,8 +28,6 @@ struct ProblemNd { data: Vec, } - - impl Operator for ProblemNd { type Param = Array1; type Output = Array1; @@ -57,7 +55,6 @@ impl Operator for ProblemNG { } } 
- impl Jacobian for ProblemNd { type Param = Array1; type Jacobian = Array2; @@ -73,7 +70,6 @@ impl Jacobian for ProblemNd { } } - impl Jacobian for ProblemNG { type Param = DVector; type Jacobian = DMatrix; @@ -89,13 +85,14 @@ impl Jacobian for ProblemNG { } } - -fn run_ngalgebra(data: &Vec<(f64, f64)>, init_param: (f64,f64), iterations: u64) -> Result<(), Error> { +fn run_ngalgebra( + data: &Vec<(f64, f64)>, + init_param: (f64, f64), + iterations: u64, +) -> Result<(), Error> { // Define cost function // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm - let cost = ProblemNG { - data: data.clone() - }; + let cost = ProblemNG { data: data.clone() }; // Define initial parameter vector let init_param: DVector = DVector::from_vec(vec![init_param.0, init_param.1]); @@ -110,13 +107,14 @@ fn run_ngalgebra(data: &Vec<(f64, f64)>, init_param: (f64,f64), iterations: u64) Ok(()) } - -fn run_ndarray(data: &Vec<(f64, f64)>, init_param: (f64,f64), iterations: u64) -> Result<(), Error> { +fn run_ndarray( + data: &Vec<(f64, f64)>, + init_param: (f64, f64), + iterations: u64, +) -> Result<(), Error> { // Define cost function // Example taken from Wikipedia: https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm - let cost = ProblemNd { - data: data.clone(), - }; + let cost = ProblemNd { data: data.clone() }; // Define initial parameter vector let init_param: Array1 = Array1::from(vec![init_param.0, init_param.1]); // Set up solver diff --git a/argmin/benches/hagerzhang.rs b/argmin/benches/hagerzhang.rs index 7e4d56e92..697ce58a3 100644 --- a/argmin/benches/hagerzhang.rs +++ b/argmin/benches/hagerzhang.rs @@ -42,7 +42,7 @@ fn run() -> Result<(), Error> { // Set search direction solver.search_direction(vec![-1.5, -0.5]); -// Set initial step length + // Set initial step length solver.initial_step_length(10.0)?; let init_cost = operator.cost(&init_param)?; diff --git a/argmin/benches/lbfgs.rs b/argmin/benches/lbfgs.rs index d86d0ff64..da43a8c2c 100644 --- a/argmin/benches/lbfgs.rs +++ b/argmin/benches/lbfgs.rs @@ -4,7 +4,7 @@ // http://apache.org/licenses/LICENSE-2.0> or the MIT license , at your option. This file may not be // copied, modified, or distributed except according to those terms. 
-use criterion::{black_box, criterion_group, criterion_main, Criterion, BenchmarkId}; +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; use argmin::core::{CostFunction, Error, Executor, Gradient}; use argmin::solver::linesearch::MoreThuenteLineSearch; @@ -86,7 +86,6 @@ fn run_vec( Ok(()) } - fn run_ndarray( a: f64, b: f64, diff --git a/argmin/benches/lbfgs2d.rs b/argmin/benches/lbfgs2d.rs index 00f93f0f3..256f3d2e2 100644 --- a/argmin/benches/lbfgs2d.rs +++ b/argmin/benches/lbfgs2d.rs @@ -80,11 +80,7 @@ impl Gradient for Rosenbrock2DNG { type Gradient = DVector; fn gradient(&self, p: &Self::Param) -> Result { - Ok(rosenbrock_2d_derivative( - p.data.as_vec(), - self.a, - self.b, - ).into()) + Ok(rosenbrock_2d_derivative(p.data.as_vec(), self.a, self.b).into()) } } @@ -149,7 +145,7 @@ fn run_2d_vec( // Define initial parameter vector let init_param = (*init_param).clone(); // This is here to account for the same clone on // ndarray and ngalgebra - // set up a line search + // set up a line search let linesearch = MoreThuenteLineSearch::new().with_c(c1, c2)?; // Set up solver let solver = LBFGS::new(linesearch, m); @@ -269,4 +265,3 @@ fn criterion_benchmark(c: &mut Criterion) { criterion_group!(benches, criterion_benchmark); criterion_main!(benches); - diff --git a/argmin/benches/steepestdescent.rs b/argmin/benches/steepestdescent.rs index 7ca50710c..e6bcc596b 100644 --- a/argmin/benches/steepestdescent.rs +++ b/argmin/benches/steepestdescent.rs @@ -5,7 +5,6 @@ // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. - use criterion::{criterion_group, criterion_main, Criterion}; use argmin::core::{CostFunction, Error, Executor, Gradient}; From 59067c314fc061c6be74b95796d40a90b3b1c7e4 Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 14:57:22 +0200 Subject: [PATCH 12/16] add debug symbols to benchmark profile this eases investigations using flamegraph --- Cargo.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Cargo.toml b/Cargo.toml index cfb85b940..955360261 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,3 +8,6 @@ members = [ exclude = [ "media/book/tests", ] + +[profile.bench] +debug = true From 8af2f6a18d917ec1487e9b30acdadad3015ee8f8 Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 15:03:07 +0200 Subject: [PATCH 13/16] remove failing init_param --- argmin/benches/bfgs.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs index ecb943e43..8d6efa346 100644 --- a/argmin/benches/bfgs.rs +++ b/argmin/benches/bfgs.rs @@ -134,7 +134,7 @@ fn run_ndarray( fn criterion_benchmark(c: &mut Criterion) { let a = 1.0; let b = 100.0; - let init_param = vec![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0, 4.0, 10.0]; + let init_param = vec![-1.2, 1.0, -10.0, 2.0, 3.0, 2.0]; let c1 = 1e-4; let c2 = 0.9; let iterations: u64 = 60; From f7218891d25da59d1bcdb7cb6024850af6aa3e26 Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 15:36:08 +0200 Subject: [PATCH 14/16] clearer naming --- argmin/benches/bfgs.rs | 8 ++++---- argmin/benches/lbfgs.rs | 8 ++++---- argmin/benches/lbfgs2d.rs | 22 +++++++++++----------- argmin/benches/particleswarm.rs | 8 ++++---- 4 files changed, 23 insertions(+), 23 deletions(-) diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs index 8d6efa346..dacc2a4ed 100644 --- a/argmin/benches/bfgs.rs +++ b/argmin/benches/bfgs.rs @@ -20,7 +20,7 @@ struct RosenbrockVec { b: f64, 
} -struct RosenbrockNd { +struct RosenbrockNdarray { a: f64, b: f64, } @@ -43,7 +43,7 @@ impl Gradient for RosenbrockVec { } } -impl CostFunction for RosenbrockNd { +impl CostFunction for RosenbrockNdarray { type Param = Array1; type Output = f64; @@ -52,7 +52,7 @@ impl CostFunction for RosenbrockNd { } } -impl Gradient for RosenbrockNd { +impl Gradient for RosenbrockNdarray { type Param = Array1; type Gradient = Array1; @@ -110,7 +110,7 @@ fn run_ndarray( iterations: u64, ) -> Result<(), Error> { // Define cost function - let cost = RosenbrockNd { a, b }; + let cost = RosenbrockNdarray { a, b }; // Define initial parameter vector let init_param: Array1 = Array1::from_vec(Vec::from(init_param)); let init_hessian: Array2 = Array2::eye(init_param.len()); diff --git a/argmin/benches/lbfgs.rs b/argmin/benches/lbfgs.rs index da43a8c2c..e5a8351e7 100644 --- a/argmin/benches/lbfgs.rs +++ b/argmin/benches/lbfgs.rs @@ -19,7 +19,7 @@ struct RosenbrockVec { b: f64, } -struct RosenbrockNd { +struct RosenbrockNdarray { a: f64, b: f64, } @@ -42,7 +42,7 @@ impl Gradient for RosenbrockVec { } } -impl CostFunction for RosenbrockNd { +impl CostFunction for RosenbrockNdarray { type Param = Array1; type Output = f64; @@ -51,7 +51,7 @@ impl CostFunction for RosenbrockNd { } } -impl Gradient for RosenbrockNd { +impl Gradient for RosenbrockNdarray { type Param = Array1; type Gradient = Array1; @@ -96,7 +96,7 @@ fn run_ndarray( iterations: u64, ) -> Result<(), Error> { // Define cost function - let cost = RosenbrockNd { a, b }; + let cost = RosenbrockNdarray { a, b }; // Define initial parameter vector let init_param: Array1 = Array1::from_vec(Vec::from(init_param)); diff --git a/argmin/benches/lbfgs2d.rs b/argmin/benches/lbfgs2d.rs index 256f3d2e2..a934a294a 100644 --- a/argmin/benches/lbfgs2d.rs +++ b/argmin/benches/lbfgs2d.rs @@ -19,7 +19,7 @@ struct RosenbrockVec { b: f64, } -struct RosenbrockNd { +struct RosenbrockNdarray { a: f64, b: f64, } @@ -29,12 +29,12 @@ struct Rosenbrock2DVec { b: f64, } -struct Rosenbrock2DNG { +struct Rosenbrock2DNalgebra { a: f64, b: f64, } -struct Rosenbrock2DNd { +struct Rosenbrock2DNdarray { a: f64, b: f64, } @@ -48,7 +48,7 @@ impl CostFunction for Rosenbrock2DVec { } } -impl CostFunction for Rosenbrock2DNG { +impl CostFunction for Rosenbrock2DNalgebra { type Param = DVector; type Output = f64; @@ -57,7 +57,7 @@ impl CostFunction for Rosenbrock2DNG { } } -impl CostFunction for Rosenbrock2DNd { +impl CostFunction for Rosenbrock2DNdarray { type Param = Array1; type Output = f64; @@ -75,7 +75,7 @@ impl Gradient for Rosenbrock2DVec { } } -impl Gradient for Rosenbrock2DNG { +impl Gradient for Rosenbrock2DNalgebra { type Param = DVector; type Gradient = DVector; @@ -84,7 +84,7 @@ impl Gradient for Rosenbrock2DNG { } } -impl Gradient for Rosenbrock2DNd { +impl Gradient for Rosenbrock2DNdarray { type Param = Array1; type Gradient = Array1; @@ -112,7 +112,7 @@ impl Gradient for RosenbrockVec { } } -impl CostFunction for RosenbrockNd { +impl CostFunction for RosenbrockNdarray { type Param = Array1; type Output = f64; @@ -121,7 +121,7 @@ impl CostFunction for RosenbrockNd { } } -impl Gradient for RosenbrockNd { +impl Gradient for RosenbrockNdarray { type Param = Array1; type Gradient = Array1; @@ -167,7 +167,7 @@ fn run_2d_ngalgebra( iterations: u64, ) -> Result<(), Error> { // Define cost function - let cost = Rosenbrock2DNG { a, b }; + let cost = Rosenbrock2DNalgebra { a, b }; // Define initial parameter vector let init_param: DVector = DVector::from((*init_param).clone()); // set up a 
line search @@ -192,7 +192,7 @@ fn run_2d_ndarray( iterations: u64, ) -> Result<(), Error> { // Define cost function - let cost = Rosenbrock2DNd { a, b }; + let cost = Rosenbrock2DNdarray { a, b }; // Define initial parameter vector let init_param: Array1 = Array1::from_vec((*init_param).clone()); diff --git a/argmin/benches/particleswarm.rs b/argmin/benches/particleswarm.rs index c3cc9a7d2..41e3258e3 100644 --- a/argmin/benches/particleswarm.rs +++ b/argmin/benches/particleswarm.rs @@ -23,9 +23,9 @@ impl CostFunction for HimmelblauVec { } } -struct HimmelblauNG {} +struct HimmelblauNalgebra {} -impl CostFunction for HimmelblauNG { +impl CostFunction for HimmelblauNalgebra { type Param = DVector; type Output = f64; @@ -57,7 +57,7 @@ fn run_vec(bound: f64, num_particles: usize, iterations: u64) -> Result<(), Erro } fn run_ngalgebra(bound: f64, num_particles: usize, iterations: u64) -> Result<(), Error> { - let cost_function = HimmelblauNG {}; + let cost_function = HimmelblauNalgebra {}; let solver = ParticleSwarm::new( (dvector![-bound, -bound], dvector![bound, bound]), @@ -109,7 +109,7 @@ fn criterion_benchmark(c: &mut Criterion) { .expect("Benchmark should run without errors") }) }); - group.bench_function("ParticleSwarm_ndarry", |b| { + group.bench_function("ParticleSwarm_ndarray", |b| { b.iter(|| { run_ndarray( black_box(bound), From f6cd4172ddc2f368dbe00ca13964438ab7f35857 Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 15:56:35 +0200 Subject: [PATCH 15/16] this is already a vector --- argmin/benches/bfgs.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/argmin/benches/bfgs.rs b/argmin/benches/bfgs.rs index dacc2a4ed..e0e5e8405 100644 --- a/argmin/benches/bfgs.rs +++ b/argmin/benches/bfgs.rs @@ -30,7 +30,7 @@ impl CostFunction for RosenbrockVec { type Output = f64; fn cost(&self, p: &Self::Param) -> Result { - Ok(rosenbrock(&p.to_vec(), self.a, self.b)) + Ok(rosenbrock(p, self.a, self.b)) } } From 602c08746ae8bda81a224e5028380ed9f13e3d93 Mon Sep 17 00:00:00 2001 From: jonboh Date: Sat, 13 May 2023 16:23:11 +0200 Subject: [PATCH 16/16] use the same benchmark naming convention for particleswarm --- argmin/benches/particleswarm.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/argmin/benches/particleswarm.rs b/argmin/benches/particleswarm.rs index 41e3258e3..00d22132f 100644 --- a/argmin/benches/particleswarm.rs +++ b/argmin/benches/particleswarm.rs @@ -89,7 +89,7 @@ fn criterion_benchmark(c: &mut Criterion) { let num_particles = 40; let iterations = 100; let mut group = c.benchmark_group("ParticleSwarm"); - group.bench_function("ParticleSwarm_Vec", |b| { + group.bench_function("Vec", |b| { b.iter(|| { run_vec( black_box(bound), @@ -99,7 +99,7 @@ fn criterion_benchmark(c: &mut Criterion) { .expect("Benchmark should run without errors") }) }); - group.bench_function("ParticleSwarm_ngalgebra", |b| { + group.bench_function("ngalgebra", |b| { b.iter(|| { run_ngalgebra( black_box(bound), @@ -109,7 +109,7 @@ fn criterion_benchmark(c: &mut Criterion) { .expect("Benchmark should run without errors") }) }); - group.bench_function("ParticleSwarm_ndarray", |b| { + group.bench_function("ndarray", |b| { b.iter(|| { run_ndarray( black_box(bound),