
Commit afaaa6e

rename optimize hyperparameters arguments
1 parent 22fd81f commit afaaa6e

File tree

2 files changed: +18 -20 lines changed


python/ffsim/optimize/stochastic_reconfiguration.py

Lines changed: 17 additions & 19 deletions
@@ -34,12 +34,12 @@ def minimize_stochastic_reconfiguration(
     x0: np.ndarray,
     *,
     maxiter: int = 1000,
-    variation: float = 1.0,
     cond: float = 1e-4,
     epsilon: float = 1e-8,
     gtol: float = 1e-5,
-    optimize_hyperparameters: bool = True,
-    optimize_hyperparameters_args: dict | None = None,
+    variation: float = 1.0,
+    optimize_variation: bool = True,
+    optimize_kwargs: dict | None = None,
     callback: Callable[[OptimizeResult], Any] | None = None,
 ) -> OptimizeResult:
     """Minimize the energy of a variational ansatz using stochastic reconfiguration.

@@ -55,26 +55,24 @@ def minimize_stochastic_reconfiguration(
         hamiltonian: The Hamiltonian representing the energy to be minimized.
         x0: Initial guess for the parameters.
         maxiter: Maximum number of optimization iterations to perform.
-        variation: Hyperparameter controlling the size of parameter variations
-            used in the linear expansion of the wavefunction. Its value must be
-            strictly between 0 and 1. A larger value results in larger variations.
         cond: `cond` argument to pass to `scipy.linalg.lstsq`_, which is called to
             solve for the parameter update.
         epsilon: Increment to use for approximating the gradient using
             finite difference.
         gtol: Convergence threshold for the norm of the projected gradient.
-        optimize_hyperparameters: Whether to optimize the `variation` hyperparameter in
-            each iteration. Optimizing the hyperparameter incurs more function and
-            energy evaluations in each iteration, but may speed up convergence, leading
-            to fewer iterations overall. The optimization is performed using
-            `scipy.optimize.minimize`_.
-        optimize_hyperparameters_args: Arguments to use when calling
-            `scipy.optimize.minimize`_ to optimize the hyperparameters. The call is
-            constructed as
+        variation: Hyperparameter controlling the size of parameter variations
+            used in the linear expansion of the wavefunction. Its value must be
+            strictly between 0 and 1. A larger value results in larger variations.
+        optimize_variation: Whether to optimize the `variation` hyperparameter
+            in each iteration. Optimizing hyperparameters incurs more function and
+            energy evaluations in each iteration, but may improve convergence.
+            The optimization is performed using `scipy.optimize.minimize`_.
+        optimize_kwargs: Arguments to use when calling `scipy.optimize.minimize`_ to
+            optimize hyperparameters. The call is constructed as

             .. code::

-                scipy.optimize.minimize(f, x0, **scipy_optimize_minimize_args)
+                scipy.optimize.minimize(f, x0, **optimize_kwargs)

             If not specified, the default value of `dict(method="L-BFGS-B")` will be
             used.

@@ -116,8 +114,8 @@ def minimize_stochastic_reconfiguration(
     if maxiter < 1:
         raise ValueError(f"maxiter must be at least 1. Got {maxiter}.")

-    if optimize_hyperparameters_args is None:
-        optimize_hyperparameters_args = dict(method="L-BFGS-B")
+    if optimize_kwargs is None:
+        optimize_kwargs = dict(method="L-BFGS-B")

     variation_param = math.sqrt(variation)
     params = x0.copy()

@@ -149,7 +147,7 @@ def minimize_stochastic_reconfiguration(
             converged = True
             break

-        if optimize_hyperparameters:
+        if optimize_variation:

             def f(x: np.ndarray) -> float:
                 (variation_param,) = x

@@ -166,7 +164,7 @@ def f(x: np.ndarray) -> float:
             result = minimize(
                 f,
                 x0=[variation_param],
-                **optimize_hyperparameters_args,
+                **optimize_kwargs,
             )
             (variation_param,) = result.x
             variation = variation_param**2
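
The hunks above also show the mechanics the renamed arguments plug into: `variation` is stored through its square root (`variation_param`), the per-iteration hyperparameter search re-optimizes that parameter with `scipy.optimize.minimize`, and `optimize_kwargs` is forwarded verbatim to that call. The following is a minimal, runnable sketch of just that pattern; the quadratic objective is a hypothetical stand-in for the real energy evaluation and is not part of this commit.

    # Sketch of the hyperparameter sub-optimization pattern from the hunks above.
    import math

    from scipy.optimize import minimize

    variation = 0.5
    optimize_kwargs = dict(method="L-BFGS-B")  # default used when None is passed

    # `variation` is reparameterized through its square root.
    variation_param = math.sqrt(variation)

    def f(x):
        (variation_param,) = x
        variation = variation_param**2
        # Stand-in objective: pretend the energy is minimized at variation == 0.3.
        return (variation - 0.3) ** 2

    # `optimize_kwargs` is forwarded verbatim to scipy.optimize.minimize.
    result = minimize(f, x0=[variation_param], **optimize_kwargs)
    (variation_param,) = result.x
    variation = variation_param**2
    print(variation)  # close to 0.3 for this toy objective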

tests/python/optimize/stochastic_reconfiguration_test.py

Lines changed: 1 addition & 1 deletion
@@ -85,7 +85,7 @@ def callback(intermediate_result):
         params_to_vec,
         x0=x0,
         hamiltonian=hamiltonian,
-        optimize_hyperparameters=False,
+        optimize_variation=False,
         callback=callback,
     )
     np.testing.assert_allclose(energy(result.x), result.fun)
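
For downstream code, the commit is a pure rename at the call site: `optimize_hyperparameters` becomes `optimize_variation` and `optimize_hyperparameters_args` becomes `optimize_kwargs`. The sketch below illustrates the new spelling; the two-parameter toy ansatz, the diagonal matrix standing in for a Hamiltonian, and the direct module import are illustrative assumptions chosen only to make the call executable, not part of this commit or the ffsim test suite.

    import numpy as np
    from scipy.sparse.linalg import aslinearoperator

    # Assumed import path, mirroring the file path shown above.
    from ffsim.optimize.stochastic_reconfiguration import minimize_stochastic_reconfiguration

    # Toy two-parameter ansatz returning a normalized state vector (illustrative only).
    def params_to_vec(params: np.ndarray) -> np.ndarray:
        theta, phi = params
        return np.array(
            [np.cos(theta), np.sin(theta) * np.cos(phi), np.sin(theta) * np.sin(phi), 0.0]
        )

    # Small Hermitian stand-in for a Hamiltonian, wrapped as a LinearOperator.
    hamiltonian = aslinearoperator(np.diag([0.0, 1.0, 2.0, 3.0]))

    result = minimize_stochastic_reconfiguration(
        params_to_vec,
        hamiltonian=hamiltonian,
        x0=np.array([0.4, 0.3]),
        variation=0.5,
        optimize_variation=True,  # was: optimize_hyperparameters=True
        optimize_kwargs=dict(method="L-BFGS-B"),  # was: optimize_hyperparameters_args=...
    )
    print(result.x, result.fun)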
