@@ -628,8 +628,9 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
         evolution. Still uses full dataset for comparing against hall
         of fame. Default is "auto", which enables batching for N≥1000.
     batch_size : int | None
-        The batch size to use if batching. If None (default), uses
+        The batch size to use if batching. If None, uses
         128 for N<5000, 256 for N<50000, or 512 for N≥50000.
+        Default is `None`.
     fast_cycle : bool
         Batch over population subsamples. This is a slightly different
         algorithm than regularized evolution, but does cycles 15%
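For reference, the auto-selection rule described in the `batch_size` docstring above can be sketched as a small helper. The name `_auto_batch_size` is hypothetical and not part of PySR's API; only the thresholds (128/256/512 at N=5000 and N=50000) come from the docstring.

```python
def _auto_batch_size(n_samples: int) -> int:
    """Hypothetical sketch of the documented heuristic:
    128 for N<5000, 256 for N<50000, 512 for N>=50000."""
    if n_samples < 5_000:
        return 128
    if n_samples < 50_000:
        return 256
    return 512
```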
@@ -1560,7 +1561,7 @@ def _validate_and_modify_params(self) -> _DynamicallySetParams:
             operators={2: ["+", "*", "-", "/"]},
             maxdepth=self.maxsize,
             constraints={},
-            batch_size=1,
+            batch_size=None,
             update_verbosity=int(self.verbosity),
             progress=self.progress,
             warmup_maxsize_by=0.0,
@@ -1581,8 +1582,9 @@ def _validate_and_modify_params(self) -> _DynamicallySetParams:
 
         for param_name in map(lambda x: x.name, fields(_DynamicallySetParams)):
             user_param_value = getattr(self, param_name)
-            if user_param_value is None:
+            if user_param_value is None and param_name != "batch_size":
                 # Leave as the default in DynamicallySetParams
+                # (except for batch_size, which we want to keep as None)
                 ...
             else:
                 # If user has specified it, we will override the default.
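As a standalone sketch of what the modified loop does: a `None` from the user normally means "keep the dataclass default", but `batch_size` is now special-cased so that `None` is forwarded and the backend can pick a size from the dataset length. The `setattr` in the `else` branch and the simplified stand-in dataclass below are assumptions, since that body and the full field list are not shown in this hunk.

```python
from dataclasses import dataclass, fields
from typing import Optional


@dataclass
class _DynamicallySetParamsSketch:
    # Simplified stand-in for _DynamicallySetParams; fields and
    # defaults here are illustrative only.
    batch_size: Optional[int] = None
    warmup_maxsize_by: float = 0.0


def _resolve(user_obj, params: _DynamicallySetParamsSketch) -> _DynamicallySetParamsSketch:
    for param_name in map(lambda x: x.name, fields(_DynamicallySetParamsSketch)):
        user_param_value = getattr(user_obj, param_name)
        if user_param_value is None and param_name != "batch_size":
            # Leave as the default in the dataclass
            # (except for batch_size, which we want to keep as None).
            ...
        else:
            # Assumed behavior of the elided else branch: copy the
            # user-specified value (or the None batch_size) over the default.
            setattr(params, param_name, user_param_value)
    return params


class _UserSettings:
    batch_size = None         # left unset -> forwarded as None
    warmup_maxsize_by = None  # left unset -> dataclass default kept


print(_resolve(_UserSettings(), _DynamicallySetParamsSketch()))
# _DynamicallySetParamsSketch(batch_size=None, warmup_maxsize_by=0.0)
```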