@@ -36,8 +36,19 @@ class SkforecastOptCV(BaseEstimator):
3636 exog : pandas Series or DataFrame, default=None
3737 Exogenous variable/s used in the evaluation experiment.
3838
39- refit : bool, default=False
40- Whether to re-fit the forecaster in each iteration.
39+ refit : bool, default=True
40+ Whether to refit the forecaster with the best parameters on the entire
41+ data in ``fit``, after hyperparameter tuning has completed.
42+ If ``True``, ``best_forecaster_`` is fitted to the full ``y`` (and
43+ ``exog``) and can be used to make predictions via ``predict``.
44+ If ``False``, ``best_forecaster_`` only has its parameters set but is
45+ not refitted, so calling ``predict`` raises a ``RuntimeError``. Use
46+ ``refit=False`` when the object is used purely as a parameter estimator.
47+
48+ backtesting_refit : bool, default=False
49+ Whether to refit the forecaster in each iteration of backtesting, while
50+ searching for the best hyperparameters. Passed through to skforecast's
51+ ``TimeSeriesFold``. Unrelated to the post-tuning ``refit`` flag above.
4152
4253 fixed_train_size : bool, default=False
4354 If True, the train size doesn't increase but moves by `steps` in each iteration.
@@ -49,9 +60,6 @@ class SkforecastOptCV(BaseEstimator):
4960 allow_incomplete_fold : bool, default=True
5061 If True, the last fold is allowed to have fewer samples than `steps`.
5162
52- return_best : bool, default=False
53- If True, the best model is returned.
54-
5563 n_jobs : int or 'auto', default="auto"
5664 Number of jobs to run in parallel.
5765
@@ -82,11 +90,11 @@ def __init__(
8290 metric ,
8391 initial_train_size ,
8492 exog = None ,
85- refit = False ,
93+ refit = True ,
94+ backtesting_refit = False ,
8695 fixed_train_size = False ,
8796 gap = 0 ,
8897 allow_incomplete_fold = True ,
89- return_best = False ,
9098 n_jobs = "auto" ,
9199 verbose = False ,
92100 show_progress = False ,
@@ -99,10 +107,10 @@ def __init__(
99107 self .initial_train_size = initial_train_size
100108 self .exog = exog
101109 self .refit = refit
110+ self .backtesting_refit = backtesting_refit
102111 self .fixed_train_size = fixed_train_size
103112 self .gap = gap
104113 self .allow_incomplete_fold = allow_incomplete_fold
105- self .return_best = return_best
106114 self .n_jobs = n_jobs
107115 self .verbose = verbose
108116 self .show_progress = show_progress
@@ -179,11 +187,10 @@ def fit(self, y, exog=None):
179187 metric = self .metric ,
180188 initial_train_size = self .initial_train_size ,
181189 exog = current_exog ,
182- refit = self .refit ,
190+ backtesting_refit = self .backtesting_refit ,
183191 fixed_train_size = self .fixed_train_size ,
184192 gap = self .gap ,
185193 allow_incomplete_fold = self .allow_incomplete_fold ,
186- return_best = self .return_best ,
187194 n_jobs = self .n_jobs ,
188195 verbose = self .verbose ,
189196 show_progress = self .show_progress ,
@@ -202,8 +209,8 @@ def fit(self, y, exog=None):
202209 self .best_forecaster_ = copy .deepcopy (self .forecaster )
203210 self .best_forecaster_ .set_params (best_params )
204211
205- # Refit model with best parameters on the whole dataset
206- self .best_forecaster_ .fit (y = y , exog = current_exog )
212+ if self.refit:
213+     self.best_forecaster_.fit(y=y, exog=current_exog)
207214
208215 return self
209216
@@ -222,4 +229,11 @@ def predict(self, steps, exog=None, **kwargs):
222229 predictions : pandas Series
223230 Predicted values.
224231 """
232+ if not self.refit:
233+     raise RuntimeError(
234+         f"In {type(self).__name__}, refit must be True to make "
235+         f"predictions, but found refit=False. If refit=False, "
236+         f"{type(self).__name__} can be used only to tune "
237+         f"hyperparameters, as a parameter estimator."
238+     )
225239 return self .best_forecaster_ .predict (steps = steps , exog = exog , ** kwargs )
0 commit comments