@@ -122,6 +122,7 @@ def decompose_all(sims: Mapping[str, Simulation]) -> None:
 def compute_rmse_for_terms(
     specs: Sequence[TermSpec],
     sims: Mapping[str, Simulation],
+    n_components: int | None = None,
 ) -> tuple[dict[str, Any], dict[str, Any], dict[str, Any]]:
     """Compute reconstruction error outputs for each configured term.
 
@@ -131,24 +132,22 @@ def compute_rmse_for_terms(
         Term specifications defining the processing order and output keys.
     sims : Mapping[str, Simulation]
         Prepared and decomposed simulations keyed by :attr:`TermSpec.key`.
+    n_components : int or None, default=None
+        Number of components to use for reconstruction. When ``None``,
+        all fitted components (``len(s.pca.components_)``) are used.
 
     Returns
     -------
     tuple[dict[str, Any], dict[str, Any], dict[str, Any]]
         Tuple ``(recs, rmseVs, rmseMs)`` where each dictionary is keyed by
         :attr:`TermSpec.key`.
-
-    Notes
-    -----
-    Uses all fitted components via ``len(s.pca.components_)`` before calling
-    :meth:`Simulation.error`.
     """
     recs: dict[str, Any] = {}
     rmseVs: dict[str, Any] = {}
     rmseMs: dict[str, Any] = {}
     for spec in specs:
         s = sims[spec.key]
-        n = len(s.pca.components_)
+        n = n_components if n_components is not None else len(s.pca.components_)
         rec, rmseV, rmseM = s.error(n)
         recs[spec.key] = rec
         rmseVs[spec.key] = rmseV
@@ -255,21 +254,18 @@ def build_predictions(
 
 def forecast_all(
     specs: Sequence[TermSpec],
     preds: Mapping[str, Predictions],
-    dfs: Mapping[str, pd.DataFrame],
     *,
     train_len: int,
     steps: int,
 ) -> tuple[dict[str, pd.DataFrame], dict[str, Any], dict[str, Any]]:
-    """Forecasts for all terms and return outputs by key.
+    """Run parallel forecasts for all terms and return raw outputs by key.
 
     Parameters
     ----------
     specs : Sequence[TermSpec]
         Term specifications defining processing order and output keys.
     preds : Mapping[str, Predictions]
         Prediction objects keyed by :attr:`TermSpec.key`.
-    dfs : Mapping[str, pd.DataFrame]
-        Original component time-series DataFrames keyed by term.
     train_len : int
         Number of initial rows used as the training window.
     steps : int
@@ -279,21 +275,13 @@ def forecast_all(
     -------
     tuple[dict[str, pd.DataFrame], dict[str, Any], dict[str, Any]]
         Tuple ``(hats, hat_stds, metrics)`` keyed by :attr:`TermSpec.key`.
-
-    Notes
-    -----
-    For each term, the function prepends ``dfs[key][:train_len]`` to the
-    forecast output from
-    :meth:`~nemo_spinup_forecast.forecast.Predictions.parallel_forecast`.
+        ``hats`` contains the raw forecast output (forecast period only).
     """
     hats: dict[str, pd.DataFrame] = {}
     hat_stds: dict[str, Any] = {}
     metrics: dict[str, Any] = {}
     for spec in specs:
-        # Forecast each time series component for each property
        hat, hat_std, m = preds[spec.key].parallel_forecast(train_len, steps)
-        # Concatenate the forecasted time series period with the reference traning period
-        hat = pd.concat([dfs[spec.key][:train_len], hat[:]])
        hats[spec.key] = hat
        hat_stds[spec.key] = hat_std
        metrics[spec.key] = m
0 commit comments