diff --git a/nbs/src/adapters.prophet.ipynb b/nbs/src/adapters.prophet.ipynb
index e14e8f8a6..0aedd849d 100644
--- a/nbs/src/adapters.prophet.ipynb
+++ b/nbs/src/adapters.prophet.ipynb
@@ -36,14 +36,13 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "a192d24e",
+ "metadata": {},
"outputs": [],
"source": [
"#| hide\n",
"warnings.simplefilter(action='ignore', category=FutureWarning)\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -54,7 +53,6 @@
"source": [
"#| export\n",
"import sys\n",
- "from copy import deepcopy\n",
"\n",
"import pandas as pd\n",
"from statsforecast.arima import AutoARIMA\n",
@@ -117,27 +115,36 @@
" If your forecasting pipeline uses Prophet the `AutoARIMAProphet` adapter helps to\n",
" easily substitute Prophet with an AutoARIMA.\n",
"\n",
- " **Parameters:**
\n",
- " `growth`: String 'linear', 'logistic' or 'flat' to specify a linear, logistic or flat trend.
\n",
- " `changepoints`: List of dates of potential changepoints. Otherwise selected automatically.
\n",
- " `n_changepoints`: Number of potential changepoints to include.
\n",
- " `changepoint_range`: Proportion of history in which trend changepoints will be estimated.
\n",
- " `yearly_seasonality`: Fit yearly seasonality.\n",
- " Can be 'auto', True, False, or a number of Fourier terms to generate.
\n",
- " `weekly_seasonality`: Fit weekly seasonality.\n",
- " Can be 'auto', True, False, or a number of Fourier terms to generate.
\n",
- " `daily_seasonality`: Fit daily seasonality.\n",
- " Can be 'auto', True, False, or a number of Fourier terms to generate.
\n",
- " `holidays`: pandas.DataFrame with columns holiday (string) and ds (date type).
\n",
- " `interval_width`: float, uncertainty forecast intervals width. `StatsForecast`'s level
\n",
+ " Parameters\n",
+ " ----------\n",
+ " growth : string\n",
+ " 'linear', 'logistic' or 'flat' to specify a linear, logistic or flat trend.\n",
+ " changepoints : List of dates\n",
+ " Potential changepoints. Otherwise selected automatically.\n",
+ " n_changepoints : int\n",
+ " Number of potential changepoints to include.\n",
+ " changepoint_range : float\n",
+ " Proportion of history in which trend changepoints will be estimated.\n",
+ " yearly_seasonality : str, bool or int\n",
+ " Fit yearly seasonality. Can be 'auto', True, False, or a number of Fourier terms to generate.\n",
+ " weekly_seasonality : str, bool or int\n",
+ " Fit weekly seasonality. Can be 'auto', True, False, or a number of Fourier terms to generate.\n",
+ " daily_seasonality : str, bool or int\n",
+ " Fit daily seasonality. Can be 'auto', True, False, or a number of Fourier terms to generate.\n",
+ " holidays : pandas.DataFrame\n",
+ " DataFrame with columns holiday (string) and ds (date type).\n",
+ " interval_width : float\n",
+ " Uncertainty forecast intervals width. `StatsForecast`'s level\n",
"\n",
- " **Notes:**
\n",
+ " Notes\n",
+ " -----\n",
" You can create automated exogenous variables from the Prophet data processing pipeline\n",
" these exogenous will be included into `AutoARIMA`'s exogenous features. Parameters like \n",
" `seasonality_mode`, `seasonality_prior_scale`, `holidays_prior_scale`, `changepoint_prior_scale`,\n",
" `mcmc_samples`, `uncertainty_samples`, `stan_backend` are Prophet exclusive.\n",
"\n",
- " **References:**
\n",
+ " References\n",
+ " ----------\n",
" [Sean J. Taylor, Benjamin Letham (2017). \"Prophet Forecasting at Scale\"](https://peerj.com/preprints/3190.pdf)\n",
" \n",
" [Oskar Triebe, Hansika Hewamalage, Polina Pilyugina, Nikolay Laptev, Christoph Bergmeir, Ram Rajagopal (2021). \"NeuralProphet: Explainable Forecasting at Scale\".](https://arxiv.org/pdf/2111.15397.pdf)\n",
@@ -241,16 +248,20 @@
" biasadj=biasadj,\n",
" period=period)\n",
" \n",
- " def fit(self, df, disable_seasonal_features=True, **kwargs):\n",
+ " def fit(self, df, disable_seasonal_features=True):\n",
" \"\"\"Fit the AutoARIMAProphet adapter.\n",
"\n",
- " **Parameters:**
\n",
- " `df`: pandas.DataFrame, with columns ds (date type) and y, the time series.
\n",
- " `disable_seasonal_features`: bool, Wheter disable Prophet's seasonal features.
\n",
- " `kwargs`: Additional arguments.
\n",
+ " Parameters\n",
+ " ----------\n",
+ " df : pandas.DataFrame\n",
+ " DataFrame with columns ds (date type) and y, the time series.\n",
+ " disable_seasonal_features : bool (default=True)\n",
+ " Disable Prophet's seasonal features.\n",
" \n",
- " **Returns:**
\n",
- " `self`: `AutoARIMAProphet` adapter object with `AutoARIMA` fitted model.\n",
+ " Returns\n",
+ " -------\n",
+ " AutoARIMAProphet\n",
+ " Adapter object with `AutoARIMA` fitted model.\n",
" \"\"\"\n",
" if self.history is not None:\n",
" raise Exception('Prophet object can only be fit once. '\n",
@@ -272,7 +283,6 @@
" self.make_all_seasonality_features(history))\n",
" self.train_component_cols = component_cols\n",
" self.component_modes = modes\n",
- " self.fit_kwargs = deepcopy(kwargs)\n",
" if disable_seasonal_features:\n",
" seas = tuple(self.seasonalities.keys())\n",
" seasonal_features = seasonal_features.loc[:,~seasonal_features.columns.str.startswith(seas)]\n",
@@ -287,11 +297,15 @@
" def predict(self, df=None):\n",
" \"\"\"Predict using the AutoARIMAProphet adapter.\n",
" \n",
- " **Parameters:**
\n",
- " `df`: pandas.DataFrame, with columns ds (date type) and y, the time series.
\n",
+ " Parameters\n",
+ " ----------\n",
+ " df : pandas.DataFrame\n",
+ " DataFrame with columns ds (date type) and y, the time series.\n",
" \n",
- " **Returns:**
\n",
- " `fcsts_df`: A pandas.DataFrame with the forecast components.\n",
+ " Returns\n",
+ " -------\n",
+ " pandas.DataFrame\n",
+ " DataFrame with the forecast components.\n",
" \"\"\"\n",
" if self.history is None:\n",
" raise Exception('Model has not been fit.')\n",
diff --git a/nbs/src/core/distributed.fugue.ipynb b/nbs/src/core/distributed.fugue.ipynb
index fe377d6fb..8e4d2ed9c 100644
--- a/nbs/src/core/distributed.fugue.ipynb
+++ b/nbs/src/core/distributed.fugue.ipynb
@@ -121,12 +121,17 @@
" This class uses [Fugue](https://github.com/fugue-project/fugue) backend capable of distributing \n",
" computation on Spark, Dask and Ray without any rewrites.\n",
"\n",
- " **Parameters:**
\n",
- " `engine`: fugue.ExecutionEngine, a selection between Spark, Dask, and Ray.
\n",
- " `conf`: fugue.Config, engine configuration.
\n",
- " `**transform_kwargs`: additional kwargs for Fugue's transform method.
\n",
- "\n",
- " **Notes:**
\n",
+ " Parameters\n",
+ " ----------\n",
+ " engine : fugue.ExecutionEngine\n",
+ " A selection between Spark, Dask, and Ray.\n",
+ " conf : fugue.Config\n",
+ " Engine configuration.\n",
+ " **transform_kwargs\n",
+ " Additional kwargs for Fugue's transform method.\n",
+ "\n",
+ " Notes\n",
+ " -----\n",
" A short introduction to Fugue, with examples on how to scale pandas code to Spark, Dask or Ray\n",
" is available [here](https://fugue-tutorials.readthedocs.io/tutorials/quick_look/ten_minutes.html).\n",
" \"\"\"\n",
@@ -157,24 +162,33 @@
" This method uses Fugue's transform function, in combination with \n",
" `core.StatsForecast`'s forecast to efficiently fit a list of StatsForecast models.\n",
"\n",
- " **Parameters:**
\n",
- " `df`: pandas.DataFrame, with columns [`unique_id`, `ds`, `y`] and exogenous.
\n",
- " `freq`: str, frequency of the data, [pandas available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).
\n",
- " `models`: List[typing.Any], list of instantiated objects `StatsForecast.models`.
\n",
- " `fallback_model`: Any, Model to be used if a model fails.
\n",
- " `X_df`: pandas.DataFrame, with [unique_id, ds] columns and df’s future exogenous.\n",
- " `**kwargs`: Additional `core.StatsForecast` parameters. Example forecast horizon `h`.
\n",
- "\n",
- " **Returns:**
\n",
- " `fcsts_df`: pandas.DataFrame, with `models` columns for point predictions and probabilistic\n",
- " predictions for all fitted `models`.
\n",
+ " Parameters\n",
+ " ----------\n",
+ " df : pandas.DataFrame\n",
+ " DataFrame with columns [`unique_id`, `ds`, `y`] and exogenous.\n",
+ " freq : str\n",
+ " Frequency of the data, [pandas available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).\n",
+ " models : List[typing.Any]\n",
+ " List of instantiated objects `StatsForecast.models`.\n",
+ " fallback_model : Any\n",
+ " Model to be used if a model fails.\n",
+ " X_df : pandas.DataFrame\n",
+ " DataFrame with [unique_id, ds] columns and df’s future exogenous.\n",
+ " **kwargs\n",
+ " Additional `core.StatsForecast` parameters. Example forecast horizon `h`.\n",
+ "\n",
+ " Returns\n",
+ " -------\n",
+ " fcsts_df : pandas.DataFrame\n",
+            "        DataFrame with `models` columns for point predictions and probabilistic predictions for all fitted `models`.\n",
" \n",
- " **References:**
\n",
+ " References\n",
+ " ----------\n",
" For more information check the \n",
" [Fugue's transform](https://fugue-tutorials.readthedocs.io/tutorials/beginner/transform.html)\n",
- " tutorial.
\n",
+ " tutorial.\n",
" The [core.StatsForecast's forecast](https://nixtla.github.io/statsforecast/core.html#statsforecast.forecast)\n",
- " method documentation.
\n",
+ " method documentation.\n",
" Or the list of available [StatsForecast's models](https://nixtla.github.io/statsforecast/src/core/models.html).\n",
" \"\"\"\n",
" level = kwargs.get(\"level\", [])\n",
@@ -224,19 +238,26 @@
" provides better model's generalization measurements by increasing the test's length \n",
" and diversity.\n",
"\n",
- " **Parameters:**
\n",
- " `df`: pandas.DataFrame, with columns [`unique_id`, `ds`, `y`] and exogenous.
\n",
- " `freq`: str, frequency of the data, [panda's available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).
\n",
- " `models`: List[typing.Any], list of instantiated objects `StatsForecast.models`.
\n",
- " `fallback_model`: Any, Model to be used if a model fails.
\n",
- "\n",
- " **Returns:**
\n",
- " `fcsts_df`: pandas.DataFrame, with `models` columns for point predictions and probabilistic\n",
- " predictions for all fitted `models`.
\n",
+ " Parameters\n",
+ " ----------\n",
+ " df : pandas.DataFrame\n",
+ " DataFrame with columns [`unique_id`, `ds`, `y`] and exogenous.\n",
+ " freq : str\n",
+ " Frequency of the data, [pandas available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).\n",
+ " models : List[typing.Any]\n",
+ " List of instantiated objects `StatsForecast.models`.\n",
+ " fallback_model : Any\n",
+ " Model to be used if a model fails.\n",
+ "\n",
+ " Returns\n",
+ " -------\n",
+ " pandas.DataFrame\n",
+            "        DataFrame with `models` columns for point predictions and probabilistic predictions for all fitted `models`.\n",
" \n",
- " **References:**
\n",
+ " References\n",
+ " ----------\n",
" The [core.StatsForecast's cross validation](https://nixtla.github.io/statsforecast/core.html#statsforecast.cross_validation)\n",
- " method documentation.
\n",
+ " method documentation.\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"Forecasting principles and practice, Temporal Cross-Validation\"](https://otexts.com/fpp3/tscv.html).\n",
" \"\"\"\n",
" level = kwargs.get(\"level\", [])\n",
diff --git a/nbs/src/core/models.ipynb b/nbs/src/core/models.ipynb
index c30c6cbde..9ca59d47e 100644
--- a/nbs/src/core/models.ipynb
+++ b/nbs/src/core/models.ipynb
@@ -127,16 +127,14 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"#| hide\n",
"import matplotlib.pyplot as plt\n",
"import pandas as pd\n",
"from datetime import date, timedelta\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -389,12 +387,12 @@
" Automatically selects the best ARIMA (AutoRegressive Integrated Moving Average) \n",
" model using an information criterion. Default is Akaike Information Criterion (AICc). \n",
" \n",
- " **Note:**\n",
- "\n",
+ " Notes\n",
+ " -----\n",
" This implementation is a mirror of Hyndman's [forecast::auto.arima](https://github.com/robjhyndman/forecast).\n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman, Yeasmin Khandakar (2008). \"Automatic Time Series Forecasting: The forecast package for R\"](https://www.jstatsoft.org/article/view/v027i03).\n",
" \n",
" Parameters\n",
@@ -1011,14 +1009,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import AutoARIMA\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -1063,12 +1059,12 @@
" If the component is selected as 'Z', it operates as a placeholder to ask the AutoETS model\n",
" to figure out the best parameter.\n",
" \n",
- " **Note:**\n",
- "\n",
+ " Notes\n",
+ " -----\n",
" This implementation is a mirror of Hyndman's [forecast::ets](https://github.com/robjhyndman/forecast).\n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman, Yeasmin Khandakar (2008). \"Automatic Time Series Forecasting: The forecast package for R\"](https://www.jstatsoft.org/article/view/v027i03).\n",
" \n",
" [Hyndman, Rob, et al (2008). \"Forecasting with exponential smoothing: the state space approach\"](https://robjhyndman.com/expsmooth/).\n",
@@ -1454,14 +1450,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import AutoETS\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -1580,8 +1574,8 @@
" If the component is selected as 'Z', it operates as a placeholder to ask the AutoCES model\n",
" to figure out the best parameter.\n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Svetunkov, Ivan & Kourentzes, Nikolaos. (2015). \"Complex Exponential Smoothing\". 10.13140/RG.2.1.3757.2562. ](https://onlinelibrary.wiley.com/doi/full/10.1002/nav.22074).\n",
" \n",
" Parameters\n",
@@ -1982,14 +1976,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import AutoCES\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -2028,8 +2020,8 @@
" Optimized Theta Model ('OTM'), Dynamic Standard Theta Model ('DSTM'),\n",
" Dynamic Optimized Theta Model ('DOTM')) model using mse. \n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). \"Models for optimising the theta method and their relationship to state space models\". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)\n",
" \n",
" Parameters\n",
@@ -2362,14 +2354,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import AutoTheta\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -2410,8 +2400,8 @@
"\n",
" AutoRegressive Integrated Moving Average model.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman, Yeasmin Khandakar (2008). \"Automatic Time Series Forecasting: The forecast package for R\"](https://www.jstatsoft.org/article/view/v027i03).\n",
" \n",
" Parameters\n",
@@ -2839,14 +2829,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import ARIMA\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -3070,14 +3058,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import AutoRegressive\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -3243,8 +3229,8 @@
"\n",
" The rate $0 \\leq \\\\alpha \\leq 1$ at which the weights decrease is called the smoothing parameter. When $\\\\alpha = 1$, SES is equal to the naive method.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Charles C Holt (1957). “Forecasting seasonals and trends by exponentially weighted moving averages”](https://doi.org/10.1016/j.ijforecast).\n",
"\n",
" Parameters\n",
@@ -3502,14 +3488,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import SimpleExponentialSmoothing\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -3568,8 +3552,8 @@
"\n",
" The smoothing parameter $\\\\alpha^*$ is optimized by square error minimization.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Charles C Holt (1957). “Forecasting seasonals and trends by exponentially weighted moving averages”](https://doi.org/10.1016/j.ijforecast).\n",
"\n",
" Parameters\n",
@@ -3814,14 +3798,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import SimpleExponentialSmoothingOptimized\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -3889,13 +3871,13 @@
" Assuming there are $t$ observations and season $s$, the one-step forecast is given by: \n",
" $\\hat{y}_{t+1,s} = \\\\alpha y_t + (1-\\\\alpha) \\hat{y}_{t-1,s}$\n",
"\n",
- " **Note:**\n",
- "\n",
+ " Notes\n",
+ " -----\n",
" This method is an extremely simplified of Holt-Winter's method where the trend and level are set to zero.\n",
" And a single seasonal smoothing parameter $\\\\alpha$ is shared across seasons.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Charles. C. Holt (1957). \"Forecasting seasonals and trends by exponentially weighted moving averages\", ONR Research Memorandum, Carnegie Institute of Technology 52.](https://www.sciencedirect.com/science/article/abs/pii/S0169207003001134).\n",
" [Peter R. Winters (1960). \"Forecasting sales by exponentially weighted moving averages\". Management Science](https://pubsonline.informs.org/doi/abs/10.1287/mnsc.6.3.324).\n",
"\n",
@@ -4195,14 +4177,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import SeasonalExponentialSmoothing\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -4277,13 +4257,13 @@
" \n",
" The smoothing parameter $\\\\alpha^*$ is optimized by square error minimization. \n",
"\n",
- " **Note:**\n",
- "\n",
+ " Notes\n",
+ " -----\n",
" This method is an extremely simplified of Holt-Winter's method where the trend and level are set to zero.\n",
" And a single seasonal smoothing parameter $\\\\alpha$ is shared across seasons.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Charles. C. Holt (1957). \"Forecasting seasonals and trends by exponentially weighted moving averages\", ONR Research Memorandum, Carnegie Institute of Technology 52.](https://www.sciencedirect.com/science/article/abs/pii/S0169207003001134).\n",
" [Peter R. Winters (1960). \"Forecasting sales by exponentially weighted moving averages\". Management Science](https://pubsonline.informs.org/doi/abs/10.1287/mnsc.6.3.324).\n",
" \n",
@@ -4548,14 +4528,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import SeasonalExponentialSmoothingOptimized\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -4591,8 +4569,8 @@
" Also known as double exponential smoothing, Holt's method is an extension of exponential smoothing for series with a trend.\n",
" This implementation returns the corresponding `ETS` model with additive (A) or multiplicative (M) errors (so either 'AAN' or 'MAN'). \n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"Forecasting principles and practice, Methods with trend\"](https://otexts.com/fpp3/holt.html).\n",
"\n",
" Parameters\n",
@@ -4763,14 +4741,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import Holt\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -4806,8 +4782,8 @@
" Also known as triple exponential smoothing, Holt-Winters' method is an extension of exponential smoothing for series that contain both trend and seasonality.\n",
" This implementation returns the corresponding `ETS` model with additive (A) or multiplicative (M) errors (so either 'AAA' or 'MAM'). \n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"Forecasting principles and practice, Methods with seasonality\"](https://otexts.com/fpp3/holt-winters.html).\n",
" \n",
" Parameters\n",
@@ -4976,14 +4952,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import HoltWinters\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -5052,8 +5026,8 @@
" Assuming there are $t$ observations, the one-step forecast is given by: \n",
" $$ \\hat{y}_{t+1} = \\\\frac{1}{t} \\sum_{j=1}^t y_j $$\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"Forecasting principles and practice, Simple Methods\"](https://otexts.com/fpp3/simple-methods.html).\n",
"\n",
" Parameters \n",
@@ -5341,14 +5315,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import HistoricAverage\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -5386,8 +5358,8 @@
" All forecasts have the value of the last observation: \n",
" $\\hat{y}_{t+1} = y_t$ for all $t$\n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"forecasting principles and practice, Simple Methods\"](https://otexts.com/fpp3/simple-methods.html). \n",
" \n",
" Parameters \n",
@@ -5740,14 +5712,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import Naive\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -5815,8 +5785,8 @@
" From the previous equation, we can see that this is equivalent to extrapolating a line between \n",
" the first and the last observation. \n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"forecasting principles and practice, Simple Methods\"](https://otexts.com/fpp3/simple-methods.html).\n",
"\n",
" Parameters\n",
@@ -6128,14 +6098,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import RandomWalkWithDrift\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -6172,8 +6140,8 @@
"\n",
" A method similar to the naive, but uses the last known observation of the same period (e.g. the same month of the previous year) in order to capture seasonal variations.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"forecasting principles and practice, Simple Methods\"](https://otexts.com/fpp3/simple-methods.html#seasonal-na%C3%AFve-method).\n",
"\n",
" Parameters\n",
@@ -6494,14 +6462,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import SeasonalNaive\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -6569,8 +6535,8 @@
" The length of the window selected should take into account the importance of past\n",
" observations and how fast the series changes.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"forecasting principles and practice, Simple Methods\"](https://otexts.com/fpp3/simple-methods.html).\n",
"\n",
" Parameters\n",
@@ -6812,14 +6778,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import WindowAverage\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -6890,8 +6854,8 @@
"\n",
" An average of the last $k$ observations of the same period, with $k$ the length of the window.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Rob J. Hyndman and George Athanasopoulos (2018). \"forecasting principles and practice, Simple Methods\"](https://otexts.com/fpp3/simple-methods.html).\n",
"\n",
" Parameters\n",
@@ -7148,14 +7112,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import SeasonalWindowAverage\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -7234,9 +7196,9 @@
" They are notoriously hard to forecast, and so, different methods have been developed \n",
" especifically for them.\n",
" \n",
- " **References:**\n",
- "\n",
- " [Nikolopoulos, K., Syntetos, A. A., Boylan, J. E., Petropoulos, F., & Assimakopoulos, V. (2011). An aggregate-disaggregate intermittent demand approach (ADIDA) to forecasting: an empirical proposition and analysis. Journal of the Operational Research Society, 62(3), 544-554.](https://researchportal.bath.ac.uk/en/publications/an-aggregate-disaggregate-intermittent-demand-approach-adida-to-f).\n",
+ " References\n",
+ " ----------\n",
+ " [Nikolopoulos, K., Syntetos, A. A., Boylan, J. E., Petropoulos, F., & Assimakopoulos, V. (2011). An aggregate–disaggregate intermittent demand approach (ADIDA) to forecasting: an empirical proposition and analysis. Journal of the Operational Research Society, 62(3), 544-554.](https://researchportal.bath.ac.uk/en/publications/an-aggregate-disaggregate-intermittent-demand-approach-adida-to-f).\n",
" \n",
" Parameters\n",
" ----------\n",
@@ -7485,14 +7447,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import ADIDA\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -7564,8 +7524,8 @@
" where $\\hat{z}_t$ and $\\hat{p}_t$ are forecasted using SES. The smoothing parameter \n",
" of both components is set equal to 0.1\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Croston, J. D. (1972). Forecasting and stock control for intermittent demands. Journal of the Operational Research Society, 23(3), 289-303.](https://link.springer.com/article/10.1057/jors.1972.50)\n",
" \n",
" Parameters\n",
@@ -7803,14 +7763,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import CrostonClassic\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -7882,8 +7840,8 @@
" selected from the range $[0.1,0.3]$. Both the non-zero demand $z_t$ and the inter-demand \n",
" intervals $p_t$ are smoothed separately, so their smoothing parameters can be different.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Croston, J. D. (1972). Forecasting and stock control for intermittent demands. Journal of the Operational Research Society, 23(3), 289-303.](https://link.springer.com/article/10.1057/jors.1972.50).\n",
"\n",
" Parameters\n",
@@ -8117,14 +8075,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import CrostonOptimized\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -8188,8 +8144,8 @@
" forecast is given by:\n",
" $$ \\hat{y}_t = 0.95 \\\\frac{\\hat{z}_t}{\\hat{p}_t} $$\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Croston, J. D. (1972). Forecasting and stock control for intermittent demands. Journal of the Operational Research Society, 23(3), 289-303.](https://link.springer.com/article/10.1057/jors.1972.50).\n",
"\n",
" Parameters\n",
@@ -8429,14 +8385,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import CrostonSBA\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -8507,8 +8461,8 @@
" dynamics of the data. Uses the optimized SES to generate the forecasts at the new levels\n",
" and then combines them using a simple average.\n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Syntetos, A. A., & Boylan, J. E. (2021). Intermittent demand forecasting: Context, methods and applications. John Wiley & Sons.](https://www.ifors.org/intermittent-demand-forecasting-context-methods-and-applications/).\n",
"\n",
" Parameters\n",
@@ -8748,14 +8702,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import IMAPA\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -8841,8 +8793,8 @@
" Both $d_t$ and $z_t$ are forecasted using SES. The smooting paramaters of each may differ, \n",
" like in the optimized Croston's method.\n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Teunter, R. H., Syntetos, A. A., & Babai, M. Z. (2011). Intermittent demand: Linking forecasting to inventory obsolescence. European Journal of Operational Research, 214(3), 606-615.](https://www.sciencedirect.com/science/article/abs/pii/S0377221711004437)\n",
"\n",
" Parameters\n",
@@ -9089,14 +9041,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import TSB\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -9160,8 +9110,8 @@
" in multiple seasonalities using LOESS. Then forecasts the trend using \n",
" a custom non-seaonal model and each seasonality using a SeasonalNaive model.\n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Bandara, Kasun & Hyndman, Rob & Bergmeir, Christoph. (2021). \"MSTL: A Seasonal-Trend Decomposition Algorithm for Time Series with Multiple Seasonal Patterns\".](https://arxiv.org/abs/2107.13462).\n",
" \n",
" Parameters\n",
@@ -9648,14 +9598,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import MSTL\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -9695,8 +9643,8 @@
"class Theta(AutoTheta): \n",
" \"\"\" Standard Theta Method. \n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). \"Models for optimising the theta method and their relationship to state space models\". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)\n",
"\n",
" Parameters\n",
@@ -9866,14 +9814,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import Theta\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -9906,8 +9852,8 @@
"class OptimizedTheta(AutoTheta): \n",
" \"\"\" Optimized Theta Method. \n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). \"Models for optimising the theta method and their relationship to state space models\". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)\n",
"\n",
" Parameters\n",
@@ -10079,14 +10025,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import OptimizedTheta\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -10119,8 +10063,8 @@
"class DynamicTheta(AutoTheta): \n",
" \"\"\" Dynamic Standard Theta Method. \n",
"\n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). \"Models for optimising the theta method and their relationship to state space models\". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)\n",
"\n",
" Parameters\n",
@@ -10292,14 +10236,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import DynamicTheta\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -10332,8 +10274,8 @@
"class DynamicOptimizedTheta(AutoTheta): \n",
" \"\"\" Dynamic Optimized Theta Method. \n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). \"Models for optimising the theta method and their relationship to state space models\". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)\n",
"\n",
" Parameters\n",
@@ -10504,14 +10446,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import DynamicOptimizedTheta\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -10568,8 +10508,8 @@
" \n",
" The ARCH model is a particular case of the GARCH model when $q=0$. \n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Engle, R. F. (1982). Autoregressive conditional heteroscedasticity with estimates of the variance of United Kingdom inflation. Econometrica: Journal of the econometric society, 987-1007.](http://www.econ.uiuc.edu/~econ508/Papers/engle82.pdf) \n",
" \n",
" [Bollerslev, T. (1986). Generalized autoregressive conditional heteroskedasticity. Journal of econometrics, 31(3), 307-327.](https://citeseerx.ist.psu.edu/document?repid=rep1&type=pdf&doi=7da8bfa5295375c1141d797e80065a599153c19d)\n",
@@ -10915,8 +10855,8 @@
" Here $\\epsilon_t$ is a sequence of iid random variables with zero mean and unit variance. \n",
" The coefficients $w$ and $a_i$, $i=1,...,p$ must be nonnegative and $\\sum_{k=1}^p a_k < 1$. \n",
" \n",
- " **References:**\n",
- "\n",
+ " References\n",
+ " ----------\n",
" [Engle, R. F. (1982). Autoregressive conditional heteroscedasticity with estimates of the variance of United Kingdom inflation. Econometrica: Journal of the econometric society, 987-1007.](http://www.econ.uiuc.edu/~econ508/Papers/engle82.pdf) \n",
"\n",
" [James D. Hamilton. Time Series Analysis Princeton University Press, Princeton, New Jersey, 1st Edition, 1994.](https://press.princeton.edu/books/hardcover/9780691042893/time-series-analysis)\n",
@@ -11352,14 +11292,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import ConstantModel\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -11510,14 +11448,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import ZeroModel\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
@@ -11639,14 +11575,12 @@
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {},
"outputs": [],
"source": [
"from statsforecast.models import NaNModel\n",
"from statsforecast.utils import AirPassengers as ap\n"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
diff --git a/nbs/src/distributed.multiprocess.ipynb b/nbs/src/distributed.multiprocess.ipynb
index 9c509d826..3b60c4681 100644
--- a/nbs/src/distributed.multiprocess.ipynb
+++ b/nbs/src/distributed.multiprocess.ipynb
@@ -75,11 +75,10 @@
"class MultiprocessBackend(ParallelBackend):\n",
" \"\"\"MultiprocessBackend Parent Class for Distributed Computation.\n",
"\n",
- " **Parameters:**
\n",
- " `n_jobs`: int, number of jobs used in the parallel processing, use -1 for all cores.
\n",
- "\n",
- " **Notes:**
\n",
- " \n",
+ " Parameters\n",
+ " ----------\n",
+ " n_jobs : int\n",
+ " Number of jobs used in the parallel processing, use -1 for all cores.\n",
" \"\"\"\n",
" def __init__(self, n_jobs: int) -> None:\n",
" self.n_jobs = n_jobs\n",
diff --git a/nbs/src/utils.ipynb b/nbs/src/utils.ipynb
index 0fc36afa3..634f31360 100644
--- a/nbs/src/utils.ipynb
+++ b/nbs/src/utils.ipynb
@@ -109,17 +109,29 @@
" If `n_static_features > 0`, then each series gets static features with random values.\n",
" If `equal_ends == True` then all series end at the same date.\n",
"\n",
- " **Parameters:**
\n",
- " `n_series`: int, number of series for synthetic panel.
\n",
- " `min_length`: int, minimal length of synthetic panel's series.
\n",
- " `max_length`: int, minimal length of synthetic panel's series.
\n",
- " `n_static_features`: int, default=0, number of static exogenous variables for synthetic panel's series.
\n",
- " `equal_ends`: bool, if True, series finish in the same date stamp `ds`.
\n",
- " `freq`: str, frequency of the data, [panda's available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).
\n",
- " `engine`: str, engine to be used in DataFrame construction; NOTE: index does not exist in polars DataFrame\n",
+ " Parameters\n",
+ " ----------\n",
+ " n_series : int\n",
+ " Number of series for synthetic panel.\n",
+ " freq : str (default='D')\n",
+ " Frequency of the data, 'D' or 'M'.\n",
+ " min_length : int (default=50)\n",
+ " Minimum length of synthetic panel's series.\n",
+ " max_length : int (default=500)\n",
+ " Maximum length of synthetic panel's series.\n",
+ " n_static_features : int (default=0)\n",
+ " Number of static exogenous variables for synthetic panel's series.\n",
+ " equal_ends : bool (default=False)\n",
+ " Series should end in the same date stamp `ds`.\n",
+ " engine : str (default='pandas')\n",
+ " Output Dataframe type ('pandas' or 'polars').\n",
+ " seed : int (default=0)\n",
+ " Random seed used for generating the data.\n",
"\n",
- " **Returns:**
\n",
- " `freq`: pandas.DataFrame | polars.DataFrame, synthetic panel with columns [`unique_id`, `ds`, `y`] and exogenous.\n",
+ " Returns\n",
+ " -------\n",
+ " pandas or polars DataFrame\n",
+ " Synthetic panel with columns [`unique_id`, `ds`, `y`] and exogenous.\n",
" \"\"\"\n",
" return utils_generate_series(\n",
" n_series=n_series,\n",
diff --git a/statsforecast/adapters/prophet.py b/statsforecast/adapters/prophet.py
index afda82587..d12c9426e 100644
--- a/statsforecast/adapters/prophet.py
+++ b/statsforecast/adapters/prophet.py
@@ -3,9 +3,8 @@
# %% auto 0
__all__ = ['AutoARIMAProphet']
-# %% ../../nbs/src/adapters.prophet.ipynb 3
+# %% ../../nbs/src/adapters.prophet.ipynb 4
import sys
-from copy import deepcopy
import pandas as pd
from ..arima import AutoARIMA
@@ -33,7 +32,7 @@
)
raise ModuleNotFoundError(msg) from e
-# %% ../../nbs/src/adapters.prophet.ipynb 6
+# %% ../../nbs/src/adapters.prophet.ipynb 7
class AutoARIMAProphet(Prophet):
"""AutoARIMAProphet adapter.
@@ -44,27 +43,36 @@ class AutoARIMAProphet(Prophet):
If your forecasting pipeline uses Prophet the `AutoARIMAProphet` adapter helps to
easily substitute Prophet with an AutoARIMA.
- **Parameters:**
- `growth`: String 'linear', 'logistic' or 'flat' to specify a linear, logistic or flat trend.
- `changepoints`: List of dates of potential changepoints. Otherwise selected automatically.
- `n_changepoints`: Number of potential changepoints to include.
- `changepoint_range`: Proportion of history in which trend changepoints will be estimated.
- `yearly_seasonality`: Fit yearly seasonality.
- Can be 'auto', True, False, or a number of Fourier terms to generate.
- `weekly_seasonality`: Fit weekly seasonality.
- Can be 'auto', True, False, or a number of Fourier terms to generate.
- `daily_seasonality`: Fit daily seasonality.
- Can be 'auto', True, False, or a number of Fourier terms to generate.
- `holidays`: pandas.DataFrame with columns holiday (string) and ds (date type).
- `interval_width`: float, uncertainty forecast intervals width. `StatsForecast`'s level
+ Parameters
+ ----------
+ growth : string
+ 'linear', 'logistic' or 'flat' to specify a linear, logistic or flat trend.
+ changepoints : List of dates
+ Potential changepoints. Otherwise selected automatically.
+ n_changepoints : int
+ Number of potential changepoints to include.
+ changepoint_range : float
+ Proportion of history in which trend changepoints will be estimated.
+ yearly_seasonality : str, bool or int
+ Fit yearly seasonality. Can be 'auto', True, False, or a number of Fourier terms to generate.
+ weekly_seasonality : str, bool or int
+ Fit weekly seasonality. Can be 'auto', True, False, or a number of Fourier terms to generate.
+ daily_seasonality : str, bool or int
+ Fit daily seasonality. Can be 'auto', True, False, or a number of Fourier terms to generate.
+ holidays : pandas.DataFrame
+ DataFrame with columns holiday (string) and ds (date type).
+ interval_width : float
+        Uncertainty forecast intervals width (analogous to `StatsForecast`'s `level`).
- **Notes:**
+ Notes
+ -----
You can create automated exogenous variables from the Prophet data processing pipeline
these exogenous will be included into `AutoARIMA`'s exogenous features. Parameters like
`seasonality_mode`, `seasonality_prior_scale`, `holidays_prior_scale`, `changepoint_prior_scale`,
`mcmc_samples`, `uncertainty_samples`, `stan_backend` are Prophet exclusive.
- **References:**
+ References
+ ----------
[Sean J. Taylor, Benjamin Letham (2017). "Prophet Forecasting at Scale"](https://peerj.com/preprints/3190.pdf)
[Oskar Triebe, Hansika Hewamalage, Polina Pilyugina, Nikolay Laptev, Christoph Bergmeir, Ram Rajagopal (2021). "NeuralProphet: Explainable Forecasting at Scale".](https://arxiv.org/pdf/2111.15397.pdf)
@@ -175,16 +183,20 @@ def __init__(
period=period,
)
- def fit(self, df, disable_seasonal_features=True, **kwargs):
+ def fit(self, df, disable_seasonal_features=True):
"""Fit the AutoARIMAProphet adapter.
- **Parameters:**
- `df`: pandas.DataFrame, with columns ds (date type) and y, the time series.
- `disable_seasonal_features`: bool, Wheter disable Prophet's seasonal features.
- `kwargs`: Additional arguments.
+ Parameters
+ ----------
+ df : pandas.DataFrame
+ DataFrame with columns ds (date type) and y, the time series.
+ disable_seasonal_features : bool (default=True)
+ Disable Prophet's seasonal features.
- **Returns:**
- `self`: `AutoARIMAProphet` adapter object with `AutoARIMA` fitted model.
+ Returns
+ -------
+ AutoARIMAProphet
+ Adapter object with `AutoARIMA` fitted model.
"""
if self.history is not None:
raise Exception(
@@ -213,7 +225,6 @@ def fit(self, df, disable_seasonal_features=True, **kwargs):
) = self.make_all_seasonality_features(history)
self.train_component_cols = component_cols
self.component_modes = modes
- self.fit_kwargs = deepcopy(kwargs)
if disable_seasonal_features:
seas = tuple(self.seasonalities.keys())
seasonal_features = seasonal_features.loc[
@@ -230,11 +241,15 @@ def fit(self, df, disable_seasonal_features=True, **kwargs):
def predict(self, df=None):
"""Predict using the AutoARIMAProphet adapter.
- **Parameters:**
- `df`: pandas.DataFrame, with columns ds (date type) and y, the time series.
+ Parameters
+ ----------
+ df : pandas.DataFrame
+ DataFrame with columns ds (date type) and y, the time series.
- **Returns:**
- `fcsts_df`: A pandas.DataFrame with the forecast components.
+ Returns
+ -------
+ pandas.DataFrame
+ DataFrame with the forecast components.
"""
if self.history is None:
raise Exception("Model has not been fit.")
diff --git a/statsforecast/distributed/fugue.py b/statsforecast/distributed/fugue.py
index 98ea4600c..11d909825 100644
--- a/statsforecast/distributed/fugue.py
+++ b/statsforecast/distributed/fugue.py
@@ -53,12 +53,17 @@ class FugueBackend(ParallelBackend):
This class uses [Fugue](https://github.com/fugue-project/fugue) backend capable of distributing
computation on Spark, Dask and Ray without any rewrites.
- **Parameters:**
- `engine`: fugue.ExecutionEngine, a selection between Spark, Dask, and Ray.
- `conf`: fugue.Config, engine configuration.
- `**transform_kwargs`: additional kwargs for Fugue's transform method.
+ Parameters
+ ----------
+ engine : fugue.ExecutionEngine
+ A selection between Spark, Dask, and Ray.
+ conf : fugue.Config
+ Engine configuration.
+ **transform_kwargs
+ Additional kwargs for Fugue's transform method.
- **Notes:**
+ Notes
+ -----
A short introduction to Fugue, with examples on how to scale pandas code to Spark, Dask or Ray
is available [here](https://fugue-tutorials.readthedocs.io/tutorials/quick_look/ten_minutes.html).
"""
@@ -85,24 +90,33 @@ def forecast(
This method uses Fugue's transform function, in combination with
`core.StatsForecast`'s forecast to efficiently fit a list of StatsForecast models.
- **Parameters:**
- `df`: pandas.DataFrame, with columns [`unique_id`, `ds`, `y`] and exogenous.
- `freq`: str, frequency of the data, [pandas available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).
- `models`: List[typing.Any], list of instantiated objects `StatsForecast.models`.
- `fallback_model`: Any, Model to be used if a model fails.
- `X_df`: pandas.DataFrame, with [unique_id, ds] columns and df’s future exogenous.
- `**kwargs`: Additional `core.StatsForecast` parameters. Example forecast horizon `h`.
+ Parameters
+ ----------
+ df : pandas.DataFrame
+ DataFrame with columns [`unique_id`, `ds`, `y`] and exogenous.
+ freq : str
+ Frequency of the data, [pandas available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).
+ models : List[typing.Any]
+ List of instantiated objects `StatsForecast.models`.
+ fallback_model : Any
+ Model to be used if a model fails.
+ X_df : pandas.DataFrame
+ DataFrame with [unique_id, ds] columns and df’s future exogenous.
+ **kwargs
+ Additional `core.StatsForecast` parameters. Example forecast horizon `h`.
- **Returns:**
- `fcsts_df`: pandas.DataFrame, with `models` columns for point predictions and probabilistic
- predictions for all fitted `models`.
+ Returns
+ -------
+ fcsts_df : pandas.DataFrame
+        DataFrame with `models` columns for point predictions and probabilistic predictions for all fitted `models`.
- **References:**
+ References
+ ----------
For more information check the
[Fugue's transform](https://fugue-tutorials.readthedocs.io/tutorials/beginner/transform.html)
- tutorial.
+ tutorial.
The [core.StatsForecast's forecast](https://nixtla.github.io/statsforecast/core.html#statsforecast.forecast)
- method documentation.
+ method documentation.
Or the list of available [StatsForecast's models](https://nixtla.github.io/statsforecast/src/core/models.html).
"""
level = kwargs.get("level", [])
@@ -156,19 +170,26 @@ def cross_validation(
provides better model's generalization measurements by increasing the test's length
and diversity.
- **Parameters:**
- `df`: pandas.DataFrame, with columns [`unique_id`, `ds`, `y`] and exogenous.
- `freq`: str, frequency of the data, [panda's available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).
- `models`: List[typing.Any], list of instantiated objects `StatsForecast.models`.
- `fallback_model`: Any, Model to be used if a model fails.
+ Parameters
+ ----------
+ df : pandas.DataFrame
+ DataFrame with columns [`unique_id`, `ds`, `y`] and exogenous.
+ freq : str
+ Frequency of the data, [pandas available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).
+ models : List[typing.Any]
+ List of instantiated objects `StatsForecast.models`.
+ fallback_model : Any
+ Model to be used if a model fails.
- **Returns:**
- `fcsts_df`: pandas.DataFrame, with `models` columns for point predictions and probabilistic
- predictions for all fitted `models`.
+ Returns
+ -------
+ pandas.DataFrame
+        DataFrame with `models` columns for point predictions and probabilistic predictions for all fitted `models`.
- **References:**
+ References
+ ----------
The [core.StatsForecast's cross validation](https://nixtla.github.io/statsforecast/core.html#statsforecast.cross_validation)
- method documentation.
+ method documentation.
[Rob J. Hyndman and George Athanasopoulos (2018). "Forecasting principles and practice, Temporal Cross-Validation"](https://otexts.com/fpp3/tscv.html).
"""
level = kwargs.get("level", [])
diff --git a/statsforecast/distributed/multiprocess.py b/statsforecast/distributed/multiprocess.py
index 048d16884..be62f3d1c 100644
--- a/statsforecast/distributed/multiprocess.py
+++ b/statsforecast/distributed/multiprocess.py
@@ -19,11 +19,10 @@
class MultiprocessBackend(ParallelBackend):
"""MultiprocessBackend Parent Class for Distributed Computation.
- **Parameters:**
- `n_jobs`: int, number of jobs used in the parallel processing, use -1 for all cores.
-
- **Notes:**
-
+ Parameters
+ ----------
+ n_jobs : int
+        Number of jobs used in the parallel processing; use -1 for all cores.
"""
def __init__(self, n_jobs: int) -> None:
diff --git a/statsforecast/models.py b/statsforecast/models.py
index 8d7a81037..676f1ed72 100644
--- a/statsforecast/models.py
+++ b/statsforecast/models.py
@@ -42,7 +42,7 @@
NOGIL,
)
-# %% ../nbs/src/core/models.ipynb 8
+# %% ../nbs/src/core/models.ipynb 9
def _add_fitted_pi(res, se, level):
level = sorted(level)
level = np.asarray(level)
@@ -55,7 +55,7 @@ def _add_fitted_pi(res, se, level):
res = {**res, **lo, **hi}
return res
-# %% ../nbs/src/core/models.ipynb 9
+# %% ../nbs/src/core/models.ipynb 10
def _add_conformal_distribution_intervals(
fcst: Dict,
cs: np.ndarray,
@@ -84,7 +84,7 @@ def _add_conformal_distribution_intervals(
fcst[col] = quantiles[i]
return fcst
-# %% ../nbs/src/core/models.ipynb 10
+# %% ../nbs/src/core/models.ipynb 11
def _get_conformal_method(method: str):
available_methods = {
"conformal_distribution": _add_conformal_distribution_intervals,
@@ -97,7 +97,7 @@ def _get_conformal_method(method: str):
)
return available_methods[method]
-# %% ../nbs/src/core/models.ipynb 11
+# %% ../nbs/src/core/models.ipynb 12
class _TS:
def new(self):
b = type(self).__new__(type(self))
@@ -145,17 +145,19 @@ def _add_conformal_intervals(self, fcst, y, X, level):
def _add_predict_conformal_intervals(self, fcst, level):
return self._add_conformal_intervals(fcst=fcst, y=None, X=None, level=level)
-# %% ../nbs/src/core/models.ipynb 16
+# %% ../nbs/src/core/models.ipynb 17
class AutoARIMA(_TS):
"""AutoARIMA model.
Automatically selects the best ARIMA (AutoRegressive Integrated Moving Average)
model using an information criterion. Default is Akaike Information Criterion (AICc).
- **Note:**
+ Notes
+ -----
This implementation is a mirror of Hyndman's [forecast::auto.arima](https://github.com/robjhyndman/forecast).
- **References:**
+ References
+ ----------
[Rob J. Hyndman, Yeasmin Khandakar (2008). "Automatic Time Series Forecasting: The forecast package for R"](https://www.jstatsoft.org/article/view/v027i03).
Parameters
@@ -569,7 +571,7 @@ def forward(
res = _add_fitted_pi(res=res, se=se, level=level)
return res
-# %% ../nbs/src/core/models.ipynb 31
+# %% ../nbs/src/core/models.ipynb 33
class AutoETS(_TS):
"""Automatic Exponential Smoothing model.
@@ -585,10 +587,12 @@ class AutoETS(_TS):
If the component is selected as 'Z', it operates as a placeholder to ask the AutoETS model
to figure out the best parameter.
- **Note:**
+ Notes
+ -----
This implementation is a mirror of Hyndman's [forecast::ets](https://github.com/robjhyndman/forecast).
- **References:**
+ References
+ ----------
[Rob J. Hyndman, Yeasmin Khandakar (2008). "Automatic Time Series Forecasting: The forecast package for R"](https://www.jstatsoft.org/article/view/v027i03).
[Hyndman, Rob, et al (2008). "Forecasting with exponential smoothing: the state space approach"](https://robjhyndman.com/expsmooth/).
@@ -821,7 +825,7 @@ def forward(
res = _add_fitted_pi(res=res, se=se, level=level)
return res
-# %% ../nbs/src/core/models.ipynb 45
+# %% ../nbs/src/core/models.ipynb 48
class ETS(AutoETS):
@classmethod
def _warn(cls):
@@ -849,7 +853,7 @@ def __init__(
def __repr__(self):
return self.alias
-# %% ../nbs/src/core/models.ipynb 50
+# %% ../nbs/src/core/models.ipynb 53
class AutoCES(_TS):
"""Complex Exponential Smoothing model.
@@ -865,7 +869,8 @@ class AutoCES(_TS):
If the component is selected as 'Z', it operates as a placeholder to ask the AutoCES model
to figure out the best parameter.
- **References:**
+ References
+ ----------
[Svetunkov, Ivan & Kourentzes, Nikolaos. (2015). "Complex Exponential Smoothing". 10.13140/RG.2.1.3757.2562. ](https://onlinelibrary.wiley.com/doi/full/10.1002/nav.22074).
Parameters
@@ -1090,7 +1095,7 @@ def forward(
res = _add_fitted_pi(res=res, se=se, level=level)
return res
-# %% ../nbs/src/core/models.ipynb 67
+# %% ../nbs/src/core/models.ipynb 71
class AutoTheta(_TS):
"""AutoTheta model.
@@ -1098,7 +1103,8 @@ class AutoTheta(_TS):
Optimized Theta Model ('OTM'), Dynamic Standard Theta Model ('DSTM'),
Dynamic Optimized Theta Model ('DOTM')) model using mse.
- **References:**
+ References
+ ----------
[Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). "Models for optimising the theta method and their relationship to state space models". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)
Parameters
@@ -1309,13 +1315,14 @@ def forward(
res = _add_fitted_pi(res=res, se=se, level=level)
return res
-# %% ../nbs/src/core/models.ipynb 82
+# %% ../nbs/src/core/models.ipynb 87
class ARIMA(_TS):
"""ARIMA model.
AutoRegressive Integrated Moving Average model.
- **References:**
+ References
+ ----------
[Rob J. Hyndman, Yeasmin Khandakar (2008). "Automatic Time Series Forecasting: The forecast package for R"](https://www.jstatsoft.org/article/view/v027i03).
Parameters
@@ -1609,7 +1616,7 @@ def forward(
res = _add_fitted_pi(res=res, se=se, level=level)
return res
-# %% ../nbs/src/core/models.ipynb 96
+# %% ../nbs/src/core/models.ipynb 102
class AutoRegressive(ARIMA):
"""Simple Autoregressive model.
@@ -1684,7 +1691,7 @@ def __init__(
def __repr__(self):
return self.alias
-# %% ../nbs/src/core/models.ipynb 110
+# %% ../nbs/src/core/models.ipynb 117
@njit(nogil=NOGIL, cache=CACHE)
def _ses_fcst_mse(x: np.ndarray, alpha: float) -> Tuple[float, float, np.ndarray]:
"""Perform simple exponential smoothing on a series.
@@ -1770,7 +1777,7 @@ def _chunk_sums(array: np.ndarray, chunk_size: int) -> np.ndarray:
sums[i] = array[start : start + chunk_size].sum()
return sums
-# %% ../nbs/src/core/models.ipynb 111
+# %% ../nbs/src/core/models.ipynb 118
@njit(nogil=NOGIL, cache=CACHE)
def _ses(
y: np.ndarray, # time series
@@ -1785,7 +1792,7 @@ def _ses(
fcst["fitted"] = fitted_vals
return fcst
-# %% ../nbs/src/core/models.ipynb 112
+# %% ../nbs/src/core/models.ipynb 119
class SimpleExponentialSmoothing(_TS):
"""SimpleExponentialSmoothing model.
@@ -1795,7 +1802,8 @@ class SimpleExponentialSmoothing(_TS):
The rate $0 \leq \\alpha \leq 1$ at which the weights decrease is called the smoothing parameter. When $\\alpha = 1$, SES is equal to the naive method.
- **References:**
+ References
+ ----------
[Charles C Holt (1957). “Forecasting seasonals and trends by exponentially weighted moving averages”](https://doi.org/10.1016/j.ijforecast).
Parameters
@@ -1947,7 +1955,7 @@ def forecast(
raise Exception("You must pass `prediction_intervals` to " "compute them.")
return res
-# %% ../nbs/src/core/models.ipynb 123
+# %% ../nbs/src/core/models.ipynb 131
def _ses_optimized(
y: np.ndarray, # time series
h: int, # forecasting horizon
@@ -1960,7 +1968,7 @@ def _ses_optimized(
fcst["fitted"] = fitted_vals
return fcst
-# %% ../nbs/src/core/models.ipynb 124
+# %% ../nbs/src/core/models.ipynb 132
class SimpleExponentialSmoothingOptimized(_TS):
"""SimpleExponentialSmoothing model.
@@ -1970,7 +1978,8 @@ class SimpleExponentialSmoothingOptimized(_TS):
The smoothing parameter $\\alpha^*$ is optimized by square error minimization.
- **References:**
+ References
+ ----------
[Charles C Holt (1957). “Forecasting seasonals and trends by exponentially weighted moving averages”](https://doi.org/10.1016/j.ijforecast).
Parameters
@@ -2117,7 +2126,7 @@ def forecast(
raise Exception("You must pass `prediction_intervals` to compute them.")
return res
-# %% ../nbs/src/core/models.ipynb 135
+# %% ../nbs/src/core/models.ipynb 144
@njit(nogil=NOGIL, cache=CACHE)
def _seasonal_exponential_smoothing(
y: np.ndarray, # time series
@@ -2142,7 +2151,7 @@ def _seasonal_exponential_smoothing(
fcst["fitted"] = fitted_vals
return fcst
-# %% ../nbs/src/core/models.ipynb 136
+# %% ../nbs/src/core/models.ipynb 145
class SeasonalExponentialSmoothing(_TS):
"""SeasonalExponentialSmoothing model.
@@ -2151,11 +2160,13 @@ class SeasonalExponentialSmoothing(_TS):
Assuming there are $t$ observations and season $s$, the one-step forecast is given by:
$\hat{y}_{t+1,s} = \\alpha y_t + (1-\\alpha) \hat{y}_{t-1,s}$
- **Note:**
+ Notes
+ -----
This method is an extremely simplified of Holt-Winter's method where the trend and level are set to zero.
And a single seasonal smoothing parameter $\\alpha$ is shared across seasons.
- **References:**
+ References
+ ----------
[Charles. C. Holt (1957). "Forecasting seasonals and trends by exponentially weighted moving averages", ONR Research Memorandum, Carnegie Institute of Technology 52.](https://www.sciencedirect.com/science/article/abs/pii/S0169207003001134).
[Peter R. Winters (1960). "Forecasting sales by exponentially weighted moving averages". Management Science](https://pubsonline.informs.org/doi/abs/10.1287/mnsc.6.3.324).
@@ -2321,7 +2332,7 @@ def forecast(
raise Exception("You must pass `prediction_intervals` to compute them.")
return res
-# %% ../nbs/src/core/models.ipynb 150
+# %% ../nbs/src/core/models.ipynb 160
def _seasonal_ses_optimized(
y: np.ndarray, # time series
h: int, # forecasting horizon
@@ -2344,7 +2355,7 @@ def _seasonal_ses_optimized(
fcst["fitted"] = fitted_vals
return fcst
-# %% ../nbs/src/core/models.ipynb 151
+# %% ../nbs/src/core/models.ipynb 161
class SeasonalExponentialSmoothingOptimized(_TS):
def __init__(
self,
@@ -2361,11 +2372,13 @@ def __init__(
The smoothing parameter $\\alpha^*$ is optimized by square error minimization.
- **Note:**
+ Notes
+ -----
This method is an extremely simplified of Holt-Winter's method where the trend and level are set to zero.
And a single seasonal smoothing parameter $\\alpha$ is shared across seasons.
- **References:**
+ References
+ ----------
[Charles. C. Holt (1957). "Forecasting seasonals and trends by exponentially weighted moving averages", ONR Research Memorandum, Carnegie Institute of Technology 52.](https://www.sciencedirect.com/science/article/abs/pii/S0169207003001134).
[Peter R. Winters (1960). "Forecasting sales by exponentially weighted moving averages". Management Science](https://pubsonline.informs.org/doi/abs/10.1287/mnsc.6.3.324).
@@ -2519,14 +2532,15 @@ def forecast(
raise Exception("You must pass `prediction_intervals` to compute them.")
return res
-# %% ../nbs/src/core/models.ipynb 163
+# %% ../nbs/src/core/models.ipynb 174
class Holt(AutoETS):
"""Holt's method.
Also known as double exponential smoothing, Holt's method is an extension of exponential smoothing for series with a trend.
This implementation returns the corresponding `ETS` model with additive (A) or multiplicative (M) errors (so either 'AAN' or 'MAN').
- **References:**
+ References
+ ----------
[Rob J. Hyndman and George Athanasopoulos (2018). "Forecasting principles and practice, Methods with trend"](https://otexts.com/fpp3/holt.html).
Parameters
@@ -2550,7 +2564,6 @@ def __init__(
alias: str = "Holt",
prediction_intervals: Optional[ConformalIntervals] = None,
):
-
self.season_length = season_length
self.error_type = error_type
self.alias = alias
@@ -2563,14 +2576,15 @@ def __init__(
def __repr__(self):
return self.alias
-# %% ../nbs/src/core/models.ipynb 176
+# %% ../nbs/src/core/models.ipynb 188
class HoltWinters(AutoETS):
"""Holt-Winters' method.
Also known as triple exponential smoothing, Holt-Winters' method is an extension of exponential smoothing for series that contain both trend and seasonality.
This implementation returns the corresponding `ETS` model with additive (A) or multiplicative (M) errors (so either 'AAA' or 'MAM').
- **References:**
+ References
+ ----------
[Rob J. Hyndman and George Athanasopoulos (2018). "Forecasting principles and practice, Methods with seasonality"](https://otexts.com/fpp3/holt-winters.html).
Parameters
@@ -2605,7 +2619,7 @@ def __init__(
def __repr__(self):
return self.alias
-# %% ../nbs/src/core/models.ipynb 190
+# %% ../nbs/src/core/models.ipynb 203
@njit(nogil=NOGIL, cache=CACHE)
def _historic_average(
y: np.ndarray, # time series
@@ -2621,7 +2635,7 @@ def _historic_average(
fcst["fitted"] = fitted_vals
return fcst
-# %% ../nbs/src/core/models.ipynb 191
+# %% ../nbs/src/core/models.ipynb 204
class HistoricAverage(_TS):
def __init__(
self,
@@ -2634,7 +2648,8 @@ def __init__(
Assuming there are $t$ observations, the one-step forecast is given by:
$$ \hat{y}_{t+1} = \\frac{1}{t} \sum_{j=1}^t y_j $$
- **References:**
+ References
+ ----------
[Rob J. Hyndman and George Athanasopoulos (2018). "Forecasting principles and practice, Simple Methods"](https://otexts.com/fpp3/simple-methods.html).
Parameters
@@ -2749,7 +2764,6 @@ def forecast(
level: Optional[List[int]] = None,
fitted: bool = False,
):
-
"""Memory Efficient HistoricAverage predictions.
This method avoids memory burden due from object storage.
@@ -2799,7 +2813,7 @@ def forecast(
return res
-# %% ../nbs/src/core/models.ipynb 203
+# %% ../nbs/src/core/models.ipynb 217
class Naive(_TS):
def __init__(
self,
@@ -2811,7 +2825,8 @@ def __init__(
All forecasts have the value of the last observation:
$\hat{y}_{t+1} = y_t$ for all $t$
- **References:**
+ References
+ ----------
[Rob J. Hyndman and George Athanasopoulos (2018). "forecasting principles and practice, Simple Methods"](https://otexts.com/fpp3/simple-methods.html).
Parameters
@@ -3007,7 +3022,7 @@ def forward(
)
return res
-# %% ../nbs/src/core/models.ipynb 218
+# %% ../nbs/src/core/models.ipynb 233
@njit(nogil=NOGIL, cache=CACHE)
def _random_walk_with_drift(
y: np.ndarray, # time series
@@ -3027,7 +3042,7 @@ def _random_walk_with_drift(
fcst["fitted"] = fitted_vals
return fcst
-# %% ../nbs/src/core/models.ipynb 219
+# %% ../nbs/src/core/models.ipynb 234
class RandomWalkWithDrift(_TS):
def __init__(
self,
@@ -3044,7 +3059,8 @@ def __init__(
From the previous equation, we can see that this is equivalent to extrapolating a line between
the first and the last observation.
- **References:**
+ References
+ ----------
[Rob J. Hyndman and George Athanasopoulos (2018). "forecasting principles and practice, Simple Methods"](https://otexts.com/fpp3/simple-methods.html).
Parameters
@@ -3204,7 +3220,7 @@ def forecast(
return res
-# %% ../nbs/src/core/models.ipynb 233
+# %% ../nbs/src/core/models.ipynb 249
class SeasonalNaive(_TS):
def __init__(
self,
@@ -3216,7 +3232,8 @@ def __init__(
A method similar to the naive, but uses the last known observation of the same period (e.g. the same month of the previous year) in order to capture seasonal variations.
- **References:**
+ References
+ ----------
[Rob J. Hyndman and George Athanasopoulos (2018). "forecasting principles and practice, Simple Methods"](https://otexts.com/fpp3/simple-methods.html#seasonal-na%C3%AFve-method).
Parameters
@@ -3393,7 +3410,7 @@ def forecast(
return res
-# %% ../nbs/src/core/models.ipynb 247
+# %% ../nbs/src/core/models.ipynb 264
@njit(nogil=NOGIL, cache=CACHE)
def _window_average(
y: np.ndarray, # time series
@@ -3409,7 +3426,7 @@ def _window_average(
mean = _repeat_val(val=wavg, h=h)
return {"mean": mean}
-# %% ../nbs/src/core/models.ipynb 248
+# %% ../nbs/src/core/models.ipynb 265
class WindowAverage(_TS):
def __init__(
self,
@@ -3424,7 +3441,8 @@ def __init__(
The length of the window selected should take into account the importance of past
observations and how fast the series changes.
- **References:**
+ References
+ ----------
[Rob J. Hyndman and George Athanasopoulos (2018). "forecasting principles and practice, Simple Methods"](https://otexts.com/fpp3/simple-methods.html).
Parameters
@@ -3567,7 +3585,7 @@ def forecast(
raise Exception("You must pass `prediction_intervals` to " "compute them.")
return res
-# %% ../nbs/src/core/models.ipynb 259
+# %% ../nbs/src/core/models.ipynb 277
@njit(nogil=NOGIL, cache=CACHE)
def _seasonal_window_average(
y: np.ndarray,
@@ -3588,7 +3606,7 @@ def _seasonal_window_average(
out = _repeat_val_seas(season_vals=season_avgs, h=h, season_length=season_length)
return {"mean": out}
-# %% ../nbs/src/core/models.ipynb 260
+# %% ../nbs/src/core/models.ipynb 278
class SeasonalWindowAverage(_TS):
def __init__(
self,
@@ -3601,7 +3619,8 @@ def __init__(
An average of the last $k$ observations of the same period, with $k$ the length of the window.
- **References:**
+ References
+ ----------
[Rob J. Hyndman and George Athanasopoulos (2018). "forecasting principles and practice, Simple Methods"](https://otexts.com/fpp3/simple-methods.html).
Parameters
@@ -3762,7 +3781,7 @@ def forecast(
raise Exception("You must pass `prediction_intervals` to compute them.")
return res
-# %% ../nbs/src/core/models.ipynb 272
+# %% ../nbs/src/core/models.ipynb 291
def _adida(
y: np.ndarray, # time series
h: int, # forecasting horizon
@@ -3783,7 +3802,7 @@ def _adida(
mean = _repeat_val(val=forecast, h=h)
return {"mean": mean}
-# %% ../nbs/src/core/models.ipynb 273
+# %% ../nbs/src/core/models.ipynb 292
class ADIDA(_TS):
def __init__(
self,
@@ -3801,7 +3820,8 @@ def __init__(
They are notoriously hard to forecast, and so, different methods have been developed
especifically for them.
- **References:**
+ References
+ ----------
[Nikolopoulos, K., Syntetos, A. A., Boylan, J. E., Petropoulos, F., & Assimakopoulos, V. (2011). An aggregate–disaggregate intermittent demand approach (ADIDA) to forecasting: an empirical proposition and analysis. Journal of the Operational Research Society, 62(3), 544-554.](https://researchportal.bath.ac.uk/en/publications/an-aggregate-disaggregate-intermittent-demand-approach-adida-to-f).
Parameters
@@ -3945,7 +3965,7 @@ def forecast(
)
return res
-# %% ../nbs/src/core/models.ipynb 285
+# %% ../nbs/src/core/models.ipynb 305
@njit(nogil=NOGIL, cache=CACHE)
def _croston_classic(
y: np.ndarray, # time series
@@ -3967,7 +3987,7 @@ def _croston_classic(
mean = _repeat_val(val=mean, h=h)
return {"mean": mean}
-# %% ../nbs/src/core/models.ipynb 286
+# %% ../nbs/src/core/models.ipynb 306
class CrostonClassic(_TS):
def __init__(
self,
@@ -3984,7 +4004,8 @@ def __init__(
where $\hat{z}_t$ and $\hat{p}_t$ are forecasted using SES. The smoothing parameter
of both components is set equal to 0.1
- **References:**
+ References
+ ----------
[Croston, J. D. (1972). Forecasting and stock control for intermittent demands. Journal of the Operational Research Society, 23(3), 289-303.](https://link.springer.com/article/10.1057/jors.1972.50)
Parameters
@@ -4124,7 +4145,7 @@ def forecast(
)
return res
-# %% ../nbs/src/core/models.ipynb 297
+# %% ../nbs/src/core/models.ipynb 318
def _croston_optimized(
y: np.ndarray, # time series
h: int, # forecasting horizon
@@ -4145,7 +4166,7 @@ def _croston_optimized(
mean = _repeat_val(val=mean, h=h)
return {"mean": mean}
-# %% ../nbs/src/core/models.ipynb 298
+# %% ../nbs/src/core/models.ipynb 319
class CrostonOptimized(_TS):
def __init__(
self,
@@ -4163,7 +4184,8 @@ def __init__(
selected from the range $[0.1,0.3]$. Both the non-zero demand $z_t$ and the inter-demand
intervals $p_t$ are smoothed separately, so their smoothing parameters can be different.
- **References:**
+ References
+ ----------
[Croston, J. D. (1972). Forecasting and stock control for intermittent demands. Journal of the Operational Research Society, 23(3), 289-303.](https://link.springer.com/article/10.1057/jors.1972.50).
Parameters
@@ -4298,7 +4320,7 @@ def forecast(
raise Exception("You must pass `prediction_intervals` to compute them.")
return res
-# %% ../nbs/src/core/models.ipynb 309
+# %% ../nbs/src/core/models.ipynb 331
@njit(nogil=NOGIL, cache=CACHE)
def _croston_sba(
y: np.ndarray, # time series
@@ -4311,7 +4333,7 @@ def _croston_sba(
mean["mean"] *= 0.95
return mean
-# %% ../nbs/src/core/models.ipynb 310
+# %% ../nbs/src/core/models.ipynb 332
class CrostonSBA(_TS):
def __init__(
self,
@@ -4329,7 +4351,8 @@ def __init__(
forecast is given by:
$$ \hat{y}_t = 0.95 \\frac{\hat{z}_t}{\hat{p}_t} $$
- **References:**
+ References
+ ----------
[Croston, J. D. (1972). Forecasting and stock control for intermittent demands. Journal of the Operational Research Society, 23(3), 289-303.](https://link.springer.com/article/10.1057/jors.1972.50).
Parameters
@@ -4470,7 +4493,7 @@ def forecast(
)
return res
-# %% ../nbs/src/core/models.ipynb 321
+# %% ../nbs/src/core/models.ipynb 344
def _imapa(
y: np.ndarray, # time series
h: int, # forecasting horizon
@@ -4494,7 +4517,7 @@ def _imapa(
mean = _repeat_val(val=forecast, h=h)
return {"mean": mean}
-# %% ../nbs/src/core/models.ipynb 322
+# %% ../nbs/src/core/models.ipynb 345
class IMAPA(_TS):
def __init__(
self,
@@ -4508,7 +4531,8 @@ def __init__(
dynamics of the data. Uses the optimized SES to generate the forecasts at the new levels
and then combines them using a simple average.
- **References:**
+ References
+ ----------
[Syntetos, A. A., & Boylan, J. E. (2021). Intermittent demand forecasting: Context, methods and applications. John Wiley & Sons.](https://www.ifors.org/intermittent-demand-forecasting-context-methods-and-applications/).
Parameters
@@ -4650,7 +4674,7 @@ def forecast(
)
return res
-# %% ../nbs/src/core/models.ipynb 333
+# %% ../nbs/src/core/models.ipynb 357
@njit(nogil=NOGIL, cache=CACHE)
def _tsb(
y: np.ndarray, # time series
@@ -4671,7 +4695,7 @@ def _tsb(
mean = _repeat_val(val=forecast, h=h)
return {"mean": mean}
-# %% ../nbs/src/core/models.ipynb 334
+# %% ../nbs/src/core/models.ipynb 358
class TSB(_TS):
def __init__(
self,
@@ -4699,7 +4723,8 @@ def __init__(
Both $d_t$ and $z_t$ are forecasted using SES. The smooting paramaters of each may differ,
like in the optimized Croston's method.
- **References:**
+ References
+ ----------
[Teunter, R. H., Syntetos, A. A., & Babai, M. Z. (2011). Intermittent demand: Linking forecasting to inventory obsolescence. European Journal of Operational Research, 214(3), 606-615.](https://www.sciencedirect.com/science/article/abs/pii/S0377221711004437)
Parameters
@@ -4838,7 +4863,7 @@ def forecast(
raise Exception("You must pass `prediction_intervals` to compute them.")
return res
-# %% ../nbs/src/core/models.ipynb 346
+# %% ../nbs/src/core/models.ipynb 371
def _predict_mstl_seas(mstl_ob, h, season_length):
seasoncolumns = mstl_ob.filter(regex="seasonal*").columns
nseasons = len(seasoncolumns)
@@ -4855,7 +4880,7 @@ def _predict_mstl_seas(mstl_ob, h, season_length):
lastseas = seascomp.sum(axis=1)
return lastseas
-# %% ../nbs/src/core/models.ipynb 347
+# %% ../nbs/src/core/models.ipynb 372
class MSTL(_TS):
"""MSTL model.
@@ -4863,7 +4888,8 @@ class MSTL(_TS):
in multiple seasonalities using LOESS. Then forecasts the trend using
a custom non-seaonal model and each seasonality using a SeasonalNaive model.
- **References:**
+ References
+ ----------
[Bandara, Kasun & Hyndman, Rob & Bergmeir, Christoph. (2021). "MSTL: A Seasonal-Trend Decomposition Algorithm for Time Series with Multiple Seasonal Patterns".](https://arxiv.org/abs/2107.13462).
Parameters
@@ -4891,7 +4917,6 @@ def __init__(
alias: str = "MSTL",
prediction_intervals: Optional[ConformalIntervals] = None,
):
-
# check ETS model doesnt have seasonality
if repr(trend_forecaster) == "AutoETS":
if trend_forecaster.model[2] != "N":
@@ -5130,11 +5155,12 @@ def forward(
}
return res
-# %% ../nbs/src/core/models.ipynb 363
+# %% ../nbs/src/core/models.ipynb 389
class Theta(AutoTheta):
"""Standard Theta Method.
- **References:**
+ References
+ ----------
[Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). "Models for optimising the theta method and their relationship to state space models". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)
Parameters
@@ -5166,11 +5192,12 @@ def __init__(
prediction_intervals=prediction_intervals,
)
-# %% ../nbs/src/core/models.ipynb 376
+# %% ../nbs/src/core/models.ipynb 403
class OptimizedTheta(AutoTheta):
"""Optimized Theta Method.
- **References:**
+ References
+ ----------
[Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). "Models for optimising the theta method and their relationship to state space models". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)
Parameters
@@ -5202,11 +5229,12 @@ def __init__(
prediction_intervals=prediction_intervals,
)
-# %% ../nbs/src/core/models.ipynb 389
+# %% ../nbs/src/core/models.ipynb 417
class DynamicTheta(AutoTheta):
"""Dynamic Standard Theta Method.
- **References:**
+ References
+ ----------
[Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). "Models for optimising the theta method and their relationship to state space models". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)
Parameters
@@ -5238,11 +5266,12 @@ def __init__(
prediction_intervals=prediction_intervals,
)
-# %% ../nbs/src/core/models.ipynb 402
+# %% ../nbs/src/core/models.ipynb 431
class DynamicOptimizedTheta(AutoTheta):
"""Dynamic Optimized Theta Method.
- **References:**
+ References
+ ----------
[Jose A. Fiorucci, Tiago R. Pellegrini, Francisco Louzada, Fotios Petropoulos, Anne B. Koehler (2016). "Models for optimising the theta method and their relationship to state space models". International Journal of Forecasting](https://www.sciencedirect.com/science/article/pii/S0169207016300243)
Parameters
@@ -5274,7 +5303,7 @@ def __init__(
prediction_intervals=prediction_intervals,
)
-# %% ../nbs/src/core/models.ipynb 416
+# %% ../nbs/src/core/models.ipynb 446
class GARCH(_TS):
"""Generalized Autoregressive Conditional Heteroskedasticity (GARCH) model.
@@ -5295,7 +5324,8 @@ class GARCH(_TS):
The ARCH model is a particular case of the GARCH model when $q=0$.
- **References:**
+ References
+ ----------
[Engle, R. F. (1982). Autoregressive conditional heteroscedasticity with estimates of the variance of United Kingdom inflation. Econometrica: Journal of the econometric society, 987-1007.](http://www.econ.uiuc.edu/~econ508/Papers/engle82.pdf)
[Bollerslev, T. (1986). Generalized autoregressive conditional heteroskedasticity. Journal of econometrics, 31(3), 307-327.](https://citeseerx.ist.psu.edu/document?repid=rep1&type=pdf&doi=7da8bfa5295375c1141d797e80065a599153c19d)
@@ -5468,7 +5498,7 @@ def forecast(
res = _add_fitted_pi(res=res, se=se, level=level)
return res
-# %% ../nbs/src/core/models.ipynb 429
+# %% ../nbs/src/core/models.ipynb 459
class ARCH(GARCH):
"""Autoregressive Conditional Heteroskedasticity (ARCH) model.
@@ -5481,10 +5511,11 @@ class ARCH(GARCH):
$$ \sigma_t^2 = w0 + \sum_{i=1}^p a_i y_{t-i}^2$$.
- Here {$\epsilon_t$} is a sequence of iid random variables with zero mean and unit variance.
+ Here $\epsilon_t$ is a sequence of iid random variables with zero mean and unit variance.
The coefficients $w$ and $a_i$, $i=1,...,p$ must be nonnegative and $\sum_{k=1}^p a_k < 1$.
- **References:**
+ References
+ ----------
[Engle, R. F. (1982). Autoregressive conditional heteroscedasticity with estimates of the variance of United Kingdom inflation. Econometrica: Journal of the econometric society, 987-1007.](http://www.econ.uiuc.edu/~econ508/Papers/engle82.pdf)
[James D. Hamilton. Time Series Analysis Princeton University Press, Princeton, New Jersey, 1st Edition, 1994.](https://press.princeton.edu/books/hardcover/9780691042893/time-series-analysis)
@@ -5514,7 +5545,7 @@ def __init__(
def __repr__(self):
return self.alias
-# %% ../nbs/src/core/models.ipynb 440
+# %% ../nbs/src/core/models.ipynb 470
class ConstantModel(_TS):
def __init__(self, constant: float, alias: str = "ConstantModel"):
"""Constant Model.
@@ -5699,7 +5730,7 @@ def forward(
)
return res
-# %% ../nbs/src/core/models.ipynb 453
+# %% ../nbs/src/core/models.ipynb 484
class ZeroModel(ConstantModel):
def __init__(self, alias: str = "ZeroModel"):
"""Returns Zero forecasts.
@@ -5713,7 +5744,7 @@ def __init__(self, alias: str = "ZeroModel"):
"""
super().__init__(constant=0, alias=alias)
-# %% ../nbs/src/core/models.ipynb 466
+# %% ../nbs/src/core/models.ipynb 498
class NaNModel(ConstantModel):
def __init__(self, alias: str = "NaNModel"):
"""NaN Model.
diff --git a/statsforecast/utils.py b/statsforecast/utils.py
index 958477738..5c5b3c933 100644
--- a/statsforecast/utils.py
+++ b/statsforecast/utils.py
@@ -44,17 +44,29 @@ def generate_series(
If `n_static_features > 0`, then each series gets static features with random values.
If `equal_ends == True` then all series end at the same date.
- **Parameters:**
- `n_series`: int, number of series for synthetic panel.
- `min_length`: int, minimal length of synthetic panel's series.
- `max_length`: int, minimal length of synthetic panel's series.
- `n_static_features`: int, default=0, number of static exogenous variables for synthetic panel's series.
- `equal_ends`: bool, if True, series finish in the same date stamp `ds`.
- `freq`: str, frequency of the data, [panda's available frequencies](https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases).
- `engine`: str, engine to be used in DataFrame construction; NOTE: index does not exist in polars DataFrame
+ Parameters
+ ----------
+ n_series : int
+ Number of series for synthetic panel.
+ freq : str (default='D')
+ Frequency of the data, 'D' or 'M'.
+ min_length : int (default=50)
+ Minimum length of synthetic panel's series.
+ max_length : int (default=500)
+ Maximum length of synthetic panel's series.
+ n_static_features : int (default=0)
+ Number of static exogenous variables for synthetic panel's series.
+ equal_ends : bool (default=False)
+        If True, all series end at the same date stamp `ds`.\n",
+ engine : str (default='pandas')
+        Output DataFrame type ('pandas' or 'polars').\n",
+ seed : int (default=0)
+ Random seed used for generating the data.
- **Returns:**
- `freq`: pandas.DataFrame | polars.DataFrame, synthetic panel with columns [`unique_id`, `ds`, `y`] and exogenous.
+ Returns
+ -------
+ pandas or polars DataFrame
+ Synthetic panel with columns [`unique_id`, `ds`, `y`] and exogenous.
"""
return utils_generate_series(
n_series=n_series,