# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from collections import defaultdict
from copy import deepcopy
from datetime import timedelta
import logging
import warnings

import numpy as np
import pandas as pd

# fb-block 1 start
from fbprophet.models import prophet_stan_models
from fbprophet.plot import (
    plot,
    plot_components,
    plot_forecast_component,
    seasonality_plot_df,
    plot_weekly,
    plot_yearly,
    plot_seasonality,
)
# fb-block 1 end

logging.basicConfig()
logger = logging.getLogger(__name__)

warnings.filterwarnings("default", category=DeprecationWarning)

try:
    import pystan  # noqa F401
except ImportError:
    logger.error('You cannot run prophet without pystan installed')
    raise

# fb-block 2

class Prophet(object):
    """Prophet forecaster.

    Parameters
    ----------
    growth: String 'linear' or 'logistic' to specify a linear or logistic
        trend.
    changepoints: List of dates at which to include potential changepoints. If
        not specified, potential changepoints are selected automatically.
    n_changepoints: Number of potential changepoints to include. Not used
        if input `changepoints` is supplied. If `changepoints` is not supplied,
        then n_changepoints potential changepoints are selected uniformly from
        the first 80 percent of the history.
    yearly_seasonality: Fit yearly seasonality.
        Can be 'auto', True, False, or a number of Fourier terms to generate.
    weekly_seasonality: Fit weekly seasonality.
        Can be 'auto', True, False, or a number of Fourier terms to generate.
    daily_seasonality: Fit daily seasonality.
        Can be 'auto', True, False, or a number of Fourier terms to generate.
    holidays: pd.DataFrame with columns holiday (string) and ds (date type)
        and optionally columns lower_window and upper_window which specify a
        range of days around the date to be included as holidays.
        lower_window=-2 will include 2 days prior to the date as holidays.
        Also optionally can have a column prior_scale specifying the prior
        scale for that holiday.
    seasonality_prior_scale: Parameter modulating the strength of the
        seasonality model. Larger values allow the model to fit larger
        seasonal fluctuations, smaller values dampen the seasonality. Can be
        specified for individual seasonalities using add_seasonality.
    holidays_prior_scale: Parameter modulating the strength of the holiday
        components model, unless overridden in the holidays input.
    changepoint_prior_scale: Parameter modulating the flexibility of the
        automatic changepoint selection. Large values will allow many
        changepoints, small values will allow few changepoints.
    mcmc_samples: Integer, if greater than 0, will do full Bayesian inference
        with the specified number of MCMC samples. If 0, will do MAP
        estimation.
    interval_width: Float, width of the uncertainty intervals provided
        for the forecast. If mcmc_samples=0, this will be only the uncertainty
        in the trend using the MAP estimate of the extrapolated generative
        model. If mcmc_samples>0, this will be integrated over all model
        parameters, which will include uncertainty in seasonality.
    uncertainty_samples: Number of simulated draws used to estimate
        uncertainty intervals.
    """
    def __init__(
            self,
            growth='linear',
            changepoints=None,
            n_changepoints=25,
            yearly_seasonality='auto',
            weekly_seasonality='auto',
            daily_seasonality='auto',
            holidays=None,
            seasonality_prior_scale=10.0,
            holidays_prior_scale=10.0,
            changepoint_prior_scale=0.05,
            mcmc_samples=0,
            interval_width=0.80,
            uncertainty_samples=1000,
    ):
        self.growth = growth

        self.changepoints = pd.to_datetime(changepoints)
        if self.changepoints is not None:
            self.n_changepoints = len(self.changepoints)
            self.specified_changepoints = True
        else:
            self.n_changepoints = n_changepoints
            self.specified_changepoints = False

        self.yearly_seasonality = yearly_seasonality
        self.weekly_seasonality = weekly_seasonality
        self.daily_seasonality = daily_seasonality

        if holidays is not None:
            if not (
                isinstance(holidays, pd.DataFrame)
                and 'ds' in holidays  # noqa W503
                and 'holiday' in holidays  # noqa W503
            ):
                raise ValueError("holidays must be a DataFrame with 'ds' and "
                                 "'holiday' columns.")
            holidays['ds'] = pd.to_datetime(holidays['ds'])
        self.holidays = holidays

        self.seasonality_prior_scale = float(seasonality_prior_scale)
        self.changepoint_prior_scale = float(changepoint_prior_scale)
        self.holidays_prior_scale = float(holidays_prior_scale)

        self.mcmc_samples = mcmc_samples
        self.interval_width = interval_width
        self.uncertainty_samples = uncertainty_samples

        # Set during fitting
        self.start = None
        self.y_scale = None
        self.logistic_floor = False
        self.t_scale = None
        self.changepoints_t = None
        self.seasonalities = {}
        self.extra_regressors = {}
        self.stan_fit = None
        self.params = {}
        self.history = None
        self.history_dates = None
        self.validate_inputs()

    def validate_inputs(self):
        """Validates the inputs to Prophet."""
        if self.growth not in ('linear', 'logistic'):
            raise ValueError(
                "Parameter 'growth' should be 'linear' or 'logistic'.")
        if self.holidays is not None:
            has_lower = 'lower_window' in self.holidays
            has_upper = 'upper_window' in self.holidays
            if has_lower + has_upper == 1:
                raise ValueError('Holidays must have both lower_window and ' +
                                 'upper_window, or neither')
            if has_lower:
                if self.holidays['lower_window'].max() > 0:
                    raise ValueError('Holiday lower_window should be <= 0')
                if self.holidays['upper_window'].min() < 0:
                    raise ValueError('Holiday upper_window should be >= 0')
            for h in self.holidays['holiday'].unique():
                self.validate_column_name(h, check_holidays=False)

    def validate_column_name(self, name, check_holidays=True,
                             check_seasonalities=True, check_regressors=True):
        """Validates the name of a seasonality, holiday, or regressor.

        Parameters
        ----------
        name: string
        check_holidays: bool check if name already used for holiday
        check_seasonalities: bool check if name already used for seasonality
        check_regressors: bool check if name already used for regressor
        """
        if '_delim_' in name:
            raise ValueError('Name cannot contain "_delim_"')
        reserved_names = [
            'trend', 'seasonal', 'seasonalities', 'daily', 'weekly', 'yearly',
            'holidays', 'zeros', 'extra_regressors', 'yhat'
        ]
        rn_l = [n + '_lower' for n in reserved_names]
        rn_u = [n + '_upper' for n in reserved_names]
        reserved_names.extend(rn_l)
        reserved_names.extend(rn_u)
        reserved_names.extend([
            'ds', 'y', 'cap', 'floor', 'y_scaled', 'cap_scaled'])
        if name in reserved_names:
            raise ValueError('Name "{}" is reserved.'.format(name))
        if (check_holidays and self.holidays is not None and
                name in self.holidays['holiday'].unique()):
            raise ValueError(
                'Name "{}" already used for a holiday.'.format(name))
        if check_seasonalities and name in self.seasonalities:
            raise ValueError(
                'Name "{}" already used for a seasonality.'.format(name))
        if check_regressors and name in self.extra_regressors:
            raise ValueError(
                'Name "{}" already used for an added regressor.'.format(name))

    def setup_dataframe(self, df, initialize_scales=False):
        """Prepare dataframe for fitting or predicting.

        Adds a time index and scales y. Creates auxiliary columns 't', 't_ix',
        'y_scaled', and 'cap_scaled'. These columns are used during both
        fitting and predicting.

        Parameters
        ----------
        df: pd.DataFrame with columns ds, y, and cap if logistic growth. Any
            specified additional regressors must also be present.
        initialize_scales: Boolean set scaling factors in self from df.

        Returns
        -------
        pd.DataFrame prepared for fitting or predicting.
        """
        if 'y' in df:
            df['y'] = pd.to_numeric(df['y'])
            if np.isinf(df['y'].values).any():
                raise ValueError('Found infinity in column y.')
        df['ds'] = pd.to_datetime(df['ds'])
        if df['ds'].isnull().any():
            raise ValueError('Found NaN in column ds.')
        for name in self.extra_regressors:
            if name not in df:
                raise ValueError(
                    'Regressor "{}" missing from dataframe'.format(name))

        df = df.sort_values('ds')
        df.reset_index(inplace=True, drop=True)

        self.initialize_scales(initialize_scales, df)

        if self.logistic_floor:
            if 'floor' not in df:
                raise ValueError("Expected column 'floor'.")
        else:
            df['floor'] = 0
        if self.growth == 'logistic':
            assert 'cap' in df
            df['cap_scaled'] = (df['cap'] - df['floor']) / self.y_scale

        df['t'] = (df['ds'] - self.start) / self.t_scale
        if 'y' in df:
            df['y_scaled'] = (df['y'] - df['floor']) / self.y_scale

        for name, props in self.extra_regressors.items():
            df[name] = pd.to_numeric(df[name])
            df[name] = ((df[name] - props['mu']) / props['std'])
            if df[name].isnull().any():
                raise ValueError('Found NaN in column ' + name)
        return df

    def initialize_scales(self, initialize_scales, df):
        """Initialize model scales.

        Sets model scaling factors using df.

        Parameters
        ----------
        initialize_scales: Boolean set the scales or not.
        df: pd.DataFrame for setting scales.
        """
        if not initialize_scales:
            return
        if self.growth == 'logistic' and 'floor' in df:
            self.logistic_floor = True
            floor = df['floor']
        else:
            floor = 0.
        self.y_scale = (df['y'] - floor).abs().max()
        if self.y_scale == 0:
            self.y_scale = 1
        self.start = df['ds'].min()
        self.t_scale = df['ds'].max() - self.start
        for name, props in self.extra_regressors.items():
            standardize = props['standardize']
            n_vals = len(df[name].unique())
            if n_vals < 2:
                raise ValueError('Regressor {} is constant.'.format(name))
            if standardize == 'auto':
                if set(df[name].unique()) == set([1, 0]):
                    # Don't standardize binary variables.
                    standardize = False
                else:
                    standardize = True
            if standardize:
                mu = df[name].mean()
                std = df[name].std()
                self.extra_regressors[name]['mu'] = mu
                self.extra_regressors[name]['std'] = std

    def set_changepoints(self):
        """Set changepoints.

        Sets self.changepoints to the dates of changepoints. Either:
        1) The changepoints were passed in explicitly.
            A) They are empty.
            B) They are not empty, and need validation.
        2) We are generating a grid of them.
        3) The user prefers no changepoints be used.
        """
        if self.changepoints is not None:
            if len(self.changepoints) == 0:
                pass
            else:
                too_low = min(self.changepoints) < self.history['ds'].min()
                too_high = max(self.changepoints) > self.history['ds'].max()
                if too_low or too_high:
                    raise ValueError(
                        'Changepoints must fall within training data.')
        else:
            # Place potential changepoints evenly through first 80% of history
            hist_size = np.floor(self.history.shape[0] * 0.8)
            if self.n_changepoints + 1 > hist_size:
                self.n_changepoints = hist_size - 1
                logger.info(
                    'n_changepoints greater than number of observations. '
                    'Using {}.'.format(self.n_changepoints)
                )
            if self.n_changepoints > 0:
                cp_indexes = (
                    np.linspace(0, hist_size, self.n_changepoints + 1)
                    .round()
                    .astype(np.int)
                )
                self.changepoints = (
                    self.history.iloc[cp_indexes]['ds'].tail(-1)
                )
            else:
                # set empty changepoints
                self.changepoints = []
        if len(self.changepoints) > 0:
            self.changepoints_t = np.sort(np.array(
                (self.changepoints - self.start) / self.t_scale))
        else:
            self.changepoints_t = np.array([0])  # dummy changepoint

    def get_changepoint_matrix(self):
        """Gets changepoint matrix for history dataframe."""
        A = np.zeros((self.history.shape[0], len(self.changepoints_t)))
        for i, t_i in enumerate(self.changepoints_t):
            A[self.history['t'].values >= t_i, i] = 1
        return A

    @staticmethod
    def fourier_series(dates, period, series_order):
        """Provides Fourier series components with the specified frequency
        and order.

        Parameters
        ----------
        dates: pd.Series containing timestamps.
        period: Number of days of the period.
        series_order: Number of components.

        Returns
        -------
        Matrix with seasonality features.
        """
        # convert to days since epoch
        t = np.array(
            (dates - pd.datetime(1970, 1, 1))
            .dt.total_seconds()
            .astype(np.float)
        ) / (3600 * 24.)
        return np.column_stack([
            fun((2.0 * (i + 1) * np.pi * t / period))
            for i in range(series_order)
            for fun in (np.sin, np.cos)
        ])
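
    # Worked example (illustrative): with period=7 and series_order=3, each
    # timestamp t (in days since epoch) expands to the 6 columns
    #   sin(2*pi*1*t/7), cos(2*pi*1*t/7), ..., sin(2*pi*3*t/7), cos(2*pi*3*t/7)
    # so the returned matrix has shape (len(dates), 2 * series_order).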

    @classmethod
    def make_seasonality_features(cls, dates, period, series_order, prefix):
        """Data frame with seasonality features.

        Parameters
        ----------
        cls: Prophet class.
        dates: pd.Series containing timestamps.
        period: Number of days of the period.
        series_order: Number of components.
        prefix: Column name prefix.

        Returns
        -------
        pd.DataFrame with seasonality features.
        """
        features = cls.fourier_series(dates, period, series_order)
        columns = [
            '{}_delim_{}'.format(prefix, i + 1)
            for i in range(features.shape[1])
        ]
        return pd.DataFrame(features, columns=columns)

    def make_holiday_features(self, dates):
        """Construct a dataframe of holiday features.

        Parameters
        ----------
        dates: pd.Series containing timestamps used for computing seasonality.

        Returns
        -------
        holiday_features: pd.DataFrame with a column for each holiday.
        prior_scale_list: List of prior scales for each holiday column.
        """
        # Holds columns of our future matrix.
        expanded_holidays = defaultdict(lambda: np.zeros(dates.shape[0]))
        prior_scales = {}
        # Makes an index so we can perform `get_loc` below.
        # Strip to just dates.
        row_index = pd.DatetimeIndex(dates.apply(lambda x: x.date()))

        for _ix, row in self.holidays.iterrows():
            dt = row.ds.date()
            try:
                lw = int(row.get('lower_window', 0))
                uw = int(row.get('upper_window', 0))
            except ValueError:
                lw = 0
                uw = 0
            ps = float(row.get('prior_scale', self.holidays_prior_scale))
            if np.isnan(ps):
                ps = float(self.holidays_prior_scale)
            if (
                row.holiday in prior_scales and prior_scales[row.holiday] != ps
            ):
                raise ValueError(
                    'Holiday {} does not have consistent prior scale '
                    'specification.'.format(row.holiday))
            if ps <= 0:
                raise ValueError('Prior scale must be > 0')
            prior_scales[row.holiday] = ps

            for offset in range(lw, uw + 1):
                occurrence = dt + timedelta(days=offset)
                try:
                    loc = row_index.get_loc(occurrence)
                except KeyError:
                    loc = None
                key = '{}_delim_{}{}'.format(
                    row.holiday,
                    '+' if offset >= 0 else '-',
                    abs(offset)
                )
                if loc is not None:
                    expanded_holidays[key][loc] = 1.
                else:
                    # Access key to generate value
                    expanded_holidays[key]
        holiday_features = pd.DataFrame(expanded_holidays)
        prior_scale_list = [
            prior_scales[h.split('_delim_')[0]]
            for h in holiday_features.columns
        ]
        return holiday_features, prior_scale_list
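
    # Example holidays input (a sketch; the holiday name and dates are
    # illustrative). Each row expands into one feature column per day in
    # [lower_window, upper_window] around ds:
    #
    #   holidays = pd.DataFrame({
    #       'holiday': 'superbowl',
    #       'ds': pd.to_datetime(['2016-02-07', '2017-02-05']),
    #       'lower_window': 0,
    #       'upper_window': 1,
    #   })
    #   m = Prophet(holidays=holidays)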

    def add_regressor(self, name, prior_scale=None, standardize='auto'):
        """Add an additional regressor to be used for fitting and predicting.

        The dataframe passed to `fit` and `predict` will have a column with
        the specified name to be used as a regressor. When standardize='auto',
        the regressor will be standardized unless it is binary. The regression
        coefficient is given a prior with the specified scale parameter.
        Decreasing the prior scale will add additional regularization. If no
        prior scale is provided, self.holidays_prior_scale will be used.

        Parameters
        ----------
        name: string name of the regressor.
        prior_scale: optional float scale for the normal prior. If not
            provided, self.holidays_prior_scale will be used.
        standardize: optional, specify whether this regressor will be
            standardized prior to fitting. Can be 'auto' (standardize if not
            binary), True, or False.

        Returns
        -------
        The prophet object.
        """
        if self.history is not None:
            raise Exception(
                "Regressors must be added prior to model fitting.")
        self.validate_column_name(name, check_regressors=False)
        if prior_scale is None:
            prior_scale = float(self.holidays_prior_scale)
        assert prior_scale > 0
        self.extra_regressors[name] = {
            'prior_scale': prior_scale,
            'standardize': standardize,
            'mu': 0.,
            'std': 1.,
        }
        return self
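
    # Example (illustrative; the column name 'temperature' is hypothetical).
    # The dataframes passed to fit and predict must then carry the column:
    #
    #   m = Prophet()
    #   m.add_regressor('temperature', prior_scale=0.5)
    #   m.fit(df)  # df must contain a 'temperature' column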

    def add_seasonality(self, name, period, fourier_order, prior_scale=None):
        """Add a seasonal component with specified period, number of Fourier
        components, and prior scale.

        Increasing the number of Fourier components allows the seasonality to
        change more quickly (at risk of overfitting). Default values for
        yearly and weekly seasonalities are 10 and 3 respectively.

        Increasing prior scale will allow this seasonality component more
        flexibility, decreasing will dampen it. If not provided, will use the
        seasonality_prior_scale provided on Prophet initialization (defaults
        to 10).

        Parameters
        ----------
        name: string name of the seasonality component.
        period: float number of days in one period.
        fourier_order: int number of Fourier components to use.
        prior_scale: float prior scale for this component.

        Returns
        -------
        The prophet object.
        """
        if self.history is not None:
            raise Exception(
                "Seasonality must be added prior to model fitting.")
        if name not in ['daily', 'weekly', 'yearly']:
            # Allow overwriting built-in seasonalities
            self.validate_column_name(name, check_seasonalities=False)
        if prior_scale is None:
            ps = self.seasonality_prior_scale
        else:
            ps = float(prior_scale)
        if ps <= 0:
            raise ValueError('Prior scale must be > 0')
        self.seasonalities[name] = {
            'period': period,
            'fourier_order': fourier_order,
            'prior_scale': ps,
        }
        return self
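
    # Example (illustrative): a custom monthly component with 5 Fourier
    # terms, with the built-in weekly component turned off.
    #
    #   m = Prophet(weekly_seasonality=False)
    #   m.add_seasonality(name='monthly', period=30.5, fourier_order=5)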

    def make_all_seasonality_features(self, df):
        """Dataframe with seasonality features.

        Includes seasonality features, holiday features, and added regressors.

        Parameters
        ----------
        df: pd.DataFrame with dates for computing seasonality features and any
            added regressors.

        Returns
        -------
        pd.DataFrame with regression features.
        list of prior scales for each column of the features dataframe.
        """
        seasonal_features = []
        prior_scales = []

        # Seasonality features
        for name, props in self.seasonalities.items():
            features = self.make_seasonality_features(
                df['ds'],
                props['period'],
                props['fourier_order'],
                name,
            )
            seasonal_features.append(features)
            prior_scales.extend(
                [props['prior_scale']] * features.shape[1])

        # Holiday features
        if self.holidays is not None:
            features, holiday_priors = self.make_holiday_features(df['ds'])
            seasonal_features.append(features)
            prior_scales.extend(holiday_priors)

        # Additional regressors
        for name, props in self.extra_regressors.items():
            seasonal_features.append(pd.DataFrame(df[name]))
            prior_scales.append(props['prior_scale'])

        if len(seasonal_features) == 0:
            seasonal_features.append(
                pd.DataFrame({'zeros': np.zeros(df.shape[0])}))
            prior_scales.append(1.)
        return pd.concat(seasonal_features, axis=1), prior_scales

    def parse_seasonality_args(self, name, arg, auto_disable, default_order):
        """Get number of fourier components for built-in seasonalities.

        Parameters
        ----------
        name: string name of the seasonality component.
        arg: 'auto', True, False, or number of fourier components as provided.
        auto_disable: bool if seasonality should be disabled when 'auto'.
        default_order: int default fourier order

        Returns
        -------
        Number of fourier components, or 0 for disabled.
        """
        if arg == 'auto':
            fourier_order = 0
            if name in self.seasonalities:
                logger.info(
                    'Found custom seasonality named "{name}", '
                    'disabling built-in {name} seasonality.'.format(name=name)
                )
            elif auto_disable:
                logger.info(
                    'Disabling {name} seasonality. Run prophet with '
                    '{name}_seasonality=True to override this.'.format(
                        name=name)
                )
            else:
                fourier_order = default_order
        elif arg is True:
            fourier_order = default_order
        elif arg is False:
            fourier_order = 0
        else:
            fourier_order = int(arg)
        return fourier_order

    def set_auto_seasonalities(self):
        """Set seasonalities that were left on auto.

        Turns on yearly seasonality if there is >=2 years of history.
        Turns on weekly seasonality if there is >=2 weeks of history, and the
        spacing between dates in the history is <7 days.
        Turns on daily seasonality if there is >=2 days of history, and the
        spacing between dates in the history is <1 day.
        """
        first = self.history['ds'].min()
        last = self.history['ds'].max()
        dt = self.history['ds'].diff()
        min_dt = dt.iloc[dt.nonzero()[0]].min()

        # Yearly seasonality
        yearly_disable = last - first < pd.Timedelta(days=730)
        fourier_order = self.parse_seasonality_args(
            'yearly', self.yearly_seasonality, yearly_disable, 10)
        if fourier_order > 0:
            self.seasonalities['yearly'] = {
                'period': 365.25,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
            }

        # Weekly seasonality
        weekly_disable = ((last - first < pd.Timedelta(weeks=2)) or
                          (min_dt >= pd.Timedelta(weeks=1)))
        fourier_order = self.parse_seasonality_args(
            'weekly', self.weekly_seasonality, weekly_disable, 3)
        if fourier_order > 0:
            self.seasonalities['weekly'] = {
                'period': 7,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
            }

        # Daily seasonality
        daily_disable = ((last - first < pd.Timedelta(days=2)) or
                         (min_dt >= pd.Timedelta(days=1)))
        fourier_order = self.parse_seasonality_args(
            'daily', self.daily_seasonality, daily_disable, 4)
        if fourier_order > 0:
            self.seasonalities['daily'] = {
                'period': 1,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
            }

    @staticmethod
    def linear_growth_init(df):
        """Initialize linear growth.

        Provides a strong initialization for linear growth by calculating the
        growth and offset parameters that pass the function through the first
        and last points in the time series.

        Parameters
        ----------
        df: pd.DataFrame with columns ds (date), y_scaled (scaled time
            series), and t (scaled time).

        Returns
        -------
        A tuple (k, m) with the rate (k) and offset (m) of the linear growth
        function.
        """
        i0, i1 = df['ds'].idxmin(), df['ds'].idxmax()
        T = df['t'].iloc[i1] - df['t'].iloc[i0]
        k = (df['y_scaled'].iloc[i1] - df['y_scaled'].iloc[i0]) / T
        m = df['y_scaled'].iloc[i0] - k * df['t'].iloc[i0]
        return (k, m)

    @staticmethod
    def logistic_growth_init(df):
        """Initialize logistic growth.

        Provides a strong initialization for logistic growth by calculating
        the growth and offset parameters that pass the function through the
        first and last points in the time series.

        Parameters
        ----------
        df: pd.DataFrame with columns ds (date), cap_scaled (scaled capacity),
            y_scaled (scaled time series), and t (scaled time).

        Returns
        -------
        A tuple (k, m) with the rate (k) and offset (m) of the logistic growth
        function.
        """
        i0, i1 = df['ds'].idxmin(), df['ds'].idxmax()
        T = df['t'].iloc[i1] - df['t'].iloc[i0]

        # Force valid values, in case y > cap or y < 0
        C0 = df['cap_scaled'].iloc[i0]
        C1 = df['cap_scaled'].iloc[i1]
        y0 = max(0.01 * C0, min(0.99 * C0, df['y_scaled'].iloc[i0]))
        y1 = max(0.01 * C1, min(0.99 * C1, df['y_scaled'].iloc[i1]))

        r0 = C0 / y0
        r1 = C1 / y1

        if abs(r0 - r1) <= 0.01:
            r0 = 1.05 * r0

        L0 = np.log(r0 - 1)
        L1 = np.log(r1 - 1)

        # Initialize the offset
        m = L0 * T / (L0 - L1)
        # And the rate
        k = (L0 - L1) / T

        return (k, m)
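
    # Derivation sketch for the logistic initialization above: the curve is
    # y = C / (1 + exp(-k * (t - m))), so log(C / y - 1) = -k * (t - m).
    # With L0 = log(C0 / y0 - 1) at the first point (t = 0) and
    # L1 = log(C1 / y1 - 1) at the last point (t = T), solving the two
    # resulting linear equations in (k, m) gives k = (L0 - L1) / T and
    # m = L0 * T / (L0 - L1), as computed above.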

    # fb-block 7
    def fit(self, df, **kwargs):
        """Fit the Prophet model.

        This sets self.params to contain the fitted model parameters. It is a
        dictionary with parameter names as keys and the following items as
        values:
        k (Mx1 array): M posterior samples of the initial slope.
        m (Mx1 array): The initial intercept.
        delta (MxN array): The slope change at each of N changepoints.
        beta (MxK matrix): Coefficients for K seasonality features.
        sigma_obs (Mx1 array): Noise level.
        Note that M=1 if MAP estimation.

        Parameters
        ----------
        df: pd.DataFrame containing the history. Must have columns ds (date
            type) and y, the time series. If self.growth is 'logistic', then
            df must also have a column cap that specifies the capacity at
            each ds.
        kwargs: Additional arguments passed to the optimizing or sampling
            functions in Stan.

        Returns
        -------
        The fitted Prophet object.
        """
        if self.history is not None:
            raise Exception('Prophet object can only be fit once. '
                            'Instantiate a new object.')
        history = df[df['y'].notnull()].copy()
        if history.shape[0] < 2:
            raise ValueError('Dataframe has less than 2 non-NaN rows.')
        self.history_dates = pd.to_datetime(df['ds']).sort_values()

        history = self.setup_dataframe(history, initialize_scales=True)
        self.history = history
        self.set_auto_seasonalities()
        seasonal_features, prior_scales = (
            self.make_all_seasonality_features(history))

        self.set_changepoints()
        A = self.get_changepoint_matrix()

        dat = {
            'T': history.shape[0],
            'K': seasonal_features.shape[1],
            'S': len(self.changepoints_t),
            'y': history['y_scaled'],
            't': history['t'],
            'A': A,
            't_change': self.changepoints_t,
            'X': seasonal_features,
            'sigmas': prior_scales,
            'tau': self.changepoint_prior_scale,
        }

        if self.growth == 'linear':
            kinit = self.linear_growth_init(history)
        else:
            dat['cap'] = history['cap_scaled']
            kinit = self.logistic_growth_init(history)

        model = prophet_stan_models[self.growth]

        def stan_init():
            return {
                'k': kinit[0],
                'm': kinit[1],
                'delta': np.zeros(len(self.changepoints_t)),
                'beta': np.zeros(seasonal_features.shape[1]),
                'sigma_obs': 1,
            }

        if history['y'].min() == history['y'].max():
            # Nothing to fit.
            self.params = stan_init()
            self.params['sigma_obs'] = 1e-9
            for par in self.params:
                self.params[par] = np.array([self.params[par]])
        elif self.mcmc_samples > 0:
            stan_fit = model.sampling(
                dat,
                init=stan_init,
                iter=self.mcmc_samples,
                **kwargs
            )
            for par in stan_fit.model_pars:
                self.params[par] = stan_fit[par]
        else:
            try:
                params = model.optimizing(
                    dat, init=stan_init, iter=1e4, **kwargs)
            except RuntimeError:
                params = model.optimizing(
                    dat, init=stan_init, iter=1e4, algorithm='Newton',
                    **kwargs
                )
            for par in params:
                self.params[par] = params[par].reshape((1, -1))

        # If no changepoints were requested, replace delta with 0s
        if len(self.changepoints) == 0:
            # Fold delta into the base rate k
            self.params['k'] = self.params['k'] + self.params['delta']
            self.params['delta'] = np.zeros(self.params['delta'].shape)

        return self

    # fb-block 8
    def predict(self, df=None):
        """Predict using the prophet model.

        Parameters
        ----------
        df: pd.DataFrame with dates for predictions (column ds), and capacity
            (column cap) if logistic growth. If not provided, predictions are
            made on the history.

        Returns
        -------
        A pd.DataFrame with the forecast components.
        """
        if df is None:
            df = self.history.copy()
        else:
            if df.shape[0] == 0:
                raise ValueError('Dataframe has no rows.')
            df = self.setup_dataframe(df.copy())

        df['trend'] = self.predict_trend(df)
        seasonal_components = self.predict_seasonal_components(df)
        intervals = self.predict_uncertainty(df)

        # Drop columns except ds, cap, floor, and trend
        cols = ['ds', 'trend']
        if 'cap' in df:
            cols.append('cap')
        if self.logistic_floor:
            cols.append('floor')
        # Add in forecast components
        df2 = pd.concat((df[cols], intervals, seasonal_components), axis=1)
        df2['yhat'] = df2['trend'] + df2['seasonal']
        return df2

    @staticmethod
    def piecewise_linear(t, deltas, k, m, changepoint_ts):
        """Evaluate the piecewise linear function.

        Parameters
        ----------
        t: np.array of times on which the function is evaluated.
        deltas: np.array of rate changes at each changepoint.
        k: Float initial rate.
        m: Float initial offset.
        changepoint_ts: np.array of changepoint times.

        Returns
        -------
        Vector y(t).
        """
        # Intercept changes
        gammas = -changepoint_ts * deltas
        # Get cumulative slope and intercept at each t
        k_t = k * np.ones_like(t)
        m_t = m * np.ones_like(t)
        for s, t_s in enumerate(changepoint_ts):
            indx = t >= t_s
            k_t[indx] += deltas[s]
            m_t[indx] += gammas[s]
        return k_t * t + m_t
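
    # Note on the intercept changes: at changepoint t_s the slope jumps by
    # delta_s, so keeping y(t) continuous requires the offset to shift by
    # gamma_s = -t_s * delta_s, which is what the vectorized `gammas` line
    # above computes.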

    @staticmethod
    def piecewise_logistic(t, cap, deltas, k, m, changepoint_ts):
        """Evaluate the piecewise logistic function.

        Parameters
        ----------
        t: np.array of times on which the function is evaluated.
        cap: np.array of capacities at each t.
        deltas: np.array of rate changes at each changepoint.
        k: Float initial rate.
        m: Float initial offset.
        changepoint_ts: np.array of changepoint times.

        Returns
        -------
        Vector y(t).
        """
        # Compute offset changes
        k_cum = np.concatenate((np.atleast_1d(k), np.cumsum(deltas) + k))
        gammas = np.zeros(len(changepoint_ts))
        for i, t_s in enumerate(changepoint_ts):
            gammas[i] = (
                (t_s - m - np.sum(gammas))
                * (1 - k_cum[i] / k_cum[i + 1])  # noqa W503
            )
        # Get cumulative rate and offset at each t
        k_t = k * np.ones_like(t)
        m_t = m * np.ones_like(t)
        for s, t_s in enumerate(changepoint_ts):
            indx = t >= t_s
            k_t[indx] += deltas[s]
            m_t[indx] += gammas[s]
        return cap / (1 + np.exp(-k_t * (t - m_t)))

    def predict_trend(self, df):
        """Predict trend using the prophet model.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Vector with trend on prediction dates.
        """
        k = np.nanmean(self.params['k'])
        m = np.nanmean(self.params['m'])
        deltas = np.nanmean(self.params['delta'], axis=0)

        t = np.array(df['t'])
        if self.growth == 'linear':
            trend = self.piecewise_linear(t, deltas, k, m, self.changepoints_t)
        else:
            cap = df['cap_scaled']
            trend = self.piecewise_logistic(
                t, cap, deltas, k, m, self.changepoints_t)

        return trend * self.y_scale + df['floor']

    def predict_seasonal_components(self, df):
        """Predict seasonality components, holidays, and added regressors.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Dataframe with seasonal components.
        """
        seasonal_features, _ = self.make_all_seasonality_features(df)
        lower_p = 100 * (1.0 - self.interval_width) / 2
        upper_p = 100 * (1.0 + self.interval_width) / 2

        components = pd.DataFrame({
            'col': np.arange(seasonal_features.shape[1]),
            'component': [
                x.split('_delim_')[0] for x in seasonal_features.columns
            ],
        })
        # Add total for all regression components
        components = components.append(pd.DataFrame({
            'col': np.arange(seasonal_features.shape[1]),
            'component': 'seasonal',
        }))
        # Add totals for seasonality, holiday, and extra regressors
        components = self.add_group_component(
            components, 'seasonalities', self.seasonalities.keys())
        if self.holidays is not None:
            components = self.add_group_component(
                components, 'holidays', self.holidays['holiday'].unique())
        components = self.add_group_component(
            components, 'extra_regressors', self.extra_regressors.keys())
        # Remove the placeholder
        components = components[components['component'] != 'zeros']

        X = seasonal_features.as_matrix()
        data = {}
        for component, features in components.groupby('component'):
            cols = features.col.tolist()
            comp_beta = self.params['beta'][:, cols]
            comp_features = X[:, cols]
            comp = (
                np.matmul(comp_features, comp_beta.transpose())
                * self.y_scale  # noqa W503
            )
            data[component] = np.nanmean(comp, axis=1)
            data[component + '_lower'] = np.nanpercentile(comp, lower_p,
                                                          axis=1)
            data[component + '_upper'] = np.nanpercentile(comp, upper_p,
                                                          axis=1)
        return pd.DataFrame(data)

    def add_group_component(self, components, name, group):
        """Adds a component with given name that contains all of the
        components in group.

        Parameters
        ----------
        components: Dataframe with components.
        name: Name of new group component.
        group: List of components that form the group.

        Returns
        -------
        Dataframe with components.
        """
        new_comp = components[components['component'].isin(set(group))].copy()
        new_comp['component'] = name
        components = components.append(new_comp)
        return components

    def sample_posterior_predictive(self, df):
        """Prophet posterior predictive samples.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Dictionary with posterior predictive samples for each component.
        """
        n_iterations = self.params['k'].shape[0]
        samp_per_iter = max(1, int(np.ceil(
            self.uncertainty_samples / float(n_iterations)
        )))

        # Generate seasonality features once so we can re-use them.
        seasonal_features, _ = self.make_all_seasonality_features(df)

        sim_values = {'yhat': [], 'trend': [], 'seasonal': []}
        for i in range(n_iterations):
            for _j in range(samp_per_iter):
                sim = self.sample_model(df, seasonal_features, i)
                for key in sim_values:
                    sim_values[key].append(sim[key])
        for k, v in sim_values.items():
            sim_values[k] = np.column_stack(v)
        return sim_values

    def predictive_samples(self, df):
        """Sample from the posterior predictive distribution.

        Parameters
        ----------
        df: Dataframe with dates for predictions (column ds), and capacity
            (column cap) if logistic growth.

        Returns
        -------
        Dictionary with keys "trend", "seasonal", and "yhat" containing
        posterior predictive samples for that component. "seasonal" is the sum
        of seasonalities, holidays, and added regressors.
        """
        df = self.setup_dataframe(df.copy())
        sim_values = self.sample_posterior_predictive(df)
        return sim_values

    def predict_uncertainty(self, df):
        """Prediction intervals for yhat and trend.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Dataframe with uncertainty intervals.
        """
        sim_values = self.sample_posterior_predictive(df)

        lower_p = 100 * (1.0 - self.interval_width) / 2
        upper_p = 100 * (1.0 + self.interval_width) / 2

        series = {}
        for key in ['yhat', 'trend']:
            series['{}_lower'.format(key)] = np.nanpercentile(
                sim_values[key], lower_p, axis=1)
            series['{}_upper'.format(key)] = np.nanpercentile(
                sim_values[key], upper_p, axis=1)
        return pd.DataFrame(series)

    def sample_model(self, df, seasonal_features, iteration):
        """Simulate observations from the extrapolated generative model.

        Parameters
        ----------
        df: Prediction dataframe.
        seasonal_features: pd.DataFrame of seasonal features.
        iteration: Int sampling iteration to use parameters from.

        Returns
        -------
        Dataframe with trend, seasonality, and yhat, each like df['t'].
        """
        trend = self.sample_predictive_trend(df, iteration)

        beta = self.params['beta'][iteration]
        seasonal = np.matmul(seasonal_features.as_matrix(), beta) * self.y_scale

        sigma = self.params['sigma_obs'][iteration]
        noise = np.random.normal(0, sigma, df.shape[0]) * self.y_scale

        return pd.DataFrame({
            'yhat': trend + seasonal + noise,
            'trend': trend,
            'seasonal': seasonal,
        })

    def sample_predictive_trend(self, df, iteration):
        """Simulate the trend using the extrapolated generative model.

        Parameters
        ----------
        df: Prediction dataframe.
        iteration: Int sampling iteration to use parameters from.

        Returns
        -------
        np.array of simulated trend over df['t'].
        """
        k = self.params['k'][iteration]
        m = self.params['m'][iteration]
        deltas = self.params['delta'][iteration]

        t = np.array(df['t'])
        T = t.max()

        if T > 1:
            # Get the time discretization of the history
            dt = np.diff(self.history['t'])
            dt = np.min(dt[dt > 0])
            # Number of time periods in the future
            N = np.ceil((T - 1) / float(dt))
            S = len(self.changepoints_t)

            prob_change = min(1, (S * (T - 1)) / N)
            n_changes = np.random.binomial(N, prob_change)

            # Sample ts
            changepoint_ts_new = sorted(np.random.uniform(1, T, n_changes))
        else:
            # Case where we're not extrapolating.
            changepoint_ts_new = []
            n_changes = 0

        # Get the empirical scale of the deltas, plus epsilon to avoid NaNs.
        lambda_ = np.mean(np.abs(deltas)) + 1e-8

        # Sample deltas
        deltas_new = np.random.laplace(0, lambda_, n_changes)

        # Prepend the times and deltas from the history
        changepoint_ts = np.concatenate((self.changepoints_t,
                                         changepoint_ts_new))
        deltas = np.concatenate((deltas, deltas_new))

        if self.growth == 'linear':
            trend = self.piecewise_linear(t, deltas, k, m, changepoint_ts)
        else:
            cap = df['cap_scaled']
            trend = self.piecewise_logistic(t, cap, deltas, k, m,
                                            changepoint_ts)

        return trend * self.y_scale + df['floor']

    def make_future_dataframe(self, periods, freq='D', include_history=True):
        """Make a dataframe with future dates for forecasting.

        Parameters
        ----------
        periods: Int number of periods to forecast forward.
        freq: Any valid frequency for pd.date_range, such as 'D' or 'M'.
        include_history: Boolean to include the historical dates in the data
            frame for predictions.

        Returns
        -------
        pd.Dataframe that extends forward from the end of self.history for the
        requested number of periods.
        """
        if self.history_dates is None:
            raise Exception('Model must be fit before this can be used.')
        last_date = self.history_dates.max()
        dates = pd.date_range(
            start=last_date,
            periods=periods + 1,  # An extra in case we include start
            freq=freq)
        dates = dates[dates > last_date]  # Drop start if equals last_date
        dates = dates[:periods]  # Return correct number of periods

        if include_history:
            dates = np.concatenate((np.array(self.history_dates), dates))

        return pd.DataFrame({'ds': dates})
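
    # Example (illustrative): extend 24 hourly periods past the end of the
    # fitted history, then forecast over them.
    #
    #   future = m.make_future_dataframe(periods=24, freq='H')
    #   forecast = m.predict(future)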

    def plot(self, fcst, ax=None, uncertainty=True, plot_cap=True, xlabel='ds',
             ylabel='y'):
        """Plot the Prophet forecast.

        Parameters
        ----------
        fcst: pd.DataFrame output of self.predict.
        ax: Optional matplotlib axes on which to plot.
        uncertainty: Optional boolean to plot uncertainty intervals.
        plot_cap: Optional boolean indicating if the capacity should be shown
            in the figure, if available.
        xlabel: Optional label name on X-axis
        ylabel: Optional label name on Y-axis

        Returns
        -------
        A matplotlib figure.
        """
        return plot(
            m=self, fcst=fcst, ax=ax, uncertainty=uncertainty,
            plot_cap=plot_cap, xlabel=xlabel, ylabel=ylabel,
        )

    def plot_components(self, fcst, uncertainty=True, plot_cap=True,
                        weekly_start=0, yearly_start=0):
        """Plot the Prophet forecast components.

        Will plot whichever are available of: trend, holidays, weekly
        seasonality, and yearly seasonality.

        Parameters
        ----------
        fcst: pd.DataFrame output of self.predict.
        uncertainty: Optional boolean to plot uncertainty intervals.
        plot_cap: Optional boolean indicating if the capacity should be shown
            in the figure, if available.
        weekly_start: Optional int specifying the start day of the weekly
            seasonality plot. 0 (default) starts the week on Sunday. 1 shifts
            by 1 day to Monday, and so on.
        yearly_start: Optional int specifying the start day of the yearly
            seasonality plot. 0 (default) starts the year on Jan 1. 1 shifts
            by 1 day to Jan 2, and so on.

        Returns
        -------
        A matplotlib figure.
        """
        return plot_components(
            m=self, fcst=fcst, uncertainty=uncertainty, plot_cap=plot_cap,
            weekly_start=weekly_start, yearly_start=yearly_start,
        )

    def plot_forecast_component(
            self, fcst, name, ax=None, uncertainty=True, plot_cap=False):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.plot_forecast_component. ',
            DeprecationWarning,
        )
        return plot_forecast_component(
            self, fcst=fcst, name=name, ax=ax, uncertainty=uncertainty,
            plot_cap=plot_cap,
        )

    def seasonality_plot_df(self, ds):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.seasonality_plot_df. ',
            DeprecationWarning,
        )
        return seasonality_plot_df(self, ds=ds)

    def plot_weekly(self, ax=None, uncertainty=True, weekly_start=0):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.plot_weekly. ',
            DeprecationWarning,
        )
        return plot_weekly(
            self, ax=ax, uncertainty=uncertainty, weekly_start=weekly_start,
        )

    def plot_yearly(self, ax=None, uncertainty=True, yearly_start=0):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.plot_yearly. ',
            DeprecationWarning,
        )
        return plot_yearly(
            self, ax=ax, uncertainty=uncertainty, yearly_start=yearly_start,
        )

    def plot_seasonality(self, name, ax=None, uncertainty=True):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.plot_seasonality. ',
            DeprecationWarning,
        )
        return plot_seasonality(
            self, name=name, ax=ax, uncertainty=uncertainty,
        )

    def copy(self, cutoff=None):
        """Copy Prophet object.

        Parameters
        ----------
        cutoff: pd.Timestamp or None, default None.
            Cutoff timestamp for the changepoints member variable.
            Changepoints are only retained if changepoints <= cutoff.

        Returns
        -------
        Prophet class object with the same parameters as this model.
        """
        if self.history is None:
            raise Exception('This is for copying a fitted Prophet object.')

        if self.specified_changepoints:
            changepoints = self.changepoints
            if cutoff is not None:
                # Filter change points '<= cutoff'
                changepoints = changepoints[changepoints <= cutoff]
        else:
            changepoints = None

        # Auto seasonalities are set to False because they are already set in
        # self.seasonalities.
        m = Prophet(
            growth=self.growth,
            n_changepoints=self.n_changepoints,
            changepoints=changepoints,
            yearly_seasonality=False,
            weekly_seasonality=False,
            daily_seasonality=False,
            holidays=self.holidays,
            seasonality_prior_scale=self.seasonality_prior_scale,
            changepoint_prior_scale=self.changepoint_prior_scale,
            holidays_prior_scale=self.holidays_prior_scale,
            mcmc_samples=self.mcmc_samples,
            interval_width=self.interval_width,
            uncertainty_samples=self.uncertainty_samples,
        )
        m.extra_regressors = deepcopy(self.extra_regressors)
        m.seasonalities = deepcopy(self.seasonalities)
        return m