forecaster.py

# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from collections import defaultdict
from datetime import timedelta
import logging
import warnings

import numpy as np
import pandas as pd

from fbprophet.diagnostics import prophet_copy
from fbprophet.models import prophet_stan_model
from fbprophet.make_holidays import get_holiday_names, make_holidays_df
from fbprophet.plot import (
    plot,
    plot_components,
    plot_forecast_component,
    seasonality_plot_df,
    plot_weekly,
    plot_yearly,
    plot_seasonality,
)

logging.basicConfig()
logger = logging.getLogger(__name__)

warnings.filterwarnings("default", category=DeprecationWarning)

try:
    import pystan  # noqa F401
except ImportError:
    logger.exception('You cannot run fbprophet without pystan installed')


class Prophet(object):
    """Prophet forecaster.

    Parameters
    ----------
    growth: String 'linear' or 'logistic' to specify a linear or logistic
        trend.
    changepoints: List of dates at which to include potential changepoints. If
        not specified, potential changepoints are selected automatically.
    n_changepoints: Number of potential changepoints to include. Not used
        if input `changepoints` is supplied. If `changepoints` is not supplied,
        then n_changepoints potential changepoints are selected uniformly from
        the first `changepoint_range` proportion of the history.
    changepoint_range: Proportion of history in which trend changepoints will
        be estimated. Defaults to 0.8 for the first 80%. Not used if
        `changepoints` is specified.
    yearly_seasonality: Fit yearly seasonality.
        Can be 'auto', True, False, or a number of Fourier terms to generate.
    weekly_seasonality: Fit weekly seasonality.
        Can be 'auto', True, False, or a number of Fourier terms to generate.
    daily_seasonality: Fit daily seasonality.
        Can be 'auto', True, False, or a number of Fourier terms to generate.
    holidays: pd.DataFrame with columns holiday (string) and ds (date type)
        and optionally columns lower_window and upper_window which specify a
        range of days around the date to be included as holidays.
        lower_window=-2 will include 2 days prior to the date as holidays. Also
        optionally can have a column prior_scale specifying the prior scale for
        that holiday.
    append_holidays: Country name or abbreviation; must be a string.
    seasonality_mode: 'additive' (default) or 'multiplicative'.
    seasonality_prior_scale: Parameter modulating the strength of the
        seasonality model. Larger values allow the model to fit larger seasonal
        fluctuations, smaller values dampen the seasonality. Can be specified
        for individual seasonalities using add_seasonality.
    holidays_prior_scale: Parameter modulating the strength of the holiday
        components model, unless overridden in the holidays input.
    changepoint_prior_scale: Parameter modulating the flexibility of the
        automatic changepoint selection. Large values will allow many
        changepoints, small values will allow few changepoints.
    mcmc_samples: Integer, if greater than 0, will do full Bayesian inference
        with the specified number of MCMC samples. If 0, will do MAP
        estimation.
    interval_width: Float, width of the uncertainty intervals provided
        for the forecast. If mcmc_samples=0, this will be only the uncertainty
        in the trend using the MAP estimate of the extrapolated generative
        model. If mcmc_samples>0, this will be integrated over all model
        parameters, which will include uncertainty in seasonality.
    uncertainty_samples: Number of simulated draws used to estimate
        uncertainty intervals.
    """

    def __init__(
            self,
            growth='linear',
            changepoints=None,
            n_changepoints=25,
            changepoint_range=0.8,
            yearly_seasonality='auto',
            weekly_seasonality='auto',
            daily_seasonality='auto',
            holidays=None,
            append_holidays=None,
            seasonality_mode='additive',
            seasonality_prior_scale=10.0,
            holidays_prior_scale=10.0,
            changepoint_prior_scale=0.05,
            mcmc_samples=0,
            interval_width=0.80,
            uncertainty_samples=1000,
    ):
        self.growth = growth

        self.changepoints = pd.to_datetime(changepoints)
        if self.changepoints is not None:
            self.n_changepoints = len(self.changepoints)
            self.specified_changepoints = True
        else:
            self.n_changepoints = n_changepoints
            self.specified_changepoints = False

        self.changepoint_range = changepoint_range
        self.yearly_seasonality = yearly_seasonality
        self.weekly_seasonality = weekly_seasonality
        self.daily_seasonality = daily_seasonality

        if holidays is not None:
            if not (
                isinstance(holidays, pd.DataFrame)
                and 'ds' in holidays  # noqa W503
                and 'holiday' in holidays  # noqa W503
            ):
                raise ValueError("holidays must be a DataFrame with 'ds' and "
                                 "'holiday' columns.")
            holidays['ds'] = pd.to_datetime(holidays['ds'])
        self.holidays = holidays

        if append_holidays is not None:
            if not isinstance(append_holidays, str):
                raise ValueError("append_holidays must be a string")
        self.append_holidays = append_holidays

        self.seasonality_mode = seasonality_mode
        self.seasonality_prior_scale = float(seasonality_prior_scale)
        self.changepoint_prior_scale = float(changepoint_prior_scale)
        self.holidays_prior_scale = float(holidays_prior_scale)

        self.mcmc_samples = mcmc_samples
        self.interval_width = interval_width
        self.uncertainty_samples = uncertainty_samples

        # Set during fitting
        self.start = None
        self.y_scale = None
        self.logistic_floor = False
        self.t_scale = None
        self.changepoints_t = None
        self.seasonalities = {}
        self.extra_regressors = {}
        self.stan_fit = None
        self.params = {}
        self.history = None
        self.history_dates = None
        self.train_component_cols = None
        self.component_modes = None
        self.train_holiday_names = None
        self.validate_inputs()

    def validate_inputs(self):
        """Validates the inputs to Prophet."""
        if self.growth not in ('linear', 'logistic'):
            raise ValueError(
                "Parameter 'growth' should be 'linear' or 'logistic'.")
        if ((self.changepoint_range < 0) or (self.changepoint_range > 1)):
            raise ValueError("Parameter 'changepoint_range' must be in [0, 1]")
        if self.holidays is not None:
            has_lower = 'lower_window' in self.holidays
            has_upper = 'upper_window' in self.holidays
            if has_lower + has_upper == 1:
                raise ValueError('Holidays must have both lower_window and ' +
                                 'upper_window, or neither')
            if has_lower:
                if self.holidays['lower_window'].max() > 0:
                    raise ValueError('Holiday lower_window should be <= 0')
                if self.holidays['upper_window'].min() < 0:
                    raise ValueError('Holiday upper_window should be >= 0')
            for h in self.holidays['holiday'].unique():
                self.validate_column_name(h, check_holidays=False)
        if self.seasonality_mode not in ['additive', 'multiplicative']:
            raise ValueError(
                "seasonality_mode must be 'additive' or 'multiplicative'"
            )

    def validate_column_name(self, name, check_holidays=True,
                             check_seasonalities=True, check_regressors=True):
        """Validates the name of a seasonality, holiday, or regressor.

        Parameters
        ----------
        name: string
        check_holidays: bool check if name already used for holiday
        check_seasonalities: bool check if name already used for seasonality
        check_regressors: bool check if name already used for regressor
        """
        if '_delim_' in name:
            raise ValueError('Name cannot contain "_delim_"')
        reserved_names = [
            'trend', 'additive_terms', 'daily', 'weekly', 'yearly',
            'holidays', 'zeros', 'extra_regressors_additive', 'yhat',
            'extra_regressors_multiplicative', 'multiplicative_terms',
        ]
        rn_l = [n + '_lower' for n in reserved_names]
        rn_u = [n + '_upper' for n in reserved_names]
        reserved_names.extend(rn_l)
        reserved_names.extend(rn_u)
        reserved_names.extend([
            'ds', 'y', 'cap', 'floor', 'y_scaled', 'cap_scaled'])
        if name in reserved_names:
            raise ValueError('Name "{}" is reserved.'.format(name))
        if (check_holidays and self.holidays is not None and
                name in self.holidays['holiday'].unique()):
            raise ValueError(
                'Name "{}" already used for a holiday.'.format(name))
        if (check_holidays and self.append_holidays is not None and
                name in get_holiday_names(self.append_holidays)):
            raise ValueError(
                'Name "{}" is a holiday name in {}.'.format(
                    name, self.append_holidays))
        if check_seasonalities and name in self.seasonalities:
            raise ValueError(
                'Name "{}" already used for a seasonality.'.format(name))
        if check_regressors and name in self.extra_regressors:
            raise ValueError(
                'Name "{}" already used for an added regressor.'.format(name))

    def setup_dataframe(self, df, initialize_scales=False):
        """Prepare dataframe for fitting or predicting.

        Adds a time index and scales y. Creates auxiliary columns 't',
        'y_scaled', and 'cap_scaled'. These columns are used during both
        fitting and predicting.

        Parameters
        ----------
        df: pd.DataFrame with columns ds, y, and cap if logistic growth. Any
            specified additional regressors must also be present.
        initialize_scales: Boolean set scaling factors in self from df.

        Returns
        -------
        pd.DataFrame prepared for fitting or predicting.
        """
        if 'y' in df:
            df['y'] = pd.to_numeric(df['y'])
            if np.isinf(df['y'].values).any():
                raise ValueError('Found infinity in column y.')
        df['ds'] = pd.to_datetime(df['ds'])
        if df['ds'].isnull().any():
            raise ValueError('Found NaN in column ds.')
        for name in self.extra_regressors:
            if name not in df:
                raise ValueError(
                    'Regressor "{}" missing from dataframe'.format(name))

        df = df.sort_values('ds')
        df.reset_index(inplace=True, drop=True)

        self.initialize_scales(initialize_scales, df)

        if self.logistic_floor:
            if 'floor' not in df:
                raise ValueError("Expected column 'floor'.")
        else:
            df['floor'] = 0
        if self.growth == 'logistic':
            if 'cap' not in df:
                raise ValueError(
                    "Capacities must be supplied for logistic growth in "
                    "column 'cap'"
                )
            df['cap_scaled'] = (df['cap'] - df['floor']) / self.y_scale

        df['t'] = (df['ds'] - self.start) / self.t_scale
        if 'y' in df:
            df['y_scaled'] = (df['y'] - df['floor']) / self.y_scale

        for name, props in self.extra_regressors.items():
            df[name] = pd.to_numeric(df[name])
            df[name] = ((df[name] - props['mu']) / props['std'])
            if df[name].isnull().any():
                raise ValueError('Found NaN in column ' + name)
        return df

    def initialize_scales(self, initialize_scales, df):
        """Initialize model scales.

        Sets model scaling factors using df.

        Parameters
        ----------
        initialize_scales: Boolean set the scales or not.
        df: pd.DataFrame for setting scales.
        """
        if not initialize_scales:
            return
        if self.growth == 'logistic' and 'floor' in df:
            self.logistic_floor = True
            floor = df['floor']
        else:
            floor = 0.
        self.y_scale = (df['y'] - floor).abs().max()
        if self.y_scale == 0:
            self.y_scale = 1
        self.start = df['ds'].min()
        self.t_scale = df['ds'].max() - self.start
        for name, props in self.extra_regressors.items():
            standardize = props['standardize']
            n_vals = len(df[name].unique())
            if n_vals < 2:
                standardize = False
            if standardize == 'auto':
                if set(df[name].unique()) == set([1, 0]):
                    # Don't standardize binary variables.
                    standardize = False
                else:
                    standardize = True
            if standardize:
                mu = df[name].mean()
                std = df[name].std()
                self.extra_regressors[name]['mu'] = mu
                self.extra_regressors[name]['std'] = std

    def set_changepoints(self):
        """Set changepoints.

        Sets self.changepoints to the dates of changepoints. Either:
        1) The changepoints were passed in explicitly.
            A) They are empty.
            B) They are not empty, and need validation.
        2) We are generating a grid of them.
        3) The user prefers no changepoints be used.
        """
        if self.changepoints is not None:
            if len(self.changepoints) == 0:
                pass
            else:
                too_low = min(self.changepoints) < self.history['ds'].min()
                too_high = max(self.changepoints) > self.history['ds'].max()
                if too_low or too_high:
                    raise ValueError(
                        'Changepoints must fall within training data.')
        else:
            # Place potential changepoints evenly through first
            # changepoint_range proportion of the history
            hist_size = np.floor(
                self.history.shape[0] * self.changepoint_range)
            if self.n_changepoints + 1 > hist_size:
                self.n_changepoints = hist_size - 1
                logger.info(
                    'n_changepoints greater than number of observations. '
                    'Using {}.'.format(self.n_changepoints)
                )
            if self.n_changepoints > 0:
                cp_indexes = (
                    np.linspace(0, hist_size - 1, self.n_changepoints + 1)
                    .round()
                    .astype(np.int)
                )
                self.changepoints = (
                    self.history.iloc[cp_indexes]['ds'].tail(-1)
                )
            else:
                # set empty changepoints
                self.changepoints = []
        if len(self.changepoints) > 0:
            self.changepoints_t = np.sort(np.array(
                (self.changepoints - self.start) / self.t_scale))
        else:
            self.changepoints_t = np.array([0])  # dummy changepoint

    @staticmethod
    def fourier_series(dates, period, series_order):
        """Provides Fourier series components with the specified frequency
        and order.

        Parameters
        ----------
        dates: pd.Series containing timestamps.
        period: Number of days of the period.
        series_order: Number of components.

        Returns
        -------
        Matrix with seasonality features.
        """
        # convert to days since epoch
        t = np.array(
            (dates - pd.datetime(1970, 1, 1))
            .dt.total_seconds()
            .astype(np.float)
        ) / (3600 * 24.)
        return np.column_stack([
            fun((2.0 * (i + 1) * np.pi * t / period))
            for i in range(series_order)
            for fun in (np.sin, np.cos)
        ])
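
    # Example (descriptive note, not from the original source): for a weekly
    # period with series_order=3, fourier_series(dates, 7, 3) returns an
    # (n, 6) matrix whose columns are sin(2*pi*k*t/7) and cos(2*pi*k*t/7) for
    # k = 1, 2, 3, with t measured in days since 1970-01-01.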

    @classmethod
    def make_seasonality_features(cls, dates, period, series_order, prefix):
        """Data frame with seasonality features.

        Parameters
        ----------
        cls: Prophet class.
        dates: pd.Series containing timestamps.
        period: Number of days of the period.
        series_order: Number of components.
        prefix: Column name prefix.

        Returns
        -------
        pd.DataFrame with seasonality features.
        """
        features = cls.fourier_series(dates, period, series_order)
        columns = [
            '{}_delim_{}'.format(prefix, i + 1)
            for i in range(features.shape[1])
        ]
        return pd.DataFrame(features, columns=columns)

    def make_holiday_features(self, dates):
        """Construct a dataframe of holiday features.

        Parameters
        ----------
        dates: pd.Series containing timestamps used for computing seasonality.

        Returns
        -------
        holiday_features: pd.DataFrame with a column for each holiday.
        prior_scale_list: List of prior scales for each holiday column.
        holiday_names: List of names of holidays.
        """
        # Concatenate holidays and append_holidays
        all_holidays = self.holidays
        if self.append_holidays is not None:
            year_list = list({x.year for x in dates})
            append_holidays_df = make_holidays_df(
                year_list=year_list,
                country=self.append_holidays)
            all_holidays = pd.concat((all_holidays, append_holidays_df),
                                     sort=False)
            all_holidays.reset_index(drop=True, inplace=True)
        # Make fit and predict holidays components match
        if self.train_holiday_names is not None:
            train_holidays = self.train_holiday_names
            # Remove holiday names that didn't show up in fit
            index_to_drop = all_holidays.index[
                np.logical_not(
                    all_holidays.holiday.isin(train_holidays))]
            all_holidays = all_holidays.drop(index_to_drop)
            # Add holiday names that show up in fit but not in predict,
            # with ds as NA
            holidays_to_add = pd.DataFrame(
                {'holiday': train_holidays[
                    np.logical_not(
                        train_holidays.isin(all_holidays.holiday))]})
            all_holidays = pd.concat((all_holidays, holidays_to_add),
                                     sort=False)
            all_holidays.reset_index(drop=True, inplace=True)

        # Holds columns of our future matrix.
        expanded_holidays = defaultdict(lambda: np.zeros(dates.shape[0]))
        prior_scales = {}
        # Makes an index so we can perform `get_loc` below.
        # Strip to just dates.
        row_index = pd.DatetimeIndex(dates.apply(lambda x: x.date()))

        for _ix, row in all_holidays.iterrows():
            dt = row.ds.date()
            try:
                lw = int(row.get('lower_window', 0))
                uw = int(row.get('upper_window', 0))
            except ValueError:
                lw = 0
                uw = 0
            ps = float(row.get('prior_scale', self.holidays_prior_scale))
            if np.isnan(ps):
                ps = float(self.holidays_prior_scale)
            if (
                row.holiday in prior_scales and prior_scales[row.holiday] != ps
            ):
                raise ValueError(
                    'Holiday {} does not have consistent prior scale '
                    'specification.'.format(row.holiday))
            if ps <= 0:
                raise ValueError('Prior scale must be > 0')
            prior_scales[row.holiday] = ps

            for offset in range(lw, uw + 1):
                occurrence = dt + timedelta(days=offset)
                try:
                    loc = row_index.get_loc(occurrence)
                except KeyError:
                    loc = None
                key = '{}_delim_{}{}'.format(
                    row.holiday,
                    '+' if offset >= 0 else '-',
                    abs(offset)
                )
                if loc is not None:
                    expanded_holidays[key][loc] = 1.
                else:
                    # Access key to generate value
                    expanded_holidays[key]
        holiday_features = pd.DataFrame(expanded_holidays)
        # Make sure fit and predict component_cols perfectly equal
        holiday_features = holiday_features[sorted(
            holiday_features.columns.tolist())]
        prior_scale_list = [
            prior_scales[h.split('_delim_')[0]]
            for h in holiday_features.columns
        ]
        holiday_names = list(prior_scales.keys())
        # Store holiday names used in fit
        if self.train_holiday_names is None:
            self.train_holiday_names = pd.Series(holiday_names)
        return holiday_features, prior_scale_list, holiday_names
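
    # Note (descriptive): holiday feature columns are named
    # '<holiday>_delim_+<offset>' or '<holiday>_delim_-<offset>'; for example,
    # a holiday named 'playoff' with upper_window=1 yields the columns
    # 'playoff_delim_+0' and 'playoff_delim_+1'.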

    def add_regressor(
            self, name, prior_scale=None, standardize='auto', mode=None
    ):
        """Add an additional regressor to be used for fitting and predicting.

        The dataframe passed to `fit` and `predict` will have a column with
        the specified name to be used as a regressor. When standardize='auto',
        the regressor will be standardized unless it is binary. The regression
        coefficient is given a prior with the specified scale parameter.
        Decreasing the prior scale will add additional regularization. If no
        prior scale is provided, self.holidays_prior_scale will be used.
        Mode can be specified as either 'additive' or 'multiplicative'. If not
        specified, self.seasonality_mode will be used. 'additive' means the
        effect of the regressor will be added to the trend, 'multiplicative'
        means it will multiply the trend.

        Parameters
        ----------
        name: string name of the regressor.
        prior_scale: optional float scale for the normal prior. If not
            provided, self.holidays_prior_scale will be used.
        standardize: optional, specify whether this regressor will be
            standardized prior to fitting. Can be 'auto' (standardize if not
            binary), True, or False.
        mode: optional, 'additive' or 'multiplicative'. Defaults to
            self.seasonality_mode.

        Returns
        -------
        The prophet object.
        """
        if self.history is not None:
            raise Exception(
                "Regressors must be added prior to model fitting.")
        self.validate_column_name(name, check_regressors=False)
        if prior_scale is None:
            prior_scale = float(self.holidays_prior_scale)
        if mode is None:
            mode = self.seasonality_mode
        if prior_scale <= 0:
            raise ValueError('Prior scale must be > 0')
        if mode not in ['additive', 'multiplicative']:
            raise ValueError("mode must be 'additive' or 'multiplicative'")
        self.extra_regressors[name] = {
            'prior_scale': prior_scale,
            'standardize': standardize,
            'mu': 0.,
            'std': 1.,
            'mode': mode,
        }
        return self
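
    # Illustrative call (a sketch; 'nfl_sunday' is a hypothetical column that
    # would have to be present in both the fit and predict dataframes):
    #
    #   m = Prophet()
    #   m.add_regressor('nfl_sunday', prior_scale=0.5, mode='additive')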

    def add_seasonality(
            self, name, period, fourier_order, prior_scale=None, mode=None
    ):
        """Add a seasonal component with specified period, number of Fourier
        components, and prior scale.

        Increasing the number of Fourier components allows the seasonality to
        change more quickly (at risk of overfitting). Default values for
        yearly and weekly seasonalities are 10 and 3 respectively.
        Increasing prior scale will allow this seasonality component more
        flexibility, decreasing will dampen it. If not provided, will use the
        seasonality_prior_scale provided on Prophet initialization (defaults
        to 10).
        Mode can be specified as either 'additive' or 'multiplicative'. If not
        specified, self.seasonality_mode will be used (defaults to additive).
        Additive means the seasonality will be added to the trend,
        multiplicative means it will multiply the trend.

        Parameters
        ----------
        name: string name of the seasonality component.
        period: float number of days in one period.
        fourier_order: int number of Fourier components to use.
        prior_scale: optional float prior scale for this component.
        mode: optional 'additive' or 'multiplicative'

        Returns
        -------
        The prophet object.
        """
        if self.history is not None:
            raise Exception(
                "Seasonality must be added prior to model fitting.")
        if name not in ['daily', 'weekly', 'yearly']:
            # Allow overwriting built-in seasonalities
            self.validate_column_name(name, check_seasonalities=False)
        if prior_scale is None:
            ps = self.seasonality_prior_scale
        else:
            ps = float(prior_scale)
        if ps <= 0:
            raise ValueError('Prior scale must be > 0')
        if mode is None:
            mode = self.seasonality_mode
        if mode not in ['additive', 'multiplicative']:
            raise ValueError("mode must be 'additive' or 'multiplicative'")
        self.seasonalities[name] = {
            'period': period,
            'fourier_order': fourier_order,
            'prior_scale': ps,
            'mode': mode,
        }
        return self
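
    # Illustrative call (a sketch; the period and order values are example
    # choices, not defaults): a roughly monthly component could be added as
    #
    #   m = Prophet(weekly_seasonality=False)
    #   m.add_seasonality(name='monthly', period=30.5, fourier_order=5)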

    def make_all_seasonality_features(self, df):
        """Dataframe with seasonality features.

        Includes seasonality features, holiday features, and added regressors.

        Parameters
        ----------
        df: pd.DataFrame with dates for computing seasonality features and any
            added regressors.

        Returns
        -------
        pd.DataFrame with regression features.
        list of prior scales for each column of the features dataframe.
        Dataframe with indicators for which regression components correspond
            to which columns.
        Dictionary with keys 'additive' and 'multiplicative' listing the
            component names for each mode of seasonality.
        """
        seasonal_features = []
        prior_scales = []
        modes = {'additive': [], 'multiplicative': []}

        # Seasonality features
        for name, props in self.seasonalities.items():
            features = self.make_seasonality_features(
                df['ds'],
                props['period'],
                props['fourier_order'],
                name,
            )
            seasonal_features.append(features)
            prior_scales.extend(
                [props['prior_scale']] * features.shape[1])
            modes[props['mode']].append(name)

        # Holiday features
        if self.holidays is not None or self.append_holidays is not None:
            features, holiday_priors, holiday_names = (
                self.make_holiday_features(df['ds'])
            )
            seasonal_features.append(features)
            prior_scales.extend(holiday_priors)
            modes[self.seasonality_mode].extend(holiday_names)

        # Additional regressors
        for name, props in self.extra_regressors.items():
            seasonal_features.append(pd.DataFrame(df[name]))
            prior_scales.append(props['prior_scale'])
            modes[props['mode']].append(name)

        # Dummy to prevent empty X
        if len(seasonal_features) == 0:
            seasonal_features.append(
                pd.DataFrame({'zeros': np.zeros(df.shape[0])}))
            prior_scales.append(1.)

        seasonal_features = pd.concat(seasonal_features, axis=1)
        component_cols, modes = self.regressor_column_matrix(
            seasonal_features, modes
        )
        return seasonal_features, prior_scales, component_cols, modes

    def regressor_column_matrix(self, seasonal_features, modes):
        """Dataframe indicating which columns of the feature matrix correspond
        to which seasonality/regressor components.

        Includes combination components, like 'additive_terms'. These
        combination components will be added to the 'modes' input.

        Parameters
        ----------
        seasonal_features: Constructed seasonal features dataframe
        modes: Dictionary with keys 'additive' and 'multiplicative' listing the
            component names for each mode of seasonality.

        Returns
        -------
        component_cols: A binary indicator dataframe with columns seasonal
            components and rows columns in seasonal_features. Entry is 1 if
            that column is used in that component.
        modes: Updated input with combination components.
        """
        components = pd.DataFrame({
            'col': np.arange(seasonal_features.shape[1]),
            'component': [
                x.split('_delim_')[0] for x in seasonal_features.columns
            ],
        })
        # Add total for holidays
        if self.train_holiday_names is not None:
            components = self.add_group_component(
                components, 'holidays', self.train_holiday_names.unique())
        # Add totals for additive and multiplicative components, and regressors
        for mode in ['additive', 'multiplicative']:
            components = self.add_group_component(
                components, mode + '_terms', modes[mode]
            )
            regressors_by_mode = [
                r for r, props in self.extra_regressors.items()
                if props['mode'] == mode
            ]
            components = self.add_group_component(
                components, 'extra_regressors_' + mode, regressors_by_mode)
            # Add combination components to modes
            modes[mode].append(mode + '_terms')
            modes[mode].append('extra_regressors_' + mode)
        # After all of the additive/multiplicative groups have been added,
        # assign the holidays group to the seasonality mode.
        modes[self.seasonality_mode].append('holidays')
        # Convert to a binary matrix
        component_cols = pd.crosstab(
            components['col'], components['component'],
        ).sort_index(level='col')
        # Add columns for additive and multiplicative terms, if missing
        for name in ['additive_terms', 'multiplicative_terms']:
            if name not in component_cols:
                component_cols[name] = 0
        # Remove the placeholder
        component_cols.drop('zeros', axis=1, inplace=True, errors='ignore')
        # Validation
        if (
            max(component_cols['additive_terms']
                + component_cols['multiplicative_terms']) > 1
        ):
            raise Exception('A bug occurred in seasonal components.')
        # Compare to the training, if set.
        if self.train_component_cols is not None:
            component_cols = component_cols[self.train_component_cols.columns]
            if not component_cols.equals(self.train_component_cols):
                raise Exception('A bug occurred in constructing regressors.')
        return component_cols, modes

    def add_group_component(self, components, name, group):
        """Adds a component with given name that contains all of the
        components in group.

        Parameters
        ----------
        components: Dataframe with components.
        name: Name of new group component.
        group: List of components that form the group.

        Returns
        -------
        Dataframe with components.
        """
        new_comp = components[components['component'].isin(set(group))].copy()
        group_cols = new_comp['col'].unique()
        if len(group_cols) > 0:
            new_comp = pd.DataFrame({'col': group_cols, 'component': name})
            components = components.append(new_comp)
        return components

    def parse_seasonality_args(self, name, arg, auto_disable, default_order):
        """Get number of fourier components for built-in seasonalities.

        Parameters
        ----------
        name: string name of the seasonality component.
        arg: 'auto', True, False, or number of fourier components as provided.
        auto_disable: bool if seasonality should be disabled when 'auto'.
        default_order: int default fourier order

        Returns
        -------
        Number of fourier components, or 0 for disabled.
        """
        if arg == 'auto':
            fourier_order = 0
            if name in self.seasonalities:
                logger.info(
                    'Found custom seasonality named "{name}", '
                    'disabling built-in {name} seasonality.'.format(name=name)
                )
            elif auto_disable:
                logger.info(
                    'Disabling {name} seasonality. Run prophet with '
                    '{name}_seasonality=True to override this.'.format(
                        name=name)
                )
            else:
                fourier_order = default_order
        elif arg is True:
            fourier_order = default_order
        elif arg is False:
            fourier_order = 0
        else:
            fourier_order = int(arg)
        return fourier_order

    def set_auto_seasonalities(self):
        """Set seasonalities that were left on auto.

        Turns on yearly seasonality if there is >=2 years of history.
        Turns on weekly seasonality if there is >=2 weeks of history, and the
        spacing between dates in the history is <7 days.
        Turns on daily seasonality if there is >=2 days of history, and the
        spacing between dates in the history is <1 day.
        """
        first = self.history['ds'].min()
        last = self.history['ds'].max()
        dt = self.history['ds'].diff()
        min_dt = dt.iloc[dt.nonzero()[0]].min()

        # Yearly seasonality
        yearly_disable = last - first < pd.Timedelta(days=730)
        fourier_order = self.parse_seasonality_args(
            'yearly', self.yearly_seasonality, yearly_disable, 10)
        if fourier_order > 0:
            self.seasonalities['yearly'] = {
                'period': 365.25,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
                'mode': self.seasonality_mode,
            }

        # Weekly seasonality
        weekly_disable = ((last - first < pd.Timedelta(weeks=2)) or
                          (min_dt >= pd.Timedelta(weeks=1)))
        fourier_order = self.parse_seasonality_args(
            'weekly', self.weekly_seasonality, weekly_disable, 3)
        if fourier_order > 0:
            self.seasonalities['weekly'] = {
                'period': 7,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
                'mode': self.seasonality_mode,
            }

        # Daily seasonality
        daily_disable = ((last - first < pd.Timedelta(days=2)) or
                         (min_dt >= pd.Timedelta(days=1)))
        fourier_order = self.parse_seasonality_args(
            'daily', self.daily_seasonality, daily_disable, 4)
        if fourier_order > 0:
            self.seasonalities['daily'] = {
                'period': 1,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
                'mode': self.seasonality_mode,
            }

    @staticmethod
    def linear_growth_init(df):
        """Initialize linear growth.

        Provides a strong initialization for linear growth by calculating the
        growth and offset parameters that pass the function through the first
        and last points in the time series.

        Parameters
        ----------
        df: pd.DataFrame with columns ds (date), y_scaled (scaled time
            series), and t (scaled time).

        Returns
        -------
        A tuple (k, m) with the rate (k) and offset (m) of the linear growth
        function.
        """
        i0, i1 = df['ds'].idxmin(), df['ds'].idxmax()
        T = df['t'].iloc[i1] - df['t'].iloc[i0]
        k = (df['y_scaled'].iloc[i1] - df['y_scaled'].iloc[i0]) / T
        m = df['y_scaled'].iloc[i0] - k * df['t'].iloc[i0]
        return (k, m)
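
    # Worked example (descriptive): if the scaled series starts at
    # (t=0, y_scaled=0.2) and ends at (t=1, y_scaled=0.8), then
    # k = (0.8 - 0.2) / 1 = 0.6 and m = 0.2 - 0.6 * 0 = 0.2.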

    @staticmethod
    def logistic_growth_init(df):
        """Initialize logistic growth.

        Provides a strong initialization for logistic growth by calculating
        the growth and offset parameters that pass the function through the
        first and last points in the time series.

        Parameters
        ----------
        df: pd.DataFrame with columns ds (date), cap_scaled (scaled capacity),
            y_scaled (scaled time series), and t (scaled time).

        Returns
        -------
        A tuple (k, m) with the rate (k) and offset (m) of the logistic growth
        function.
        """
        i0, i1 = df['ds'].idxmin(), df['ds'].idxmax()
        T = df['t'].iloc[i1] - df['t'].iloc[i0]

        # Force valid values, in case y > cap or y < 0
        C0 = df['cap_scaled'].iloc[i0]
        C1 = df['cap_scaled'].iloc[i1]
        y0 = max(0.01 * C0, min(0.99 * C0, df['y_scaled'].iloc[i0]))
        y1 = max(0.01 * C1, min(0.99 * C1, df['y_scaled'].iloc[i1]))

        r0 = C0 / y0
        r1 = C1 / y1

        if abs(r0 - r1) <= 0.01:
            r0 = 1.05 * r0

        L0 = np.log(r0 - 1)
        L1 = np.log(r1 - 1)

        # Initialize the offset
        m = L0 * T / (L0 - L1)
        # And the rate
        k = (L0 - L1) / T

        return (k, m)

    def fit(self, df, **kwargs):
        """Fit the Prophet model.

        This sets self.params to contain the fitted model parameters. It is a
        dictionary with parameter names as keys and the following items:
            k (Mx1 array): M posterior samples of the initial slope.
            m (Mx1 array): The initial intercept.
            delta (MxN array): The slope change at each of N changepoints.
            beta (MxK matrix): Coefficients for K seasonality features.
            sigma_obs (Mx1 array): Noise level.
        Note that M=1 if MAP estimation is used.

        Parameters
        ----------
        df: pd.DataFrame containing the history. Must have columns ds (date
            type) and y, the time series. If self.growth is 'logistic', then
            df must also have a column cap that specifies the capacity at
            each ds.
        kwargs: Additional arguments passed to the optimizing or sampling
            functions in Stan.

        Returns
        -------
        The fitted Prophet object.
        """
        if self.history is not None:
            raise Exception('Prophet object can only be fit once. '
                            'Instantiate a new object.')
        if ('ds' not in df) or ('y' not in df):
            raise ValueError(
                "Dataframe must have columns 'ds' and 'y' with the dates and "
                "values respectively."
            )
        history = df[df['y'].notnull()].copy()
        if history.shape[0] < 2:
            raise ValueError('Dataframe has less than 2 non-NaN rows.')
        self.history_dates = pd.to_datetime(df['ds']).sort_values()

        history = self.setup_dataframe(history, initialize_scales=True)
        self.history = history
        self.set_auto_seasonalities()
        seasonal_features, prior_scales, component_cols, modes = (
            self.make_all_seasonality_features(history))
        self.train_component_cols = component_cols
        self.component_modes = modes

        self.set_changepoints()

        dat = {
            'T': history.shape[0],
            'K': seasonal_features.shape[1],
            'S': len(self.changepoints_t),
            'y': history['y_scaled'],
            't': history['t'],
            't_change': self.changepoints_t,
            'X': seasonal_features,
            'sigmas': prior_scales,
            'tau': self.changepoint_prior_scale,
            'trend_indicator': int(self.growth == 'logistic'),
            's_a': component_cols['additive_terms'],
            's_m': component_cols['multiplicative_terms'],
        }

        if self.growth == 'linear':
            dat['cap'] = np.zeros(self.history.shape[0])
            kinit = self.linear_growth_init(history)
        else:
            dat['cap'] = history['cap_scaled']
            kinit = self.logistic_growth_init(history)

        model = prophet_stan_model

        def stan_init():
            return {
                'k': kinit[0],
                'm': kinit[1],
                'delta': np.zeros(len(self.changepoints_t)),
                'beta': np.zeros(seasonal_features.shape[1]),
                'sigma_obs': 1,
            }

        if (
            (history['y'].min() == history['y'].max())
            and self.growth == 'linear'
        ):
            # Nothing to fit.
            self.params = stan_init()
            self.params['sigma_obs'] = 1e-9
            for par in self.params:
                self.params[par] = np.array([self.params[par]])
        elif self.mcmc_samples > 0:
            stan_fit = model.sampling(
                dat,
                init=stan_init,
                iter=self.mcmc_samples,
                **kwargs
            )
            for par in stan_fit.model_pars:
                self.params[par] = stan_fit[par]
        else:
            try:
                params = model.optimizing(
                    dat, init=stan_init, iter=1e4, **kwargs)
            except RuntimeError:
                params = model.optimizing(
                    dat, init=stan_init, iter=1e4, algorithm='Newton',
                    **kwargs
                )
            for par in params:
                self.params[par] = params[par].reshape((1, -1))

        # If no changepoints were requested, replace delta with 0s
        if len(self.changepoints) == 0:
            # Fold delta into the base rate k
            self.params['k'] = self.params['k'] + self.params['delta']
            self.params['delta'] = np.zeros(self.params['delta'].shape)

        return self
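
    # Usage note (descriptive): Prophet().fit(df) runs MAP estimation through
    # Stan's optimizer, while Prophet(mcmc_samples=300).fit(df) runs full
    # Bayesian sampling; additional keyword arguments are forwarded to the
    # Stan optimizing/sampling call.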

    def predict(self, df=None):
        """Predict using the prophet model.

        Parameters
        ----------
        df: pd.DataFrame with dates for predictions (column ds), and capacity
            (column cap) if logistic growth. If not provided, predictions are
            made on the history.

        Returns
        -------
        A pd.DataFrame with the forecast components.
        """
        if df is None:
            df = self.history.copy()
        else:
            if df.shape[0] == 0:
                raise ValueError('Dataframe has no rows.')
            df = self.setup_dataframe(df.copy())

        df['trend'] = self.predict_trend(df)
        seasonal_components = self.predict_seasonal_components(df)
        intervals = self.predict_uncertainty(df)

        # Drop columns except ds, cap, floor, and trend
        cols = ['ds', 'trend']
        if 'cap' in df:
            cols.append('cap')
        if self.logistic_floor:
            cols.append('floor')
        # Add in forecast components
        df2 = pd.concat((df[cols], intervals, seasonal_components), axis=1)
        df2['yhat'] = (
            df2['trend'] * (1 + df2['multiplicative_terms'])
            + df2['additive_terms']
        )
        return df2

    @staticmethod
    def piecewise_linear(t, deltas, k, m, changepoint_ts):
        """Evaluate the piecewise linear function.

        Parameters
        ----------
        t: np.array of times on which the function is evaluated.
        deltas: np.array of rate changes at each changepoint.
        k: Float initial rate.
        m: Float initial offset.
        changepoint_ts: np.array of changepoint times.

        Returns
        -------
        Vector y(t).
        """
        # Intercept changes
        gammas = -changepoint_ts * deltas
        # Get cumulative slope and intercept at each t
        k_t = k * np.ones_like(t)
        m_t = m * np.ones_like(t)
        for s, t_s in enumerate(changepoint_ts):
            indx = t >= t_s
            k_t[indx] += deltas[s]
            m_t[indx] += gammas[s]
        return k_t * t + m_t
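
    # Worked example (descriptive): with k=1, m=0, a single changepoint at
    # t_s=0.5 and deltas=[1], the slope is 1 before t=0.5 and 2 afterwards,
    # while the intercept change gamma = -0.5 * 1 keeps the function
    # continuous at the changepoint.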

    @staticmethod
    def piecewise_logistic(t, cap, deltas, k, m, changepoint_ts):
        """Evaluate the piecewise logistic function.

        Parameters
        ----------
        t: np.array of times on which the function is evaluated.
        cap: np.array of capacities at each t.
        deltas: np.array of rate changes at each changepoint.
        k: Float initial rate.
        m: Float initial offset.
        changepoint_ts: np.array of changepoint times.

        Returns
        -------
        Vector y(t).
        """
        # Compute offset changes
        k_cum = np.concatenate((np.atleast_1d(k), np.cumsum(deltas) + k))
        gammas = np.zeros(len(changepoint_ts))
        for i, t_s in enumerate(changepoint_ts):
            gammas[i] = (
                (t_s - m - np.sum(gammas))
                * (1 - k_cum[i] / k_cum[i + 1])  # noqa W503
            )
        # Get cumulative rate and offset at each t
        k_t = k * np.ones_like(t)
        m_t = m * np.ones_like(t)
        for s, t_s in enumerate(changepoint_ts):
            indx = t >= t_s
            k_t[indx] += deltas[s]
            m_t[indx] += gammas[s]
        return cap / (1 + np.exp(-k_t * (t - m_t)))

    def predict_trend(self, df):
        """Predict trend using the prophet model.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Vector with trend on prediction dates.
        """
        k = np.nanmean(self.params['k'])
        m = np.nanmean(self.params['m'])
        deltas = np.nanmean(self.params['delta'], axis=0)

        t = np.array(df['t'])
        if self.growth == 'linear':
            trend = self.piecewise_linear(t, deltas, k, m, self.changepoints_t)
        else:
            cap = df['cap_scaled']
            trend = self.piecewise_logistic(
                t, cap, deltas, k, m, self.changepoints_t)

        return trend * self.y_scale + df['floor']

    def predict_seasonal_components(self, df):
        """Predict seasonality components, holidays, and added regressors.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Dataframe with seasonal components.
        """
        seasonal_features, _, component_cols, _ = (
            self.make_all_seasonality_features(df)
        )
        lower_p = 100 * (1.0 - self.interval_width) / 2
        upper_p = 100 * (1.0 + self.interval_width) / 2

        X = seasonal_features.values
        data = {}
        for component in component_cols.columns:
            beta_c = self.params['beta'] * component_cols[component].values

            comp = np.matmul(X, beta_c.transpose())
            if component in self.component_modes['additive']:
                comp *= self.y_scale
            data[component] = np.nanmean(comp, axis=1)
            data[component + '_lower'] = np.nanpercentile(
                comp, lower_p, axis=1,
            )
            data[component + '_upper'] = np.nanpercentile(
                comp, upper_p, axis=1,
            )
        return pd.DataFrame(data)

    def sample_posterior_predictive(self, df):
        """Prophet posterior predictive samples.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Dictionary with posterior predictive samples for the forecast yhat and
        for the trend component.
        """
        n_iterations = self.params['k'].shape[0]
        samp_per_iter = max(1, int(np.ceil(
            self.uncertainty_samples / float(n_iterations)
        )))

        # Generate seasonality features once so we can re-use them.
        seasonal_features, _, component_cols, _ = (
            self.make_all_seasonality_features(df)
        )

        sim_values = {'yhat': [], 'trend': []}
        for i in range(n_iterations):
            for _j in range(samp_per_iter):
                sim = self.sample_model(
                    df=df,
                    seasonal_features=seasonal_features,
                    iteration=i,
                    s_a=component_cols['additive_terms'],
                    s_m=component_cols['multiplicative_terms'],
                )
                for key in sim_values:
                    sim_values[key].append(sim[key])
        for k, v in sim_values.items():
            sim_values[k] = np.column_stack(v)
        return sim_values

    def predictive_samples(self, df):
        """Sample from the posterior predictive distribution.

        Parameters
        ----------
        df: Dataframe with dates for predictions (column ds), and capacity
            (column cap) if logistic growth.

        Returns
        -------
        Dictionary with keys "trend" and "yhat" containing
        posterior predictive samples for that component.
        """
        df = self.setup_dataframe(df.copy())
        sim_values = self.sample_posterior_predictive(df)
        return sim_values

    def predict_uncertainty(self, df):
        """Prediction intervals for yhat and trend.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Dataframe with uncertainty intervals.
        """
        sim_values = self.sample_posterior_predictive(df)

        lower_p = 100 * (1.0 - self.interval_width) / 2
        upper_p = 100 * (1.0 + self.interval_width) / 2

        series = {}
        for key in ['yhat', 'trend']:
            series['{}_lower'.format(key)] = np.nanpercentile(
                sim_values[key], lower_p, axis=1)
            series['{}_upper'.format(key)] = np.nanpercentile(
                sim_values[key], upper_p, axis=1)

        return pd.DataFrame(series)

    def sample_model(self, df, seasonal_features, iteration, s_a, s_m):
        """Simulate observations from the extrapolated generative model.

        Parameters
        ----------
        df: Prediction dataframe.
        seasonal_features: pd.DataFrame of seasonal features.
        iteration: Int sampling iteration to use parameters from.
        s_a: Indicator vector for additive components.
        s_m: Indicator vector for multiplicative components.

        Returns
        -------
        Dataframe with trend and yhat, each like df['t'].
        """
        trend = self.sample_predictive_trend(df, iteration)

        beta = self.params['beta'][iteration]
        Xb_a = np.matmul(seasonal_features.values, beta * s_a) * self.y_scale
        Xb_m = np.matmul(seasonal_features.values, beta * s_m)

        sigma = self.params['sigma_obs'][iteration]
        noise = np.random.normal(0, sigma, df.shape[0]) * self.y_scale

        return pd.DataFrame({
            'yhat': trend * (1 + Xb_m) + Xb_a + noise,
            'trend': trend
        })

    def sample_predictive_trend(self, df, iteration):
        """Simulate the trend using the extrapolated generative model.

        Parameters
        ----------
        df: Prediction dataframe.
        iteration: Int sampling iteration to use parameters from.

        Returns
        -------
        np.array of simulated trend over df['t'].
        """
        k = self.params['k'][iteration]
        m = self.params['m'][iteration]
        deltas = self.params['delta'][iteration]

        t = np.array(df['t'])
        T = t.max()

        # New changepoints from a Poisson process with rate S on [1, T]
        if T > 1:
            S = len(self.changepoints_t)
            n_changes = np.random.poisson(S * (T - 1))
        else:
            n_changes = 0
        if n_changes > 0:
            changepoint_ts_new = 1 + np.random.rand(n_changes) * (T - 1)
            changepoint_ts_new.sort()
        else:
            changepoint_ts_new = []

        # Get the empirical scale of the deltas, plus epsilon to avoid NaNs.
        lambda_ = np.mean(np.abs(deltas)) + 1e-8

        # Sample deltas
        deltas_new = np.random.laplace(0, lambda_, n_changes)

        # Prepend the times and deltas from the history
        changepoint_ts = np.concatenate((self.changepoints_t,
                                         changepoint_ts_new))
        deltas = np.concatenate((deltas, deltas_new))

        if self.growth == 'linear':
            trend = self.piecewise_linear(t, deltas, k, m, changepoint_ts)
        else:
            cap = df['cap_scaled']
            trend = self.piecewise_logistic(t, cap, deltas, k, m,
                                            changepoint_ts)

        return trend * self.y_scale + df['floor']

    def make_future_dataframe(self, periods, freq='D', include_history=True):
        """Create a dataframe of future dates to use for predictions.

        Parameters
        ----------
        periods: Int number of periods to forecast forward.
        freq: Any valid frequency for pd.date_range, such as 'D' or 'M'.
        include_history: Boolean to include the historical dates in the data
            frame for predictions.

        Returns
        -------
        pd.Dataframe that extends forward from the end of self.history for the
        requested number of periods.
        """
        if self.history_dates is None:
            raise Exception('Model must be fit before this can be used.')
        last_date = self.history_dates.max()
        dates = pd.date_range(
            start=last_date,
            periods=periods + 1,  # An extra in case we include start
            freq=freq)
        dates = dates[dates > last_date]  # Drop start if equals last_date
        dates = dates[:periods]  # Return correct number of periods

        if include_history:
            dates = np.concatenate((np.array(self.history_dates), dates))

        return pd.DataFrame({'ds': dates})
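
    # Illustrative call (a sketch): after fitting on daily data,
    # m.make_future_dataframe(periods=365) returns a single-column 'ds' frame
    # covering the history plus one additional year, ready to pass to
    # m.predict.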

    def plot(self, fcst, ax=None, uncertainty=True, plot_cap=True, xlabel='ds',
             ylabel='y'):
        """Plot the Prophet forecast.

        Parameters
        ----------
        fcst: pd.DataFrame output of self.predict.
        ax: Optional matplotlib axes on which to plot.
        uncertainty: Optional boolean to plot uncertainty intervals.
        plot_cap: Optional boolean indicating if the capacity should be shown
            in the figure, if available.
        xlabel: Optional label name on X-axis
        ylabel: Optional label name on Y-axis

        Returns
        -------
        A matplotlib figure.
        """
        return plot(
            m=self, fcst=fcst, ax=ax, uncertainty=uncertainty,
            plot_cap=plot_cap, xlabel=xlabel, ylabel=ylabel,
        )

    def plot_components(self, fcst, uncertainty=True, plot_cap=True,
                        weekly_start=0, yearly_start=0):
        """Plot the Prophet forecast components.

        Will plot whichever are available of: trend, holidays, weekly
        seasonality, and yearly seasonality.

        Parameters
        ----------
        fcst: pd.DataFrame output of self.predict.
        uncertainty: Optional boolean to plot uncertainty intervals.
        plot_cap: Optional boolean indicating if the capacity should be shown
            in the figure, if available.
        weekly_start: Optional int specifying the start day of the weekly
            seasonality plot. 0 (default) starts the week on Sunday. 1 shifts
            by 1 day to Monday, and so on.
        yearly_start: Optional int specifying the start day of the yearly
            seasonality plot. 0 (default) starts the year on Jan 1. 1 shifts
            by 1 day to Jan 2, and so on.

        Returns
        -------
        A matplotlib figure.
        """
        return plot_components(
            m=self, fcst=fcst, uncertainty=uncertainty, plot_cap=plot_cap,
            weekly_start=weekly_start, yearly_start=yearly_start,
        )

    def plot_forecast_component(
            self, fcst, name, ax=None, uncertainty=True, plot_cap=False):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.plot_forecast_component. ',
            DeprecationWarning,
        )
        return plot_forecast_component(
            self, fcst=fcst, name=name, ax=ax, uncertainty=uncertainty,
            plot_cap=plot_cap,
        )

    def seasonality_plot_df(self, ds):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.seasonality_plot_df. ',
            DeprecationWarning,
        )
        return seasonality_plot_df(self, ds=ds)

    def plot_weekly(self, ax=None, uncertainty=True, weekly_start=0):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.plot_weekly. ',
            DeprecationWarning,
        )
        return plot_weekly(
            self, ax=ax, uncertainty=uncertainty, weekly_start=weekly_start,
        )

    def plot_yearly(self, ax=None, uncertainty=True, yearly_start=0):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.plot_yearly. ',
            DeprecationWarning,
        )
        return plot_yearly(
            self, ax=ax, uncertainty=uncertainty, yearly_start=yearly_start,
        )

    def plot_seasonality(self, name, ax=None, uncertainty=True):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.plot.plot_seasonality. ',
            DeprecationWarning,
        )
        return plot_seasonality(
            self, name=name, ax=ax, uncertainty=uncertainty,
        )

    def copy(self, cutoff=None):
        warnings.warn(
            'This method will be removed in the next version. '
            'Please use fbprophet.diagnostics.prophet_copy. ',
            DeprecationWarning,
        )
        return prophet_copy(m=self, cutoff=cutoff)
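

# Minimal end-to-end usage sketch (illustrative only, not part of the
# original module). It assumes pystan is installed and the bundled Stan model
# compiles; the data are synthetic, and the 'ds'/'y' column names follow the
# conventions documented in Prophet.fit.
if __name__ == '__main__':
    # Two years of daily data with weekly and yearly seasonality plus noise.
    example_df = pd.DataFrame({
        'ds': pd.date_range('2015-01-01', periods=730, freq='D'),
    })
    t = np.arange(example_df.shape[0])
    example_df['y'] = (
        10.0
        + 2.0 * np.sin(2 * np.pi * t / 7)
        + 5.0 * np.sin(2 * np.pi * t / 365.25)
        + np.random.normal(0, 0.5, example_df.shape[0])
    )

    m = Prophet()
    m.fit(example_df)
    # Extend 30 days past the end of the history and forecast.
    future = m.make_future_dataframe(periods=30)
    forecast = m.predict(future)
    print(forecast[['ds', 'yhat', 'yhat_lower', 'yhat_upper']].tail())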