# forecaster.py

# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from collections import defaultdict
from datetime import timedelta
import pickle

from matplotlib import pyplot as plt
from matplotlib.dates import DateFormatter, MonthLocator
from matplotlib.ticker import MaxNLocator
import numpy as np
import pandas as pd

# fb-block 1 start
import pkg_resources
# fb-block 1 end

try:
    import pystan
except ImportError:
    print('You cannot run prophet without pystan installed')
    raise

# fb-block 2

class Prophet(object):
    # Plotting default color for R/Python consistency
    forecast_color = '#0072B2'

    def __init__(
            self,
            growth='linear',
            changepoints=None,
            n_changepoints=25,
            yearly_seasonality=True,
            weekly_seasonality=True,
            holidays=None,
            seasonality_prior_scale=10.0,
            holidays_prior_scale=10.0,
            changepoint_prior_scale=0.05,
            mcmc_samples=0,
            interval_width=0.80,
            uncertainty_samples=1000,
    ):
        if growth not in ('linear', 'logistic'):
            raise ValueError("growth setting must be 'linear' or 'logistic'")
        self.growth = growth

        self.changepoints = pd.to_datetime(changepoints)
        if self.changepoints is not None:
            self.n_changepoints = len(self.changepoints)
        else:
            self.n_changepoints = n_changepoints

        self.yearly_seasonality = yearly_seasonality
        self.weekly_seasonality = weekly_seasonality

        if holidays is not None:
            if not (
                isinstance(holidays, pd.DataFrame)
                and 'ds' in holidays
                and 'holiday' in holidays
            ):
                raise ValueError("holidays must be a DataFrame with 'ds' and "
                                 "'holiday' columns.")
            holidays['ds'] = pd.to_datetime(holidays['ds'])
        self.holidays = holidays

        self.seasonality_prior_scale = float(seasonality_prior_scale)
        self.changepoint_prior_scale = float(changepoint_prior_scale)
        self.holidays_prior_scale = float(holidays_prior_scale)

        self.mcmc_samples = mcmc_samples
        self.interval_width = interval_width
        self.uncertainty_samples = uncertainty_samples

        # Set during fitting
        self.start = None
        self.y_scale = None
        self.t_scale = None
        self.changepoints_t = None
        self.stan_fit = None
        self.params = {}
        self.history = None
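
    # A minimal construction sketch. The holidays frame below is
    # hypothetical (not part of this module); it just illustrates the
    # expected 'ds'/'holiday' columns and the optional window columns:
    #
    #   playoffs = pd.DataFrame({
    #       'holiday': 'playoff',
    #       'ds': pd.to_datetime(['2016-01-24', '2017-01-22']),
    #       'lower_window': 0,
    #       'upper_window': 1,
    #   })
    #   m = Prophet(growth='linear', n_changepoints=25, holidays=playoffs)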

    @classmethod
    def get_linear_model(cls):
        # fb-block 3
        # fb-block 4 start
        model_file = pkg_resources.resource_filename(
            'fbprophet',
            'stan_models/linear_growth.pkl'
        )
        # fb-block 4 end
        with open(model_file, 'rb') as f:
            return pickle.load(f)

    @classmethod
    def get_logistic_model(cls):
        # fb-block 5
        # fb-block 6 start
        model_file = pkg_resources.resource_filename(
            'fbprophet',
            'stan_models/logistic_growth.pkl'
        )
        # fb-block 6 end
        with open(model_file, 'rb') as f:
            return pickle.load(f)

    def setup_dataframe(self, df, initialize_scales=False):
        """Create auxiliary columns 't', 'y_scaled', and 'cap_scaled'.

        These columns are used during both fitting and prediction.
        """
        if 'y' in df:
            df['y'] = pd.to_numeric(df['y'])
        df['ds'] = pd.to_datetime(df['ds'])

        df = df.sort_values('ds')
        df.reset_index(inplace=True, drop=True)

        if initialize_scales:
            self.y_scale = df['y'].max()
            self.start = df['ds'].min()
            self.t_scale = df['ds'].max() - self.start

        df['t'] = (df['ds'] - self.start) / self.t_scale
        if 'y' in df:
            df['y_scaled'] = df['y'] / self.y_scale

        if self.growth == 'logistic':
            assert 'cap' in df
            df['cap_scaled'] = df['cap'] / self.y_scale

        return df
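
    # Scaling sketch: with a history spanning 2015-01-01..2016-01-01 and
    # max(y) = 200, a row (ds='2015-07-02', y=100) gets t ~= 0.5 and
    # y_scaled = 0.5, so Stan always sees t and y on roughly [0, 1]
    # regardless of the units of the raw data.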

    def set_changepoints(self):
        """Generate a list of changepoints.

        Either:
        1) The changepoints were passed in explicitly.
            A) They are empty.
            B) They are not empty, and need validation.
        2) We are generating a grid of them.
        3) The user prefers no changepoints be used.
        """
        if self.changepoints is not None:
            if len(self.changepoints) == 0:
                pass
            else:
                too_low = min(self.changepoints) < self.history['ds'].min()
                too_high = max(self.changepoints) > self.history['ds'].max()
                if too_low or too_high:
                    raise ValueError(
                        'Changepoints must fall within training data.')
        elif self.n_changepoints > 0:
            # Place potential changepoints evenly through the first 80% of
            # the history.
            max_ix = np.floor(self.history.shape[0] * 0.8)
            cp_indexes = (
                np.linspace(0, max_ix, self.n_changepoints + 1)
                .round()
                .astype(np.int)
            )
            self.changepoints = self.history.ix[cp_indexes]['ds'].tail(-1)
        else:
            # Set empty changepoints
            self.changepoints = []
        if len(self.changepoints) > 0:
            self.changepoints_t = np.sort(np.array(
                (self.changepoints - self.start) / self.t_scale))
        else:
            self.changepoints_t = np.array([0])  # dummy changepoint

    def get_changepoint_matrix(self):
        A = np.zeros((self.history.shape[0], len(self.changepoints_t)))
        for i, t_i in enumerate(self.changepoints_t):
            A[self.history['t'].values >= t_i, i] = 1
        return A
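
    # Indicator sketch: with history t = [0.0, 0.25, 0.5, 0.75, 1.0] and
    # changepoints_t = [0.3, 0.6], A would be
    #   [[0, 0], [0, 0], [1, 0], [1, 1], [1, 1]]
    # i.e. A[i, j] = 1 exactly when row i falls at or after changepoint j.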

    @staticmethod
    def fourier_series(dates, period, series_order):
        """Generate a Fourier expansion for a fixed period and order.

        Parameters
        ----------
        dates: a pd.Series containing timestamps
        period: the period of the seasonality, in days (e.g. 365.25)
        series_order: number of Fourier components to generate

        Returns
        -------
        a 2-dimensional np.array with one row per row in `dates` and
        2 * series_order columns.
        """
        # convert to days since epoch
        t = np.array(
            (dates - pd.datetime(1970, 1, 1))
            .apply(lambda x: x.days)
            .astype(np.float)
        )
        return np.column_stack([
            fun(2.0 * (i + 1) * np.pi * t / period)
            for i in range(series_order)
            for fun in (np.sin, np.cos)
        ])
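
    # Shape sketch: fourier_series(dates, period=7, series_order=3) yields
    # a (len(dates), 6) array whose columns are
    #   sin(2*pi*t/7), cos(2*pi*t/7), sin(4*pi*t/7), cos(4*pi*t/7),
    #   sin(6*pi*t/7), cos(6*pi*t/7)
    # with t measured in days since the epoch.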

    @classmethod
    def make_seasonality_features(cls, dates, period, series_order, prefix):
        features = cls.fourier_series(dates, period, series_order)
        columns = [
            '{}_{}'.format(prefix, i + 1)
            for i in range(features.shape[1])
        ]
        return pd.DataFrame(features, columns=columns)

    def make_holiday_features(self, dates):
        """Generate a DataFrame with each column corresponding to a holiday.
        """
        # A smaller prior scale will shrink holiday estimates more
        scale_ratio = self.holidays_prior_scale / self.seasonality_prior_scale
        # Holds columns of our future matrix.
        expanded_holidays = defaultdict(lambda: np.zeros(dates.shape[0]))
        # Makes an index so we can perform `get_loc` below.
        row_index = pd.DatetimeIndex(dates)

        for ix, row in self.holidays.iterrows():
            dt = row.ds.date()
            try:
                lw = int(row.get('lower_window', 0))
                uw = int(row.get('upper_window', 0))
            except ValueError:
                lw = 0
                uw = 0
            for offset in range(lw, uw + 1):
                occurrence = dt + timedelta(days=offset)
                try:
                    loc = row_index.get_loc(occurrence)
                except KeyError:
                    loc = None
                key = '{}_{}{}'.format(
                    row.holiday,
                    '+' if offset >= 0 else '-',
                    abs(offset)
                )
                if loc is not None:
                    expanded_holidays[key][loc] = scale_ratio
                else:
                    # Access the key to generate the column
                    expanded_holidays[key]
        # This relies pretty importantly on pandas keeping the columns in
        # order.
        return pd.DataFrame(expanded_holidays)
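
    # Column-naming sketch: a hypothetical holiday row ('superbowl',
    # lower_window=-1, upper_window=1) expands into one indicator column
    # per offset:
    #   superbowl_-1, superbowl_+0, superbowl_+1
    # each set to holidays_prior_scale / seasonality_prior_scale on the
    # matching dates, so that all betas can share a single prior scale in
    # the Stan model while holiday effects get their own effective prior.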

    def make_all_seasonality_features(self, df):
        seasonal_features = [
            # Add a column of zeros in case no seasonality is used.
            pd.DataFrame({'zeros': np.zeros(df.shape[0])})
        ]

        # Seasonality features
        if self.yearly_seasonality:
            seasonal_features.append(self.make_seasonality_features(
                df['ds'],
                365.25,
                10,
                'yearly',
            ))

        if self.weekly_seasonality:
            seasonal_features.append(self.make_seasonality_features(
                df['ds'],
                7,
                3,
                'weekly',
            ))

        if self.holidays is not None:
            seasonal_features.append(self.make_holiday_features(df['ds']))
        return pd.concat(seasonal_features, axis=1)

    @staticmethod
    def linear_growth_init(df):
        i0, i1 = df['ds'].idxmin(), df['ds'].idxmax()
        T = df['t'].ix[i1] - df['t'].ix[i0]
        k = (df['y_scaled'].ix[i1] - df['y_scaled'].ix[i0]) / T
        m = df['y_scaled'].ix[i0] - k * df['t'].ix[i0]
        return (k, m)

    @staticmethod
    def logistic_growth_init(df):
        i0, i1 = df['ds'].idxmin(), df['ds'].idxmax()
        T = df['t'].ix[i1] - df['t'].ix[i0]

        # Force valid values, in case y > cap.
        r0 = max(1.01, df['cap_scaled'].ix[i0] / df['y_scaled'].ix[i0])
        r1 = max(1.01, df['cap_scaled'].ix[i1] / df['y_scaled'].ix[i1])

        if abs(r0 - r1) <= 0.01:
            r0 = 1.05 * r0

        L0 = np.log(r0 - 1)
        L1 = np.log(r1 - 1)

        # Initialize the offset
        m = L0 * T / (L0 - L1)
        # And the rate
        k = L0 / m
        return (k, m)
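
    # Derivation sketch for the logistic init: from
    #   y = cap / (1 + exp(-k * (t - m)))
    # we get log(cap / y - 1) = -k * (t - m). Writing L0 and L1 for this
    # quantity at the first point (t = 0) and last point (t = T) gives
    #   L0 = k * m  and  L0 - L1 = k * T,
    # hence m = L0 * T / (L0 - L1) and k = L0 / m, as computed above.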

    # fb-block 7
    def fit(self, df, **kwargs):
        """Fit the Prophet model to data.

        Parameters
        ----------
        df: pd.DataFrame containing the history. Must have columns 'ds' and
            'y', and if growth is logistic, 'cap'.
        kwargs: Additional arguments passed to Stan's sampling or optimizing
            function, as appropriate.

        Returns
        -------
        The fitted Prophet object.
        """
        history = df[df['y'].notnull()].copy()
        history = self.setup_dataframe(history, initialize_scales=True)
        self.history = history
        seasonal_features = self.make_all_seasonality_features(history)

        self.set_changepoints()
        A = self.get_changepoint_matrix()

        dat = {
            'T': history.shape[0],
            'K': seasonal_features.shape[1],
            'S': len(self.changepoints_t),
            'y': history['y_scaled'],
            't': history['t'],
            'A': A,
            't_change': self.changepoints_t,
            'X': seasonal_features,
            'sigma': self.seasonality_prior_scale,
            'tau': self.changepoint_prior_scale,
        }

        if self.growth == 'linear':
            kinit = self.linear_growth_init(history)
            model = self.get_linear_model()
        else:
            dat['cap'] = history['cap_scaled']
            kinit = self.logistic_growth_init(history)
            model = self.get_logistic_model()

        def stan_init():
            return {
                'k': kinit[0],
                'm': kinit[1],
                'delta': np.zeros(len(self.changepoints_t)),
                'beta': np.zeros(seasonal_features.shape[1]),
                'sigma_obs': 1,
            }

        if self.mcmc_samples > 0:
            stan_fit = model.sampling(
                dat,
                init=stan_init,
                iter=self.mcmc_samples,
                **kwargs
            )
            for par in stan_fit.model_pars:
                self.params[par] = stan_fit[par]
        else:
            params = model.optimizing(dat, init=stan_init, iter=1e4, **kwargs)
            for par in params:
                self.params[par] = params[par].reshape((1, -1))

        # If no changepoints were requested, replace delta with 0s.
        if len(self.changepoints) == 0:
            # Fold delta into the base rate k
            self.params['k'] = self.params['k'] + self.params['delta']
            self.params['delta'] = np.zeros(self.params['delta'].shape)

        return self
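
    # Fitting sketch (history_df is a hypothetical frame with 'ds' and 'y'
    # columns):
    #   m = Prophet().fit(history_df)   # MAP fit via Stan's optimizer
    #   m2 = Prophet(mcmc_samples=500).fit(history_df)  # full posterior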

    # fb-block 8
    def predict(self, df=None):
        """Predict historical and future values for y.

        `df` should have a 'ds' column of dates for which to make
        predictions (and a 'cap' column if growth is logistic). It can be
        None, in which case we predict only on the history.
        """
        if df is None:
            df = self.history.copy()
        else:
            df = self.setup_dataframe(df)

        df['trend'] = self.predict_trend(df)
        seasonal_components = self.predict_seasonal_components(df)
        intervals = self.predict_uncertainty(df)

        df2 = pd.concat((df, intervals, seasonal_components), axis=1)
        df2['yhat'] = df2['trend'] + df2['seasonal']
        return df2

    @staticmethod
    def piecewise_linear(t, deltas, k, m, changepoint_ts):
        # Intercept changes
        gammas = -changepoint_ts * deltas
        # Get cumulative slope and intercept at each t
        k_t = k * np.ones_like(t)
        m_t = m * np.ones_like(t)
        for s, t_s in enumerate(changepoint_ts):
            indx = t >= t_s
            k_t[indx] += deltas[s]
            m_t[indx] += gammas[s]
        return k_t * t + m_t
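
    # Worked sketch: with k = 1, m = 0, changepoint_ts = [0.5], and
    # deltas = [-0.5], we get gamma = -0.5 * -0.5 = 0.25, so the trend is
    #   t              for t <  0.5
    #   0.5*t + 0.25   for t >= 0.5
    # Both pieces equal 0.5 at the changepoint, i.e. the intercept
    # adjustment gamma keeps the trend continuous.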

    @staticmethod
    def piecewise_logistic(t, cap, deltas, k, m, changepoint_ts):
        # Compute offset changes
        k_cum = np.concatenate((np.atleast_1d(k), np.cumsum(deltas) + k))
        gammas = np.zeros(len(changepoint_ts))
        for i, t_s in enumerate(changepoint_ts):
            gammas[i] = (
                (t_s - m - np.sum(gammas))
                * (1 - k_cum[i] / k_cum[i + 1])
            )
        # Get cumulative rate and offset at each t
        k_t = k * np.ones_like(t)
        m_t = m * np.ones_like(t)
        for s, t_s in enumerate(changepoint_ts):
            indx = t >= t_s
            k_t[indx] += deltas[s]
            m_t[indx] += gammas[s]
        return cap / (1 + np.exp(-k_t * (t - m_t)))

    def predict_trend(self, df):
        k = np.nanmean(self.params['k'])
        m = np.nanmean(self.params['m'])
        deltas = np.nanmean(self.params['delta'], axis=0)

        t = np.array(df['t'])
        if self.growth == 'linear':
            trend = self.piecewise_linear(t, deltas, k, m, self.changepoints_t)
        else:
            cap = df['cap_scaled']
            trend = self.piecewise_logistic(
                t, cap, deltas, k, m, self.changepoints_t)

        return trend * self.y_scale

    def predict_seasonal_components(self, df):
        seasonal_features = self.make_all_seasonality_features(df)
        lower_p = 100 * (1.0 - self.interval_width) / 2
        upper_p = 100 * (1.0 + self.interval_width) / 2
        components = pd.DataFrame({
            'col': np.arange(seasonal_features.shape[1]),
            'component': [x.split('_')[0] for x in seasonal_features.columns],
        })
        # Remove the placeholder
        components = components[components['component'] != 'zeros']

        if components.shape[0] > 0:
            X = seasonal_features.as_matrix()
            data = {}
            for component, features in components.groupby('component'):
                cols = features.col.tolist()
                comp_beta = self.params['beta'][:, cols]
                comp_features = X[:, cols]
                comp = (
                    np.matmul(comp_features, comp_beta.transpose())
                    * self.y_scale
                )
                data[component] = np.nanmean(comp, axis=1)
                data[component + '_lower'] = np.nanpercentile(comp, lower_p,
                                                              axis=1)
                data[component + '_upper'] = np.nanpercentile(comp, upper_p,
                                                              axis=1)
            component_predictions = pd.DataFrame(data)
            component_predictions['seasonal'] = (
                component_predictions[components['component'].unique()].sum(1))
        else:
            component_predictions = pd.DataFrame(
                {'seasonal': np.zeros(df.shape[0])})
        return component_predictions

    def predict_uncertainty(self, df):
        n_iterations = self.params['k'].shape[0]
        samp_per_iter = max(1, int(np.ceil(
            self.uncertainty_samples / float(n_iterations)
        )))
        # Generate seasonality features once so we can re-use them.
        seasonal_features = self.make_all_seasonality_features(df)

        sim_values = {'yhat': [], 'trend': [], 'seasonal': []}
        for i in range(n_iterations):
            for j in range(samp_per_iter):
                sim = self.sample_model(df, seasonal_features, i)
                for key in sim_values:
                    sim_values[key].append(sim[key])

        lower_p = 100 * (1.0 - self.interval_width) / 2
        upper_p = 100 * (1.0 + self.interval_width) / 2

        series = {}
        for key, value in sim_values.items():
            mat = np.column_stack(value)
            series['{}_lower'.format(key)] = np.nanpercentile(mat, lower_p,
                                                              axis=1)
            series['{}_upper'.format(key)] = np.nanpercentile(mat, upper_p,
                                                              axis=1)

        return pd.DataFrame(series)

    def sample_model(self, df, seasonal_features, iteration):
        trend = self.sample_predictive_trend(df, iteration)

        beta = self.params['beta'][iteration]
        seasonal = np.matmul(seasonal_features.as_matrix(), beta) * self.y_scale

        sigma = self.params['sigma_obs'][iteration]
        noise = np.random.normal(0, sigma, df.shape[0]) * self.y_scale

        return pd.DataFrame({
            'yhat': trend + seasonal + noise,
            'trend': trend,
            'seasonal': seasonal,
        })

    def sample_predictive_trend(self, df, iteration):
        k = self.params['k'][iteration]
        m = self.params['m'][iteration]
        deltas = self.params['delta'][iteration]

        t = np.array(df['t'])
        T = t.max()

        if T > 1:
            # Get the time discretization of the history
            dt = np.diff(self.history['t'])
            dt = np.min(dt[dt > 0])
            # Number of time periods in the future
            N = np.ceil((T - 1) / float(dt))
            S = len(self.changepoints_t)

            prob_change = min(1, (S * (T - 1)) / N)
            n_changes = np.random.binomial(N, prob_change)

            # Sample ts
            changepoint_ts_new = sorted(np.random.uniform(1, T, n_changes))
        else:
            # Case where we're not extrapolating.
            changepoint_ts_new = []
            n_changes = 0

        # Get the empirical scale of the deltas, plus epsilon to avoid NaNs.
        lambda_ = np.mean(np.abs(deltas)) + 1e-8

        # Sample deltas
        deltas_new = np.random.laplace(0, lambda_, n_changes)

        # Prepend the times and deltas from the history
        changepoint_ts = np.concatenate((self.changepoints_t,
                                         changepoint_ts_new))
        deltas = np.concatenate((deltas, deltas_new))

        if self.growth == 'linear':
            trend = self.piecewise_linear(t, deltas, k, m, changepoint_ts)
        else:
            cap = df['cap_scaled']
            trend = self.piecewise_logistic(t, cap, deltas, k, m,
                                            changepoint_ts)

        return trend * self.y_scale

    def make_future_dataframe(self, periods, freq='D', include_history=True):
        last_date = self.history['ds'].max()
        dates = pd.date_range(
            start=last_date,
            periods=periods + 1,  # closed='right' removes a period
            freq=freq,
            closed='right')  # omits the start date

        if include_history:
            dates = np.concatenate((np.array(self.history['ds']), dates))

        return pd.DataFrame({'ds': dates})
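
    # Usage sketch: if the history ends on 2016-12-31, then
    #   m.make_future_dataframe(periods=3, include_history=False)
    # yields a frame whose 'ds' column is 2017-01-01..2017-01-03; with
    # include_history=True the historical dates are prepended.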

    def plot(self, fcst, uncertainty=True, xlabel='ds', ylabel='y'):
        """Plot the Prophet forecast.

        Parameters
        ----------
        fcst: pd.DataFrame output of self.predict.
        uncertainty: Optional boolean to plot uncertainty intervals.
        xlabel: Optional label for the x-axis.
        ylabel: Optional label for the y-axis.

        Returns
        -------
        A matplotlib figure.
        """
        fig = plt.figure(facecolor='w', figsize=(10, 6))
        ax = fig.add_subplot(111)

        ax.plot(self.history['ds'].values, self.history['y'], 'k.')
        ax.plot(fcst['ds'].values, fcst['yhat'], ls='-', c=self.forecast_color)
        if 'cap' in fcst:
            ax.plot(fcst['ds'].values, fcst['cap'], ls='--', c='k')
        if uncertainty:
            ax.fill_between(fcst['ds'].values, fcst['yhat_lower'],
                            fcst['yhat_upper'], color=self.forecast_color,
                            alpha=0.2)
        ax.grid(True, which='major', c='gray', ls='-', lw=1, alpha=0.2)
        ax.set_xlabel(xlabel)
        ax.set_ylabel(ylabel)
        fig.tight_layout()
        return fig

    def plot_components(self, fcst, uncertainty=True):
        """Plot the Prophet forecast components.

        Will plot whichever are available of: trend, holidays, weekly
        seasonality, and yearly seasonality.

        Parameters
        ----------
        fcst: pd.DataFrame output of self.predict.
        uncertainty: Optional boolean to plot uncertainty intervals.

        Returns
        -------
        A list of the matplotlib artists that were plotted.
        """
        # Identify components to be plotted
        components = [('plot_trend', True),
                      ('plot_holidays', self.holidays is not None),
                      ('plot_weekly', 'weekly' in fcst),
                      ('plot_yearly', 'yearly' in fcst)]
        components = [plot for plot, cond in components if cond]

        npanel = len(components)
        fig, axes = plt.subplots(npanel, 1, facecolor='w',
                                 figsize=(9, 3 * npanel))
        if npanel == 1:
            # plt.subplots returns a bare Axes rather than an array of them.
            axes = [axes]

        artists = []
        for ax, plot in zip(axes,
                            [getattr(self, plot) for plot in components]):
            artists += plot(fcst, ax=ax, uncertainty=uncertainty)
        fig.tight_layout()
        return artists

    def plot_trend(self, fcst, ax=None, uncertainty=True, **plotargs):
        artists = []
        if not ax:
            fig = plt.figure(facecolor='w', figsize=(10, 6))
            ax = fig.add_subplot(111)
        artists += ax.plot(fcst['ds'].values, fcst['trend'], ls='-',
                           c=self.forecast_color)
        if 'cap' in fcst:
            artists += ax.plot(fcst['ds'].values, fcst['cap'], ls='--', c='k')
        if uncertainty:
            artists += [ax.fill_between(
                fcst['ds'].values, fcst['trend_lower'], fcst['trend_upper'],
                color=self.forecast_color, alpha=0.2)]
        ax.grid(True, which='major', c='gray', ls='-', lw=1, alpha=0.2)
        ax.xaxis.set_major_locator(MaxNLocator(nbins=7))
        ax.set_xlabel('ds')
        ax.set_ylabel('trend')
        return artists

    def plot_holidays(self, fcst, ax=None, uncertainty=True):
        artists = []
        if not ax:
            fig = plt.figure(facecolor='w', figsize=(10, 6))
            ax = fig.add_subplot(111)
        holiday_comps = self.holidays['holiday'].unique()
        y_holiday = fcst[holiday_comps].sum(1)
        y_holiday_l = fcst[[h + '_lower' for h in holiday_comps]].sum(1)
        y_holiday_u = fcst[[h + '_upper' for h in holiday_comps]].sum(1)
        # NOTE the above CI calculation is incorrect if holidays overlap
        # in time. Since it is just for the visualization we will not
        # worry about it now.
        artists += ax.plot(fcst['ds'].values, y_holiday, ls='-',
                           c=self.forecast_color)
        if uncertainty:
            artists += [ax.fill_between(fcst['ds'].values,
                                        y_holiday_l, y_holiday_u,
                                        color=self.forecast_color, alpha=0.2)]
        ax.grid(True, which='major', c='gray', ls='-', lw=1, alpha=0.2)
        ax.xaxis.set_major_locator(MaxNLocator(nbins=7))
        ax.set_xlabel('ds')
        ax.set_ylabel('holidays')
        return artists

    def plot_weekly(self, fcst, ax=None, uncertainty=True):
        artists = []
        if not ax:
            fig = plt.figure(facecolor='w', figsize=(10, 6))
            ax = fig.add_subplot(111)
        df_s = fcst.copy()
        df_s['dow'] = df_s['ds'].dt.weekday_name
        df_s = df_s.groupby('dow').first()
        days = pd.date_range(start='2017-01-01', periods=7).weekday_name
        y_weekly = [df_s.loc[d]['weekly'] for d in days]
        y_weekly_l = [df_s.loc[d]['weekly_lower'] for d in days]
        y_weekly_u = [df_s.loc[d]['weekly_upper'] for d in days]
        artists += ax.plot(range(len(days)), y_weekly, ls='-',
                           c=self.forecast_color)
        if uncertainty:
            artists += [ax.fill_between(range(len(days)),
                                        y_weekly_l, y_weekly_u,
                                        color=self.forecast_color, alpha=0.2)]
        ax.grid(True, which='major', c='gray', ls='-', lw=1, alpha=0.2)
        ax.set_xticks(range(len(days)))
        ax.set_xticklabels(days)
        ax.set_xlabel('Day of week')
        ax.set_ylabel('weekly')
        return artists

    def plot_yearly(self, fcst, ax=None, uncertainty=True):
        artists = []
        if not ax:
            fig = plt.figure(facecolor='w', figsize=(10, 6))
            ax = fig.add_subplot(111)
        df_s = fcst.copy()
        df_s['doy'] = df_s['ds'].map(lambda x: x.strftime('2000-%m-%d'))
        df_s = df_s.groupby('doy').first().sort_index()
        artists += ax.plot(pd.to_datetime(df_s.index), df_s['yearly'], ls='-',
                           c=self.forecast_color)
        if uncertainty:
            artists += [ax.fill_between(
                pd.to_datetime(df_s.index), df_s['yearly_lower'],
                df_s['yearly_upper'], color=self.forecast_color, alpha=0.2)]
        ax.grid(True, which='major', c='gray', ls='-', lw=1, alpha=0.2)
        months = MonthLocator(range(1, 13), bymonthday=1, interval=2)
        ax.xaxis.set_major_formatter(DateFormatter('%B %-d'))
        ax.xaxis.set_major_locator(months)
        ax.set_xlabel('Day of year')
        ax.set_ylabel('yearly')
        return artists

# fb-block 9
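
# End-to-end sketch (hypothetical 'example.csv' with 'ds' and 'y' columns):
#
#   df = pd.read_csv('example.csv')
#   m = Prophet()
#   m.fit(df)
#   future = m.make_future_dataframe(periods=365)
#   fcst = m.predict(future)
#   m.plot(fcst)
#   m.plot_components(fcst)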