
Make Stan code Windows-compatible. (#96)

The vector/matrix operations fail to compile on Windows due to Eigen's
incompatibility with the Windows compiler. Here we opt to use a
non-vectorized model for the Windows platform.
Rolando (Max) Espinoza, 8 years ago
commit e33e7c4b37

+ 2 - 1
python/MANIFEST.in

@@ -1,2 +1,3 @@
-include stan/*.stan
+include stan/unix/*.stan
+include stan/win/*.stan
 include LICENSE

+ 7 - 1
python/setup.py

@@ -1,5 +1,6 @@
 import os.path
 import pickle
+import platform
 import sys
 
 from pkg_resources import (
@@ -26,8 +27,13 @@ class BuildPyCommand(build_py):
         target_dir = os.path.join(self.build_lib, 'fbprophet/stan_models')
         self.mkpath(target_dir)
 
+        if platform.platform().startswith('Win'):
+            plat = 'win'
+        else:
+            plat = 'unix'
+
         for model_type in ['linear', 'logistic']:
-            with open('stan/prophet_{}_growth.stan'.format(model_type)) as f:
+            with open('stan/{}/prophet_{}_growth.stan'.format(plat, model_type)) as f:
                 model_code = f.read()
             sm = StanModel(model_code=model_code)
             with open(os.path.join(target_dir, '{}_growth.pkl'.format(model_type)), 'wb') as f:
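For reference, a minimal standalone sketch (not part of the commit) of how the platform check above resolves the model path; the values are illustrative only:

import platform

# platform.platform() typically returns strings such as 'Windows-10-10.0.19041-SP0'
# or 'Linux-5.15.0-x86_64-with-glibc2.35', so the prefix test picks the directory.
plat = 'win' if platform.platform().startswith('Win') else 'unix'
print('stan/{}/prophet_{}_growth.stan'.format(plat, 'linear'))
# e.g. stan/unix/prophet_linear_growth.stan on Linux/macOS

An equivalent, slightly more explicit check would be platform.system() == 'Windows'.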

python/stan/prophet_linear_growth.stan → python/stan/unix/prophet_linear_growth.stan


python/stan/prophet_logistic_growth.stan → python/stan/unix/prophet_logistic_growth.stan


+ 45 - 0
python/stan/win/prophet_linear_growth.stan

@@ -0,0 +1,45 @@
+data {
+  int T;                                // Sample size
+  int<lower=1> K;                       // Number of seasonal vectors
+  real t[T];                            // Day
+  real y[T];                            // Time-series
+  int S;                                // Number of changepoints
+  real A[T, S];                   // Split indicators
+  real t_change[S];                 // Index of changepoints
+  real X[T,K];                // season vectors
+  real<lower=0> sigma;              // scale on seasonality prior
+  real<lower=0> tau;                  // scale on changepoints prior
+}
+
+parameters {
+  real k;                            // Base growth rate
+  real m;                            // offset
+  real delta[S];                       // Rate adjustments
+  real<lower=0> sigma_obs;               // Observation noise (incl. seasonal variation)
+  real beta[K];                    // seasonal vector
+}
+
+transformed parameters {
+  real gamma[S];                  // adjusted offsets, for piecewise continuity
+
+  for (i in 1:S) {
+    gamma[i] = -t_change[i] * delta[i];
+  }
+}
+
+model {
+  real Y[T];
+
+  //priors
+  k ~ normal(0, 5);
+  m ~ normal(0, 5);
+  delta ~ double_exponential(0, tau);
+  sigma_obs ~ normal(0, 0.5);
+  beta ~ normal(0, sigma);
+
+  // Likelihood
+  for (i in 1:T) {
+    Y[i] = (dot_product(A[i], delta) + k) * t[i] + (dot_product(A[i], gamma) + m) + dot_product(X[i], beta);
+  }
+  y ~ normal(Y, sigma_obs);
+}
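The likelihood above is written with explicit loops and dot_product over real arrays rather than vector/matrix types, which is the Windows workaround described in the commit message. Below is a hedged NumPy sketch of the same trend computation, using hypothetical toy values (none of these numbers come from the commit):

import numpy as np

# Hypothetical toy inputs with the shapes declared in the data block above.
T, S, K = 5, 2, 3
t = np.linspace(0.0, 1.0, T)                           # day
t_change = np.array([0.3, 0.6])                        # changepoint locations
A = (t[:, None] >= t_change).astype(float)             # split indicators, T x S
X = np.zeros((T, K))                                   # season vectors
k, m = 0.5, 0.1                                        # base growth rate and offset
delta = np.array([0.2, -0.1])                          # rate adjustments
beta = np.zeros(K)                                     # seasonal coefficients

gamma = -t_change * delta                              # adjusted offsets (transformed parameters)
Y = (A @ delta + k) * t + (A @ gamma + m) + X @ beta   # expected value of y per observation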

+ 57 - 0
python/stan/win/prophet_logistic_growth.stan

@@ -0,0 +1,57 @@
+data {
+  int T;                                // Sample size
+  int<lower=1> K;                       // Number of seasonal vectors
+  real t[T];                            // Day
+  real cap[T];                          // Capacities
+  real y[T];                            // Time-series
+  int S;                                // Number of changepoints
+  real A[T, S];                   // Split indicators
+  real t_change[S];                 // Index of changepoints
+  real X[T,K];                    // season vectors
+  real<lower=0> sigma;              // scale on seasonality prior
+  real<lower=0> tau;                  // scale on changepoints prior
+}
+
+parameters {
+  real k;                            // Base growth rate
+  real m;                            // offset
+  real delta[S];                       // Rate adjustments
+  real<lower=0> sigma_obs;               // Observation noise (incl. seasonal variation)
+  real beta[K];                    // seasonal vector
+}
+
+transformed parameters {
+  real gamma[S];                  // adjusted offsets, for piecewise continuity
+  real k_s[S + 1];                 // actual rate in each segment
+  real m_pr;
+
+  // Compute the rate in each segment
+  k_s[1] = k;
+  for (i in 1:S) {
+    k_s[i + 1] = k_s[i] + delta[i];
+  }
+
+  // Piecewise offsets
+  m_pr = m; // The offset in the previous segment
+  for (i in 1:S) {
+    gamma[i] = (t_change[i] - m_pr) * (1 - k_s[i] / k_s[i + 1]);
+    m_pr = m_pr + gamma[i];  // update for the next segment
+  }
+}
+
+model {
+  real Y[T];
+
+  //priors
+  k ~ normal(0, 5);
+  m ~ normal(0, 5);
+  delta ~ double_exponential(0, tau);
+  sigma_obs ~ normal(0, 0.1);
+  beta ~ normal(0, sigma);
+
+  // Likelihood
+  for (i in 1:T) {
+    Y[i] = cap[i] / (1 + exp(-(k + dot_product(A[i], delta)) * (t[i] - (m + dot_product(A[i], gamma))))) + dot_product(X[i], beta);
+  }
+  y ~ normal(Y, sigma_obs);
+}
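As with the linear model, the logistic likelihood is expressed element-wise with dot_product instead of matrix algebra. A hedged NumPy sketch of the piecewise-logistic trend computed in the transformed parameters and model blocks, again with hypothetical toy values:

import numpy as np

# Hypothetical toy inputs with the shapes declared in the data block above.
T, S, K = 5, 2, 3
t = np.linspace(0.0, 1.0, T)
cap = np.full(T, 10.0)                                 # capacities
t_change = np.array([0.3, 0.6])
A = (t[:, None] >= t_change).astype(float)             # split indicators, T x S
X = np.zeros((T, K))
k, m = 1.0, 0.2
delta = np.array([0.5, -0.3])
beta = np.zeros(K)

# Rate in each segment (k_s) and offsets (gamma) keeping the trend continuous.
k_s = np.concatenate(([k], k + np.cumsum(delta)))
gamma = np.zeros(S)
m_pr = m                                               # offset in the previous segment
for i in range(S):
    gamma[i] = (t_change[i] - m_pr) * (1.0 - k_s[i] / k_s[i + 1])
    m_pr += gamma[i]                                   # update for the next segment

# Expected value of y: saturating logistic trend plus seasonality.
Y = cap / (1.0 + np.exp(-(k + A @ delta) * (t - (m + A @ gamma)))) + X @ beta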