Commit d000f801 authored by Martin Renoult

Update Bayesian_for_LGM_PMIP

parent 3f80d98c
@@ -110,7 +110,7 @@ with Model() as model: # model specifications in PyMC3 are wrapped in a with-statement
     # Define priors
     sigma = HalfCauchy('Sigma', beta=5, testval=1.)
     intercept = Normal('Intercept', 0, sd=1)
-    x_coeff = Normal('Slope', -1, sd=1)
+    x_coeff = Normal('x', -1, sd=1)
     # Define likelihood
     likelihood = Normal('y', mu=intercept + x_coeff * x,
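
For context, the model being edited follows the standard PyMC3 linear-regression pattern; the hunk display cuts the likelihood call short, and in that pattern it continues with sd=sigma, observed=y. A minimal self-contained sketch under that assumption, with toy x and y arrays standing in for the PMIP ensemble values used in the script (hypothetical data, not the author's):

import numpy as np
from pymc3 import Model, Normal, HalfCauchy, sample

# Toy stand-ins for the ensemble values (hypothetical):
# x, e.g. climate sensitivity of each model; y, e.g. simulated LGM cooling
x = np.random.normal(size=20)
y = -1.0 * x + np.random.normal(scale=0.5, size=20)

with Model() as model:
    sigma = HalfCauchy('Sigma', beta=5, testval=1.)
    intercept = Normal('Intercept', 0, sd=1)
    x_coeff = Normal('x', -1, sd=1)
    likelihood = Normal('y', mu=intercept + x_coeff * x,
                        sd=sigma, observed=y)  # assumed continuation of the truncated line
    trace = sample(progressbar=False, draws=2000, cores=4)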
@@ -118,29 +118,13 @@ with Model() as model: # model specifications in PyMC3 are wrapped in a with-statement
     # Inference! 4 jobs in parallel (convergence check)
     # By default, the sampling method is NUTS
-    trace = sample(progressbar=False,njobs=4, draws=2000)
+    trace = sample(progressbar=False, draws=2000, cores=4)
 # Extract the data of the trace
-values_x = trace['Slope']
+values_x = trace['x']
 values_intercept = trace['Intercept']
 values_sigma = trace['Sigma']
 # Compute specific 5-95% and 17-83% interval for slope, intercept and sigma
-x_stats_95 = stats.quantiles(values_x, qlist=(5, 95))
-x_stats_95 = [ v for v in x_stats_95.values()]
-x_stats_33 = stats.quantiles(values_x, qlist=(17, 83))
-x_stats_33 = [ v for v in x_stats_33.values()]
-intercept_stats_95 = stats.quantiles(values_intercept, qlist=(5, 95))
-intercept_stats_95 = [ v for v in intercept_stats_95.values()]
-intercept_stats_33 = stats.quantiles(values_intercept, qlist=(17, 83))
-intercept_stats_33 = [ v for v in intercept_stats_33.values()]
-sigma_stats_95 = stats.quantiles(values_sigma, qlist=(5, 95))
-sigma_stats_95 = [ v for v in sigma_stats_95.values()]
-sigma_stats_33 = stats.quantiles(values_sigma, qlist=(17, 83))
-sigma_stats_33 = [ v for v in sigma_stats_33.values()]
 # Gelman-Rubin test for convergence of the model
 # Values close to 1 for both Gelman-Rubin and BFMI indicate convergence
 # It compares the variance between the chains to the variance inside a chain
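
The comment above refers to PyMC3's built-in diagnostics. A short sketch of how they are typically called in PyMC3 3.x (the exact names are version-dependent; later releases delegate to arviz, where rhat replaces gelman_rubin):

import pymc3 as pm

# Gelman-Rubin (R-hat) per variable: compares between-chain variance to
# within-chain variance; values close to 1 suggest the chains converged
print(pm.gelman_rubin(trace))

# BFMI: energy-based diagnostic for NUTS; values near 1 indicate good
# exploration of the posterior, small values signal biased sampling
print(pm.bfmi(trace))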
@@ -164,11 +148,10 @@ for j in ran:
     predicted_t = values_x * j + values_intercept + np.random.normal(loc=0, scale=values_sigma)
     # Calculate and save the 5-95% interval of the prediction
-    stats_predict_t_90 = stats.quantiles(predicted_t, qlist=(5, 95))
-    stats_predict_t_90 = [ v for v in stats_predict_t_90.values()]
+    stats_predict_t_90 = np.percentile(predicted_t, q=(5,95))
     # Calculate and save the 17-83% interval of the prediction
-    stats_predict_t_66 = stats.quantiles(predicted_t, qlist=(17, 83))
-    stats_predict_t_66 = [ v for v in stats_predict_t_66.values()]
+    stats_predict_t_66 = np.percentile(predicted_t, q=(17,83))
     # Save in a list the intervals for every sample of sensitivity "ran"
     list_predict_t_stats_66.append(stats_predict_t_66)
     list_predict_t_stats_90.append(stats_predict_t_90)
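
The recurring two-lines-to-one change in this commit swaps PyMC3's deprecated stats.quantiles, which returned a dict keyed by quantile (hence the .values() unpacking), for numpy's percentile, which returns an array directly. A minimal illustration with dummy samples:

import numpy as np

samples = np.random.normal(size=1000)  # dummy posterior samples
low, high = np.percentile(samples, q=(5, 95))  # array result, no dict unpacking needed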
@@ -207,10 +190,8 @@ def truncated_cauchy_rvs(loc=0, scale=1, a=-1, b=1, size=None):
 prior_S = truncated_cauchy_rvs(loc=2.5, scale=3, a=1/math.inf, b=math.inf, size=8000)
 # Compute 5-95% and 17-83% prior intervals
-prior_stats_90 = stats.quantiles(prior_S, qlist=(5, 95))
-prior_stats_90 = [ v for v in prior_stats_90.values()]
-prior_stats_66 = stats.quantiles(prior_S, qlist=(17, 83))
-prior_stats_66 = [ v for v in prior_stats_66.values()]
+prior_stats_90 = np.percentile(prior_S, q=(5, 95))
+prior_stats_66 = np.percentile(prior_S, q=(17, 83))
 # Model to generate a single point based on the prior on S
 def gen_mod(alpha, s, beta, error):
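
The body of truncated_cauchy_rvs lies outside the displayed hunk. Note that the a=1/math.inf argument above evaluates to 0.0, so the prior on S is truncated at zero (positive sensitivity only). A minimal sketch of one standard implementation, via inverse-CDF sampling with scipy:

import numpy as np
from scipy import stats

def truncated_cauchy_rvs(loc=0, scale=1, a=-1, b=1, size=None):
    # Map the truncation bounds to CDF space, draw uniformly between them,
    # and invert: this yields Cauchy samples restricted to [a, b]
    ua = stats.cauchy.cdf(a, loc=loc, scale=scale)
    ub = stats.cauchy.cdf(b, loc=loc, scale=scale)
    u = np.random.uniform(ua, ub, size=size)
    return stats.cauchy.ppf(u, loc=loc, scale=scale)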
@@ -229,8 +210,7 @@ T = -2.2
 stdT = 0.4
 gauss_obs = np.random.normal(loc=T, scale=stdT, size=2000)
-obs_stats_90 = stats.quantiles(gauss_obs, qlist=(5, 95))
-obs_stats_90 = [ v for v in obs_stats_90.values()]
+obs_stats_90 = np.percentile(gauss_obs, q=(5, 95))
 # Weight the temperature samples
 weight = likelihood(model_T, T, stdT)
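
The likelihood helper is not shown in this hunk. Given the Gaussian observation defined just above (T = -2.2 with standard deviation 0.4), it is presumably a Gaussian density evaluated at each generated model temperature; a sketch under that assumption:

import numpy as np

def likelihood(model_T, obs_T, obs_std):
    # Gaussian observation model, up to a normalising constant: samples whose
    # generated temperature lies close to the observed cooling get more weight
    return np.exp(-0.5 * ((model_T - obs_T) / obs_std) ** 2)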
@@ -247,11 +227,8 @@ post_std = np.std(posterior)
 #post_stats_33 = stats.hpd(posterior, alpha=0.33)
 # Compute 5-95% and 17-83% posterior intervals
-post_stats_90 = stats.quantiles(posterior, qlist=(5, 95))
-post_stats_90 = [ v for v in post_stats_90.values()]
-post_stats_66 = stats.quantiles(posterior, qlist=(17, 83))
-post_stats_66 = [ v for v in post_stats_66.values()]
+post_stats_90 = np.percentile(posterior, q=(5, 95))
+post_stats_66 = np.percentile(posterior, q=(17, 83))
 #------------------------------------------------------------------------------
 ## Pliocene loading - only use for combining multiple emergent constraints
 ## Require saved files of the pliocene alpha, beta and sigma
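
The step from the likelihood weights to the posterior samples summarised above (post_std, post_stats_90, post_stats_66) is also outside the visible hunks. One common choice, consistent with the weight, prior_S and posterior variables used here, is sampling-importance-resampling; a hypothetical reconstruction, not necessarily the author's exact code:

import numpy as np

# Resample the prior draws in proportion to their likelihood weights;
# the resampled set approximates the posterior distribution of S
p = weight / np.sum(weight)
idx = np.random.choice(len(prior_S), size=len(prior_S), replace=True, p=p)
posterior = prior_S[idx]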
@@ -292,10 +269,8 @@ post_stats_66 = [ v for v in post_stats_66.values()]
 ##post_stats_90_combi = stats.hpd(posterior_combi, alpha=0.1)
 ##post_stats_66_combi = stats.hpd(posterior_combi, alpha=0.33)
 #
-#post_stats_90_combi = stats.quantiles(posterior_combi, qlist=(5, 95))
-#post_stats_90_combi = [ v for v in post_stats_90_combi.values()]
-#post_stats_66_combi = stats.quantiles(posterior_combi, qlist=(17, 83))
-#post_stats_66_combi = [ v for v in post_stats_66_combi.values()]
+#post_stats_90_combi = np.percentile(posterior_combi, q=(5, 95))
+#post_stats_66_combi = np.percentile(posterior_combi, q=(17, 83))
 #
 ## Following lines are esthetic, used for the plot
 #