Skip to content
Snippets Groups Projects
Commit a7dff392 authored by Martin Renoult's avatar Martin Renoult
Browse files

Update Bayesian_for_LGM_PMIP

parent a901d57a
No related branches found
No related tags found
No related merge requests found
......@@ -38,27 +38,27 @@ ub_66 = list()
# In the following order:
# PMIP2[MIROC, IPSL, CCSM, ECHAM, FGOALS, HadCM3, ECBILT]
# PMIP3[NCAR-CCSM4, IPSL-CM5A-LR, MIROC-ESM, MPI-ESM-P, CNRM-CM5, MRI-CGCM3, FGOALS-g2]
# PMIP4[MPI-ESM1.2-LR, MIROC-ES2L]
# PMIP4[MPI-ESM1.2-LR, MIROC-ES2L, INM-CM4-8, AWI-ESM-1.1-LR]
# Data
#x = [4,4.4,2.7,3.4,2.3,3.3,1.8,
# 3.2,4.13,4.67,3.45,3.25,2.6,3.37,
# 3.01, 2.66]
# 3.01, 2.66, 1.81, 3.61]
#
#y = [-2.74899797,-2.82983243,-2.11649457,-3.15521371,-2.35793821,-2.77644642,-1.33816631,
# -2.5957031, -3.3755188, -2.5169983, -2.5567322, -1.6669922, -2.81839, -3.1465664,
# -2.0607605,-2.2322693]
# -2.0607605,-2.2322693,-2.4320984,-1.7489014]
# Latest model versions approach
#x = [3.3,1.8,
# 3.2,4.13,3.25,2.6,3.37,
# 3.01, 2.66]
# 3.01, 2.66, 1.81, 3.61]
#
#y = [-2.77644642,-1.33816631,
# -2.5957031, -3.3755188, -1.6669922, -2.81839, -3.1465664,
# -2.0607605,-2.2322693]
# -2.0607605,-2.2322693,-2.4320984,-1.7489014]
# PMIP2 only
#x = [4,4.4,2.7,3.4,2.3,3.3,1.8]
......@@ -83,7 +83,7 @@ y = [-2.74899797,-2.82983243,-2.11649457,-3.15521371,-2.35793821,-2.77644642,-1.
#
##plt.plot(x[0:7], y[0:7], '.', label='PMIP2', markersize=15, color='#0066ff',mec='#25097C')
##plt.plot(x[7:14], y[7:14], '.', label='PMIP3',markersize=15, color='#ff9933',mec='#9C590C')
##plt.plot(x[14:16], y[14:16], '.', label='PMIP4',markersize=15, color='#ff33cc',mec='#990073')
##plt.plot(x[14:18], y[14:18], '.', label='PMIP4',markersize=15, color='#ff33cc',mec='#990073')
#
#plt.xlim(-1, 6)
#plt.ylim(-8, 1)
......@@ -116,11 +116,11 @@ with Model() as model: # model specifications in PyMC3 are wrapped in a with-sta
likelihood = Normal('y', mu=intercept + x_coeff * x,
sd=sigma, observed=y)
# Inference! 4 jobs in parallel (convergence check)
# 4 chains in parallel
# By default, the sampling method is NUTS
# The following line will not work with PyMC3 older than 3.8. If you use an
# older version, replace "cores=4" by "njobs=4"
trace = sample(progressbar=False, draws=2000, cores=4)
trace = sample(progressbar=False, draws=100000, cores=4)
# Extract the data of the trace
values_x = trace['x']
......@@ -140,6 +140,7 @@ values_sigma = trace['Sigma']
#------------------------------------------------------------------------------
## Predicted temperature calculation
## Create a predicted ensemble for 5-95% estimate
# Discrete sample of sensitivity
ran = np.linspace(0, 10, 500)
......@@ -162,14 +163,8 @@ for j in ran:
## Bayesian framework
# prior on sensitivity
#prior_S = np.random.uniform(0, 10, size=800000)
#prior_S = stat.halfcauchy.rvs(loc=0, scale=5, size=8000)
#prior_S = stat.cauchy.rvs(loc=0, scale=5, size=8000)
#prior_S = stat.gamma.rvs(a=2, loc=0, scale=2, size=800000)
#prior_S = stat.gamma.rvs(a=1, loc=0, scale=5, size=8000)
#prior_S = stat.wald.rvs(loc=0, scale=5, size=8000)
#prior_S = stat.chi2.rvs(df=1.2, loc=0, scale=5, size=8000)
#prior_S = stat.lognorm.rvs(s=1, loc=0, scale=5, size=8000)
#prior_S = np.random.uniform(0, 10, size=400000)
#prior_S = stat.gamma.rvs(a=2, loc=0, scale=2, size=400000)
def truncated_cauchy_rvs(loc=0, scale=1, a=-1, b=1, size=None):
"""
......@@ -189,7 +184,7 @@ def truncated_cauchy_rvs(loc=0, scale=1, a=-1, b=1, size=None):
return rvs
# Truncated-at-zero Cauchy distribution
prior_S = truncated_cauchy_rvs(loc=2.5, scale=3, a=1/math.inf, b=math.inf, size=8000)
prior_S = truncated_cauchy_rvs(loc=2.5, scale=3, a=1/math.inf, b=math.inf, size=400000)
# Compute 5-95% and 17-83% prior intervals
prior_stats_90 = np.percentile(prior_S, q=(5, 95))
......@@ -199,7 +194,7 @@ prior_stats_66 = np.percentile(prior_S, q=(17, 83))
def gen_mod(alpha, s, beta, error):
    """Generate model temperatures from the emergent-constraint regression.

    Computes the linear response ``alpha * s + beta`` and perturbs it with a
    single Gaussian noise draw of scale ``error`` (regression residual spread).
    ``alpha``/``beta``/``error`` are typically posterior samples of the
    regression slope/intercept/sigma, and ``s`` a climate-sensitivity sample.
    """
    noise = np.random.normal(loc=0, scale=error)
    linear_response = alpha * s + beta
    return linear_response + noise
# Likelihood gaussian function
# Likelihood estimate
def likelihood(sim, obs, std):
    """Gaussian likelihood of simulated value(s) given an observation.

    Evaluates the normal density centred on ``obs`` with standard deviation
    ``std`` at the simulated point(s) ``sim``; used as the importance-sampling
    weight for each prior sample.
    """
    density = stat.norm.pdf(x=sim, loc=obs, scale=std)
    return density
......@@ -210,82 +205,23 @@ model_T = gen_mod(values_x, prior_S, values_intercept, values_sigma)
# Tropical T
T = -2.2
stdT = 0.4
gauss_obs = np.random.normal(loc=T, scale=stdT, size=2000)
gauss_obs = np.random.normal(loc=T, scale=stdT, size=800000)
obs_stats_90 = np.percentile(gauss_obs, q=(5, 95))
# Weight the temperature samples
# Create weights through importance sampling
weight = likelihood(model_T, T, stdT)
weight = weight/weight.sum()
# Bayesian updating of the prior!
posterior = np.random.choice(prior_S, size=8000, p=weight)
# Bayesian updating of the prior with importance sampling weight
posterior = np.random.choice(prior_S, size=100000, p=weight)
post_median = np.median(posterior)
post_std = np.std(posterior)
# Compute 90% minimum width interval. Careful, different than 5-95%
#post_stats_95 = stats.hpd(posterior, alpha=0.1)
#post_stats_33 = stats.hpd(posterior, alpha=0.33)
# Compute 5-95% and 17-83% posterior intervals
post_stats_90 = np.percentile(posterior, q=(5, 95))
post_stats_66 = np.percentile(posterior, q=(17, 83))
#------------------------------------------------------------------------------
## Pliocene loading - only use for combining multiple emergent constraints
## Require saved files of the pliocene alpha, beta and sigma
#
#x_plio = pd.read_csv('valuesx.csv', header=None)
#intercept_plio = pd.read_csv('valuesintercept.csv', header=None)
#sigma_plio = pd.read_csv('valuessigma.csv', header=None)
#
#x_plio = np.float64(x_plio[0])
#intercept_plio = np.float64(intercept_plio[0])
#sigma_plio = np.float64(sigma_plio[0])
#
### Bayesian framework for Pliocene
#
## prior on sensitivity
#prior_S_plio = posterior
#
## Generate temperatures
#model_T_plio = gen_mod(x_plio, prior_S_plio, intercept_plio, sigma_plio)
#
## "Real" observed data
## Tropical T
#T_plio = 0.8
#stdT_plio = 1
#
#gauss_obs_plio = np.random.normal(loc=T_plio, scale=stdT_plio, size=200000)
#
## Weight the temperature samples
#weight_plio = likelihood(model_T_plio, T_plio, stdT_plio)
#weight_plio = weight_plio/weight_plio.sum()
#
## Bayesian updating of the prior!
#posterior_combi = np.random.choice(prior_S_plio, size=200000, p=weight_plio)
#
#post_median_combi = np.median(posterior_combi)
##post_std = np.std(posterior)
#
##post_stats_90_combi = stats.hpd(posterior_combi, alpha=0.1)
##post_stats_66_combi = stats.hpd(posterior_combi, alpha=0.33)
#
#post_stats_90_combi = np.percentile(posterior_combi, q=(5, 95))
#post_stats_66_combi = np.percentile(posterior_combi, q=(17, 83))
#
## Following lines are aesthetic only, used for the plot
#
#pliopriortest = stat.halfcauchy.rvs(loc=0, scale=5, size=800000)
#model_T_plio_test = gen_mod(x_plio, pliopriortest, intercept_plio, sigma_plio)
#gauss_obs_plio_test = np.random.normal(loc=T_plio, scale=stdT_plio, size=200000)
## Weight the temperature samples
#weight_plio_test = likelihood(model_T_plio_test, T_plio, stdT_plio)
#weight_plio_test = weight_plio_test/weight_plio_test.sum()
#
## Bayesian inference! (and save the results)
#posterior_plio = np.random.choice(pliopriortest, size=200000, p=weight_plio_test)
#------------------------------------------------------------------------------
## Plot part
## 1st plot: Trace plot
......@@ -358,12 +294,12 @@ plt.axhline(y=-0.15, xmin=(-1/(xlim[0]-xlim[1]))-(post_median/(xlim[0]-xlim[1]))
# All PMIP models
#plt.plot(x[0:7], y[0:7], '.', label='PMIP2', markersize=17, color='#0066ff',mec='#25097C')
#plt.plot(x[7:14], y[7:14], '.', label='PMIP3',markersize=17, color='#ff9933',mec='#9C590C')
#plt.plot(x[14:16], y[14:16], '.', label='PMIP4',markersize=17, color='#ff33cc',mec='#990073')
#plt.plot(x[14:18], y[14:18], '.', label='PMIP4',markersize=17, color='#ff33cc',mec='#990073')
# Current latest versions
#plt.plot(x[0:2], y[0:2], '.', label='PMIP2', markersize=17, color='#0066ff',mec='#25097C')
#plt.plot(x[2:7], y[2:7], '.', label='PMIP3',markersize=17, color='#ff9933',mec='#9C590C')
#plt.plot(x[7:9], y[7:9], '.', label='PMIP4',markersize=17, color='#ff33cc',mec='#990073')
#plt.plot(x[7:11], y[7:11], '.', label='PMIP4',markersize=17, color='#ff33cc',mec='#990073')
# Isolated PMIP3
plt.plot(x[0:7], y[0:7], '.', label='PMIP3',markersize=17, color='#ff9933',mec='#9C590C')
......@@ -373,7 +309,7 @@ texts = [plt.text(x[i], y[i], '%s' %(i+6), ha='center', va='center', fontsize=15
adjust_text(texts)
#texts2 = [plt.text(x[i], y[i], '%s' %(i+8), ha='center', va='center', fontsize=15) for i in range(4, 7)]
#adjust_text(texts2)
#texts3 = [plt.text(x[i], y[i], '%s' %(i+17), ha='center', va='center', fontsize=15) for i in range(7, 9)]
#texts3 = [plt.text(x[i], y[i], '%s' %(i+17), ha='center', va='center', fontsize=15) for i in range(7, 11)]
#adjust_text(texts3)
# Make it pretty
......@@ -418,7 +354,7 @@ plt.plot(x, cauchy, '-', color='darkorange', label='Prior',linewidth=4, linestyl
#plt.plot(x, uniform, '-', color='darkorange', label='Prior',linewidth=4, linestyle='--')
#plt.plot(x, gamma, '-', color='darkorange', label='Prior',linewidth=4, linestyle='--')
# Fit function. Doesn't work well sometimes, will be fixed...
# Fit function. Doesn't work well sometimes...
# Changing the value of alpha plot the true histogram behind
def fit_function(x, A, beta, B, mu, sigma):
    """Exponential-decay plus Gaussian curve used to smooth posterior histograms.

    Returns ``A * exp(-x / beta) + B * exp(-(x - mu)^2 / (2 sigma^2))``:
    a decaying-exponential tail with amplitude ``A`` and scale ``beta``,
    plus a Gaussian bump of amplitude ``B`` centred at ``mu`` with width
    ``sigma``. Fitted to histogram bin centres via ``curve_fit``.
    """
    tail = A * np.exp(-x / beta)
    bump = B * np.exp(-((x - mu) ** 2) / (2 * sigma ** 2))
    return tail + bump
......@@ -467,73 +403,4 @@ plt.xticks(ticks=np.arange(1,9,1), fontsize=14)
plt.yticks([0.1, 0.2, 0.3, 0.4, 0.5], fontsize=14)
plt.legend(loc='upper right', edgecolor='k')
plt.tight_layout()
#plt.savefig('Posterior_PMIP.pdf', dpi=300)
#-------------------------------
## 4th plot: Combining multiple constraints
#fig, ax = plt.subplots(figsize=(7,7))
#
# Fit function. Doesn't work well sometimes, will be fixed...
# Changing the value of alpha plot the true histogram behind
#ncombi, binscombi, patchescombi = plt.hist(posterior_combi, density=True, bins=100, alpha=0)
#
#nlgm, binslgm, patcheslgm = plt.hist(posterior, density=True, bins=500, alpha=0)
#nplio, binsplio, patchesplio = plt.hist(posterior_plio, density=True, bins=500, alpha=0)
#
#binscenterscombi = np.array([0.5 * (binscombi[ijk] + binscombi[ijk+1]) for ijk in range(len(binscombi)-1)])
#poptcombi, pcovcombi = curve_fit(fit_function, xdata=binscenterscombi, ydata=ncombi)
#plt.plot(xspace, fit_function(xspace, *poptcombi), color='darkorange', linewidth=4, label='Combined Posterior')
#
#binscenterslgm = np.array([0.5 * (binslgm[kij] + binslgm[kij+1]) for kij in range(len(binslgm)-1)])
#poptlgm, pcovlgm = curve_fit(fit_function, xdata=binscenterslgm, ydata=nlgm)
#plt.plot(xspace, fit_function(xspace, *poptlgm), color='#6699ff', linewidth=4, label='LGM Posterior')
#
#binscentersplio = np.array([0.5 * (binsplio[jik] + binsplio[jik+1]) for jik in range(len(binsplio)-1)])
#poptplio, pcovplio = curve_fit(fit_function, xdata=binscentersplio, ydata=nplio)
#plt.plot(xspace, fit_function(xspace, *poptplio), color='#666699', linewidth=4, label='Pliocene Posterior')
#
#ylim = plt.ylim(-0.02, 0.5)
#xlim = plt.xlim(-1,8)
#
#plt.xlabel('Climate sensitivity (K)', fontsize=16)
#plt.ylabel('Probability', fontsize=16)
#
#plt.axhline(y=0.008, xmin=(-1/(xlim[0]-xlim[1]))-(post_stats_90_combi[0]/(xlim[0]-xlim[1])),
# xmax=(-1/(xlim[0]-xlim[1]))-(post_stats_90_combi[1]/(xlim[0]-xlim[1])), c='#9933ff', label='90% estimate', linewidth=2)
#plt.axhline(y=0.008, xmin=(-1/(xlim[0]-xlim[1]))-(post_stats_90_combi[0]/(xlim[0]-xlim[1])),
# xmax=(-1/(xlim[0]-xlim[1]))-(post_stats_90_combi[0]/(xlim[0]-xlim[1])), marker='<', c='#9933ff')
#plt.axhline(y=0.008, xmin=(-1/(xlim[0]-xlim[1]))-(post_stats_90_combi[1]/(xlim[0]-xlim[1])),
# xmax=(-1/(xlim[0]-xlim[1]))-(post_stats_90_combi[1]/(xlim[0]-xlim[1])), marker='>', c='#9933ff')
#
#plt.axhline(y=0.008, xmin=(-1/(xlim[0]-xlim[1]))-(post_median_combi/(xlim[0]-xlim[1])),
# xmax=(-1/(xlim[0]-xlim[1]))-(post_median_combi/(xlim[0]-xlim[1])), c='#9933ff', marker='.', ms=12)
#
#plt.axhline(y=0.02, xmin=(-1/(xlim[0]-xlim[1]))-(post_stats_66_combi[0]/(xlim[0]-xlim[1])),
# xmax=(-1/(xlim[0]-xlim[1]))-(post_stats_66_combi[1]/(xlim[0]-xlim[1])), linestyle=':', c='#9933ff', label='66% estimate', linewidth=2)
#plt.axhline(y=0.02, xmin=(-1/(xlim[0]-xlim[1]))-(post_stats_66_combi[0]/(xlim[0]-xlim[1])),
# xmax=(-1/(xlim[0]-xlim[1]))-(post_stats_66_combi[0]/(xlim[0]-xlim[1])), marker='<', c='#9933ff')
#plt.axhline(y=0.02, xmin=(-1/(xlim[0]-xlim[1]))-(post_stats_66_combi[1]/(xlim[0]-xlim[1])),
# xmax=(-1/(xlim[0]-xlim[1]))-(post_stats_66_combi[1]/(xlim[0]-xlim[1])), marker='>', c='#9933ff')
#
#plt.axhline(y=0.02, xmin=(-1/(xlim[0]-xlim[1]))-(post_median_combi/(xlim[0]-xlim[1])),
# xmax=(-1/(xlim[0]-xlim[1]))-(post_median_combi/(xlim[0]-xlim[1])), c='#9933ff', marker='.', ms=12)
#
#ax.spines['top'].set_visible(False)
#ax.spines['right'].set_visible(False)
#ax.spines['bottom'].set_position(('data', 0))
#ax.spines['left'].set_position(('data', 0))
#ax.spines['left'].set_linewidth(2)
#ax.spines['bottom'].set_linewidth(2)
#ax.tick_params('x', width=2)
#ax.tick_params('y', width=2)
#plt.xticks(ticks=np.arange(1,9,1),fontsize=14)
#plt.yticks([0.1, 0.2, 0.3, 0.4,0.5], fontsize=14)
#plt.legend(loc='upper right', edgecolor='k')
#plt.tight_layout()
#plt.savefig('Combi.pdf', dpi=300)
#
## Text saving if necessary
#np.savetxt('posterior_pmip23.csv', posterior)
##np.savetxt('valuesintercept.csv', values_intercept)
##np.savetxt('valuessigma.csv', values_sigma)
\ No newline at end of file
#plt.savefig('Posterior_PMIP.pdf', dpi=300)
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment