Metropolis-Hastings (MH) 算法首先初始化向量 x 的第一个值(假设我们将使用 MCMC 采样 n=10,000 个 x 值)x[0]。然后在每次迭代中,我们首先使用转移核生成一个候选值 x_cand,并且我们需要计算接受率 alpha 来决定下一个 x 是采用 x_cand 还是保留上一个 x 值。接受率如下所示:
代码如下:
def f(x):
    """Unnormalised target density on [0, 1]: 2*x^2*(1-x)^8*cos^2(4*pi*x)."""
    import math
    polynomial_part = 2 * x ** 2 * (1 - x) ** 8
    oscillation = math.cos(4 * math.pi * x) ** 2
    return polynomial_part * oscillation
def q(x, y):
    """Random-walk proposal density: Normal(mean=y, sd=0.1) evaluated at x."""
    proposal = norm(loc=y, scale=0.1)
    return proposal.pdf(x)
# Metropolis-Hastings random-walk sampler for the target density f on [0, 1].
n = 10000          # number of MCMC samples to draw
prop_scale = 0.1   # proposal std dev; must match the scale hard-coded in q()
x = np.zeros(n)

# Draw the initial state, re-sampling until it lies inside the support.
# (The original drew once from N(0, 0.1), which can start outside [0, 1].)
while True:
    x[0] = norm.rvs(loc=0, scale=prop_scale, size=1)[0]
    if 0 <= x[0] <= 1:
        break

for i in range(n - 1):
    # Propose from a normal centred at the current state; reject proposals
    # outside the support.  (This removes the original's redundant second
    # bounds check — the while-loop already guarantees x_cand is in [0, 1].)
    while True:
        x_cand = norm.rvs(loc=x[i], scale=prop_scale, size=1)[0]
        if 0 <= x_cand <= 1:
            break
    # NOTE(review): re-sampling makes the effective proposal a truncated
    # normal, while the q-ratio below uses the untruncated density —
    # confirm this matches the intended algorithm.
    rho = (q(x[i], x_cand) / q(x_cand, x[i])) * (f(x_cand) / f(x[i]))
    alpha = min(1, rho)
    u = uniform.rvs(loc=0, scale=1, size=1)[0]
    # Accept the candidate with probability alpha, otherwise stay put.
    x[i + 1] = x_cand if u < alpha else x[i]
# Diagnostics for the MH output: marginal histogram and the trace of the chain.
sns.histplot(x)
fig, ax = plt.subplots()
# Use x.size instead of the hard-coded 10000 so the trace plot stays in
# sync with n if the sample count changes.
ax.plot(np.arange(x.size), x)
分层贝叶斯
# --- Hierarchical Bayes: dimensions and chain storage -----------------------
nruns = 10000   # number of Gibbs iterations
K = 100         # number of groups
n = 1000        # observations per group

# BUG FIX: np.zeros(K, n) passes n as the dtype argument and raises
# TypeError — the shape of a 2-D array must be given as a tuple.
y = np.zeros((K, n))               # data matrix — presumably filled elsewhere; verify
lambda_est = np.zeros((K, nruns))  # per-group means, one column per iteration
sigma_est = np.zeros(nruns)        # observation-variance draws
mu_est = np.zeros(nruns)           # hyper-mean draws
tau_est = np.zeros(nruns)          # hyper-variance draws

# Initialise the chain from diffuse distributions.
for i in range(K):
    loc = uniform.rvs(loc=0, scale=10, size=1)[0]
    scale = uniform.rvs(loc=0, scale=0.1, size=1)[0]
    lambda_est[i, 0] = norm.rvs(loc=loc, scale=scale, size=1)[0]
sigma_est[0] = uniform.rvs(loc=0, scale=0.1, size=1)[0]
mu_est[0] = norm.rvs(loc=uniform.rvs(loc=0, scale=10, size=1)[0], scale=1, size=1)[0]
tau_est[0] = uniform.rvs(loc=0, scale=0.1, size=1)[0]
# Gibbs sampler for the hierarchical normal model
#   y[i,j] ~ N(lambda_i, sigma),  lambda_i ~ N(mu, tau).
# Fixes vs. original: undefined `n_runs` (defined as `nruns` above);
# `mean^2` was bitwise-XOR on a float (TypeError), the intent was a
# conjugate-normal update; `range(1, K-1)` skipped groups 0 and K-1;
# `invgamma(loc=.., scale=..)` returned a frozen distribution object
# instead of drawing a sample via `invgamma.rvs`.
for runs in range(1, nruns):
    # Update each group mean lambda_i | rest: conjugate normal posterior.
    # NOTE(review): reconstructed from the garbled original (which mixed
    # the precision and mean terms) — confirm against the model write-up.
    for i in range(K):
        prec = 1.0 / tau_est[runs - 1] + n / sigma_est[runs - 1]
        post_mean = (mu_est[runs - 1] / tau_est[runs - 1]
                     + y[i, :].mean() * n / sigma_est[runs - 1]) / prec
        post_std = math.sqrt(1.0 / prec)
        lambda_est[i, runs] = norm.rvs(loc=post_mean, scale=post_std, size=1)[0]
    # Update sigma | rest: inverse-gamma(K*n/2, SSE/2).  The double Python
    # loop over K*n residuals is replaced by one vectorised numpy reduction.
    sigma_sum_term = ((y - lambda_est[:, runs, None]) ** 2).sum()
    sigma_est[runs] = invgamma.rvs(a=K * n / 2, scale=sigma_sum_term / 2)
    # Update tau | rest: inverse-gamma(K/2, sum of squared deviations / 2).
    tau_sum_term = ((lambda_est[:, runs] - mu_est[runs - 1]) ** 2).sum()
    tau_est[runs] = invgamma.rvs(a=K / 2, scale=tau_sum_term / 2)
    # Update mu | rest: N(mean of current lambdas, tau/K).  The original
    # used the previous iteration's lambdas and tau/2; standard Gibbs uses
    # the freshly drawn values and variance tau/K — confirm intent.
    mu_est[runs] = norm.rvs(loc=lambda_est[:, runs].mean(),
                            scale=math.sqrt(tau_est[runs] / K))