CAR模型从pymc2到PyMC3

时间:2017-04-10 16:43:54

标签: pymc pymc3

我仍然是PyMC3中的菜鸟,所以问题可能是天真的,但我不知道如何在pymc3中翻译这个pymc2代码。特别是我不清楚如何翻译R函数。

# Priors: vague normal on the covariate effect, uniform on the CAR scale s.
beta = pymc.Normal('beta', mu=0, tau=1.0e-4)
s = pymc.Uniform('s', lower=0, upper=1.0e+4)
# Precision derived deterministically from the scale: tau = 1 / s**2.
tau = pymc.Lambda('tau', lambda s=s: s**(-2))

### Intrinsic CAR
@pymc.stochastic
def R(tau=tau, value=np.zeros(N)):
    """Intrinsic CAR spatial random effect.

    Each site's conditional mean is the weighted average of its
    neighbours' values; each site's precision is tau scaled by its total
    neighbour weight.  Returns the joint log-density via pymc.normal_like.

    NOTE(review): relies on module-level W (weights), A (neighbour index
    lists), Wplus (row sums of W) and N (site count) -- confirm against
    the data-preparation code.  `xrange` means this is Python 2 code.
    """
    # Calculate mu based on average of neighbors
    mu = np.array([sum(W[i]*value[A[i]])/Wplus[i] for i in xrange(N)])

    # Scale precision to the number of neighbors
    taux = tau*Wplus
    return pymc.normal_like(value, mu, taux)

@pymc.deterministic
def M(beta=beta, R=R):
    """Poisson mean per site: exp(intercept + spatial random effect)."""
    # NOTE(review): returns a Python list (not an ndarray); `xrange` is
    # Python 2 -- this snippet predates Python 3.
    return [np.exp(beta + R[i]) for i in xrange(N)]

# Poisson likelihood on the observed counts Y, then the pymc2 model
# container.  NOTE(review): R and M are pulled in transitively through
# obsvd's parents; only s, beta and obsvd are listed explicitly.
obsvd = pymc.Poisson("obsvd", mu=M, value=Y, observed=True)
model = pymc.Model([s, beta, obsvd])

来自https://github.com/Youki/statistical-modeling-for-data-analysis-with-python/blob/945c13549a872d869e33bc48082c42efc022a07b/Chapter11/Chapter11.rst的代码和http://glau.ca/?p=340

你能帮帮我吗?感谢

1 个答案:

答案 0(得分:2)

在PyMC3中,您可以使用Theano的 scan 功能来实现CAR模型。官方 documentation 中有示例代码;链接文档中给出了两种CAR实现,以下是第一种 [Source]:


from theano import scan
floatX = "float32"

from pymc3.distributions import continuous
from pymc3.distributions import distribution

class CAR(distribution.Continuous):
    """
    Conditional Autoregressive (CAR) distribution.

    Parameters
    ----------
    a : list of adjacency information
    w : list of weight information
    tau : precision at each location
    """
    def __init__(self, w, a, tau, *args, **kwargs):
        super(CAR, self).__init__(*args, **kwargs)
        self.a = tt.as_tensor_variable(a)
        self.w = tt.as_tensor_variable(w)
        # Per-site precision: tau scaled by each site's total neighbour weight.
        self.tau = tau * tt.sum(self.w, axis=1)
        self.mode = 0.

    def get_mu(self, x):
        """Weighted neighbourhood mean of x at every site, via Theano scan."""

        def site_mean(weights, neighbours):
            idx = tt.cast(neighbours, 'int32')
            return tt.sum(weights * x[idx]) / tt.sum(weights)

        means, _ = scan(fn=site_mean, sequences=[self.w, self.a])
        return means

    def logp(self, x):
        """Total log-density: normals centred on each site's neighbour mean."""
        per_site = continuous.Normal.dist(mu=self.get_mu(x), tau=self.tau).logp(x)
        return tt.sum(per_site)

# Besag-York-Mollie-style spatial Poisson model using the custom CAR class.
# NOTE(review): relies on external data N, wmat, amat, logE, aff, O --
# confirm against the data-preparation code.
with pm.Model() as model1:
    # Vague prior on intercept
    beta0 = pm.Normal('beta0', mu=0.0, tau=1.0e-5)
    # Vague prior on covariate effect
    beta1 = pm.Normal('beta1', mu=0.0, tau=1.0e-5)

    # Random effects (hierarchical) prior
    tau_h = pm.Gamma('tau_h', alpha=3.2761, beta=1.81)
    # Spatial clustering prior
    tau_c = pm.Gamma('tau_c', alpha=1.0, beta=1.0)

    # Regional (unstructured) random effects
    theta = pm.Normal('theta', mu=0.0, tau=tau_h, shape=N)
    # Spatially structured effects drawn from the custom CAR distribution;
    # wmat/amat are presumably the per-site weight and adjacency arrays --
    # TODO confirm their shapes match what CAR.get_mu scans over.
    mu_phi = CAR('mu_phi', w=wmat, a=amat, tau=tau_c, shape=N)

    # Zero-centre phi so the intercept beta0 stays identifiable
    phi = pm.Deterministic('phi', mu_phi-tt.mean(mu_phi))

    # Mean model: log-linear in the offset logE, covariate aff and effects
    mu = pm.Deterministic('mu', tt.exp(logE + beta0 + beta1*aff + theta + phi))

    # Likelihood: observed counts O
    Yi = pm.Poisson('Yi', mu=mu, observed=O)

    # Marginal SD of heterogeneity effects
    sd_h = pm.Deterministic('sd_h', tt.std(theta))
    # Marginal SD of clustering (spatial) effects
    sd_c = pm.Deterministic('sd_c', tt.std(phi))
    # Proportion of spatial variance
    alpha = pm.Deterministic('alpha', sd_c/(sd_h+sd_c))

    # NOTE(review): nuts_kwargs= is the pre-3.7 PyMC3 spelling; newer
    # releases pass target_accept/max_treedepth directly to pm.sample.
    trace1 = pm.sample(1000, tune=500, cores=4,
                       init='advi',
                       nuts_kwargs={"target_accept":0.9,
                                    "max_treedepth": 15})

M函数在这里写为:

mu = pm.Deterministic('mu', tt.exp(logE + beta0 + beta1*aff + theta + phi))