Skip to content
Open

Mapk #11

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
70 changes: 69 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -97,4 +97,72 @@ venv.bak/
*.pdf

# avoid uploading data
notebooks/data/*
notebooks/data/*

# avoid PyCharm files
*.iml
*.xml

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/modules.xml
# .idea/*.iml
# .idea/modules

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
Empty file added mapk_demon/__init__.py
Empty file.
25 changes: 25 additions & 0 deletions mapk_demon/inference.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import pyro
from pyro.infer.mcmc import MCMC
from pyro.infer.mcmc.nuts import HMC


def infer_dist(prog, n_dist, type='mcmc'):
    """Obtain the unique distribution entailed by a SCM program.

    Runs MCMC with an HMC kernel (step_size=0.9, num_steps=4; 1000
    samples after 50 warmup steps) over the program's trace and returns
    the resulting posterior object.

    :param prog: the subroutine encoding the SCM.
    :param n_dist: a dictionary containing distributions for each
        noise object; passed through to ``MCMC.run``.
    :param type: inference method; only ``'mcmc'`` is implemented.
        NOTE: shadows the builtin ``type`` — kept for backward
        compatibility with existing keyword callers.
    :return: the ``pyro`` MCMC posterior object.
    :raises NotImplementedError: for any unsupported ``type`` (the
        previous behavior silently returned 0, hiding the error).
    """
    if type != 'mcmc':
        raise NotImplementedError(
            "inference type {!r} is not supported".format(type))

    hmc_kernel = HMC(prog, step_size=0.9, num_steps=4)
    # .run() executes warmup + sampling and returns the posterior.
    return MCMC(hmc_kernel, num_samples=1000, warmup_steps=50).run(n_dist)
187 changes: 187 additions & 0 deletions mapk_demon/receivers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,187 @@
from __future__ import division

from functools import partial

import pyro
from pyro import sample
from pyro.distributions import Normal, Uniform, Delta

import pyro.optim
import torch


def g1(a):
    """Saturating response a / (a + 1); maps a >= 0 into [0, 1)."""
    denom = a + 1
    return a / denom


def g2(a):
    """Saturating response a**2 / (a**2 + a + 1) with a quadratic numerator."""
    sq = a ** 2
    return sq / (sq + a + 1)


def f(mu, N):
    """Perturb the mean `mu` by the noise term `N` scaled by sqrt(mu)."""
    return mu + N * mu ** 0.5


# Total protein amounts for each cascade layer: t_3k -> MAP3K,
# t_2k -> MAP2K, t_k -> MAPK (used as the scale factor in f_map3k /
# f_map2k / f_mapk below).  Values presumably taken from a reference
# MAPK cascade model -- TODO confirm against the source publication.
hyperparameters = {
    "t_3k": 1.2,
    "t_2k": 1.2,
    "t_k": 0.003
}


def f_map3k(E1, N_3k, params, mode):
    """Sample the steady-state MAP3K level at pyro site "map3k".

    :param E1: upstream enzyme input.
    :param N_3k: noise realisation for this layer.
    :param params: dict of prior distributions; draws alpha_3k then nu_3k.
    :param mode: 'companion' samples Normal(mean, 1.); otherwise Delta(mean).
    """
    # Draw rate parameters (order matters for RNG reproducibility).
    alpha = params['alpha_3k'].rsample()
    nu = params['nu_3k'].rsample()

    mean = hyperparameters['t_3k'] * g1(E1 * (alpha / nu))
    loc = f(mean, N_3k)
    dist = Normal(loc, 1.) if mode == 'companion' else Delta(loc)
    return sample("map3k", dist)


def f_map2k(map3k, N_2k, params, mode):
    """Sample the steady-state MAP2K level at pyro site "map2k".

    :param map3k: activating MAP3K concentration from the layer above.
    :param N_2k: noise realisation for this layer.
    :param params: dict of prior distributions; draws alpha_2k then nu_2k.
    :param mode: 'companion' samples Normal(mean, 1.); otherwise Delta(mean).
    """
    # Draw rate parameters (order matters for RNG reproducibility).
    alpha = params['alpha_2k'].rsample()
    nu = params['nu_2k'].rsample()

    mean = hyperparameters['t_2k'] * g2(map3k * (alpha / nu))
    loc = f(mean, N_2k)
    dist = Normal(loc, 1.) if mode == 'companion' else Delta(loc)
    return sample("map2k", dist)


def f_mapk(map2k, N_k, params, mode):
    """Sample the steady-state MAPK level at pyro site "mapk".

    :param map2k: activating MAP2K concentration from the layer above.
    :param N_k: noise realisation for this layer.
    :param params: dict of prior distributions; draws alpha_k then nu_k.
    :param mode: 'companion' samples Normal(mean, 1.); otherwise Delta(mean).
    """
    # Draw rate parameters (order matters for RNG reproducibility).
    alpha = params['alpha_k'].rsample()
    nu = params['nu_k'].rsample()

    mean = hyperparameters['t_k'] * g2(map2k * (alpha / nu))
    loc = f(mean, N_k)
    dist = Normal(loc, 1.) if mode == 'companion' else Delta(loc)
    return sample("mapk", dist)


def mapk_receiver(noise_dists):
    '''
    Mapk receiver model
    :param noise_dists: dict with entries 'N_3k', 'N_2k', 'N_k' giving the
        noise distribution of each cascade layer
    :return: concentration of map3k, map2k and mapk at steady state
    '''
    mode = 'original'

    # Shared prior hyperparameters for the alpha and nu rate constants.
    al_m, al_s = torch.tensor(700.), torch.tensor(1.)
    nu_m, nu_s = torch.tensor(.15), torch.tensor(.05)

    params = {'alpha_' + layer: Normal(al_m, al_s)
              for layer in ('3k', '2k', 'k')}
    params.update({'nu_' + layer: Normal(nu_m, nu_s)
                   for layer in ('3k', '2k', 'k')})

    with pyro.iarange("model"):
        # Upstream enzyme input and the per-layer noise realisations.
        E1 = Uniform(1.5e-5, 10.).rsample()
        N_3k = sample('N_3k', noise_dists['N_3k'])
        N_2k = sample('N_2k', noise_dists['N_2k'])
        N_k = sample('N_k', noise_dists['N_k'])

        # Propagate the signal down the three-layer cascade.
        map3k = f_map3k(E1, N_3k, params, mode)
        map2k = f_map2k(map3k, N_2k, params, mode)
        mapk = f_mapk(map2k, N_k, params, mode)

    return {
        'map3k': map3k,
        'map2k': map2k,
        'mapk': mapk
    }


def mapk_companion(noise_dists):
    '''
    Companion model that enables inference; identical to mapk_receiver
    except each layer is observed through a Normal(mean, 1.) likelihood
    instead of a Delta.
    :param noise_dists: dict with entries 'N_3k', 'N_2k', 'N_k'
    :return: concentration of map3k, map2k and mapk at steady state
    '''
    mode = 'companion'

    # Shared prior hyperparameters for the alpha and nu rate constants.
    al_m, al_s = torch.tensor(700.), torch.tensor(1.)
    nu_m, nu_s = torch.tensor(.15), torch.tensor(.05)

    params = {'alpha_' + layer: Normal(al_m, al_s)
              for layer in ('3k', '2k', 'k')}
    params.update({'nu_' + layer: Normal(nu_m, nu_s)
                   for layer in ('3k', '2k', 'k')})

    with pyro.iarange("model"):
        # Upstream enzyme input and the per-layer noise realisations.
        E1 = Uniform(1.5e-5, 10.).rsample()
        N_3k = sample('N_3k', noise_dists['N_3k'])
        N_2k = sample('N_2k', noise_dists['N_2k'])
        N_k = sample('N_k', noise_dists['N_k'])

        # Propagate the signal down the three-layer cascade.
        map3k = f_map3k(E1, N_3k, params, mode)
        map2k = f_map2k(map3k, N_2k, params, mode)
        mapk = f_mapk(map2k, N_k, params, mode)

    return {
        'map3k': map3k,
        'map2k': map2k,
        'mapk': mapk
    }


def mapk_do_receiver(do_value,noise_dists):
    '''
    This model validates the results from doing counterfactual inference on
    mapk_receiver: map3k is hard-intervened to `do_value` instead of being
    computed from E1.
    :param do_value: model uses this value to do hard intervention on map3k
    :param noise_dists: N_3k, N_2k, N_k
    :return: the resulting map2k concentration
    '''
    mode = 'original'

    # Shared prior hyperparameters for the alpha and nu rate constants.
    al_m, al_s = torch.tensor(700.), torch.tensor(1.)
    nu_m, nu_s = torch.tensor(.15), torch.tensor(.05)

    params = {'alpha_' + layer: Normal(al_m, al_s)
              for layer in ('3k', '2k', 'k')}
    params.update({'nu_' + layer: Normal(nu_m, nu_s)
                   for layer in ('3k', '2k', 'k')})

    with pyro.iarange("model"):
        E1 = Uniform(1.5e-5, 10.).rsample()
        # N_3k is drawn but unused because map3k is intervened upon;
        # presumably kept so the trace sites match mapk_receiver's --
        # TODO confirm this is intentional.
        N_3k = sample('N_3k', noise_dists['N_3k'])
        N_2k = sample('N_2k', noise_dists['N_2k'])
        N_k = sample('N_k', noise_dists['N_k'])

        # Hard intervention: do(map3k = do_value), then propagate downstream.
        map3k = do_value
        map2k = f_map2k(map3k, N_2k, params, mode)
        mapk = f_mapk(map2k, N_k, params, mode)

    return map2k
Loading