Skip to content

Commit ffa9f36

Browse files
author
perdaug
committed
[MaxVar, Part 1] Added the general Gaussian noise model.
1 parent 54e62b2 commit ffa9f36

8 files changed

Lines changed: 155 additions & 52 deletions

File tree

CHANGELOG.rst

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,11 @@
11
Changelog
22
=========
33

0.x
---

- Added the general Gaussian noise example model (fixed covariance)
49
0.6.2 (2017-09-06)
510
------------------
611

elfi/examples/bignk.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ def BiGNK(a1, a2, b1, b2, g1, g2, k1, k2, rho, c=.8, n_obs=150, batch_size=1,
9898
term_product_misaligned = np.swapaxes(term_product, 1, 0)
9999
y_misaligned = np.add(a, term_product_misaligned)
100100
y = np.swapaxes(y_misaligned, 1, 0)
101-
# print(y.shape)
101+
102102
return y
103103

104104

elfi/examples/gauss.py

Lines changed: 104 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,30 +1,81 @@
1-
"""An example implementation of a Gaussian noise model."""
1+
"""Example implementations of Gaussian noise models."""
22

33
from functools import partial
44

55
import numpy as np
66
import scipy.stats as ss
77

88
import elfi
9+
from elfi.examples.gnk import euclidean_multidim
910

1011

def gauss(mu, sigma, n_obs=50, batch_size=1, random_state=None):
    """Sample the 1-D Gaussian distribution.

    Parameters
    ----------
    mu : float, array_like
        Mean; one value per batch member.
    sigma : float, array_like
        Standard deviation; one value per batch member.
    n_obs : int, optional
        Number of observations per batch member.
    batch_size : int, optional
    random_state : np.random.RandomState, optional

    Returns
    -------
    y_obs : array_like
        1-D observations of shape (batch_size, n_obs).

    """
    # Reshape the parameters into one column per batch member so that the
    # (batch_size, n_obs) draw below broadcasts a (mu, sigma) pair per row.
    mu_col = np.asanyarray(mu).reshape((-1, 1))
    sigma_col = np.asanyarray(sigma).reshape((-1, 1))

    # Sampling the observations.
    return ss.norm.rvs(loc=mu_col,
                       scale=sigma_col,
                       size=(batch_size, n_obs),
                       random_state=random_state)
def gauss_nd_mean(*mu, cov_matrix, n_obs=15, batch_size=1, random_state=None):
    """Sample an n-D Gaussian distribution with a fixed covariance.

    Parameters
    ----------
    *mu : array_like
        Mean parameters, one per dimension.
    cov_matrix : array_like
        Covariance matrix shared by all batch members.
    n_obs : int, optional
        Number of observations per batch member.
    batch_size : int, optional
    random_state : np.random.RandomState, optional

    Returns
    -------
    y_obs : array_like
        n-D observations of shape (batch_size, n_obs, n_dim).

    """
    n_dim = len(mu)

    # Replicate the mean vector and the covariance matrix across the batch.
    means = np.zeros(shape=(batch_size, n_dim))
    for dim, mu_dim in enumerate(mu):
        means[:, dim] = mu_dim
    covs = np.zeros(shape=(batch_size, n_dim, n_dim))
    for batch in range(batch_size):
        covs[batch, :, :] = cov_matrix

    # Draw n_obs samples for every batch member.
    y_obs = np.zeros(shape=(batch_size, n_obs, n_dim))
    for batch in range(batch_size):
        draws = ss.multivariate_normal.rvs(mean=means[batch],
                                           cov=covs[batch],
                                           size=n_obs,
                                           random_state=random_state)
        # In the 1-D case rvs drops the trailing axis; restore it.
        if n_dim == 1:
            draws = draws[:, np.newaxis]
        y_obs[batch, :, :] = draws
    return y_obs
2879

2980

3081
def ss_mean(x):
@@ -39,36 +90,65 @@ def ss_var(x):
3990
return ss
4091

4192

def get_model(n_obs=50, true_params=None, seed_obs=None, nd_mean=False, cov_matrix=None):
    """Return a Gaussian noise model.

    Parameters
    ----------
    n_obs : int, optional
    true_params : list, optional
        Default parameter settings.
    seed_obs : int, optional
        Seed for the observed data generation.
    nd_mean : bool, optional
        Option to use an n-D mean Gaussian noise model.
    cov_matrix : None, optional
        Covariance matrix, a requirement for the nd_mean model.

    Returns
    -------
    m : elfi.ElfiModel

    """
    # Defining the default settings.
    if true_params is None:
        true_params = [4, 4] if nd_mean else [4, .4]  # 2-D mean, or mean and std.

    # Choosing the simulator for both observations and simulations.
    if nd_mean:
        sim_fn = partial(gauss_nd_mean, cov_matrix=cov_matrix, n_obs=n_obs)
    else:
        sim_fn = partial(gauss, n_obs=n_obs)

    # Obtaining the observations.
    y_obs = sim_fn(*true_params, n_obs=n_obs, random_state=np.random.RandomState(seed_obs))

    m = elfi.ElfiModel()

    # Initialising the priors.
    priors = []
    if nd_mean:
        for dim in range(len(true_params)):
            priors.append(elfi.Prior('uniform', 0, 8, model=m, name='mu_{}'.format(dim)))
    else:
        priors.append(elfi.Prior('uniform', 0, 8, model=m, name='mu'))
        priors.append(elfi.Prior('truncnorm', 0.01, 5, model=m, name='sigma'))
    elfi.Simulator(sim_fn, *priors, observed=y_obs, name='gauss')

    # Initialising the summary statistics.
    sumstats = [elfi.Summary(ss_mean, m['gauss'], name='ss_mean'),
                elfi.Summary(ss_var, m['gauss'], name='ss_var')]

    # Choosing the discrepancy metric.
    if nd_mean:
        elfi.Discrepancy(euclidean_multidim, *sumstats, name='d')
    else:
        elfi.Distance('euclidean', *sumstats, name='d')

    return m

elfi/examples/gnk.py

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -134,11 +134,11 @@ def euclidean_multidim(*simulated, observed):
134134
array_like
135135
136136
"""
137-
pts_sim = np.column_stack(simulated)
138-
pts_obs = np.column_stack(observed)
139-
d_multidim = np.sum((pts_sim - pts_obs)**2., axis=1)
140-
d_squared = np.sum(d_multidim, axis=1)
141-
d = np.sqrt(d_squared)
137+
pts_sim = np.stack(simulated, axis=1)
138+
pts_obs = np.stack(observed, axis=1)
139+
d_ss_merged = np.sum((pts_sim - pts_obs)**2., axis=1)
140+
d_dim_merged = np.sum(d_ss_merged, axis=1)
141+
d = np.sqrt(d_dim_merged)
142142

143143
return d
144144

@@ -185,8 +185,8 @@ def ss_robust(y):
185185
ss_g = _get_ss_g(y)
186186
ss_k = _get_ss_k(y)
187187

188-
ss_robust = np.stack((ss_a, ss_b, ss_g, ss_k), axis=1)
189-
188+
# Combining the summary statistics by expanding the dimensionality.
189+
ss_robust = np.hstack((ss_a, ss_b, ss_g, ss_k))
190190
return ss_robust
191191

192192

@@ -209,7 +209,8 @@ def ss_octile(y):
209209
octiles = np.linspace(12.5, 87.5, 7)
210210
E1, E2, E3, E4, E5, E6, E7 = np.percentile(y, octiles, axis=1)
211211

212-
ss_octile = np.stack((E1, E2, E3, E4, E5, E6, E7), axis=1)
212+
# Combining the summary statistics by expanding the dimensionality.
213+
ss_octile = np.hstack((E1, E2, E3, E4, E5, E6, E7))
213214

214215
return ss_octile
215216

elfi/methods/post_processing.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -242,8 +242,8 @@ def adjust_posterior(sample, model, summary_names, parameter_names=None, adjustm
242242
>>> import elfi
243243
>>> from elfi.examples import gauss
244244
>>> m = gauss.get_model()
245-
>>> res = elfi.Rejection(m['d'], output_names=['S1', 'S2']).sample(1000)
246-
>>> adj = adjust_posterior(res, m, ['S1', 'S2'], ['mu'], LinearAdjustment())
245+
>>> res = elfi.Rejection(m['d'], output_names=['ss_mean', 'ss_var']).sample(1000)
246+
>>> adj = adjust_posterior(res, m, ['ss_mean', 'ss_var'], ['mu'], LinearAdjustment())
247247
248248
"""
249249
adjustment = _get_adjustment(adjustment)

tests/conftest.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
import elfi.clients.ipyparallel as eipp
1010
import elfi.clients.multiprocessing as mp
1111
import elfi.clients.native as native
12+
import elfi.examples.gauss
1213
import elfi.examples.ma2
1314

1415
elfi.clients.native.set_as_default()

tests/functional/test_post_processing.py

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -28,9 +28,9 @@ def test_single_parameter_linear_adjustment():
2828
# Hyperparameters
2929
mu0, sigma0 = (10, 100)
3030

31-
y_obs = gauss.Gauss(
31+
y_obs = gauss.gauss(
3232
mu, sigma, n_obs=n_obs, batch_size=1, random_state=np.random.RandomState(seed))
33-
sim_fn = partial(gauss.Gauss, sigma=sigma, n_obs=n_obs)
33+
sim_fn = partial(gauss.gauss, sigma=sigma, n_obs=n_obs)
3434

3535
# Posterior
3636
n = y_obs.shape[1]
@@ -40,12 +40,12 @@ def test_single_parameter_linear_adjustment():
4040
# Model
4141
m = elfi.ElfiModel()
4242
elfi.Prior('norm', mu0, sigma0, model=m, name='mu')
43-
elfi.Simulator(sim_fn, m['mu'], observed=y_obs, name='Gauss')
44-
elfi.Summary(lambda x: x.mean(axis=1), m['Gauss'], name='S1')
45-
elfi.Distance('euclidean', m['S1'], name='d')
43+
elfi.Simulator(sim_fn, m['mu'], observed=y_obs, name='gauss')
44+
elfi.Summary(lambda x: x.mean(axis=1), m['gauss'], name='ss_mean')
45+
elfi.Distance('euclidean', m['ss_mean'], name='d')
4646

47-
res = elfi.Rejection(m['d'], output_names=['S1'], seed=seed).sample(1000, threshold=1)
48-
adj = elfi.adjust_posterior(model=m, sample=res, parameter_names=['mu'], summary_names=['S1'])
47+
res = elfi.Rejection(m['d'], output_names=['ss_mean'], seed=seed).sample(1000, threshold=1)
48+
adj = elfi.adjust_posterior(model=m, sample=res, parameter_names=['mu'], summary_names=['ss_mean'])
4949

5050
assert np.allclose(_statistics(adj.outputs['mu']), (4.9772879640569778, 0.02058680115402544))
5151

@@ -61,9 +61,9 @@ def test_nonfinite_values():
6161
# Hyperparameters
6262
mu0, sigma0 = (10, 100)
6363

64-
y_obs = gauss.Gauss(
64+
y_obs = gauss.gauss(
6565
mu, sigma, n_obs=n_obs, batch_size=1, random_state=np.random.RandomState(seed))
66-
sim_fn = partial(gauss.Gauss, sigma=sigma, n_obs=n_obs)
66+
sim_fn = partial(gauss.gauss, sigma=sigma, n_obs=n_obs)
6767

6868
# Posterior
6969
n = y_obs.shape[1]
@@ -73,19 +73,19 @@ def test_nonfinite_values():
7373
# Model
7474
m = elfi.ElfiModel()
7575
elfi.Prior('norm', mu0, sigma0, model=m, name='mu')
76-
elfi.Simulator(sim_fn, m['mu'], observed=y_obs, name='Gauss')
77-
elfi.Summary(lambda x: x.mean(axis=1), m['Gauss'], name='S1')
78-
elfi.Distance('euclidean', m['S1'], name='d')
76+
elfi.Simulator(sim_fn, m['mu'], observed=y_obs, name='gauss')
77+
elfi.Summary(lambda x: x.mean(axis=1), m['gauss'], name='ss_mean')
78+
elfi.Distance('euclidean', m['ss_mean'], name='d')
7979

80-
res = elfi.Rejection(m['d'], output_names=['S1'], seed=seed).sample(1000, threshold=1)
80+
res = elfi.Rejection(m['d'], output_names=['ss_mean'], seed=seed).sample(1000, threshold=1)
8181

8282
# Add some invalid values
8383
res.outputs['mu'] = np.append(res.outputs['mu'], np.array([np.inf]))
84-
res.outputs['S1'] = np.append(res.outputs['S1'], np.array([np.inf]))
84+
res.outputs['ss_mean'] = np.append(res.outputs['ss_mean'], np.array([np.inf]))
8585

8686
with pytest.warns(UserWarning):
8787
adj = elfi.adjust_posterior(
88-
model=m, sample=res, parameter_names=['mu'], summary_names=['S1'])
88+
model=m, sample=res, parameter_names=['mu'], summary_names=['ss_mean'])
8989

9090
assert np.allclose(_statistics(adj.outputs['mu']), (4.9772879640569778, 0.02058680115402544))
9191

tests/unit/test_examples.py

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,12 +41,28 @@ def test_bdm():
4141
if do_cleanup:
4242
os.system('rm {}/bdm'.format(cpp_path))
4343

44-
45-
def test_gauss():
    # Smoke test: the default 1-D Gaussian model samples without error.
    m = gauss.get_model()
    rej = elfi.Rejection(m, m['d'], batch_size=10)
    rej.sample(20)
4948

49+
def test_gauss_1d_mean():
    # Smoke test: the nd_mean model in one dimension samples without error.
    params_true = [4]
    cov_matrix = [1]

    m = gauss.get_model(true_params=params_true, nd_mean=True, cov_matrix=cov_matrix)
    rej = elfi.Rejection(m, m['d'], batch_size=10)
    rej.sample(20)
56+
57+
58+
def test_gauss_2d_mean():
    # Smoke test: the nd_mean model in two dimensions samples without error.
    params_true = [4, 4]
    cov_matrix = [[1, .5], [.5, 1]]

    m = gauss.get_model(true_params=params_true, nd_mean=True, cov_matrix=cov_matrix)
    rej = elfi.Rejection(m, m['d'], batch_size=10)
    rej.sample(20)
65+
5066

5167
def test_Ricker():
5268
m = ricker.get_model()

0 commit comments

Comments
 (0)