
Commit

Made a new branch called logLmax with the updated anesthetic.examples… (#348)

* Made a new branch called logLmax with the updated anesthetic.examples.perfect_ns.correlated_gaussian.

* Changed Version from 2.4.2 to 2.4.3

* Trying to fix flake8 error

* Added a test, tests.test_examples.test_perfect_ns_correlated_gaussian, which checks that the logLmax functionality is working

* Updated the tests/test_ns_correlated_gaussian.py to follow the format of the other tests. I am using numpy.testing.assert_allclose() but am not sure what to put for rtol and atol (the tolerance eventually adopted is illustrated in the sketch after the commit message). I have also updated the .gitignore file to exclude the .DS_Store file. In the __init__.py, I added a temporary line on line 12 to check that the correct anesthetic is being imported locally. This line will need to be removed later.

* Fixed the flake8 compliance issue

* Updated tests/test_perfect_ns_correlated_gaussian.py and added an absolute tolerance to the assert_allclose call in test_logLmax. The test passes. I previously accidentally deleted tests/test.ipynb, so I added it back. Deleted the line printing 'test logLmax' in anesthetic/__init__.py.

* Removed the tests/test.ipynb file. Moved the test_logLmax function from test_perfect_ns_correlated_gaussian.py to tests/test_examples.py, then removed test_perfect_ns_correlated_gaussian.py. Added a logLmax argument to gaussian() in anesthetic/examples/perfect_ns.py. Changed the version number to 2.5.0 because we have added functionality. Modified the planck_gaussian() function in examples/perfect_ns.py, adding logLmax when calling the correlated_gaussian() function.

* Removed the lines for printing out the test results of test_logLmax().

* Changed version number from 2.5.0 to 2.6.0

* Updated planck_gaussian function in perfect_ns.py

* Deleted unused lines in planck_gaussian function in perfect_ns.py

* Fixing flake8 issues.

* Added the option to pass arguments to the NestedSamples constructor

* Updated the logL_mean test

* flake8 correction

* Added myself, Dily Ong, as one of the authors of anesthetic.

* Added logLmax to the gaussian function's docstring in perfect_ns.py

* Added logLmax to the correlated_gaussian function's docstring in perfect_ns.py

* Solved flake8 issue.

---------

Co-authored-by: Will Handley <[email protected]>
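
For illustration only (not part of the commit itself): a minimal sketch of how the new logLmax argument and the forwarded NestedSamples keyword arguments might be used, mirroring the test_logLmax test added below. The column names are hypothetical, and it assumes a build of anesthetic that includes this change (2.7.0).

import numpy as np
from anesthetic.examples.perfect_ns import correlated_gaussian

# Three correlated parameters on the unit hypercube, with the peak
# loglikelihood raised to logLmax=10 (values taken from the new
# test_logLmax test in this commit).
nlive = 1000
mean = [0.1, 0.3, 0.5]
cov = np.array([[.01, 0.009, 0], [0.009, .01, 0], [0, 0, 0.1]]) * 0.01
bounds = [[0, 1], [0, 1], [0, 1]]
logLmax = 10

# Extra keyword arguments (here hypothetical column names) are forwarded
# to the NestedSamples constructor, as added in this commit.
samples = correlated_gaussian(nlive, mean, cov, bounds=bounds,
                              logLmax=logLmax, columns=['x0', 'x1', 'x2'])

# For a d-dimensional Gaussian the posterior-averaged loglikelihood is
# approximately logLmax - d/2, so check it to within ~3 standard errors.
d = len(mean)
atol = 3 * samples.logL.std() / np.sqrt(samples.neff())
np.testing.assert_allclose(samples.logL.mean(), logLmax - d/2, atol=atol)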
DilyOng and williamjameshandley authored Feb 6, 2024
1 parent 7c30e72 commit b93dec5
Showing 7 changed files with 65 additions and 33 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -13,3 +13,5 @@ plikHM_TTTEEE_lowl_lowE_lensing_NS/
*~
.pytest_cache/*
.coverage

.DS_Store
2 changes: 1 addition & 1 deletion README.rst
@@ -2,7 +2,7 @@
anesthetic: nested sampling post-processing
===========================================
:Authors: Will Handley and Lukas Hergt
:Version: 2.6.0
:Version: 2.7.0
:Homepage: https://github.com/handley-lab/anesthetic
:Documentation: http://anesthetic.readthedocs.io/

1 change: 0 additions & 1 deletion anesthetic/__init__.py
@@ -9,7 +9,6 @@
import pandas.plotting._misc
from anesthetic._format import _DataFrameFormatter
from anesthetic._version import __version__ # noqa: F401

# TODO: remove this when conda pandas version catches up
from packaging.version import parse
assert parse(pandas.__version__) >= parse('2.0.0')
2 changes: 1 addition & 1 deletion anesthetic/_version.py
@@ -1 +1 @@
__version__ = '2.6.0'
__version__ = '2.7.0'
71 changes: 42 additions & 29 deletions anesthetic/examples/perfect_ns.py
@@ -5,7 +5,8 @@
from anesthetic.samples import merge_nested_samples


def gaussian(nlive, ndims, sigma=0.1, R=1, logLmin=-1e-2):
def gaussian(nlive, ndims, sigma=0.1, R=1, logLmin=-1e-2, logLmax=0,
*args, **kwargs):
"""Perfect nested sampling run for a spherical Gaussian & prior.
Up to normalisation this is identical to the example in John Skilling's
@@ -29,14 +30,20 @@ def gaussian(nlive, ndims, sigma=0.1, R=1, logLmin=-1e-2):
logLmin : float
loglikelihood at which to terminate
logLmax : float
maximum loglikelihood
The remaining arguments are passed to the
:class:`anesthetic.samples.NestedSamples` constructor.
Returns
-------
samples : :class:`anesthetic.samples.NestedSamples`
Nested sampling run
"""

def logLike(x):
return -(x**2).sum(axis=-1)/2/sigma**2
return logLmax - (x**2).sum(axis=-1)/2/sigma**2

def random_sphere(n):
return random_ellipsoid(np.zeros(ndims), np.eye(ndims), n)
@@ -48,7 +55,8 @@ def random_sphere(n):
while logL.min() < logLmin:
points = r * random_sphere(nlive)
logL = logLike(points)
samples.append(NestedSamples(points, logL=logL, logL_birth=logL_birth))
samples.append(NestedSamples(points, logL=logL, logL_birth=logL_birth,
*args, **kwargs))
logL_birth = logL.copy()
r = (points**2).sum(axis=-1, keepdims=True)**0.5

@@ -57,14 +65,14 @@ def random_sphere(n):
return samples.loc[samples.logL_birth < logLend].recompute()


def correlated_gaussian(nlive, mean, cov, bounds=None):
def correlated_gaussian(nlive, mean, cov, bounds=None, logLmax=0,
*args, **kwargs):
"""Perfect nested sampling run for a correlated gaussian likelihood.
This produces a perfect nested sampling run with a uniform prior over the
unit hypercube, with a likelihood gaussian in the parameters normalised so
that the evidence is unity. The algorithm proceeds by simultaneously
rejection sampling from the prior and sampling exactly and uniformly from
the known ellipsoidal contours.
This produces a perfect nested sampling run with a uniform prior over
the unit hypercube. The algorithm proceeds by simultaneously rejection
sampling from the prior and sampling exactly and uniformly from the
known ellipsoidal contours.
This can produce perfect runs in up to around d~15 dimensions. Beyond
this rejection sampling from a truncated gaussian in the early stage
@@ -85,6 +93,12 @@ def correlated_gaussian(nlive, mean, cov, bounds=None):
bounds : 2d array-like, shape (ndims, 2)
bounds of a gaussian, default ``[[0, 1]]*ndims``
logLmax : float
maximum loglikelihood
The remaining arguments are passed to the
:class:`anesthetic.samples.NestedSamples` constructor.
Returns
-------
samples : :class:`anesthetic.samples.NestedSamples`
@@ -95,7 +109,7 @@ def correlated_gaussian(nlive, mean, cov, bounds=None):
invcov = np.linalg.inv(cov)

def logLike(x):
return -0.5 * ((x-mean) @ invcov * (x-mean)).sum(axis=-1)
return logLmax - 0.5 * ((x - mean) @ invcov * (x - mean)).sum(axis=-1)

ndims = len(mean)

@@ -104,10 +118,9 @@ def logLike(x):

bounds = np.array(bounds, dtype=float)

logLmax = logLike(mean)

points = np.random.uniform(*bounds.T, (2*nlive, ndims))
samples = NestedSamples(points, logL=logLike(points), logL_birth=-np.inf)
samples = NestedSamples(points, logL=logLike(points), logL_birth=-np.inf,
*args, **kwargs)

while (1/samples.nlive.iloc[:-nlive]).sum() < samples.D_KL()*2:
logLs = samples.logL.iloc[-nlive]
@@ -120,19 +133,21 @@ def logLike(x):
points = np.random.uniform(*bounds.T, (nlive, ndims))
logL = logLike(points)
i = logL > logLs
samps_1 = NestedSamples(points[i], logL=logL[i], logL_birth=logLs)
samps_1 = NestedSamples(points[i], logL=logL[i], logL_birth=logLs,
*args, **kwargs)

# Ellipsoidal round
points = random_ellipsoid(mean, cov*2*(logLmax - logLs), nlive)
logL = logLike(points)
i = ((points > bounds.T[0]) & (points < bounds.T[1])).all(axis=1)
samps_2 = NestedSamples(points[i], logL=logL[i], logL_birth=logLs)
samps_2 = NestedSamples(points[i], logL=logL[i], logL_birth=logLs,
*args, **kwargs)
samples = merge_nested_samples([samples, samps_1, samps_2])

return samples


def wedding_cake(nlive, ndims, sigma=0.01, alpha=0.5):
def wedding_cake(nlive, ndims, sigma=0.01, alpha=0.5, *args, **kwargs):
"""Perfect nested sampling run for a wedding cake likelihood.
This is a likelihood with nested hypercuboidal plateau regions of constant
@@ -157,6 +172,9 @@ def wedding_cake(nlive, ndims, sigma=0.01, alpha=0.5):
alpha : float
volume compression between plateau regions
The remaining arguments are passed to the
:class:`anesthetic.samples.NestedSamples` constructor.
"""

def i(x):
@@ -192,7 +210,8 @@ def logL(x):
live_points[j] = x_
live_likes[j] = logL(x_)

samps = NestedSamples(points, logL=death_likes, logL_birth=birth_likes)
samps = NestedSamples(points, logL=death_likes, logL_birth=birth_likes,
*args, **kwargs)
weights = samps.get_weights()
if weights[-nlive:].sum() < 0.001 * weights.sum():
break
@@ -201,7 +220,8 @@ def logL(x):
birth_likes = np.concatenate([birth_likes, live_birth_likes])
points = np.concatenate([points, live_points])

return NestedSamples(points, logL=death_likes, logL_birth=birth_likes)
return NestedSamples(points, logL=death_likes, logL_birth=birth_likes,
*args, **kwargs)


def planck_gaussian(nlive=500):
@@ -244,14 +264,7 @@ def planck_gaussian(nlive=500):
[8.00e-01, 1.20e+00]])

logL_mean = -1400.35

samples = correlated_gaussian(nlive, mean, cov, bounds)

data = samples.iloc[:, :len(columns)].to_numpy()
logL = samples['logL'].to_numpy()
logL_birth = samples['logL_birth'].to_numpy()
logL_birth += logL_mean - samples.logL.mean()
logL += logL_mean - samples.logL.mean()
samples = NestedSamples(data=data, columns=columns, labels=labels,
logL=logL, logL_birth=logL_birth)
return samples
d = len(mean)
logLmax = logL_mean + d/2
return correlated_gaussian(nlive, mean, cov, bounds, logLmax,
columns=columns, labels=labels)
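
Note on the planck_gaussian change above: for a d-dimensional Gaussian likelihood, the quadratic form 2*(logLmax - logL) is chi-squared distributed with d degrees of freedom under the posterior, so the posterior mean loglikelihood is approximately logLmax - d/2 (assuming the bounds enclose essentially all of the posterior mass). Choosing logLmax = logL_mean + d/2 therefore recovers the target logL_mean directly, which is why the manual shifting of logL and logL_birth in the old code is no longer needed.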
1 change: 1 addition & 0 deletions pyproject.toml
@@ -25,6 +25,7 @@ authors = [
{ name="Aleksandr Petrosyan", email="[email protected]" },
{ name="Liangliang Su", email="[email protected]"},
{ name="David Yallup", email="[email protected]" },
{ name="Dily Ong", email="[email protected]" }
]
description = "nested sampling post-processing"
readme = "README.rst"
19 changes: 18 additions & 1 deletion tests/test_examples.py
@@ -144,8 +144,25 @@ def test_planck():
assert (planck[params] < bounds.T[1]).all().all()
assert_allclose(planck[params].mean(), mean, atol=1e-3)
assert_allclose(planck[params].cov(), cov, atol=1e-3)
assert_allclose(planck.logL.mean(), logL_mean, atol=1e-3)
assert_allclose(planck.logL.mean(), logL_mean,
atol=3*planck.logL_P(12).std())
assert_allclose(planck.logZ(), logZ, atol=3*planck.logZ(12).std())
assert_allclose(planck.D_KL(), D_KL, atol=3*planck.D_KL(12).std())
assert_allclose(planck.d_G(), len(params), atol=3*planck.d_G(12).std())
assert_allclose(planck.logL_P(), logL_mean, atol=3*planck.logL_P(12).std())


def test_logLmax():
nlive = 1000
mean = [0.1, 0.3, 0.5]
# cov = covariance matrix
cov = np.array([[.01, 0.009, 0], [0.009, .01, 0], [0, 0, 0.1]])*0.01
bounds = [[0, 1], [0, 1], [0, 1]]
logLmax = 10
# d = number of parameters
d = len(mean)
samples = correlated_gaussian(nlive, mean, cov, bounds=bounds,
logLmax=logLmax)
abs_err = samples.logL.std()/np.sqrt(samples.neff())
atol = abs_err*3
assert_allclose(samples.logL.mean(), logLmax-d/2, atol=atol)
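
Assuming the repository's standard pytest setup, the new check can be run in isolation with pytest tests/test_examples.py::test_logLmax.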
