From b93dec5759cee058a8ab17ab54ba7931fbcf16db Mon Sep 17 00:00:00 2001 From: Dily Ong <46112650+DilyOng@users.noreply.github.com> Date: Tue, 6 Feb 2024 21:41:07 +0800 Subject: [PATCH] =?UTF-8?q?Made=20a=20new=20branch=20called=20logLmax=20wi?= =?UTF-8?q?th=20the=20updated=20anesthetic.examples=E2=80=A6=20(#348)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Made a new branch called logLmax with the updated anesthetic.examples.perfect_ns.correlated_gaussian. * Changed Version from 2.4.2 to 2.4.3 * Trying to fix flake8 error * Added a test, tests.test_examples.test_perfect_ns_correlated_gaussian, which checks that the logLmax functionality is working * Updated the tests/test_ns_correlated_gaussian.py to follow the format of the other tests. I am using numpy.testing.assert_allclose() but not sure what to put for rtol and atol. I have also updated the .gitignore file to exclude the .DS_Store file. In the __init__.py, I added a temporary line in line 12 to check if the correct anesthetic is being imported locally. This line will need to be removed later. * Fixed the flake8 compliance issue * Updated tests/test_perfect_ns_correlated_gaussian.py and added absolute tolerance to the assert_allclose function in test_logLmax. The test passes. I previously accidentally deleted tests/test.ipynb so I added it back. Deleted the line, print test logLmax in anesthetic/__init__.py * Removed tests/test.ipynb file. Moved the test logLmax function from test_perfect_ns_correlated_gaussian.py to tests/test_examples.py, then removed test_perfect_ns_correlated_gaussian.py. Added logLmax variable to gaussian() in anesthetic/examples/perfect_ns.py. Changed version number to 2.5.0 because we have added functionality. Modified the planck_gaussian() function in examples/perfect_ns.py, added logLmax when calling the correlated_gaussian() function. * Removed the lines for printing out the test results of test_logLmax(). 
* Changed version number from 2.5.0 to 2.6.0 * Updated planck_gaussian function in perfect_ns.py * Deleted unused lines in planck_gaussian function in perfect_ns.py * Fixing flake issues. * Added the option to pass arguments to NestedSamples constructor * Updated the logL_mean test * flake8 correction * Added myself Dily Ong as one of the authors of anesthetic. * Added LogLmax to gaussian function's docstring in perfect_ns.py * Added LogLmax to correlated_gaussian function's docstring in perfect_ns.py * Solved flake8 issue. --------- Co-authored-by: Will Handley --- .gitignore | 2 + README.rst | 2 +- anesthetic/__init__.py | 1 - anesthetic/_version.py | 2 +- anesthetic/examples/perfect_ns.py | 71 ++++++++++++++++++------------- pyproject.toml | 1 + tests/test_examples.py | 19 ++++++++- 7 files changed, 65 insertions(+), 33 deletions(-) diff --git a/.gitignore b/.gitignore index 52d12e31..34c623d3 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,5 @@ plikHM_TTTEEE_lowl_lowE_lensing_NS/ *~ .pytest_cache/* .coverage + +.DS_Store \ No newline at end of file diff --git a/README.rst b/README.rst index 0df6459f..3dd7f84e 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ anesthetic: nested sampling post-processing =========================================== :Authors: Will Handley and Lukas Hergt -:Version: 2.6.0 +:Version: 2.7.0 :Homepage: https://github.com/handley-lab/anesthetic :Documentation: http://anesthetic.readthedocs.io/ diff --git a/anesthetic/__init__.py b/anesthetic/__init__.py index 2c5b855b..732b238f 100644 --- a/anesthetic/__init__.py +++ b/anesthetic/__init__.py @@ -9,7 +9,6 @@ import pandas.plotting._misc from anesthetic._format import _DataFrameFormatter from anesthetic._version import __version__ # noqa: F401 - # TODO: remove this when conda pandas version catches up from packaging.version import parse assert parse(pandas.__version__) >= parse('2.0.0') diff --git a/anesthetic/_version.py b/anesthetic/_version.py index f0e5e1ea..766ce2d0 
100644 --- a/anesthetic/_version.py +++ b/anesthetic/_version.py @@ -1 +1 @@ -__version__ = '2.6.0' +__version__ = '2.7.0' diff --git a/anesthetic/examples/perfect_ns.py b/anesthetic/examples/perfect_ns.py index 69252a0e..25ea868a 100644 --- a/anesthetic/examples/perfect_ns.py +++ b/anesthetic/examples/perfect_ns.py @@ -5,7 +5,8 @@ from anesthetic.samples import merge_nested_samples -def gaussian(nlive, ndims, sigma=0.1, R=1, logLmin=-1e-2): +def gaussian(nlive, ndims, sigma=0.1, R=1, logLmin=-1e-2, logLmax=0, + *args, **kwargs): """Perfect nested sampling run for a spherical Gaussian & prior. Up to normalisation this is identical to the example in John Skilling's @@ -29,6 +30,12 @@ def gaussian(nlive, ndims, sigma=0.1, R=1, logLmin=-1e-2): logLmin : float loglikelihood at which to terminate + logLmax : float + maximum loglikelihood + + The remaining arguments are passed to the + :class:`anesthetic.samples.NestedSamples` constructor. + Returns ------- samples : :class:`anesthetic.samples.NestedSamples` @@ -36,7 +43,7 @@ def gaussian(nlive, ndims, sigma=0.1, R=1, logLmin=-1e-2): """ def logLike(x): - return -(x**2).sum(axis=-1)/2/sigma**2 + return logLmax - (x**2).sum(axis=-1)/2/sigma**2 def random_sphere(n): return random_ellipsoid(np.zeros(ndims), np.eye(ndims), n) @@ -48,7 +55,8 @@ def random_sphere(n): while logL.min() < logLmin: points = r * random_sphere(nlive) logL = logLike(points) - samples.append(NestedSamples(points, logL=logL, logL_birth=logL_birth)) + samples.append(NestedSamples(points, logL=logL, logL_birth=logL_birth, + *args, **kwargs)) logL_birth = logL.copy() r = (points**2).sum(axis=-1, keepdims=True)**0.5 @@ -57,14 +65,14 @@ def random_sphere(n): return samples.loc[samples.logL_birth < logLend].recompute() -def correlated_gaussian(nlive, mean, cov, bounds=None): +def correlated_gaussian(nlive, mean, cov, bounds=None, logLmax=0, + *args, **kwargs): """Perfect nested sampling run for a correlated gaussian likelihood. 
- This produces a perfect nested sampling run with a uniform prior over the - unit hypercube, with a likelihood gaussian in the parameters normalised so - that the evidence is unity. The algorithm proceeds by simultaneously - rejection sampling from the prior and sampling exactly and uniformly from - the known ellipsoidal contours. + This produces a perfect nested sampling run with a uniform prior over + the unit hypercube. The algorithm proceeds by simultaneously rejection + sampling from the prior and sampling exactly and uniformly from the + known ellipsoidal contours. This can produce perfect runs in up to around d~15 dimensions. Beyond this rejection sampling from a truncated gaussian in the early stage @@ -85,6 +93,12 @@ def correlated_gaussian(nlive, mean, cov, bounds=None): bounds : 2d array-like, shape (ndims, 2) bounds of a gaussian, default ``[[0, 1]]*ndims`` + logLmax : float + maximum loglikelihood + + The remaining arguments are passed to the + :class:`anesthetic.samples.NestedSamples` constructor. 
+ Returns ------- samples : :class:`anesthetic.samples.NestedSamples` @@ -95,7 +109,7 @@ def correlated_gaussian(nlive, mean, cov, bounds=None): invcov = np.linalg.inv(cov) def logLike(x): - return -0.5 * ((x-mean) @ invcov * (x-mean)).sum(axis=-1) + return logLmax - 0.5 * ((x - mean) @ invcov * (x - mean)).sum(axis=-1) ndims = len(mean) @@ -104,10 +118,9 @@ def logLike(x): bounds = np.array(bounds, dtype=float) - logLmax = logLike(mean) - points = np.random.uniform(*bounds.T, (2*nlive, ndims)) - samples = NestedSamples(points, logL=logLike(points), logL_birth=-np.inf) + samples = NestedSamples(points, logL=logLike(points), logL_birth=-np.inf, + *args, **kwargs) while (1/samples.nlive.iloc[:-nlive]).sum() < samples.D_KL()*2: logLs = samples.logL.iloc[-nlive] @@ -120,19 +133,21 @@ def logLike(x): points = np.random.uniform(*bounds.T, (nlive, ndims)) logL = logLike(points) i = logL > logLs - samps_1 = NestedSamples(points[i], logL=logL[i], logL_birth=logLs) + samps_1 = NestedSamples(points[i], logL=logL[i], logL_birth=logLs, + *args, **kwargs) # Ellipsoidal round points = random_ellipsoid(mean, cov*2*(logLmax - logLs), nlive) logL = logLike(points) i = ((points > bounds.T[0]) & (points < bounds.T[1])).all(axis=1) - samps_2 = NestedSamples(points[i], logL=logL[i], logL_birth=logLs) + samps_2 = NestedSamples(points[i], logL=logL[i], logL_birth=logLs, + *args, **kwargs) samples = merge_nested_samples([samples, samps_1, samps_2]) return samples -def wedding_cake(nlive, ndims, sigma=0.01, alpha=0.5): +def wedding_cake(nlive, ndims, sigma=0.01, alpha=0.5, *args, **kwargs): """Perfect nested sampling run for a wedding cake likelihood. This is a likelihood with nested hypercuboidal plateau regions of constant @@ -157,6 +172,9 @@ def wedding_cake(nlive, ndims, sigma=0.01, alpha=0.5): alpha : float volume compression between plateau regions + + The remaining arguments are passed to the + :class:`anesthetic.samples.NestedSamples` constructor. 
""" def i(x): @@ -192,7 +210,8 @@ def logL(x): live_points[j] = x_ live_likes[j] = logL(x_) - samps = NestedSamples(points, logL=death_likes, logL_birth=birth_likes) + samps = NestedSamples(points, logL=death_likes, logL_birth=birth_likes, + *args, **kwargs) weights = samps.get_weights() if weights[-nlive:].sum() < 0.001 * weights.sum(): break @@ -201,7 +220,8 @@ def logL(x): birth_likes = np.concatenate([birth_likes, live_birth_likes]) points = np.concatenate([points, live_points]) - return NestedSamples(points, logL=death_likes, logL_birth=birth_likes) + return NestedSamples(points, logL=death_likes, logL_birth=birth_likes, + *args, **kwargs) def planck_gaussian(nlive=500): @@ -244,14 +264,7 @@ def planck_gaussian(nlive=500): [8.00e-01, 1.20e+00]]) logL_mean = -1400.35 - - samples = correlated_gaussian(nlive, mean, cov, bounds) - - data = samples.iloc[:, :len(columns)].to_numpy() - logL = samples['logL'].to_numpy() - logL_birth = samples['logL_birth'].to_numpy() - logL_birth += logL_mean - samples.logL.mean() - logL += logL_mean - samples.logL.mean() - samples = NestedSamples(data=data, columns=columns, labels=labels, - logL=logL, logL_birth=logL_birth) - return samples + d = len(mean) + logLmax = logL_mean + d/2 + return correlated_gaussian(nlive, mean, cov, bounds, logLmax, + columns=columns, labels=labels) diff --git a/pyproject.toml b/pyproject.toml index 6d4486f6..caa28267 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ authors = [ { name="Aleksandr Petrosyan", email="a-p-petrosyan@yandex.ru" }, { name="Liangliang Su", email="liangliangsu@njnu.edu.cn"}, { name="David Yallup", email="david.yallup@gmail.com" }, + { name="Dily Ong", email="dlo26@cam.ac.uk" } ] description = "nested sampling post-processing" readme = "README.rst" diff --git a/tests/test_examples.py b/tests/test_examples.py index 506388fd..91b6d4bb 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -144,8 +144,25 @@ def test_planck(): assert (planck[params] < 
bounds.T[1]).all().all() assert_allclose(planck[params].mean(), mean, atol=1e-3) assert_allclose(planck[params].cov(), cov, atol=1e-3) - assert_allclose(planck.logL.mean(), logL_mean, atol=1e-3) + assert_allclose(planck.logL.mean(), logL_mean, + atol=3*planck.logL_P(12).std()) assert_allclose(planck.logZ(), logZ, atol=3*planck.logZ(12).std()) assert_allclose(planck.D_KL(), D_KL, atol=3*planck.D_KL(12).std()) assert_allclose(planck.d_G(), len(params), atol=3*planck.d_G(12).std()) assert_allclose(planck.logL_P(), logL_mean, atol=3*planck.logL_P(12).std()) + + +def test_logLmax(): + nlive = 1000 + mean = [0.1, 0.3, 0.5] + # cov = covariance matrix + cov = np.array([[.01, 0.009, 0], [0.009, .01, 0], [0, 0, 0.1]])*0.01 + bounds = [[0, 1], [0, 1], [0, 1]] + logLmax = 10 + # d = number of parameters + d = len(mean) + samples = correlated_gaussian(nlive, mean, cov, bounds=bounds, + logLmax=logLmax) + abs_err = samples.logL.std()/np.sqrt(samples.neff()) + atol = abs_err*3 + assert_allclose(samples.logL.mean(), logLmax-d/2, atol=atol)