#!/usr/bin/env python
# coding: utf-8

# Searches: DynestyStatic
# =======================
#
# This example illustrates how to use the nested sampling algorithm DynestyStatic.
#
# Information about Dynesty can be found at the following links:
#
# - https://github.com/joshspeagle/dynesty
# - https://dynesty.readthedocs.io/en/latest/

# In[ ]:


get_ipython().run_line_magic('matplotlib', 'inline')

from pyprojroot import here

workspace_path = str(here())
get_ipython().run_line_magic('cd', '$workspace_path')
print(f"Working Directory has been set to `{workspace_path}`")

import matplotlib.pyplot as plt
import numpy as np
from os import path

import autofit as af


# __Data__
#
# This example fits a single 1D Gaussian, so we load and plot data containing one Gaussian.

# In[ ]:


dataset_path = path.join("dataset", "example_1d", "gaussian_x1")
data = af.util.numpy_array_from_json(file_path=path.join(dataset_path, "data.json"))
noise_map = af.util.numpy_array_from_json(
    file_path=path.join(dataset_path, "noise_map.json")
)

plt.errorbar(
    x=range(data.shape[0]),
    y=data,
    yerr=noise_map,
    linestyle="",
    color="k",
    ecolor="k",
    elinewidth=1,
    capsize=2,
)
plt.show()
plt.close()


# __Model + Analysis__
#
# We create the model and analysis, which in this example is a single `Gaussian` and therefore has dimensionality N=3.

# In[ ]:


model = af.Model(af.ex.Gaussian)

model.centre = af.UniformPrior(lower_limit=0.0, upper_limit=100.0)
model.normalization = af.LogUniformPrior(lower_limit=1e-2, upper_limit=1e2)
model.sigma = af.UniformPrior(lower_limit=0.0, upper_limit=30.0)

analysis = af.ex.Analysis(data=data, noise_map=noise_map)


# __Search__
#
# We now create and run the `DynestyStatic` object which acts as our non-linear search.
#
# We manually specify all of the Dynesty settings, descriptions of which are provided at the following webpages:
#
# https://dynesty.readthedocs.io/en/latest/api.html
# https://dynesty.readthedocs.io/en/latest/api.html#module-dynesty.nestedsamplers

# In[ ]:


search = af.DynestyStatic(
    path_prefix=path.join("searches"),
    name="DynestyStatic",
    nlive=50,
    bound="multi",
    sample="auto",
    bootstrap=None,
    enlarge=None,
    update_interval=None,
    walks=25,
    facc=0.5,
    slices=5,
    fmove=0.9,
    max_move=100,
    iterations_per_update=2500,
    number_of_cores=1,
)

result = search.fit(model=model, analysis=analysis)


# __Result__
#
# The result object returned by the fit provides information on the outcome of the non-linear search. Let's use it to
# compare the maximum log likelihood `Gaussian` to the data.

# In[ ]:


model_data = result.max_log_likelihood_instance.model_data_from(
    xvalues=np.arange(data.shape[0])
)

plt.errorbar(
    x=range(data.shape[0]),
    y=data,
    yerr=noise_map,
    linestyle="",
    color="k",
    ecolor="k",
    elinewidth=1,
    capsize=2,
)
plt.plot(range(data.shape[0]), model_data, color="r")
plt.title("DynestyStatic model fit to 1D Gaussian dataset.")
plt.xlabel("x values of profile")
plt.ylabel("Profile normalization")
plt.show()
plt.close()


# __Search Internal__
#
# The result also contains the internal representation of the non-linear search.
#
# The internal representation of the non-linear search ensures that all sampling information is available in its
# native form. This can be passed to functions which take it as input, for example if the sampling package has
# bespoke visualization functions.
#
# For `DynestyStatic`, this is an instance of the `NestedSampler` object (`from dynesty import NestedSampler`).
# In[ ]:


search_internal = result.search_internal

print(search_internal)


# The internal search is by default not saved to hard-disk, because it can often take up quite a lot of hard-disk
# space (significantly more than standard output files).
#
# This means that the search internal will only be available the first time you run the search. If you rerun the code
# and the search is bypassed because the results already exist on hard-disk, the search internal will not be available.
#
# If you are frequently using the search internal, you can have it saved to hard-disk by changing the
# `search_internal` setting in `output.yaml` to `True`. The result will then have the search internal available as an
# attribute, irrespective of whether the search is re-run or not.

# In[ ]:
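

# __Native Dynesty Visualization (Sketch)__
#
# As an illustration of the bespoke visualization mentioned above, the cell below is a minimal sketch that passes the
# internal sampler to Dynesty's own plotting module to draw a corner plot. It assumes the internal `NestedSampler`
# exposes the standard Dynesty `results` attribute and that `dynesty.plotting` is importable; this sketch is an
# addition to the example, so adapt it to your installed Dynesty version.

# In[ ]:


from dynesty import plotting as dyplot

# The native Dynesty `Results` object of the internal sampler (assumed standard Dynesty attribute).
dynesty_results = search_internal.results

# Corner plot of the posterior samples, drawn with Dynesty's own plotting function.
fig, axes = dyplot.cornerplot(dynesty_results)
plt.show()
plt.close()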