Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -125,13 +125,13 @@ venv.bak/
.idea
workspace/output/
output
test/optimize/test_fit
test/mle/test_fit
test/test_files/text/
test/
test_autofit/optimize/test_fit/
test_autofit/mle/test_fit/
test_autofit/test_files/text/psycopg2-binary==2.8.1
test_autofit/test_files/text/
fit/test_autofit/optimize/test_fit
fit/test_autofit/mle/test_fit
*.DS_Store

test_autofit/config/priors/old
Expand All @@ -157,7 +157,7 @@ test_autofit/samples.csv
__MACOSX
*.swp
test/autofit/test_fit
# Byte-compiled / optimized / DLL files
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
Expand Down Expand Up @@ -264,13 +264,13 @@ venv.bak/
.idea
workspace/output/
output
test/optimize/test_fit
test/mle/test_fit
test/test_files/text/
test/
test_autofit/optimize/test_fit/
test_autofit/mle/test_fit/
test_autofit/test_files/text/psycopg2-binary==2.8.1
test_autofit/test_files/text/
fit/test_autofit/optimize/test_fit
fit/test_autofit/mle/test_fit
*.DS_Store

test_autofit/config/priors/old
Expand Down
12 changes: 7 additions & 5 deletions autofit/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,18 +66,20 @@
from .non_linear.grid.sensitivity import Sensitivity
from .non_linear.initializer import InitializerBall
from .non_linear.initializer import InitializerPrior
from .non_linear.initializer import SpecificRangeInitializer
from .non_linear.initializer import InitializerParamBounds
from .non_linear.initializer import InitializerParamStartPoints
from .non_linear.search.mcmc.auto_correlations import AutoCorrelationsSettings
from .non_linear.search.mcmc.emcee.search import Emcee
from .non_linear.search.mcmc.zeus.search import Zeus
from .non_linear.search.nest.nautilus.search import Nautilus
from .non_linear.search.nest.dynesty.search.dynamic import DynestyDynamic
from .non_linear.search.nest.dynesty.search.static import DynestyStatic
from .non_linear.search.nest.ultranest.search import UltraNest
from .non_linear.search.optimize.drawer.search import Drawer
from .non_linear.search.optimize.lbfgs.search import LBFGS
from .non_linear.search.optimize.pyswarms.search.globe import PySwarmsGlobal
from .non_linear.search.optimize.pyswarms.search.local import PySwarmsLocal
from .non_linear.search.mle.drawer.search import Drawer
from .non_linear.search.mle.bfgs.search import BFGS
from .non_linear.search.mle.bfgs.search import LBFGS
from .non_linear.search.mle.pyswarms.search.globe import PySwarmsGlobal
from .non_linear.search.mle.pyswarms.search.local import PySwarmsLocal
from .non_linear.paths.abstract import AbstractPaths
from .non_linear.paths import DirectoryPaths
from .non_linear.paths import DatabasePaths
Expand Down
2 changes: 1 addition & 1 deletion autofit/config/non_linear/README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@ Files

- ``mcmc.yaml``: Settings default behaviour of MCMC non-linear searches (e.g. Emcee).
- ``nest.yaml``: Settings default behaviour of nested sampler non-linear searches (e.g. Dynesty).
- ``optimizer.yaml``: Settings default behaviour of optimizer non-linear searches (e.g. PySwarms).
- ``mle.yaml``: Settings default behaviour of maximum likelihood estimator (mle) searches (e.g. PySwarms).
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
# Configuration files that customize the default behaviour of non-linear searches.

# **PyAutoFit** supports the following optimizer algorithms:
# **PyAutoFit** supports the following maximum likelihood estimator (MLE) algorithms:

# - PySwarms: https://github.com/ljvmiranda921/pyswarms / https://pyswarms.readthedocs.io/en/latest/index.html

# Settings in the [search], [run] and [options] entries are specific to each MLE algorithm and should be
# determined by consulting that optimizers method's own readthedocs.
# determined by consulting that method's own readthedocs.

PySwarmsGlobal:
run:
Expand Down Expand Up @@ -49,6 +49,44 @@ PySwarmsLocal:
updates:
iterations_per_update: 500 # The number of iterations of the non-linear search performed between every 'update', where an update performs tasks like outputting model.results.
remove_state_files_at_end: true # Whether to remove the savestate of the search (e.g. the Emcee hdf5 file) at the end to save hard-disk space (results are still stored as PyAutoFit pickles and loadable).
BFGS:
search:
tol: null
options:
disp: false
eps: 1.0e-08
ftol: 2.220446049250313e-09
gtol: 1.0e-05
iprint: -1.0
maxcor: 10
maxfun: 15000
maxiter: 15000
maxls: 20
initialize: # The method used to generate where walkers are initialized in parameter space {prior | ball}.
method: ball # priors: samples are initialized by randomly drawing from each parameter's prior. ball: samples are initialized by randomly drawing unit values from a narrow uniform distribution.
ball_lower_limit: 0.49 # The lower limit of the uniform distribution unit values are drawn from when initializing walkers using the ball method.
ball_upper_limit: 0.51 # The upper limit of the uniform distribution unit values are drawn from when initializing walkers using the ball method.
parallel:
number_of_cores: 1 # The number of cores the search is parallelized over by default, using Python multiprocessing.
printing:
silence: false # If True, the default print output of the non-linear search is silenced and not printed by the Python interpreter.
updates:
iterations_per_update: 500 # The number of iterations of the non-linear search performed between every 'update', where an update performs tasks like outputting model.results.
remove_state_files_at_end: true # Whether to remove the savestate of the search (e.g. the Emcee hdf5 file) at the end to save hard-disk space (results are still stored as PyAutoFit pickles and loadable).
Drawer:
search:
total_draws: 50
initialize: # The method used to generate where walkers are initialized in parameter space {prior | ball}.
method: ball # priors: samples are initialized by randomly drawing from each parameter's prior. ball: samples are initialized by randomly drawing unit values from a narrow uniform distribution.
ball_lower_limit: 0.49 # The lower limit of the uniform distribution unit values are drawn from when initializing walkers using the ball method.
ball_upper_limit: 0.51 # The upper limit of the uniform distribution unit values are drawn from when initializing walkers using the ball method.
parallel:
number_of_cores: 1 # The number of cores the search is parallelized over by default, using Python multiprocessing.
printing:
silence: false # If True, the default print output of the non-linear search is silenced and not printed by the Python interpreter.
updates:
iterations_per_update: 500 # The number of iterations of the non-linear search performed between every 'update', where an update performs tasks like outputting model.results.
remove_state_files_at_end: true # Whether to remove the savestate of the search (e.g. the Emcee hdf5 file) at the end to save hard-disk space (results are still stored as PyAutoFit pickles and loadable).
LBFGS:
search:
tol: null
Expand Down
5 changes: 3 additions & 2 deletions autofit/config/visualize/plots_search.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,6 @@ nest:
corner_anesthetic: true # Output corner figure (using anesthetic) during a non-linear search fit?
mcmc:
corner_cornerpy: true # Output corner figure (using corner.py) during a non-linear search fit?
optimize:
corner_cornerpy: true # Output corner figure (using corner.py) during a non-linear search fit?
mle:
subplot_parameters: true # Output a subplot of the best-fit parameters of the model?
log_likelihood_vs_iteration: true # Output a plot of the log likelihood versus iteration number?
4 changes: 2 additions & 2 deletions autofit/interpolator/covariance.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,8 +195,8 @@ def _relationships_for_value(
"""
analysis = self._analysis_for_value(value)
model = self.model(path_relationship_map=path_relationship_map or {})
optimizer = DynestyStatic()
result = optimizer.fit(model=model, analysis=analysis)
search = DynestyStatic()
result = search.fit(model=model, analysis=analysis)
return result.instance

def __getitem__(self, value: Equality) -> float:
Expand Down
8 changes: 8 additions & 0 deletions autofit/mapper/prior_model/abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -1465,6 +1465,14 @@ def paths(self) -> List[Path]:
"""
return [path for path, _ in self.path_priors_tuples]

@property
def paths_formatted(self) -> List[Path]:
    """
    A list of paths to all the priors in the model, ordered by their ids.

    NOTE(review): this currently returns exactly the same result as the
    `paths` property and applies no formatting — confirm whether a
    formatted representation was intended here.
    """
    return [path for path, _ in self.path_priors_tuples]

@property
def composition(self):
return [".".join(path) for path in self.paths]
Expand Down
2 changes: 1 addition & 1 deletion autofit/mock.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from autofit.non_linear.mock.mock_result import MockResult
from autofit.non_linear.mock.mock_result import MockResultGrid
from autofit.non_linear.mock.mock_search import MockSearch
from autofit.non_linear.mock.mock_search import MockOptimizer
from autofit.non_linear.mock.mock_search import MockMLE
from autofit.non_linear.mock.mock_samples_summary import MockSamplesSummary
from autofit.non_linear.mock.mock_samples import MockSamples
from autofit.non_linear.mock.mock_samples import MockSamplesNest
Expand Down
17 changes: 17 additions & 0 deletions autofit/non_linear/fitness.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ def __init__(
fom_is_log_likelihood: bool = True,
resample_figure_of_merit: float = -np.inf,
convert_to_chi_squared: bool = False,
store_history: bool = False,
):
"""
Interfaces with any non-linear search to fit the model to the data and return a log likelihood via
Expand Down Expand Up @@ -67,6 +68,11 @@ def __init__(
instead. The appropriate value depends on the search, but is typically either `None`, `-np.inf` or `1.0e99`.
All values indicate to the non-linear search that the model-fit should be resampled or ignored.

Many searches do not store the history of the parameters and log likelihood values, often to save
memory on large model-fits. However, this can be useful, for example to plot the results of a model-fit
versus iteration number. If the `store_history` bool is `True`, the parameters and log likelihoods are stored
in the `parameters_history_list` and `log_likelihood_history_list` attributes of the fitness object.

Parameters
----------
analysis
Expand All @@ -86,6 +92,8 @@ def __init__(
convert_to_chi_squared
If `True`, the figure of merit returned is the log likelihood multiplied by -2.0, such that it is a
chi-squared value that is minimized.
store_history
If `True`, the parameters and log likelihood values of every model-fit are stored in lists.
"""

self.analysis = analysis
Expand All @@ -99,6 +107,10 @@ def __init__(
if self.paths is not None:
self.check_log_likelihood(fitness=self)

self.store_history = store_history
self.parameters_history_list = []
self.log_likelihood_history_list = []

def __getstate__(self):
state = self.__dict__.copy()
del state["_log_likelihood_function"]
Expand Down Expand Up @@ -154,6 +166,11 @@ def __call__(self, parameters, *kwargs):
log_prior_list = self.model.log_prior_list_from_vector(vector=parameters)
figure_of_merit = log_likelihood + sum(log_prior_list)

if self.store_history:

self.parameters_history_list.append(parameters)
self.log_likelihood_history_list.append(log_likelihood)

if self.convert_to_chi_squared:
figure_of_merit *= -2.0

Expand Down
93 changes: 87 additions & 6 deletions autofit/non_linear/initializer.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@ class AbstractInitializer(ABC):
def _generate_unit_parameter_list(self, model):
pass

def info_from_model(self, model : AbstractPriorModel) -> str:
    """
    Returns a human-readable string describing this initializer's starting
    values for the given model's parameters.

    The base class does not define a representation; subclasses that support
    one (e.g. initializers built from explicit parameter values) override this
    method, otherwise calling it raises `NotImplementedError`.
    """
    raise NotImplementedError

@staticmethod
def figure_of_metric(args) -> Optional[float]:
fitness, parameter_list = args
Expand Down Expand Up @@ -175,22 +178,22 @@ def samples_in_test_mode(self, total_points: int, model: AbstractPriorModel):
return unit_parameter_lists, parameter_lists, figure_of_merit_list


class SpecificRangeInitializer(AbstractInitializer):
class InitializerParamBounds(AbstractInitializer):
def __init__(
self,
parameter_dict: Dict[Prior, Tuple[float, float]],
lower_limit=0.0,
upper_limit=1.0,
):
"""
Initializer that allows the range of possible starting points for each prior
to be specified explicitly.
Initializer which uses the bounds on input parameters as the starting point for the search (e.g. where
an MLE optimization starts or MCMC walkers are initialized).

Parameters
----------
parameter_dict
A dictionary mapping priors to inclusive ranges of physical values that
the initial values for that dimension in the search may take
A dictionary mapping each parameter path to bounded ranges of physical values that
are where the search begins.
lower_limit
A default, unit lower limit used when a prior is not specified
upper_limit
Expand Down Expand Up @@ -226,7 +229,7 @@ def _generate_unit_parameter_list(self, model: AbstractPriorModel) -> List[float
key = ".".join(model.path_for_prior(prior))
if key not in self._generated_warnings:
logger.warning(
f"Range for {key} not set in the SpecificRangeInitializer. "
f"Range for {key} not set in the InitializerParamBounds. "
f"Using defaults."
)
self._generated_warnings.add(key)
Expand All @@ -240,6 +243,84 @@ def _generate_unit_parameter_list(self, model: AbstractPriorModel) -> List[float

return unit_parameter_list

def info_from_model(self, model: AbstractPriorModel) -> str:
    """
    Returns a string showing the starting-point bounds of the parameters in the initializer.

    Parameters whose prior has an entry in `parameter_dict` are displayed with the
    value produced by `info_value_from`; parameters without an entry fall back to
    displaying the prior itself (the search then uses the default unit limits).

    Parameters
    ----------
    model
        The model whose priors (ordered by id) are listed in the info string.

    Returns
    -------
    The formatted summary string.
    """
    info = "Total Free Parameters = " + str(model.prior_count) + "\n"
    info += "Total Starting Points = " + str(len(self.parameter_dict)) + "\n\n"
    for prior in model.priors_ordered_by_id:

        key = ".".join(model.path_for_prior(prior))

        try:

            value = self.info_value_from(self.parameter_dict[prior])

            info += f"{key}: Start[{value}]\n"

        except KeyError:

            # BUG FIX: removed a stray ")" that previously trailed the prior
            # in the fallback line (was: f"{key}: {prior})\n").
            info += f"{key}: {prior}\n"

    return info

def info_value_from(self, value : Tuple[float, float]) -> Tuple[float, float]:
    """
    Returns the value that is used to display the bounds of the parameters in the initializer.

    This function simply returns the input value unchanged, but it can be overridden
    in subclasses for different initializers (e.g. to display a single start point
    instead of a bounded range).

    Parameters
    ----------
    value
        The value to be displayed in the initializer info, which is a tuple of the
        lower and upper bounds of the parameter.

    Returns
    -------
    The input (lower, upper) bounds tuple, unmodified.
    """
    return value


class InitializerParamStartPoints(InitializerParamBounds):
    def __init__(
        self,
        parameter_dict: Dict[Prior, float],
    ):
        """
        Initializer which uses input values of the parameters as the starting point
        for the search (e.g. where an MLE optimization starts or MCMC walkers are
        initialized).

        Each single starting value is converted internally to a very narrow bounded
        range (value +/- 1.0e-8), so the `InitializerParamBounds` machinery
        effectively pins the search's starting point to that value.

        Parameters
        ----------
        parameter_dict
            A dictionary mapping each parameter path to the starting point physical
            value where the search begins.
        """
        parameter_dict_new = {}

        for key, value in parameter_dict.items():
            parameter_dict_new[key] = (value - 1.0e-8, value + 1.0e-8)

        super().__init__(parameter_dict=parameter_dict_new)

    def info_value_from(self, value : Tuple[float, float]) -> float:
        """
        Returns the value that is used to display the starting point of the
        parameters in the initializer.

        This function returns the mean of the input value, as the starting point is
        a single value in the center of the bounds.

        Parameters
        ----------
        value
            The value to be displayed in the initializer info, which is a tuple of
            the lower and upper bounds of the parameter.

        Returns
        -------
        The midpoint of the (lower, upper) bounds, i.e. the original start point.
        """
        return (value[1] + value[0]) / 2.0

class Initializer(AbstractInitializer):
def __init__(self, lower_limit: float, upper_limit: float):
Expand Down
4 changes: 2 additions & 2 deletions autofit/non_linear/mock/mock_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,13 +157,13 @@ def perform_update(self, model, analysis, during_analysis, search_internal=None)
)


class MockOptimizer(MockSearch):
class MockMLE(MockSearch):
def __init__(self, **kwargs):
    # NOTE(review): forces fit_fast=False on the underlying MockSearch —
    # presumably so the full (slow) fit path is exercised; confirm intent.
    super().__init__(fit_fast=False, **kwargs)

@property
def samples_cls(self):
    """
    The class used to represent samples for this search.

    The mock returns the `MockMLE` class itself rather than a dedicated
    samples class.
    """
    return MockMLE

def project(
self, factor_approx: FactorApproximation, status: Status = Status()
Expand Down
Loading