Here we show a standalone example of using PyMultiNest to estimate the parameters of a straight line model in data with Gaussian noise. The data and model used in this example are defined in createdata.py, which can be downloaded from here. The script shown below can be downloaded from here.
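The script relies on createdata.py providing the straight-line model function straight_line, the abscissa values x, the simulated data array data, the noise standard deviation sigma, and the true parameter values m and c. As a rough guide, a file along the following lines would supply those names (the specific values here are illustrative assumptions, not the actual contents of createdata.py):

# createdata.py (illustrative sketch; the real file may use different values)
import numpy as np

def straight_line(x, m, c):
    """A straight line model: y = m*x + c."""
    return m * x + c

# true parameters of the line (assumed values)
m = 3.5  # gradient
c = 1.2  # y-intercept

# noise level and abscissa points (assumed values)
sigma = 2.0                   # standard deviation of the Gaussian noise
x = np.linspace(0., 10., 50)  # points at which the data are taken

# simulate data: the straight line plus Gaussian noise
np.random.seed(42)  # fix the seed so the data are reproducible
data = straight_line(x, m, c) + sigma * np.random.randn(len(x))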
Example code
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Example of running PyMultiNest to fit the parameters of a straight line.
"""

from __future__ import print_function, division

import os
import sys

from scipy.special import ndtri
import numpy as np

try:
    import matplotlib as mpl
    mpl.use('Agg')
except ImportError:
    pass

# import PyMultiNest Solver class
from pymultinest.solve import Solver

# import model and data
from createdata import *

LN2PI = np.log(2. * np.pi)
LNSIGMA = np.log(sigma)
# create Solver class
class StraightLineModelPyMultiNest(Solver):
    """
    A simple straight line model, with a Gaussian likelihood.

    Args:
        data (:class:`numpy.ndarray`): an array containing the observed data
        abscissa (:class:`numpy.ndarray`): an array containing the points at which the data were taken
        modelfunc (function): a function defining the model
        sigma (float): the standard deviation of the noise in the data
        **kwargs: keyword arguments for the run method
    """

    # define the prior parameters
    cmin = -10.   # lower range on c (the same as the uniform c prior lower bound)
    cmax = 10.    # upper range on c (the same as the uniform c prior upper bound)

    mmu = 0.      # mean of the Gaussian prior on m
    msigma = 10.  # standard deviation of the Gaussian prior on m

    def __init__(self, data, abscissa, modelfunc, sigma, **kwargs):
        # set the data
        self._data = data               # observed data
        self._abscissa = abscissa       # points at which the observed data are taken
        self._sigma = sigma             # standard deviation(s) of the data
        self._logsigma = np.log(sigma)  # log sigma here to save computations in the likelihood
        self._ndata = len(data)         # number of data points
        self._model = modelfunc         # model function

        Solver.__init__(self, **kwargs)
    def Prior(self, cube):
        """
        The prior transform going from the unit hypercube to the true parameters. This function
        has to be called "Prior".

        Args:
            cube (:class:`numpy.ndarray`): an array of values drawn from the unit hypercube

        Returns:
            :class:`numpy.ndarray`: an array of the transformed parameters
        """

        # extract values
        mprime = cube[0]
        cprime = cube[1]

        m = self.mmu + self.msigma * ndtri(mprime)        # convert back to m
        c = cprime * (self.cmax - self.cmin) + self.cmin  # convert back to c

        return np.array([m, c])
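    # MultiNest samples the parameters uniformly over the unit hypercube, so the prior
    # transform above maps each unit-interval value onto the physical parameter range:
    # ndtri is the inverse cumulative distribution function of the standard normal, so
    # self.mmu + self.msigma * ndtri(mprime) turns a Uniform(0, 1) draw into a draw from
    # the Gaussian prior on m, while cprime * (self.cmax - self.cmin) + self.cmin rescales
    # a Uniform(0, 1) draw onto the uniform prior [cmin, cmax] on c.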
    def LogLikelihood(self, cube):
        """
        The log likelihood function. This function has to be called "LogLikelihood".

        Args:
            cube (:class:`numpy.ndarray`): an array of parameter values.

        Returns:
            float: the log likelihood value.
        """

        # extract parameters
        m = cube[0]
        c = cube[1]

        # calculate the model at the abscissa points
        model = self._model(self._abscissa, m, c)

        # normalisation
        norm = -0.5 * self._ndata * LN2PI - self._ndata * self._logsigma

        # chi-squared
        chisq = np.sum(((self._data - model) / self._sigma)**2)

        return norm - 0.5 * chisq
nlive = 1024  # number of live points
ndim = 2      # number of parameters
tol = 0.5     # stopping criterion

# run the algorithm
solution = StraightLineModelPyMultiNest(data, x, straight_line, sigma, n_dims=ndim,
                                        n_live_points=nlive, evidence_tolerance=tol)

logZpymnest = solution.logZ        # value of log Z
logZerrpymnest = solution.logZerr  # estimate of the statistical uncertainty on log Z

print('Marginalised evidence is {} ± {}'.format(logZpymnest, logZerrpymnest))

mchain_pymnest = solution.samples[:, 0]  # extract chain of m values
cchain_pymnest = solution.samples[:, 1]  # extract chain of c values

postsamples = np.vstack((mchain_pymnest, cchain_pymnest)).T

print('Number of posterior samples is {}'.format(postsamples.shape[0]))
# plot posterior samples (if corner.py is installed)
try:
    import corner  # import corner.py
except ImportError:
    sys.exit(1)

fig = corner.corner(postsamples, labels=[r"$m$", r"$c$"], truths=[m, c])
fig.savefig('PyMultiNest.png')
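The script above ends at the corner plot, but the posterior samples can also be summarised numerically. The following is a minimal sketch (not part of the downloadable script) that uses only NumPy and the mchain_pymnest and cchain_pymnest arrays defined above to print a posterior mean and 90% credible interval for each parameter:

# summarise the posterior samples for each parameter
for name, samples in zip(['m', 'c'], [mchain_pymnest, cchain_pymnest]):
    mean = np.mean(samples)                        # posterior mean
    low, high = np.percentile(samples, [5., 95.])  # bounds of the 90% credible interval
    print('{}: mean = {:.2f}, 90% credible interval = [{:.2f}, {:.2f}]'.format(name, mean, low, high))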
Running the code
A description of installing PyMultiNest is given here. If you have downloaded the createdata.py and test_PyMultiNest.py scripts into the directory ${HOME}, then you can run it using:
python test_PyMultiNest.py
If you have Matplotlib and corner.py installed, the script will produce a plot of the posterior distributions of the straight line parameters $m$ and $c$.
A Python 3 Docker image with PyMultiNest installed is available, which can be used with:
docker run -it -v ${HOME}:/work mattpitkin/samplers:python3
to enter an interactive container, and then within the container the test script can be run with:
python test_PyMultiNest.py