author     shivesh <s.p.mandalia@qmul.ac.uk>  2018-04-24 11:22:19 -0500
committer  shivesh <s.p.mandalia@qmul.ac.uk>  2018-04-24 11:22:19 -0500
commit     cfe60732b09544e304e66129383ceaf92ac8cdff (patch)
tree       cccf10230c86f293e540a3b158df52acd332114d
parent     2ca0c5597590e2043bd280dd8aee3d9d09bae29a (diff)
Tue Apr 24 11:22:19 CDT 2018
-rwxr-xr-x  fr.py                   42
-rwxr-xr-x  sens.py                336
-rwxr-xr-x  submitter/clean.sh       3
-rw-r--r--  submitter/make_dag.py   49
-rw-r--r--  submitter/submit.sub     6
-rw-r--r--  utils/enums.py          21
-rw-r--r--  utils/fr.py             17
-rw-r--r--  utils/gf.py             31
-rw-r--r--  utils/likelihood.py     38
-rw-r--r--  utils/mcmc.py            4
-rw-r--r--  utils/multinest.py      28
-rw-r--r--  utils/param.py          56
-rw-r--r--  utils/plot.py           25
13 files changed, 312 insertions(+), 344 deletions(-)
diff --git a/fr.py b/fr.py
index 363cb8e..7558c8e 100755
--- a/fr.py
+++ b/fr.py
@@ -21,29 +21,43 @@ from utils import likelihood as llh_utils
from utils import mcmc as mcmc_utils
from utils import misc as misc_utils
from utils import plot as plot_utils
-from utils.enums import EnergyDependance, Likelihood, MCMCSeedType, ParamTag
-from utils.fr import estimate_scale, normalise_fr
+from utils.enums import EnergyDependance, Likelihood, MCMCSeedType
+from utils.enums import ParamTag, PriorsCateg
from utils.param import Param, ParamSet, get_paramsets
def define_nuisance():
- """Define the nuisance parameters to scan over with default values,
- ranges and sigma.
- """
+ """Define the nuisance parameters."""
+ tag = ParamTag.SM_ANGLES
+ g_prior = PriorsCateg.GAUSSIAN
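+ # standard-oscillation parameters; most carry Gaussian priors (mass splittings in GeV^2)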
+ nuisance = [
+ Param(name='s_12_2', value=0.307, seed=[0.29, 0.31], ranges=[0., 1.], std=0.013, tex=r's_{12}^2', prior=g_prior, tag=tag),
+ Param(name='c_13_4', value=1-(0.02206)**2, seed=[0.998, 1.0], ranges=[0., 1.], std=0.00147, tex=r'c_{13}^4', prior=g_prior, tag=tag),
+ Param(name='s_23_2', value=0.538, seed=[0.46, 0.61], ranges=[0., 1.], std=0.069, tex=r's_{23}^2', prior=g_prior, tag=tag),
+ Param(name='dcp', value=4.08404, seed=[0, 2*np.pi], ranges=[0., 2*np.pi], std=2.0, tex=r'\delta_{CP}', tag=tag),
+ Param(
+ name='m21_2', value=7.40E-23, seed=[7.2E-23, 7.6E-23], ranges=[6.80E-23, 8.02E-23],
+ std=2.1E-24, tex=r'\Delta m_{21}^2\:{\rm GeV}^{-2}', prior=g_prior, tag=tag
+ ),
+ Param(
+ name='m3x_2', value=2.494E-21, seed=[2.46E-21, 2.53E-21], ranges=[2.399E-21, 2.593E-21],
+ std=3.3E-23, tex=r'\Delta m_{3x}^2\:{\rm GeV}^{-2}', prior=g_prior, tag=tag
+ )
+ ]
tag = ParamTag.NUISANCE
- nuisance = ParamSet(
+ nuisance.extend([
Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0. , 50.], std=0.3, tag=tag),
Param(name='promptNorm', value=0., seed=[0. , 6. ], ranges=[0. , 50.], std=0.05, tag=tag),
Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0. , 50.], std=0.1, tag=tag),
Param(name='astroNorm', value=6.9, seed=[0.1, 10.], ranges=[0. , 50.], std=0.1, tag=tag),
Param(name='astroDeltaGamma', value=2.5, seed=[1. , 3. ], ranges=[-5., 5. ], std=0.1, tag=tag)
- )
- return nuisance
+ ])
+ return ParamSet(nuisance)
def nuisance_argparse(parser):
- nuisance_paramset = define_nuisance()
- for parm in nuisance_paramset:
+ nuisance = define_nuisance()
+ for parm in nuisance:
parser.add_argument(
'--'+parm.name, type=float, default=parm.value,
help=parm.name+' to inject'
@@ -59,9 +73,9 @@ def process_args(args):
'--fix-mixing and --fix-mixing-almost cannot be used together'
)
- args.measured_ratio = normalise_fr(args.measured_ratio)
+ args.measured_ratio = fr_utils.normalise_fr(args.measured_ratio)
if args.fix_source_ratio:
- args.source_ratio = normalise_fr(args.source_ratio)
+ args.source_ratio = fr_utils.normalise_fr(args.source_ratio)
if args.energy_dependance is EnergyDependance.SPECTRAL:
args.binning = np.logspace(
@@ -69,7 +83,7 @@ def process_args(args):
)
if not args.fix_scale:
- args.scale = estimate_scale(args)
+ args.scale = fr_utils.estimate_scale(args)
args.scale_region = (args.scale/args.scale_region, args.scale*args.scale_region)
@@ -108,7 +122,7 @@ def main():
if args.seed is not None:
np.random.seed(args.seed)
- asimov_paramset, llh_paramset = get_paramsets(args)
+ asimov_paramset, llh_paramset = get_paramsets(args, define_nuisance())
outfile = misc_utils.gen_outfile_name(args)
print '== {0:<25} = {1}'.format('outfile', outfile)
diff --git a/sens.py b/sens.py
index 578528c..980ef3f 100755
--- a/sens.py
+++ b/sens.py
@@ -16,6 +16,7 @@ from functools import partial
import numpy as np
import numpy.ma as ma
+from scipy.optimize import minimize
from utils import fr as fr_utils
from utils import gf as gf_utils
@@ -23,32 +24,44 @@ from utils import likelihood as llh_utils
from utils import misc as misc_utils
from utils import plot as plot_utils
from utils.enums import EnergyDependance, Likelihood, ParamTag
-from utils.enums import SensitivityCateg, StatCateg
-from utils.fr import estimate_scale, normalise_fr
-from utils.misc import enum_parse, parse_bool
+from utils.enums import PriorsCateg, SensitivityCateg, StatCateg
from utils.param import Param, ParamSet, get_paramsets
from utils import multinest as mn_utils
def define_nuisance():
- """Define the nuisance parameters to scan over with default values,
- ranges and sigma.
- """
+ """Define the nuisance parameters."""
+ tag = ParamTag.SM_ANGLES
+ g_prior = PriorsCateg.GAUSSIAN
+ nuisance = [
+ Param(name='s_12_2', value=0.307, seed=[0.29, 0.31], ranges=[0., 1.], std=0.013, tex=r's_{12}^2', prior=g_prior, tag=tag),
+ Param(name='c_13_4', value=1-(0.02206)**2, seed=[0.998, 1.0], ranges=[0., 1.], std=0.00147, tex=r'c_{13}^4', prior=g_prior, tag=tag),
+ Param(name='s_23_2', value=0.538, seed=[0.46, 0.61], ranges=[0., 1.], std=0.069, tex=r's_{23}^2', prior=g_prior, tag=tag),
+ Param(name='dcp', value=4.08404, seed=[0, 2*np.pi], ranges=[0., 2*np.pi], std=2.0, tex=r'\delta_{CP}', tag=tag),
+ Param(
+ name='m21_2', value=7.40E-23, seed=[7.2E-23, 7.6E-23], ranges=[6.80E-23, 8.02E-23],
+ std=2.1E-24, tex=r'\Delta m_{21}^2\:{\rm GeV}^{-2}', prior=g_prior, tag=tag
+ ),
+ Param(
+ name='m3x_2', value=2.494E-21, seed=[2.46E-21, 2.53E-21], ranges=[2.399E-21, 2.593E-21],
+ std=3.3E-23, tex=r'\Delta m_{3x}^2\:{\rm GeV}^{-2}', prior=g_prior, tag=tag
+ )
+ ]
tag = ParamTag.NUISANCE
- nuisance = ParamSet(
+ nuisance.extend([
Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0. , 50.], std=0.3, tag=tag),
Param(name='promptNorm', value=0., seed=[0. , 6. ], ranges=[0. , 50.], std=0.05, tag=tag),
Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0. , 50.], std=0.1, tag=tag),
Param(name='astroNorm', value=6.9, seed=[0.1, 10.], ranges=[0. , 50.], std=0.1, tag=tag),
Param(name='astroDeltaGamma', value=2.5, seed=[1. , 3. ], ranges=[-5., 5. ], std=0.1, tag=tag)
- )
- return nuisance
+ ])
+ return ParamSet(nuisance)
def nuisance_argparse(parser):
- nuisance_paramset = define_nuisance()
- for parm in nuisance_paramset:
+ nuisance = define_nuisance()
+ for parm in nuisance:
parser.add_argument(
'--'+parm.name, type=float, default=parm.value,
help=parm.name+' to inject'
@@ -68,9 +81,9 @@ def process_args(args):
'--mn-run and --fix-scale cannot be used together'
)
- args.measured_ratio = normalise_fr(args.measured_ratio)
+ args.measured_ratio = fr_utils.normalise_fr(args.measured_ratio)
if args.fix_source_ratio:
- args.source_ratio = normalise_fr(args.source_ratio)
+ args.source_ratio = fr_utils.normalise_fr(args.source_ratio)
if args.energy_dependance is EnergyDependance.SPECTRAL:
args.binning = np.logspace(
@@ -78,7 +91,7 @@ def process_args(args):
)
if not args.fix_scale:
- args.scale = estimate_scale(args)
+ args.scale = fr_utils.estimate_scale(args)
args.scale_region = (args.scale/args.scale_region, args.scale*args.scale_region)
if args.mn_eval_bin.lower() == 'all':
@@ -86,6 +99,10 @@ def process_args(args):
else:
args.mn_eval_bin = int(args.mn_eval_bin)
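+ # frequentist scans with GolemFit switch to the dedicated GF_FREQ likelihood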
+ if args.stat_method is StatCateg.FREQUENTIST and \
+ args.likelihood is Likelihood.GOLEMFIT:
+ args.likelihood = Likelihood.GF_FREQ
+
def parse_args(args=None):
"""Parse command line arguments"""
@@ -107,14 +124,19 @@ def parse_args(args=None):
)
parser.add_argument(
'--run-method', default='full',
- type=partial(enum_parse, c=SensitivityCateg), choices=SensitivityCateg,
+ type=partial(misc_utils.enum_parse, c=SensitivityCateg),
+ choices=SensitivityCateg,
help='Choose which type of sensitivity plot to make'
)
parser.add_argument(
'--stat-method', default='bayesian',
- type=partial(enum_parse, c=StatCateg), choices=StatCateg,
+ type=partial(misc_utils.enum_parse, c=StatCateg), choices=StatCateg,
help='Statistical method to employ'
)
+ parser.add_argument(
+ '--plot-statistic', type=misc_utils.parse_bool, default='False',
+ help='Plot MultiNest evidence or LLH value'
+ )
fr_utils.fr_argparse(parser)
gf_utils.gf_argparse(parser)
llh_utils.likelihood_argparse(parser)
@@ -131,29 +153,31 @@ def main():
if args.seed is not None:
np.random.seed(args.seed)
- asimov_paramset, llh_paramset = get_paramsets(args)
+ asimov_paramset, llh_paramset = get_paramsets(args, define_nuisance())
scale = llh_paramset.from_tag(ParamTag.SCALE)[0]
outfile = misc_utils.gen_outfile_name(args)
print '== {0:<25} = {1}'.format('outfile', outfile)
mmangles = llh_paramset.from_tag(ParamTag.MMANGLES)
if args.run_method is SensitivityCateg.FULL:
- mn_paramset_arr = [llh_paramset.from_tag(ParamTag.SCALE, invert=True)]
+ st_paramset_arr = [llh_paramset.from_tag(ParamTag.SCALE, invert=True)]
elif args.run_method is SensitivityCateg.FIXED_ANGLE or \
args.run_method is SensitivityCateg.CORR_ANGLE:
nscale_pmset = llh_paramset.from_tag(ParamTag.SCALE, invert=True)
- mn_paramset_arr = []
+ st_paramset_arr = []
for x in xrange(3):
- mn_paramset_arr.append(
+ st_paramset_arr.append(
ParamSet([prms for prms in nscale_pmset
if mmangles[x].name != prms.name])
)
- out = args.outfile+'/{0}/{1}/fr_evidence'.format(args.stat_method, args.run_method) \
+ out = args.outfile+'/{0}/{1}/fr_stat'.format(args.stat_method, args.run_method) \
+ misc_utils.gen_identifier(args)
if args.mn_run:
if args.likelihood is Likelihood.GOLEMFIT:
fitter = gf_utils.setup_fitter(args, asimov_paramset)
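+ # free the scanned parameters in GolemFit's minimisation (see gf_utils.fit_flags)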
+ if args.stat_method is StatCateg.FREQUENTIST:
+ fitter.SetFitParametersFlag(gf_utils.fit_flags(llh_paramset))
else: fitter = None
scan_scales = np.linspace(
@@ -170,14 +194,14 @@ def main():
else: eval_dim = 1
if args.run_method is SensitivityCateg.FULL:
- evidence_arr = np.full((eval_dim, 2), np.nan)
+ statistic_arr = np.full((eval_dim, 2), np.nan)
elif args.run_method is SensitivityCateg.FIXED_ANGLE:
- evidence_arr = np.full((len(mn_paramset_arr), eval_dim, 2), np.nan)
+ statistic_arr = np.full((len(st_paramset_arr), eval_dim, 2), np.nan)
elif args.run_method is SensitivityCateg.CORR_ANGLE:
- evidence_arr = np.full((len(mn_paramset_arr), eval_dim, eval_dim, 3), np.nan)
+ statistic_arr = np.full((len(st_paramset_arr), eval_dim, eval_dim, 3), np.nan)
- for idx_scen, mn_paramset in enumerate(mn_paramset_arr):
+ for idx_scen, mn_paramset in enumerate(st_paramset_arr):
print '|||| SCENARIO = {0}'.format(idx_scen)
for x in mmangles: x.value = 0.
if args.run_method is SensitivityCateg.FIXED_ANGLE:
@@ -204,38 +228,60 @@ def main():
print '|||| SCALE = {0:.0E}'.format(np.power(10, sc))
scale.value = sc
- try:
- a_lnZ = mn_utils.mn_evidence(
- mn_paramset = mn_paramset,
- llh_paramset = llh_paramset,
- asimov_paramset = asimov_paramset,
- args = args,
- fitter = fitter
- )
- except:
- print 'Failed run, continuing'
- continue
- print '## Evidence = {0}'.format(a_lnZ)
+ if args.stat_method is StatCateg.BAYESIAN:
+ try:
+ stat = mn_utils.mn_evidence(
+ mn_paramset = mn_paramset,
+ llh_paramset = llh_paramset,
+ asimov_paramset = asimov_paramset,
+ args = args,
+ fitter = fitter
+ )
+ except:
+ print 'Failed run, continuing'
+ continue
+ print '## Evidence = {0}'.format(stat)
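+ # frequentist case: profile the remaining parameters by minimising the negative LLH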
+ elif args.stat_method is StatCateg.FREQUENTIST:
+ llh_paramset_freq = ParamSet(
+ [parm for parm in llh_paramset if
+ parm.name not in asimov_paramset.names]
+ )
+ def fn(x):
+ for idx, parm in enumerate(llh_paramset_freq):
+ parm.value = x[idx]
+ theta = llh_paramset_freq.values
+ try:
+ llh = llh_utils.ln_prob(
+ theta=theta, args=args, asimov_paramset=asimov_paramset,
+ llh_paramset=llh_paramset_freq, fitter=fitter
+ )
+ except:
+ print 'Failed run, continuing'
+ return np.inf
+ return -llh
+
+ x0 = np.average(llh_paramset_freq.seeds, axis=1)
+ res = minimize(fun=fn, x0=x0, method='L-BFGS-B',
+ bounds=llh_paramset_freq.ranges)
+ stat = -fn(res.x)
if args.run_method is SensitivityCateg.FULL:
- evidence_arr[idx_sc] = np.array([sc, a_lnZ])
+ statistic_arr[idx_sc] = np.array([sc, stat])
elif args.run_method is SensitivityCateg.FIXED_ANGLE:
- evidence_arr[idx_scen][idx_sc] = np.array([sc, a_lnZ])
+ statistic_arr[idx_scen][idx_sc] = np.array([sc, stat])
elif args.run_method is SensitivityCateg.CORR_ANGLE:
- evidence_arr[idx_scen][idx_an][idx_sc] = np.array([an, sc, a_lnZ])
+ statistic_arr[idx_scen][idx_an][idx_sc] = np.array([an, sc, stat])
misc_utils.make_dir(out)
print 'Saving to {0}'.format(out+'.npy')
- np.save(out+'.npy', np.array(evidence_arr))
+ np.save(out+'.npy', np.array(statistic_arr))
- if args.plot_multinest:
- if args.mn_run: raw = evidence_arr
+ if args.plot_statistic:
+ if args.mn_run: raw = statistic_arr
else: raw = np.load(out+'.npy')
data = ma.masked_invalid(raw, 0)
basename = os.path.dirname(out) + '/mnrun/' + os.path.basename(out)
baseoutfile = basename[:5]+basename[5:].replace('data', 'plots')
if args.run_method is SensitivityCateg.FULL:
- plot_utils.plot_multinest(
+ plot_utils.plot_statistic(
data = data,
outfile = baseoutfile,
outformat = ['png'],
@@ -243,13 +289,13 @@ def main():
scale_param = scale
)
elif args.run_method is SensitivityCateg.FIXED_ANGLE:
- for idx_scen in xrange(len(mn_paramset_arr)):
+ for idx_scen in xrange(len(st_paramset_arr)):
print '|||| SCENARIO = {0}'.format(idx_scen)
outfile = baseoutfile + '_SCEN{0}'.format(idx_scen)
if idx_scen == 0: label = r'$\mathcal{O}_{12}=\frac{1}{2}$'
elif idx_scen == 1: label = r'$\mathcal{O}_{13}=\frac{1}{2}$'
elif idx_scen == 2: label = r'$\mathcal{O}_{23}=\frac{1}{2}$'
- plot_utils.plot_multinest(
+ plot_utils.plot_statistic(
data = data[idx_scen],
outfile = outfile,
outformat = ['png'],
@@ -258,7 +304,7 @@ def main():
label = label
)
elif args.run_method is SensitivityCateg.CORR_ANGLE:
- for idx_scen in xrange(len(mn_paramset_arr)):
+ for idx_scen in xrange(len(st_paramset_arr)):
print '|||| SCENARIO = {0}'.format(idx_scen)
basescenoutfile = baseoutfile + '_SCEN{0}'.format(idx_scen)
if idx_scen == 0: label = r'$\mathcal{O}_{12}='
@@ -268,7 +314,7 @@ def main():
print '|||| ANGLE = {0:<04.2}'.format(an)
outfile = basescenoutfile + '_ANGLE{0}'.format(idx_an)
label += r'{0:<04.2}$'.format(an)
- plot_utils.plot_multinest(
+ plot_utils.plot_statistic(
data = data[idx_scen][idx_an][:,1:],
outfile = outfile,
outformat = ['png'],
@@ -277,200 +323,6 @@ def main():
label = label
)
- # dirname = os.path.dirname(out)
- # plot_utils.bayes_factor_plot(
- # dirname=dirname, outfile=out, outformat=['png'], args=args
- # )
-
- # out = args.angles_lim_output+'/fr_an_evidence' + misc_utils.gen_identifier(args)
- # if args.run_angles_limit:
- # import pymultinest
-
- # scenarios = [
- # [np.sin(np.pi/2.)**2, 0, 0, 0],
- # [0, np.cos(np.pi/2.)**4, 0, 0],
- # [0, 0, np.sin(np.pi/2.)**2, 0],
- # ]
- # p = llh_paramset.from_tag([ParamTag.SCALE, ParamTag.MMANGLES], invert=True)
- # n_params = len(p)
- # prior_ranges = p.seeds
-
- # if not args.run_llh and args.likelihood is Likelihood.GOLEMFIT:
- # fitter = gf_utils.setup_fitter(args, asimov_paramset)
- # else: fitter = None
-
- # def CubePrior(cube, ndim, nparams):
- # # default are uniform priors
- # return ;
-
- # if args.bayes_eval_bin is not None:
- # data = np.zeros((len(scenarios), 1, 2))
- # else: data = np.zeros((len(scenarios), args.bayes_bins, 2))
- # mm_angles = llh_paramset.from_tag(ParamTag.MMANGLES)
- # sc_angles = llh_paramset.from_tag(ParamTag.SCALE)[0]
- # for idx, scen in enumerate(scenarios):
- # scales, evidences = [], []
- # for yidx, an in enumerate(mm_angles):
- # an.value = scen[yidx]
- # for s_idx, sc in enumerate(scan_scales):
- # if args.bayes_eval_bin is not None:
- # if s_idx in args.bayes_eval_bin:
- # if idx == 0:
- # out += '_scale_{0:.0E}'.format(np.power(10, sc))
- # else: continue
-
- # print '== SCALE = {0:.0E}'.format(np.power(10, sc))
- # sc_angles.value = sc
- # def lnProb(cube, ndim, nparams):
- # for i in range(ndim):
- # prange = prior_ranges[i][1] - prior_ranges[i][0]
- # p[i].value = prange*cube[i] + prior_ranges[i][0]
- # for name in p.names:
- # mcmc_paramset[name].value = p[name].value
- # theta = mcmc_paramset.values
- # # print 'theta', theta
- # # print 'mcmc_paramset', mcmc_paramset
- # return llh_utils.triangle_llh(
- # theta=theta,
- # args=args,
- # asimov_paramset=asimov_paramset,
- # mcmc_paramset=mcmc_paramset,
- # fitter=fitter
- # )
- # prefix = 'mnrun/' + os.path.basename(out) + '_'
- # if args.bayes_eval_bin is not None:
- # prefix += '{0}_{1}_'.format(idx, s_idx)
- # print 'begin running evidence calculation for {0}'.format(prefix)
- # result = pymultinest.run(
- # LogLikelihood=lnProb,
- # Prior=CubePrior,
- # n_dims=n_params,
- # importance_nested_sampling=True,
- # n_live_points=args.bayes_live_points,
- # evidence_tolerance=args.bayes_tolerance,
- # outputfiles_basename=prefix,
- # resume=False,
- # verbose=True
- # )
-
- # analyzer = pymultinest.Analyzer(outputfiles_basename=prefix, n_params=n_params)
- # a_lnZ = analyzer.get_stats()['global evidence']
- # print 'Evidence = {0}'.format(a_lnZ)
- # scales.append(sc)
- # evidences.append(a_lnZ)
-
- # for i, d in enumerate(evidences):
- # data[idx][i][0] = scales[i]
- # data[idx][i][1] = d
-
- # misc_utils.make_dir(out)
- # print 'saving to {0}.npy'.format(out)
- # np.save(out+'.npy', np.array(data))
-
- # dirname = os.path.dirname(out)
- # plot_utils.plot_BSM_angles_limit(
- # dirname=dirname, outfile=outfile, outformat=['png'],
- # args=args, bayesian=True
- # )
-
- # out = args.angles_corr_output+'/fr_co_evidence' + misc_utils.gen_identifier(args)
- # if args.run_angles_correlation:
- # if args.bayes_eval_bin is None: assert 0
- # import pymultinest
-
- # scenarios = [
- # [1, 0, 0, 0],
- # [0, 1, 0, 0],
- # [0, 0, 1, 0],
- # ]
- # nuisance = mcmc_paramset.from_tag(ParamTag.NUISANCE)
- # mm_angles = mcmc_paramset.from_tag(ParamTag.MMANGLES)
- # sc_angles = mcmc_paramset.from_tag(ParamTag.SCALE)[0]
-
- # if not args.run_mcmc and args.likelihood is Likelihood.GOLEMFIT:
- # fitter = gf_utils.setup_fitter(args, asimov_paramset)
- # else: fitter = None
-
- # def CubePrior(cube, ndim, nparams):
- # # default are uniform priors
- # return ;
-
- # scan_angles = np.linspace(0, 1, args.bayes_bins)
-
- # if args.bayes_eval_bin is not None:
- # data = np.zeros((len(scenarios), 1, 1, 3))
- # else: data = np.zeros((len(scenarios), args.bayes_bins, args.bayes_bins, 3))
- # for idx, scen in enumerate(scenarios):
- # for an in mm_angles:
- # an.value = 0
- # keep = mcmc_paramset.from_tag(ParamTag.MMANGLES)[idx]
- # q = ParamSet(nuisance.params + [x for x in mm_angles if x.name != keep.name])
- # n_params = len(q)
- # prior_ranges = q.seeds
-
- # scales, angles, evidences = [], [], []
- # for s_idx, sc in enumerate(scan_scales):
- # for a_idx, an in enumerate(scan_angles):
- # index = s_idx*args.bayes_bins + a_idx
- # if args.bayes_eval_bin is not None:
- # if index in args.bayes_eval_bin:
- # if idx == 0:
- # out += '_idx_{0}'.format(index)
- # else: continue
-
- # print '== SCALE = {0:.0E}'.format(np.power(10, sc))
- # print '== ANGLE = {0:.2f}'.format(an)
- # sc_angles.value = sc
- # keep.value = an
- # def lnProb(cube, ndim, nparams):
- # for i in range(ndim-1):
- # prange = prior_ranges[i][1] - prior_ranges[i][0]
- # q[i].value = prange*cube[i] + prior_ranges[i][0]
- # for name in q.names:
- # mcmc_paramset[name].value = q[name].value
- # theta = mcmc_paramset.values
- # # print 'theta', theta
- # # print 'mcmc_paramset', mcmc_paramset
- # return llh_utils.triangle_llh(
- # theta=theta,
- # args=args,
- # asimov_paramset=asimov_paramset,
- # mcmc_paramset=mcmc_paramset,
- # fitter=fitter
- # )
- # prefix = 'mnrun/' + os.path.basename(out) + '_'
- # if args.bayes_eval_bin is not None:
- # prefix += '{0}_{1}_{2}'.format(idx, s_idx, a_idx)
-
- # print 'begin running evidence calculation for {0}'.format(prefix)
- # result = pymultinest.run(
- # LogLikelihood=lnProb,
- # Prior=CubePrior,
- # n_dims=n_params,
- # importance_nested_sampling=True,
- # n_live_points=args.bayes_live_points,
- # evidence_tolerance=args.bayes_tolerance,
- # outputfiles_basename=prefix,
- # resume=False,
- # verbose=True
- # )
-
- # analyzer = pymultinest.Analyzer(outputfiles_basename=prefix, n_params=n_params)
- # a_lnZ = analyzer.get_stats()['global evidence']
- # print 'Evidence = {0}'.format(a_lnZ)
- # scales.append(sc)
- # angles.append(an)
- # evidences.append(a_lnZ)
-
- # for i, d in enumerate(evidences):
- # data[idx][i][i][0] = scales[i]
- # data[idx][i][i][1] = angles[i]
- # data[idx][i][i][2] = d
-
- # misc_utils.make_dir(out)
- # print 'saving to {0}.npy'.format(out)
- # np.save(out+'.npy', np.array(data))
-
main.__doc__ = __doc__
diff --git a/submitter/clean.sh b/submitter/clean.sh
index a89f017..bb1443c 100755
--- a/submitter/clean.sh
+++ b/submitter/clean.sh
@@ -4,4 +4,5 @@ rm -f dagman_FR.submit.*
rm -f dagman_FR_*.submit.*
rm -f logs/*
rm -f metaouts/*
-rm -f mnrun/*
+rm -rf mnrun/
+mkdir mnrun
diff --git a/submitter/make_dag.py b/submitter/make_dag.py
index c6e7400..0e41c9a 100644
--- a/submitter/make_dag.py
+++ b/submitter/make_dag.py
@@ -17,29 +17,29 @@ full_scan_mfr = [
fix_sfr_mfr = [
(1, 1, 1, 1, 2, 0),
+ (1, 1, 1, 1, 0, 0),
+ (1, 1, 1, 0, 1, 0),
+ (1, 1, 1, 0, 0, 1),
# (1, 1, 0, 1, 2, 0),
- # (1, 2, 0, 1, 2, 0),
- # (1, 1, 1, 1, 0, 0),
# (1, 1, 0, 1, 0, 0),
- # (1, 0, 0, 1, 0, 0),
- (1, 1, 1, 0, 1, 0),
# (1, 1, 0, 0, 1, 0),
+ # (1, 0, 0, 1, 0, 0),
# (0, 1, 0, 0, 1, 0),
+ # (1, 2, 0, 1, 2, 0),
# (1, 2, 0, 0, 1, 0),
- # (1, 1, 1, 0, 0, 1),
]
# MCMC
-run_mcmc = 'False'
+run_mcmc = 'True'
burnin = 2500
nsteps = 10000
nwalkers = 60
seed = 'None'
-threads = 4
+threads = 1
mcmc_seed_type = 'uniform'
# FR
-dimension = [4, 5, 7, 8]
+dimension = [3, 6]
energy = [1e6]
no_bsm = 'False'
sigma_ratio = ['0.01']
@@ -52,7 +52,7 @@ fix_mixing = 'False'
fix_mixing_almost = 'False'
# Likelihood
-likelihood = 'golemfit'
+likelihood = 'gaussian'
# Nuisance
convNorm = 1.
@@ -66,7 +66,7 @@ ast = 'p2_0'
data = 'real'
# Bayes Factor
-run_bayes_factor = 'True'
+run_bayes_factor = 'False'
run_angles_limit = 'False'
run_angles_correlation = 'False'
bayes_bins = 100
@@ -75,13 +75,12 @@ bayes_tolerance = 0.01
bayes_eval_bin = 'all' # set to 'all' to run normally
# Plot
-plot_angles = 'False'
+plot_angles = 'True'
plot_elements = 'False'
plot_bayes = 'False'
plot_angles_limit = 'False'
-outfile = 'dagman_FR_freq_fullscan_otherdims.submit'
-# outfile = 'dagman_FR_bayes_freq.submit'
+outfile = 'dagman_FR.submit'
golemfitsourcepath = os.environ['GOLEMSOURCEPATH'] + '/GolemFit'
condor_script = golemfitsourcepath + '/scripts/flavour_ratio/submitter/submit.sub'
@@ -161,18 +160,18 @@ with open(outfile, 'w') as f:
f.write('VARS\tjob{0}\tbinning_1="{1}"\n'.format(job_number, binning[1]))
f.write('VARS\tjob{0}\tbinning_2="{1}"\n'.format(job_number, binning[2]))
f.write('VARS\tjob{0}\tfix_mixing_almost="{1}"\n'.format(job_number, fix_mixing_almost))
- f.write('VARS\tjob{0}\trun_bayes_factor="{1}"\n'.format(job_number, run_bayes_factor))
- f.write('VARS\tjob{0}\tbayes_bins="{1}"\n'.format(job_number, bayes_bins))
- f.write('VARS\tjob{0}\tbayes_output="{1}"\n'.format(job_number, bayes_output))
- f.write('VARS\tjob{0}\tbayes_live_points="{1}"\n'.format(job_number, bayes_live_points))
- f.write('VARS\tjob{0}\tbayes_tolerance="{1}"\n'.format(job_number, bayes_tolerance))
- f.write('VARS\tjob{0}\tplot_bayes="{1}"\n'.format(job_number, plot_bayes))
- f.write('VARS\tjob{0}\tbayes_eval_bin="{1}"\n'.format(job_number, r))
- f.write('VARS\tjob{0}\trun_angles_limit="{1}"\n'.format(job_number, run_angles_limit))
- f.write('VARS\tjob{0}\tangles_lim_output="{1}"\n'.format(job_number, angles_lim_output))
- f.write('VARS\tjob{0}\tplot_angles_limit="{1}"\n'.format(job_number, plot_angles_limit))
- f.write('VARS\tjob{0}\trun_angles_correlation="{1}"\n'.format(job_number, run_angles_correlation))
- f.write('VARS\tjob{0}\tangles_corr_output="{1}"\n'.format(job_number, angles_corr_output))
+ # f.write('VARS\tjob{0}\trun_bayes_factor="{1}"\n'.format(job_number, run_bayes_factor))
+ # f.write('VARS\tjob{0}\tbayes_bins="{1}"\n'.format(job_number, bayes_bins))
+ # f.write('VARS\tjob{0}\tbayes_output="{1}"\n'.format(job_number, bayes_output))
+ # f.write('VARS\tjob{0}\tbayes_live_points="{1}"\n'.format(job_number, bayes_live_points))
+ # f.write('VARS\tjob{0}\tbayes_tolerance="{1}"\n'.format(job_number, bayes_tolerance))
+ # f.write('VARS\tjob{0}\tplot_bayes="{1}"\n'.format(job_number, plot_bayes))
+ # f.write('VARS\tjob{0}\tbayes_eval_bin="{1}"\n'.format(job_number, r))
+ # f.write('VARS\tjob{0}\trun_angles_limit="{1}"\n'.format(job_number, run_angles_limit))
+ # f.write('VARS\tjob{0}\tangles_lim_output="{1}"\n'.format(job_number, angles_lim_output))
+ # f.write('VARS\tjob{0}\tplot_angles_limit="{1}"\n'.format(job_number, plot_angles_limit))
+ # f.write('VARS\tjob{0}\trun_angles_correlation="{1}"\n'.format(job_number, run_angles_correlation))
+ # f.write('VARS\tjob{0}\tangles_corr_output="{1}"\n'.format(job_number, angles_corr_output))
job_number += 1
for frs in full_scan_mfr:
diff --git a/submitter/submit.sub b/submitter/submit.sub
index 810ffa0..8e3c5af 100644
--- a/submitter/submit.sub
+++ b/submitter/submit.sub
@@ -1,5 +1,7 @@
Executable = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/fr.py
-Arguments = "--ast $(ast) --astroDeltaGamma $(astroDeltaGamma) --astroNorm $(astroNorm) --burnin $(burnin) --convNorm $(convNorm) --data $(data) --dimension $(dimension) --energy $(energy) --fix-mixing $(fix_mixing) --fix-scale $(fix_scale) --fix-source-ratio $(fix_source_ratio) --likelihood $(likelihood) --measured-ratio $(mr0) $(mr1) $(mr2) --muonNorm $(muonNorm) --no-bsm $(no_bsm) --nsteps $(nsteps) --nwalkers $(nwalkers) --outfile $(outfile) --plot-angles $(plot_angles) --plot-elements $(plot_elements) --promptNorm $(promptNorm) --run-mcmc $(run_mcmc) --scale $(scale) --scale-region $(scale_region) --seed $(seed) --sigma-ratio $(sigma_ratio) --source-ratio $(sr0) $(sr1) $(sr2) --threads $(threads) --mcmc-seed-type $(mcmc_seed_type) --energy-dependance $(energy_dependance) --spectral-index $(spectral_index) --binning $(binning_0) $(binning_1) $(binning_2) --fix-mixing-almost $(fix_mixing_almost) --run-bayes-factor $(run_bayes_factor) --bayes-bins $(bayes_bins) --bayes-output $(bayes_output) --bayes-live-points $(bayes_live_points) --plot-bayes $(plot_bayes) --bayes-tolerance $(bayes_tolerance) --bayes-eval-bin $(bayes_eval_bin) --run-angles-limit $(run_angles_limit) --angles-lim-out $(angles_lim_output) --plot-angles-limit $(plot_angles_limit) --run-angles-correlation $(run_angles_correlation) --angles-corr-output $(angles_corr_output)"
+Arguments = "--ast $(ast) --astroDeltaGamma $(astroDeltaGamma) --astroNorm $(astroNorm) --burnin $(burnin) --convNorm $(convNorm) --data $(data) --dimension $(dimension) --energy $(energy) --fix-mixing $(fix_mixing) --fix-scale $(fix_scale) --fix-source-ratio $(fix_source_ratio) --likelihood $(likelihood) --measured-ratio $(mr0) $(mr1) $(mr2) --muonNorm $(muonNorm) --no-bsm $(no_bsm) --nsteps $(nsteps) --nwalkers $(nwalkers) --outfile $(outfile) --plot-angles $(plot_angles) --plot-elements $(plot_elements) --promptNorm $(promptNorm) --run-mcmc $(run_mcmc) --scale $(scale) --scale-region $(scale_region) --seed $(seed) --sigma-ratio $(sigma_ratio) --source-ratio $(sr0) $(sr1) $(sr2) --threads $(threads) --mcmc-seed-type $(mcmc_seed_type) --energy-dependance $(energy_dependance) --spectral-index $(spectral_index) --binning $(binning_0) $(binning_1) $(binning_2) --fix-mixing-almost $(fix_mixing_almost)"
+
+# --run-bayes-factor $(run_bayes_factor) --bayes-bins $(bayes_bins) --bayes-output $(bayes_output) --bayes-live-points $(bayes_live_points) --plot-bayes $(plot_bayes) --bayes-tolerance $(bayes_tolerance) --bayes-eval-bin $(bayes_eval_bin) --run-angles-limit $(run_angles_limit) --angles-lim-out $(angles_lim_output) --plot-angles-limit $(plot_angles_limit) --run-angles-correlation $(run_angles_correlation) --angles-corr-output $(angles_corr_output)"
# All logs will go to a single file
log = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).log
@@ -17,7 +19,7 @@ getenv = True
Transfer_output_files = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/metaouts/
request_memory = 1GB
-request_cpus = 1
+request_cpus = 4
Universe = vanilla
Notification = never
diff --git a/utils/enums.py b/utils/enums.py
index 359e104..8a9e868 100644
--- a/utils/enums.py
+++ b/utils/enums.py
@@ -21,11 +21,6 @@ class EnergyDependance(Enum):
SPECTRAL = 2
-class FitPriorsCateg(Enum):
- DEFAULT = 1
- XS = 2
-
-
class Likelihood(Enum):
FLAT = 1
GAUSSIAN = 2
@@ -35,11 +30,17 @@ class Likelihood(Enum):
class ParamTag(Enum):
NUISANCE = 1
- MMANGLES = 2
- SCALE = 3
- SRCANGLES = 4
- BESTFIT = 5
- NONE = 6
+ SM_ANGLES = 2
+ MMANGLES = 3
+ SCALE = 4
+ SRCANGLES = 5
+ BESTFIT = 6
+ NONE = 7
+
+
+class PriorsCateg(Enum):
+ UNIFORM = 1
+ GAUSSIAN = 2
class MCMCSeedType(Enum):
diff --git a/utils/fr.py b/utils/fr.py
index 6a405a7..342a848 100644
--- a/utils/fr.py
+++ b/utils/fr.py
@@ -198,17 +198,18 @@ def normalise_fr(fr):
return np.array(fr) / float(np.sum(fr))
-def estimate_scale(args, mass_eigenvalues=MASS_EIGENVALUES):
+def estimate_scale(args):
"""Estimate the scale at which new physics will enter."""
+ m_eign = args.m3x_2
if args.energy_dependance is EnergyDependance.MONO:
scale = np.power(
- 10, np.round(np.log10(MASS_EIGENVALUES[1]/args.energy)) - \
+ 10, np.round(np.log10(m_eign/args.energy)) - \
np.log10(args.energy**(args.dimension-3))
)
elif args.energy_dependance is EnergyDependance.SPECTRAL:
scale = np.power(
10, np.round(
- np.log10(MASS_EIGENVALUES[1]/np.power(10, np.average(np.log10(args.binning)))) \
+ np.log10(m_eign/np.power(10, np.average(np.log10(args.binning)))) \
- np.log10(np.power(10, np.average(np.log10(args.binning)))**(args.dimension-3))
)
)
@@ -297,7 +298,7 @@ NUFIT_U = angles_to_u((0.307, (1-0.02195)**2, 0.565, 3.97935))
def params_to_BSMu(theta, dim, energy, mass_eigenvalues=MASS_EIGENVALUES,
- nufit_u=NUFIT_U, no_bsm=False, fix_mixing=False,
+ sm_u=NUFIT_U, no_bsm=False, fix_mixing=False,
fix_mixing_almost=False, fix_scale=False, scale=None,
check_uni=True, epsilon=1e-9):
"""Construct the BSM mixing matrix from the BSM parameters.
@@ -316,8 +317,8 @@ def params_to_BSMu(theta, dim, energy, mass_eigenvalues=MASS_EIGENVALUES,
mass_eigenvalues : list, length = 2
SM mass eigenvalues
- nufit_u : numpy ndarray, dimension 3
- SM NuFIT mixing matrix
+ sm_u : numpy ndarray, dimension 3
+ SM mixing matrix
no_bsm : bool
Turn off BSM behaviour
@@ -350,7 +351,7 @@ def params_to_BSMu(theta, dim, energy, mass_eigenvalues=MASS_EIGENVALUES,
[-0.32561308 -3.95946524e-01j, 0.64294909 -2.23453580e-01j, 0.03700830 +5.22032403e-01j]])
"""
- if np.shape(nufit_u) != (3, 3):
+ if np.shape(sm_u) != (3, 3):
raise ValueError(
'Input matrix should be a square and dimension 3, '
'got\n{0}'.format(ham)
@@ -377,7 +378,7 @@ def params_to_BSMu(theta, dim, energy, mass_eigenvalues=MASS_EIGENVALUES,
mass_matrix = np.array(
[[0, 0, 0], [0, mass_eigenvalues[0], 0], [0, 0, mass_eigenvalues[1]]]
)
- sm_ham = (1./(2*energy))*np.dot(nufit_u, np.dot(mass_matrix, nufit_u.conj().T))
+ sm_ham = (1./(2*energy))*np.dot(sm_u, np.dot(mass_matrix, sm_u.conj().T))
if no_bsm:
eg_vector = cardano_eqn(sm_ham)
else:
diff --git a/utils/gf.py b/utils/gf.py
index 20afc75..59d1033 100644
--- a/utils/gf.py
+++ b/utils/gf.py
@@ -18,6 +18,37 @@ from utils.enums import DataType, SteeringCateg
from utils.misc import enum_parse, thread_factors
+def fit_flags(llh_paramset):
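+ """Build GolemFit fit flags, freeing (False) the parameters present in llh_paramset."""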
+ default_flags = {
+ # False means it's not fixed in minimization
+ 'astroFlavorAngle1' : True,
+ 'astroFlavorAngle2' : True,
+ 'astroENorm' : True,
+ 'astroMuNorm' : True,
+ 'astroTauNorm' : True,
+ 'convNorm' : True,
+ 'promptNorm' : True,
+ 'muonNorm' : True,
+ 'astroNorm' : True,
+ 'astroParticleBalance' : True,
+ 'astroDeltaGamma' : True,
+ 'cutoffEnergy' : True,
+ 'CRDeltaGamma' : True,
+ 'piKRatio' : True,
+ 'NeutrinoAntineutrinoRatio' : True,
+ 'darkNorm' : True,
+ 'domEfficiency' : True,
+ 'holeiceForward' : True,
+ 'anisotropyScale' : True,
+ 'astroNormSec' : True,
+ 'astroDeltaGammaSec' : True
+ }
+ flags = gf.FitParametersFlag()
+ for param in llh_paramset:
+ flags.__setattr__(param.name, False)
+ return flags
+
+
def steering_params(args):
steering_categ = args.ast
params = gf.SteeringParams()
diff --git a/utils/likelihood.py b/utils/likelihood.py
index 1981c72..04828e8 100644
--- a/utils/likelihood.py
+++ b/utils/likelihood.py
@@ -18,16 +18,21 @@ import GolemFitPy as gf
from utils import fr as fr_utils
from utils import gf as gf_utils
-from utils.enums import EnergyDependance, Likelihood, ParamTag
+from utils.enums import EnergyDependance, Likelihood, ParamTag, PriorsCateg
from utils.misc import enum_parse
-def gaussian_llh(fr, fr_bf, sigma):
+def multi_gaussian(fr, fr_bf, sigma):
"""Multivariate gaussian likelihood."""
cov_fr = np.identity(3) * sigma
return np.log(multivariate_normal.pdf(fr, mean=fr_bf, cov=cov_fr))
+def log_gaussian(x, mu, sig):
+ """Log gaussian for one dimension."""
+ return np.log((1./np.sqrt(2*np.pi*sig**2))*np.exp(-((x-mu)**2)/(2*sig**2)))
+
+
def likelihood_argparse(parser):
parser.add_argument(
'--likelihood', default='gaussian', type=partial(enum_parse, c=Likelihood),
@@ -41,16 +46,25 @@ def likelihood_argparse(parser):
def lnprior(theta, paramset):
"""Priors on theta."""
+ for idx, param in enumerate(paramset):
+ param.value = theta[idx]
ranges = paramset.ranges
for value, range in zip(theta, ranges):
if range[0] <= value <= range[1]:
pass
else: return -np.inf
- return 0.
+
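+ # add log-Gaussian penalties for parameters carrying a Gaussian prior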
+ prior = 0
+ for param in paramset:
+ if param.prior is PriorsCateg.GAUSSIAN:
+ prior += log_gaussian(
+ param.nominal_value, param.value, param.std
+ )
+ return prior
def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
- """-Log likelihood function for a given theta."""
+ """Log likelihood function for a given theta."""
if len(theta) != len(llh_paramset):
raise AssertionError(
'Length of MCMC scan is not the same as the input '
@@ -89,11 +103,21 @@ def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
[ParamTag.SCALE, ParamTag.MMANGLES], values=True
)
+ m_eig_names = ['m21_2', 'm3x_2']
+ mass_eigenvalues = [x.value for x in llh_paramset if x.name in m_eig_names]
+
+ ma_names = ['s_12_2', 'c_13_4', 's_23_2', 'dcp']
+ sm_u = fr_utils.angles_to_u(
+ [x.value for x in llh_paramset if x.name in ma_names]
+ )
+
if args.energy_dependance is EnergyDependance.MONO:
u = fr_utils.params_to_BSMu(
theta = bsm_angles,
dim = args.dimension,
energy = args.energy,
+ mass_eigenvalues = mass_eigenvalues,
+ sm_u = sm_u,
no_bsm = args.no_bsm,
fix_mixing = args.fix_mixing,
fix_mixing_almost = args.fix_mixing_almost,
@@ -108,6 +132,8 @@ def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
theta = bsm_angles,
dim = args.dimension,
energy = args.energy,
+ mass_eigenvalues = mass_eigenvalues,
+ sm_u = sm_u,
no_bsm = args.no_bsm,
fix_mixing = args.fix_mixing,
fix_mixing_almost = args.fix_mixing_almost,
@@ -130,14 +156,14 @@ def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
return 1.
elif args.likelihood is Likelihood.GAUSSIAN:
fr_bf = args.measured_ratio
- return gaussian_llh(fr, fr_bf, args.sigma_ratio)
+ return multi_gaussian(fr, fr_bf, args.sigma_ratio)
elif args.likelihood is Likelihood.GOLEMFIT:
return gf_utils.get_llh(fitter, hypo_paramset)
elif args.likelihood is Likelihood.GF_FREQ:
return gf_utils.get_llh_freq(fitter, hypo_paramset)
-def ln_prob(theta, args, fitter, asimov_paramset, llh_paramset):
+def ln_prob(theta, args, asimov_paramset, llh_paramset, fitter):
lp = lnprior(theta, paramset=llh_paramset)
if not np.isfinite(lp):
return -np.inf
diff --git a/utils/mcmc.py b/utils/mcmc.py
index 97b77dd..fa044ea 100644
--- a/utils/mcmc.py
+++ b/utils/mcmc.py
@@ -72,11 +72,11 @@ def mcmc_argparse(parser):
help='Type of distribution to make the initial MCMC seed'
)
parser.add_argument(
- '--plot-angles', type=misc_utils.parse_bool, default='True',
+ '--plot-angles', type=parse_bool, default='True',
help='Plot MCMC triangle in the angles space'
)
parser.add_argument(
- '--plot-elements', type=misc_utils.parse_bool, default='False',
+ '--plot-elements', type=parse_bool, default='False',
help='Plot MCMC triangle in the mixing elements space'
)
diff --git a/utils/multinest.py b/utils/multinest.py
index 3a7ab2d..426a951 100644
--- a/utils/multinest.py
+++ b/utils/multinest.py
@@ -15,13 +15,27 @@ import numpy as np
from pymultinest import analyse, run
-from utils.likelihood import triangle_llh
+from utils import likelihood
+from utils.enums import PriorsCateg
from utils.misc import make_dir, parse_bool
-def CubePrior(cube, ndim, n_params):
- # default are uniform priors
- return ;
+def CubePrior(cube, ndim, n_params, mn_paramset):
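+ """Map the unit cube onto the parameter seed ranges and accumulate any Gaussian log-priors."""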
+ if ndim != len(mn_paramset):
+ raise AssertionError(
+ 'Length of MultiNest scan paramset is not the same as the input '
+ 'params\ncube={0}\nmn_paramset={1}'.format(cube, mn_paramset)
+ )
+ pranges = mn_paramset.seeds
+ for i in xrange(ndim):
+ mn_paramset[i].value = (pranges[i][1]-pranges[i][0])*cube[i] + pranges[i][0]
+ prior = 0
+ for parm in mn_paramset:
+ if parm.prior is PriorsCateg.GAUSSIAN:
+ prior += likelihood.log_gaussian(
+ parm.nominal_value, parm.value, parm.std
+ )
+ print 'prior', prior
+ return prior
def lnProb(cube, ndim, n_params, mn_paramset, llh_paramset, asimov_paramset,
@@ -38,7 +52,7 @@ def lnProb(cube, ndim, n_params, mn_paramset, llh_paramset, asimov_paramset,
llh_paramset[pm].value = mn_paramset[pm].value
theta = llh_paramset.values
# print 'llh_paramset', llh_paramset
- return triangle_llh(
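+ # evaluate the full ln_prob (priors + likelihood) rather than the bare triangle_llh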
+ return likelihood.ln_prob(
theta=theta,
args=args,
asimov_paramset=asimov_paramset,
@@ -72,10 +86,6 @@ def mn_argparse(parser):
'--mn-output', type=str, default='./mnrun/',
help='Folder to store MultiNest evaluations'
)
- parser.add_argument(
- '--plot-multinest', type=parse_bool, default='False',
- help='Plot MultiNest evidence'
- )
def mn_evidence(mn_paramset, llh_paramset, asimov_paramset, args, fitter):
diff --git a/utils/param.py b/utils/param.py
index bf230df..13f1a63 100644
--- a/utils/param.py
+++ b/utils/param.py
@@ -12,17 +12,20 @@ from __future__ import absolute_import, division
import sys
from collections import Sequence
+from copy import deepcopy
import numpy as np
from utils.plot import get_units
from utils.fr import fr_to_angles
-from utils.enums import Likelihood, ParamTag
+from utils.enums import Likelihood, ParamTag, PriorsCateg
class Param(object):
"""Parameter class to store parameters."""
- def __init__(self, name, value, ranges, seed=None, std=None, tex=None, tag=None):
+ def __init__(self, name, value, ranges, prior=None, seed=None, std=None,
+ tex=None, tag=None):
+ self._prior = None
self._seed = None
self._ranges = None
self._tex = None
@@ -30,8 +33,10 @@ class Param(object):
self.name = name
self.value = value
- self.seed = seed
+ self.nominal_value = deepcopy(value)
+ self.prior = prior
self.ranges = ranges
+ self.seed = seed
self.std = std
self.tex = tex
self.tag = tag
@@ -45,6 +50,18 @@ class Param(object):
self._ranges = [val for val in values]
@property
+ def prior(self):
+ return self._prior
+
+ @prior.setter
+ def prior(self, value):
+ if value is None:
+ self._prior = PriorsCateg.UNIFORM
+ else:
+ assert value in PriorsCateg
+ self._prior = value
+
+ @property
def seed(self):
if self._seed is None: return self.ranges
return tuple(self._seed)
@@ -146,6 +163,10 @@ class ParamSet(Sequence):
return tuple([obj.value for obj in self._params])
@property
+ def nominal_values(self):
+ return tuple([obj.nominal_value for obj in self._params])
+
+ @property
def seeds(self):
return tuple([obj.seed for obj in self._params])
@@ -185,18 +206,22 @@ class ParamSet(Sequence):
return ParamSet([io[1] for io in ps])
-def get_paramsets(args):
+def get_paramsets(args, nuisance_paramset):
"""Make the paramsets for generating the Asmimov MC sample and also running
the MCMC.
"""
asimov_paramset = []
llh_paramset = []
- if args.likelihood == Likelihood.GOLEMFIT:
- nuisance_paramset = define_nuisance()
- asimov_paramset.extend(nuisance_paramset.params)
- llh_paramset.extend(nuisance_paramset.params)
- for parm in nuisance_paramset:
- parm.value = args.__getattribute__(parm.name)
+
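+ # SM mixing angles and mass splittings always enter the likelihood scan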
+ llh_paramset.extend(
+ [x for x in nuisance_paramset.from_tag(ParamTag.SM_ANGLES)]
+ )
+ if args.likelihood is Likelihood.GOLEMFIT:
+ gf_nuisance = [x for x in nuisance_paramset.from_tag(ParamTag.NUISANCE)]
+ asimov_paramset.extend(gf_nuisance)
+ llh_paramset.extend(gf_nuisance)
+ for parm in llh_paramset:
+ parm.value = args.__getattribute__(parm.name)
tag = ParamTag.BESTFIT
flavour_angles = fr_to_angles(args.measured_ratio)
asimov_paramset.extend([
@@ -207,17 +232,16 @@ def get_paramsets(args):
if not args.fix_mixing and not args.fix_mixing_almost:
tag = ParamTag.MMANGLES
- e = 1e-10
llh_paramset.extend([
- Param(name='s_12^2', value=0.5, ranges=[0.+e, 1.-e], std=0.2, tex=r'\tilde{s}_{12}^2', tag=tag),
- Param(name='c_13^4', value=0.5, ranges=[0.+e, 1.-e], std=0.2, tex=r'\tilde{c}_{13}^4', tag=tag),
- Param(name='s_23^2', value=0.5, ranges=[0.+e, 1.-e], std=0.2, tex=r'\tilde{s}_{23}^4', tag=tag),
- Param(name='dcp', value=np.pi, ranges=[0.+e, 2*np.pi-e], std=0.2, tex=r'\tilde{\delta_{CP}}', tag=tag)
+ Param(name='np_s_12^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{12}^2', tag=tag),
+ Param(name='np_c_13^4', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{c}_{13}^4', tag=tag),
+ Param(name='np_s_23^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{23}^2', tag=tag),
+ Param(name='np_dcp', value=np.pi, ranges=[0., 2*np.pi], std=0.2, tex=r'\tilde{\delta_{CP}}', tag=tag)
])
if args.fix_mixing_almost:
tag = ParamTag.MMANGLES
llh_paramset.extend([
- Param(name='s_23^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{23}^4', tag=tag)
+ Param(name='np_s_23^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{23}^2', tag=tag)
])
if not args.fix_scale:
tag = ParamTag.SCALE
diff --git a/utils/plot.py b/utils/plot.py
index 66c888c..6ae8dc2 100644
--- a/utils/plot.py
+++ b/utils/plot.py
@@ -24,7 +24,7 @@ from getdist import plots
from getdist import mcsamples
from utils import misc as misc_utils
-from utils.enums import EnergyDependance, Likelihood, ParamTag
+from utils.enums import EnergyDependance, Likelihood, ParamTag, StatCateg
from utils.fr import angles_to_u, angles_to_fr
rc('text', usetex=False)
@@ -231,25 +231,32 @@ def chainer_plot(infile, outfile, outformat, args, llh_paramset):
g.export(outfile+'_elements.'+of)
-def plot_multinest(data, outfile, outformat, args, scale_param, label=None):
- """Make MultiNest factor plot."""
+def plot_statistic(data, outfile, outformat, args, scale_param, label=None):
+ """Make MultiNest factor or LLH value plot."""
print "Making MultiNest Factor plot"
fig_text = gen_figtext(args)
if label is not None: fig_text += '\n' + label
print 'data.shape', data.shape
- scales, evidences = data.T
- min_idx = np.argmin(scales)
- null = evidences[min_idx]
-
- reduced_ev = -(evidences - null)
+ scales, statistic = data.T
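+ # Bayesian: -(lnZ - lnZ at the smallest scale); frequentist: 2*(LLH - min LLH)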
+ if args.stat_method is StatCateg.BAYESIAN:
+ min_idx = np.argmin(scales)
+ null = statistic[min_idx]
+ reduced_ev = -(statistic - null)
+ elif args.stat_method is StatCateg.FREQUENTIST:
+ min_idx = np.argmin(statistic)
+ null = statistic[min_idx]
+ reduced_ev = 2*(statistic - null)
fig = plt.figure(figsize=(7, 5))
ax = fig.add_subplot(111)
ax.set_xlim(scale_param.ranges)
ax.set_xlabel('$'+scale_param.tex+'$')
- ax.set_ylabel(r'Bayes Factor')
+ if args.stat_method is StatCateg.BAYESIAN:
+ ax.set_ylabel(r'Bayes Factor')
+ elif args.stat_method is StatCateg.FREQUENTIST:
+ ax.set_ylabel(r'$\Delta {\rm LLH}$')
ax.plot(scales, reduced_ev)