author      shivesh <s.p.mandalia@qmul.ac.uk>   2018-04-14 00:15:37 -0500
committer   shivesh <s.p.mandalia@qmul.ac.uk>   2018-04-14 00:15:37 -0500
commit      e8f43856558fc093ac987b0d97f06f2d91b10ced (patch)
tree        c51e65c38c22b3c05e1e647554a55a6f33b22156
parent      ae60ec260f8939c952167035df5b6957fdfa4e9a (diff)
download    GolemFlavor-e8f43856558fc093ac987b0d97f06f2d91b10ced.tar.gz
            GolemFlavor-e8f43856558fc093ac987b0d97f06f2d91b10ced.zip
Sat Apr 14 00:15:37 CDT 2018
-rw-r--r--   .gitignore                4
-rwxr-xr-x   fr.py                   120
-rwxr-xr-x   sens.py                 115
-rwxr-xr-x   sens_bayes.py           175
-rw-r--r--   submitter/make_dag.py    13
-rw-r--r--   submitter/submit.sub      6
-rw-r--r--   utils/misc.py            42
-rw-r--r--   utils/plot.py            16
8 files changed, 217 insertions(+), 274 deletions(-)
diff --git a/.gitignore b/.gitignore
index 36c83a3..8728cfe 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,10 +1,10 @@
*.npy
-nohup.out
+nohup*
*.pyc
*.nfs*
plots/
*.pdf
-dagman_FR.submit*
+dagman_FR*
submitter/logs/
mnrun_*
*.png
diff --git a/fr.py b/fr.py
index dc4f075..90c2e94 100755
--- a/fr.py
+++ b/fr.py
@@ -183,6 +183,10 @@ def parse_args():
help='Make the bayes factor plot for the scale'
)
parser.add_argument(
+ '--run-angles-limit', type=misc_utils.parse_bool, default='False',
+ help='Make the limit vs BSM angles plot'
+ )
+ parser.add_argument(
'--bayes-bins', type=int, default=10,
help='Number of bins for the Bayes factor plot'
)
@@ -203,6 +207,10 @@ def parse_args():
help='Folder to store MultiNest evaluations'
)
parser.add_argument(
+ '--angles-lim-output', type=str, default='./mnrun/',
+ help='Folder to store MultiNest evaluations for the angles limit'
+ )
+ parser.add_argument(
'--source-ratio', type=int, nargs=3, default=[2, 1, 0],
help='Set the source flavour ratio for the case when you want to fix it'
)
@@ -227,7 +235,7 @@ def parse_args():
help='Set the new physics scale'
)
parser.add_argument(
- '--scale-region', type=float, default=5e5,
+ '--scale-region', type=float, default=1e7,
help='Set the size of the box to scan for the scale'
)
parser.add_argument(
@@ -311,10 +319,9 @@ def main():
mcmc_paramset = mcmc_paramset
)
- out = args.bayes_output+'/fr_evidence'
-
+ out = args.bayes_output+'/fr_evidence' + misc_utils.gen_identifier(args)
sc_range = mcmc_paramset.from_tag(ParamTag.SCALE)[0].ranges
- scales = np.linspace(
+ scan_scales = np.linspace(
sc_range[0], sc_range[1], args.bayes_bins
)
if args.run_bayes_factor:
@@ -322,8 +329,7 @@ def main():
if not args.run_mcmc and args.likelihood is Likelihood.GOLEMFIT:
fitter = gf_utils.setup_fitter(args, asimov_paramset)
- else:
- fitter = None
+ else: fitter = None
p = mcmc_paramset.from_tag(ParamTag.SCALE, invert=True)
n_params = len(p)
@@ -334,19 +340,19 @@ def main():
return ;
arr = []
- for s_idx, s in enumerate(scales):
+ for s_idx, sc in enumerate(scan_scales):
if args.bayes_eval_bin is not None:
if args.bayes_eval_bin == s_idx:
- out += '_scale_{0:.0E}'.format(np.power(10, s))
+ out += '_scale_{0:.0E}'.format(np.power(10, sc))
else: continue
- print '== SCALE = {0:.0E}'.format(np.power(10, s))
+ print '== SCALE = {0:.0E}'.format(np.power(10, sc))
theta = np.zeros(n_params)
def lnProb(cube, ndim, nparams):
for i in range(ndim):
prange = prior_ranges[i][1] - prior_ranges[i][0]
theta[i] = prange*cube[i] + prior_ranges[i][0]
- theta_ = np.array(theta.tolist() + [s])
+ theta_ = np.array(theta.tolist() + [sc])
# print 'mcmc_paramset', mcmc_paramset
return llh_utils.triangle_llh(
theta=theta_,
@@ -356,8 +362,8 @@ def main():
fitter=fitter
)
- prefix = 'mnrun_{0:.0E}'.format(np.power(10, s)) + \
- '_' + os.path.basename(outfile) + '_'
+ prefix = 'mnrun_{0:.0E}'.format(np.power(10, sc)) + \
+ '_' + os.path.basename(out) + '_'
print 'begin running evidence calculation for {0}'.format(prefix)
result = pymultinest.run(
LogLikelihood=lnProb,
@@ -376,7 +382,7 @@ def main():
)
a_lnZ = analyzer.get_stats()['global evidence']
print 'Evidence = {0}'.format(a_lnZ)
- arr.append([s, a_lnZ])
+ arr.append([sc, a_lnZ])
misc_utils.make_dir(out)
np.save(out+'.npy', np.array(arr))
@@ -387,6 +393,94 @@ def main():
xlim=sc_range
)
+ out = args.angles_lim_output+'/fr_an_evidence' + misc_utils.gen_identifier(args)
+ if args.run_angles_limit:
+ import pymultinest
+
+ scenarios = [
+ [np.sin(np.pi/2.)**2, 0, 0, 0],
+ [0, np.cos(np.pi/2.)**4, 0, 0],
+ [0, 0, np.sin(np.pi/2.)**2, 0],
+ ]
+ p = mcmc_paramset.from_tag([ParamTag.SCALE, ParamTag.MMANGLES], invert=True)
+ n_params = len(p)
+ prior_ranges = p.seeds
+
+ if not args.run_mcmc and args.likelihood is Likelihood.GOLEMFIT:
+ fitter = gf_utils.setup_fitter(args, asimov_paramset)
+ else: fitter = None
+
+ def CubePrior(cube, ndim, nparams):
+ # default are uniform priors
+ return ;
+
+ data = np.zeros((len(scenarios), args.bayes_bins, 2))
+ mm_angles = mcmc_paramset.from_tag(ParamTag.MMANGLES)
+ sc_angles = mcmc_paramset.from_tag(ParamTag.SCALE)[0]
+ for idx, scen in enumerate(scenarios):
+ scales, evidences = [], []
+ for yidx, an in enumerate(mm_angles):
+ an.value = scen[yidx]
+ for s_idx, sc in enumerate(scan_scales):
+ if args.bayes_eval_bin is not None:
+ if args.bayes_eval_bin == s_idx:
+ if idx == 0:
+ out += '_scale_{0:.0E}'.format(np.power(10, sc))
+ else: continue
+
+ print '== SCALE = {0:.0E}'.format(np.power(10, sc))
+ sc_angles.value = sc
+ def lnProb(cube, ndim, nparams):
+ for i in range(ndim):
+ prange = prior_ranges[i][1] - prior_ranges[i][0]
+ p[i].value = prange*cube[i] + prior_ranges[i][0]
+ for name in p.names:
+ mcmc_paramset[name].value = p[name].value
+ theta = mcmc_paramset.values
+ # print 'theta', theta
+ # print 'mcmc_paramset', mcmc_paramset
+ return llh_utils.triangle_llh(
+ theta=theta,
+ args=args,
+ asimov_paramset=asimov_paramset,
+ mcmc_paramset=mcmc_paramset,
+ fitter=fitter
+ )
+ prefix = 'mnrun_{0:.0E}'.format(np.power(10, sc)) + \
+ '_' + os.path.basename(out) + '_'
+ print 'begin running evidence calculation for {0}'.format(prefix)
+ result = pymultinest.run(
+ LogLikelihood=lnProb,
+ Prior=CubePrior,
+ n_dims=n_params,
+ importance_nested_sampling=True,
+ n_live_points=args.bayes_live_points,
+ evidence_tolerance=args.bayes_tolerance,
+ outputfiles_basename=prefix,
+ resume=False,
+ verbose=True
+ )
+
+ analyzer = pymultinest.Analyzer(outputfiles_basename=prefix, n_params=n_params)
+ a_lnZ = analyzer.get_stats()['global evidence']
+ print 'Evidence = {0}'.format(a_lnZ)
+ scales.append(sc)
+ evidences.append(a_lnZ)
+
+ for i, d in enumerate(evidences):
+ data[idx][i][0] = scales[i]
+ data[idx][i][1] = d
+
+ misc_utils.make_dir(out)
+ print 'saving to {0}.npy'.format(out)
+ np.save(out+'.npy', np.array(data))
+
+ dirname = os.path.dirname(out)
+ plot_utils.plot_BSM_angles_limit(
+ dirname=dirname, outfile=outfile, outformat=['png'],
+ args=args, bayesian=True
+ )
+
print "DONE!"
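For reference, both evidence scans above rescale MultiNest's unit hypercube onto the physical prior ranges inside lnProb rather than using a separate prior transform. A minimal standalone sketch of that pattern (the function and argument names here are illustrative, not part of this commit):

    import numpy as np

    def make_lnprob(prior_ranges, loglike):
        # prior_ranges: list of (low, high) pairs, one per sampled parameter.
        # loglike: callable evaluating the log-likelihood at a physical parameter vector.
        def lnprob(cube, ndim, nparams):
            theta = np.empty(ndim)
            for i in range(ndim):
                low, high = prior_ranges[i]
                theta[i] = low + (high - low) * cube[i]   # cube[i] lies in [0, 1]
            return loglike(theta)
        return lnprob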
diff --git a/sens.py b/sens.py
index 0c03b34..8b41a4a 100755
--- a/sens.py
+++ b/sens.py
@@ -17,6 +17,7 @@ from matplotlib import pyplot as plt
from matplotlib import rc
import GolemFitPy as gf
+import pymultinest
import fr
from utils import gf as gf_utils
@@ -32,7 +33,7 @@ rc('font', **{'family':'serif', 'serif':['Computer Modern'], 'size':18})
RUN = False
-z = 0+1E-6
+z = 0.
scenarios = [
[np.sin(np.pi/2.)**2, z, z, z],
[z, np.cos(np.pi/2.)**4, z, z],
@@ -40,86 +41,83 @@ scenarios = [
]
xticks = [r'$\mathcal{O}_{12}$', r'$\mathcal{O}_{13}$', r'$\mathcal{O}_{23}$']
-def fit_flags(flag_dict):
- flags = gf.FitParametersFlag()
- for key in flag_dict.iterkeys():
- flags.__setattr__(key, flag_dict[key])
- return flags
-
-default_flags = {
- # False means it's not fixed in minimization
- 'astroFlavorAngle1' : True,
- 'astroFlavorAngle2' : True,
- # 'astroENorm' : True,
- # 'astroMuNorm' : True,
- # 'astroTauNorm' : True,
- 'convNorm' : False,
- 'promptNorm' : False,
- 'muonNorm' : False,
- 'astroNorm' : False,
- 'astroParticleBalance' : True,
- 'astroDeltaGamma' : False,
- 'cutoffEnergy' : True,
- 'CRDeltaGamma' : False,
- 'piKRatio' : False,
- 'NeutrinoAntineutrinoRatio' : True,
- 'darkNorm' : True,
- 'domEfficiency' : True,
- 'holeiceForward' : True,
- 'anisotropyScale' : True,
- 'astroNormSec' : True,
- 'astroDeltaGammaSec' : True
-}
-
def main():
args = fr.parse_args()
- args.likelihood = Likelihood.GF_FREQ
fr.process_args(args)
misc_utils.print_args(args)
bins = 10
asimov_paramset, mcmc_paramset = fr.get_paramsets(args)
- outfile = misc_utils.gen_outfile_name(args)
- print '== {0:<25} = {1}'.format('outfile', outfile)
-
- asimov_paramset = asimov_paramset.from_tag(ParamTag.BESTFIT)
- mcmc_paramset = mcmc_paramset.from_tag(ParamTag.NUISANCE, invert=True)
sc_range = mcmc_paramset.from_tag(ParamTag.SCALE)[0].ranges
- scan_scales = np.linspace(sc_range[0], sc_range[1], bins+1)
+ scan_scales = np.linspace(sc_range[0], sc_range[1], bins)
print 'scan_scales', scan_scales
+ p = mcmc_paramset.from_tag([ParamTag.SCALE, ParamTag.MMANGLES], invert=True)
+ n_params = len(p)
+ prior_ranges = p.seeds
+
outfile = './sens'
if RUN:
- datapaths = gf.DataPaths()
- sparams = gf_utils.steering_params(args)
- npp = gf.NewPhysicsParams()
- fitter = gf.GolemFit(datapaths, sparams, npp)
- fitter.SetFitParametersFlag(fit_flags(default_flags))
- gf_utils.set_up_as(fitter, asimov_paramset)
-
- data = np.zeros((len(scenarios), bins+1, 2))
+ if args.likelihood is Likelihood.GOLEMFIT:
+ fitter = gf_utils.setup_fitter(args, asimov_paramset)
+ elif args.likelihood is Likelihood.GAUSSIAN:
+ fitter = None
+
+ def CubePrior(cube, ndim, nparams):
+ # default are uniform priors
+ return ;
+
+ data = np.zeros((len(scenarios), bins, 2))
mm_angles = mcmc_paramset.from_tag(ParamTag.MMANGLES)
+ sc_angles = mcmc_paramset.from_tag(ParamTag.SCALE)[0]
for idx, scen in enumerate(scenarios):
- arr = []
scales = []
- llhs = []
+ evidences = []
for yidx, an in enumerate(mm_angles):
an.value = scen[yidx]
for sc in scan_scales:
- theta = scen + [sc]
- print 'theta', theta
- llh = llh_utils.triangle_llh(
- theta=theta, args=args, asimov_paramset=asimov_paramset,
- mcmc_paramset=mcmc_paramset, fitter=fitter
+ sc_angles.value = sc
+ def lnProb(cube, ndim, nparams):
+ for i in range(ndim):
+ prange = prior_ranges[i][1] - prior_ranges[i][0]
+ p[i].value = prange*cube[i] + prior_ranges[i][0]
+ for name in p.names:
+ mcmc_paramset[name].value = p[name].value
+ theta = mcmc_paramset.values
+ # print 'theta', theta
+ # print 'mcmc_paramset', mcmc_paramset
+ return llh_utils.triangle_llh(
+ theta=theta,
+ args=args,
+ asimov_paramset=asimov_paramset,
+ mcmc_paramset=mcmc_paramset,
+ fitter=fitter
+ )
+ # TODO(shivesh)
+ prefix = 'mnrun_{0:.0E}'.format(np.power(10, sc)) + '_' + misc_utils.gen_outfile_name(args)[2:]
+ print 'begin running evidence calculation for {0}'.format(prefix)
+ result = pymultinest.run(
+ LogLikelihood=lnProb,
+ Prior=CubePrior,
+ n_dims=n_params,
+ importance_nested_sampling=True,
+ n_live_points=args.bayes_live_points,
+ evidence_tolerance=args.bayes_tolerance,
+ outputfiles_basename=prefix,
+ resume=False,
+ verbose=True
)
- print 'llh', llh
+
+ analyzer = pymultinest.Analyzer(outputfiles_basename=prefix, n_params=n_params)
+ a_lnZ = analyzer.get_stats()['global evidence']
+ print 'Evidence = {0}'.format(a_lnZ)
scales.append(sc)
- llhs.append(llh)
+ evidences.append(a_lnZ)
- for i, d in enumerate(llhs):
+ for i, d in enumerate(evidences):
data[idx][i][0] = scales[i]
data[idx][i][1] = d
@@ -130,7 +128,8 @@ def main():
outfile=outfile,
xticks=xticks,
outformat=['png'],
- args=args
+ args=args,
+ bayesian=True
)
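The scan above stores a (scenarios, bins, 2) array of (scale, ln-evidence) pairs and writes it with np.save. A hedged sketch of reading such a file back and forming ln Bayes factors relative to the first scanned scale, mirroring the null = evidences[0] convention used in utils/plot.py (the filename and loop names are illustrative):

    import numpy as np

    data = np.load('sens.npy')      # shape: (n_scenarios, n_bins, 2)
    for idx, block in enumerate(data):
        scales = block[:, 0]        # log10 of the scanned new-physics scale
        lnZ = block[:, 1]           # global log-evidence from MultiNest
        lnB = lnZ - lnZ[0]          # ln Bayes factor w.r.t. the first scale bin
        print('scenario {0}: min lnB = {1:.2f}'.format(idx, lnB.min()))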
diff --git a/sens_bayes.py b/sens_bayes.py
deleted file mode 100755
index b0030d4..0000000
--- a/sens_bayes.py
+++ /dev/null
@@ -1,175 +0,0 @@
-#! /usr/bin/env python
-# author : S. Mandalia
-# s.p.mandalia@qmul.ac.uk
-#
-# date : April 10, 2018
-
-"""
-HESE BSM flavour ratio analysis script
-"""
-
-from __future__ import absolute_import, division
-
-import numpy as np
-import matplotlib as mpl
-mpl.use('Agg')
-from matplotlib import pyplot as plt
-from matplotlib import rc
-
-import GolemFitPy as gf
-import pymultinest
-from pymultinest.solve import solve
-from pymultinest.watch import ProgressPrinter
-
-import fr
-from utils import gf as gf_utils
-from utils import likelihood as llh_utils
-from utils import misc as misc_utils
-from utils.enums import Likelihood, ParamTag
-from utils.plot import plot_BSM_angles_limit
-
-rc('text', usetex=False)
-rc('font', **{'family':'serif', 'serif':['Computer Modern'], 'size':18})
-
-
-RUN = False
-
-
-z = 0.
-scenarios = [
- [np.sin(np.pi/2.)**2, z, z, z],
- [z, np.cos(np.pi/2.)**4, z, z],
- [z, z, np.sin(np.pi/2.)**2, z],
-]
-xticks = [r'$\mathcal{O}_{12}$', r'$\mathcal{O}_{13}$', r'$\mathcal{O}_{23}$']
-
-def fit_flags(flag_dict):
- flags = gf.FitParametersFlag()
- for key in flag_dict.iterkeys():
- flags.__setattr__(key, flag_dict[key])
- return flags
-
-default_flags = {
- # False means it's not fixed in minimization
- 'astroFlavorAngle1' : True,
- 'astroFlavorAngle2' : True,
- # 'astroENorm' : True,
- # 'astroMuNorm' : True,
- # 'astroTauNorm' : True,
- 'convNorm' : False,
- 'promptNorm' : False,
- 'muonNorm' : False,
- 'astroNorm' : False,
- 'astroParticleBalance' : True,
- 'astroDeltaGamma' : False,
- 'cutoffEnergy' : True,
- 'CRDeltaGamma' : False,
- 'piKRatio' : False,
- 'NeutrinoAntineutrinoRatio' : True,
- 'darkNorm' : True,
- 'domEfficiency' : True,
- 'holeiceForward' : True,
- 'anisotropyScale' : True,
- 'astroNormSec' : True,
- 'astroDeltaGammaSec' : True
-}
-
-
-def main():
- args = fr.parse_args()
- fr.process_args(args)
- misc_utils.print_args(args)
-
- bins = 10
-
- asimov_paramset, mcmc_paramset = fr.get_paramsets(args)
-
- sc_range = mcmc_paramset.from_tag(ParamTag.SCALE)[0].ranges
- scan_scales = np.linspace(sc_range[0], sc_range[1], bins)
- print 'scan_scales', scan_scales
-
- p = mcmc_paramset.from_tag([ParamTag.SCALE, ParamTag.MMANGLES], invert=True)
- n_params = len(p)
- prior_ranges = p.seeds
-
- outfile = './sens'
- if RUN:
- if args.likelihood is Likelihood.GOLEMFIT:
- fitter = gf_utils.setup_fitter(args, asimov_paramset)
- fitter.SetFitParametersFlag(fit_flags(default_flags))
- elif args.likelihood is Likelihood.GAUSSIAN:
- fitter = None
-
- def CubePrior(cube, ndim, nparams):
- # default are uniform priors
- return ;
-
- data = np.zeros((len(scenarios), bins, 2))
- mm_angles = mcmc_paramset.from_tag(ParamTag.MMANGLES)
- sc_angles = mcmc_paramset.from_tag(ParamTag.SCALE)[0]
- for idx, scen in enumerate(scenarios):
- scales = []
- evidences = []
- for yidx, an in enumerate(mm_angles):
- an.value = scen[yidx]
- for sc in scan_scales:
- sc_angles.value = sc
- def lnProb(cube, ndim, nparams):
- for i in range(ndim):
- prange = prior_ranges[i][1] - prior_ranges[i][0]
- p[i].value = prange*cube[i] + prior_ranges[i][0]
- for name in p.names:
- mcmc_paramset[name].value = p[name].value
- theta = mcmc_paramset.values
- # print 'theta', theta
- # print 'mcmc_paramset', mcmc_paramset
- return llh_utils.triangle_llh(
- theta=theta,
- args=args,
- asimov_paramset=asimov_paramset,
- mcmc_paramset=mcmc_paramset,
- fitter=fitter
- )
- # TODO(shivesh)
- prefix = 'mnrun_{0:.0E}'.format(np.power(10, sc)) + '_' + misc_utils.gen_outfile_name(args)[2:]
- print 'begin running evidence calculation for {0}'.format(prefix)
- result = pymultinest.run(
- LogLikelihood=lnProb,
- Prior=CubePrior,
- n_dims=n_params,
- importance_nested_sampling=True,
- n_live_points=args.bayes_live_points,
- evidence_tolerance=args.bayes_tolerance,
- outputfiles_basename=prefix,
- resume=False,
- verbose=True
- )
-
- analyzer = pymultinest.Analyzer(outputfiles_basename=prefix, n_params=n_params)
- a_lnZ = analyzer.get_stats()['global evidence']
- print 'Evidence = {0}'.format(a_lnZ)
- scales.append(sc)
- evidences.append(a_lnZ)
-
- for i, d in enumerate(evidences):
- data[idx][i][0] = scales[i]
- data[idx][i][1] = d
-
- np.save(outfile + '.npy', data)
-
- plot_BSM_angles_limit(
- infile=outfile+'.npy',
- outfile=outfile,
- xticks=xticks,
- outformat=['png'],
- args=args,
- bayesian=True
- )
-
-
-main.__doc__ = __doc__
-
-
-if __name__ == '__main__':
- main()
-
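The deleted module (like the block removed from sens.py above) used a small helper to copy a dict of booleans onto a GolemFitPy FitParametersFlag via __setattr__. An equivalent, slightly more idiomatic sketch, assuming the same GolemFitPy API as the original code:

    import GolemFitPy as gf

    def fit_flags(flag_dict):
        # Per the original comment, False leaves a parameter free in the
        # minimisation and True holds it fixed.
        flags = gf.FitParametersFlag()
        for key, value in flag_dict.items():   # .iterkeys() was Python 2 only
            setattr(flags, key, value)
        return flags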
diff --git a/submitter/make_dag.py b/submitter/make_dag.py
index be13ac8..53878a2 100644
--- a/submitter/make_dag.py
+++ b/submitter/make_dag.py
@@ -35,7 +35,7 @@ burnin = 500
nsteps = 2000
nwalkers = 60
seed = 24
-threads = 4
+threads = 1
mcmc_seed_type = 'uniform'
# FR
@@ -68,6 +68,7 @@ data = 'real'
# Bayes Factor
run_bayes_factor = 'False'
+run_angles_limit = 'True'
bayes_bins = 10
bayes_live_points = 200
bayes_tolerance = 0.01
@@ -78,7 +79,7 @@ plot_angles = 'False'
plot_elements = 'False'
plot_bayes = 'False'
-outfile = 'dagman_FR.submit'
+outfile = 'dagman_FR_angles_limit.submit'
golemfitsourcepath = os.environ['GOLEMSOURCEPATH'] + '/GolemFit'
condor_script = golemfitsourcepath + '/scripts/flavour_ratio/submitter/submit.sub'
@@ -105,6 +106,8 @@ with open(outfile, 'w') as f:
outchains = outchain_head + '/fix_ifr/{0}/'.format(str(sig).replace('.', '_'))
if run_bayes_factor == 'True':
bayes_output = outchains + '/bayes_factor/'
+ if run_angles_limit == 'True':
+ angles_lim_output = outchains + '/angles_limit/'
outchains += 'mcmc_chain'
for r in range(b_runs):
print 'run', r
@@ -156,6 +159,8 @@ with open(outfile, 'w') as f:
f.write('VARS\tjob{0}\tbayes_tolerance="{1}"\n'.format(job_number, bayes_tolerance))
f.write('VARS\tjob{0}\tplot_bayes="{1}"\n'.format(job_number, plot_bayes))
f.write('VARS\tjob{0}\tbayes_eval_bin="{1}"\n'.format(job_number, r))
+ f.write('VARS\tjob{0}\trun_angles_limit="{1}"\n'.format(job_number, run_angles_limit))
+ f.write('VARS\tjob{0}\tangles_lim_output="{1}"\n'.format(job_number, angles_lim_output))
job_number += 1
for frs in full_scan_mfr:
@@ -163,6 +168,8 @@ with open(outfile, 'w') as f:
outchains = outchain_head + '/full_scan/{0}'.format(str(sig).replace('.', '_'))
if run_bayes_factor == 'True':
bayes_output = outchains + '/bayes_factor/'
+ if run_angles_limit == 'True':
+ angles_lim_output = outchains + '/angles_limit/'
outchains += 'mcmc_chain'
for r in range(b_runs):
print 'run', r
@@ -214,4 +221,6 @@ with open(outfile, 'w') as f:
f.write('VARS\tjob{0}\tbayes_tolerance="{1}"\n'.format(job_number, bayes_tolerance))
f.write('VARS\tjob{0}\tplot_bayes="{1}"\n'.format(job_number, plot_bayes))
f.write('VARS\tjob{0}\tbayes_eval_bin="{1}"\n'.format(job_number, r))
+ f.write('VARS\tjob{0}\trun_angles_limit="{1}"\n'.format(job_number, run_angles_limit))
+ f.write('VARS\tjob{0}\tangles_lim_output="{1}"\n'.format(job_number, angles_lim_output))
job_number += 1
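make_dag.py now writes two extra VARS lines per job; each key="value" pair becomes a $(key) macro that submit.sub expands on its Arguments line. A minimal sketch of that pattern (write_vars and its call are illustrative, not part of the script):

    def write_vars(f, job_number, **variables):
        # One DAGMan VARS line per variable, consumed as $(key) in submit.sub.
        for key, value in sorted(variables.items()):
            f.write('VARS\tjob{0}\t{1}="{2}"\n'.format(job_number, key, value))

    # e.g. write_vars(f, job_number, run_angles_limit=run_angles_limit,
    #                 angles_lim_output=angles_lim_output)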
diff --git a/submitter/submit.sub b/submitter/submit.sub
index e9e66bd..d232097 100644
--- a/submitter/submit.sub
+++ b/submitter/submit.sub
@@ -1,5 +1,5 @@
Executable = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/fr.py
-Arguments = "--ast $(ast) --astroDeltaGamma $(astroDeltaGamma) --astroNorm $(astroNorm) --burnin $(burnin) --convNorm $(convNorm) --data $(data) --dimension $(dimension) --energy $(energy) --fix-mixing $(fix_mixing) --fix-scale $(fix_scale) --fix-source-ratio $(fix_source_ratio) --likelihood $(likelihood) --measured-ratio $(mr0) $(mr1) $(mr2) --muonNorm $(muonNorm) --no-bsm $(no_bsm) --nsteps $(nsteps) --nwalkers $(nwalkers) --outfile $(outfile) --plot-angles $(plot_angles) --plot-elements $(plot_elements) --promptNorm $(promptNorm) --run-mcmc $(run_mcmc) --scale $(scale) --scale-region $(scale_region) --seed $(seed) --sigma-ratio $(sigma_ratio) --source-ratio $(sr0) $(sr1) $(sr2) --threads $(threads) --likelihood $(likelihood) --mcmc-seed-type $(mcmc_seed_type) --energy-dependance $(energy_dependance) --spectral-index $(spectral_index) --binning $(binning_0) $(binning_1) $(binning_2) --fix-mixing-almost $(fix_mixing_almost) --run-bayes-factor $(run_bayes_factor) --bayes-bins $(bayes_bins) --bayes-output $(bayes_output) --bayes-live-points $(bayes_live_points) --plot-bayes $(plot_bayes) --bayes-tolerance $(bayes_tolerance) --bayes-eval-bin $(bayes_eval_bin)"
+Arguments = "--ast $(ast) --astroDeltaGamma $(astroDeltaGamma) --astroNorm $(astroNorm) --burnin $(burnin) --convNorm $(convNorm) --data $(data) --dimension $(dimension) --energy $(energy) --fix-mixing $(fix_mixing) --fix-scale $(fix_scale) --fix-source-ratio $(fix_source_ratio) --likelihood $(likelihood) --measured-ratio $(mr0) $(mr1) $(mr2) --muonNorm $(muonNorm) --no-bsm $(no_bsm) --nsteps $(nsteps) --nwalkers $(nwalkers) --outfile $(outfile) --plot-angles $(plot_angles) --plot-elements $(plot_elements) --promptNorm $(promptNorm) --run-mcmc $(run_mcmc) --scale $(scale) --scale-region $(scale_region) --seed $(seed) --sigma-ratio $(sigma_ratio) --source-ratio $(sr0) $(sr1) $(sr2) --threads $(threads) --likelihood $(likelihood) --mcmc-seed-type $(mcmc_seed_type) --energy-dependance $(energy_dependance) --spectral-index $(spectral_index) --binning $(binning_0) $(binning_1) $(binning_2) --fix-mixing-almost $(fix_mixing_almost) --run-bayes-factor $(run_bayes_factor) --bayes-bins $(bayes_bins) --bayes-output $(bayes_output) --bayes-live-points $(bayes_live_points) --plot-bayes $(plot_bayes) --bayes-tolerance $(bayes_tolerance) --bayes-eval-bin $(bayes_eval_bin) --run-angles-limit $(run_angles_limit) --angles-lim-out $(angles_lim_output)"
# All logs will go to a single file
log = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).log
@@ -16,8 +16,8 @@ getenv = True
# +TransferOutput=""
transfer_output_files = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/metaouts/
-request_memory = 30GB
-request_cpus = 4
+request_memory = 10GB
+request_cpus = 1
Universe = vanilla
Notification = never
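Note that the Arguments line passes --angles-lim-out while fr.py defines --angles-lim-output; this still resolves because argparse accepts unambiguous prefix abbreviations by default. A minimal demonstration (option name taken from fr.py, the value is illustrative):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--angles-lim-output', type=str, default='./mnrun/')
    args = parser.parse_args(['--angles-lim-out', './angles_limit/'])
    print(args.angles_lim_output)   # -> ./angles_limit/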
diff --git a/utils/misc.py b/utils/misc.py
index 331e78a..f0c1ad4 100644
--- a/utils/misc.py
+++ b/utils/misc.py
@@ -196,63 +196,67 @@ class SortingHelpFormatter(argparse.HelpFormatter):
super(SortingHelpFormatter, self).add_arguments(actions)
-def gen_outfile_name(args):
- """Generate a name for the output file based on the input args.
-
- Parameters
- ----------
- args : argparse
- argparse object to print
-
- """
+def gen_identifier(args):
mr = args.measured_ratio
si = args.sigma_ratio
if args.fix_source_ratio:
sr = args.source_ratio
if args.fix_mixing:
- outfile = args.outfile+'_{0:03d}_{1:03d}_{2:03d}_{3:04d}_sfr_{4:03d}_{5:03d}_{6:03d}_DIM{7}_fix_mixing'.format(
+ out = '_{0:03d}_{1:03d}_{2:03d}_{3:04d}_sfr_{4:03d}_{5:03d}_{6:03d}_DIM{7}_fix_mixing'.format(
int(mr[0]*100), int(mr[1]*100), int(mr[2]*100), int(si*1000),
int(sr[0]*100), int(sr[1]*100), int(sr[2]*100), args.dimension
)
elif args.fix_mixing_almost:
- outfile = args.outfile+'_{0:03d}_{1:03d}_{2:03d}_{3:04d}_sfr_{4:03d}_{5:03d}_{6:03d}_DIM{7}_fix_mixing_almost'.format(
+ out = '_{0:03d}_{1:03d}_{2:03d}_{3:04d}_sfr_{4:03d}_{5:03d}_{6:03d}_DIM{7}_fix_mixing_almost'.format(
int(mr[0]*100), int(mr[1]*100), int(mr[2]*100), int(si*1000),
int(sr[0]*100), int(sr[1]*100), int(sr[2]*100), args.dimension
)
elif args.fix_scale:
- outfile = args.outfile+'_{0:03d}_{1:03d}_{2:03d}_{3:04d}_sfr_{4:03d}_{5:03d}_{6:03d}_DIM{7}_fixed_scale_{8}'.format(
+ out = '_{0:03d}_{1:03d}_{2:03d}_{3:04d}_sfr_{4:03d}_{5:03d}_{6:03d}_DIM{7}_fixed_scale_{8}'.format(
int(mr[0]*100), int(mr[1]*100), int(mr[2]*100), int(si*1000),
int(sr[0]*100), int(sr[1]*100), int(sr[2]*100), args.dimension,
args.scale
)
else:
- outfile = args.outfile+'_{0:03d}_{1:03d}_{2:03d}_{3:04d}_sfr_{4:03d}_{5:03d}_{6:03d}_DIM{7}_single_scale'.format(
+ out = '_{0:03d}_{1:03d}_{2:03d}_{3:04d}_sfr_{4:03d}_{5:03d}_{6:03d}_DIM{7}_single_scale'.format(
int(mr[0]*100), int(mr[1]*100), int(mr[2]*100), int(si*1000),
int(sr[0]*100), int(sr[1]*100), int(sr[2]*100), args.dimension
)
else:
if args.fix_mixing:
- outfile = args.outfile+'_{0:03d}_{1:03d}_{2:03d}_{3:04d}_DIM{4}_fix_mixing'.format(
+ out = '_{0:03d}_{1:03d}_{2:03d}_{3:04d}_DIM{4}_fix_mixing'.format(
int(mr[0]*100), int(mr[1]*100), int(mr[2]*100),
int(si*1000), args.dimension
)
elif args.fix_mixing_almost:
- outfile = args.outfile+'_{0:03d}_{1:03d}_{2:03d}_{3:04d}_DIM{4}_fix_mixing_almost'.format(
+ out = '_{0:03d}_{1:03d}_{2:03d}_{3:04d}_DIM{4}_fix_mixing_almost'.format(
int(mr[0]*100), int(mr[1]*100), int(mr[2]*100),
int(si*1000), args.dimension
)
elif args.fix_scale:
- outfile = args.outfile+'_{0:03d}_{1:03d}_{2:03d}_{3:04d}_DIM{4}_fixed_scale_{5}'.format(
+ out = '_{0:03d}_{1:03d}_{2:03d}_{3:04d}_DIM{4}_fixed_scale_{5}'.format(
int(mr[0]*100), int(mr[1]*100), int(mr[2]*100),
int(si*1000), args.dimension, args.scale
)
else:
- outfile = args.outfile+'_{0:03d}_{1:03d}_{2:03d}_{3:04d}_DIM{4}'.format(
+ out = '_{0:03d}_{1:03d}_{2:03d}_{3:04d}_DIM{4}'.format(
int(mr[0]*100), int(mr[1]*100), int(mr[2]*100),
int(si*1000), args.dimension
)
- if args.likelihood is Likelihood.FLAT: outfile += '_flat'
- return outfile
+ if args.likelihood is Likelihood.FLAT: out += '_flat'
+ return out
+
+
+def gen_outfile_name(args):
+ """Generate a name for the output file based on the input args.
+
+ Parameters
+ ----------
+ args : argparse
+ argparse object to print
+
+ """
+ return args.outfile + gen_identifier(args)
def parse_bool(s):
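With this refactor, gen_outfile_name() is simply the user-supplied prefix plus the identifier suffix, so other outputs can reuse gen_identifier() with their own prefixes, as fr.py now does for its evidence files. A usage sketch (args is the namespace returned by fr.parse_args(); the example value is hypothetical):

    from utils import misc as misc_utils

    ident = misc_utils.gen_identifier(args)      # e.g. '_033_033_033_0010_DIM6'
    outfile = misc_utils.gen_outfile_name(args)  # == args.outfile + ident
    evidence_out = args.bayes_output + '/fr_evidence' + ident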
diff --git a/utils/plot.py b/utils/plot.py
index 0ff89c2..0d02c2a 100644
--- a/utils/plot.py
+++ b/utils/plot.py
@@ -100,6 +100,10 @@ def plot_argparse(parser):
'--plot-bayes', type=misc_utils.parse_bool, default='False',
help='Plot Bayes factor'
)
+ parser.add_argument(
+ '--plot-angles-limit', type=misc_utils.parse_bool, default='False',
+ help='Plot limit vs BSM angles'
+ )
def flat_angles_to_u(x):
@@ -274,6 +278,7 @@ def bayes_factor_plot(dirname, outfile, outformat, args, xlim):
raw.append(np.load(os.path.join(root, fn)))
raw = np.sort(np.vstack(raw), axis=0)
print 'raw', raw
+ print 'raw.shape', raw.shape
scales, evidences = raw.T
null = evidences[0]
@@ -309,12 +314,19 @@ def myround(x, base=5, up=False, down=False):
else: int(base * np.round(float(x)/base))
-def plot_BSM_angles_limit(infile, outfile, xticks, outformat, args, bayesian):
+def plot_BSM_angles_limit(dirname, outfile, outformat, args, bayesian):
"""Make BSM angles vs scale limit plot."""
+ if not args.plot_angles_limit: return
print "Making BSM angles limit plot."""
fig_text = gen_figtext(args)
+ xticks = [r'$\mathcal{O}_{12}$', r'$\mathcal{O}_{13}$', r'$\mathcal{O}_{23}$']
- raw = np.load(infile)
+ raw = []
+ for root, dirs, filenames in os.walk(dirname):
+ for fn in filenames:
+ if fn[-4:] == '.npy':
+ raw.append(np.load(os.path.join(root, fn)))
+ raw = np.sort(np.vstack(raw), axis=0)
print 'raw', raw
print 'raw.shape', raw.shape
sc_ranges = (