-rwxr-xr-x  plot_sens.py               397
-rwxr-xr-x  sens.py                    420
-rw-r--r--  submitter/sens_dag.py      180
-rw-r--r--  submitter/sens_submit.sub   12
-rw-r--r--  utils/enums.py              33
-rw-r--r--  utils/fr.py                140
-rw-r--r--  utils/gf.py                 41
-rw-r--r--  utils/llh.py               104
-rw-r--r--  utils/misc.py              101
-rw-r--r--  utils/mn.py                 13
-rw-r--r--  utils/param.py              66
-rw-r--r--  utils/plot.py              161
12 files changed, 512 insertions, 1156 deletions
diff --git a/plot_sens.py b/plot_sens.py
index f91b5f2..f7179d8 100755
--- a/plot_sens.py
+++ b/plot_sens.py
@@ -19,98 +19,30 @@ import numpy as np
import numpy.ma as ma
from utils import fr as fr_utils
-from utils import gf as gf_utils
from utils import llh as llh_utils
from utils import misc as misc_utils
from utils import plot as plot_utils
-from utils.enums import EnergyDependance, Likelihood, MixingScenario, ParamTag
-from utils.enums import PriorsCateg, SensitivityCateg, StatCateg
+from utils.enums import DataType, Likelihood, ParamTag
+from utils.enums import PriorsCateg, StatCateg, Texture
-from utils.param import Param, ParamSet, get_paramsets
-
-from utils import mn as mn_utils
-
-
-def define_nuisance():
- """Define the nuisance parameters."""
- tag = ParamTag.SM_ANGLES
- g_prior = PriorsCateg.GAUSSIAN
- lg_prior = PriorsCateg.LIMITEDGAUSS
- e = 1e-9
- nuisance = [
- Param(name='s_12_2', value=0.307, seed=[0.26, 0.35], ranges=[0., 1.], std=0.013, tex=r's_{12}^2', prior=lg_prior, tag=tag),
- Param(name='c_13_4', value=(1-(0.02206))**2, seed=[0.950, 0.961], ranges=[0., 1.], std=0.00147, tex=r'c_{13}^4', prior=lg_prior, tag=tag),
- Param(name='s_23_2', value=0.538, seed=[0.31, 0.75], ranges=[0., 1.], std=0.069, tex=r's_{23}^2', prior=lg_prior, tag=tag),
- Param(name='dcp', value=4.08404, seed=[0+e, 2*np.pi-e], ranges=[0., 2*np.pi], std=2.0, tex=r'\delta_{CP}', tag=tag),
- Param(
- name='m21_2', value=7.40E-23, seed=[7.2E-23, 7.6E-23], ranges=[6.80E-23, 8.02E-23],
- std=2.1E-24, tex=r'\Delta m_{21}^2{\rm GeV}^{-2}', prior=g_prior, tag=tag
- ),
- Param(
- name='m3x_2', value=2.494E-21, seed=[2.46E-21, 2.53E-21], ranges=[2.399E-21, 2.593E-21],
- std=3.3E-23, tex=r'\Delta m_{3x}^2{\rm GeV}^{-2}', prior=g_prior, tag=tag
- )
- ]
- tag = ParamTag.NUISANCE
- nuisance.extend([
- Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0.1, 10.], std=0.4, prior=lg_prior, tag=tag),
- Param(name='promptNorm', value=0., seed=[0. , 6. ], ranges=[0. , 20.], std=2.4, prior=lg_prior, tag=tag),
- Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0. , 10.], std=0.1, tag=tag),
- Param(name='astroNorm', value=6.9, seed=[0., 5. ], ranges=[0. , 20.], std=1.5, tag=tag),
- Param(name='astroDeltaGamma', value=2.5, seed=[2.4, 3. ], ranges=[-5., 5. ], std=0.1, tag=tag)
- ])
- return ParamSet(nuisance)
-
-
-def nuisance_argparse(parser):
- nuisance = define_nuisance()
- for parm in nuisance:
- parser.add_argument(
- '--'+parm.name, type=float, default=parm.value,
- help=parm.name+' to inject'
- )
+from utils.param import Param, ParamSet
def process_args(args):
"""Process the input args."""
- if args.fix_mixing is not MixingScenario.NONE and args.fix_scale:
- raise NotImplementedError('Fixed mixing and scale not implemented')
- if args.fix_mixing is not MixingScenario.NONE and args.fix_mixing_almost:
- raise NotImplementedError(
- '--fix-mixing and --fix-mixing-almost cannot be used together'
- )
- if args.fix_scale:
- raise NotImplementedError(
- '--fix-scale not implemented'
- )
+ if args.data is not DataType.REAL:
+ args.injected_ratio = fr_utils.normalise_fr(args.injected_ratio)
- args.measured_ratio = fr_utils.normalise_fr(args.measured_ratio)
- if args.fix_source_ratio:
- assert len(args.source_ratios) % 3 == 0
- srs = [args.source_ratios[3*x:3*x+3]
- for x in range(int(len(args.source_ratios)/3))]
- args.source_ratios = map(fr_utils.normalise_fr, srs)
-
- if args.energy_dependance is EnergyDependance.SPECTRAL:
- args.binning = np.logspace(
- np.log10(args.binning[0]), np.log10(args.binning[1]), args.binning[2]+1
+ if len(args.source_ratios) % 3 != 0:
+ raise ValueError(
+ 'Invalid source ratios input {0}'.format(args.source_ratios)
)
- if args.split_jobs and args.run_method is SensitivityCateg.FULL:
- raise NotImplementedError(
- 'split_jobs and run_method not implemented'
- )
+ srs = [args.source_ratios[3*x:3*x+3]
+ for x in range(int(len(args.source_ratios)/3))]
+ args.source_ratios = map(fr_utils.normalise_fr, srs)
args.dimensions = np.sort(args.dimensions)
- args_copy = deepcopy(args)
- scale_regions = []
- for dim in args.dimensions:
- args_copy.dimension = dim
- _, scale_region = fr_utils.estimate_scale(args_copy)
- scale_regions.append(scale_region)
- args.scale_region = [np.min(scale_regions), np.max(scale_regions)]
- args.scale = np.power(10., np.average(np.log10(args.scale_region)))
-
def parse_args(args=None):
"""Parse command line arguments"""
@@ -119,53 +51,47 @@ def parse_args(args=None):
formatter_class=misc_utils.SortingHelpFormatter,
)
parser.add_argument(
- '--infile', type=str, default='./untitled',
- help='Path to input dir'
+ '--datadir', type=str,
+ default='/data/user/smandalia/flavour_ratio/data/sensitivity',
+ help='Path to data directory'
)
parser.add_argument(
- '--run-method', default='full',
- type=partial(misc_utils.enum_parse, c=SensitivityCateg),
- choices=SensitivityCateg,
- help='Choose which type of sensivity plot to make'
+ '--segments', type=int, default=10,
+ help='Number of new physics scales to evaluate'
)
parser.add_argument(
- '--stat-method', default='bayesian',
- type=partial(misc_utils.enum_parse, c=StatCateg), choices=StatCateg,
- help='Statistical method to employ'
+        '--split-jobs', type=misc_utils.parse_bool, default='True',
+        help='Whether the scan was split into one job per point'
)
parser.add_argument(
- '--sens-bins', type=int, default=10,
- help='Number of bins for the Bayes factor plot'
+ '--dimensions', type=int, nargs='*', default=[3, 6],
+ help='Set the new physics dimensions to consider'
)
parser.add_argument(
- '--split-jobs', type=misc_utils.parse_bool, default='False',
- help='Did the jobs get split'
+        '--source-ratios', type=int, nargs='*', default=[1, 2, 0],
+        help='Set the source flavour ratios as flattened triplets'
)
parser.add_argument(
- '--plot', type=misc_utils.parse_bool, default='True',
- help='Make sensitivity plots'
+        '--texture', type=partial(misc_utils.enum_parse, c=Texture),
+        default='none', choices=Texture, help='Set the BSM mixing texture'
)
parser.add_argument(
- '--plot-statistic', type=misc_utils.parse_bool, default='False',
- help='Plot MultiNest evidence or LLH value'
+        '--data', default='asimov', type=partial(misc_utils.enum_parse, c=DataType),
+        choices=DataType, help='Select the datatype'
)
+    parser.add_argument(
+        '--injected-ratio', type=float, nargs=3, required=False,
+        help='Injected ratio if not using data'
+    )
- fr_utils.fr_argparse(parser)
- gf_utils.gf_argparse(parser)
- llh_utils.likelihood_argparse(parser)
- mn_utils.mn_argparse(parser)
- nuisance_argparse(parser)
- misc_utils.remove_option(parser, 'dimension')
- misc_utils.remove_option(parser, 'source_ratio')
- misc_utils.remove_option(parser, 'scale')
- misc_utils.remove_option(parser, 'scale_region')
parser.add_argument(
- '--dimensions', type=int, nargs='*', default=[3, 6],
- help='Set the new physics dimensions to consider'
+ '--plot-x', type=misc_utils.parse_bool, default='True',
+ help='Make sensitivity plot x vs limit'
)
parser.add_argument(
- '--source-ratios', type=int, nargs='*', default=[1, 2, 0],
- help='Set the source flavour ratios for the case when you want to fix it'
+ '--plot-table', type=misc_utils.parse_bool, default='True',
+ help='Make sensitivity table plot'
)
+ parser.add_argument(
+ '--plot-statistic', type=misc_utils.parse_bool, default='False',
+ help='Plot MultiNest evidence or LLH value'
+ )
+    llh_utils.llh_argparse(parser)
if args is None: return parser.parse_args()
else: return parser.parse_args(args.split())
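
Since parse_args also accepts a whitespace-separated string, an illustrative
invocation of the rewritten plotter (ratios and options here are placeholders)
is:

    # Illustrative only: two source-ratio triplets, two dimensions.
    args = parse_args('--dimensions 3 6 --source-ratios 1 2 0 0 1 0 '
                      '--texture none --data real --segments 10')
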
@@ -173,99 +99,70 @@ def parse_args(args=None):
def main():
args = parse_args()
process_args(args)
- args.scale = 0
misc_utils.print_args(args)
- asimov_paramset, llh_paramset = get_paramsets(args, define_nuisance())
-
- scale = llh_paramset.from_tag(ParamTag.SCALE)[0]
- mmangles = llh_paramset.from_tag(ParamTag.MMANGLES)
- if args.run_method is SensitivityCateg.FULL:
- st_paramset_arr = [llh_paramset.from_tag(ParamTag.SCALE, invert=True)]
- elif args.run_method in [SensitivityCateg.FIXED_ANGLE, SensitivityCateg.CORR_ANGLE]:
- nscale_pmset = llh_paramset.from_tag([ParamTag.SCALE, ParamTag.MMANGLES], invert=True)
- st_paramset_arr = [nscale_pmset] * 3
- elif args.run_method in [SensitivityCateg.FIXED_ONE_ANGLE, SensitivityCateg.CORR_ONE_ANGLE]:
- nscale_pmset = llh_paramset.from_tag(ParamTag.SCALE, invert=True)
- st_paramset_arr = []
- for x in xrange(3):
- st_paramset_arr.append(
- ParamSet([prms for prms in nscale_pmset
- if mmangles[x].name != prms.name])
- )
-
- corr_angles_categ = [SensitivityCateg.CORR_ANGLE, SensitivityCateg.CORR_ONE_ANGLE]
- fixed_angle_categ = [SensitivityCateg.FIXED_ANGLE, SensitivityCateg.FIXED_ONE_ANGLE]
-
- if args.run_method in corr_angles_categ:
- scan_angles = np.linspace(0+1e-9, 1-1e-9, args.sens_bins)
- else: scan_angles = np.array([0])
- print 'scan_angles', scan_angles
-
dims = len(args.dimensions)
srcs = len(args.source_ratios)
- if args.run_method is SensitivityCateg.FULL:
- statistic_arr = np.full((dims, srcs, args.sens_bins, 2), np.nan)
- elif args.run_method in fixed_angle_categ:
- statistic_arr = np.full((dims, srcs, len(st_paramset_arr), args.sens_bins, 2), np.nan)
- elif args.run_method in corr_angles_categ:
- statistic_arr = np.full(
- (dims, srcs, len(st_paramset_arr), args.sens_bins, args.sens_bins, 3), np.nan
- )
+ if args.texture is Texture.NONE:
+ textures = [Texture.OEU, Texture.OET, Texture.OUT]
+ else:
+ textures = [args.texture]
+ texs = len(textures)
+
+ prefix = ''
+ # prefix = 'noprior'
+
+ # Initialise data structure.
+ statistic_arr = np.full((dims, srcs, texs, args.segments, 2), np.nan)
print 'Loading data'
for idim, dim in enumerate(args.dimensions):
argsc = deepcopy(args)
argsc.dimension = dim
- _, scale_region = fr_utils.estimate_scale(argsc)
- argsc.scale_region = scale_region
- scan_scales = np.linspace(
- np.log10(scale_region[0]), np.log10(scale_region[1]), args.sens_bins
+
+ # Array of scales to scan over.
+ boundaries = fr_utils.SCALE_BOUNDARIES[argsc.dimension]
+ eval_scales = np.linspace(
+ boundaries[0], boundaries[1], args.segments-1
)
- scan_scales = np.concatenate([[-100.], scan_scales])
+ eval_scales = np.concatenate([[-100.], eval_scales])
for isrc, src in enumerate(args.source_ratios):
argsc.source_ratio = src
- infile = args.infile
- if args.likelihood is Likelihood.GOLEMFIT:
- infile += '/golemfit/'
- elif args.likelihood is Likelihood.GAUSSIAN:
- infile += '/gaussian/'
- if args.likelihood is Likelihood.GAUSSIAN:
- infile += '{0}/'.format(str(args.sigma_ratio).replace('.', '_'))
- infile += '/DIM{0}/fix_ifr/{1}/{2}/{3}/fr_stat'.format(
- # infile += '/DIM{0}/fix_ifr/prior/{1}/{2}/{3}/fr_stat'.format(
- # infile += '/DIM{0}/fix_ifr/{1}/{2}/{3}/old/fr_stat'.format(
- # infile += '/DIM{0}/fix_ifr/seed2/{1}/{2}/{3}/fr_stat'.format(
- # infile += '/DIM{0}/fix_ifr/100TeV/{1}/{2}/{3}/fr_stat'.format(
- # infile += '/DIM{0}/fix_ifr/strictprior/{1}/{2}/{3}/fr_stat'.format(
- # infile += '/DIM{0}/fix_ifr/noprior/{1}/{2}/{3}/fr_stat'.format(
- dim, *map(misc_utils.parse_enum, [args.stat_method, args.run_method, args.data])
- ) + misc_utils.gen_identifier(argsc)
- print '== {0:<25} = {1}'.format('infile', infile)
-
- if args.split_jobs:
- for idx_an, an in enumerate(scan_angles):
- for idx_sc, sc in enumerate(scan_scales):
- filename = infile + '_scale_{0:.0E}'.format(np.power(10, sc))
+ for itex, texture in enumerate(textures):
+                argsc.texture = texture
+
+ base_infile = args.datadir + '/{0}/{1}/{2}/fr_stat'.format(
+                    misc_utils.parse_enum(args.stat_method),
+                    misc_utils.parse_enum(args.data), prefix
+ ) + misc_utils.gen_identifier(argsc)
+
+ print '== {0:<25} = {1}'.format('base_infile', base_infile)
+
+ if args.split_jobs:
+ for idx_sc, scale in enumerate(eval_scales):
+ infile = base_infile + '_scale_{0:.0E}'.format(
+ np.power(10, scale)
+ )
try:
- if args.run_method in fixed_angle_categ:
- print 'Loading from {0}'.format(filename+'.npy')
- statistic_arr[idim][isrc][:,idx_sc] = np.load(filename+'.npy')[:,0]
- if args.run_method in corr_angles_categ:
- filename += '_angle_{0:<04.2}'.format(an)
- print 'Loading from {0}'.format(filename+'.npy')
- statistic_arr[idim][isrc][:,idx_an,idx_sc] = np.load(filename+'.npy')[:,0,0]
+ print 'Loading from {0}'.format(infile+'.npy')
+ statistic_arr[idim][isrc][itex][idx_sc] = \
+                            np.load(infile+'.npy')[0]
except:
- print 'Unable to load file {0}'.format(filename+'.npy')
+ print 'Unable to load file {0}'.format(
+ infile+'.npy'
+ )
continue
- else:
- print 'Loading from {0}'.format(infile+'.npy')
- try:
- statistic_arr[idim][isrc] = np.load(infile+'.npy')
- except:
- print 'Unable to load file {0}'.format(infile+'.npy')
- continue
+ else:
+ print 'Loading from {0}'.format(base_infile+'.npy')
+ try:
+ statistic_arr[idim][isrc][itex] = \
+ np.load(base_infile+'.npy')
+ except:
+ print 'Unable to load file {0}'.format(
+ base_infile+'.npy'
+ )
+ continue
data = ma.masked_invalid(statistic_arr)
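
The loaded results land in a fixed-shape array whose last axis holds [scale,
statistic]; points that failed to load stay NaN and are masked out here. A
minimal sketch of that pattern:

    import numpy as np
    import numpy.ma as ma
    stat = np.full((2, 3, 3, 10, 2), np.nan)   # (dims, srcs, texs, segments, 2)
    stat[0, 0, 0, 0] = [-100., -1000.3]        # one successfully loaded point
    data = ma.masked_invalid(stat)             # NaN entries are masked
    # data.count() -> 2 valid numbers
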
@@ -273,114 +170,64 @@ def main():
if args.plot_statistic:
print 'Plotting statistic'
- argsc = deepcopy(args)
for idim, dim in enumerate(args.dimensions):
+ argsc = deepcopy(args)
argsc.dimension = dim
- _, scale_region = fr_utils.estimate_scale(argsc)
- argsc.scale_region = scale_region
- base_infile = args.infile
- if args.likelihood is Likelihood.GOLEMFIT:
- base_infile += '/golemfit/'
- elif args.likelihood is Likelihood.GAUSSIAN:
- base_infile += '/gaussian/'
- if args.likelihood is Likelihood.GAUSSIAN:
- base_infile += '{0}/'.format(str(args.sigma_ratio).replace('.', '_'))
- base_infile += '/DIM{0}/fix_ifr'.format(dim)
- # base_infile += '/DIM{0}/fix_ifr/prior'.format(dim)
- # base_infile += '/DIM{0}/fix_ifr/seed2'.format(dim)
- # base_infile += '/DIM{0}/fix_ifr/100TeV'.format(dim)
- # base_infile += '/DIM{0}/fix_ifr/strictprior'.format(dim)
- # base_infile += '/DIM{0}/fix_ifr/noprior'.format(dim)
+
+ # Array of scales to scan over.
+ boundaries = fr_utils.SCALE_BOUNDARIES[argsc.dimension]
+ eval_scales = np.linspace(
+ boundaries[0], boundaries[1], args.segments-1
+ )
+ eval_scales = np.concatenate([[-100.], eval_scales])
for isrc, src in enumerate(args.source_ratios):
argsc.source_ratio = src
- infile = base_infile +'/{0}/{1}/{2}/fr_stat'.format(
- # infile = base_infile +'/{0}/{1}/{2}/old/fr_stat'.format(
- *map(misc_utils.parse_enum, [args.stat_method, args.run_method, args.data])
- ) + misc_utils.gen_identifier(argsc)
- basename = os.path.dirname(infile)
- baseoutfile = basename[:5]+basename[5:].replace('data', 'plots')
- baseoutfile += '/' + os.path.basename(infile)
- if args.run_method is SensitivityCateg.FULL:
- outfile = baseoutfile
+ for itex, texture in enumerate(textures):
+                    argsc.texture = texture
+
+ base_infile = args.datadir + '/{0}/{1}/{2}/fr_stat'.format(
+                        misc_utils.parse_enum(args.stat_method),
+                        misc_utils.parse_enum(args.data), prefix
+ ) + misc_utils.gen_identifier(argsc)
+ basename = os.path.dirname(base_infile)
+ outfile = basename[:5]+basename[5:].replace('data', 'plots')
+ outfile += '/' + os.path.basename(base_infile)
+
+ label = plot_utils.texture_label(texture)[:-1]+r'=\pi/4$'
plot_utils.plot_statistic(
- data = data[idim][isrc],
+ data = data[idim][isrc][itex],
outfile = outfile,
outformat = ['png'],
args = argsc,
scale_param = scale,
+ label = label
)
- if args.run_method in fixed_angle_categ:
- for idx_scen in xrange(len(st_paramset_arr)):
- print '|||| SCENARIO = {0}'.format(idx_scen)
- outfile = baseoutfile + '_SCEN{0}'.format(idx_scen)
- if idx_scen == 0: label = r'$\mathcal{O}_{12}=\pi/4$'
- elif idx_scen == 1: label = r'$\mathcal{O}_{13}=\pi/4$'
- elif idx_scen == 2: label = r'$\mathcal{O}_{23}=\pi/4$'
- plot_utils.plot_statistic(
- data = data[idim][isrc][idx_scen],
- outfile = outfile,
- outformat = ['png'],
- args = argsc,
- scale_param = scale,
- label = label
- )
- elif args.run_method in corr_angles_categ:
- for idx_scen in xrange(len(st_paramset_arr)):
- print '|||| SCENARIO = {0}'.format(idx_scen)
- basescenoutfile = baseoutfile + '_SCEN{0}'.format(idx_scen)
- if idx_scen == 0: label = r'$\mathcal{O}_{12}='
- elif idx_scen == 1: label = r'$\mathcal{O}_{13}='
- elif idx_scen == 2: label = r'$\mathcal{O}_{23}='
- for idx_an, an in enumerate(scan_angles):
- print '|||| ANGLE = {0:<04.2}'.format(float(an))
- outfile = basescenoutfile + '_ANGLE{0}'.format(idx_an)
- _label = label + r'{0:<04.2}$'.format(an)
- plot_utils.plot_statistic(
- data = data[idim][isrc][idx_scen][idx_an][:,1:],
- outfile = outfile,
- outformat = ['png'],
- args = argsc,
- scale_param = scale,
- label = _label
- )
-
- if args.plot:
- print 'Plotting sensitivities'
- basename = args.infile[:5]+args.infile[5:].replace('data', 'plots')
- baseoutfile = basename + '/{0}/{1}/{2}/'.format(
- *map(misc_utils.parse_enum, [args.likelihood, args.stat_method, args.data])
- )
+ basename = args.datadir[:5]+args.datadir[5:].replace('data', 'plots')
+ baseoutfile = basename + '/{0}/{1}/{2}/'.format(
+        misc_utils.parse_enum(args.stat_method),
+        misc_utils.parse_enum(args.data), prefix
+ )
- if args.run_method is SensitivityCateg.FULL:
- plot_utils.plot_sens_full(
- data = data,
- outfile = baseoutfile + '/FULL',
- outformat = ['png', 'pdf'],
- args = args,
- )
- elif args.run_method in fixed_angle_categ:
- plot_utils.plot_sens_fixed_angle_pretty(
- data = data,
- outfile = baseoutfile + '/fixed_angle_pretty_substantial',
- outformat = ['png', 'pdf'],
- args = args,
- )
- # plot_utils.plot_sens_fixed_angle(
- # data = data,
- # outfile = baseoutfile + '/FIXED_ANGLE',
- # outformat = ['png'],
- # args = args,
- # )
- elif args.run_method in corr_angles_categ:
- plot_utils.plot_sens_corr_angle(
+ if args.plot_x:
+ for idim, dim in enumerate(args.dimensions):
+ argsc = deepcopy(args)
+ argsc.dimension = dim
+ plot_utils.plot_x(
data = data,
- outfile = baseoutfile + '/CORR_ANGLE',
+ outfile = baseoutfile + '/hese_x',
outformat = ['png', 'pdf'],
- args = args,
+ args = argsc,
)
+ if args.plot_table:
+ plot_utils.plot_table_sens(
+ data = data,
+ outfile = baseoutfile + '/hese_table',
+ outformat = ['png', 'pdf'],
+ args = args,
+ )
+
main.__doc__ = __doc__
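
The basename[:5] + replace idiom used for the plot paths above swaps the later
'data' path component for 'plots' while leaving the leading '/data' mount
point untouched, e.g. with the default datadir:

    s = '/data/user/smandalia/flavour_ratio/data/sensitivity'
    out = s[:5] + s[5:].replace('data', 'plots')
    # -> '/data/user/smandalia/flavour_ratio/plots/sensitivity'
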
diff --git a/sens.py b/sens.py
index 7049b83..22d464a 100755
--- a/sens.py
+++ b/sens.py
@@ -22,12 +22,10 @@ from utils import fr as fr_utils
from utils import gf as gf_utils
from utils import llh as llh_utils
from utils import misc as misc_utils
-from utils import plot as plot_utils
-from utils.enums import EnergyDependance, Likelihood, MixingScenario, ParamTag
-from utils.enums import PriorsCateg, SensitivityCateg, StatCateg
-from utils.param import Param, ParamSet, get_paramsets
-
from utils import mn as mn_utils
+from utils.enums import DataType, Likelihood, ParamTag
+from utils.enums import PriorsCateg, StatCateg, Texture
+from utils.param import Param, ParamSet
def define_nuisance():
@@ -58,20 +56,62 @@ def define_nuisance():
Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0. , 10.], std=0.1, tag=tag),
Param(name='astroNorm', value=6.9, seed=[0., 5. ], ranges=[0. , 20.], std=1.5, tag=tag),
Param(name='astroDeltaGamma', value=2.5, seed=[2.4, 3. ], ranges=[-5., 5. ], std=0.1, tag=tag)
- # Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0. , 50.], std=0.3, tag=tag),
- # Param(name='promptNorm', value=0., seed=[0. , 6. ], ranges=[0. , 50.], std=0.05, tag=tag),
- # Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0. , 50.], std=0.1, tag=tag),
- # Param(name='astroNorm', value=6.9, seed=[0.1, 10.], ranges=[0. , 50.], std=0.1, tag=tag),
- # Param(name='astroDeltaGamma', value=2.5, seed=[2.4, 3. ], ranges=[-5., 5. ], std=0.1, tag=tag)
- # Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0.5, 2. ], std=0.3, tag=tag),
- # Param(name='promptNorm', value=0., seed=[0. , 6. ], ranges=[0. , 6. ], std=0.05, tag=tag),
- # Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0.1, 2. ], std=0.1, tag=tag),
- # Param(name='astroNorm', value=6.9, seed=[0.1, 10.], ranges=[0.1, 10.], std=0.1, tag=tag),
- # Param(name='astroDeltaGamma', value=2.5, seed=[2.4, 3. ], ranges=[2.4, 3. ], std=0.1, tag=tag)
])
return ParamSet(nuisance)
+def get_paramsets(args, nuisance_paramset):
+ """Make the paramsets for generating the Asmimov MC sample and also running
+ the MCMC.
+ """
+ asimov_paramset = []
+ llh_paramset = []
+
+ gf_nuisance = [x for x in nuisance_paramset.from_tag(ParamTag.NUISANCE)]
+
+ llh_paramset.extend(
+ [x for x in nuisance_paramset.from_tag(ParamTag.SM_ANGLES)]
+ )
+ llh_paramset.extend(gf_nuisance)
+
+ for parm in llh_paramset:
+ parm.value = args.__getattribute__(parm.name)
+
+ tag = ParamTag.MMANGLES
+ llh_paramset.extend([
+ Param(name='np_s_12^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{12}^2', tag=tag),
+ Param(name='np_c_13^4', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{c}_{13}^4', tag=tag),
+ Param(name='np_s_23^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{23}^2', tag=tag),
+ Param(name='np_dcp', value=np.pi, ranges=[0., 2*np.pi], std=0.2, tex=r'\tilde{\delta_{CP}}', tag=tag)
+ ])
+
+ boundaries = fr_utils.SCALE_BOUNDARIES[args.dimension]
+ tag = ParamTag.SCALE
+ llh_paramset.append(
+ Param(
+ name='logLam', value=np.mean(boundaries), ranges=boundaries, std=3,
+ tex=r'{\rm log}_{10}\left (\Lambda^{-1}'+get_units(args.dimension)+r'\right )',
+ tag=tag
+ )
+ )
+ llh_paramset = ParamSet(llh_paramset)
+
+ tag = ParamTag.BESTFIT
+    if args.data in [DataType.ASIMOV, DataType.REALISATION]:
+        flavour_angles = fr_utils.fr_to_angles(args.injected_ratio)
+    else:
+        flavour_angles = fr_utils.fr_to_angles([1, 1, 1])
+
+ asimov_paramset.extend(gf_nuisance)
+ asimov_paramset.extend([
+ Param(name='astroFlavorAngle1', value=flavour_angles[0], ranges=[ 0., 1.], std=0.2, tag=tag),
+ Param(name='astroFlavorAngle2', value=flavour_angles[1], ranges=[-1., 1.], std=0.2, tag=tag),
+ ])
+ asimov_paramset = ParamSet(asimov_paramset)
+
+ return asimov_paramset, llh_paramset
+
+
def nuisance_argparse(parser):
nuisance = define_nuisance()
for parm in nuisance:
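
The scale parameter logLam introduced in get_paramsets above starts at the
centre of the per-dimension scan boundaries defined in utils/fr.py. For
example, for dimension 6:

    import numpy as np
    boundaries = (-56, -30)        # fr_utils.SCALE_BOUNDARIES[6]
    np.mean(boundaries)            # -43.0, the starting value of logLam
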
@@ -83,43 +123,25 @@ def nuisance_argparse(parser):
def process_args(args):
"""Process the input args."""
- if args.fix_mixing is not MixingScenario.NONE and args.fix_scale:
- raise NotImplementedError('Fixed mixing and scale not implemented')
- if args.fix_mixing is not MixingScenario.NONE and args.fix_mixing_almost:
- raise NotImplementedError(
- '--fix-mixing and --fix-mixing-almost cannot be used together'
- )
- if args.sens_run and args.fix_scale:
- raise NotImplementedError(
- '--sens-run and --fix-scale cannot be used together'
- )
-
- args.measured_ratio = fr_utils.normalise_fr(args.measured_ratio)
- if args.fix_source_ratio:
- args.source_ratio = fr_utils.normalise_fr(args.source_ratio)
+ args.source_ratio = fr_utils.normalise_fr(args.source_ratio)
+ if args.data is not DataType.REAL:
+ args.injected_ratio = fr_utils.normalise_fr(args.injected_ratio)
- if args.energy_dependance is EnergyDependance.SPECTRAL:
- args.binning = np.logspace(
- np.log10(args.binning[0]), np.log10(args.binning[1]), args.binning[2]+1
- )
-
- if not args.fix_scale:
- args.scale, args.scale_region = fr_utils.estimate_scale(args)
+ args.binning = np.logspace(
+ np.log10(args.binning[0]), np.log10(args.binning[1]), args.binning[2]+1
+ )
- if args.sens_eval_bin.lower() == 'all':
- args.sens_eval_bin = None
+ if args.eval_segment.lower() == 'all':
+ args.eval_segment = None
else:
- args.sens_eval_bin = int(args.sens_eval_bin)
-
- if args.sens_eval_bin is not None and args.plot_statistic:
- print 'Cannot make plot when specific scale bin is chosen'
- args.plot_statistic = False
+ args.eval_segment = int(args.eval_segment)
if args.stat_method is StatCateg.FREQUENTIST and \
args.likelihood is Likelihood.GOLEMFIT:
args.likelihood = Likelihood.GF_FREQ
- args.burnin = False
+ if args.texture is Texture.NONE:
+ raise ValueError('Must assume a BSM texture')
def parse_args(args=None):
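
In the new process_args the --binning argument [low, high, nbins] always
becomes nbins+1 log-spaced bin edges. With the default 6e4 1e7 20:

    import numpy as np
    edges = np.logspace(np.log10(6e4), np.log10(1e7), 21)
    # 21 edges -> 20 bins spanning 60 TeV to 10 PeV
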
@@ -137,39 +159,20 @@ def parse_args(args=None):
help='Set the number of threads to use (int or "max")'
)
parser.add_argument(
- '--outfile', type=str, default='./untitled',
- help='Path to output chains'
- )
- parser.add_argument(
- '--sens-run', type=misc_utils.parse_bool, default='True',
- help='Generate sensitivities'
- )
- parser.add_argument(
- '--run-method', default='full',
- type=partial(misc_utils.enum_parse, c=SensitivityCateg),
- choices=SensitivityCateg,
- help='Choose which type of sensivity plot to make'
- )
- parser.add_argument(
- '--stat-method', default='bayesian',
- type=partial(misc_utils.enum_parse, c=StatCateg), choices=StatCateg,
- help='Statistical method to employ'
+ '--datadir', type=str, default='./untitled',
+ help='Path to store chains'
)
parser.add_argument(
- '--sens-bins', type=int, default=10,
- help='Number of bins for the Bayes factor plot'
+ '--segments', type=int, default=10,
+ help='Number of new physics scales to evaluate'
)
parser.add_argument(
- '--sens-eval-bin', type=str, default='all',
- help='Which bin to evalaute for Bayes factor plot'
- )
- parser.add_argument(
- '--plot-statistic', type=misc_utils.parse_bool, default='False',
- help='Plot MultiNest evidence or LLH value'
+ '--eval-segment', type=str, default='all',
+        help='Which point to evaluate'
)
fr_utils.fr_argparse(parser)
gf_utils.gf_argparse(parser)
- llh_utils.likelihood_argparse(parser)
+ llh_utils.llh_argparse(parser)
mn_utils.mn_argparse(parser)
nuisance_argparse(parser)
if args is None: return parser.parse_args()
@@ -186,235 +189,72 @@ def main():
asimov_paramset, llh_paramset = get_paramsets(args, define_nuisance())
- scale = llh_paramset.from_tag(ParamTag.SCALE)[0]
- mmangles = llh_paramset.from_tag(ParamTag.MMANGLES)
- if args.run_method is SensitivityCateg.FULL:
- st_paramset_arr = [llh_paramset.from_tag(ParamTag.SCALE, invert=True)]
- elif args.run_method in [SensitivityCateg.FIXED_ANGLE, SensitivityCateg.CORR_ANGLE]:
- nscale_pmset = llh_paramset.from_tag([ParamTag.SCALE, ParamTag.MMANGLES], invert=True)
- st_paramset_arr = [nscale_pmset] * 3
- elif args.run_method in [SensitivityCateg.FIXED_ONE_ANGLE, SensitivityCateg.CORR_ONE_ANGLE]:
- nscale_pmset = llh_paramset.from_tag(ParamTag.SCALE, invert=True)
- st_paramset_arr = []
- for x in xrange(3):
- st_paramset_arr.append(
- ParamSet([prms for prms in nscale_pmset
- if mmangles[x].name != prms.name])
- )
-
- scan_scales = np.linspace(
- np.log10(args.scale_region[0]), np.log10(args.scale_region[1]), args.sens_bins
+ # Scale and BSM mixings will be fixed.
+ scale_prm = llh_paramset.from_tag(ParamTag.SCALE)[0]
+ base_mn_pset = llh_paramset.from_tag(
+ [ParamTag.SCALE, ParamTag.MMANGLES], invert=True
)
- scan_scales = np.concatenate([[-100.], scan_scales])
- # scan_scales = np.array([-100.])
-
- corr_angles_categ = [SensitivityCateg.CORR_ANGLE, SensitivityCateg.CORR_ONE_ANGLE]
- fixed_angle_categ = [SensitivityCateg.FIXED_ANGLE, SensitivityCateg.FIXED_ONE_ANGLE]
- if args.run_method in corr_angles_categ:
- scan_angles = np.linspace(0+1e-9, 1-1e-9, args.sens_bins)
- else: scan_angles = np.array([0])
- print 'scan_scales', scan_scales
- print 'scan_angles', scan_angles
+ # Array of scales to scan over.
+ boundaries = fr_utils.SCALE_BOUNDARIES[args.dimension]
+ eval_scales = np.linspace(boundaries[0], boundaries[1], args.segments-1)
+ eval_scales = np.concatenate([[-100.], eval_scales])
- if args.sens_eval_bin is None:
- eval_dim = args.sens_bins
+ # Evaluate just one point (job), or all points.
+ if args.eval_segment is None:
+ eval_dim = args.segments
else: eval_dim = 1
- out = args.outfile+'/{0}/{1}/{2}/fr_stat'.format(
- *map(misc_utils.parse_enum, [args.stat_method, args.run_method, args.data])
+ outfile = args.datadir + '/{0}/{1}/fr_stat'.format(
+ *map(misc_utils.parse_enum, [args.stat_method, args.data])
) + misc_utils.gen_identifier(args)
- if args.sens_run:
- if args.likelihood in [Likelihood.GOLEMFIT, Likelihood.GF_FREQ]:
- fitter = gf_utils.setup_fitter(args, asimov_paramset)
- if args.stat_method is StatCateg.FREQUENTIST:
- flags, gf_nuisance = gf_utils.fit_flags(llh_paramset)
- llh_paramset = llh_paramset.remove_params(gf_nuisance)
- asimov_paramset = asimov_paramset.remove_params(gf_nuisance)
- st_paramset_arr = [x.remove_params(gf_nuisance)
- for x in st_paramset_arr]
- fitter.SetFitParametersFlag(flags)
- else: fitter = None
-
- if args.run_method is SensitivityCateg.FULL:
- statistic_arr = np.full((eval_dim, 2), np.nan)
- elif args.run_method in fixed_angle_categ:
- statistic_arr = np.full((len(st_paramset_arr), eval_dim, 2), np.nan)
- elif args.run_method in corr_angles_categ:
- statistic_arr = np.full(
- (len(st_paramset_arr), eval_dim, eval_dim, 3), np.nan
- )
- for idx_scen, sens_paramset in enumerate(st_paramset_arr):
- print '|||| SCENARIO = {0}'.format(idx_scen)
- if args.run_method in fixed_angle_categ:
- for x in mmangles: x.value = 0.+1e-9
- if idx_scen == 0 or idx_scen == 2:
- mmangles[idx_scen].value = np.sin(np.pi/4.)**2
- """s_12^2 or s_23^2"""
- mmangles[1].value = 1.
- """c_13^4"""
- elif idx_scen == 1:
- mmangles[idx_scen].value = np.cos(np.pi/4.)**4
- """c_13^4"""
-
- for idx_an, an in enumerate(scan_angles):
- if args.run_method in corr_angles_categ:
- for x in mmangles: x.value = 0.+1e-9
- angle = np.arcsin(np.sqrt(an))/2.
- if idx_scen == 0 or idx_scen == 2:
- mmangles[idx_scen].value = np.sin(angle)**2
- """s_12^2 or s_23^2"""
- mmangles[1].value = 1.
- """c_13^4"""
- elif idx_scen == 1:
- mmangles[idx_scen].value = np.cos(angle)**4
- """c_13^4"""
-
- for idx_sc, sc in enumerate(scan_scales):
- if args.sens_eval_bin is not None:
- if args.run_method in corr_angles_categ:
- index = idx_an*args.sens_bins + idx_sc
- else: index = idx_sc
- if index == args.sens_eval_bin:
- if idx_scen == 0:
- out += '_scale_{0:.0E}'.format(np.power(10, sc))
- if args.run_method in corr_angles_categ:
- out += '_angle_{0:<04.2}'.format(an)
- else: continue
-
- sc_param = llh_paramset.from_tag(ParamTag.SCALE)[0]
- if sc < sc_param.ranges[0]:
- sc_param.ranges = (sc, sc_param.ranges[1])
-
- if idx_sc == 0 or args.sens_eval_bin is not None:
- print '|||| ANGLE = {0:<04.2}'.format(float(an))
- print '|||| SCALE = {0:.0E}'.format(np.power(10, sc))
- scale.value = sc
- if args.stat_method is StatCateg.BAYESIAN:
- identifier = 'b{0}_{1}_sce{2}_sca{3}_an{4}'.format(
- args.sens_eval_bin, args.sens_bins, idx_scen, sc, idx_an
- )
- try:
- stat = mn_utils.mn_evidence(
- mn_paramset = sens_paramset,
- llh_paramset = llh_paramset,
- asimov_paramset = asimov_paramset,
- args = args,
- fitter = fitter,
- identifier = identifier
- )
- except:
- print 'Failed run, continuing'
- # raise
- continue
- print '## Evidence = {0}'.format(stat)
- elif args.stat_method is StatCateg.FREQUENTIST:
- raise NotImplementedError('Still needs testing')
- def fn(x):
- # TODO(shivesh): should be seed or ranges?
- # Force prior ranges to be inside "seed"
- for el in x:
- if el < 0 or el > 1: return np.inf
- pranges = sens_paramset.seeds
- for i, name in enumerate(sens_paramset.names):
- llh_paramset[name].value = \
- (pranges[i][1]-pranges[i][0])*x[i] + pranges[i][0]
- theta = llh_paramset.values
- llh = llh_utils.ln_prob(
- theta=theta, args=args, asimov_paramset=asimov_paramset,
- llh_paramset=llh_paramset, fitter=fitter
- )
- # print 'llh_paramset', llh_paramset
- # print '-llh', -llh
- return -llh
-
- n_params = len(sens_paramset)
- bounds = np.dstack([np.zeros(n_params), np.ones(n_params)])[0]
- x0_arr = np.linspace(0+1e-3, 1-1e-3, 3)
- stat = -np.inf
- try:
- for x0_v in x0_arr:
- x0 = np.full(n_params, x0_v)
- # res = minimize(fun=fn, x0=x0, method='Powell', tol=1e-3)
- res = minimize(fun=fn, x0=x0, method='Nelder-Mead', tol=1e-3)
- # res = minimize(fun=fn, x0=x0, method='L-BFGS-B', tol=1e-3, bounds=bounds)
- # res = minimize(fun=fn, x0=x0, method='BFGS', tol=1e-3)
- s = -fn(res.x)
- if s > stat:
- stat = s
- except AssertionError:
- # print 'Failed run, continuing'
- raise
- continue
- print '=== final llh', stat
- if args.run_method is SensitivityCateg.FULL:
- statistic_arr[idx_sc] = np.array([sc, stat])
- elif args.run_method in fixed_angle_categ:
- if args.sens_eval_bin is not None:
- statistic_arr[idx_scen][0] = np.array([sc, stat])
- else:
- statistic_arr[idx_scen][idx_sc] = np.array([sc, stat])
- elif args.run_method in corr_angles_categ:
- if args.sens_eval_bin is not None:
- statistic_arr[idx_scen][0][0] = np.array([an, sc, stat])
- else:
- statistic_arr[idx_scen][idx_an][idx_sc] = np.array([an, sc, stat])
-
- misc_utils.make_dir(out)
- print 'Saving to {0}'.format(out+'.npy')
- np.save(out+'.npy', statistic_arr)
-
- if args.plot_statistic:
- print 'Plotting statistic'
- if args.sens_run: raw = statistic_arr
- else: raw = np.load(out+'.npy')
- data = ma.masked_invalid(raw)
-
- basename = os.path.dirname(out) + '/statrun/' + os.path.basename(out)
- baseoutfile = basename[:5]+basename[5:].replace('data', 'plots')
- if args.run_method is SensitivityCateg.FULL:
- plot_utils.plot_statistic(
- data = data,
- outfile = baseoutfile,
- outformat = ['png'],
- args = args,
- scale_param = scale
+    # Set up GolemFit.
+ gf_utils.setup_fitter(args, asimov_paramset)
+
+ # Initialise data structure.
+ stat_arr = np.full((eval_dim, 2), np.nan)
+
+ for idx_sc, scale in enumerate(eval_scales):
+ if args.eval_segment is not None:
+ if idx_sc == args.eval_segment:
+ outfile += '_scale_{0:.0E}'.format(np.power(10, scale))
+ else: continue
+ print '|||| SCALE = {0:.0E}'.format(np.power(10, scale))
+
+    # Lower the scale boundary for the first (NULL) point and set the scale param.
+ if scale < scale_prm.ranges[0]:
+ scale_prm.ranges = (scale, scale_prm.ranges[1])
+ scale_prm.value = scale
+
+ if args.stat_method is StatCateg.BAYESIAN:
+ identifier = 'b{0}_{1}_sce{2}_sca{3}'.format(
+            args.eval_segment, args.segments, args.texture.value, scale
)
- elif args.run_method in fixed_angle_categ:
- for idx_scen in xrange(len(st_paramset_arr)):
- print '|||| SCENARIO = {0}'.format(idx_scen)
- outfile = baseoutfile + '_SCEN{0}'.format(idx_scen)
- if idx_scen == 0: label = r'$\mathcal{O}_{12}=\pi/4$'
- elif idx_scen == 1: label = r'$\mathcal{O}_{13}=\pi/4$'
- elif idx_scen == 2: label = r'$\mathcal{O}_{23}=\pi/4$'
- plot_utils.plot_statistic(
- data = data[idx_scen],
- outfile = outfile,
- outformat = ['png'],
- args = args,
- scale_param = scale,
- label = label
+ try:
+ stat = mn_utils.mn_evidence(
+ mn_paramset = base_mn_pset,
+ llh_paramset = llh_paramset,
+ asimov_paramset = asimov_paramset,
+ args = args,
+ identifier = identifier
)
- elif args.run_method in corr_angles_categ:
- for idx_scen in xrange(len(st_paramset_arr)):
- print '|||| SCENARIO = {0}'.format(idx_scen)
- basescenoutfile = baseoutfile + '_SCEN{0}'.format(idx_scen)
- if idx_scen == 0: label = r'$\mathcal{O}_{12}='
- elif idx_scen == 1: label = r'$\mathcal{O}_{13}='
- elif idx_scen == 2: label = r'$\mathcal{O}_{23}='
- for idx_an, an in enumerate(scan_angles):
- print '|||| ANGLE = {0:<04.2}'.format(float(an))
- outfile = basescenoutfile + '_ANGLE{0}'.format(idx_an)
- _label = label + r'{0:<04.2}$'.format(an)
- plot_utils.plot_statistic(
- data = data[idx_scen][index][:,1:],
- outfile = outfile,
- outformat = ['png'],
- args = args,
- scale_param = scale,
- label = _label
- )
+ except:
+ print 'Failed run, continuing'
+ # raise
+ continue
+ print '## Evidence = {0}'.format(stat)
+ elif args.stat_method is StatCateg.FREQUENTIST:
+ raise NotImplementedError('Still needs testing')
+
+ if args.eval_segment is not None:
+ stat_arr[0] = np.array([scale, stat])
+ else:
+ stat_arr[idx_sc] = np.array([scale, stat])
+
+ misc_utils.make_dir(outfile)
+ print 'Saving to {0}'.format(outfile+'.npy')
+ np.save(outfile+'.npy', stat_arr)
main.__doc__ = __doc__
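
Each saved row is [log10(scale), statistic]; for the Bayesian branch the
statistic is the MultiNest ln-evidence, with the first point (scale 1e-100)
acting as the null. A sketch of how a log Bayes factor could then be read off
downstream (numbers made up):

    import numpy as np
    stat_arr = np.array([[-100., -1000.2],    # null point
                         [ -56., -1000.1],
                         [ -30., -1012.7]])
    lnB = stat_arr[1:, 1] - stat_arr[0, 1]    # evidence relative to the null
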
diff --git a/submitter/sens_dag.py b/submitter/sens_dag.py
index 2046efb..e60477c 100644
--- a/submitter/sens_dag.py
+++ b/submitter/sens_dag.py
@@ -3,34 +3,36 @@
import os
import numpy as np
-full_scan_mfr = [
- # (1, 1, 1), (1, 2, 0)
+sources = [
+ (1, 2, 0),
+ (1, 0, 0),
+ (0, 1, 0),
]
-fix_sfr_mfr = [
- (1, 1, 1, 1, 2, 0),
- (1, 1, 1, 1, 0, 0),
- (1, 1, 1, 0, 1, 0),
- # (1, 1, 1, 0, 0, 1),
- # (1, 1, 0, 1, 2, 0),
- # (1, 1, 0, 1, 0, 0),
- # (1, 1, 0, 0, 1, 0),
- # (1, 0, 0, 1, 0, 0),
- # (0, 1, 0, 0, 1, 0),
- # (1, 2, 0, 1, 2, 0),
- # (1, 2, 0, 0, 1, 0),
+dims = [
+ 3, 4, 5, 6, 7, 8
]
+textures = [
+ 'OEU', 'OET', 'OUT'
+]
+
+datadir = '/data/user/smandalia/flavour_ratio/data/sensitivity'
+scratchdir = '/scratch/smandalia/flavour_ratio'
+
+prefix = ''
+# prefix = '_noprior'
+
+golemfitsourcepath = os.environ['GOLEMSOURCEPATH'] + '/GolemFit'
+condor_script = golemfitsourcepath + '/scripts/flavour_ratio/submitter/sens_submit.sub'
+
GLOBAL_PARAMS = {}
# Bayes Factor
-sens_eval_bin = 'true' # set to 'all' to run normally
GLOBAL_PARAMS.update(dict(
- sens_run = 'True',
- run_method = 'fixed_angle', # full, fixed_angle, corr_angle
- stat_method = 'bayesian',
- sens_bins = 10,
- seed = None
+ stat_method = 'bayesian',
+ segments = 10,
+ seed = 26
))
# MultiNest
@@ -40,31 +42,14 @@ GLOBAL_PARAMS.update(dict(
# mn_live_points = 300,
# mn_tolerance = 0.1,
mn_tolerance = 0.3,
- mn_output = './mnrun'
+ mn_output = scratchdir + '/mnrun'
))
# FR
-# dimension = [6]
-# dimension = [3, 6]
-dimension = [3, 4, 5, 6, 7, 8]
GLOBAL_PARAMS.update(dict(
- threads = 1,
- binning = '6e4 1e7 20',
- no_bsm = 'False',
- scale_region = "1E10",
- energy_dependance = 'spectral',
- spectral_index = -2,
- fix_mixing = 'None',
- fix_mixing_almost = 'False',
- fold_index = 'True',
- save_measured_fr = 'False',
- output_measured_fr = './frs/'
-))
-
-# Likelihood
-GLOBAL_PARAMS.update(dict(
- likelihood = 'golemfit',
- sigma_ratio = '0.01'
+ threads = 4,
+ binning = '6e4 1e7 20',
+ no_bsm = 'False'
))
# GolemFit
@@ -73,95 +58,32 @@ GLOBAL_PARAMS.update(dict(
data = 'real'
))
-# Plot
-GLOBAL_PARAMS.update(dict(
- plot_statistic = 'True'
-))
-
-outfile = 'dagman_FR_SENS_{0}_{1}_{2}_{3}'.format(
- GLOBAL_PARAMS['stat_method'], GLOBAL_PARAMS['run_method'],
- GLOBAL_PARAMS['likelihood'], GLOBAL_PARAMS['data']
+dagfile = 'dagman_SENS_{0}_{1}'.format(
+ GLOBAL_PARAMS['stat_method'], GLOBAL_PARAMS['data']
)
-# outfile += '_seed2'
-# outfile += '_tol03'
-# outfile += '_NULL'
-# outfile += '_prior'
-# outfile += '_strictprior'
-# outfile += '_noprior'
-outfile += '.submit'
-golemfitsourcepath = os.environ['GOLEMSOURCEPATH'] + '/GolemFit'
-condor_script = golemfitsourcepath + '/scripts/flavour_ratio/submitter/sens_submit.sub'
+dagfile += prefix + '.submit'
-if sens_eval_bin.lower() != 'all':
- if GLOBAL_PARAMS['run_method'].lower() == 'corr_angle':
- raise NotImplementedError
- sens_runs = GLOBAL_PARAMS['sens_bins']**2
- else:
- sens_runs = GLOBAL_PARAMS['sens_bins'] + 1
-else: sens_runs = 1
-
-with open(outfile, 'w') as f:
+with open(dagfile, 'w') as f:
job_number = 1
- for dim in dimension:
- print 'dimension', dim
- outchain_head = '/data/user/smandalia/flavour_ratio/data/{0}/DIM{1}'.format(
- GLOBAL_PARAMS['likelihood'], dim
- )
- for frs in fix_sfr_mfr:
- print 'frs', frs
- output = outchain_head + '/fix_ifr/'
- if GLOBAL_PARAMS['likelihood'].lower() == 'gaussian':
- output += '{0}/'.format(str(GLOBAL_PARAMS['sigma_ratio']).replace('.', '_'))
- # output += 'seed2/'
- # output += 'mn_noverlap/'
- # output += 'tol_03/'
- # output += 'prior/'
- # output += 'strictprior/'
- # output += 'noprior/'
- for r in xrange(sens_runs):
- print 'run', r
- f.write('JOB\tjob{0}\t{1}\n'.format(job_number, condor_script))
- f.write('VARS\tjob{0}\tdimension="{1}"\n'.format(job_number, dim))
- f.write('VARS\tjob{0}\tmr0="{1}"\n'.format(job_number, frs[0]))
- f.write('VARS\tjob{0}\tmr1="{1}"\n'.format(job_number, frs[1]))
- f.write('VARS\tjob{0}\tmr2="{1}"\n'.format(job_number, frs[2]))
- f.write('VARS\tjob{0}\tfix_source_ratio="{1}"\n'.format(job_number, True))
- f.write('VARS\tjob{0}\tsr0="{1}"\n'.format(job_number, frs[3]))
- f.write('VARS\tjob{0}\tsr1="{1}"\n'.format(job_number, frs[4]))
- f.write('VARS\tjob{0}\tsr2="{1}"\n'.format(job_number, frs[5]))
- if sens_eval_bin.lower() != 'all':
- f.write('VARS\tjob{0}\tsens_eval_bin="{1}"\n'.format(job_number, r))
- else:
- f.write('VARS\tjob{0}\tsens_eval_bin="{1}"\n'.format(job_number, 'all'))
- for key in GLOBAL_PARAMS.iterkeys():
- f.write('VARS\tjob{0}\t{1}="{2}"\n'.format(job_number, key, GLOBAL_PARAMS[key]))
- f.write('VARS\tjob{0}\toutfile="{1}"\n'.format(job_number, output))
- job_number += 1
- # break
-
- # for frs in full_scan_mfr:
- # print 'frs', frs
- # output = outchain_head + '/full/'
- # if GLOBAL_PARAMS['likelihood'].lower() == 'gaussian':
- # output += '{0}/'.format(str(GLOBAL_PARAMS['sigma_ratio']).replace('.', '_'))
- # for r in xrange(sens_runs):
- # print 'run', r
- # f.write('JOB\tjob{0}\t{1}\n'.format(job_number, condor_script))
- # f.write('VARS\tjob{0}\tdimension="{1}"\n'.format(job_number, dim))
- # f.write('VARS\tjob{0}\tmr0="{1}"\n'.format(job_number, frs[0]))
- # f.write('VARS\tjob{0}\tmr1="{1}"\n'.format(job_number, frs[1]))
- # f.write('VARS\tjob{0}\tmr2="{1}"\n'.format(job_number, frs[2]))
- # f.write('VARS\tjob{0}\tfix_source_ratio="{1}"\n'.format(job_number, False))
- # f.write('VARS\tjob{0}\tsr0="{1}"\n'.format(job_number, 0))
- # f.write('VARS\tjob{0}\tsr1="{1}"\n'.format(job_number, 0))
- # f.write('VARS\tjob{0}\tsr2="{1}"\n'.format(job_number, 0))
- # if sens_eval_bin.lower() != 'all':
- # f.write('VARS\tjob{0}\tsens_eval_bin="{1}"\n'.format(job_number, r))
- # else:
- # f.write('VARS\tjob{0}\tsens_eval_bin="{1}"\n'.format(job_number, 'all'))
- # for key in GLOBAL_PARAMS.iterkeys():
- # f.write('VARS\tjob{0}\t{1}="{2}"\n'.format(job_number, key, GLOBAL_PARAMS[key]))
- # f.write('VARS\tjob{0}\toutfile="{1}"\n'.format(job_number, output))
- # job_number += 1
+ for dim in dims:
+        print 'dim', dim
+ of_d = datadir + '/DIM{0}/{1}'.format(dim, prefix)
+ for src in sources:
+ print 'source flavour', src
+ for tex in textures:
+ print 'texture', tex
+ for r in xrange(GLOBAL_PARAMS['segments']):
+ print 'run', r
+ f.write('JOB\tjob{0}\t{1}\n'.format(job_number, condor_script))
+ f.write('VARS\tjob{0}\tdimension="{1}"\n'.format(job_number, dim))
+ f.write('VARS\tjob{0}\tsr0="{1}"\n'.format(job_number, src[0]))
+ f.write('VARS\tjob{0}\tsr1="{1}"\n'.format(job_number, src[1]))
+ f.write('VARS\tjob{0}\tsr2="{1}"\n'.format(job_number, src[2]))
+ f.write('VARS\tjob{0}\ttexture="{1}"\n'.format(job_number, tex))
+ f.write('VARS\tjob{0}\teval_segment="{1}"\n'.format(job_number, r))
+ for key in GLOBAL_PARAMS.iterkeys():
+ f.write('VARS\tjob{0}\t{1}="{2}"\n'.format(job_number, key, GLOBAL_PARAMS[key]))
+ f.write('VARS\tjob{0}\tdatadir="{1}"\n'.format(job_number, of_d))
+ job_number += 1
- print 'dag file = {0}'.format(outfile)
+ print 'dag file = {0}'.format(dagfile)
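
For each job the loop above emits a tab-separated block of this form into the
DAG file (shown here with spaces; GLOBAL_PARAMS lines abbreviated, values
illustrative):

    JOB   job1  .../submitter/sens_submit.sub
    VARS  job1  dimension="3"
    VARS  job1  sr0="1"
    VARS  job1  sr1="2"
    VARS  job1  sr2="0"
    VARS  job1  texture="OEU"
    VARS  job1  eval_segment="0"
    VARS  job1  stat_method="bayesian"
    VARS  job1  datadir="/data/user/smandalia/flavour_ratio/data/sensitivity/DIM3/"
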
diff --git a/submitter/sens_submit.sub b/submitter/sens_submit.sub
index 6c92337..5a2c670 100644
--- a/submitter/sens_submit.sub
+++ b/submitter/sens_submit.sub
@@ -1,10 +1,10 @@
Executable = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/sens.py
-Arguments = "--ast $(ast) --data $(data) --dimension $(dimension) --fix-mixing $(fix_mixing) --fix-source-ratio $(fix_source_ratio) --likelihood $(likelihood) --measured-ratio $(mr0) $(mr1) $(mr2) --no-bsm $(no_bsm) --outfile $(outfile) --scale-region $(scale_region) --seed $(seed) --sigma-ratio $(sigma_ratio) --source-ratio $(sr0) $(sr1) $(sr2) --threads $(threads) --energy-dependance $(energy_dependance) --spectral-index $(spectral_index) --binning $(binning) --fix-mixing-almost $(fix_mixing_almost) --sens-run $(sens_run) --run-method $(run_method) --stat-method $(stat_method) --sens-bins $(sens_bins) --sens-eval-bin $(sens_eval_bin) --mn-live-points $(mn_live_points) --mn-tolerance $(mn_tolerance) --mn-output $(mn_output) --plot-statistic $(plot_statistic) --fold-index $(fold_index) --save-measured-fr $(save_measured_fr) --output-measured-fr=$(output_measured_fr)"
+Arguments = "--ast $(ast) --data $(data) --dimension $(dimension) --no-bsm $(no_bsm) --datadir $(datadir) --seed $(seed) --source-ratio $(sr0) $(sr1) $(sr2) --threads $(threads) --binning $(binning) --texture $(texture) --segments $(segments) --eval-segment $(eval_segment) --stat-method $(stat_method) --mn-live-points $(mn_live_points) --mn-tolerance $(mn_tolerance) --mn-output $(mn_output)"
# All logs will go to a single file
-log = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).log
-output = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).out
-error = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).err
+log = /scratch/smandalia/flavour_ratio/submitter/logs/job_$(Cluster).log
+output = /scratch/smandalia/flavour_ratio/submitter/logs/job_$(Cluster).out
+error = /scratch/smandalia/flavour_ratio/submitter/logs/job_$(Cluster).err
getenv = True
# environment = "X509_USER_PROXY=x509up_u14830"
@@ -14,10 +14,10 @@ getenv = True
# but do not try to copy outputs back (see: https://htcondor-wiki.cs.wisc.edu/index.cgi/tktview?tn=3081)
# +TransferOutput=""
-Transfer_output_files = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/metaouts/
+# Transfer_output_files = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/metaouts/
request_memory = 3GB
-request_cpus = 1
+request_cpus = 4
Universe = vanilla
Notification = never
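
The DAG written by sens_dag.py is then submitted against this description in
the usual HTCondor way:

    condor_submit_dag dagman_SENS_bayesian_real.submit
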
diff --git a/utils/enums.py b/utils/enums.py
index 441a16f..e85158d 100644
--- a/utils/enums.py
+++ b/utils/enums.py
@@ -20,23 +20,9 @@ class DataType(Enum):
REALISATION = 3
-class EnergyDependance(Enum):
- MONO = 1
- SPECTRAL = 2
-
-
class Likelihood(Enum):
- FLAT = 1
- GAUSSIAN = 2
- GOLEMFIT = 3
- GF_FREQ = 4
-
-
-class MixingScenario(Enum):
- T12 = 1
- T13 = 2
- T23 = 3
- NONE = 4
+ GOLEMFIT = 1
+ GF_FREQ = 2
class ParamTag(Enum):
@@ -60,14 +46,6 @@ class MCMCSeedType(Enum):
GAUSSIAN = 2
-class SensitivityCateg(Enum):
- FULL = 1
- FIXED_ANGLE = 2
- CORR_ANGLE = 3
- FIXED_ONE_ANGLE = 4
- CORR_ONE_ANGLE = 5
-
-
class StatCateg(Enum):
BAYESIAN = 1
FREQUENTIST = 2
@@ -79,6 +57,7 @@ class SteeringCateg(Enum):
class Texture(Enum):
- OEU = 0
- OET = 1
- OUT = 2
+ OEU = 1
+ OET = 2
+ OUT = 3
+ NONE = 4
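
utils/misc.py's enum_parse (changed in this commit but not shown above) is
what the argparse types rely on; a minimal hypothetical sketch of the
behaviour assumed by default='none'/choices=Texture, i.e. a case-insensitive
name lookup:

    # Hypothetical stand-in for utils.misc.enum_parse:
    def enum_parse(s, c):
        return c[s.upper()]
    # enum_parse('oet', Texture) -> Texture.OET
    # enum_parse('none', Texture) -> Texture.NONE (the argparse default)
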
diff --git a/utils/fr.py b/utils/fr.py
index 1d12fe5..b8eba44 100644
--- a/utils/fr.py
+++ b/utils/fr.py
@@ -13,7 +13,7 @@ from functools import partial
import numpy as np
-from utils.enums import EnergyDependance, MixingScenario
+from utils.enums import Texture
from utils.misc import enum_parse, parse_bool
import mpmath as mp
@@ -40,7 +40,17 @@ ASIN = mp.asin
EXP = mp.exp
MASS_EIGENVALUES = [7.40E-23, 2.515E-21]
-"""SM mass eigenvalues"""
+"""SM mass eigenvalues."""
+
+SCALE_BOUNDARIES = {
+ 3 : (-32, -20),
+ 4 : (-40, -24),
+ 5 : (-48, -27),
+ 6 : (-56, -30),
+ 7 : (-64, -33),
+ 8 : (-72, -36)
+}
+"""Boundaries to scan the NP scale for each dimension."""
def determinant(x):
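
sens.py and plot_sens.py turn these boundaries into the actual scan grid:
segments-1 evenly spaced log10 scales plus a prepended null point at -100.
For dimension 6 with the default 10 segments:

    import numpy as np
    lo, hi = (-56, -30)                       # SCALE_BOUNDARIES[6]
    grid = np.linspace(lo, hi, 10 - 1)        # 9 physical scan points
    grid = np.concatenate([[-100.], grid])    # null point first
    # [-100., -56., -52.75, ..., -30.]
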
@@ -244,38 +254,13 @@ def normalise_fr(fr):
return np.array(fr) / float(np.sum(fr))
-def estimate_scale(args):
- """Estimate the scale at which new physics will enter."""
- try: m_eign = args.m3x_2
- except: m_eign = MASS_EIGENVALUES[1]
- if hasattr(args, 'scale'):
- if args.scale != 0:
- scale = args.scale
- scale_region = (scale/args.scale_region, scale*args.scale_region)
- return scale, scale_region
- if args.energy_dependance is EnergyDependance.MONO:
- scale = np.power(
- 10, np.round(np.log10(m_eign/args.energy)) - \
- np.log10(args.energy**(args.dimension-3))
- )
- scale_region = (scale/args.scale_region, scale*args.scale_region)
- elif args.energy_dependance is EnergyDependance.SPECTRAL:
- lower_s = (m_eign/args.binning[-1]) / (args.binning[-1]**(args.dimension-3))
- upper_s = (m_eign/args.binning[0]) / (args.binning[0]**(args.dimension-3))
- scale = np.power(10, np.average(np.log10([lower_s, upper_s])))
- diff = upper_s / lower_s
- scale_region = (lower_s/np.power(10, args.dimension), upper_s*diff*np.power(10, args.dimension))
- scale_region = [np.power(10, np.round(np.log10(x))) for x in scale_region]
- return scale, scale_region
-
-
def fr_argparse(parser):
parser.add_argument(
- '--measured-ratio', type=float, nargs=3, default=[1, 1, 1],
- help='Set the central value for the measured flavour ratio at IceCube'
+ '--injected-ratio', type=float, nargs=3, required=False,
+ help='Injected ratio if not using data'
)
parser.add_argument(
- '--source-ratio', type=float, nargs=3, default=[2, 1, 0],
+ '--source-ratio', type=float, nargs=3, default=[1, 2, 0],
help='Set the source flavour ratio for the case when you want to fix it'
)
parser.add_argument(
@@ -287,51 +272,13 @@ def fr_argparse(parser):
help='Set the new physics dimension to consider'
)
parser.add_argument(
- '--energy-dependance', default='spectral', type=partial(enum_parse, c=EnergyDependance),
- choices=EnergyDependance,
- help='Type of energy dependance to use in the BSM fit'
- )
- parser.add_argument(
- '--spectral-index', default=-2, type=float,
- help='Spectral index for spectral energy dependance'
- )
- parser.add_argument(
- '--fold-index', default='True', type=parse_bool,
- help='Fold in the spectral index when using GolemFit'
+ '--texture', type=partial(enum_parse, c=Texture),
+ default='none', choices=Texture, help='Set the BSM mixing texture'
)
parser.add_argument(
'--binning', default=[6e4, 1e7, 20], type=float, nargs=3,
help='Binning for spectral energy dependance'
)
- parser.add_argument(
- '--fix-source-ratio', type=parse_bool, default='False',
- help='Fix the source flavour ratio'
- )
- parser.add_argument(
- '--fix-mixing', type=partial(enum_parse, c=MixingScenario),
- default='None', choices=MixingScenario,
- help='Fix all mixing parameters to choice of maximal mixing'
- )
- parser.add_argument(
- '--fix-mixing-almost', type=parse_bool, default='False',
- help='Fix all mixing parameters except s23'
- )
- parser.add_argument(
- '--fix-scale', type=parse_bool, default='False',
- help='Fix the new physics scale'
- )
- parser.add_argument(
- '--scale', type=float, default=0,
- help='Set the new physics scale'
- )
- parser.add_argument(
- '--scale-region', type=float, default=1e10,
- help='Set the size of the box to scan for the scale'
- )
- parser.add_argument(
- '--energy', type=float, default=1000,
- help='Set the energy scale'
- )
def fr_to_angles(ratios):
@@ -363,8 +310,7 @@ NUFIT_U = angles_to_u((0.307, (1-0.02195)**2, 0.565, 3.97935))
def params_to_BSMu(theta, dim, energy, mass_eigenvalues=MASS_EIGENVALUES,
- sm_u=NUFIT_U, no_bsm=False, fix_mixing=MixingScenario.NONE,
- fix_mixing_almost=False, fix_scale=False, scale=None,
+ sm_u=NUFIT_U, no_bsm=False, texture=Texture.NONE,
check_uni=True, epsilon=1e-7):
"""Construct the BSM mixing matrix from the BSM parameters.
@@ -388,17 +334,8 @@ def params_to_BSMu(theta, dim, energy, mass_eigenvalues=MASS_EIGENVALUES,
no_bsm : bool
Turn off BSM behaviour
- fix_mixing : MixingScenario
- Fix the BSM mixing angles
-
- fix_mixing_almost : bool
- Fix the BSM mixing angles except one
-
- fix_scale : bool
- Fix the BSM scale
-
- scale : float
- Used with fix_scale - scale at which to fix
+ texture : Texture
+ BSM mixing texture
check_uni : bool
Check the resulting BSM mixing matrix is unitary
@@ -422,33 +359,20 @@ def params_to_BSMu(theta, dim, energy, mass_eigenvalues=MASS_EIGENVALUES,
'got\n{0}'.format(sm_u)
)
- if fix_mixing is not MixingScenario.NONE and fix_mixing_almost:
- raise NotImplementedError(
- '--fix-mixing and --fix-mixing-almost cannot be used together'
- )
-
if not isinstance(theta, (list, tuple)):
theta = [theta]
- if fix_mixing is MixingScenario.T12:
- s12_2, c13_4, s23_2, dcp, sc2 = 0.5, 1.0, 0.0, 0., theta
- elif fix_mixing is MixingScenario.T13:
- s12_2, c13_4, s23_2, dcp, sc2 = 0.0, 0.25, 0.0, 0., theta
- elif fix_mixing is MixingScenario.T23:
- s12_2, c13_4, s23_2, dcp, sc2 = 0.0, 1.0, 0.5, 0., theta
- elif fix_mixing_almost:
- s12_2, c13_4, dcp = 0.5, 1.0-1E-6, 0.
- s23_2, sc2 = theta
- elif fix_scale:
- s12_2, c13_4, s23_2, dcp = theta
- sc2 = scale
+ z = 0.+1e-9
+ if texture is Texture.OEU:
+ np_s12_2, np_c13_4, np_s23_2, np_dcp, sc2 = 0.5, 1.0, z, z, theta
+ elif texture is Texture.OET:
+ np_s12_2, np_c13_4, np_s23_2, np_dcp, sc2 = z, 0.25, z, z, theta
+ elif texture is Texture.OUT:
+ np_s12_2, np_c13_4, np_s23_2, np_dcp, sc2 = z, 1.0, 0.5, z, theta
else:
- s12_2, c13_4, s23_2, dcp, sc2 = theta
+ np_s12_2, np_c13_4, np_s23_2, np_dcp, sc2 = theta
- if len(theta) != 0:
- sc2 = np.power(10., sc2)
- else:
- sc2 = scale
+ sc2 = np.power(10., sc2)
sc1 = sc2 / 100.
mass_matrix = np.array(
@@ -458,11 +382,11 @@ def params_to_BSMu(theta, dim, energy, mass_eigenvalues=MASS_EIGENVALUES,
if no_bsm:
eg_vector = cardano_eqn(sm_ham)
else:
- new_physics_u = angles_to_u((s12_2, c13_4, s23_2, dcp))
- scale_matrix = np.array(
+ NP_U = angles_to_u((np_s12_2, np_c13_4, np_s23_2, np_dcp))
+ SC_U = np.array(
[[0, 0, 0], [0, sc1, 0], [0, 0, sc2]]
)
- bsm_term = (energy**(dim-3)) * np.dot(new_physics_u, np.dot(scale_matrix, new_physics_u.conj().T))
+ bsm_term = (energy**(dim-3)) * np.dot(NP_U, np.dot(SC_U, NP_U.conj().T))
bsm_ham = sm_ham + bsm_term
eg_vector = cardano_eqn(bsm_ham)
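
For reference, the new-physics term assembled above is
(E^(dim-3)) * U_NP diag(0, sc2/100, sc2) U_NP^dagger, added to the SM
Hamiltonian. A self-contained numpy sketch with a placeholder mixing matrix:

    import numpy as np
    energy, dim = 1.0e5, 6                  # GeV, operator dimension
    sc2 = 10.**-43                          # scale decoded from theta
    sc1 = sc2 / 100.
    NP_U = np.eye(3, dtype=complex)         # placeholder for angles_to_u(...)
    SC_U = np.diag([0., sc1, sc2])
    bsm_term = (energy**(dim-3)) * np.dot(NP_U, np.dot(SC_U, NP_U.conj().T))
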
diff --git a/utils/gf.py b/utils/gf.py
index 13d5728..bc004bd 100644
--- a/utils/gf.py
+++ b/utils/gf.py
@@ -24,6 +24,9 @@ from utils.misc import enum_parse, parse_bool, thread_factors
from utils.param import ParamSet
+FITTER = None
+
+
def fit_flags(llh_paramset):
default_flags = {
# False means it's not fixed in minimization
@@ -74,9 +77,6 @@ def steering_params(args):
elif args.likelihood is Likelihood.GF_FREQ:
params.frequentist = True;
- # For Tianlu
- # params.years = [999]
-
if hasattr(args, 'binning'):
params.minFitEnergy = args.binning[0] # GeV
params.maxFitEnergy = args.binning[-1] # GeV
@@ -95,59 +95,61 @@ def steering_params(args):
return params
-def setup_asimov(fitter, params):
+def setup_asimov(params):
print 'Injecting the model', params
asimov_params = gf.FitParameters(gf.sampleTag.MagicTau)
for parm in params:
asimov_params.__setattr__(parm.name, float(parm.value))
- fitter.SetupAsimov(asimov_params)
+ FITTER.SetupAsimov(asimov_params)
-def setup_realisation(fitter, params, seed):
+def setup_realisation(params, seed):
print 'Injecting the model', params
asimov_params = gf.FitParameters(gf.sampleTag.MagicTau)
for parm in params:
asimov_params.__setattr__(parm.name, float(parm.value))
- fitter.Swallow(fitter.SpitRealization(asimov_params, seed))
+ FITTER.Swallow(FITTER.SpitRealization(asimov_params, seed))
def setup_fitter(args, asimov_paramset):
+ global FITTER
datapaths = gf.DataPaths()
sparams = steering_params(args)
npp = gf.NewPhysicsParams()
- fitter = gf.GolemFit(datapaths, sparams, npp)
+ FITTER = gf.GolemFit(datapaths, sparams, npp)
if args.data is DataType.ASIMOV:
- setup_asimov(fitter, asimov_paramset)
+        setup_asimov(asimov_paramset)
elif args.data is DataType.REALISATION:
seed = args.seed if args.seed is not None else 1
- setup_realisation(fitter, asimov_paramset, seed)
+        setup_realisation(asimov_paramset, seed)
elif args.data is DataType.REAL:
print 'Using MagicTau DATA'
- return fitter
-def get_llh(fitter, params):
+def get_llh(params):
# print 'params', params
fitparams = gf.FitParameters(gf.sampleTag.MagicTau)
for parm in params:
fitparams.__setattr__(parm.name, float(parm.value))
- llh = -fitter.EvalLLH(fitparams)
+ llh = -FITTER.EvalLLH(fitparams)
return llh
-def get_llh_freq(fitter, params):
+def get_llh_freq(params):
print 'setting to {0}'.format(params)
fitparams = gf.FitParameters(gf.sampleTag.MagicTau)
for parm in params:
fitparams.__setattr__(parm.name, float(parm.value))
- fitter.SetFitParametersSeed(fitparams)
- llh = -fitter.MinLLH().likelihood
+ FITTER.SetFitParametersSeed(fitparams)
+ llh = -FITTER.MinLLH().likelihood
return llh
-def data_distributions(fitter):
- hdat = fitter.GetDataDistribution()
- binedges = np.asarray([fitter.GetZenithBinsData(), fitter.GetEnergyBinsData()])
+def data_distributions():
+ hdat = FITTER.GetDataDistribution()
+ binedges = np.asarray(
+ [FITTER.GetZenithBinsData(), FITTER.GetEnergyBinsData()]
+ )
return hdat, binedges
@@ -164,4 +166,3 @@ def gf_argparse(parser):
choices=SteeringCateg,
help='use asimov/fake dataset with specific steering'
)
-
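
With the fitter promoted to a module-level global, callers no longer thread a
fitter handle through every function. A sketch of the intended call pattern
(argument and paramset construction elided):

    from utils import gf as gf_utils
    gf_utils.setup_fitter(args, asimov_paramset)   # sets utils.gf.FITTER once
    llh = gf_utils.get_llh(hypo_paramset)          # reads FITTER internally
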
diff --git a/utils/llh.py b/utils/llh.py
index 0c1e97d..93587b9 100644
--- a/utils/llh.py
+++ b/utils/llh.py
@@ -17,7 +17,7 @@ from scipy.stats import multivariate_normal, truncnorm
from utils import fr as fr_utils
from utils import gf as gf_utils
-from utils.enums import EnergyDependance, Likelihood, ParamTag, PriorsCateg
+from utils.enums import Likelihood, ParamTag, PriorsCateg, StatCateg
from utils.misc import enum_parse, gen_identifier, parse_bool
@@ -34,22 +34,11 @@ def multi_gaussian(fr, fr_bf, sigma, offset=-320):
return np.log(multivariate_normal.pdf(fr, mean=fr_bf, cov=cov_fr)) + offset
-def likelihood_argparse(parser):
+def llh_argparse(parser):
parser.add_argument(
- '--likelihood', default='gaussian', type=partial(enum_parse, c=Likelihood),
- choices=Likelihood, help='likelihood contour'
- )
- parser.add_argument(
- '--sigma-ratio', type=float, default=0.01,
- help='Set the 1 sigma for the measured flavour ratio for a gaussian LLH'
- )
- parser.add_argument(
- '--save-measured-fr', type=parse_bool, default='False',
- help='Output the measured flavour ratios'
- )
- parser.add_argument(
- '--output-measured-fr', type=str, default='./frs',
- help='Output of the measured flavour ratios'
+ '--stat-method', default='bayesian',
+ type=partial(enum_parse, c=StatCateg), choices=StatCateg,
+ help='Statistical method to employ'
)
@@ -82,7 +71,7 @@ def lnprior(theta, paramset):
return prior
-def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
+def triangle_llh(theta, args, asimov_paramset, llh_paramset):
"""Log likelihood function for a given theta."""
if len(theta) != len(llh_paramset):
raise AssertionError(
@@ -95,31 +84,14 @@ def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
for param in llh_paramset.from_tag(ParamTag.NUISANCE):
hypo_paramset[param.name].value = param.value
- if args.energy_dependance is EnergyDependance.SPECTRAL:
- bin_centers = np.sqrt(args.binning[:-1]*args.binning[1:])
- bin_width = np.abs(np.diff(args.binning))
- if args.likelihood in [Likelihood.GOLEMFIT, Likelihood.GF_FREQ] \
- and args.fold_index:
- args.spectral_index = -hypo_paramset['astroDeltaGamma'].value
-
- if args.fix_source_ratio:
- if args.energy_dependance is EnergyDependance.MONO:
- source_flux = args.source_ratio
- elif args.energy_dependance is EnergyDependance.SPECTRAL:
- source_flux = np.array(
- [fr * np.power(bin_centers, args.spectral_index)
- for fr in args.source_ratio]
- ).T
- else:
- if args.energy_dependance is EnergyDependance.MONO:
- source_flux = fr_utils.angles_to_fr(
- llh_paramset.from_tag(ParamTag.SRCANGLES, values=True)
- )
- elif args.energy_dependance is EnergyDependance.SPECTRAL:
- source_flux = np.array(
- [fr * np.power(bin_centers, args.spectral_index)
- for fr in fr_utils.angles_to_fr(theta[-2:])]
- ).T
+ bin_centers = np.sqrt(args.binning[:-1]*args.binning[1:])
+ bin_width = np.abs(np.diff(args.binning))
+ spectral_index = -hypo_paramset['astroDeltaGamma'].value
+
+ source_flux = np.array(
+ [fr * np.power(bin_centers, spectral_index)
+ for fr in args.source_ratio]
+ ).T
bsm_angles = llh_paramset.from_tag(
[ParamTag.SCALE, ParamTag.MMANGLES], values=True
@@ -139,21 +111,7 @@ def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
if args.no_bsm:
fr = fr_utils.u_to_fr(source_flux, np.array(sm_u, dtype=np.complex256))
- elif args.energy_dependance is EnergyDependance.MONO:
- u = fr_utils.params_to_BSMu(
- theta = bsm_angles,
- dim = args.dimension,
- energy = args.energy,
- mass_eigenvalues = mass_eigenvalues,
- sm_u = sm_u,
- no_bsm = args.no_bsm,
- fix_mixing = args.fix_mixing,
- fix_mixing_almost = args.fix_mixing_almost,
- fix_scale = args.fix_scale,
- scale = args.scale
- )
- fr = fr_utils.u_to_fr(source_flux, u)
- elif args.energy_dependance is EnergyDependance.SPECTRAL:
+ else:
mf_perbin = []
for i_sf, sf_perbin in enumerate(source_flux):
u = fr_utils.params_to_BSMu(
@@ -163,10 +121,7 @@ def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
mass_eigenvalues = mass_eigenvalues,
sm_u = sm_u,
no_bsm = args.no_bsm,
- fix_mixing = args.fix_mixing,
- fix_mixing_almost = args.fix_mixing_almost,
- fix_scale = args.fix_scale,
- scale = args.scale
+ texture = args.texture,
)
fr = fr_utils.u_to_fr(sf_perbin, u)
mf_perbin.append(fr)
@@ -181,35 +136,18 @@ def triangle_llh(theta, args, asimov_paramset, llh_paramset, fitter):
for idx, param in enumerate(hypo_paramset.from_tag(ParamTag.BESTFIT)):
param.value = flavour_angles[idx]
- if args.likelihood is Likelihood.FLAT:
- llh = 1.
- elif args.likelihood is Likelihood.GAUSSIAN:
- fr_bf = args.measured_ratio
- llh = multi_gaussian(fr, fr_bf, args.sigma_ratio)
- elif args.likelihood is Likelihood.GOLEMFIT:
- llh = gf_utils.get_llh(fitter, hypo_paramset)
+ if args.likelihood is Likelihood.GOLEMFIT:
+ llh = gf_utils.get_llh(hypo_paramset)
elif args.likelihood is Likelihood.GF_FREQ:
- llh = gf_utils.get_llh_freq(fitter, hypo_paramset)
-
- if args.save_measured_fr and args.burnin is False:
- n = gen_identifier(args) + '.txt'
- with open(args.output_measured_fr + n, 'a') as f:
- f.write(r'{0:.3f} {1:.3f} {2:.3f}'.format(
- float(fr[0]), float(fr[1]), float(fr[2])
- ))
- for p in llh_paramset:
- f.write(r' {0:.3f}'.format(p.value))
- f.write(' LLH = {0:.3f}'.format(llh))
- f.write('\n')
-
+ llh = gf_utils.get_llh_freq(hypo_paramset)
return llh
-def ln_prob(theta, args, asimov_paramset, llh_paramset, fitter):
+def ln_prob(theta, args, asimov_paramset, llh_paramset):
lp = lnprior(theta, paramset=llh_paramset)
if not np.isfinite(lp):
return -np.inf
return lp + triangle_llh(
theta, args=args, asimov_paramset=asimov_paramset,
- llh_paramset=llh_paramset, fitter=fitter
+ llh_paramset=llh_paramset
)
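
With the MONO/SPECTRAL branching gone, triangle_llh always builds a per-bin source flux by weighting each flavour component with a power law at the geometric bin centres. A standalone sketch of that weighting, using toy binning and ratios (the real values come from args and the hypothesis paramset):

import numpy as np

binning = np.logspace(4, 7, 11)        # toy energy bin edges in GeV
source_ratio = np.array([1., 2., 0.])  # nu_e : nu_mu : nu_tau at the source
spectral_index = -2.5                  # plays the role of -astroDeltaGamma

bin_centers = np.sqrt(binning[:-1] * binning[1:])  # geometric bin centres
source_flux = np.array(
    [fr * np.power(bin_centers, spectral_index) for fr in source_ratio]
).T  # shape (n_bins, 3): one flavour composition per energy bin

print(source_flux.shape)
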
diff --git a/utils/misc.py b/utils/misc.py
index 26bd828..36d5330 100644
--- a/utils/misc.py
+++ b/utils/misc.py
@@ -19,7 +19,8 @@ from operator import attrgetter
import numpy as np
-from utils.enums import Likelihood, MixingScenario
+from utils.enums import str_enum
+from utils.enums import DataType, Likelihood, Texture
class SortingHelpFormatter(argparse.HelpFormatter):
@@ -31,30 +32,20 @@ class SortingHelpFormatter(argparse.HelpFormatter):
def solve_ratio(fr):
denominator = reduce(gcd, fr)
- return [int(x/denominator) for x in fr]
+ f = [int(x/denominator) for x in fr]
+ if f[0] > 1E3 or f[1] > 1E3 or f[2] > 1E3:
+ return '{0:.2f}_{1:.2f}_{2:.2f}'.format(fr[0], fr[1], fr[2])
+ else:
+ return '{0}_{1}_{2}'.format(f[0], f[1], f[2])
def gen_identifier(args):
f = '_DIM{0}'.format(args.dimension)
- mr1, mr2, mr3 = solve_ratio(args.measured_ratio)
- if args.fix_source_ratio:
- sr1, sr2, sr3 = solve_ratio(args.source_ratio)
- f += '_sfr_{0:G}_{1:G}_{2:G}_mfr_{3:G}_{4:G}_{5:G}'.format(
- sr1, sr2, sr3, mr1, mr2, mr3
- )
- else:
- f += '_mfr_{3:03d}_{4:03d}_{5:03d}'.format(mr1, mr2, mr3)
-
- if args.fix_mixing is not MixingScenario.NONE:
- f += '_{0}'.format(args.fix_mixing)
- elif args.fix_mixing_almost:
- f += '_fix_mixing_almost'
- elif args.fix_scale:
- f += '_fix_scale_{0}'.format(args.scale)
-
- if args.likelihood is Likelihood.FLAT: f += '_flat'
- elif args.likelihood is Likelihood.GAUSSIAN:
- f += '_sigma_{0:03d}'.format(int(args.sigma_ratio*1000))
+ f += '_sfr_' + solve_ratio(args.source_ratio)
+ if args.data in [DataType.ASIMOV, DataType.REALISATION]:
+ f += '_mfr_' + solve_ratio(args.injected_ratio)
+ if args.texture is not Texture.NONE:
+ f += '_{0}'.format(str_enum(args.texture))
return f
@@ -163,3 +154,71 @@ def thread_factors(t):
if t%x == 0:
return (x, int(t/x))
+
+def centers(x):
+ return (x[:-1]+x[1:])*0.5
+
+
+def get_units(dimension):
+ if dimension == 3: return r' / \:{\rm GeV}'
+ if dimension == 4: return r''
+ if dimension == 5: return r' / \:{\rm GeV}^{-1}'
+ if dimension == 6: return r' / \:{\rm GeV}^{-2}'
+ if dimension == 7: return r' / \:{\rm GeV}^{-3}'
+ if dimension == 8: return r' / \:{\rm GeV}^{-4}'
+
+
+def calc_nbins(x):
+ iqr = np.percentile(x, 75) - np.percentile(x, 25)
+ n = (np.max(x) - np.min(x)) / (2 * len(x)**(-1./3) * iqr)
+ return int(np.floor(n))
+
+
+def calc_bins(x):
+ nbins = calc_nbins(x)
+ return np.linspace(np.min(x), np.max(x)+2, num=nbins+1)
+
+
+def most_likely(arr):
+ """Return the densest region given a 1D array of data."""
+ binning = calc_bins(arr)
+ harr = np.histogram(arr, binning)[0]
+ return centers(binning)[np.argmax(harr)]
+
+
+def interval(arr, percentile=68.):
+ """Returns the *percentile* shortest interval around the mode."""
+ center = most_likely(arr)
+ sarr = sorted(arr)
+ delta = np.abs(sarr - center)
+ curr_low = np.argmin(delta)
+ curr_up = curr_low
+ npoints = len(sarr)
+ while curr_up - curr_low < percentile/100.*npoints:
+ if curr_low == 0:
+ curr_up += 1
+ elif curr_up == npoints-1:
+ curr_low -= 1
+ elif sarr[curr_up]-sarr[curr_low-1] < sarr[curr_up+1]-sarr[curr_low]:
+ curr_low -= 1
+ elif sarr[curr_up]-sarr[curr_low-1] > sarr[curr_up+1]-sarr[curr_low]:
+ curr_up += 1
+ elif (curr_up - curr_low) % 2:
+ # they are equal so step half of the time up and down
+ curr_low -= 1
+ else:
+ curr_up += 1
+ return sarr[curr_low], center, sarr[curr_up]
+
+
+def flat_angles_to_u(x):
+ """Convert from angles to mixing elements."""
+ # Local import avoids a circular dependency between utils.misc and utils.fr.
+ from utils.fr import angles_to_u
+ return abs(angles_to_u(x)).astype(np.float32).flatten().tolist()
+
+
+def myround(x, base=2, up=False, down=False):
+ if up and down: raise ValueError('cannot round both up and down')
+ if up: return int(base * np.ceil(float(x)/base))
+ if down: return int(base * np.floor(float(x)/base))
+ return int(base * np.round(float(x)/base))
+
+
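
The histogramming helpers relocated here (centers, calc_bins, most_likely, interval) can be exercised directly; a quick usage sketch on synthetic data, assuming utils.misc is importable:

import numpy as np
from utils.misc import interval

np.random.seed(0)
samples = np.random.normal(loc=1.0, scale=0.3, size=10000)
low, mode, high = interval(samples, percentile=68.)
print('mode {0:.2f}, 68% interval [{1:.2f}, {2:.2f}]'.format(mode, low, high))
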
diff --git a/utils/mn.py b/utils/mn.py
index 6582c80..c60d316 100644
--- a/utils/mn.py
+++ b/utils/mn.py
@@ -24,7 +24,7 @@ def CubePrior(cube, ndim, n_params):
def lnProb(cube, ndim, n_params, mn_paramset, llh_paramset, asimov_paramset,
- args, fitter):
+ args):
if ndim != len(mn_paramset):
raise AssertionError(
'Length of MultiNest scan paramset is not the same as the input '
@@ -41,7 +41,6 @@ def lnProb(cube, ndim, n_params, mn_paramset, llh_paramset, asimov_paramset,
args=args,
asimov_paramset=asimov_paramset,
llh_paramset=llh_paramset,
- fitter=fitter
)
return llh
@@ -61,13 +60,14 @@ def mn_argparse(parser):
)
-def mn_evidence(mn_paramset, llh_paramset, asimov_paramset, args, fitter,
+def mn_evidence(mn_paramset, llh_paramset, asimov_paramset, args,
identifier='mn'):
"""Run the MultiNest algorithm to calculate the evidence."""
n_params = len(mn_paramset)
for n in mn_paramset.names:
llh_paramset[n].value = mn_paramset[n].value
+ print 'llh_paramset', llh_paramset
lnProbEval = partial(
lnProb,
@@ -75,14 +75,13 @@ def mn_evidence(mn_paramset, llh_paramset, asimov_paramset, args, fitter,
llh_paramset = llh_paramset,
asimov_paramset = asimov_paramset,
args = args,
- fitter = fitter
)
llh = '{0}'.format(args.likelihood).split('.')[1]
data = '{0}'.format(args.data).split('.')[1]
- sr1, sr2, sr3 = solve_ratio(args.source_ratio)
- prefix = './mnrun/DIM{0}/{1}/{2}/s{3}{4}{5}/{6}'.format(
- args.dimension, data, llh, sr1, sr2, sr3, identifier
+ src_string = solve_ratio(args.source_ratio)
+ prefix = './mnrun/DIM{0}/{1}/{2}/s{3}/{4}'.format(
+ args.dimension, data, llh, src_string, identifier
)
make_dir(prefix)
print 'Running evidence calculation for {0}'.format(prefix)
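
PyMultiNest invokes its callback as f(cube, ndim, n_params), so once the fitter argument is gone, lnProb only needs its remaining keyword arguments pre-bound with functools.partial. A toy sketch of that binding, with simplified stand-ins for the bound arguments:

from functools import partial

def ln_prob(cube, ndim, n_params, mn_paramset, args):
    # Only (cube, ndim, n_params) come from the sampler; the rest is bound.
    return -sum((cube[i] - 0.5)**2 for i in range(ndim))

ln_prob_eval = partial(ln_prob, mn_paramset={'logLam': -25.}, args=None)
print(ln_prob_eval([0.4, 0.6], 2, 2))
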
diff --git a/utils/param.py b/utils/param.py
index 572b65a..558018e 100644
--- a/utils/param.py
+++ b/utils/param.py
@@ -16,9 +16,8 @@ from copy import deepcopy
import numpy as np
-from utils.plot import get_units
from utils.fr import fr_to_angles
-from utils.enums import DataType, Likelihood, MixingScenario, ParamTag, PriorsCateg
+from utils.enums import DataType, Likelihood, ParamTag, PriorsCateg
class Param(object):
@@ -219,66 +218,3 @@ class ParamSet(Sequence):
elif isinstance(p, ParamSet):
param_sequence.extend(p.params)
return ParamSet(param_sequence)
-
-
-def get_paramsets(args, nuisance_paramset):
- """Make the paramsets for generating the Asmimov MC sample and also running
- the MCMC.
- """
- asimov_paramset = []
- llh_paramset = []
-
- llh_paramset.extend(
- [x for x in nuisance_paramset.from_tag(ParamTag.SM_ANGLES)]
- )
- if args.likelihood in [Likelihood.GOLEMFIT, Likelihood.GF_FREQ]:
- gf_nuisance = [x for x in nuisance_paramset.from_tag(ParamTag.NUISANCE)]
- asimov_paramset.extend(gf_nuisance)
- llh_paramset.extend(gf_nuisance)
- for parm in llh_paramset:
- parm.value = args.__getattribute__(parm.name)
- tag = ParamTag.BESTFIT
- try:
- flavour_angles = fr_to_angles(args.measured_ratio)
- except:
- flavour_angles = fr_to_angles(args.injected_ratio)
- asimov_paramset.extend([
- Param(name='astroFlavorAngle1', value=flavour_angles[0], ranges=[0., 1.], std=0.2, tag=tag),
- Param(name='astroFlavorAngle2', value=flavour_angles[1], ranges=[-1., 1.], std=0.2, tag=tag),
- ])
- asimov_paramset = ParamSet(asimov_paramset)
-
- if hasattr(args, 'fix_source_ratio'):
- if args.fix_mixing is MixingScenario.NONE and not args.fix_mixing_almost:
- tag = ParamTag.MMANGLES
- llh_paramset.extend([
- Param(name='np_s_12^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{12}^2', tag=tag),
- Param(name='np_c_13^4', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{c}_{13}^4', tag=tag),
- Param(name='np_s_23^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{23}^2', tag=tag),
- Param(name='np_dcp', value=np.pi, ranges=[0., 2*np.pi], std=0.2, tex=r'\tilde{\delta_{CP}}', tag=tag)
- ])
- if args.fix_mixing_almost:
- tag = ParamTag.MMANGLES
- llh_paramset.extend([
- Param(name='np_s_23^2', value=0.5, ranges=[0., 1.], std=0.2, tex=r'\tilde{s}_{23}^4', tag=tag)
- ])
- if not args.fix_scale:
- tag = ParamTag.SCALE
- if hasattr(args, 'dimension'):
- llh_paramset.append(
- Param(name='logLam', value=np.log10(args.scale), ranges=np.log10(args.scale_region), std=3,
- tex=r'{\rm log}_{10}\left (\Lambda^{-1}'+get_units(args.dimension)+r'\right )', tag=tag)
- )
- elif hasattr(args, 'dimensions'):
- llh_paramset.append(
- Param(name='logLam', value=np.log10(args.scale), ranges=np.log10(args.scale_region), std=3,
- tex=r'{\rm log}_{10}\left (\Lambda^{-1} / GeV^{-d+4}\right )', tag=tag)
- )
- if not args.fix_source_ratio:
- tag = ParamTag.SRCANGLES
- llh_paramset.extend([
- Param(name='s_phi4', value=0.5, ranges=[0., 1.], std=0.2, tex=r'sin^4(\phi)', tag=tag),
- Param(name='c_2psi', value=0.5, ranges=[-1., 1.], std=0.2, tex=r'cos(2\psi)', tag=tag)
- ])
- llh_paramset = ParamSet(llh_paramset)
- return asimov_paramset, llh_paramset
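
get_paramsets is dropped from utils.param, but the tag-based filtering it was built on remains. A small sketch of that filtering, with illustrative values only and assuming the package is importable:

from utils.enums import ParamTag
from utils.param import Param, ParamSet

ps = ParamSet([
    Param(name='s_12_2', value=0.307, ranges=[0., 1.], std=0.013,
          tag=ParamTag.SM_ANGLES),
    Param(name='convNorm', value=1., ranges=[0.1, 10.], std=0.4,
          tag=ParamTag.NUISANCE),
])
print([p.name for p in ps.from_tag(ParamTag.SM_ANGLES)])
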
diff --git a/utils/plot.py b/utils/plot.py
index 6161cfb..29ef5dc 100644
--- a/utils/plot.py
+++ b/utils/plot.py
@@ -19,12 +19,13 @@ from scipy.interpolate import splev, splprep
from scipy.ndimage.filters import gaussian_filter
import matplotlib as mpl
+import matplotlib.patches as patches
+import matplotlib.gridspec as gridspec
mpl.use('Agg')
+
from matplotlib import rc
from matplotlib import pyplot as plt
from matplotlib.offsetbox import AnchoredText
-import matplotlib.patches as patches
-import matplotlib.gridspec as gridspec
from matplotlib.lines import Line2D
from matplotlib.patches import Patch
@@ -32,22 +33,19 @@ from getdist import plots, mcsamples
import ternary
from ternary.heatmapping import polygon_generator
-# print ternary.__file__
-# assert 0
import shapely.geometry as geometry
from utils import misc as misc_utils
from utils.enums import DataType, EnergyDependance, str_enum
-from utils.enums import Likelihood, MixingScenario, ParamTag, StatCateg
-from utils.enums import Texture
+from utils.enums import Likelihood, MixingScenario, ParamTag, StatCateg, Texture
from utils.fr import angles_to_u, angles_to_fr
-bayes_K = 1. # Substantial degree of belief.
-# bayes_K = 3/2. # Strong degree of belief.
-# bayes_K = 2. # Very strong degree of belief
-# bayes_K = 5/2. # Decisive degree of belief
+BAYES_K = 1. # Substantial degree of belief.
+# BAYES_K = 3/2. # Strong degree of belief.
+# BAYES_K = 2. # Very strong degree of belief.
+# BAYES_K = 5/2. # Decisive degree of belief.
if os.path.isfile('./plot_llh/paper.mplstyle'):
@@ -58,64 +56,16 @@ if 'submitter' in socket.gethostname():
rc('text', usetex=False)
-def centers(x):
- return (x[:-1]+x[1:])*0.5
-
-
-def get_units(dimension):
- if dimension == 3: return r' / \:{\rm GeV}'
- if dimension == 4: return r''
- if dimension == 5: return r' / \:{rm GeV}^{-1}'
- if dimension == 6: return r' / \:{rm GeV}^{-2}'
- if dimension == 7: return r' / \:{rm GeV}^{-3}'
- if dimension == 8: return r' / \:{rm GeV}^{-4}'
-
-
-def calc_nbins(x):
- n = (np.max(x) - np.min(x)) / (2 * len(x)**(-1./3) * (np.percentile(x, 75) - np.percentile(x, 25)))
- return np.floor(n)
-
-
-def calc_bins(x):
- nbins = calc_nbins(x)
- return np.linspace(np.min(x), np.max(x)+2, num=nbins+1)
-
-
-def most_likely(arr):
- """Return the densest region given a 1D array of data."""
- binning = calc_bins(arr)
- harr = np.histogram(arr, binning)[0]
- return centers(binning)[np.argmax(harr)]
-
-
-def interval(arr, percentile=68.):
- """Returns the *percentile* shortest interval around the mode."""
- center = most_likely(arr)
- sarr = sorted(arr)
- delta = np.abs(sarr - center)
- curr_low = np.argmin(delta)
- curr_up = curr_low
- npoints = len(sarr)
- while curr_up - curr_low < percentile/100.*npoints:
- if curr_low == 0:
- curr_up += 1
- elif curr_up == npoints-1:
- curr_low -= 1
- elif sarr[curr_up]-sarr[curr_low-1] < sarr[curr_up+1]-sarr[curr_low]:
- curr_low -= 1
- elif sarr[curr_up]-sarr[curr_low-1] > sarr[curr_up+1]-sarr[curr_low]:
- curr_up += 1
- elif (curr_up - curr_low) % 2:
- # they are equal so step half of the time up and down
- curr_low -= 1
- else:
- curr_up += 1
- return sarr[curr_low], center, sarr[curr_up]
-
-
-def flat_angles_to_u(x):
- """Convert from angles to mixing elements."""
- return abs(angles_to_u(x)).astype(np.float32).flatten().tolist()
+def gen_figtext(args):
+ """Generate the figure text."""
+ t = ''
+ t += 'Source flavour ratio = [{0}]'.format(misc_utils.solve_ratio(args.source_ratio))
+ if args.data in [DataType.ASIMOV, DataType.REALISATION]:
+ t += '\nIC injected flavour ratio = [{0}]'.format(
+ misc_utils.solve_ratio(args.injected_ratio)
+ )
+ t += '\nDimension = {0}'.format(args.dimension)
+ return t
def plot_Tchain(Tchain, axes_labels, ranges):
@@ -139,39 +89,6 @@ def plot_Tchain(Tchain, axes_labels, ranges):
return g
-def gen_figtext(args):
- """Generate the figure text."""
- t = ''
- mr1, mr2, mr3 = misc_utils.solve_ratio(args.measured_ratio)
- if args.fix_source_ratio:
- sr1, sr2, sr3 = misc_utils.solve_ratio(args.source_ratio)
- t += 'Source flavour ratio = [{0}, {1}, {2}]'.format(sr1, sr2, sr3)
- if args.data in [DataType.ASIMOV, DataType.REALISATION]:
- t += '\nIC observed flavour ratio = [{0}, {1}, {2}]'.format(
- mr1, mr2, mr3
- )
- t += '\nDimension = {0}'.format(args.dimension)
- else:
- if args.data in [DataType.ASIMOV, DataType.REALISATION]:
- t += 'IC observed flavour ratio = [{0}, {1}, ' \
- '{2}]\nDimension = {3}'.format(
- mr1, mr2, mr3, args.dimension, int(args.energy)
- )
- if args.fix_scale:
- t += 'Scale = {0}'.format(args.scale)
- if args.likelihood is Likelihood.GAUSSIAN:
- t += '\nSigma = {0:.3f}'.format(args.sigma_ratio)
- if args.energy_dependance is EnergyDependance.SPECTRAL:
- if not args.fold_index:
- t += '\nSpectral Index = {0}'.format(int(args.spectral_index))
- t += '\nBinning = [{0}, {1}] TeV - {2} bins'.format(
- int(args.binning[0]/1e3), int(args.binning[-1]/1e3), len(args.binning)-1
- )
- elif args.energy_dependance is EnergyDependance.MONO:
- t += '\nEnergy = {0} TeV'.format(int(args.energy/1e3))
- return t
-
-
def chainer_plot(infile, outfile, outformat, args, llh_paramset, fig_text=None):
"""Make the triangle plot."""
if hasattr(args, 'plot_elements'):
@@ -287,13 +204,6 @@ def chainer_plot(infile, outfile, outformat, args, llh_paramset, fig_text=None):
g.export(outfile+'_elements.'+of)
-def myround(x, base=2, up=False, down=False):
- if up == down and up is True: assert 0
- if up: return int(base * np.round(float(x)/base-0.5))
- elif down: return int(base * np.round(float(x)/base+0.5))
- else: int(base * np.round(float(x)/base))
-
-
def plot_statistic(data, outfile, outformat, args, scale_param, label=None):
"""Make MultiNest factor or LLH value plot."""
print 'Making Statistic plot'
@@ -337,7 +247,7 @@ def plot_statistic(data, outfile, outformat, args, scale_param, label=None):
ax.plot(scales_rm, reduced_ev)
- ax.axhline(y=np.log(10**(bayes_K)), color='red', alpha=1., linewidth=1.3)
+ ax.axhline(y=np.log(10**(BAYES_K)), color='red', alpha=1., linewidth=1.3)
for ymaj in ax.yaxis.get_majorticklocs():
ax.axhline(y=ymaj, ls=':', color='gray', alpha=0.3, linewidth=1)
@@ -394,7 +304,7 @@ def plot_sens_full(data, outfile, outformat, args):
null = statistic[min_idx]
if args.stat_method is StatCateg.BAYESIAN:
reduced_ev = -(statistic - null)
- al = scales[reduced_ev > np.log(10**(bayes_K))] # Strong degree of belief
+ al = scales[reduced_ev > np.log(10**(BAYES_K))] # Strong degree of belief
# al = scales[reduced_ev > 0.4] # Testing
elif args.stat_method is StatCateg.FREQUENTIST:
reduced_ev = -2*(statistic - null)
@@ -437,8 +347,8 @@ def plot_sens_full(data, outfile, outformat, args):
fig.savefig(outfile+'.'+of, bbox_inches='tight', dpi=150)
-def plot_sens_fixed_angle_pretty(data, outfile, outformat, args):
- print 'Making FIXED_ANGLE_PRETTY sensitivity plot'
+def plot_table_sens(data, outfile, outformat, args):
+ print 'Making TABLE sensitivity plot'
argsc = deepcopy(args)
dims = len(data)
LV_atmo_90pc_limits = {
@@ -598,14 +508,14 @@ def plot_sens_fixed_angle_pretty(data, outfile, outformat, args):
null = statistic[min_idx]
if args.stat_method is StatCateg.BAYESIAN:
reduced_ev = -(statistic_rm - null)
- al = scales_rm[reduced_ev > np.log(10**(bayes_K))]
+ al = scales_rm[reduced_ev > np.log(10**(BAYES_K))]
elif args.stat_method is StatCateg.FREQUENTIST:
reduced_ev = -2*(statistic_rm - null)
al = scales_rm[reduced_ev > 2.71] # 90% CL for 1 DOF via Wilks
if len(al) == 0:
print 'No points for DIM {0} FRS {1}!'.format(dim, src)
continue
- if reduced_ev[-1] < np.log(10**(bayes_K)) - 0.1:
+ if reduced_ev[-1] < np.log(10**(BAYES_K)) - 0.1:
print 'Peaked contour does not exclude large scales! For ' \
'DIM {0} FRS{1}!'.format(dim, src)
continue
@@ -739,14 +649,14 @@ def plot_sens_fixed_angle(data, outfile, outformat, args):
null = statistic[min_idx]
if args.stat_method is StatCateg.BAYESIAN:
reduced_ev = -(statistic - null)
- al = scales[reduced_ev > np.log(10**(bayes_K))] # Strong degree of belief
+ al = scales[reduced_ev > np.log(10**(BAYES_K))] # Strong degree of belief
elif args.stat_method is StatCateg.FREQUENTIST:
reduced_ev = -2*(statistic - null)
al = scales[reduced_ev > 2.71] # 90% CL for 1 DOF via Wilks
if len(al) == 0:
print 'No points for DIM {0} FRS {1}!'.format(dim, src)
continue
- if reduced_ev[-1] < np.log(10**(bayes_K)) - 0.1:
+ if reduced_ev[-1] < np.log(10**(BAYES_K)) - 0.1:
print 'Peaked contour does not exclude large scales! For ' \
'DIM {0} FRS{1}!'.format(dim, src)
continue
@@ -903,7 +813,7 @@ def plot_sens_corr_angle(data, outfile, outformat, args):
print sep_arrays
if args.stat_method is StatCateg.BAYESIAN:
- reduced_pdat_mask = (sep_arrays[2] > np.log(10**(bayes_K))) # Strong degree of belief
+ reduced_pdat_mask = (sep_arrays[2] > np.log(10**(BAYES_K))) # Strong degree of belief
elif args.stat_method is StatCateg.FREQUENTIST:
reduced_pdat_mask = (sep_arrays[2] > 4.61) # 90% CL for 2 DOFS via Wilks
reduced_pdat = sep_arrays.T[reduced_pdat_mask].T
@@ -1175,7 +1085,7 @@ def plot_source_ternary(data, outfile, outformat, args):
for idim, dim in enumerate(args.dimensions):
print '|||| DIM = {0}, {1}'.format(idim, dim)
for isce in xrange(r_data.shape[1]):
- print '|||| SCEN = {0}'.format(str_enum(Texture(isce)))
+ print '|||| SCEN = {0}'.format(str_enum(MixingScenario(isce+1)))
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(111)
tax = get_tax(ax, scale=nsrcs)
@@ -1204,14 +1114,14 @@ def plot_source_ternary(data, outfile, outformat, args):
if args.stat_method is StatCateg.BAYESIAN:
reduced_ev = -(statistic_rm - null)
print 'reduced_ev', reduced_ev
- al = scales_rm[reduced_ev > np.log(10**(bayes_K))]
+ al = scales_rm[reduced_ev > np.log(10**(BAYES_K))]
else:
assert 0
if len(al) == 0:
print 'No points for DIM {0} FRS {1}!'.format(dim, src)
interp_dict[src_sc] = -60
continue
- if reduced_ev[-1] < np.log(10**(bayes_K)) - 0.1:
+ if reduced_ev[-1] < np.log(10**(BAYES_K)) - 0.1:
print 'Peaked contour does not exclude large scales! For ' \
'DIM {0} FRS{1}!'.format(dim, src)
interp_dict[src_sc] = -60
@@ -1244,8 +1154,9 @@ def texture_label(x):
raise AssertionError
-def plot_source_ternary_1D(data, outfile, outformat, args):
+def plot_x(data, outfile, outformat, args):
"""Ternary plot in the source flavour space for each operator texture."""
+ print 'Making X sensitivity plot'
sources = args.source_ratios
x_1D = []
i_src_1D = []
@@ -1279,7 +1190,7 @@ def plot_source_ternary_1D(data, outfile, outformat, args):
ax.set_ylabel(r'${\rm New\:Physics\:Scale}\:[\:{\rm log}_{10} (\Lambda_{d=6}^{-1}\:/\:{\rm GeV}^{-2})\: ]$', fontsize=18)
ax.set_xlim(0, 1)
for isce in xrange(r_data.shape[1]):
- print '|||| SCEN = {0}'.format(str_enum(Texture(isce)))
+ print '|||| SCEN = {0}'.format(str_enum(MixingScenario(isce+1)))
H = np.full(len(x_1D), np.nan)
for ix, x in enumerate(x_1D):
print '|||| X = {0}'.format(x)
@@ -1303,13 +1214,13 @@ def plot_source_ternary_1D(data, outfile, outformat, args):
if args.stat_method is StatCateg.BAYESIAN:
reduced_ev = -(statistic_rm - null)
print 'reduced_ev', reduced_ev
- al = scales_rm[reduced_ev > np.log(10**(bayes_K))]
+ al = scales_rm[reduced_ev > np.log(10**(BAYES_K))]
else:
assert 0
if len(al) == 0:
print 'No points for DIM {0} X {1}!'.format(dim, x)
continue
- if reduced_ev[-1] < np.log(10**(bayes_K)) - 0.1:
+ if reduced_ev[-1] < np.log(10**(BAYES_K)) - 0.1:
print 'Peaked contour does not exclude large scales! For ' \
'DIM {0} X {1}!'.format(dim, x)
continue
@@ -1320,7 +1231,7 @@ def plot_source_ternary_1D(data, outfile, outformat, args):
H = ma.masked_invalid(H)
H_0 = np.concatenate([[H[0]], H])
ax.step(be, H_0, linewidth=2,
- label=texture_label(Texture(isce)), linestyle='-',
+ label=texture_label(MixingScenario(isce+1)), linestyle='-',
drawstyle='steps-pre')
print 'H', H
print
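
Across the plotting routines, a scale point counts as excluded once the log Bayes factor against the null passes np.log(10**BAYES_K). A compact sketch of that masking with toy numbers; in the real plots the null is taken at the minimum-scale point of the scan:

import numpy as np

BAYES_K = 1.  # "substantial" threshold, as above

scales = np.linspace(-30, -20, 6)                          # toy log10(scale) points
statistic = np.array([0.0, -0.4, -1.2, -2.6, -3.8, -4.5])  # toy -log(evidence)

null = statistic[0]
reduced_ev = -(statistic - null)                   # log Bayes factor vs null
excluded = scales[reduced_ev > np.log(10**BAYES_K)]
print('limit at log10(scale) = {0}'.format(
    excluded[0] if len(excluded) else None))
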