| field | value | date |
|---|---|---|
| author | shivesh <s.p.mandalia@qmul.ac.uk> | 2019-04-12 11:01:36 -0500 |
| committer | shivesh <s.p.mandalia@qmul.ac.uk> | 2019-04-12 11:01:36 -0500 |
| commit | de54f2e44042920762ecc7fd86cd5a17356e77c3 (patch) | |
| tree | 650d04d927005e1186f16895f8eaf35270d12662 | |
| parent | 8c4f4b9eaf4408ad9bb46e924226da9dfb00fdb8 (diff) | |
| download | GolemFlavor-de54f2e44042920762ecc7fd86cd5a17356e77c3.tar.gz GolemFlavor-de54f2e44042920762ecc7fd86cd5a17356e77c3.zip | |
Fri 12 Apr 11:01:36 CDT 2019
| mode | file | lines changed |
|---|---|---|
| -rwxr-xr-x | contour.py | 171 |
| -rwxr-xr-x | contour_emcee.py | 229 |
| -rwxr-xr-x | plot_sens.py | 84 |
| -rwxr-xr-x | plot_sens_sourcescan.py | 411 |
| -rw-r--r-- | utils/misc.py | 11 |
| -rw-r--r-- | utils/plot.py | 53 |
6 files changed, 134 insertions, 825 deletions
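The headline change in the diff below: `contour.py` drops its pymultinest scan (the `lnProb` cube wrapper, `CubePrior`, the `Analyzer` post-processing, and the `--run-scan`/`--plot-chains`/`--plot-triangle` options) and absorbs the emcee-based workflow from `contour_emcee.py`, which is deleted. The new flow seeds walkers with `mcmc_utils.flat_seed` or `mcmc_utils.gaussian_seed`, runs `mcmc_utils.mcmc`, stores the chain with `save_chains`, and plots the posterior. As orientation only, here is a minimal standalone sketch of that seed → burn-in → sample → save pattern written directly against the emcee 3 API rather than the repo's wrappers; the parameter ranges, walker counts, and toy likelihood are invented for illustration (the real run keeps a single thread because a GolemFitPy fitter cannot be pickled, per the TODO in the diff).

```python
import numpy as np
import emcee  # assumed available; the repo wraps it in utils/mcmc.py

# Illustrative nuisance-like parameters: simple (low, high) box ranges only.
ranges = np.array([[0.1, 10.0],   # convNorm-like
                   [0.0, 20.0],   # promptNorm-like
                   [0.0, 10.0]])  # muonNorm-like
ndim = len(ranges)
nwalkers, burnin, nsteps = 60, 200, 1000

def ln_prob(theta):
    # Flat prior inside the box, -inf outside; toy Gaussian stand-in for the
    # GolemFit likelihood evaluated in the real ln_prob.
    if np.any(theta < ranges[:, 0]) or np.any(theta > ranges[:, 1]):
        return -np.inf
    return -0.5 * np.sum((theta - ranges.mean(axis=1)) ** 2)

# "UNIFORM" seeding: walkers start flat across the allowed ranges
# (the GAUSSIAN seed type instead draws around the nominal values).
p0 = np.random.uniform(ranges[:, 0], ranges[:, 1], size=(nwalkers, ndim))

sampler = emcee.EnsembleSampler(nwalkers, ndim, ln_prob)
sampler.run_mcmc(p0, burnin + nsteps)
samples = sampler.get_chain(discard=burnin, flat=True)  # (nwalkers*nsteps, ndim)
np.save('untitled_emcee_.npy', samples)  # analogue of mcmc_utils.save_chains
```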
@@ -20,10 +20,10 @@ from utils import fr as fr_utils from utils import gf as gf_utils from utils import llh as llh_utils from utils import misc as misc_utils -from utils import mn as mn_utils +from utils import mcmc as mcmc_utils from utils import plot as plot_utils from utils.enums import str_enum -from utils.enums import DataType, Likelihood, ParamTag, PriorsCateg +from utils.enums import DataType, Likelihood, MCMCSeedType, ParamTag, PriorsCateg from utils.param import Param, ParamSet, get_paramsets from pymultinest import Analyzer, run @@ -35,11 +35,19 @@ def define_nuisance(): tag = ParamTag.NUISANCE lg_prior = PriorsCateg.LIMITEDGAUSS nuisance.extend([ - Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0.1, 10.], std=0.4, prior=lg_prior, tag=tag), - Param(name='promptNorm', value=0., seed=[0. , 6. ], ranges=[0. , 20.], std=2.4, prior=lg_prior, tag=tag), - Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0. , 10.], std=0.1, tag=tag), - Param(name='astroNorm', value=6.9, seed=[0., 5. ], ranges=[0. , 20.], std=1.5, tag=tag), - Param(name='astroDeltaGamma', value=2.5, seed=[2.4, 3. ], ranges=[-5., 5. ], std=0.1, tag=tag) + # Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0.1, 10.], std=0.4, prior=lg_prior, tag=tag), + # Param(name='promptNorm', value=0., seed=[0., 6. ], ranges=[0., 20.], std=2.4, prior=lg_prior, tag=tag), + Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0.1, 10.], std=0.4, tag=tag), + Param(name='promptNorm', value=0., seed=[0., 6. ], ranges=[0., 20.], std=2.4, tag=tag), + Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0., 10.], std=0.1, tag=tag), + Param(name='astroNorm', value=6.9, seed=[0., 5. ], ranges=[0., 20.], std=1.5, tag=tag), + Param(name='astroDeltaGamma', value=2.5, seed=[2.4, 3. ], ranges=[-5., 5. ], std=0.1, tag=tag), + Param(name='CRDeltaGamma', value=0., seed=[-0.1, 0.1 ], ranges=[-1., 1. ], std=0.1, tag=tag), + Param(name='NeutrinoAntineutrinoRatio', value=1., seed=[0.8, 1.2 ], ranges=[0., 2. ], std=0.1, tag=tag), + Param(name='anisotropyScale', value=1., seed=[0.8, 1.2 ], ranges=[0., 2. ], std=0.1, tag=tag), + Param(name='domEfficiency', value=0.99, seed=[0.8, 1.2 ], ranges=[0.8, 1.2 ], std=0.1, tag=tag), + Param(name='holeiceForward', value=0., seed=[-0.8, 0.8 ], ranges=[-4.42, 1.58 ], std=0.1, tag=tag), + Param(name='piKRatio', value=1.0, seed=[0.8, 1.2 ], ranges=[0., 2. 
], std=0.1, tag=tag) ]) return ParamSet(nuisance) @@ -54,7 +62,6 @@ def nuisance_argparse(parser): def process_args(args): """Process the input args.""" - args.plot_angles = args.plot_chains if args.likelihood is not Likelihood.GOLEMFIT \ and args.likelihood is not Likelihood.GF_FREQ: raise AssertionError( @@ -76,18 +83,6 @@ def parse_args(args=None): help='Set the central value for the injected flavour ratio at IceCube' ) parser.add_argument( - '--run-scan', type=misc_utils.parse_bool, default='True', - help='Do the scan from scratch' - ) - parser.add_argument( - '--plot-chains', type=misc_utils.parse_bool, default='False', - help='Plot the (joint) posteriors' - ) - parser.add_argument( - '--plot-triangle', type=misc_utils.parse_bool, default='False', - help='Project the posterior contour on the flavour triangle' - ) - parser.add_argument( '--seed', type=misc_utils.seed_parse, default='25', help='Set the random seed value' ) @@ -103,10 +98,9 @@ def parse_args(args=None): gf_utils.gf_argparse(parser) except: pass llh_utils.likelihood_argparse(parser) - mn_utils.mn_argparse(parser) + mcmc_utils.mcmc_argparse(parser) nuisance_argparse(parser) misc_utils.remove_option(parser, 'sigma_ratio') - misc_utils.remove_option(parser, 'mn_output') if args is None: return parser.parse_args() else: return parser.parse_args(args.split()) @@ -163,25 +157,6 @@ def ln_prob(theta, args, hypo_paramset, fitter): ) -def lnProb(cube, ndim, n_params, hypo_paramset, args, fitter): - if ndim != len(hypo_paramset): - raise AssertionError( - 'Length of MultiNest scan paramset is not the same as the input ' - 'params\ncube={0}\nmn_paramset]{1}'.format(cube, hypo_paramset) - ) - pranges = hypo_paramset.ranges - for i in xrange(ndim): - hypo_paramset[i].value = (pranges[i][1]-pranges[i][0])*cube[i] + pranges[i][0] - theta = hypo_paramset.values - llh = ln_prob( - theta = theta, - args = args, - hypo_paramset = hypo_paramset, - fitter = fitter - ) - return llh - - def main(): args = parse_args() process_args(args) @@ -196,99 +171,53 @@ def main(): print '== {0:<25} = {1}'.format('outfile', outfile) n_params = len(hypo_paramset) - prefix = outfile + '_mn_' - misc_utils.make_dir(prefix) + outfile = outfile + '_emcee_' print 'asimov_paramset', asimov_paramset print 'hypo_paramset', hypo_paramset - if args.run_scan: + if args.run_mcmc: fitter = gf_utils.setup_fitter(args, asimov_paramset) - lnProbEval = partial( - lnProb, + ln_prob_eval = partial( + ln_prob, hypo_paramset = hypo_paramset, args = args, fitter = fitter ) - cwd = os.getcwd() - os.chdir(prefix[:-len(os.path.basename(prefix))]) - - print 'Running evidence calculation for {0}'.format(prefix) - run( - LogLikelihood = lnProbEval, - Prior = mn_utils.CubePrior, - n_dims = n_params, - n_live_points = args.mn_live_points, - evidence_tolerance = args.mn_tolerance, - outputfiles_basename = prefix[-len(os.path.basename(prefix)):], - importance_nested_sampling = True, - resume = False, - verbose = True - ) - - os.chdir(cwd) - - # Analyze - analyser = Analyzer( - outputfiles_basename=prefix, n_params=n_params - ) - print analyser - - pranges = hypo_paramset.ranges - - bf = analyser.get_best_fit()['parameters'] - for i in xrange(len(bf)): - bf[i] = (pranges[i][1]-pranges[i][0])*bf[i] + pranges[i][0] - print 'bestfit = ', bf - print 'bestfit log_likelihood', analyser.get_best_fit()['log_likelihood'] - - print - print '{0:50} = {1}'.format('global evidence', analyser.get_stats()['global evidence']) - print - - fig_text = gen_figtext(args, asimov_paramset) - fig_text += 
'\nBestfit LLH = {0}'.format(analyser.get_best_fit()['log_likelihood']) - fig_text += '\nBestfits = ' - for x in bf: fig_text += '{0:.2f} '.format(x) - - if args.plot_chains or args.plot_triangle: - chains = analyser.get_data()[:,2:] - for x in chains: - for i in xrange(len(x)): - x[i] = (pranges[i][1]-pranges[i][0])*x[i] + pranges[i][0] - - if args.plot_chains: - of = outfile[:5]+outfile[5:].replace('data', 'plots')+'_posterior' - plot_utils.chainer_plot( - infile = chains, - outfile = of, - outformat = ['png'], - args = args, - llh_paramset = hypo_paramset, - fig_text = fig_text - ) - print 'Saved plot', of - - if args.plot_triangle: - llh = -0.5 * analyser.get_data()[:,1] - - flavour_angles = chains[:,-2:] - flavour_ratios = np.array( - map(fr_utils.angles_to_fr, flavour_angles) - ) + if args.mcmc_seed_type == MCMCSeedType.UNIFORM: + p0 = mcmc_utils.flat_seed( + hypo_paramset, nwalkers=args.nwalkers + ) + elif args.mcmc_seed_type == MCMCSeedType.GAUSSIAN: + p0 = mcmc_utils.gaussian_seed( + hypo_paramset, nwalkers=args.nwalkers + ) - of = outfile[:5]+outfile[5:].replace('data', 'plots')+'_triangle' - plot_utils.triangle_project( - frs = flavour_ratios, - llh = llh, - outfile = of, - outformat = ['png'], - args = args, - llh_paramset = hypo_paramset, - fig_text = fig_text + samples = mcmc_utils.mcmc( + p0 = p0, + ln_prob = ln_prob_eval, + ndim = n_params, + nwalkers = args.nwalkers, + burnin = args.burnin, + nsteps = args.nsteps, + args = args, + threads = 1 + # TODO(shivesh): broken because you cannot pickle a GolemFitPy object + # threads = misc_utils.thread_factors(args.threads)[0] ) + mcmc_utils.save_chains(samples, outfile) + + of = outfile[:5]+outfile[5:].replace('data', 'plots')+'_posterior' + plot_utils.chainer_plot( + infile = outfile+'.npy', + outfile = of, + outformat = ['png'], + args = args, + llh_paramset = hypo_paramset, + fig_text = gen_figtext(args, hypo_paramset) + ) print "DONE!" diff --git a/contour_emcee.py b/contour_emcee.py deleted file mode 100755 index 5dc3c98..0000000 --- a/contour_emcee.py +++ /dev/null @@ -1,229 +0,0 @@ -#! /usr/bin/env python -# author : S. Mandalia -# s.p.mandalia@qmul.ac.uk -# -# date : November 26, 2018 - -""" -HESE flavour ratio contour -""" - -from __future__ import absolute_import, division - -import os -import argparse -from functools import partial - -import numpy as np - -from utils import fr as fr_utils -from utils import gf as gf_utils -from utils import llh as llh_utils -from utils import misc as misc_utils -from utils import mcmc as mcmc_utils -from utils import plot as plot_utils -from utils.enums import str_enum -from utils.enums import DataType, Likelihood, MCMCSeedType, ParamTag, PriorsCateg -from utils.param import Param, ParamSet, get_paramsets - -from pymultinest import Analyzer, run - - -def define_nuisance(): - """Define the nuisance parameters.""" - nuisance = [] - tag = ParamTag.NUISANCE - lg_prior = PriorsCateg.LIMITEDGAUSS - nuisance.extend([ - # Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0.1, 10.], std=0.4, prior=lg_prior, tag=tag), - # Param(name='promptNorm', value=0., seed=[0., 6. ], ranges=[0., 20.], std=2.4, prior=lg_prior, tag=tag), - Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0.1, 10.], std=0.4, tag=tag), - Param(name='promptNorm', value=0., seed=[0., 6. ], ranges=[0., 20.], std=2.4, tag=tag), - Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0., 10.], std=0.1, tag=tag), - Param(name='astroNorm', value=6.9, seed=[0., 5. 
], ranges=[0., 20.], std=1.5, tag=tag), - Param(name='astroDeltaGamma', value=2.5, seed=[2.4, 3. ], ranges=[-5., 5. ], std=0.1, tag=tag), - Param(name='CRDeltaGamma', value=0., seed=[-0.1, 0.1 ], ranges=[-1., 1. ], std=0.1, tag=tag), - Param(name='NeutrinoAntineutrinoRatio', value=1., seed=[0.8, 1.2 ], ranges=[0., 2. ], std=0.1, tag=tag), - Param(name='anisotropyScale', value=1., seed=[0.8, 1.2 ], ranges=[0., 2. ], std=0.1, tag=tag), - Param(name='domEfficiency', value=0.99, seed=[0.8, 1.2 ], ranges=[0.8, 1.2 ], std=0.1, tag=tag), - Param(name='holeiceForward', value=0., seed=[-0.8, 0.8 ], ranges=[-4.42, 1.58 ], std=0.1, tag=tag), - Param(name='piKRatio', value=1.0, seed=[0.8, 1.2 ], ranges=[0., 2. ], std=0.1, tag=tag) - ]) - return ParamSet(nuisance) - - -def nuisance_argparse(parser): - nuisance = define_nuisance() - for parm in nuisance: - parser.add_argument( - '--'+parm.name, type=float, default=parm.value, - help=parm.name+' to inject' - ) - -def process_args(args): - """Process the input args.""" - if args.likelihood is not Likelihood.GOLEMFIT \ - and args.likelihood is not Likelihood.GF_FREQ: - raise AssertionError( - 'Likelihood method {0} not supported for this ' - 'script!\nChoose either GOLEMFIT or GF_FREQ'.format( - str_enum(args.likelihood) - ) - ) - - -def parse_args(args=None): - """Parse command line arguments""" - parser = argparse.ArgumentParser( - description="BSM flavour ratio analysis", - formatter_class=misc_utils.SortingHelpFormatter, - ) - parser.add_argument( - '--injected-ratio', type=float, nargs=3, default=[1, 1, 1], - help='Set the central value for the injected flavour ratio at IceCube' - ) - parser.add_argument( - '--seed', type=misc_utils.seed_parse, default='25', - help='Set the random seed value' - ) - parser.add_argument( - '--threads', type=misc_utils.thread_type, default='1', - help='Set the number of threads to use (int or "max")' - ) - parser.add_argument( - '--outfile', type=str, default='./untitled', - help='Path to output results' - ) - try: - gf_utils.gf_argparse(parser) - except: pass - llh_utils.likelihood_argparse(parser) - mcmc_utils.mcmc_argparse(parser) - nuisance_argparse(parser) - misc_utils.remove_option(parser, 'sigma_ratio') - if args is None: return parser.parse_args() - else: return parser.parse_args(args.split()) - - -def gen_identifier(args): - f = '_{0}_{1}'.format(*map(str_enum, (args.likelihood, args.data))) - if args.data is not DataType.REAL: - ir1, ir2, ir3 = misc_utils.solve_ratio(args.injected_ratio) - f += '_INJ_{0:03d}_{1:03d}_{2:03d}'.format(ir1, ir2, ir3) - return f - - -def gen_figtext(args, asimov_paramset): - f = '' - if args.data is DataType.REAL: - f += 'IceCube Preliminary' - else: - ir1, ir2, ir3 = misc_utils.solve_ratio(args.injected_ratio) - f += 'Injected ratio = [{0}, {1}, {2}]'.format(ir1, ir2, ir3) - for param in asimov_paramset: - f += '\nInjected {0:20s} = {1:.3f}'.format( - param.name, param.nominal_value - ) - return f - - -def triangle_llh(theta, args, hypo_paramset, fitter): - """Log likelihood function for a given theta.""" - if len(theta) != len(hypo_paramset): - raise AssertionError( - 'Dimensions of scan is not the same as the input ' - 'params\ntheta={0}\nparamset]{1}'.format(theta, hypo_paramset) - ) - for idx, param in enumerate(hypo_paramset): - param.value = theta[idx] - - if args.likelihood is Likelihood.GOLEMFIT: - llh = gf_utils.get_llh(fitter, hypo_paramset) - elif args.likelihood is Likelihood.GF_FREQ: - llh = gf_utils.get_llh_freq(fitter, hypo_paramset) - - return llh - - -def 
ln_prob(theta, args, hypo_paramset, fitter): - lp = llh_utils.lnprior(theta, paramset=hypo_paramset) - if not np.isfinite(lp): - return -np.inf - return lp + triangle_llh( - theta, - args = args, - hypo_paramset = hypo_paramset, - fitter = fitter - ) - - -def main(): - args = parse_args() - process_args(args) - misc_utils.print_args(args) - - if args.seed is not None: - np.random.seed(args.seed) - - asimov_paramset, hypo_paramset = get_paramsets(args, define_nuisance()) - hypo_paramset.extend(asimov_paramset.from_tag(ParamTag.BESTFIT)) - outfile = args.outfile + gen_identifier(args) - print '== {0:<25} = {1}'.format('outfile', outfile) - - n_params = len(hypo_paramset) - outfile = outfile + '_emcee_' - - print 'asimov_paramset', asimov_paramset - print 'hypo_paramset', hypo_paramset - - if args.run_mcmc: - fitter = gf_utils.setup_fitter(args, asimov_paramset) - - ln_prob_eval = partial( - ln_prob, - hypo_paramset = hypo_paramset, - args = args, - fitter = fitter - ) - - if args.mcmc_seed_type == MCMCSeedType.UNIFORM: - p0 = mcmc_utils.flat_seed( - hypo_paramset, nwalkers=args.nwalkers - ) - elif args.mcmc_seed_type == MCMCSeedType.GAUSSIAN: - p0 = mcmc_utils.gaussian_seed( - hypo_paramset, nwalkers=args.nwalkers - ) - - samples = mcmc_utils.mcmc( - p0 = p0, - ln_prob = ln_prob_eval, - ndim = n_params, - nwalkers = args.nwalkers, - burnin = args.burnin, - nsteps = args.nsteps, - args = args, - threads = 1 - # TODO(shivesh): broken because you cannot pickle a GolemFitPy object - # threads = misc_utils.thread_factors(args.threads)[0] - ) - mcmc_utils.save_chains(samples, outfile) - - of = outfile[:5]+outfile[5:].replace('data', 'plots')+'_posterior' - plot_utils.chainer_plot( - infile = outfile+'.npy', - outfile = of, - outformat = ['png'], - args = args, - llh_paramset = hypo_paramset, - fig_text = gen_figtext(args, hypo_paramset) - ) - - print "DONE!" 
- - -main.__doc__ = __doc__ - - -if __name__ == '__main__': - main() diff --git a/plot_sens.py b/plot_sens.py index b68cb5d..0eef55e 100755 --- a/plot_sens.py +++ b/plot_sens.py @@ -20,10 +20,10 @@ import numpy.ma as ma from utils import fr as fr_utils from utils import llh as llh_utils -from utils import misc as misc_utils from utils import plot as plot_utils -from utils.enums import EnergyDependance, Likelihood, MixingScenario, ParamTag -from utils.enums import PriorsCateg, SensitivityCateg, StatCateg +from utils.enums import DataType, Texture +from utils.misc import enum_parse, parse_bool, parse_enum, print_args +from utils.misc import gen_identifier, SortingHelpFormatter from utils.param import Param, ParamSet @@ -32,14 +32,22 @@ def process_args(args): if args.data is not DataType.REAL: args.injected_ratio = fr_utils.normalise_fr(args.injected_ratio) - if len(args.source_ratios) % 3 != 0: - raise ValueError( - 'Invalid source ratios input {0}'.format(args.source_ratios) - ) + if args.source_ratios is not None: + if args.x_segments is not None: + raise ValueError('Cannot do both --source-ratios and --x-segments') + if len(args.source_ratios) % 3 != 0: + raise ValueError( + 'Invalid source ratios input {0}'.format(args.source_ratios) + ) - srs = [args.source_ratios[3*x:3*x+3] - for x in range(int(len(args.source_ratios)/3))] - args.source_ratios = map(fr_utils.normalise_fr, srs) + srs = [args.source_ratios[3*x:3*x+3] + for x in range(int(len(args.source_ratios)/3))] + args.source_ratios = map(fr_utils.normalise_fr, srs) + elif args.x_segments is not None: + x_array = np.linspace(0, 1, args.x_segments) + args.source_ratios = [[x, 1-x, 0] for x in x_array] + else: + raise ValueError('Must supply either --source-ratios or --x-segments') args.dimensions = np.sort(args.dimensions) @@ -48,7 +56,7 @@ def parse_args(args=None): """Parse command line arguments""" parser = argparse.ArgumentParser( description="HESE BSM flavour ratio analysis plotting script", - formatter_class=misc_utils.SortingHelpFormatter, + formatter_class=SortingHelpFormatter, ) parser.add_argument( '--datadir', type=str, @@ -60,7 +68,7 @@ def parse_args(args=None): help='Number of new physics scales to evaluate' ) parser.add_argument( - '--split-jobs', type=misc_utils.parse_bool, default='True', + '--split-jobs', type=parse_bool, default='True', help='Did the jobs get split' ) parser.add_argument( @@ -68,8 +76,12 @@ def parse_args(args=None): help='Set the new physics dimensions to consider' ) parser.add_argument( - '--source-ratios', type=int, nargs='*', default=[1, 2, 0], - help='Set the source flavour ratios' + '--source-ratios', type=int, nargs='*', default=None, + required=False, help='Set the source flavour ratios' + ) + parser.add_argument( + '--x-segments', type=int, default=None, + required=False, help='Number of segments in x' ) parser.add_argument( '--texture', type=partial(enum_parse, c=Texture), @@ -80,18 +92,18 @@ def parse_args(args=None): choices=DataType, help='select datatype' ) parser.add_argument( - '--plot-x', type=misc_utils.parse_bool, default='True', + '--plot-x', type=parse_bool, default='False', help='Make sensitivity plot x vs limit' ) parser.add_argument( - '--plot-table', type=misc_utils.parse_bool, default='True', + '--plot-table', type=parse_bool, default='False', help='Make sensitivity table plot' ) parser.add_argument( - '--plot-statistic', type=misc_utils.parse_bool, default='False', + '--plot-statistic', type=parse_bool, default='False', help='Plot MultiNest evidence or LLH value' ) - 
llh_utils.likelihood_argparse(parser) + llh_utils.llh_argparse(parser) if args is None: return parser.parse_args() else: return parser.parse_args(args.split()) @@ -99,7 +111,7 @@ def parse_args(args=None): def main(): args = parse_args() process_args(args) - misc_utils.print_args(args) + print_args(args) dims = len(args.dimensions) srcs = len(args.source_ratios) @@ -116,10 +128,11 @@ def main(): statistic_arr = np.full((dims, srcs, texs, args.segments, 2), np.nan) print 'Loading data' + argsc = deepcopy(args) for idim, dim in enumerate(args.dimensions): - argsc = deepcopy(args) argsc.dimension = dim + datadir = args.datadir + '/DIM{0}'.format(dim) # Array of scales to scan over. boundaries = fr_utils.SCALE_BOUNDARIES[argsc.dimension] eval_scales = np.linspace( @@ -130,12 +143,11 @@ def main(): for isrc, src in enumerate(args.source_ratios): argsc.source_ratio = src for itex, texture in enumerate(textures): - argc.texture = texture + argsc.texture = texture - base_infile = args.datadir + '/{0}/{1}/{2}/fr_stat'.format( - *map(misc_utils.parse_enum, [args.stat_method, args.data]), - prefix - ) + misc_utils.gen_identifier(argsc) + base_infile = datadir + '/{0}/{1}/'.format( + *map(parse_enum, [args.stat_method, args.data]) + ) + r'{0}/fr_stat'.format(prefix) + gen_identifier(argsc) print '== {0:<25} = {1}'.format('base_infile', base_infile) @@ -147,12 +159,13 @@ def main(): try: print 'Loading from {0}'.format(infile+'.npy') statistic_arr[idim][isrc][itex][idx_sc] = \ - np.load(infile+'.npy')[:,0] + np.load(infile+'.npy')[0] except: print 'Unable to load file {0}'.format( infile+'.npy' ) - continue + raise + # continue else: print 'Loading from {0}'.format(base_infile+'.npy') try: @@ -165,7 +178,6 @@ def main(): continue data = ma.masked_invalid(statistic_arr) - argsc = deepcopy(args) print 'data', data if args.plot_statistic: @@ -184,12 +196,11 @@ def main(): for isrc, src in enumerate(args.source_ratios): argsc.source_ratio = src for itex, texture in enumerate(textures): - argc.texture = texture + argsc.texture = texture - base_infile = args.datadir + '/{0}/{1}/{2}/fr_stat'.format( - *map(misc_utils.parse_enum, [args.stat_method, args.data]), - prefix - ) + misc_utils.gen_identifier(argsc) + base_infile = args.datadir + '/DIM{0}/{1}/{2}/'.format( + dim, *map(parse_enum, [args.stat_method, args.data]) + ) + r'{0}/fr_stat'.format(prefix) + gen_identifier(argsc) basename = os.path.dirname(base_infile) outfile = basename[:5]+basename[5:].replace('data', 'plots') outfile += '/' + os.path.basename(base_infile) @@ -205,10 +216,11 @@ def main(): ) basename = args.datadir[:5]+args.datadir[5:].replace('data', 'plots') - baseoutfile = basename + '/{0}/{1}/{2}/'.format( - *map(misc_utils.parse_enum, [args.stat_method, args.data]), prefix - ) + baseoutfile = basename + '/{0}/{1}/'.format( + *map(parse_enum, [args.stat_method, args.data]) + ) + r'{0}/'.format(prefix) + argsc = deepcopy(args) if args.plot_x: for idim, dim in enumerate(args.dimensions): argsc.dimension = dim diff --git a/plot_sens_sourcescan.py b/plot_sens_sourcescan.py deleted file mode 100755 index 130817d..0000000 --- a/plot_sens_sourcescan.py +++ /dev/null @@ -1,411 +0,0 @@ -#! /usr/bin/env python -# author : S. 
Mandalia -# s.p.mandalia@qmul.ac.uk -# -# date : April 28, 2018 - -""" -HESE BSM flavour ratio analysis plotting script -""" - -from __future__ import absolute_import, division - -import os -import argparse -from functools import partial -from copy import deepcopy - -import numpy as np -import numpy.ma as ma - -from utils import fr as fr_utils -from utils import gf as gf_utils -from utils import llh as llh_utils -from utils import misc as misc_utils -from utils import plot as plot_utils -from utils.enums import EnergyDependance, Likelihood, MixingScenario, ParamTag -from utils.enums import PriorsCateg, SensitivityCateg, StatCateg -from utils.param import Param, ParamSet, get_paramsets - -from utils import mn as mn_utils - - -def define_nuisance(): - """Define the nuisance parameters.""" - tag = ParamTag.SM_ANGLES - g_prior = PriorsCateg.GAUSSIAN - lg_prior = PriorsCateg.LIMITEDGAUSS - e = 1e-9 - nuisance = [ - Param(name='s_12_2', value=0.307, seed=[0.26, 0.35], ranges=[0., 1.], std=0.013, tex=r's_{12}^2', prior=lg_prior, tag=tag), - Param(name='c_13_4', value=(1-(0.02206))**2, seed=[0.950, 0.961], ranges=[0., 1.], std=0.00147, tex=r'c_{13}^4', prior=lg_prior, tag=tag), - Param(name='s_23_2', value=0.538, seed=[0.31, 0.75], ranges=[0., 1.], std=0.069, tex=r's_{23}^2', prior=lg_prior, tag=tag), - Param(name='dcp', value=4.08404, seed=[0+e, 2*np.pi-e], ranges=[0., 2*np.pi], std=2.0, tex=r'\delta_{CP}', tag=tag), - Param( - name='m21_2', value=7.40E-23, seed=[7.2E-23, 7.6E-23], ranges=[6.80E-23, 8.02E-23], - std=2.1E-24, tex=r'\Delta m_{21}^2{\rm GeV}^{-2}', prior=g_prior, tag=tag - ), - Param( - name='m3x_2', value=2.494E-21, seed=[2.46E-21, 2.53E-21], ranges=[2.399E-21, 2.593E-21], - std=3.3E-23, tex=r'\Delta m_{3x}^2{\rm GeV}^{-2}', prior=g_prior, tag=tag - ) - ] - tag = ParamTag.NUISANCE - nuisance.extend([ - Param(name='convNorm', value=1., seed=[0.5, 2. ], ranges=[0.1, 10.], std=0.4, prior=lg_prior, tag=tag), - Param(name='promptNorm', value=0., seed=[0. , 6. ], ranges=[0. , 20.], std=2.4, prior=lg_prior, tag=tag), - Param(name='muonNorm', value=1., seed=[0.1, 2. ], ranges=[0. , 10.], std=0.1, tag=tag), - Param(name='astroNorm', value=6.9, seed=[0., 5. ], ranges=[0. , 20.], std=1.5, tag=tag), - Param(name='astroDeltaGamma', value=2.5, seed=[2.4, 3. ], ranges=[-5., 5. 
], std=0.1, tag=tag) - ]) - return ParamSet(nuisance) - - -def nuisance_argparse(parser): - nuisance = define_nuisance() - for parm in nuisance: - parser.add_argument( - '--'+parm.name, type=float, default=parm.value, - help=parm.name+' to inject' - ) - - -def process_args(args): - """Process the input args.""" - if args.fix_mixing is not MixingScenario.NONE and args.fix_scale: - raise NotImplementedError('Fixed mixing and scale not implemented') - if args.fix_mixing is not MixingScenario.NONE and args.fix_mixing_almost: - raise NotImplementedError( - '--fix-mixing and --fix-mixing-almost cannot be used together' - ) - if args.fix_scale: - raise NotImplementedError( - '--fix-scale not implemented' - ) - - args.measured_ratio = fr_utils.normalise_fr(args.measured_ratio) - # if args.fix_source_ratio: - # assert len(args.source_ratios) % 3 == 0 - # srs = [args.source_ratios[3*x:3*x+3] - # for x in range(int(len(args.source_ratios)/3))] - # args.source_ratios = map(fr_utils.normalise_fr, srs) - - if args.energy_dependance is EnergyDependance.SPECTRAL: - args.binning = np.logspace( - np.log10(args.binning[0]), np.log10(args.binning[1]), args.binning[2]+1 - ) - - if args.split_jobs and args.run_method is SensitivityCateg.FULL: - raise NotImplementedError( - 'split_jobs and run_method not implemented' - ) - - args.dimensions = np.sort(args.dimensions) - - args_copy = deepcopy(args) - scale_regions = [] - for dim in args.dimensions: - args_copy.dimension = dim - _, scale_region = fr_utils.estimate_scale(args_copy) - scale_regions.append(scale_region) - args.scale_region = [np.min(scale_regions), np.max(scale_regions)] - args.scale = np.power(10., np.average(np.log10(args.scale_region))) - - -def parse_args(args=None): - """Parse command line arguments""" - parser = argparse.ArgumentParser( - description="HESE BSM flavour ratio analysis plotting script", - formatter_class=misc_utils.SortingHelpFormatter, - ) - parser.add_argument( - '--infile', type=str, default='./untitled', - help='Path to input dir' - ) - parser.add_argument( - '--run-method', default='full', - type=partial(misc_utils.enum_parse, c=SensitivityCateg), - choices=SensitivityCateg, - help='Choose which type of sensivity plot to make' - ) - parser.add_argument( - '--stat-method', default='bayesian', - type=partial(misc_utils.enum_parse, c=StatCateg), choices=StatCateg, - help='Statistical method to employ' - ) - parser.add_argument( - '--sens-bins', type=int, default=10, - help='Number of bins for the Bayes factor plot' - ) - parser.add_argument( - '--split-jobs', type=misc_utils.parse_bool, default='False', - help='Did the jobs get split' - ) - parser.add_argument( - '--plot', type=misc_utils.parse_bool, default='True', - help='Make sensitivity plots' - ) - parser.add_argument( - '--plot-statistic', type=misc_utils.parse_bool, default='False', - help='Plot MultiNest evidence or LLH value' - ) - fr_utils.fr_argparse(parser) - gf_utils.gf_argparse(parser) - llh_utils.likelihood_argparse(parser) - mn_utils.mn_argparse(parser) - nuisance_argparse(parser) - misc_utils.remove_option(parser, 'dimension') - misc_utils.remove_option(parser, 'source_ratio') - misc_utils.remove_option(parser, 'scale') - misc_utils.remove_option(parser, 'scale_region') - parser.add_argument( - '--dimensions', type=int, nargs='*', default=[3, 6], - help='Set the new physics dimensions to consider' - ) - parser.add_argument( - '--source-bins', type=int, default=5, - help='Binning in source flavour space' - ) - if args is None: return parser.parse_args() - else: 
return parser.parse_args(args.split()) - - -def main(): - args = parse_args() - process_args(args) - args.scale = 0 - misc_utils.print_args(args) - - asimov_paramset, llh_paramset = get_paramsets(args, define_nuisance()) - - scale = llh_paramset.from_tag(ParamTag.SCALE)[0] - mmangles = llh_paramset.from_tag(ParamTag.MMANGLES) - if args.run_method is SensitivityCateg.FULL: - st_paramset_arr = [llh_paramset.from_tag(ParamTag.SCALE, invert=True)] - elif args.run_method in [SensitivityCateg.FIXED_ANGLE, SensitivityCateg.CORR_ANGLE]: - nscale_pmset = llh_paramset.from_tag([ParamTag.SCALE, ParamTag.MMANGLES], invert=True) - st_paramset_arr = [nscale_pmset] * 3 - elif args.run_method in [SensitivityCateg.FIXED_ONE_ANGLE, SensitivityCateg.CORR_ONE_ANGLE]: - nscale_pmset = llh_paramset.from_tag(ParamTag.SCALE, invert=True) - st_paramset_arr = [] - for x in xrange(3): - st_paramset_arr.append( - ParamSet([prms for prms in nscale_pmset - if mmangles[x].name != prms.name]) - ) - - corr_angles_categ = [SensitivityCateg.CORR_ANGLE, SensitivityCateg.CORR_ONE_ANGLE] - fixed_angle_categ = [SensitivityCateg.FIXED_ANGLE, SensitivityCateg.FIXED_ONE_ANGLE] - - if args.run_method in corr_angles_categ: - scan_angles = np.linspace(0+1e-9, 1-1e-9, args.sens_bins) - else: scan_angles = np.array([0]) - print 'scan_angles', scan_angles - - dims = len(args.dimensions) - binning = np.linspace(0, 1, args.source_bins) - grid = np.dstack(np.meshgrid(binning, binning)).reshape( - args.source_bins*args.source_bins, 2 - ) - source_ratios = [] - for x in grid: - if x[0]+x[1] > 1: - continue - source_ratios.append([x[0], x[1], 1-x[0]-x[1]]) - args.source_ratios = source_ratios - n_source_ratios = map(fr_utils.normalise_fr, source_ratios) - - srcs = len(n_source_ratios) - if args.run_method is SensitivityCateg.FULL: - statistic_arr = np.full((dims, srcs, args.sens_bins, 2), np.nan) - elif args.run_method in fixed_angle_categ: - statistic_arr = np.full((dims, srcs, len(st_paramset_arr), args.sens_bins, 2), np.nan) - elif args.run_method in corr_angles_categ: - statistic_arr = np.full( - (dims, srcs, len(st_paramset_arr), args.sens_bins, args.sens_bins, 3), np.nan - ) - - print 'Loading data' - for idim, dim in enumerate(args.dimensions): - argsc = deepcopy(args) - argsc.dimension = dim - _, scale_region = fr_utils.estimate_scale(argsc) - argsc.scale_region = scale_region - scan_scales = np.linspace( - np.log10(scale_region[0]), np.log10(scale_region[1]), args.sens_bins - ) - scan_scales = np.concatenate([[-100.], scan_scales]) - - for isrc, src in enumerate(n_source_ratios): - argsc.source_ratio = src - infile = args.infile - if args.likelihood is Likelihood.GOLEMFIT: - infile += '/golemfit/' - elif args.likelihood is Likelihood.GAUSSIAN: - infile += '/gaussian/' - if args.likelihood is Likelihood.GAUSSIAN: - infile += '{0}/'.format(str(args.sigma_ratio).replace('.', '_')) - # infile += '/DIM{0}/fix_ifr/{1}/{2}/{3}/fr_stat'.format( - infile += '/DIM{0}/fix_ifr/sourcescan/{1}/{2}/{3}/fr_stat'.format( - # infile += '/DIM{0}/fix_ifr/prior/{1}/{2}/{3}/fr_stat'.format( - # infile += '/DIM{0}/fix_ifr/{1}/{2}/{3}/old/fr_stat'.format( - # infile += '/DIM{0}/fix_ifr/seed2/{1}/{2}/{3}/fr_stat'.format( - # infile += '/DIM{0}/fix_ifr/100TeV/{1}/{2}/{3}/fr_stat'.format( - # infile += '/DIM{0}/fix_ifr/strictprior/{1}/{2}/{3}/fr_stat'.format( - # infile += '/DIM{0}/fix_ifr/noprior/{1}/{2}/{3}/fr_stat'.format( - dim, *map(misc_utils.parse_enum, [args.stat_method, args.run_method, args.data]) - ) + misc_utils.gen_identifier(argsc) - print '== 
{0:<25} = {1}'.format('infile', infile) - - if args.split_jobs: - for idx_an, an in enumerate(scan_angles): - for idx_sc, sc in enumerate(scan_scales): - filename = infile + '_scale_{0:.0E}'.format(np.power(10, sc)) - try: - if args.run_method in fixed_angle_categ: - print 'Loading from {0}'.format(filename+'.npy') - statistic_arr[idim][isrc][:,idx_sc] = np.load(filename+'.npy')[:,0] - if args.run_method in corr_angles_categ: - filename += '_angle_{0:<04.2}'.format(an) - print 'Loading from {0}'.format(filename+'.npy') - statistic_arr[idim][isrc][:,idx_an,idx_sc] = np.load(filename+'.npy')[:,0,0] - except: - print 'Unable to load file {0}'.format(filename+'.npy') - continue - else: - print 'Loading from {0}'.format(infile+'.npy') - try: - statistic_arr[idim][isrc] = np.load(infile+'.npy') - except: - print 'Unable to load file {0}'.format(infile+'.npy') - continue - - data = ma.masked_invalid(statistic_arr) - - print 'data', data - print 'data.shape', data.shape - if args.plot_statistic: - assert 0 - print 'Plotting statistic' - - argsc = deepcopy(args) - for idim, dim in enumerate(args.dimensions): - argsc.dimension = dim - _, scale_region = fr_utils.estimate_scale(argsc) - argsc.scale_region = scale_region - base_infile = args.infile - if args.likelihood is Likelihood.GOLEMFIT: - base_infile += '/golemfit/' - elif args.likelihood is Likelihood.GAUSSIAN: - base_infile += '/gaussian/' - if args.likelihood is Likelihood.GAUSSIAN: - base_infile += '{0}/'.format(str(args.sigma_ratio).replace('.', '_')) - # base_infile += '/DIM{0}/fix_ifr'.format(dim) - base_infile += '/DIM{0}/fix_ifr/sourcescan'.format(dim) - # base_infile += '/DIM{0}/fix_ifr/prior'.format(dim) - # base_infile += '/DIM{0}/fix_ifr/seed2'.format(dim) - # base_infile += '/DIM{0}/fix_ifr/100TeV'.format(dim) - # base_infile += '/DIM{0}/fix_ifr/strictprior'.format(dim) - # base_infile += '/DIM{0}/fix_ifr/noprior'.format(dim) - - for isrc, src in enumerate(n_source_ratios): - argsc.source_ratio = src - infile = base_infile +'/{0}/{1}/{2}/fr_stat'.format( - # infile = base_infile +'/{0}/{1}/{2}/old/fr_stat'.format( - *map(misc_utils.parse_enum, [args.stat_method, args.run_method, args.data]) - ) + misc_utils.gen_identifier(argsc) - basename = os.path.dirname(infile) - baseoutfile = basename[:5]+basename[5:].replace('data', 'plots') - baseoutfile += '/' + os.path.basename(infile) - if args.run_method is SensitivityCateg.FULL: - outfile = baseoutfile - plot_utils.plot_statistic( - data = data[idim][isrc], - outfile = outfile, - outformat = ['png'], - args = argsc, - scale_param = scale, - ) - if args.run_method in fixed_angle_categ: - for idx_scen in xrange(len(st_paramset_arr)): - print '|||| SCENARIO = {0}'.format(idx_scen) - outfile = baseoutfile + '_SCEN{0}'.format(idx_scen) - if idx_scen == 0: label = r'$\mathcal{O}_{12}=\pi/4$' - elif idx_scen == 1: label = r'$\mathcal{O}_{13}=\pi/4$' - elif idx_scen == 2: label = r'$\mathcal{O}_{23}=\pi/4$' - plot_utils.plot_statistic( - data = data[idim][isrc][idx_scen], - outfile = outfile, - outformat = ['png'], - args = argsc, - scale_param = scale, - label = label - ) - elif args.run_method in corr_angles_categ: - for idx_scen in xrange(len(st_paramset_arr)): - print '|||| SCENARIO = {0}'.format(idx_scen) - basescenoutfile = baseoutfile + '_SCEN{0}'.format(idx_scen) - if idx_scen == 0: label = r'$\mathcal{O}_{12}=' - elif idx_scen == 1: label = r'$\mathcal{O}_{13}=' - elif idx_scen == 2: label = r'$\mathcal{O}_{23}=' - for idx_an, an in enumerate(scan_angles): - print '|||| ANGLE = 
{0:<04.2}'.format(float(an)) - outfile = basescenoutfile + '_ANGLE{0}'.format(idx_an) - _label = label + r'{0:<04.2}$'.format(an) - plot_utils.plot_statistic( - data = data[idim][isrc][idx_scen][idx_an][:,1:], - outfile = outfile, - outformat = ['png'], - args = argsc, - scale_param = scale, - label = _label - ) - - if args.plot: - print 'Plotting sensitivities' - - basename = args.infile[:5]+args.infile[5:].replace('data', 'plots') - baseoutfile = basename + '/{0}/{1}/{2}/'.format( - *map(misc_utils.parse_enum, [args.likelihood, args.stat_method, args.data]) - ) - - if args.run_method is SensitivityCateg.FULL: - plot_utils.plot_sens_full( - data = data, - outfile = baseoutfile + '/FULL', - outformat = ['png', 'pdf'], - args = args, - ) - elif args.run_method in fixed_angle_categ: - # plot_utils.plot_sens_fixed_angle_pretty( - # data = data, - # outfile = baseoutfile + '/fixed_angle_pretty_substantial', - # outformat = ['png', 'pdf'], - # args = args, - # ) - # plot_utils.plot_sens_fixed_angle( - # data = data, - # outfile = baseoutfile + '/FIXED_ANGLE', - # outformat = ['png'], - # args = args, - # ) - plot_utils.plot_source_ternary_1D( - data = data, - outfile = baseoutfile + '/source_ternary', - outformat = ['png'], - args = args, - ) - elif args.run_method in corr_angles_categ: - plot_utils.plot_sens_corr_angle( - data = data, - outfile = baseoutfile + '/CORR_ANGLE', - outformat = ['png', 'pdf'], - args = args, - ) - - -main.__doc__ = __doc__ - - -if __name__ == '__main__': - main() diff --git a/utils/misc.py b/utils/misc.py index abef78a..e5fedb9 100644 --- a/utils/misc.py +++ b/utils/misc.py @@ -33,7 +33,8 @@ class SortingHelpFormatter(argparse.HelpFormatter): def solve_ratio(fr): denominator = reduce(gcd, fr) f = [int(x/denominator) for x in fr] - if f[0] > 1E3 or f[1] > 1E3 or f[2] > 1E3: + allow = (1, 2, 0) + if f[0] not in allow or f[1] not in allow or f[2] not in allow: return '{0:.2f}_{1:.2f}_{2:.2f}'.format(fr[0], fr[1], fr[2]) else: return '{0}_{1}_{2}'.format(f[0], f[1], f[2]) @@ -162,10 +163,10 @@ def centers(x): def get_units(dimension): if dimension == 3: return r' / \:{\rm GeV}' if dimension == 4: return r'' - if dimension == 5: return r' / \:{rm GeV}^{-1}' - if dimension == 6: return r' / \:{rm GeV}^{-2}' - if dimension == 7: return r' / \:{rm GeV}^{-3}' - if dimension == 8: return r' / \:{rm GeV}^{-4}' + if dimension == 5: return r' / \:{\rm GeV}^{-1}' + if dimension == 6: return r' / \:{\rm GeV}^{-2}' + if dimension == 7: return r' / \:{\rm GeV}^{-3}' + if dimension == 8: return r' / \:{\rm GeV}^{-4}' def calc_nbins(x): diff --git a/utils/plot.py b/utils/plot.py index 006cbef..4f9d961 100644 --- a/utils/plot.py +++ b/utils/plot.py @@ -36,10 +36,10 @@ from ternary.heatmapping import polygon_generator import shapely.geometry as geometry -from utils import misc as misc_utils -from utils.enums import DataType, EnergyDependance, str_enum +from utils.enums import DataType, str_enum from utils.enums import Likelihood, ParamTag, StatCateg, Texture -from utils.fr import angles_to_u, angles_to_fr +from utils.misc import get_units, make_dir, solve_ratio +from utils.fr import angles_to_u, angles_to_fr, SCALE_BOUNDARIES BAYES_K = 1. # Substantial degree of belief. 
@@ -115,7 +115,7 @@ def chainer_plot(infile, outfile, outformat, args, llh_paramset, fig_text=None): print 'raw.shape', raw.shape print 'raw', raw - misc_utils.make_dir(outfile) + make_dir(outfile), make_dir if fig_text is None: fig_text = gen_figtext(args) @@ -277,7 +277,7 @@ def plot_statistic(data, outfile, outformat, args, scale_param, label=None): ) at.patch.set_boxstyle("round,pad=0.1,rounding_size=0.5") ax.add_artist(at) - misc_utils.make_dir(outfile) + make_dir(outfile) for of in outformat: print 'Saving as {0}'.format(outfile+'.'+of) fig.savefig(outfile+'.'+of, bbox_inches='tight', dpi=150) @@ -322,12 +322,12 @@ def plot_sens_full(data, outfile, outformat, args): al = scales[reduced_ev > 2.71] # 90% CL for 1 DOF via Wilks if len(al) == 0: print 'No points for DIM {0} FRS {1} NULL {2}!'.format( - dim, misc_utils.solve_ratio(src), null + dim, solve_ratio(src), null ) print 'Reduced EV {0}'.format(reduced_ev) continue lim = al[0] - label = '[{0}, {1}, {2}]'.format(*misc_utils.solve_ratio(src)) + label = '[{0}, {1}, {2}]'.format(*solve_ratio(src)) if lim < yranges[0]: yranges[0] = lim if lim > yranges[1]: yranges[1] = lim+4 line = plt.Line2D( @@ -352,7 +352,7 @@ def plot_sens_full(data, outfile, outformat, args): for xmaj in ax.xaxis.get_majorticklocs(): ax.axvline(x=xmaj, ls=':', color='gray', alpha=0.4, linewidth=1) - misc_utils.make_dir(outfile) + make_dir(outfile) for of in outformat: print 'Saving plot as {0}'.format(outfile+'.'+of) fig.savefig(outfile+'.'+of, bbox_inches='tight', dpi=150) @@ -496,7 +496,7 @@ def plot_table_sens(data, outfile, outformat, args): if isrc not in legend_log: legend_log.append(isrc) - label = '{0} at source'.format(misc_utils.solve_ratio(src)) + label = '{0} at source'.format(solve_ratio(src)) legend_elements.append( Patch(facecolor=rgb_co[isrc]+[0.3], edgecolor=rgb_co[isrc]+[1], label=label) @@ -544,7 +544,7 @@ def plot_table_sens(data, outfile, outformat, args): fig.text(0.278, ybound, r'\bf IceCube Simulation', color='red', fontsize=13, ha='center', va='center', zorder=11) - misc_utils.make_dir(outfile) + make_dir(outfile) for of in outformat: print 'Saving plot as {0}'.format(outfile+'.'+of) fig.savefig(outfile+'.'+of, bbox_inches='tight', dpi=150) @@ -616,7 +616,7 @@ def plot_sens_fixed_angle(data, outfile, outformat, args): arr_len = 1.5 lim = al[0] print 'limit = {0}'.format(lim) - label = '{0} : {1} : {2}'.format(*misc_utils.solve_ratio(src)) + label = '{0} : {1} : {2}'.format(*solve_ratio(src)) # if lim < yranges[0]: yranges[0] = lim-arr_len # if lim > yranges[1]: yranges[1] = lim+arr_len+2 # if lim > yranges[1]: yranges[1] = lim @@ -680,7 +680,7 @@ def plot_sens_fixed_angle(data, outfile, outformat, args): ax.axvline(x=xmaj, ls=':', color='gray', alpha=0.2, linewidth=1) out = outfile + '_DIM{0}'.format(dim) - misc_utils.make_dir(out) + make_dir(out) for of in outformat: print 'Saving plot as {0}'.format(out+'.'+of) fig.savefig(out+'.'+of, bbox_inches='tight', dpi=150) @@ -824,7 +824,7 @@ def plot_sens_corr_angle(data, outfile, outformat, args): at.patch.set_boxstyle("round,pad=0.1,rounding_size=0.5") ax.add_artist(at) out = outfile + '_DIM{0}_SRC{1}_AN{2}'.format(dim, isrc, ian) - misc_utils.make_dir(out) + make_dir(out) for of in outformat: print 'Saving plot as {0}'.format(out+'.'+of) fig.savefig(out+'.'+of, bbox_inches='tight', dpi=150) @@ -934,7 +934,7 @@ def triangle_project(frs, llh, outfile, outformat, args, llh_paramset, fig_text) cb.set_label(r'$LLH$', fontsize=fontsize+5, labelpad=20, horizontalalignment='center') - 
misc_utils.make_dir(outfile) + make_dir(outfile) for of in outformat: print 'Saving plot as {0}'.format(outfile+'.'+of) fig.savefig(outfile+'.'+of, bbox_inches='tight', dpi=150) @@ -1088,7 +1088,7 @@ def plot_source_ternary(data, outfile, outformat, args): print 'vertices', heatmap(interp_dict, nsrcs) print tax.heatmap(interp_dict, scale=nsrcs, vmin=-60, vmax=-30) - misc_utils.make_dir(outfile) + make_dir(outfile) for of in outformat: print 'Saving plot as {0}'.format(outfile+'_SCEN{0}.'.format(isce)+of) fig.savefig(outfile+'_SCEN{0}.'.format(isce)+of, bbox_inches='tight', dpi=150) @@ -1102,7 +1102,7 @@ def plot_x(data, outfile, outformat, args): print 'Making X sensitivity plot' dims = args.dimensions srcs = args.source_ratios - x_arr = [i[0] for i in srcs] + x_arr = np.array([i[0] for i in srcs]) if args.texture is Texture.NONE: textures = [Texture.OEU, Texture.OET, Texture.OUT] else: @@ -1122,15 +1122,18 @@ def plot_x(data, outfile, outformat, args): for idim, dim in enumerate(dims): print '|||| DIM = {0}, {1}'.format(idim, dim) + boundaries = SCALE_BOUNDARIES[dim] + fig = plt.figure(figsize=(4, 4)) ax = fig.add_subplot(111) ax.tick_params(axis='x', labelsize=12) ax.tick_params(axis='y', labelsize=12) ax.set_xlabel(r'$x$', fontsize=18) - ax.set_ylabel(r'${\rm New\:Physics\:Scale}\:[\:{\rm log}_{10} (\Lambda_{d='+str(dim)+r'}^{-1}\:/\:'+get_units(args.dimension)+r')\: ]$', fontsize=18) + ax.set_ylabel(r'${\rm New\:Physics\:Scale}\:[\:{\rm log}_{10} (\Lambda_{d='+str(dim)+r'}^{-1}\:'+get_units(args.dimension)+r')\: ]$', fontsize=12) ax.set_xlim(0, 1) - for itex, tex in enumerate(texture): - print '|||| TEX = {0}'.format(texture) + ax.set_ylim(boundaries) + for itex, tex in enumerate(textures): + print '|||| TEX = {0}'.format(tex) lims = np.full(len(srcs), np.nan) for isrc, src in enumerate(srcs): x = src[0] @@ -1167,14 +1170,18 @@ def plot_x(data, outfile, outformat, args): lims[isrc] = lim lims = ma.masked_invalid(lims) - print 'lims', lims - ax.scatter(x_arr, lims) + size = np.sum(~lims.mask) + if size == 0: continue + tck, u = splprep([x_arr[~lims.mask], lims[~lims.mask]], s=0, k=1) + x, y = splev(np.linspace(0, 1, 100), tck) + ax.scatter(x_arr, lims, marker='o', s=10, alpha=1, zorder=5) + ax.fill_between(x, y, 0, label=texture_label(tex)) for ymaj in ax.yaxis.get_majorticklocs(): ax.axhline(y=ymaj, ls=':', color='gray', alpha=0.3, linewidth=1) - for xmaj in be: + for xmaj in ax.xaxis.get_majorticklocs(): ax.axvline(x=xmaj, ls=':', color='gray', alpha=0.3, linewidth=1) ax.legend() - misc_utils.make_dir(outfile) + make_dir(outfile) for of in outformat: print 'Saving plot as {0}'.format(outfile+'_DIM{0}.'.format(dim)+of) fig.savefig(outfile+'_DIM{0}.'.format(dim)+of, bbox_inches='tight', dpi=150) |
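On the plotting side, `plot_sens.py` gains an `--x-segments` option as an alternative to `--source-ratios`: it scans source compositions `(x, 1-x, 0)` along one edge of the flavour triangle, and `plot_x` in `utils/plot.py` now spline-interpolates the per-source limits (`splprep`/`splev`) and shades them with `fill_between`. A small sketch of that pattern follows; the limit values are placeholders for illustration, not results from the analysis.

```python
import numpy as np
from scipy.interpolate import splprep, splev

# --x-segments N replaces an explicit --source-ratios list with a scan of
# source compositions (x, 1-x, 0).
x_segments = 5
x_array = np.linspace(0, 1, x_segments)
source_ratios = [[x, 1.0 - x, 0.0] for x in x_array]

# Placeholder limits, one per source ratio, standing in for the scan output.
lims = np.array([-22.1, -22.6, -23.0, -22.4, -21.9])

# plot_x smooths the scattered limits with a degree-1 spline before shading
# the excluded region via fill_between.
tck, u = splprep([x_array, lims], s=0, k=1)
x_smooth, y_smooth = splev(np.linspace(0, 1, 100), tck)
print(source_ratios[0], x_smooth[0], y_smooth[0])
```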
