-rw-r--r--  command                             |   4
-rwxr-xr-x  contour.py                          |  46
-rw-r--r--  plot_command                        |   3
-rwxr-xr-x  plot_sens.py                        |   9
-rwxr-xr-x  sens.py                             |  48
-rwxr-xr-x  submitter/clean.sh                  |   3
-rw-r--r--  submitter/contour_dag.py            |  51
-rw-r--r--  submitter/contour_emcee_dag.py      |  77
-rw-r--r--  submitter/contour_emcee_submit.sub  |  42
-rw-r--r--  submitter/contour_submit.sub        |  16
-rw-r--r--  submitter/out                       |   0
-rw-r--r--  submitter/sens_dag.py               |   6
-rw-r--r--  submitter/sens_dag_source.py        | 183
-rw-r--r--  utils/gf.py                         |   1
-rw-r--r--  utils/llh.py                        |   9
-rw-r--r--  utils/mcmc.py                       |   2
-rw-r--r--  utils/misc.py                       |   1
-rw-r--r--  utils/mn.py                         |   9
-rw-r--r--  utils/param.py                      |   9
19 files changed, 100 insertions(+), 419 deletions(-)
diff --git a/command b/command
new file mode 100644
index 0000000..1b34686
--- /dev/null
+++ b/command
@@ -0,0 +1,4 @@
+python contour.py --data real --debug true --nsteps 100 --burnin 10 --nwalkers 20 --outfile ./test_ --seed 26 --stat-method bayesian --threads max
+python sens.py --debug True --data real --datadir ./test --dimension 6 --eval-segment 1 --mn-live-points 100 --mn-output ./test --mn-tolerance 0.3 --seed 26 --segments 10 --source-ratio 1 2 0 --stat-method bayesian --threads 4 --texture oeu
+
+python plot_sens.py --data real --datadir /data/user/smandalia/flavour_ratio/data/sensitivity/ --dimensions 6 --plot-x True --segments 10 --x-segments 20 --split-jobs True --stat-method bayesian --texture none
diff --git a/contour.py b/contour.py
index 9242640..713bb22 100755
--- a/contour.py
+++ b/contour.py
@@ -12,6 +12,7 @@ from __future__ import absolute_import, division
import os
import argparse
+from copy import deepcopy
from functools import partial
import numpy as np
@@ -23,7 +24,8 @@ from utils import misc as misc_utils
from utils import mcmc as mcmc_utils
from utils import plot as plot_utils
from utils.enums import str_enum
-from utils.enums import DataType, Likelihood, MCMCSeedType, ParamTag, PriorsCateg
+from utils.enums import DataType, Likelihood, MCMCSeedType, ParamTag
+from utils.enums import PriorsCateg
from utils.param import Param, ParamSet
from pymultinest import Analyzer, run
@@ -60,33 +62,19 @@ def get_paramsets(args, nuisance_paramset):
llh_paramset = []
gf_nuisance = [x for x in nuisance_paramset.from_tag(ParamTag.NUISANCE)]
-
- llh_paramset.extend(
- [x for x in nuisance_paramset.from_tag(ParamTag.SM_ANGLES)]
- )
llh_paramset.extend(gf_nuisance)
for parm in llh_paramset:
parm.value = args.__getattribute__(parm.name)
- boundaries = fr_utils.SCALE_BOUNDARIES[args.dimension]
- tag = ParamTag.SCALE
- llh_paramset.append(
- Param(
- name='logLam', value=np.mean(boundaries), ranges=boundaries, std=3,
- tex=r'{\rm log}_{10}\left (\Lambda^{-1}' + \
- misc_utils.get_units(args.dimension)+r'\right )',
- tag=tag
- )
- )
llh_paramset = ParamSet(llh_paramset)
- tag = ParamTag.BESTFIT
if args.data is not DataType.REAL:
flavour_angles = fr_utils.fr_to_angles(args.injected_ratio)
else:
flavour_angles = fr_utils.fr_to_angles([1, 1, 1])
+ tag = ParamTag.BESTFIT
asimov_paramset.extend(gf_nuisance)
asimov_paramset.extend([
Param(name='astroFlavorAngle1', value=flavour_angles[0], ranges=[ 0., 1.], std=0.2, tag=tag),
@@ -110,13 +98,10 @@ def process_args(args):
if args.data is not DataType.REAL:
args.injected_ratio = fr_utils.normalise_fr(args.injected_ratio)
- if args.stat_method is StatCateg.BAYESIAN:
- args.likelihood = Likelihood.GOLEMFIT
- elif args.stat_method is StatCateg.FREQUENTIST:
- args.likelihood = Likelihood.GF_FREQ
+ args.likelihood = Likelihood.GOLEMFIT
- args.mcmc_threads = thread_factors(args.threads)[0]
- args.threads = thread_factors(args.threads)[1]
+ args.mcmc_threads = misc_utils.thread_factors(args.threads)[0]
+ args.threads = misc_utils.thread_factors(args.threads)[1]
def parse_args(args=None):
@@ -138,11 +123,10 @@ def parse_args(args=None):
help='Set the number of threads to use (int or "max")'
)
parser.add_argument(
- '--outfile', type=str, default='./untitled',
+ '--datadir', type=str, default='./untitled',
help='Path to output results'
)
gf_utils.gf_argparse(parser)
- llh_utils.llh_argparse(parser)
mcmc_utils.mcmc_argparse(parser)
nuisance_argparse(parser)
if args is None: return parser.parse_args()
@@ -150,7 +134,7 @@ def parse_args(args=None):
def gen_identifier(args):
- f = '_{0}_{1}'.format(*map(str_enum, (args.likelihood, args.data)))
+ f = '_{0}'.format(str_enum(args.data))
if args.data is not DataType.REAL:
ir1, ir2, ir3 = misc_utils.solve_ratio(args.injected_ratio)
f += '_INJ_{0:03d}_{1:03d}_{2:03d}'.format(ir1, ir2, ir3)
@@ -190,13 +174,14 @@ def triangle_llh(theta, args, hypo_paramset):
def ln_prob(theta, args, hypo_paramset):
- lp = llh_utils.lnprior(theta, paramset=hypo_paramset)
+ dc_hypo_paramset = deepcopy(hypo_paramset)
+ lp = llh_utils.lnprior(theta, paramset=dc_hypo_paramset)
if not np.isfinite(lp):
return -np.inf
return lp + triangle_llh(
theta,
args = args,
- hypo_paramset = hypo_paramset,
+ hypo_paramset = dc_hypo_paramset,
)
@@ -210,12 +195,9 @@ def main():
asimov_paramset, hypo_paramset = get_paramsets(args, define_nuisance())
hypo_paramset.extend(asimov_paramset.from_tag(ParamTag.BESTFIT))
- outfile = args.outfile + gen_identifier(args)
+ outfile = args.datadir + '/contour' + gen_identifier(args)
print '== {0:<25} = {1}'.format('outfile', outfile)
- n_params = len(hypo_paramset)
- outfile = outfile + '_emcee_'
-
print 'asimov_paramset', asimov_paramset
print 'hypo_paramset', hypo_paramset
@@ -240,7 +222,7 @@ def main():
samples = mcmc_utils.mcmc(
p0 = p0,
ln_prob = ln_prob_eval,
- ndim = n_params,
+ ndim = len(hypo_paramset),
nwalkers = args.nwalkers,
burnin = args.burnin,
nsteps = args.nsteps,
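
(Note: contour.py now splits the requested --threads count between the emcee sampler and each likelihood evaluation via misc_utils.thread_factors, as used in process_args above. A minimal sketch of what such a helper could look like, assuming it returns the pair of factors of n closest to its square root; the real implementation lives in utils/misc.py and may differ.)

    # Hypothetical sketch of a thread_factors helper: split n into two
    # factors so that mcmc_threads * threads == n, as consumed above.
    import numpy as np

    def thread_factors(n):
        for i in range(int(np.sqrt(n)), 0, -1):
            if n % i == 0:
                return (n // i, i)

    # e.g. with --threads 12 the sampler gets 4 processes and each
    # likelihood call gets 3 threads.
    print(thread_factors(12))  # (4, 3)
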
diff --git a/plot_command b/plot_command
deleted file mode 100644
index cf733f5..0000000
--- a/plot_command
+++ /dev/null
@@ -1,3 +0,0 @@
-python plot_sens.py --data real --dimensions 3 4 5 6 7 8 --fix-source-ratio True --source-ratios 1 2 0 1 0 0 0 1 0 --split-jobs True --stat-method bayesian --run-method fixed_angle --infile /data/user/smandalia/flavour_ratio/data --likelihood golemfit --sens-bins 20
-
-python contour.py --data realisation --debug True --injected-ratio 1 1 1 --likelihood golemfit --mn-live-points 500 --mn-tolerance 0.3 --outfile /data/user/smandalia/flavour_ratio/data/contour/golemfit/realisation/ --plot-chains True --plot-triangle True --run-scan False
diff --git a/plot_sens.py b/plot_sens.py
index 0eef55e..f190905 100755
--- a/plot_sens.py
+++ b/plot_sens.py
@@ -27,6 +27,9 @@ from utils.misc import gen_identifier, SortingHelpFormatter
from utils.param import Param, ParamSet
+MASK_X = (0.3, 0.8)
+
+
def process_args(args):
"""Process the input args."""
if args.data is not DataType.REAL:
@@ -45,7 +48,11 @@ def process_args(args):
args.source_ratios = map(fr_utils.normalise_fr, srs)
elif args.x_segments is not None:
x_array = np.linspace(0, 1, args.x_segments)
- args.source_ratios = [[x, 1-x, 0] for x in x_array]
+ sources = []
+ for x in x_array:
+ if x > MASK_X[0] and x < MASK_X[1]: continue
+ sources.append([x, 1-x, 0])
+ args.source_ratios = sources
else:
raise ValueError('Must supply either --source-ratios or --x-segments')
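
(The MASK_X block added above skips source-ratio grid points whose electron fraction x lies strictly between 0.3 and 0.8 when the scan is built from --x-segments. A small self-contained sketch of the resulting grid, assuming the 20-point linspace from the command file; only the masking logic is taken from the diff.)

    import numpy as np

    MASK_X = (0.3, 0.8)
    x_segments = 20

    x_array = np.linspace(0, 1, x_segments)
    sources = [[x, 1 - x, 0] for x in x_array
               if not (MASK_X[0] < x < MASK_X[1])]
    print(len(sources))  # 10 of the 20 grid points survive the mask
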
diff --git a/sens.py b/sens.py
index 9451056..6ecbea7 100755
--- a/sens.py
+++ b/sens.py
@@ -14,6 +14,8 @@ import os
import argparse
from functools import partial
+import glob
+
import numpy as np
import numpy.ma as ma
from scipy.optimize import minimize
@@ -216,35 +218,45 @@ def main():
print '|||| SCALE = {0:.0E}'.format(np.power(10, scale))
# Lower scale boundary for first (NULL) point and set the scale param.
+ reset_range = None
if scale < scale_prm.ranges[0]:
+ reset_range = scale_prm.ranges
scale_prm.ranges = (scale, scale_prm.ranges[1])
scale_prm.value = scale
- if args.stat_method is StatCateg.BAYESIAN:
- identifier = 'b{0}_{1}_{2}_sca{3}'.format(
- args.eval_segment, args.segments, str_enum(args.texture), scale
+ identifier = 'b{0}_{1}_{2}_sca{3}'.format(
+ args.eval_segment, args.segments, str_enum(args.texture), scale
+ )
+ llh = '{0}'.format(args.likelihood).split('.')[1]
+ data = '{0}'.format(args.data).split('.')[1]
+ src_string = solve_ratio(args.source_ratio)
+ prefix = args.mn_output + '/DIM{0}/{1}/{2}/s{3}/{4}'.format(
+ args.dimension, data, llh, src_string, identifier
+ )
+ try:
+ stat = mn_utils.mn_evidence(
+ mn_paramset = base_mn_pset,
+ llh_paramset = llh_paramset,
+ asimov_paramset = asimov_paramset,
+ args = args,
+ prefix = prefix
)
- try:
- stat = mn_utils.mn_evidence(
- mn_paramset = base_mn_pset,
- llh_paramset = llh_paramset,
- asimov_paramset = asimov_paramset,
- args = args,
- identifier = identifier
- )
- except:
- print 'Failed run'
- raise
- # continue
- print '## Evidence = {0}'.format(stat)
- elif args.stat_method is StatCateg.FREQUENTIST:
- raise NotImplementedError('Still needs testing')
+ except:
+ print 'Failed run'
+ raise
+ print '## Evidence = {0}'.format(stat)
if args.eval_segment is not None:
stat_arr[0] = np.array([scale, stat])
else:
stat_arr[idx_sc] = np.array([scale, stat])
+ # Cleanup.
+ for f in glob.glob(prefix + '*'):
+ os.remove(f)
+ if reset_range is not None:
+ scale_prm.ranges = reset_range
+
misc_utils.make_dir(outfile)
print 'Saving to {0}'.format(outfile+'.npy')
np.save(outfile+'.npy', stat_arr)
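
(The reset_range bookkeeping above temporarily lowers the scale parameter's range for the first (NULL) scan point and restores it once the evidence has been read back, before the MultiNest scratch files matching the prefix are removed. A toy, self-contained sketch of that widen/evaluate/restore pattern; FakeParam and evaluate are stand-ins, not the real utils.param or mn_utils API.)

    class FakeParam(object):
        def __init__(self, value, ranges):
            self.value, self.ranges = value, ranges

    def evaluate(prm):
        return -prm.value  # dummy stand-in for the evidence calculation

    scale_prm = FakeParam(value=-25.0, ranges=(-30.0, -20.0))
    scale = -32.0  # NULL point below the nominal lower boundary

    reset_range = None
    if scale < scale_prm.ranges[0]:
        reset_range = scale_prm.ranges                   # remember boundaries
        scale_prm.ranges = (scale, scale_prm.ranges[1])  # widen for this point
    scale_prm.value = scale
    stat = evaluate(scale_prm)
    if reset_range is not None:
        scale_prm.ranges = reset_range                   # restore for next point
    print(stat)  # 32.0
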
diff --git a/submitter/clean.sh b/submitter/clean.sh
index d669683..a56d7f1 100755
--- a/submitter/clean.sh
+++ b/submitter/clean.sh
@@ -2,9 +2,6 @@
rm -f dagman_*.submit.*
rm -f logs/*
-rm -f metaouts/*
-rm -rf mnrun/
-mkdir mnrun
rm -f /scratch/smandalia/flavour_ratio/submitter/dagman_*.submit.*
rm -f /scratch/smandalia/flavour_ratio/submitter/logs/*
diff --git a/submitter/contour_dag.py b/submitter/contour_dag.py
index 634801b..8b3c11c 100644
--- a/submitter/contour_dag.py
+++ b/submitter/contour_dag.py
@@ -13,50 +13,46 @@ injected_ratios = [
(0, 0, 1)
]
-GLOBAL_PARAMS = {}
+datadir = '/data/user/smandalia/flavour_ratio/data/contour'
-GLOBAL_PARAMS.update(dict(
- threads = 1,
- save_measured_fr = 'False',
- output_measured_fr = './frs/',
- seed = None
-))
+prefix = ''
+# prefix = '_noprior'
+
+golemfitsourcepath = os.environ['GOLEMSOURCEPATH'] + '/GolemFit'
+condor_script = golemfitsourcepath + '/scripts/flavour_ratio/submitter/contour_submit.sub'
+
+GLOBAL_PARAMS = {}
-# MultiNest
GLOBAL_PARAMS.update(dict(
- mn_live_points = 5000,
- mn_tolerance = 0.3,
+ threads = 12,
+ seed = 26
))
-# Likelihood
+# Emcee
GLOBAL_PARAMS.update(dict(
- likelihood = 'golemfit',
+ run_mcmc = 'True',
+ burnin = 200,
+ nsteps = 1000,
+ nwalkers = 60,
+ mcmc_seed_type = 'uniform'
))
# GolemFit
GLOBAL_PARAMS.update(dict(
ast = 'p2_0',
- # data = 'realisation'
- # data = 'asimov'
data = 'real'
))
# Plot
GLOBAL_PARAMS.update(dict(
- plot_chains = 'False',
- plot_triangle = 'False'
+ plot_angles = 'False',
+ plot_elements = 'False',
))
-outfile = 'dagman_FR_CONTOUR_{0}'.format(GLOBAL_PARAMS['data'])
-outfile += '.submit'
-output = '/data/user/smandalia/flavour_ratio/data/contour/{0}/{1}/'.format(
- GLOBAL_PARAMS['likelihood'], GLOBAL_PARAMS['data']
-)
-# output += 'nosyst/'
-# output += 'noprompt/'
-# output += 'strictpriors/'
+dagfile = 'dagman_CONTOUR_{0}'.format(GLOBAL_PARAMS['data'])
+dagfile += prefix + '.submit'
-with open(outfile, 'w') as f:
+with open(dagfile, 'w') as f:
job_number = 1
for inj in injected_ratios:
print 'inj', inj
@@ -66,8 +62,9 @@ with open(outfile, 'w') as f:
f.write('VARS\tjob{0}\tir2="{1}"\n'.format(job_number, inj[2]))
for key in GLOBAL_PARAMS.iterkeys():
f.write('VARS\tjob{0}\t{1}="{2}"\n'.format(job_number, key, GLOBAL_PARAMS[key]))
- f.write('VARS\tjob{0}\toutfile="{1}"\n'.format(job_number, output))
+ f.write('VARS\tjob{0}\tdatadir="{1}"\n'.format(job_number, datadir))
job_number += 1
if GLOBAL_PARAMS['data'] == 'real': break
-print 'dag file = {0}'.format(outfile)
+print 'total jobs = {0}'.format(job_number - 1)
+print 'dag file = {0}'.format(dagfile)
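
(For reference, each injected ratio in contour_dag.py turns into one JOB line plus a block of VARS lines in the generated DAG file. A minimal sketch that prints what a single job entry looks like; the paths and the parameter subset are illustrative stand-ins, not the full GLOBAL_PARAMS dict.)

    condor_script = '.../submitter/contour_submit.sub'  # illustrative path
    datadir = '/data/user/.../contour'                  # illustrative path
    inj = (1, 1, 1)
    params = {'threads': 12, 'seed': 26, 'data': 'real'}

    lines = ['JOB\tjob1\t{0}'.format(condor_script)]
    for i, r in enumerate(inj):
        lines.append('VARS\tjob1\tir{0}="{1}"'.format(i, r))
    for key, val in params.items():
        lines.append('VARS\tjob1\t{0}="{1}"'.format(key, val))
    lines.append('VARS\tjob1\tdatadir="{0}"'.format(datadir))
    print('\n'.join(lines))
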
diff --git a/submitter/contour_emcee_dag.py b/submitter/contour_emcee_dag.py
deleted file mode 100644
index b16312a..0000000
--- a/submitter/contour_emcee_dag.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#! /usr/bin/env python
-
-import os
-import numpy as np
-
-gfsource = os.environ['GOLEMSOURCEPATH'] + '/GolemFit'
-condor_script = gfsource + '/scripts/flavour_ratio/submitter/contour_emcee_submit.sub'
-
-injected_ratios = [
- (1, 1, 1),
- (1, 0, 0),
- (0, 1, 0),
- (0, 0, 1)
-]
-
-GLOBAL_PARAMS = {}
-
-GLOBAL_PARAMS.update(dict(
- threads = 1,
-))
-
-# Emcee
-GLOBAL_PARAMS.update(dict(
- run_mcmc = 'True',
- burnin = 250,
- nsteps = 500,
- nwalkers = 60,
- seed = 25,
- mcmc_seed_type = 'uniform'
-))
-
-# Likelihood
-GLOBAL_PARAMS.update(dict(
- likelihood = 'golemfit',
-))
-
-# GolemFit
-GLOBAL_PARAMS.update(dict(
- ast = 'p2_0',
- # data = 'realisation'
- # data = 'asimov'
- data = 'real'
-))
-
-# Plot
-GLOBAL_PARAMS.update(dict(
- plot_angles = 'False',
- plot_elements = 'False',
-))
-
-outfile = 'dagman_FR_CONTOUR_EMCEE_{0}'.format(GLOBAL_PARAMS['data'])
-outfile += 'more_sys_flat'
-outfile += '.submit'
-
-output = '/data/user/smandalia/flavour_ratio/data/contour_emcee/{0}/{1}/'.format(
- GLOBAL_PARAMS['likelihood'], GLOBAL_PARAMS['data']
-)
-# output += 'more_sys/'
-output += 'more_sys_flat/'
-# output += 'noprompt/'
-# output += 'strictpriors/'
-
-with open(outfile, 'w') as f:
- job_number = 1
- for inj in injected_ratios:
- print 'inj', inj
- f.write('JOB\tjob{0}\t{1}\n'.format(job_number, condor_script))
- f.write('VARS\tjob{0}\tir0="{1}"\n'.format(job_number, inj[0]))
- f.write('VARS\tjob{0}\tir1="{1}"\n'.format(job_number, inj[1]))
- f.write('VARS\tjob{0}\tir2="{1}"\n'.format(job_number, inj[2]))
- for key in GLOBAL_PARAMS.iterkeys():
- f.write('VARS\tjob{0}\t{1}="{2}"\n'.format(job_number, key, GLOBAL_PARAMS[key]))
- f.write('VARS\tjob{0}\toutfile="{1}"\n'.format(job_number, output))
- job_number += 1
- if GLOBAL_PARAMS['data'] == 'real': break
-
-print 'dag file = {0}'.format(outfile)
diff --git a/submitter/contour_emcee_submit.sub b/submitter/contour_emcee_submit.sub
deleted file mode 100644
index df47cb7..0000000
--- a/submitter/contour_emcee_submit.sub
+++ /dev/null
@@ -1,42 +0,0 @@
-Executable = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/contour_emcee.py
-Arguments = "--ast $(ast) --data $(data) --likelihood $(likelihood) --injected-ratio $(ir0) $(ir1) $(ir2) --outfile $(outfile) --seed $(seed) --threads $(threads) --run-mcmc $(run_mcmc) --burnin $(burnin) --nsteps $(nsteps) --nwalkers $(nwalkers) --seed $(seed) --mcmc-seed-type $(mcmc_seed_type) --plot-angles $(plot_angles) --plot-elements $(plot_elements)"
-
-# All logs will go to a single file
-log = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).log
-output = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).out
-error = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).err
-
-getenv = True
-# environment = "X509_USER_PROXY=x509up_u14830"
-
-# Stage user cert to the node (Gridftp-Users is already on CVMFS)
-# transfer_input_files = /tmp/x509up_u14830
-
-# but do not try to copy outputs back (see: https://htcondor-wiki.cs.wisc.edu/index.cgi/tktview?tn=3081)
-# +TransferOutput=""
-Transfer_output_files = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/metaouts/
-
-request_memory = 3GB
-request_cpus = 1
-
-Universe = vanilla
-Notification = never
-
-# +AccountingGroup="sanctioned.$ENV(USER)"
-# run on both SL5 and 6
-# +WantRHEL6 = True
-# +WantSLC6 = False
-
-# # run on OSG
-# +WantGlidein = True
-
-# +TransferOutput=""
-
-+NATIVE_OS = True
-# Requirements = IS_GLIDEIN && HAS_CVMFS_icecube_opensciencegrid_org && (OpSysAndVer =?= "CentOS6" || OpSysAndVer =?= "RedHat6" || OpSysAndVer =?= "SL6")
-# Requirements = IS_GLIDEIN
-# Requirements = (OpSysMajorVer =?= 6)
-
-# GO!
-queue
-
diff --git a/submitter/contour_submit.sub b/submitter/contour_submit.sub
index f4e13e9..c507ec2 100644
--- a/submitter/contour_submit.sub
+++ b/submitter/contour_submit.sub
@@ -1,27 +1,23 @@
Executable = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/contour.py
-Arguments = "--ast $(ast) --data $(data) --likelihood $(likelihood) --injected-ratio $(ir0) $(ir1) $(ir2) --outfile $(outfile) --seed $(seed) --threads $(threads) --mn-live-points $(mn_live_points) --mn-tolerance $(mn_tolerance) --plot-chains $(plot_chains) --plot-triangle $(plot_triangle) --save-measured-fr $(save_measured_fr) --output-measured-fr=$(output_measured_fr)"
+Arguments = "--ast $(ast) --data $(data) --injected-ratio $(ir0) $(ir1) $(ir2) --datadir $(datadir) --seed $(seed) --threads $(threads) --run-mcmc $(run_mcmc) --burnin $(burnin) --nsteps $(nsteps) --nwalkers $(nwalkers) --mcmc-seed-type $(mcmc_seed_type) --plot-angles $(plot_angles) --plot-elements $(plot_elements)"
# All logs will go to a single file
-log = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).log
+log = /scratch/smandalia/flavour_ratio/submitter/logs/job_$(Cluster).log
output = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).out
error = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/logs/job_$(Cluster).err
getenv = True
# environment = "X509_USER_PROXY=x509up_u14830"
-# Stage user cert to the node (Gridftp-Users is already on CVMFS)
-# transfer_input_files = /tmp/x509up_u14830
-
-# but do not try to copy outputs back (see: https://htcondor-wiki.cs.wisc.edu/index.cgi/tktview?tn=3081)
-# +TransferOutput=""
-Transfer_output_files = /data/user/smandalia/GolemTools/sources/GolemFit/scripts/flavour_ratio/submitter/metaouts/
-
request_memory = 3GB
-request_cpus = 1
+request_cpus = 12
+
+initialdir = /home/smandalia/condor
Universe = vanilla
Notification = never
++AccountingGroup="quicktest.$ENV(USER)"
# +AccountingGroup="sanctioned.$ENV(USER)"
# run on both SL5 and 6
# +WantRHEL6 = True
diff --git a/submitter/out b/submitter/out
deleted file mode 100644
index e69de29..0000000
--- a/submitter/out
+++ /dev/null
diff --git a/submitter/sens_dag.py b/submitter/sens_dag.py
index 160e590..f30d420 100644
--- a/submitter/sens_dag.py
+++ b/submitter/sens_dag.py
@@ -9,10 +9,12 @@ import numpy as np
# (0, 1, 0),
# ]
-x_bins = 20
-x_array = np.linspace(0, 1, x_bins)
+MASK_X = (0.3, 0.8)
+x_segments = 20
+x_array = np.linspace(0, 1, x_segments)
sources = []
for x in x_array:
+ if x > MASK_X[0] and x < MASK_X[1]: continue
sources.append([x, 1-x, 0])
dims = [
diff --git a/submitter/sens_dag_source.py b/submitter/sens_dag_source.py
deleted file mode 100644
index bdb5924..0000000
--- a/submitter/sens_dag_source.py
+++ /dev/null
@@ -1,183 +0,0 @@
-#! /usr/bin/env python
-
-import os
-import numpy as np
-
-full_scan_mfr = [
- # (1, 1, 1), (1, 2, 0)
-]
-
-bins = 5
-binning = np.linspace(0, 1, bins)
-grid = np.dstack(np.meshgrid(binning, binning)).reshape(bins*bins, 2)
-sources = []
-for x in grid:
- if x[0]+x[1] > 1:
- continue
- sources.append([x[0], x[1], 1-x[0]-x[1]])
-
-# fix_sfr_mfr = [
-# (1, 1, 1, 1, 2, 0),
-# (1, 1, 1, 1, 0, 0),
-# (1, 1, 1, 0, 1, 0),
-# # (1, 1, 1, 0, 0, 1),
-# # (1, 1, 0, 1, 2, 0),
-# # (1, 1, 0, 1, 0, 0),
-# # (1, 1, 0, 0, 1, 0),
-# # (1, 0, 0, 1, 0, 0),
-# # (0, 1, 0, 0, 1, 0),
-# # (1, 2, 0, 1, 2, 0),
-# # (1, 2, 0, 0, 1, 0),
-# ]
-fix_sfr_mfr = []
-for s in sources:
- fix_sfr_mfr.append((1, 1, 1, s[0], s[1], s[2]))
-print 'fix_sfr_mfr', fix_sfr_mfr
-print 'len(fix_sfr_mfr)', len(fix_sfr_mfr)
-
-GLOBAL_PARAMS = {}
-
-# Bayes Factor
-sens_eval_bin = 'true' # set to 'all' to run normally
-GLOBAL_PARAMS.update(dict(
- sens_run = 'True',
- run_method = 'fixed_angle', # full, fixed_angle, corr_angle
- stat_method = 'bayesian',
- sens_bins = 10,
- seed = None
-))
-
-# MultiNest
-GLOBAL_PARAMS.update(dict(
- # mn_live_points = 1000,
- # mn_live_points = 600,
- mn_live_points = 100,
- # mn_tolerance = 0.1,
- mn_tolerance = 0.3,
- mn_output = './mnrun'
-))
-
-# FR
-dimension = [6]
-# dimension = [3, 6]
-# dimension = [3, 4, 5, 6, 7, 8]
-GLOBAL_PARAMS.update(dict(
- threads = 1,
- binning = '6e4 1e7 20',
- no_bsm = 'False',
- scale_region = "1E10",
- energy_dependance = 'spectral',
- spectral_index = -2,
- fix_mixing = 'None',
- fix_mixing_almost = 'False',
- fold_index = 'True',
- save_measured_fr = 'False',
- output_measured_fr = './frs/'
-))
-
-# Likelihood
-GLOBAL_PARAMS.update(dict(
- likelihood = 'golemfit',
- sigma_ratio = '0.01'
-))
-
-# GolemFit
-GLOBAL_PARAMS.update(dict(
- ast = 'p2_0',
- data = 'real'
-))
-
-# Plot
-GLOBAL_PARAMS.update(dict(
- plot_statistic = 'True'
-))
-
-outfile = 'dagman_FR_SENS_{0}_{1}_{2}_{3}'.format(
- GLOBAL_PARAMS['stat_method'], GLOBAL_PARAMS['run_method'],
- GLOBAL_PARAMS['likelihood'], GLOBAL_PARAMS['data']
-)
-# outfile += '_seed2'
-# outfile += '_tol03'
-# outfile += '_NULL'
-# outfile += '_prior'
-# outfile += '_strictprior'
-# outfile += '_noprior'
-outfile += '_sourcescan'
-outfile += '.submit'
-golemfitsourcepath = os.environ['GOLEMSOURCEPATH'] + '/GolemFit'
-condor_script = golemfitsourcepath + '/scripts/flavour_ratio/submitter/sens_submit.sub'
-
-if sens_eval_bin.lower() != 'all':
- if GLOBAL_PARAMS['run_method'].lower() == 'corr_angle':
- raise NotImplementedError
- sens_runs = GLOBAL_PARAMS['sens_bins']**2
- else:
- sens_runs = GLOBAL_PARAMS['sens_bins'] + 1
-else: sens_runs = 1
-
-with open(outfile, 'w') as f:
- job_number = 1
- for dim in dimension:
- print 'dimension', dim
- outchain_head = '/data/user/smandalia/flavour_ratio/data/{0}/DIM{1}'.format(
- GLOBAL_PARAMS['likelihood'], dim
- )
- for frs in fix_sfr_mfr:
- print 'frs', frs
- output = outchain_head + '/fix_ifr/'
- if GLOBAL_PARAMS['likelihood'].lower() == 'gaussian':
- output += '{0}/'.format(str(GLOBAL_PARAMS['sigma_ratio']).replace('.', '_'))
- # output += 'seed2/'
- # output += 'mn_noverlap/'
- # output += 'tol_03/'
- # output += 'prior/'
- # output += 'strictprior/'
- # output += 'noprior/'
- output += 'sourcescan/'
- for r in xrange(sens_runs):
- print 'run', r
- f.write('JOB\tjob{0}\t{1}\n'.format(job_number, condor_script))
- f.write('VARS\tjob{0}\tdimension="{1}"\n'.format(job_number, dim))
- f.write('VARS\tjob{0}\tmr0="{1}"\n'.format(job_number, frs[0]))
- f.write('VARS\tjob{0}\tmr1="{1}"\n'.format(job_number, frs[1]))
- f.write('VARS\tjob{0}\tmr2="{1}"\n'.format(job_number, frs[2]))
- f.write('VARS\tjob{0}\tfix_source_ratio="{1}"\n'.format(job_number, True))
- f.write('VARS\tjob{0}\tsr0="{1}"\n'.format(job_number, frs[3]))
- f.write('VARS\tjob{0}\tsr1="{1}"\n'.format(job_number, frs[4]))
- f.write('VARS\tjob{0}\tsr2="{1}"\n'.format(job_number, frs[5]))
- if sens_eval_bin.lower() != 'all':
- f.write('VARS\tjob{0}\tsens_eval_bin="{1}"\n'.format(job_number, r))
- else:
- f.write('VARS\tjob{0}\tsens_eval_bin="{1}"\n'.format(job_number, 'all'))
- for key in GLOBAL_PARAMS.iterkeys():
- f.write('VARS\tjob{0}\t{1}="{2}"\n'.format(job_number, key, GLOBAL_PARAMS[key]))
- f.write('VARS\tjob{0}\toutfile="{1}"\n'.format(job_number, output))
- job_number += 1
- # break
-
- # for frs in full_scan_mfr:
- # print 'frs', frs
- # output = outchain_head + '/full/'
- # if GLOBAL_PARAMS['likelihood'].lower() == 'gaussian':
- # output += '{0}/'.format(str(GLOBAL_PARAMS['sigma_ratio']).replace('.', '_'))
- # for r in xrange(sens_runs):
- # print 'run', r
- # f.write('JOB\tjob{0}\t{1}\n'.format(job_number, condor_script))
- # f.write('VARS\tjob{0}\tdimension="{1}"\n'.format(job_number, dim))
- # f.write('VARS\tjob{0}\tmr0="{1}"\n'.format(job_number, frs[0]))
- # f.write('VARS\tjob{0}\tmr1="{1}"\n'.format(job_number, frs[1]))
- # f.write('VARS\tjob{0}\tmr2="{1}"\n'.format(job_number, frs[2]))
- # f.write('VARS\tjob{0}\tfix_source_ratio="{1}"\n'.format(job_number, False))
- # f.write('VARS\tjob{0}\tsr0="{1}"\n'.format(job_number, 0))
- # f.write('VARS\tjob{0}\tsr1="{1}"\n'.format(job_number, 0))
- # f.write('VARS\tjob{0}\tsr2="{1}"\n'.format(job_number, 0))
- # if sens_eval_bin.lower() != 'all':
- # f.write('VARS\tjob{0}\tsens_eval_bin="{1}"\n'.format(job_number, r))
- # else:
- # f.write('VARS\tjob{0}\tsens_eval_bin="{1}"\n'.format(job_number, 'all'))
- # for key in GLOBAL_PARAMS.iterkeys():
- # f.write('VARS\tjob{0}\t{1}="{2}"\n'.format(job_number, key, GLOBAL_PARAMS[key]))
- # f.write('VARS\tjob{0}\toutfile="{1}"\n'.format(job_number, output))
- # job_number += 1
-
- print 'dag file = {0}'.format(outfile)
diff --git a/utils/gf.py b/utils/gf.py
index b0071f5..d0c62ca 100644
--- a/utils/gf.py
+++ b/utils/gf.py
@@ -126,7 +126,6 @@ def setup_fitter(args, asimov_paramset):
def get_llh(params):
- # print 'params', params
fitparams = gf.FitParameters(gf.sampleTag.MagicTau)
for parm in params:
fitparams.__setattr__(parm.name, float(parm.value))
diff --git a/utils/llh.py b/utils/llh.py
index d80e374..9821695 100644
--- a/utils/llh.py
+++ b/utils/llh.py
@@ -9,6 +9,7 @@ Likelihood functions for the BSM flavour ratio analysis
from __future__ import absolute_import, division
+from copy import deepcopy
from functools import partial
import numpy as np
@@ -144,10 +145,12 @@ def triangle_llh(theta, args, asimov_paramset, llh_paramset):
def ln_prob(theta, args, asimov_paramset, llh_paramset):
- lp = lnprior(theta, paramset=llh_paramset)
+ dc_asimov_paramset = deepcopy(asimov_paramset)
+ dc_llh_paramset = deepcopy(llh_paramset)
+ lp = lnprior(theta, paramset=dc_llh_paramset)
if not np.isfinite(lp):
return -np.inf
return lp + triangle_llh(
- theta, args=args, asimov_paramset=asimov_paramset,
- llh_paramset=llh_paramset
+ theta, args=args, asimov_paramset=dc_asimov_paramset,
+ llh_paramset=dc_llh_paramset
)
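
(The deepcopy added to ln_prob here, and the matching change in contour.py, gives each likelihood evaluation its own private copy of the parameter sets, so walkers dispatched by emcee cannot mutate the shared objects between calls. A toy illustration of the hazard, using a stand-in class rather than the real ParamSet.)

    from copy import deepcopy

    class FakeParamSet(object):
        def __init__(self, values):
            self.values = list(values)

    shared = FakeParamSet([0.1, 0.2, 0.3])

    def ln_prob(theta, paramset):
        pset = deepcopy(paramset)   # work on a private copy
        pset.values = list(theta)   # 'shared' is left untouched
        return -sum(v * v for v in pset.values)

    print(ln_prob([1.0, 2.0, 3.0], shared))  # -14.0
    print(shared.values)                     # still [0.1, 0.2, 0.3]
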
diff --git a/utils/mcmc.py b/utils/mcmc.py
index e5bd8da..a807cb7 100644
--- a/utils/mcmc.py
+++ b/utils/mcmc.py
@@ -73,7 +73,7 @@ def mcmc_argparse(parser):
help='Type of distrbution to make the initial MCMC seed'
)
parser.add_argument(
- '--plot-angles', type=parse_bool, default='True',
+ '--plot-angles', type=parse_bool, default='False',
help='Plot MCMC triangle in the angles space'
)
parser.add_argument(
diff --git a/utils/misc.py b/utils/misc.py
index e5fedb9..630aaf6 100644
--- a/utils/misc.py
+++ b/utils/misc.py
@@ -120,6 +120,7 @@ def make_dir(outfile):
else:
raise
+
def remove_option(parser, arg):
for action in parser._actions:
if (vars(action)['option_strings']
diff --git a/utils/mn.py b/utils/mn.py
index 563b3c8..335df96 100644
--- a/utils/mn.py
+++ b/utils/mn.py
@@ -60,8 +60,7 @@ def mn_argparse(parser):
)
-def mn_evidence(mn_paramset, llh_paramset, asimov_paramset, args,
- identifier='mn'):
+def mn_evidence(mn_paramset, llh_paramset, asimov_paramset, args, prefix='mn'):
"""Run the MultiNest algorithm to calculate the evidence."""
n_params = len(mn_paramset)
@@ -76,12 +75,6 @@ def mn_evidence(mn_paramset, llh_paramset, asimov_paramset, args,
args = args,
)
- llh = '{0}'.format(args.likelihood).split('.')[1]
- data = '{0}'.format(args.data).split('.')[1]
- src_string = solve_ratio(args.source_ratio)
- prefix = args.mn_output + '/DIM{0}/{1}/{2}/s{3}/{4}'.format(
- args.dimension, data, llh, src_string, identifier
- )
make_dir(prefix)
print 'Running evidence calculation for {0}'.format(prefix)
run(
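
(With the change above, mn_evidence no longer builds the MultiNest file prefix itself; the caller assembles it and passes prefix= directly, as sens.py now does. A small sketch of that prefix string for one hypothetical scan point; the solve_ratio output '120' and the identifier tag are illustrative assumptions.)

    # Values below are illustrative stand-ins for the args fields used in sens.py.
    mn_output = './mnrun'
    dimension = 6
    data, llh = 'REAL', 'GOLEMFIT'
    src_string = '120'                 # assumed solve_ratio output for (1, 2, 0)
    identifier = 'b1_10_OEU_sca-30.0'  # assumed segment / texture / scale tag

    prefix = mn_output + '/DIM{0}/{1}/{2}/s{3}/{4}'.format(
        dimension, data, llh, src_string, identifier
    )
    print(prefix)  # ./mnrun/DIM6/REAL/GOLEMFIT/s120/b1_10_OEU_sca-30.0
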
diff --git a/utils/param.py b/utils/param.py
index 558018e..2378758 100644
--- a/utils/param.py
+++ b/utils/param.py
@@ -125,14 +125,7 @@ class ParamSet(Sequence):
return self._by_name[i]
def __getattr__(self, attr):
- try:
- return super(ParamSet, self).__getattribute__(attr)
- except AttributeError:
- t, v, tb = sys.exc_info()
- try:
- return self[attr]
- except KeyError:
- raise t, v, tb
+ return super(ParamSet, self).__getattribute__(attr)
def __iter__(self):
return iter(self._params)
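
(The trimmed __getattr__ above removes the implicit fall-through to item lookup, so attribute access on a ParamSet no longer resolves to a contained Param by name; only explicit indexing does. A toy sketch of the behavioural difference, using a stand-in class rather than the real ParamSet.)

    class FakeParamSet(object):
        def __init__(self, params):
            self._by_name = {p['name']: p for p in params}

        def __getitem__(self, name):
            return self._by_name[name]

    pset = FakeParamSet([{'name': 'logLam', 'value': -27.0}])
    print(pset['logLam']['value'])       # -27.0 : indexing still works
    try:
        pset.logLam                      # previously resolved via the fallback
    except AttributeError as err:
        print('AttributeError: {0}'.format(err))
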