author     shivesh <s.p.mandalia@qmul.ac.uk>  2019-03-08 13:36:58 -0600
committer  shivesh <s.p.mandalia@qmul.ac.uk>  2019-03-08 13:36:58 -0600
commit     c614f7216177745ddea1171d7ca0c6e68c378c17 (patch)
tree       159db8cb4a8ad1d39d521d8ecef9e86d36aa671b /utils
parent     e0df6e7e5aea33c030ed63fe3d3319fcc9072661 (diff)
Fri 8 Mar 13:36:58 CST 2019
Diffstat (limited to 'utils')
-rw-r--r--  utils/enums.py    6
-rw-r--r--  utils/fr.py       6
-rw-r--r--  utils/gf.py       5
-rw-r--r--  utils/llh.py     27
-rw-r--r--  utils/plot.py   197
5 files changed, 177 insertions, 64 deletions
diff --git a/utils/enums.py b/utils/enums.py
index 22f91b8..a7982f8 100644
--- a/utils/enums.py
+++ b/utils/enums.py
@@ -50,9 +50,9 @@ class ParamTag(Enum):
class PriorsCateg(Enum):
- UNIFORM = 1
- GAUSSIAN = 2
- HALFGAUSS = 3
+ UNIFORM = 1
+ GAUSSIAN = 2
+ LIMITEDGAUSS = 3
class MCMCSeedType(Enum):
diff --git a/utils/fr.py b/utils/fr.py
index 528557a..1d12fe5 100644
--- a/utils/fr.py
+++ b/utils/fr.py
@@ -97,9 +97,9 @@ def angles_to_fr(src_angles):
spsi2 = SIN(psi)**2
cspi2 = 1. - spsi2
- x = sphi2*cspi2
- y = sphi2*spsi2
- z = cphi2
+ x = float(abs(sphi2*cspi2))
+ y = float(abs(sphi2*spsi2))
+ z = float(abs(cphi2))
return x, y, z
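
Note: the two source angles parameterise a point on the unit simplex, so the three returned fractions always sum to one; the new float(abs(...)) wrapping only guards against tiny negative round-off from the symbolic SIN/COS backend. A minimal numpy sketch (the function name and the use of numpy in place of the module's SIN/COS are illustrative assumptions):

    import numpy as np

    def angles_to_fr_sketch(src_angles):
        # Same algebra as utils/fr.py, with numpy standing in for SIN/COS
        phi, psi = src_angles
        sphi2 = np.sin(phi)**2
        cphi2 = 1. - sphi2
        spsi2 = np.sin(psi)**2
        cspi2 = 1. - spsi2
        x = float(abs(sphi2*cspi2))
        y = float(abs(sphi2*spsi2))
        z = float(abs(cphi2))
        return x, y, z

    x, y, z = angles_to_fr_sketch((0.7, 1.2))
    # sin^2(phi)*(cos^2(psi)+sin^2(psi)) + cos^2(phi) = 1
    assert abs(x + y + z - 1.) < 1e-12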
diff --git a/utils/gf.py b/utils/gf.py
index 1998484..2c794d3 100644
--- a/utils/gf.py
+++ b/utils/gf.py
@@ -85,6 +85,11 @@ def steering_params(args):
params.maxFitEnergy = 1E7 # GeV
params.load_data_from_text_file = False
+ params.sampleToLoad = gf.sampleTag.MagicTau
+ params.use_legacy_selfveto_calculation = False
+ params.spline_hole_ice = False
+ params.spline_dom_efficiency = False
+
return params
diff --git a/utils/llh.py b/utils/llh.py
index f4404c4..0c1e97d 100644
--- a/utils/llh.py
+++ b/utils/llh.py
@@ -12,7 +12,8 @@ from __future__ import absolute_import, division
from functools import partial
import numpy as np
-from scipy.stats import multivariate_normal, rv_continuous
+import scipy
+from scipy.stats import multivariate_normal, truncnorm
from utils import fr as fr_utils
from utils import gf as gf_utils
@@ -20,10 +21,11 @@ from utils.enums import EnergyDependance, Likelihood, ParamTag, PriorsCateg
from utils.misc import enum_parse, gen_identifier, parse_bool
-class Gaussian(rv_continuous):
- """Gaussian for one dimension."""
- def _pdf(self, x, mu, sig):
- return (1./np.sqrt(2*np.pi*sig**2))*np.exp(-((x-mu)**2)/(2*sig**2))
+def GaussianBoundedRV(loc=0., sigma=1., lower=-np.inf, upper=np.inf):
+ """Normalised gaussian bounded between lower and upper values"""
+ low, up = (lower - loc) / sigma, (upper - loc) / sigma
+ g = scipy.stats.truncnorm(loc=loc, scale=sigma, a=low, b=up)
+ return g
def multi_gaussian(fr, fr_bf, sigma, offset=-320):
@@ -69,13 +71,14 @@ def lnprior(theta, paramset):
prior = 0
for param in paramset:
if param.prior is PriorsCateg.GAUSSIAN:
- prior += Gaussian().logpdf(
- param.nominal_value, param.value, param.std
- )
- elif param.prior is PriorsCateg.HALFGAUSS:
- prior += Gaussian().logpdf(
- param.nominal_value, param.value, param.std
- ) + Gaussian().logcdf(1, param.value, param.std)
+ prior += GaussianBoundedRV(
+ loc=param.nominal_value, sigma=param.std
+ ).logpdf(param.value)
+ elif param.prior is PriorsCateg.LIMITEDGAUSS:
+ prior += GaussianBoundedRV(
+ loc=param.nominal_value, sigma=param.std,
+ lower=param.ranges[0], upper=param.ranges[1]
+ ).logpdf(param.value)
return prior
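
Note: GaussianBoundedRV wraps scipy.stats.truncnorm, which takes its truncation bounds a and b in units of standard deviations relative to loc, hence the (lower - loc) / sigma rescaling; with the default infinite bounds it reduces to an ordinary Gaussian, while the LIMITEDGAUSS prior renormalises the density inside the parameter's range. A minimal sketch with illustrative numbers (not values from the analysis):

    import numpy as np
    from scipy.stats import truncnorm

    def gaussian_bounded_rv(loc=0., sigma=1., lower=-np.inf, upper=np.inf):
        # truncnorm expects its bounds in units of sigma relative to loc
        a, b = (lower - loc) / sigma, (upper - loc) / sigma
        return truncnorm(a=a, b=b, loc=loc, scale=sigma)

    # GAUSSIAN prior: unbounded, plain Gaussian log-density
    print(gaussian_bounded_rv(loc=0.5, sigma=0.1).logpdf(0.55))

    # LIMITEDGAUSS prior: density renormalised inside [0, 1]
    print(gaussian_bounded_rv(loc=0.5, sigma=0.1, lower=0., upper=1.).logpdf(0.55))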
diff --git a/utils/plot.py b/utils/plot.py
index 0529d5d..91b8b4e 100644
--- a/utils/plot.py
+++ b/utils/plot.py
@@ -15,7 +15,8 @@ from copy import deepcopy
import numpy as np
import numpy.ma as ma
-from scipy import interpolate
+from scipy.interpolate import splev, splprep
+from scipy.ndimage.filters import gaussian_filter
import matplotlib as mpl
mpl.use('Agg')
@@ -28,13 +29,18 @@ from matplotlib.lines import Line2D
from matplotlib.patches import Patch
from getdist import plots, mcsamples
+
import ternary
+from ternary.heatmapping import polygon_generator
+
+import shapely.geometry as geometry
from utils import misc as misc_utils
from utils.enums import DataType, EnergyDependance
from utils.enums import Likelihood, MixingScenario, ParamTag, StatCateg
from utils.fr import angles_to_u, angles_to_fr
+
if os.path.isfile('./plot_llh/paper.mplstyle'):
plt.style.use('./plot_llh/paper.mplstyle')
elif os.environ.get('GOLEMSOURCEPATH') is not None:
@@ -287,12 +293,13 @@ def plot_statistic(data, outfile, outformat, args, scale_param, label=None):
print 'data', data
print 'data.shape', data.shape
scales, statistic = ma.compress_rows(data).T
- scales_rm = scales[1:]
- statistic_rm = statistic[1:]
- tck, u = interpolate.splprep([scales_rm, statistic_rm], s=0)
- scales_rm, statistic_rm = interpolate.splev(np.linspace(0, 1, 1000), tck)
- print 'scales_rm', scales_rm
- print 'statistic_rm', statistic_rm
+ try:
+ tck, u = splprep([scales, statistic], s=0)
+ except:
+ return
+ sc, st = splev(np.linspace(0, 1, 10000), tck)
+ scales_rm = sc[sc >= scales[1]]
+ statistic_rm = st[sc >= scales[1]]
min_idx = np.argmin(scales)
null = statistic[min_idx]
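
Note: the statistic curve is now resampled with a parametric spline onto a dense grid and then cut at the second scanned scale, instead of splining the already-truncated arrays. A minimal sketch with synthetic, ordered data (the real inputs are the masked scan results):

    import numpy as np
    from scipy.interpolate import splprep, splev

    scales = np.linspace(-30., -20., 11)
    statistic = np.sin(scales) + 0.1 * scales   # stand-in for the scan statistic

    # Interpolating parametric spline through the curve, evaluated densely
    tck, u = splprep([scales, statistic], s=0)
    sc, st = splev(np.linspace(0, 1, 10000), tck)

    # Keep only the region at or above the second scanned scale
    keep = sc >= scales[1]
    scales_rm, statistic_rm = sc[keep], st[keep]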
@@ -314,6 +321,10 @@ def plot_statistic(data, outfile, outformat, args, scale_param, label=None):
elif args.stat_method is StatCateg.FREQUENTIST:
ax.set_ylabel(r'$-2\Delta {\rm LLH}$')
+ # ymin = np.round(np.min(reduced_ev) - 1.5)
+ # ymax = np.round(np.max(reduced_ev) + 1.5)
+ # ax.set_ylim((ymin, ymax))
+
ax.plot(scales_rm, reduced_ev)
ax.axhline(y=np.log(10**(3/2.)), color='red', alpha=1., linewidth=1.3)
@@ -565,10 +576,14 @@ def plot_sens_fixed_angle_pretty(data, outfile, outformat, args):
ax.axvline(x=ps, color='purple', alpha=1., linewidth=1.5)
scales, statistic = ma.compress_rows(data[idim][isrc][ian]).T
- scales_rm = scales[1:]
- statistic_rm = statistic[1:]
- tck, u = interpolate.splprep([scales_rm, statistic_rm], s=0)
- scales_rm, statistic_rm = interpolate.splev(np.linspace(0, 1, 1000), tck)
+ try:
+ tck, u = splprep([scales, statistic], s=0)
+ except:
+ return
+ sc, st = splev(np.linspace(0, 1, 10000), tck)
+ scales_rm = sc[sc >= scales[1]]
+ statistic_rm = st[sc >= scales[1]]
+
min_idx = np.argmin(scales)
null = statistic[min_idx]
if args.stat_method is StatCateg.BAYESIAN:
@@ -708,8 +723,8 @@ def plot_sens_fixed_angle(data, outfile, outformat, args):
for ian in xrange(len(data[idim][isrc])):
print '=== an', ian
scales, statistic = data[idim][isrc][ian].T
- tck, u = interpolate.splprep([scales, statistic], s=0)
- scales, statistic = interpolate.splev(np.linspace(0, 1, 1000), tck)
+ tck, u = splprep([scales, statistic], s=0)
+ scales, statistic = splev(np.linspace(0, 1, 1000), tck)
min_idx = np.argmin(scales)
null = statistic[min_idx]
if args.stat_method is StatCateg.BAYESIAN:
@@ -861,7 +876,7 @@ def plot_sens_corr_angle(data, outfile, outformat, args):
print uni, c
print len(uni)
print np.unique(c)
-
+
n = len(uni)
assert len(np.unique(c)) == 1
c = c[0]
@@ -872,7 +887,7 @@ def plot_sens_corr_angle(data, outfile, outformat, args):
sep_arrays = []
for x_i in xrange(n):
sep_arrays.append(col_array[:,x_i])
-
+
print len(sep_arrays)
sep_arrays = sep_arrays[0][:3]
print sep_arrays
@@ -891,7 +906,7 @@ def plot_sens_corr_angle(data, outfile, outformat, args):
ax.set_xlim((mini, maxi))
ax.set_ylim(0, 1)
ax.grid(b=False)
-
+
x_v = reduced_pdat[0].round(decimals=4)
y_v = reduced_pdat[1].round(decimals=4)
uniques = np.unique(x_v)
@@ -924,8 +939,8 @@ def plot_sens_corr_angle(data, outfile, outformat, args):
print 'p_x', p_x
print 'p_y', p_y
try:
- tck, u = interpolate.splprep([p_x, p_y], s=1e-5, per=True)
- xi, yi = interpolate.splev(np.linspace(0, 1, 1000), tck)
+ tck, u = splprep([p_x, p_y], s=1e-5, per=True)
+ xi, yi = splev(np.linspace(0, 1, 1000), tck)
except:
xi, yi = p_x, p_y
ax.fill(xi, yi, 'r', edgecolor='r', linewidth=1)
@@ -953,6 +968,36 @@ def cmap_discretize(cmap, N):
return mpl.colors.LinearSegmentedColormap(cmap.name + "_%d"%N, cdict, 1024)
+def get_tax(ax, scale):
+ ax.set_aspect('equal')
+
+ # Boundary and Gridlines
+ fig, tax = ternary.figure(ax=ax, scale=scale)
+
+ # Draw Boundary and Gridlines
+ tax.boundary(linewidth=2.0)
+ tax.gridlines(color='grey', multiple=scale/5., linewidth=1.0, alpha=0.4, ls='--')
+ tax.gridlines(color='grey', multiple=scale/10., linewidth=0.5, alpha=0.4, ls=':')
+
+ # Set Axis labels and Title
+ fontsize = 23
+ tax.left_axis_label(r"$f_{\tau}$", fontsize=fontsize+8, offset=0.2, rotation=0)
+ tax.right_axis_label(r"$f_{\mu}$", fontsize=fontsize+8, offset=0.2, rotation=0)
+ tax.bottom_axis_label(r"$f_{e}$", fontsize=fontsize+8, position=(0.55, -0.20/2, 0.5), rotation=0)
+
+ # Remove default Matplotlib axis
+ tax.get_axes().axis('off')
+ tax.clear_matplotlib_ticks()
+
+ # Set ticks
+ ticks = np.linspace(0, 1, 6)
+ tax.ticks(ticks=ticks, locations=ticks*scale, axis='blr', linewidth=1,
+ offset=0.03, fontsize=fontsize, tick_formats='%.1f')
+
+ tax._redraw_labels()
+
+ return tax
+
def triangle_project(frs, llh, outfile, outformat, args, llh_paramset, fig_text):
print 'Making triangle projection'
fontsize = 23
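
Note: get_tax collects the ternary-axis setup (boundary, gridlines, axis labels, ticks) that was previously inlined in triangle_project, so any plot can build the same styled triangle from a plain matplotlib Axes. A minimal usage sketch, assuming the python-ternary package and the Agg backend used elsewhere in this module:

    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt

    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(111)
    tax = get_tax(ax, scale=1)    # boundary, gridlines, axis labels and ticks
    tax.scatter([(0.3, 0.3, 0.4)], marker='o', s=5, color='blue')
    fig.savefig('triangle_sketch.png', bbox_inches='tight', dpi=150)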
@@ -976,8 +1021,8 @@ def triangle_project(frs, llh, outfile, outformat, args, llh_paramset, fig_text)
mean = np.mean(llh)
sig = np.std(llh)
- min_scale = np.min(llh)
- max_scale = np.max(mean+sig)
+ max_scale = np.max(llh)
+ min_scale = np.min(mean-sig)
norm = mpl.colors.Normalize(vmin=min_scale, vmax=max_scale)
colour = map(alp, map(cmap, map(norm, llh)))
@@ -990,26 +1035,7 @@ def triangle_project(frs, llh, outfile, outformat, args, llh_paramset, fig_text)
gs.update(hspace=0.3, wspace=0.15)
ax = fig.add_subplot(gs[0])
- ax.set_aspect('equal')
-
- # Boundary and Gridlines
- scale = 1
- fig, tax = ternary.figure(ax=ax, scale=scale)
-
- # Draw Boundary and Gridlines
- tax.boundary(linewidth=2.0)
- tax.gridlines(color='grey', multiple=scale/5., linewidth=1.0, alpha=0.4, ls='--')
- tax.gridlines(color='grey', multiple=scale/10., linewidth=0.5, alpha=0.4, ls=':')
-
- # Set Axis labels and Title
- fontsize = 23
- tax.left_axis_label(r"$f_{\tau}$", fontsize=fontsize+8, offset=0.2, rotation=0)
- tax.right_axis_label(r"$f_{\mu}$", fontsize=fontsize+8, offset=0.2, rotation=0)
- tax.bottom_axis_label(r"$f_{e}$", fontsize=fontsize+8, position=(0.55, -0.20/2, 0.5), rotation=0)
-
- # Remove default Matplotlib axis
- tax.get_axes().axis('off')
- tax.clear_matplotlib_ticks()
+ tax = get_tax(ax, scale=1)
# Plot
tax.scatter(frs, marker='o', s=0.1, color=colour)
@@ -1034,13 +1060,92 @@ def triangle_project(frs, llh, outfile, outformat, args, llh_paramset, fig_text)
cb.set_label(r'$LLH$', fontsize=fontsize+5, labelpad=20,
horizontalalignment='center')
- # Set ticks
- tax.ticks(axis='blr', multiple=scale/5., linewidth=1, offset=0.03,
- fontsize=fontsize, tick_formats='%.1f')
-
- tax._redraw_labels()
-
misc_utils.make_dir(outfile)
for of in outformat:
print 'Saving plot as {0}'.format(outfile+'.'+of)
fig.savefig(outfile+'.'+of, bbox_inches='tight', dpi=150)
+
+
+def heatmap(data, scale, vmin=None, vmax=None, style='triangular'):
+ for k, v in data.items():
+ data[k] = np.array(v)
+ style = style.lower()[0]
+ if style not in ["t", "h", 'd']:
+ raise ValueError("Heatmap style must be 'triangular', 'dual-triangular', or 'hexagonal'")
+
+ vertices_values = polygon_generator(data, scale, style)
+
+ vertices = []
+ for v, value in vertices_values:
+ vertices.append(v)
+ return vertices
+
+
+def flavour_contour(frs, ax, nbins, coverage, linewidth=2, color='black'):
+ """Plot the flavour contour for a specified coverage."""
+ # Histogram in flavour space
+ H, b = np.histogramdd(
+ (frs[:,0], frs[:,1], frs[:,2]),
+ bins=(nbins+1, nbins+1, nbins+1), range=((0, 1), (0, 1), (0, 1))
+ )
+ H = H / np.sum(H)
+
+ # 3D smoothing
+ smoothing = 0.05
+ H_s = gaussian_filter(H, sigma=smoothing)
+
+ # Finding coverage
+ H_r = np.ravel(H_s)
+ H_rs = np.argsort(H_r)[::-1]
+ H_crs = np.cumsum(H_r[H_rs])
+ thres = np.searchsorted(H_crs, coverage/100.)
+ mask_r = np.zeros(H_r.shape)
+ mask_r[H_rs[:thres]] = 1
+ mask = mask_r.reshape(H_s.shape)
+
+ # Get vertices inside covered region
+ binx = np.linspace(0, 1, nbins+1)
+ interp_dict = {}
+ for i in xrange(len(binx)):
+ for j in xrange(len(binx)):
+ for k in xrange(len(binx)):
+ if mask[i][j][k] == 1:
+ interp_dict[(i, j, k)] = H_s[i, j, k]
+ vertices = np.array(heatmap(interp_dict, nbins))
+ points = vertices.reshape((len(vertices)*3, 2))
+
+ # Convex full to find points forming exterior bound
+ pc = geometry.MultiPoint(points)
+ polygon = pc.convex_hull
+ ex_cor = np.array(list(polygon.exterior.coords))
+
+ # Join points with a spline
+ tck, u = splprep([ex_cor.T[0], ex_cor.T[1]], s=0., per=1, k=1)
+ xi, yi = map(np.array, splev(np.linspace(0, 1, 300), tck))
+
+ # Spline again to smooth
+ tck, u = splprep([xi, yi], s=0.4, per=1, k=3)
+ xi, yi = map(np.array, splev(np.linspace(0, 1, 300), tck))
+ ev_polygon = np.dstack((xi, yi))[0]
+
+ def project_toflavour(p):
+ """Convert from cartesian to flavour space."""
+ x, y = p
+ b = y / (np.sqrt(3)/2.)
+ a = x - b/2.
+ return [a, b, nbins-a-b]
+
+ # Remove points interpolated outside flavour triangle
+ f_ev_polygon = np.array(map(project_toflavour, ev_polygon))
+ xf, yf, zf = f_ev_polygon.T
+ mask = np.array((xf < 0) | (yf < 0) | (zf < 0) | (xf > nbins) |
+ (yf > nbins) | (zf > nbins))
+ ev_polygon = np.dstack((xi[~mask], yi[~mask]))[0]
+
+ # Plot
+ ax.plot(
+ ev_polygon.T[0], ev_polygon.T[1], linewidth=linewidth, color=color,
+ zorder=2, alpha=0.6, label=r'{0}\%'.format(int(coverage))
+ )
+ ax.scatter(points.T[0], points.T[1], marker='o', s=2, alpha=1, color=color,
+ zorder=3)
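
Note: flavour_contour builds a highest-density credible region from flavour-ratio samples: it histograms them on the simplex, smooths with a Gaussian filter, accumulates the sorted bin contents until the requested coverage is reached, and traces the convex hull of the covered bins with a periodic spline. A minimal usage sketch with random Dirichlet samples standing in for the posterior chain (assumes the module's Python 2 environment plus shapely and python-ternary, and that the ternary axis is drawn at scale=nbins to match the contour coordinates):

    import numpy as np
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt

    # Dirichlet samples lie on the unit simplex, like the flavour-ratio chains
    frs = np.random.dirichlet((10, 5, 5), size=20000)

    nbins = 25
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(111)
    tax = get_tax(ax, scale=nbins)
    for cov, col in [(90, 'red'), (68, 'blue')]:
        flavour_contour(frs, ax, nbins, cov, color=col)
    ax.legend(fontsize=16)
    fig.savefig('flavour_contours.png', bbox_inches='tight', dpi=150)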