PyGRB post-processing (#3972)
* Placing GRB trigger information at the start of the results webpage and fixing imports in PyGRB post-processing executables

* Cleaning up old PyGRB results webpage material

* Turning slide_dict into an actual dictionary and using its dictionary powers (see the sketch after the file summary below)

* Reinserting delete k...
pannarale authored Apr 22, 2022
1 parent 7ea5d3a commit d0db604
Showing 17 changed files with 142 additions and 1,257 deletions.
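
The central change turns slide_dict from an integer count into a real dictionary keyed by slide ID, so the executables iterate over its keys instead of over range(num_slides). A minimal sketch of the pattern, with hypothetical toy data standing in for the structures that pygrb_postprocessing_utils actually builds:

```python
# Hypothetical stand-ins: slide_dict maps slide IDs to per-IFO time shifts,
# trial_dict maps the same IDs to lists of (start, end) trial segments.
slide_dict = {0: {'H1': 0.0, 'L1': 0.0}, 1: {'H1': 0.0, 'L1': 8.0}}
trial_dict = {0: [(100, 101), (101, 102)], 1: [(100, 101)]}

# Before: sum(len(trial_dict[i]) for i in range(num_slides))
# After: iterate over the dictionary's own keys, so slide IDs no longer
# have to be the contiguous integers 0 .. num_slides - 1.
total_trials = sum(len(trial_dict[slide_id]) for slide_id in slide_dict)
print(total_trials)  # 3
```
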
476 changes: 0 additions & 476 deletions bin/pygrb/pycbc_make_grb_summary_page

This file was deleted.

27 changes: 12 additions & 15 deletions bin/pygrb/pycbc_pygrb_efficiency
@@ -177,29 +177,25 @@ slide_dict = ppu.load_time_slides(trig_file)
 logging.info("Loading segments.")
 segment_dict = ppu.load_segment_dict(trig_file)
 
-# Identify the zero-lag slide and the number of slides
-zero_lag_slide_id = ppu.find_zero_lag_slide_id(slide_dict)
-num_slides = len(slide_dict)
-
 # Construct trials
 logging.info("Constructing trials.")
 trial_dict = ppu.construct_trials(opts.seg_files, segment_dict,
                                   ifos, slide_dict, vetoes)
-total_trials = sum([len(trial_dict[slide_id]) for slide_id in range(num_slides)])
+total_trials = sum([len(trial_dict[slide_id]) for slide_id in slide_dict])
 logging.info("%d trials generated.", total_trials)
 
 # Extract basic trigger properties and store as dictionaries
 trig_time, trig_snr, trig_bestnr = \
-    ppu.extract_basic_trig_properties(trial_dict, trigs, num_slides, segment_dict, opts)
+    ppu.extract_basic_trig_properties(trial_dict, trigs, slide_dict, segment_dict, opts)
 
 # Calculate BestNR values and maximum
 time_veto_max_bestnr = {}
 
-for slide_id in range(num_slides):
+for slide_id in slide_dict:
     num_slide_segs = len(trial_dict[slide_id])
     time_veto_max_bestnr[slide_id] = np.zeros(num_slide_segs)
 
-for slide_id in range(num_slides):
+for slide_id in slide_dict:
     for j, trial in enumerate(trial_dict[slide_id]):
         trial_cut = (trial[0] <= trig_time[slide_id])\
                     & (trig_time[slide_id] < trial[1])
@@ -213,8 +209,8 @@ logging.info("SNR and bestNR maxima calculated.")
 
 # Output details of loudest offsouce triggers
 offsource_trigs = []
-sorted_trigs = ppu.sort_trigs(trial_dict, trigs, num_slides, segment_dict)
-for slide_id in range(num_slides):
+sorted_trigs = ppu.sort_trigs(trial_dict, trigs, slide_dict, segment_dict)
+for slide_id in slide_dict:
     offsource_trigs.extend(zip(trig_bestnr[slide_id], sorted_trigs[slide_id]))
 offsource_trigs.sort(key=lambda element: element[0])
 offsource_trigs.reverse()
@@ -226,7 +222,7 @@ offsource_trigs.reverse()
 # and loudest SNR, so could just add this to the above's caption.
 # ==========================
 max_bestnr, _, full_time_veto_max_bestnr =\
-    ppu.max_median_stat(num_slides, time_veto_max_bestnr, trig_bestnr, total_trials)
+    ppu.max_median_stat(slide_dict, time_veto_max_bestnr, trig_bestnr, total_trials)
 
 
 # =======================
@@ -272,7 +268,7 @@ if onsource_file:
     trig = loud_on_bestnr_trigs
     num_trials_louder = 0
    tot_off_snr = np.array([])
-    for slide_id in range(num_slides):
+    for slide_id in slide_dict:
        num_trials_louder += sum(time_veto_max_bestnr[slide_id] > \
                                 loud_on_bestnr)
        tot_off_snr = np.concatenate([tot_off_snr,\
@@ -282,7 +278,7 @@ if onsource_file:
 
 else:
     tot_off_snr = np.array([])
-    for slide_id in range(num_slides):
+    for slide_id in slide_dict:
        tot_off_snr = np.concatenate([tot_off_snr,\
                                      time_veto_max_bestnr[slide_id]])
     med_snr = np.median(tot_off_snr)
@@ -479,8 +475,9 @@ if do_injections:
         near_test = np.zeros((found_excl).sum()).astype(bool)
         for j, (t, bestnr) in enumerate(zip(found_inj['time'][found_excl],\
                                             found_trig['bestnr'][found_excl])):
-            near_bestnr = trig_bestnr[zero_lag_slide_id]\
-                          [np.abs(trig_time[zero_lag_slide_id]-t) < cluster_window]
+            # 0 is the zero-lag timeslide
+            near_bestnr = trig_bestnr[0]\
+                          [np.abs(trig_time[0]-t) < cluster_window]
             near_test[j] = ~((near_bestnr * glitch_check_fac > bestnr).any())
         # Apply the local test
         c = 0
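
In the on-source branch above, an empirical false-alarm probability comes from counting off-source trials whose time-vetoed maximum BestNR beats the loudest on-source value. A hedged sketch of that estimate with toy numbers (the real arrays come from the trigger file):

```python
import numpy as np

# Toy per-slide maxima of BestNR over off-source trials (hypothetical values).
time_veto_max_bestnr = {0: np.array([4.1, 5.0]), 1: np.array([4.7])}
loud_on_bestnr = 4.8   # loudest on-source BestNR (hypothetical)
total_trials = 3

num_trials_louder = 0
for slide_id in time_veto_max_bestnr:
    num_trials_louder += sum(time_veto_max_bestnr[slide_id] > loud_on_bestnr)

# Empirical p-value: fraction of background trials louder than the on-source
# candidate (one trial, BestNR 5.0, is louder here).
fap = num_trials_louder / total_trials
print(fap)  # 0.333...
```
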
44 changes: 20 additions & 24 deletions bin/pygrb/pycbc_pygrb_grb_info_table
@@ -22,14 +22,12 @@
 # Preamble
 # =============================================================================
 import sys
-import logging
 import argparse
-from datetime import datetime
-import numpy
-import lal
 import pycbc.version
 import pycbc.results
+import lal
+import numpy
+from datetime import datetime
+from pycbc import init_logging
 from pycbc.detector import Detector
 from pycbc.results.pygrb_postprocessing_utils import get_antenna_dist_factor
 
@@ -43,68 +41,66 @@ __program__ = "pycbc_pygrb_grb_info_table"
 # =============================================================================
 parser = argparse.ArgumentParser(description=__doc__, formatter_class=
                                  argparse.ArgumentDefaultsHelpFormatter)
 
 parser.add_argument("--version", action="version", version=__version__)
 
 parser.add_argument("-v", "--verbose", default=False, action="store_true",
                     help="verbose output")
 
 parser.add_argument("--trigger-time", action="store",
                     default=None, required=True,
                     help="GPS time of the GRB.")
 
 parser.add_argument("--ra", action="store",
                     default=None, required=True,
                     help="Right ascention (degrees) of the GRB.")
 
 parser.add_argument("--dec", action="store",
                     default=None, required=True,
                     help="Declination (degrees) of the GRB.")
 
 parser.add_argument("--sky-error", action="store",
                     default=0, required=False,
                     help="Sky-localisation error (degrees) of the GRB.")
 parser.add_argument("--ifos", action="store", nargs='+',
                     default=None, required=True,
                     help="List containing the active IFOs.")
 
 parser.add_argument("--output-file", action="store",
                     default=None, required=True,
                     help="The output file to write tha table to.")
 
 opts = parser.parse_args()
 
 init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s")
 
 headers = []
 data = [[]]
 
 data[0].append(str(opts.trigger_time))
-headers.append('GPS')
+headers.append('GPS Time')
 
 utc_time = datetime(*lal.GPSToUTC(int(opts.trigger_time))[0:6]).strftime("%B %d %Y, %H:%M:%S UTC")
 
 data[0].append(utc_time)
-headers.append('Date')
+headers.append('Coordinated Universal Time')
 
 data[0].append(str(opts.ra))
-headers.append('RA')
+headers.append('R.A.')
 
 data[0].append(str(opts.dec))
-headers.append('DEC')
+headers.append('Dec')
 
 data[0].append(str(opts.sky_error))
 headers.append('Sky Error')
 
 data[0].append(''.join(opts.ifos))
 headers.append('IFOs')
 
 for ifo in opts.ifos:
     antenna = Detector(ifo)
-    factor = get_antenna_dist_factor(antenna, float(opts.ra) / 180. * numpy.pi, float(opts.dec) / 180. * numpy.pi, float(opts.trigger_time))
+    factor = get_antenna_dist_factor(antenna,
+                                     numpy.deg2rad(float(opts.ra)),
+                                     numpy.deg2rad(float(opts.dec)),
+                                     float(opts.trigger_time))
     data[0].append('%.3f' % factor)
     headers.append(ifo + ' Antenna Factor')
 
 html = pycbc.results.dq.redirect_javascript + \
        str(pycbc.results.static_table(data, headers))
 
 title = 'GRB Summary Information'
-caption = 'Parameters of the GRB. The reported antenna factors are the dist / eff distance'
-caption += ' as defined by (4.3) in https://arxiv.org/abs/0705.1514.'
+caption = 'Parameters of the GRB. The reported antenna factors are the '
+caption += 'dist / eff distance as defined by (4.3) in '
+caption += 'https://arxiv.org/abs/0705.1514.'
 
 pycbc.results.save_fig_with_metadata(html, opts.output_file, {},
                                      cmd = ' '.join(sys.argv),
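
The antenna-factor call now uses numpy.deg2rad in place of the hand-rolled x / 180. * numpy.pi conversion, and the overlong line is wrapped. The two conversions are numerically equivalent, as this small check illustrates:

```python
import numpy

ra_deg, dec_deg = 123.456, -45.0  # arbitrary example coordinates
assert numpy.isclose(numpy.deg2rad(ra_deg), ra_deg / 180. * numpy.pi)
assert numpy.isclose(numpy.deg2rad(dec_deg), dec_deg / 180. * numpy.pi)
```
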
31 changes: 14 additions & 17 deletions bin/pygrb/pycbc_pygrb_page_tables
@@ -323,30 +323,26 @@ slide_dict = ppu.load_time_slides(trig_file)
 logging.info("Loading segments.")
 segment_dict = ppu.load_segment_dict(trig_file)
 
-# Identify the zero-lag slide and the number of slides
-zero_lag_slide_id = ppu.find_zero_lag_slide_id(slide_dict)
-num_slides = len(slide_dict)
-
 # Construct trials
 logging.info("Constructing trials.")
 trial_dict = ppu.construct_trials(opts.seg_files, segment_dict,
                                   ifos, slide_dict, vetoes)
-total_trials = sum([len(trial_dict[slide_id]) for slide_id in range(num_slides)])
+total_trials = sum([len(trial_dict[slide_id]) for slide_id in slide_dict])
 logging.info("%d trials generated.", total_trials)
 
 # Extract basic trigger properties and store as dictionaries
 trig_time, trig_snr, trig_bestnr = \
-    ppu.extract_basic_trig_properties(trial_dict, trigs, num_slides, segment_dict, opts)
+    ppu.extract_basic_trig_properties(trial_dict, trigs, slide_dict, segment_dict, opts)
 
 # Calculate SNR and BestNR values and maxima
 time_veto_max_snr = {}
 time_veto_max_bestnr = {}
-for slide_id in range(num_slides):
+for slide_id in slide_dict:
     num_slide_segs = len(trial_dict[slide_id])
     time_veto_max_snr[slide_id] = np.zeros(num_slide_segs)
     time_veto_max_bestnr[slide_id] = np.zeros(num_slide_segs)
 
-for slide_id in range(num_slides):
+for slide_id in slide_dict:
     for j, trial in enumerate(trial_dict[slide_id]):
         trial_cut = (trial[0] <= trig_time[slide_id])\
                     & (trig_time[slide_id] < trial[1])
@@ -367,16 +363,16 @@ logging.info("SNR and bestNR maxima calculated.")
 
 # Output details of loudest offsouce triggers, sorted by BestNR
 offsource_trigs = []
-sorted_trigs = ppu.sort_trigs(trial_dict, trigs, num_slides, segment_dict)
-for slide_id in range(num_slides):
+sorted_trigs = ppu.sort_trigs(trial_dict, trigs, slide_dict, segment_dict)
+for slide_id in slide_dict:
     offsource_trigs.extend(zip(trig_bestnr[slide_id], sorted_trigs[slide_id]))
 offsource_trigs.sort(key=lambda element: element[0])
 offsource_trigs.reverse()
 
 # Median and max values of SNR and BestNR
-_, median_snr, _ = ppu.max_median_stat(num_slides, time_veto_max_snr, trig_snr, total_trials)
+_, median_snr, _ = ppu.max_median_stat(slide_dict, time_veto_max_snr, trig_snr, total_trials)
 max_bestnr, median_bestnr, full_time_veto_max_bestnr =\
-    ppu.max_median_stat(num_slides, time_veto_max_bestnr, trig_bestnr, total_trials)
+    ppu.max_median_stat(slide_dict, time_veto_max_bestnr, trig_bestnr, total_trials)
 
 if lofft_outfile or lofft_h5_outfile:
     # td: table data
@@ -403,7 +399,7 @@ if lofft_outfile or lofft_h5_outfile:
 
     # Get FAP of trigger
     num_trials_louder = 0
-    for slide_id in range(num_slides):
+    for slide_id in slide_dict:
         for val in time_veto_max_bestnr[slide_id]:
             if val > bestnr:
                 num_trials_louder += 1
@@ -525,7 +521,7 @@ if onsource_file:
     trig = loud_on_bestnr_trigs
     num_trials_louder = 0
     tot_off_snr = np.array([])
-    for slide_id in range(num_slides):
+    for slide_id in slide_dict:
        num_trials_louder += sum(time_veto_max_bestnr[slide_id] > \
                                 loud_on_bestnr)
        tot_off_snr = np.concatenate([tot_off_snr,\
@@ -580,7 +576,7 @@ if onsource_file:
     pycbc.results.save_fig_with_metadata(str(html_table), lont_outfile, **kwds)
 else:
     tot_off_snr = np.array([])
-    for slide_id in range(num_slides):
+    for slide_id in slide_dict:
        tot_off_snr = np.concatenate([tot_off_snr,\
                                      time_veto_max_bestnr[slide_id]])
     med_snr = np.median(tot_off_snr)
@@ -713,8 +709,9 @@ if do_injections:
         near_test = np.zeros((found_excl).sum()).astype(bool)
         for j, (t, bestnr) in enumerate(zip(found_injs['time'][found_excl],\
                                             found_injs['bestnr'][found_excl])):
-            near_bestnr = trig_bestnr[zero_lag_slide_id]\
-                          [np.abs(trig_time[zero_lag_slide_id]-t) < cluster_window]
+            # 0 is the zero-lag timeslide
+            near_bestnr = trig_bestnr[0]\
+                          [np.abs(trig_time[0]-t) < cluster_window]
             near_test[j] = ~((near_bestnr * glitch_check_fac > bestnr).any())
         # Apply the local test
         c = 0
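
Both this executable and pycbc_pygrb_efficiency now hard-code slide ID 0 as the zero-lag timeslide when vetting found injections: an injection only counts as cleanly found if no nearby zero-lag trigger is loud enough to have caused it. A sketch of that proximity veto under the same assumption, with toy arrays (cluster_window and glitch_check_fac are tunables in the executables):

```python
import numpy as np

# Hypothetical zero-lag (slide ID 0) trigger times and BestNR values.
trig_time = {0: np.array([1000.0, 1005.0, 1100.0])}
trig_bestnr = {0: np.array([6.0, 9.5, 5.0])}

inj_time, inj_bestnr = 1004.0, 4.0  # one found injection (toy values)
cluster_window = 10.0               # seconds around the injection time
glitch_check_fac = 0.5              # down-weighting of nearby triggers

# 0 is the zero-lag timeslide: pick out triggers close to the injection.
near_bestnr = trig_bestnr[0][np.abs(trig_time[0] - inj_time) < cluster_window]
# Veto the injection if any nearby trigger, scaled by the glitch factor,
# is louder than the injection itself.
near_test = ~((near_bestnr * glitch_check_fac > inj_bestnr).any())
print(near_test)  # False: the t=1005 trigger (9.5 * 0.5 = 4.75) wins
```
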
5 changes: 2 additions & 3 deletions bin/pygrb/pycbc_pygrb_plot_chisq_veto
@@ -25,16 +25,15 @@ Produces signal consistency plots of the form standard/bank/auto chi-square vs c
 # Preamble
 # =============================================================================
 import sys
-import glob
 import os
 import logging
 import numpy
 from matplotlib import pyplot as plt
 from matplotlib import rc
 import pycbc.version
 from pycbc import init_logging
-from pycbc.results import  pygrb_postprocessing_utils as ppu
-from pycbc.results import  pygrb_plotting_utils as plu
+from pycbc.results import pygrb_postprocessing_utils as ppu
+from pycbc.results import pygrb_plotting_utils as plu
 
 plt.switch_backend('Agg')
 rc('font', size=14)
1 change: 0 additions & 1 deletion bin/pygrb/pycbc_pygrb_plot_coh_ifosnr
@@ -25,7 +25,6 @@ Plot single IFO SNR vs coherent SNR for a PyGRB run.
 # Preamble
 # =============================================================================
 import sys
-import glob
 import os
 import logging
 import collections
15 changes: 6 additions & 9 deletions bin/pygrb/pycbc_pygrb_plot_injs_results
@@ -33,7 +33,7 @@ import numpy as np
 import pycbc.version
 from pycbc import init_logging
 from pycbc.results import save_fig_with_metadata
-from pycbc.results import  pygrb_postprocessing_utils as ppu
+from pycbc.results import pygrb_postprocessing_utils as ppu
 try:
     from glue.ligolw import lsctables
 except ImportError:
@@ -292,27 +292,24 @@ slide_dict = ppu.load_time_slides(trig_file)
 logging.info("Loading segments.")
 segment_dict = ppu.load_segment_dict(trig_file)
 
-# Identify the number of slides
-num_slides = len(slide_dict)
-
 # Construct trials
 logging.info("Constructing trials.")
 trial_dict = ppu.construct_trials(opts.seg_files, segment_dict,
                                   ifos, slide_dict, vetoes)
-total_trials = sum([len(trial_dict[slide_id]) for slide_id in range(num_slides)])
+total_trials = sum([len(trial_dict[slide_id]) for slide_id in slide_dict])
 logging.info("%d trials generated.", total_trials)
 
 # Extract basic trigger properties and store as dictionaries
 trig_time, _, trig_bestnr = \
-    ppu.extract_basic_trig_properties(trial_dict, trigs, num_slides, segment_dict, opts)
+    ppu.extract_basic_trig_properties(trial_dict, trigs, slide_dict, segment_dict, opts)
 
 # Calculate SNR and BestNR values and maxima
 time_veto_max_bestnr = {}
-for slide_id in range(num_slides):
+for slide_id in slide_dict:
     num_slide_segs = len(trial_dict[slide_id])
     time_veto_max_bestnr[slide_id] = np.zeros(num_slide_segs)
 
-for slide_id in range(num_slides):
+for slide_id in slide_dict:
     for j, trial in enumerate(trial_dict[slide_id]):
         trial_cut = (trial[0] <= trig_time[slide_id])\
                     & (trig_time[slide_id] < trial[1])
@@ -322,7 +319,7 @@ for slide_id in range(num_slides):
         time_veto_max_bestnr[slide_id][j] = \
             max(trig_bestnr[slide_id][trial_cut])
 
-max_bestnr, _, full_time_veto_max_bestnr = ppu.max_median_stat(num_slides,
+max_bestnr, _, full_time_veto_max_bestnr = ppu.max_median_stat(slide_dict,
                                                                time_veto_max_bestnr,
                                                                trig_bestnr,
                                                                total_trials)
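
ppu.max_median_stat now receives the slide dictionary as its first argument in all three executables that call it. Its implementation lives in pycbc.results.pygrb_postprocessing_utils; from the call sites one can infer it returns the maximum, the median, and the full concatenated array of the per-trial maxima. A rough stand-in consistent with that usage (an assumption for illustration, not PyCBC's actual code):

```python
import numpy as np

def max_median_stat_sketch(slide_dict, time_veto_max_stat, trig_stat,
                           total_trials):
    """Hypothetical sketch: gather the per-slide arrays of per-trial maxima,
    then report their max, their median, and the array itself.
    trig_stat is accepted for signature compatibility but unused here."""
    full = np.concatenate([time_veto_max_stat[sid] for sid in slide_dict])
    assert len(full) == total_trials  # one maximum per trial (assumption)
    return full.max(), np.median(full), full
```
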
5 changes: 2 additions & 3 deletions bin/pygrb/pycbc_pygrb_plot_null_stats
@@ -25,16 +25,15 @@ Plot null statistic or coincident SNR vs coherent SNR for a PyGRB run.
 # Preamble
 # =============================================================================
 import sys
-import glob
 import os
 import logging
 import numpy
 from matplotlib import rc
 import matplotlib.pyplot as plt
 import pycbc.version
 from pycbc import init_logging
-from pycbc.results import  pygrb_postprocessing_utils as ppu
-from pycbc.results import  pygrb_plotting_utils as plu
+from pycbc.results import pygrb_postprocessing_utils as ppu
+from pycbc.results import pygrb_plotting_utils as plu
 
 plt.switch_backend('Agg')
 
6 changes: 2 additions & 4 deletions bin/pygrb/pycbc_pygrb_plot_skygrid
@@ -23,17 +23,15 @@
 # =============================================================================
 
 import sys
-import glob
 import os
-import copy
 import logging
 import numpy
+from matplotlib import pyplot as plt
+from matplotlib import rc
 import pycbc.version
 from pycbc import init_logging
 from pycbc.results import save_fig_with_metadata
 from pycbc.results import pygrb_postprocessing_utils as ppu
-from matplotlib import pyplot as plt
-from matplotlib import rc
 
 plt.switch_backend('Agg')
 rc('font', size=14)