From 98377f3310215161d846c9a4b86588eabafe7bfc Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Tue, 5 Dec 2023 12:14:46 -0800 Subject: [PATCH 01/15] Have pycbc live mark flagged triggers instead of removing --- bin/pycbc_live | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index be649e6ae0e..ae278abc765 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -1310,18 +1310,26 @@ with ctx: for key in results[ifo]: if len(results[ifo][key]): results[ifo][key] = results[ifo][key][idx] - if data_reader[ifo].idq is not None: - logging.info("Checking %s's iDQ information", ifo) + if data_reader[ifo].idq: + logging.info("Reading %s's iDQ information", ifo) start = data_reader[ifo].start_time times = results[ifo]['end_time'] - idx = data_reader[ifo].idq.indices_of_flag( + unflagged_idx = data_reader[ifo].idq.indices_of_flag( start, valid_pad, times, padding=data_reader[ifo].dq_padding) - logging.info('Keeping %d/%d %s triggers after iDQ', - len(idx), len(times), ifo) - for key in results[ifo]: - if len(results[ifo][key]): - results[ifo][key] = results[ifo][key][idx] + + if args.idq_reweighting: + trig_flagged = numpy.ones_like(times, dtype=int) + trig_flagged[unflagged_idx] = 0 + results[ifo]['dq_flag'] = trig_flagged + else: + # use idq as a veto + logging.info('Keeping %d/%d %s triggers after iDQ', + len(unflagged_idx), len(times), ifo) + for key in results[ifo]: + if len(results[ifo][key]): + results[ifo][key] = \ + results[ifo][key][unflagged_idx] # Calculate and add the psd variation for the results if args.psd_variation: From c2e1af7ec066a9e649847778bbb20a2a84d46f0c Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Thu, 4 Apr 2024 14:54:59 -0700 Subject: [PATCH 02/15] Make stat usable in low-latency --- bin/pycbc_live | 11 ++++--- pycbc/events/stat.py | 71 ++++++++++++++++++++++++++++++++++---------- pycbc/frame/frame.py | 20 ++++++------- 3 files changed, 70 insertions(+), 32 deletions(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index ae278abc765..fc11cc9a0d2 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -1314,22 +1314,21 @@ with ctx: logging.info("Reading %s's iDQ information", ifo) start = data_reader[ifo].start_time times = results[ifo]['end_time'] - unflagged_idx = data_reader[ifo].idq.indices_of_flag( + flag_active = data_reader[ifo].idq.flag_at_times( start, valid_pad, times, padding=data_reader[ifo].dq_padding) if args.idq_reweighting: - trig_flagged = numpy.ones_like(times, dtype=int) - trig_flagged[unflagged_idx] = 0 - results[ifo]['dq_flag'] = trig_flagged + results[ifo]['dq_state'] = flag_active.astype(int) else: # use idq as a veto + keep = numpy.logical_not(flag_active) logging.info('Keeping %d/%d %s triggers after iDQ', - len(unflagged_idx), len(times), ifo) + numpy.sum(keep), len(times), ifo) for key in results[ifo]: if len(results[ifo][key]): results[ifo][key] = \ - results[ifo][key][unflagged_idx] + results[ifo][key][keep] # Calculate and add the psd variation for the results if args.psd_variation: diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index f61e7c55b66..c529cd99b84 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2271,14 +2271,42 @@ def __init__(self, sngl_ranking, files=None, ifos=None, ifos=ifos, **kwargs) self.dq_rates_by_state = {} self.dq_bin_by_tid = {} - self.dq_state_segments = {} + self.dq_state_segments = None + self.low_latency = False for ifo in self.ifos: key = f'{ifo}-dq_stat_info' if key in self.files.keys(): self.dq_rates_by_state[ifo] = 
self.assign_dq_rates(key) self.dq_bin_by_tid[ifo] = self.assign_template_bins(key) - self.dq_state_segments[ifo] = self.setup_segments(key) + self.check_low_latency(key) + if not self.low_latency: + if self.dq_state_segments is None: + self.dq_state_segments = {} + self.dq_state_segments[ifo] = self.setup_segments(key) + + def check_low_latency(self, key): + """ + Check if the statistic file indicates low latency mode. + Parameters + ---------- + key: str + Statistic file key string. + Returns + ------- + None + """ + ifo = key.split('-')[0] + with h5py.File(self.files[key], 'r') as dq_file: + ifo_grp = dq_file[ifo] + if 'dq_segments' in ifo_grp.keys(): + # if segs are in stat file, we are not in LL + assert not self.low_latency, 'Should not have segments in LL' + else: + # we must be in LL, shouldn't have segments + assert not self.dq_state_segments, \ + 'Should not have segments in LL' + self.low_latency = True def assign_template_bins(self, key): """ @@ -2337,9 +2365,7 @@ def assign_dq_rates(self, key): def setup_segments(self, key): """ - Check if segments definitions are in stat file - If they are, we are running offline and need to store them - If they aren't, we are running online + Store segments from stat file """ ifo = key.split('-')[0] with h5py.File(self.files[key], 'r') as dq_file: @@ -2379,7 +2405,7 @@ def update_file(self, key): return True return False - def find_dq_noise_rate(self, trigs, dq_state): + def find_dq_noise_rate(self, trigs): """Get dq values for a specific ifo and dq states""" try: @@ -2395,6 +2421,7 @@ def find_dq_noise_rate(self, trigs, dq_state): # Should be exactly one ifo provided ifo = ifo[0] + dq_state = trigs['dq_state'] dq_val = numpy.zeros(len(dq_state)) if ifo in self.dq_rates_by_state: @@ -2437,23 +2464,35 @@ def lognoiserate(self, trigs): Array of log noise rate density for each input trigger. 
""" + dq_rate = self.find_dq_noise_rate(trigs) + dq_rate = numpy.maximum(dq_rate, 1) + + logr_n = ExpFitFgBgNormStatistic.lognoiserate( + self, trigs) + logr_n += numpy.log(dq_rate) + return logr_n + + def single(self, trigs): # make sure every trig has a dq state try: + # works in offline ifo = trigs.ifo except AttributeError: - ifo = trigs['ifo'] - assert len(numpy.unique(ifo)) == 1 + # works in low-latency + ifo = trigs['ifo'][0] # Should be exactly one ifo provided - ifo = ifo[0] + assert len(numpy.unique(trigs['ifo'])) == 1 - dq_state = self.find_dq_state_by_time(ifo, trigs['end_time'][:]) - dq_rate = self.find_dq_noise_rate(trigs, dq_state) - dq_rate = numpy.maximum(dq_rate, 1) + singles = ExpFitFgBgNormStatistic.single(self, trigs) - logr_n = ExpFitFgBgNormStatistic.lognoiserate( - self, trigs) - logr_n += numpy.log(dq_rate) - return logr_n + if self.low_latency: + # trigs should already have a dq state assigned + singles['dq_state'] = trigs['dq_state'][:] + else: + singles['dq_state'] = self.find_dq_state_by_time( + ifo, trigs['end_time'][:] + ) + return singles class DQExpFitFgBgKDEStatistic(DQExpFitFgBgNormStatistic): diff --git a/pycbc/frame/frame.py b/pycbc/frame/frame.py index a67a3d090d9..329abcc90bf 100644 --- a/pycbc/frame/frame.py +++ b/pycbc/frame/frame.py @@ -896,8 +896,8 @@ def __init__(self, frame_src, force_update_cache=force_update_cache, increment_update_cache=increment_update_cache) - def indices_of_flag(self, start_time, duration, times, padding=0): - """ Return the indices of the times lying in the flagged region + def flag_at_times(self, start_time, duration, times, padding=0): + """ Check whether the idq flag was on at given times Parameters ---------- @@ -906,16 +906,14 @@ def indices_of_flag(self, start_time, duration, times, padding=0): duration: int Number of seconds to check. padding: float - Number of seconds to add around flag inactive times to be considered - inactive as well. + Amount of time in seconds to flag around glitchy times Returns ------- - indices: numpy.ndarray - Array of indices marking the location of triggers within valid - time. 
+ flag_state: numpy.ndarray + Boolean array of whether flag was on at given times """ - from pycbc.events.veto import indices_outside_times + from pycbc.events.veto import indices_within_times sr = self.idq.raw_buffer.sample_rate s = int((start_time - self.idq.raw_buffer.start_time - padding) * sr) - 1 e = s + int((duration + padding) * sr) + 1 @@ -929,8 +927,10 @@ def indices_of_flag(self, start_time, duration, times, padding=0): glitch_times = stamps[glitch_idx] starts = glitch_times - padding ends = starts + 1.0 / sr + padding * 2.0 - idx = indices_outside_times(times, starts, ends) - return idx + idx = indices_within_times(times, starts, ends) + out = numpy.zeros(len(times), dtype=bool) + out[idx] = True + return out def advance(self, blocksize): """ Add blocksize seconds more to the buffer, push blocksize seconds From 9b2b67adffc09f9b2b75f822a1fbee3bf8180f20 Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Mon, 11 Dec 2023 09:25:22 -0800 Subject: [PATCH 03/15] Add command line argument for whether to use idq for reweighting --- bin/pycbc_live | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bin/pycbc_live b/bin/pycbc_live index fc11cc9a0d2..650c8665020 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -848,6 +848,8 @@ parser.add_argument('--idq-state-channel', action=MultiDetMultiColonOptionAction parser.add_argument('--idq-threshold', type=float, help='Threshold used to veto triggers at times of ' 'low iDQ False Alarm Probability') +parser.add_argument('--idq-reweighting', action='store_true',default=False, + help='Reweight triggers based on iDQ False Alarm Probability') parser.add_argument('--data-quality-channel', action=MultiDetMultiColonOptionAction, help="Channel containing data quality information. Used " From a3e9069ec6c9d4ca643b758fbbc7f151cc2f8678 Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Mon, 11 Dec 2023 13:29:44 -0800 Subject: [PATCH 04/15] Add logging for iDQ flagged triggers --- bin/pycbc_live | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bin/pycbc_live b/bin/pycbc_live index 650c8665020..b80b165cc4f 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -1321,6 +1321,8 @@ with ctx: padding=data_reader[ifo].dq_padding) if args.idq_reweighting: + logging.info('iDQ flagged %d/%d %s triggers', + numpy.sum(flag_active), len(times), ifo) results[ifo]['dq_state'] = flag_active.astype(int) else: # use idq as a veto From ff7b2d11c2d25446a3a7a53bc75778fbe37921ba Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Fri, 5 Apr 2024 09:26:44 -0700 Subject: [PATCH 05/15] Fix bug when using ifo with no dq --- pycbc/events/stat.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index c529cd99b84..0bad9d069e7 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2408,11 +2408,6 @@ def update_file(self, key): def find_dq_noise_rate(self, trigs): """Get dq values for a specific ifo and dq states""" - try: - tnum = trigs.template_num - except AttributeError: - tnum = trigs['template_id'] - try: ifo = trigs.ifo except AttributeError: @@ -2422,8 +2417,9 @@ def find_dq_noise_rate(self, trigs): ifo = ifo[0] dq_state = trigs['dq_state'] - dq_val = numpy.zeros(len(dq_state)) + dq_val = numpy.ones(len(dq_state)) + tnum = self.curr_tnum if ifo in self.dq_rates_by_state: for (i, st) in enumerate(dq_state): if isinstance(tnum, numpy.ndarray): From 533df3b7eb101541a8f297594c826e5d638316a3 Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Mon, 22 Jul 2024 13:53:03 -0700 Subject: [PATCH 06/15] Improve logic for getting 
ifo frm trigs --- pycbc/events/stat.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index 0bad9d069e7..7cd72f0df51 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2411,10 +2411,9 @@ def find_dq_noise_rate(self, trigs): try: ifo = trigs.ifo except AttributeError: - ifo = trigs['ifo'] - assert len(numpy.unique(ifo)) == 1 # Should be exactly one ifo provided - ifo = ifo[0] + assert len(numpy.unique(trigs['ifo'])) == 1 + ifo = trigs['ifo'][0] dq_state = trigs['dq_state'] dq_val = numpy.ones(len(dq_state)) @@ -2471,13 +2470,11 @@ def lognoiserate(self, trigs): def single(self, trigs): # make sure every trig has a dq state try: - # works in offline ifo = trigs.ifo except AttributeError: - # works in low-latency - ifo = trigs['ifo'][0] # Should be exactly one ifo provided assert len(numpy.unique(trigs['ifo'])) == 1 + ifo = trigs['ifo'][0] singles = ExpFitFgBgNormStatistic.single(self, trigs) From d14f729ce07633225370753f7cfd90a85074c052 Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Tue, 23 Jul 2024 09:50:38 -0700 Subject: [PATCH 07/15] Update for compatibility with Gareth's stat reloading code --- pycbc/events/stat.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index 7cd72f0df51..5a4fb096325 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2273,6 +2273,7 @@ def __init__(self, sngl_ranking, files=None, ifos=None, self.dq_bin_by_tid = {} self.dq_state_segments = None self.low_latency = False + self.single_dtype.append(('dq_state', int)) for ifo in self.ifos: key = f'{ifo}-dq_stat_info' @@ -2394,14 +2395,14 @@ def update_file(self, key): return True # We also need to check if the DQ files have updated if key.endswith('dq_stat_info'): + ifo = key.split('-')[0] logger.info( "Updating %s statistic %s file", - ''.join(self.ifos), + ifo, key ) - self.assign_dq_rates(key) - self.assign_template_bins(key) - self.setup_segments(key) + self.dq_rates_by_state[ifo] = self.assign_dq_rates(key) + self.dq_bin_by_tid[ifo] = self.assign_template_bins(key) return True return False From 6c9b7281bee345956b68fa5deee414c1b4d71974 Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Wed, 7 Aug 2024 12:54:54 -0700 Subject: [PATCH 08/15] Modify how trig ifo is gotten and add debug statements --- pycbc/events/stat.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index 5a4fb096325..ade7d0a62e9 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2411,10 +2411,16 @@ def find_dq_noise_rate(self, trigs): try: ifo = trigs.ifo + print('DEBUG in find_dq_noise_rate: trigs.ifo worked successfully') except AttributeError: - # Should be exactly one ifo provided - assert len(numpy.unique(trigs['ifo'])) == 1 - ifo = trigs['ifo'][0] + ifo = trigs.get('ifo', None) + if ifo is None: + print('DEBUG in find_dq_noise_rate: ifo is none after trigs.get') + print(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') + ifo = self.ifos[0] + else: + print('DEBUG in find_dq_noise_rate: trigs.get worked') + assert ifo in self.ifos dq_state = trigs['dq_state'] dq_val = numpy.ones(len(dq_state)) @@ -2472,10 +2478,16 @@ def single(self, trigs): # make sure every trig has a dq state try: ifo = trigs.ifo + print('DEBUG in single: trigs.ifo worked successfully') except AttributeError: - # Should be exactly one ifo provided - assert len(numpy.unique(trigs['ifo'])) == 1 - ifo = 
trigs['ifo'][0] + ifo = trigs.get('ifo', None) + if ifo is None: + print('DEBUG in single: ifo is none after trigs.get') + print(f'DEBUG in single: stat ifos: {self.ifos}') + ifo = self.ifos[0] + else: + print('DEBUG in single: trigs.get worked') + assert ifo in self.ifos singles = ExpFitFgBgNormStatistic.single(self, trigs) From a999f4b479321364d3ef32a00900bae44d502101 Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Wed, 7 Aug 2024 13:17:05 -0700 Subject: [PATCH 09/15] Use logging not print for debugging --- pycbc/events/stat.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index ade7d0a62e9..57cadbd6d72 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2411,21 +2411,22 @@ def find_dq_noise_rate(self, trigs): try: ifo = trigs.ifo - print('DEBUG in find_dq_noise_rate: trigs.ifo worked successfully') + logging.info('DEBUG in find_dq_noise_rate: trigs.ifo worked successfully') except AttributeError: ifo = trigs.get('ifo', None) if ifo is None: - print('DEBUG in find_dq_noise_rate: ifo is none after trigs.get') - print(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') + logging.info('DEBUG in find_dq_noise_rate: ifo is none after trigs.get') + logging.info(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') ifo = self.ifos[0] else: - print('DEBUG in find_dq_noise_rate: trigs.get worked') + logging.info('DEBUG in find_dq_noise_rate: trigs.get worked') assert ifo in self.ifos dq_state = trigs['dq_state'] dq_val = numpy.ones(len(dq_state)) tnum = self.curr_tnum + logging.info(f'DEBUG in find_dq_noise_rate: tnum: {tnum}') if ifo in self.dq_rates_by_state: for (i, st) in enumerate(dq_state): if isinstance(tnum, numpy.ndarray): @@ -2478,15 +2479,15 @@ def single(self, trigs): # make sure every trig has a dq state try: ifo = trigs.ifo - print('DEBUG in single: trigs.ifo worked successfully') + logging.info('DEBUG in single: trigs.ifo worked successfully') except AttributeError: ifo = trigs.get('ifo', None) if ifo is None: - print('DEBUG in single: ifo is none after trigs.get') - print(f'DEBUG in single: stat ifos: {self.ifos}') + logging.info('DEBUG in single: ifo is none after trigs.get') + logging.info(f'DEBUG in single: stat ifos: {self.ifos}') ifo = self.ifos[0] else: - print('DEBUG in single: trigs.get worked') + logging.info('DEBUG in single: trigs.get worked') assert ifo in self.ifos singles = ExpFitFgBgNormStatistic.single(self, trigs) From f4fdac80303c601bfc1c7dd989a2d423a93e1cec Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Wed, 7 Aug 2024 13:35:29 -0700 Subject: [PATCH 10/15] logger not logging --- pycbc/events/stat.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index 57cadbd6d72..c29bd431c18 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2411,22 +2411,22 @@ def find_dq_noise_rate(self, trigs): try: ifo = trigs.ifo - logging.info('DEBUG in find_dq_noise_rate: trigs.ifo worked successfully') + logger.info('DEBUG in find_dq_noise_rate: trigs.ifo worked successfully') except AttributeError: ifo = trigs.get('ifo', None) if ifo is None: - logging.info('DEBUG in find_dq_noise_rate: ifo is none after trigs.get') - logging.info(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') + logger.info('DEBUG in find_dq_noise_rate: ifo is none after trigs.get') + logger.info(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') ifo = self.ifos[0] else: - logging.info('DEBUG in 
find_dq_noise_rate: trigs.get worked') + logger.info('DEBUG in find_dq_noise_rate: trigs.get worked') assert ifo in self.ifos dq_state = trigs['dq_state'] dq_val = numpy.ones(len(dq_state)) tnum = self.curr_tnum - logging.info(f'DEBUG in find_dq_noise_rate: tnum: {tnum}') + logger.info(f'DEBUG in find_dq_noise_rate: tnum: {tnum}') if ifo in self.dq_rates_by_state: for (i, st) in enumerate(dq_state): if isinstance(tnum, numpy.ndarray): @@ -2479,15 +2479,15 @@ def single(self, trigs): # make sure every trig has a dq state try: ifo = trigs.ifo - logging.info('DEBUG in single: trigs.ifo worked successfully') + logger.info('DEBUG in single: trigs.ifo worked successfully') except AttributeError: ifo = trigs.get('ifo', None) if ifo is None: - logging.info('DEBUG in single: ifo is none after trigs.get') - logging.info(f'DEBUG in single: stat ifos: {self.ifos}') + logger.info('DEBUG in single: ifo is none after trigs.get') + logger.info(f'DEBUG in single: stat ifos: {self.ifos}') ifo = self.ifos[0] else: - logging.info('DEBUG in single: trigs.get worked') + logger.info('DEBUG in single: trigs.get worked') assert ifo in self.ifos singles = ExpFitFgBgNormStatistic.single(self, trigs) From 34bbaff665215341215c494d65b9b5a03192852e Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Wed, 7 Aug 2024 13:55:37 -0700 Subject: [PATCH 11/15] Fix where tnum is set --- pycbc/events/stat.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index c29bd431c18..d157f432389 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -1670,6 +1670,12 @@ def single(self, trigs): numpy.ndarray The array of single detector values """ + try: + # exists if accessed via coinc_findtrigs + self.curr_tnum = trigs.template_num + except AttributeError: + # exists for SingleDetTriggers & pycbc_live get_coinc + self.curr_tnum = trigs['template_id'] # single-ifo stat = log of noise rate sngl_stat = self.lognoiserate(trigs) @@ -1681,12 +1687,6 @@ def single(self, trigs): singles['end_time'] = trigs['end_time'][:] singles['sigmasq'] = trigs['sigmasq'][:] singles['snr'] = trigs['snr'][:] - try: - # exists if accessed via coinc_findtrigs - self.curr_tnum = trigs.template_num - except AttributeError: - # exists for SingleDetTriggers & pycbc_live get_coinc - self.curr_tnum = trigs['template_id'] # Store benchmark log volume as single-ifo information since the coinc # method does not have access to template id @@ -2412,14 +2412,17 @@ def find_dq_noise_rate(self, trigs): try: ifo = trigs.ifo logger.info('DEBUG in find_dq_noise_rate: trigs.ifo worked successfully') + logger.info(f'DEBUG in find_dq_noise_rate: trig ifo: {ifo}') + logger.info(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') except AttributeError: ifo = trigs.get('ifo', None) if ifo is None: - logger.info('DEBUG in find_dq_noise_rate: ifo is none after trigs.get') - logger.info(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') ifo = self.ifos[0] + logger.info('DEBUG in find_dq_noise_rate: ifo is none after trigs.get') else: logger.info('DEBUG in find_dq_noise_rate: trigs.get worked') + logger.info(f'DEBUG in find_dq_noise_rate: trig ifo: {ifo}') + logger.info(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') assert ifo in self.ifos dq_state = trigs['dq_state'] @@ -2480,14 +2483,17 @@ def single(self, trigs): try: ifo = trigs.ifo logger.info('DEBUG in single: trigs.ifo worked successfully') + logger.info(f'DEBUG in single: trig ifo: {ifo}') + logger.info(f'DEBUG in 
single: stat ifos: {self.ifos}') except AttributeError: ifo = trigs.get('ifo', None) if ifo is None: - logger.info('DEBUG in single: ifo is none after trigs.get') - logger.info(f'DEBUG in single: stat ifos: {self.ifos}') ifo = self.ifos[0] + logger.info('DEBUG in single: ifo is none after trigs.get') else: logger.info('DEBUG in single: trigs.get worked') + logger.info(f'DEBUG in single: trig ifo: {ifo}') + logger.info(f'DEBUG in single: stat ifos: {self.ifos}') assert ifo in self.ifos singles = ExpFitFgBgNormStatistic.single(self, trigs) From 034ad0eba9174aea48edd8f9549ff4e87d036e76 Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Fri, 9 Aug 2024 13:37:01 -0700 Subject: [PATCH 12/15] Get rid of excess logging --- pycbc/events/stat.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index d157f432389..82516f7df3d 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2411,25 +2411,16 @@ def find_dq_noise_rate(self, trigs): try: ifo = trigs.ifo - logger.info('DEBUG in find_dq_noise_rate: trigs.ifo worked successfully') - logger.info(f'DEBUG in find_dq_noise_rate: trig ifo: {ifo}') - logger.info(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') except AttributeError: ifo = trigs.get('ifo', None) if ifo is None: ifo = self.ifos[0] - logger.info('DEBUG in find_dq_noise_rate: ifo is none after trigs.get') - else: - logger.info('DEBUG in find_dq_noise_rate: trigs.get worked') - logger.info(f'DEBUG in find_dq_noise_rate: trig ifo: {ifo}') - logger.info(f'DEBUG in find_dq_noise_rate: stat ifos: {self.ifos}') assert ifo in self.ifos dq_state = trigs['dq_state'] dq_val = numpy.ones(len(dq_state)) tnum = self.curr_tnum - logger.info(f'DEBUG in find_dq_noise_rate: tnum: {tnum}') if ifo in self.dq_rates_by_state: for (i, st) in enumerate(dq_state): if isinstance(tnum, numpy.ndarray): @@ -2482,18 +2473,10 @@ def single(self, trigs): # make sure every trig has a dq state try: ifo = trigs.ifo - logger.info('DEBUG in single: trigs.ifo worked successfully') - logger.info(f'DEBUG in single: trig ifo: {ifo}') - logger.info(f'DEBUG in single: stat ifos: {self.ifos}') except AttributeError: ifo = trigs.get('ifo', None) if ifo is None: ifo = self.ifos[0] - logger.info('DEBUG in single: ifo is none after trigs.get') - else: - logger.info('DEBUG in single: trigs.get worked') - logger.info(f'DEBUG in single: trig ifo: {ifo}') - logger.info(f'DEBUG in single: stat ifos: {self.ifos}') assert ifo in self.ifos singles = ExpFitFgBgNormStatistic.single(self, trigs) From 7f13fd9ac1256ef239b543568f20ae76e664ef49 Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Wed, 14 Aug 2024 14:08:11 -0700 Subject: [PATCH 13/15] Address Gareth's comments --- bin/pycbc_live | 2 +- pycbc/events/stat.py | 13 ++++++------- pycbc/frame/frame.py | 25 +++++++++++++++++++------ 3 files changed, 26 insertions(+), 14 deletions(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index b80b165cc4f..5dbe2273a83 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -1312,7 +1312,7 @@ with ctx: for key in results[ifo]: if len(results[ifo][key]): results[ifo][key] = results[ifo][key][idx] - if data_reader[ifo].idq: + if data_reader[ifo].idq is not None: logging.info("Reading %s's iDQ information", ifo) start = data_reader[ifo].start_time times = results[ifo]['end_time'] diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index 82516f7df3d..c1cd9912c7b 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2300,14 +2300,13 @@ def check_low_latency(self, key): ifo = 
key.split('-')[0] with h5py.File(self.files[key], 'r') as dq_file: ifo_grp = dq_file[ifo] - if 'dq_segments' in ifo_grp.keys(): - # if segs are in stat file, we are not in LL - assert not self.low_latency, 'Should not have segments in LL' - else: - # we must be in LL, shouldn't have segments - assert not self.dq_state_segments, \ - 'Should not have segments in LL' + if 'dq_segments' not in ifo_grp.keys(): + # if segs are not in file, we must be in LL + if self.dq_state_segments is not None: + raise ValueError('Either all dq stat files must have segments or none') self.low_latency = True + elif self.low_latency: + raise ValueError('Either all dq stat files must have segments or none') def assign_template_bins(self, key): """ diff --git a/pycbc/frame/frame.py b/pycbc/frame/frame.py index 329abcc90bf..e7f2d0296ec 100644 --- a/pycbc/frame/frame.py +++ b/pycbc/frame/frame.py @@ -905,8 +905,11 @@ def flag_at_times(self, start_time, duration, times, padding=0): Beginning time to request for duration: int Number of seconds to check. + times: array of floats + Times to check for an active flag padding: float - Amount of time in seconds to flag around glitchy times + Amount of time in seconds to flag around samples + below the iDQ FAP threshold Returns ------- @@ -914,23 +917,33 @@ def flag_at_times(self, start_time, duration, times, padding=0): Boolean array of whether flag was on at given times """ from pycbc.events.veto import indices_within_times + + # convert start and end times to buffer indices sr = self.idq.raw_buffer.sample_rate s = int((start_time - self.idq.raw_buffer.start_time - padding) * sr) - 1 e = s + int((duration + padding) * sr) + 1 - idq_fap = self.idq.raw_buffer[s:e] - stamps = idq_fap.sample_times.numpy() + + # find samples when iDQ FAP is below threshold and state is valid + idq_fap = self.idq.raw_buffer[s:e] low_fap = idq_fap.numpy() <= self.threshold idq_valid = self.idq_state.raw_buffer[s:e] idq_valid = idq_valid.numpy().astype(bool) valid_low_fap = numpy.logical_and(idq_valid, low_fap) + + # find times corresponding to the valid low FAP samples glitch_idx = numpy.flatnonzero(valid_low_fap) + stamps = idq_fap.sample_times.numpy() glitch_times = stamps[glitch_idx] + + # construct start and end times of flag segments starts = glitch_times - padding ends = starts + 1.0 / sr + padding * 2.0 + + # check if times were flagged idx = indices_within_times(times, starts, ends) - out = numpy.zeros(len(times), dtype=bool) - out[idx] = True - return out + flagged_bool = numpy.zeros(len(times), dtype=bool) + flagged_bool[idx] = True + return flagged_bool def advance(self, blocksize): """ Add blocksize seconds more to the buffer, push blocksize seconds From 5a8ecc74d83356735498b2683cfb04e1d706108c Mon Sep 17 00:00:00 2001 From: maxtrevor Date: Thu, 15 Aug 2024 10:35:31 -0700 Subject: [PATCH 14/15] Codeclimate --- pycbc/events/stat.py | 6 ++++-- pycbc/frame/frame.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index c1cd9912c7b..786ac60a7e6 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2303,10 +2303,12 @@ def check_low_latency(self, key): if 'dq_segments' not in ifo_grp.keys(): # if segs are not in file, we must be in LL if self.dq_state_segments is not None: - raise ValueError('Either all dq stat files must have segments or none') + raise ValueError( + 'Either all dq stat files must have segments or none') self.low_latency = True elif self.low_latency: - raise ValueError('Either all dq stat files must 
have segments or none') + raise ValueError( + 'Either all dq stat files must have segments or none') def assign_template_bins(self, key): """ diff --git a/pycbc/frame/frame.py b/pycbc/frame/frame.py index e7f2d0296ec..bea7386418a 100644 --- a/pycbc/frame/frame.py +++ b/pycbc/frame/frame.py @@ -924,7 +924,7 @@ def flag_at_times(self, start_time, duration, times, padding=0): e = s + int((duration + padding) * sr) + 1 # find samples when iDQ FAP is below threshold and state is valid - idq_fap = self.idq.raw_buffer[s:e] + idq_fap = self.idq.raw_buffer[s:e] low_fap = idq_fap.numpy() <= self.threshold idq_valid = self.idq_state.raw_buffer[s:e] idq_valid = idq_valid.numpy().astype(bool) From 3e8ae49f2f0ece2bf9733cb7f6619668d743ffd6 Mon Sep 17 00:00:00 2001 From: maxtrevor <65971534+maxtrevor@users.noreply.github.com> Date: Mon, 19 Aug 2024 08:57:01 -0400 Subject: [PATCH 15/15] Apply suggestions from code review Co-authored-by: Gareth S Cabourn Davies --- bin/pycbc_live | 19 ++++++++++++++----- pycbc/events/stat.py | 6 ++++-- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index 5dbe2273a83..52a0de2433a 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -1318,17 +1318,26 @@ with ctx: times = results[ifo]['end_time'] flag_active = data_reader[ifo].idq.flag_at_times( start, valid_pad, times, - padding=data_reader[ifo].dq_padding) + padding=data_reader[ifo].dq_padding + ) if args.idq_reweighting: - logging.info('iDQ flagged %d/%d %s triggers', - numpy.sum(flag_active), len(times), ifo) + logging.info( + 'iDQ flagged %d/%d %s triggers', + numpy.sum(flag_active), + len(times), + ifo + ) results[ifo]['dq_state'] = flag_active.astype(int) else: # use idq as a veto keep = numpy.logical_not(flag_active) - logging.info('Keeping %d/%d %s triggers after iDQ', - numpy.sum(keep), len(times), ifo) + logging.info( + 'Keeping %d/%d %s triggers after iDQ', + numpy.sum(keep), + len(times), + ifo + ) for key in results[ifo]: if len(results[ifo][key]): results[ifo][key] = \ diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index 786ac60a7e6..ff8da9a2f19 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -2304,11 +2304,13 @@ def check_low_latency(self, key): # if segs are not in file, we must be in LL if self.dq_state_segments is not None: raise ValueError( - 'Either all dq stat files must have segments or none') + 'Either all dq stat files must have segments or none' + ) self.low_latency = True elif self.low_latency: raise ValueError( - 'Either all dq stat files must have segments or none') + 'Either all dq stat files must have segments or none' + ) def assign_template_bins(self, key): """
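
Editor's note: the sketch below is a minimal standalone illustration (not part of the patch series) of the logic the new flag_at_times method above implements. iDQ samples whose false-alarm probability is at or below the threshold, while the iDQ state channel is valid, define short padded segments, and a trigger counts as flagged if its end time falls inside any such segment. With --idq-reweighting the resulting boolean array becomes the triggers' 'dq_state' entry; otherwise flagged triggers are vetoed. The sample rate, threshold and padding values used here are illustrative assumptions, not the live configuration, and the explicit loop stands in for pycbc.events.veto.indices_within_times.

import numpy

def flag_at_times_sketch(fap, valid, sample_times, trigger_times,
                         threshold=0.01, padding=0.0):
    """Return a boolean array: True where a trigger lies in a flagged segment."""
    sample_rate = 1.0 / (sample_times[1] - sample_times[0])
    # samples below the FAP threshold while the iDQ state is valid
    glitch_times = sample_times[numpy.logical_and(valid.astype(bool),
                                                  fap <= threshold)]
    # pad each flagged sample into a segment [start, end)
    starts = glitch_times - padding
    ends = glitch_times + 1.0 / sample_rate + padding
    flagged = numpy.zeros(len(trigger_times), dtype=bool)
    for seg_start, seg_end in zip(starts, ends):
        flagged |= (trigger_times >= seg_start) & (trigger_times < seg_end)
    return flagged

# Illustrative use: one glitchy iDQ sample at t=2.0 s flags the trigger at 2.1 s
sample_times = numpy.arange(0.0, 4.0, 0.25)      # 4 Hz iDQ time series
fap = numpy.ones_like(sample_times)
fap[8] = 1e-3                                     # low-FAP sample at t=2.0
valid = numpy.ones_like(sample_times)
trigger_times = numpy.array([0.5, 2.1, 3.7])
print(flag_at_times_sketch(fap, valid, sample_times, trigger_times, padding=0.1))
# -> [False  True False]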