From 94897c39da33e0b4ee79931db5b8683f83e48970 Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Thu, 26 Oct 2023 15:35:09 -0600 Subject: [PATCH 01/11] Change PSD resolution to 80 kHz and add percentiles --- .../actions/acquire_sea_data_product.py | 48 +++++++++++-------- 1 file changed, 27 insertions(+), 21 deletions(-) diff --git a/scos_actions/actions/acquire_sea_data_product.py b/scos_actions/actions/acquire_sea_data_product.py index 5fc636c5..89397755 100644 --- a/scos_actions/actions/acquire_sea_data_product.py +++ b/scos_actions/actions/acquire_sea_data_product.py @@ -109,7 +109,8 @@ # Constants DATA_TYPE = np.half PFP_FRAME_RESOLUTION_S = (1e-3 * (1 + 1 / (14)) / 15) / 4 -FFT_SIZE = 875 +FFT_SIZE = 175 # 80 kHz resolution @ 14 MHz sampling rate +FFT_PERCENTILES = np.arange(0, 101, 5) # 0, 5, ..., 100 FFT_WINDOW_TYPE = "flattop" FFT_WINDOW = get_fft_window(FFT_WINDOW_TYPE, FFT_SIZE) FFT_WINDOW_ECF = get_fft_window_correction(FFT_WINDOW, "energy") @@ -119,7 +120,7 @@ # Create power detectors TD_DETECTOR = create_statistical_detector("TdMeanMaxDetector", ["mean", "max"]) -FFT_DETECTOR = create_statistical_detector("FftMeanMaxDetector", ["mean", "max"]) +FFT_DETECTOR = create_statistical_detector("FftMeanDetector", ["mean"]) PFP_M3_DETECTOR = create_statistical_detector("PfpM3Detector", ["min", "max", "mean"]) # Expected webswitch configuration: @@ -156,15 +157,17 @@ def __init__( fft_window: np.ndarray = FFT_WINDOW, window_ecf: float = FFT_WINDOW_ECF, detector: EnumMeta = FFT_DETECTOR, + percentiles: np.ndarray = FFT_PERCENTILES, impedance_ohms: float = IMPEDANCE_OHMS, ): self.detector = detector + self.percentiles = percentiles self.fft_size = fft_size self.fft_window = fft_window self.num_ffts = num_ffts - # Get truncation points: truncate FFT result to middle 625 samples (middle 10 MHz from 14 MHz) - self.bin_start = int(fft_size / 7) # bin_start = 125 with FFT_SIZE 875 - self.bin_end = fft_size - self.bin_start # bin_end = 750 with FFT_SIZE 875 + # Get truncation points: truncate FFT result to middle 125 samples (middle 10 MHz from 14 MHz) + self.bin_start = int(fft_size / 7) # bin_start = 25 with FFT_SIZE 175 + self.bin_end = fft_size - self.bin_start # bin_end = 150 with FFT_SIZE 175 # Compute the amplitude shift for PSD scaling. The FFT result # is in pseudo-power log units and must be scaled to a PSD. self.fft_scale_factor = ( @@ -183,20 +186,26 @@ def run(self, iq: ray.ObjectRef) -> np.ndarray: :return: A 2D NumPy array of statistical detector results computed from PSD amplitudes, ordered (max, mean). """ - fft_result = get_fft( + fft_amplitudes = get_fft( iq, self.fft_size, "backward", self.fft_window, self.num_ffts, False, 1 ) - fft_result = calculate_pseudo_power(fft_result) - fft_result = apply_statistical_detector( - fft_result, self.detector - ) # (max, mean) + # Power in Watts + fft_amplitudes = calculate_pseudo_power(fft_result) + fft_result = apply_statistical_detector(fft_amplitudes, self.detector) # (mean) + percentile_result = np.percentile(fft_amplitudes, self.percentiles, axis=0) + fft_result = np.vstack((fft_result, percentile_result)) fft_result = np.fft.fftshift(fft_result, axes=(1,)) # Shift frequencies fft_result = fft_result[ :, self.bin_start : self.bin_end ] # Truncation to middle bins fft_result = 10.0 * np.log10(fft_result) + self.fft_scale_factor - # Returned order is (max, mean) + # Returned order is (mean, 0_percentile, 5_percentile, ... 
100_percentile) + # Total of 22 arrays, each of length 125 (output shape (22, 125)) + # 0 percentile is equivalent to the minimum + # 50 percentile is equivalent to the median + # 100 percentile is equivalent to the maximum + # Percentile computation linearly interpolates. See numpy documentation. return fft_result @@ -871,18 +880,15 @@ def create_global_data_product_metadata(self) -> None: self.sigmf_builder.set_processing_info([iir_obj, dft_obj]) psd_length = int(FFT_SIZE * (5 / 7)) - psd_bin_center_offset = p[SAMPLE_RATE] / FFT_SIZE / 2 - psd_x_axis__Hz = np.arange(psd_length) * ( - (p[SAMPLE_RATE] / FFT_SIZE) - - (p[SAMPLE_RATE] * (5 / 7) / 2) - + psd_bin_center_offset - ) psd_bin_start = int(FFT_SIZE / 7) # bin_start = 125 with FFT_SIZE 875 psd_bin_end = FFT_SIZE - psd_bin_start # bin_end = 750 with FFT_SIZE 875 - psd_x_axis__Hz = get_fft_frequencies(FFT_SIZE, 14e6, 0.0) # Baseband + psd_x_axis__Hz = get_fft_frequencies(FFT_SIZE, p[SAMPLE_RATE], 0.0) # Baseband psd_graph = ntia_algorithm.Graph( name="Power Spectral Density", - series=[d.value for d in FFT_DETECTOR], # ["max", "mean"] + series=[d.value for d in FFT_DETECTOR] + + [ + f"q_{p}" for p in FFT_PERCENTILES + ], # ["mean", "q_0", "q_5", ... "q_100"] length=int(FFT_SIZE * (5 / 7)), x_units="Hz", x_start=[psd_x_axis__Hz[psd_bin_start]], @@ -892,7 +898,7 @@ def create_global_data_product_metadata(self) -> None: processing=[dft_obj.id], reference=DATA_REFERENCE_POINT, description=( - "Max- and mean-detected power spectral density, with the " + "Mean-detected and percentiles (5pct steps, 0-100) power spectral density, with the " + f"first and last {int(FFT_SIZE / 7)} samples discarded. " + "FFTs computed on IIR-filtered data." ), @@ -972,7 +978,7 @@ def create_global_data_product_metadata(self) -> None: [psd_graph, pvt_graph, pfp_graph, apd_graph] ) self.total_channel_data_length = ( - psd_length * len(FFT_DETECTOR) + psd_length * (len(FFT_DETECTOR) + len(FFT_PERCENTILES)) + pvt_length * len(TD_DETECTOR) + pfp_length * len(PFP_M3_DETECTOR) * 2 + apd_graph.length From babe6d2f97f0fa7d658a7e7fdd8e6d1a23acb9ae Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Thu, 26 Oct 2023 15:36:28 -0600 Subject: [PATCH 02/11] Temporarily mark metadata version for testing --- scos_actions/metadata/sigmf_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scos_actions/metadata/sigmf_builder.py b/scos_actions/metadata/sigmf_builder.py index 2914096f..efe04512 100644 --- a/scos_actions/metadata/sigmf_builder.py +++ b/scos_actions/metadata/sigmf_builder.py @@ -47,7 +47,7 @@ }, { "name": "ntia-nasctn-sea", - "version": "v0.6.0", + "version": "v0.6.0-TESTING", # Temporary "optional": True, }, ], From a14de0eae29f1b66b3308f59f3ddfa4fc3a38df4 Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Tue, 31 Oct 2023 11:15:56 -0600 Subject: [PATCH 03/11] Fix unboundlocalerror --- scos_actions/actions/acquire_sea_data_product.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scos_actions/actions/acquire_sea_data_product.py b/scos_actions/actions/acquire_sea_data_product.py index 89397755..57c9a35b 100644 --- a/scos_actions/actions/acquire_sea_data_product.py +++ b/scos_actions/actions/acquire_sea_data_product.py @@ -190,7 +190,7 @@ def run(self, iq: ray.ObjectRef) -> np.ndarray: iq, self.fft_size, "backward", self.fft_window, self.num_ffts, False, 1 ) # Power in Watts - fft_amplitudes = calculate_pseudo_power(fft_result) + fft_amplitudes = calculate_pseudo_power(fft_amplitudes) fft_result = 
apply_statistical_detector(fft_amplitudes, self.detector) # (mean) percentile_result = np.percentile(fft_amplitudes, self.percentiles, axis=0) fft_result = np.vstack((fft_result, percentile_result)) From 862a33ca9631cd29669ed6ad084dc16aebceb240 Mon Sep 17 00:00:00 2001 From: Anthony Romaniello <66272872+aromanielloNTIA@users.noreply.github.com> Date: Thu, 16 Nov 2023 16:49:48 -0500 Subject: [PATCH 04/11] remove testing sigmf version tag --- scos_actions/metadata/sigmf_builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scos_actions/metadata/sigmf_builder.py b/scos_actions/metadata/sigmf_builder.py index efe04512..2914096f 100644 --- a/scos_actions/metadata/sigmf_builder.py +++ b/scos_actions/metadata/sigmf_builder.py @@ -47,7 +47,7 @@ }, { "name": "ntia-nasctn-sea", - "version": "v0.6.0-TESTING", # Temporary + "version": "v0.6.0", "optional": True, }, ], From eda2d5185a3d34f911f7c4c1a94cb52154448ad5 Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Thu, 14 Dec 2023 11:29:13 -0500 Subject: [PATCH 05/11] update percentile set for testing --- scos_actions/actions/acquire_sea_data_product.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scos_actions/actions/acquire_sea_data_product.py b/scos_actions/actions/acquire_sea_data_product.py index 4fa24aa0..63553006 100644 --- a/scos_actions/actions/acquire_sea_data_product.py +++ b/scos_actions/actions/acquire_sea_data_product.py @@ -111,7 +111,8 @@ DATA_TYPE = np.half PFP_FRAME_RESOLUTION_S = (1e-3 * (1 + 1 / (14)) / 15) / 4 FFT_SIZE = 175 # 80 kHz resolution @ 14 MHz sampling rate -FFT_PERCENTILES = np.arange(0, 101, 5) # 0, 5, ..., 100 +# FFT_PERCENTILES = np.arange(0, 101, 5) # 0, 5, ..., 100 +FFT_PERCENTILES = np.array([0.0, 50, 75, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100]) FFT_WINDOW_TYPE = "flattop" FFT_WINDOW = get_fft_window(FFT_WINDOW_TYPE, FFT_SIZE) FFT_WINDOW_ECF = get_fft_window_correction(FFT_WINDOW, "energy") @@ -187,8 +188,8 @@ def run(self, iq: ray.ObjectRef) -> np.ndarray: ] # Truncation to middle bins fft_result = 10.0 * np.log10(fft_result) + self.fft_scale_factor - # Returned order is (mean, 0_percentile, 5_percentile, ... 100_percentile) - # Total of 22 arrays, each of length 125 (output shape (22, 125)) + # Returned order is (mean, 0_percentile, 25_percentile, ... 
100_percentile) + # Total of 15 arrays, each of length 125 (output shape (15, 125)) # 0 percentile is equivalent to the minimum # 50 percentile is equivalent to the median # 100 percentile is equivalent to the maximum From 339cb51e939efc84656a0fa1022c6e552e2763a1 Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Tue, 26 Dec 2023 17:18:42 -0500 Subject: [PATCH 06/11] Update PSD detectors based on new testing --- scos_actions/actions/acquire_sea_data_product.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/scos_actions/actions/acquire_sea_data_product.py b/scos_actions/actions/acquire_sea_data_product.py index 63553006..dbb56fa6 100644 --- a/scos_actions/actions/acquire_sea_data_product.py +++ b/scos_actions/actions/acquire_sea_data_product.py @@ -111,8 +111,7 @@ DATA_TYPE = np.half PFP_FRAME_RESOLUTION_S = (1e-3 * (1 + 1 / (14)) / 15) / 4 FFT_SIZE = 175 # 80 kHz resolution @ 14 MHz sampling rate -# FFT_PERCENTILES = np.arange(0, 101, 5) # 0, 5, ..., 100 -FFT_PERCENTILES = np.array([0.0, 50, 75, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100]) +FFT_PERCENTILES = np.array([25, 75, 90, 95, 99, 99.9, 99.99]) FFT_WINDOW_TYPE = "flattop" FFT_WINDOW = get_fft_window(FFT_WINDOW_TYPE, FFT_SIZE) FFT_WINDOW_ECF = get_fft_window_correction(FFT_WINDOW, "energy") @@ -122,7 +121,7 @@ # Create power detectors TD_DETECTOR = create_statistical_detector("TdMeanMaxDetector", ["mean", "max"]) -FFT_DETECTOR = create_statistical_detector("FftMeanDetector", ["mean"]) +FFT_M3_DETECTOR = create_statistical_detector("FftM3Detector", ["mean", "median", "max"]) PFP_M3_DETECTOR = create_statistical_detector("PfpM3Detector", ["min", "max", "mean"]) @@ -144,7 +143,7 @@ def __init__( fft_size: int = FFT_SIZE, fft_window: np.ndarray = FFT_WINDOW, window_ecf: float = FFT_WINDOW_ECF, - detector: EnumMeta = FFT_DETECTOR, + detector: EnumMeta = FFT_M3_DETECTOR, percentiles: np.ndarray = FFT_PERCENTILES, impedance_ohms: float = IMPEDANCE_OHMS, ): @@ -179,7 +178,7 @@ def run(self, iq: ray.ObjectRef) -> np.ndarray: ) # Power in Watts fft_amplitudes = calculate_pseudo_power(fft_amplitudes) - fft_result = apply_statistical_detector(fft_amplitudes, self.detector) # (mean) + fft_result = apply_statistical_detector(fft_amplitudes, self.detector) # (mean, median, max) percentile_result = np.percentile(fft_amplitudes, self.percentiles, axis=0) fft_result = np.vstack((fft_result, percentile_result)) fft_result = np.fft.fftshift(fft_result, axes=(1,)) # Shift frequencies @@ -188,11 +187,8 @@ def run(self, iq: ray.ObjectRef) -> np.ndarray: ] # Truncation to middle bins fft_result = 10.0 * np.log10(fft_result) + self.fft_scale_factor - # Returned order is (mean, 0_percentile, 25_percentile, ... 100_percentile) - # Total of 15 arrays, each of length 125 (output shape (15, 125)) - # 0 percentile is equivalent to the minimum - # 50 percentile is equivalent to the median - # 100 percentile is equivalent to the maximum + # Returned order is (mean, median, max, 25%, 75%, 90%, 95%, 99%, 99.9%, 99.99%) + # Total of 10 arrays, each of length 125 (output shape (10, 125)) # Percentile computation linearly interpolates. See numpy documentation. 
return fft_result From 2b63adc56672f9b25a26a452370de956cf447cba Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Tue, 26 Dec 2023 17:25:25 -0500 Subject: [PATCH 07/11] Update PSD metadata generation for new detector set --- .../actions/acquire_sea_data_product.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/scos_actions/actions/acquire_sea_data_product.py b/scos_actions/actions/acquire_sea_data_product.py index dbb56fa6..0d7e609a 100644 --- a/scos_actions/actions/acquire_sea_data_product.py +++ b/scos_actions/actions/acquire_sea_data_product.py @@ -120,8 +120,8 @@ NUM_ACTORS = 3 # Number of ray actors to initialize # Create power detectors -TD_DETECTOR = create_statistical_detector("TdMeanMaxDetector", ["mean", "max"]) -FFT_M3_DETECTOR = create_statistical_detector("FftM3Detector", ["mean", "median", "max"]) +TD_DETECTOR = create_statistical_detector("TdMeanMaxDetector", ["max", "mean"]) +FFT_M3_DETECTOR = create_statistical_detector("FftM3Detector", ["max", "mean", "median"]) PFP_M3_DETECTOR = create_statistical_detector("PfpM3Detector", ["min", "max", "mean"]) @@ -996,10 +996,10 @@ def create_global_data_product_metadata(self) -> None: psd_x_axis__Hz = get_fft_frequencies(FFT_SIZE, p[SAMPLE_RATE], 0.0) # Baseband psd_graph = ntia_algorithm.Graph( name="Power Spectral Density", - series=[d.value for d in FFT_DETECTOR] + series=[d.value for d in FFT_M3_DETECTOR] + [ - f"q_{p}" for p in FFT_PERCENTILES - ], # ["mean", "q_0", "q_5", ... "q_100"] + f"{int(p)}th_percentile" if p.is_integer() else f"{p}th_percentile" for p in FFT_PERCENTILES + ], # ["max", "mean", "median", "25th_percentile", "75th_percentile", ... "99.99th_percentile"] length=int(FFT_SIZE * (5 / 7)), x_units="Hz", x_start=[psd_x_axis__Hz[psd_bin_start]], @@ -1009,9 +1009,10 @@ def create_global_data_product_metadata(self) -> None: processing=[dft_obj.id], reference=DATA_REFERENCE_POINT, description=( - "Mean-detected and percentiles (5pct steps, 0-100) power spectral density, with the " - + f"first and last {int(FFT_SIZE / 7)} samples discarded. " - + "FFTs computed on IIR-filtered data." + "Results of statistical detectors (max, mean, median, 25th_percentile, 75th_percentile, " + + "90th_percentile, 95th_percentile, 99th_percentile, 99.9th_percentile, 99.99th_percentile) " + + f"applied to power spectral density samples, with the first and last {int(FFT_SIZE / 7)} " + + "samples discarded. FFTs computed on IIR-filtered data." ), ) @@ -1089,7 +1090,7 @@ def create_global_data_product_metadata(self) -> None: [psd_graph, pvt_graph, pfp_graph, apd_graph] ) self.total_channel_data_length = ( - psd_length * (len(FFT_DETECTOR) + len(FFT_PERCENTILES)) + psd_length * (len(FFT_M3_DETECTOR) + len(FFT_PERCENTILES)) + pvt_length * len(TD_DETECTOR) + pfp_length * len(PFP_M3_DETECTOR) * 2 + apd_graph.length From 4f0c8ddb37b152be77ec5c96ef6db650c37b3981 Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Wed, 27 Dec 2023 14:33:21 -0500 Subject: [PATCH 08/11] Correct comments for PSD result ordering --- scos_actions/actions/acquire_sea_data_product.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scos_actions/actions/acquire_sea_data_product.py b/scos_actions/actions/acquire_sea_data_product.py index 0d7e609a..d956fbbf 100644 --- a/scos_actions/actions/acquire_sea_data_product.py +++ b/scos_actions/actions/acquire_sea_data_product.py @@ -158,7 +158,7 @@ def __init__( # Compute the amplitude shift for PSD scaling. 
The FFT result # is in pseudo-power log units and must be scaled to a PSD. self.fft_scale_factor = ( - -10.0 * np.log10(impedance_ohms) # Pseudo-power to power + - 10.0 * np.log10(impedance_ohms) # Pseudo-power to power + 27.0 # Watts to dBm (+30) and baseband to RF (-3) - 10.0 * np.log10(sample_rate_Hz * fft_size) # PSD scaling + 20.0 * np.log10(window_ecf) # Window energy correction @@ -178,7 +178,7 @@ def run(self, iq: ray.ObjectRef) -> np.ndarray: ) # Power in Watts fft_amplitudes = calculate_pseudo_power(fft_amplitudes) - fft_result = apply_statistical_detector(fft_amplitudes, self.detector) # (mean, median, max) + fft_result = apply_statistical_detector(fft_amplitudes, self.detector) # (max, mean, median) percentile_result = np.percentile(fft_amplitudes, self.percentiles, axis=0) fft_result = np.vstack((fft_result, percentile_result)) fft_result = np.fft.fftshift(fft_result, axes=(1,)) # Shift frequencies @@ -187,7 +187,7 @@ def run(self, iq: ray.ObjectRef) -> np.ndarray: ] # Truncation to middle bins fft_result = 10.0 * np.log10(fft_result) + self.fft_scale_factor - # Returned order is (mean, median, max, 25%, 75%, 90%, 95%, 99%, 99.9%, 99.99%) + # Returned order is (max, mean, median, 25%, 75%, 90%, 95%, 99%, 99.9%, 99.99%) # Total of 10 arrays, each of length 125 (output shape (10, 125)) # Percentile computation linearly interpolates. See numpy documentation. return fft_result From 832c1b637e5ee304910c86187eed2439a66d8116 Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Wed, 27 Dec 2023 14:36:33 -0500 Subject: [PATCH 09/11] Add debug message when retrieving processed data --- scos_actions/actions/acquire_sea_data_product.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scos_actions/actions/acquire_sea_data_product.py b/scos_actions/actions/acquire_sea_data_product.py index d956fbbf..d8b8bfb7 100644 --- a/scos_actions/actions/acquire_sea_data_product.py +++ b/scos_actions/actions/acquire_sea_data_product.py @@ -575,6 +575,7 @@ def __call__(self, schedule_entry, task_id): for i, data_ref in enumerate(channel_data_refs): # Now block until the data is ready data = ray.get(data_ref) + logger.debug(f"Retrieved data: {i}\n\t{data}") if i == 1: # Power-vs-Time results, a tuple of arrays data, summaries = data # Split the tuple From cc26aa45bf5c1fe2b6ca7a55f00cffba3b51066f Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Thu, 28 Dec 2023 16:34:43 -0500 Subject: [PATCH 10/11] Remove temporary debug message --- scos_actions/actions/acquire_sea_data_product.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scos_actions/actions/acquire_sea_data_product.py b/scos_actions/actions/acquire_sea_data_product.py index d8b8bfb7..d956fbbf 100644 --- a/scos_actions/actions/acquire_sea_data_product.py +++ b/scos_actions/actions/acquire_sea_data_product.py @@ -575,7 +575,6 @@ def __call__(self, schedule_entry, task_id): for i, data_ref in enumerate(channel_data_refs): # Now block until the data is ready data = ray.get(data_ref) - logger.debug(f"Retrieved data: {i}\n\t{data}") if i == 1: # Power-vs-Time results, a tuple of arrays data, summaries = data # Split the tuple From 4fbdc403e03f9e4e4e6b2b703549ebc6cb5c6d0e Mon Sep 17 00:00:00 2001 From: Anthony Romaniello Date: Thu, 4 Jan 2024 13:30:13 -0500 Subject: [PATCH 11/11] Bump version number --- scos_actions/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scos_actions/__init__.py b/scos_actions/__init__.py index fb16410a..b2ea6c25 100644 --- a/scos_actions/__init__.py +++ b/scos_actions/__init__.py @@ -1 
+1 @@
-__version__ = "7.0.1"
+__version__ = "7.0.2"
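
Illustrative sketch (Python; not part of the patch series, and not the scos_actions implementation): the snippet below mimics the PSD processing these commits converge on -- 175-point flat top FFTs at the 14 MHz sample rate (80 kHz resolution), max/mean/median detectors plus the [25, 75, 90, 95, 99, 99.9, 99.99] percentile set, truncation to the middle 5/7 of bins (10 MHz, 125 bins), and the dBm/Hz scale factor from the patched code. The helper name psd_detectors, the SAMPLE_RATE_HZ constant, and the window energy-correction formula are assumptions introduced for illustration only.

import numpy as np
from scipy.signal import get_window

FFT_SIZE = 175                      # 80 kHz resolution @ 14 MHz sampling rate
SAMPLE_RATE_HZ = 14e6               # assumed sensor sample rate
IMPEDANCE_OHMS = 50.0               # assumed reference impedance
FFT_PERCENTILES = np.array([25, 75, 90, 95, 99, 99.9, 99.99])

window = get_window("flattop", FFT_SIZE)
# Energy correction factor; assumed definition, not taken from scos_actions
window_ecf = 1.0 / np.sqrt(np.mean(window**2))

def psd_detectors(iq: np.ndarray, num_ffts: int) -> np.ndarray:
    """Return a (10, 125) array: max, mean, median, then the 7 percentiles, in dBm/Hz."""
    # Segment the capture into num_ffts rows of FFT_SIZE samples and window each row
    segments = iq[: num_ffts * FFT_SIZE].reshape(num_ffts, FFT_SIZE) * window
    spectra = np.fft.fft(segments, axis=1)      # unscaled forward FFT ("backward" norm)
    pseudo_power = np.abs(spectra) ** 2         # V^2, not yet divided by impedance
    # Apply detectors and percentiles across the num_ffts axis, on linear pseudo-power
    stats = np.vstack(
        (
            pseudo_power.max(axis=0),
            pseudo_power.mean(axis=0),
            np.median(pseudo_power, axis=0),
            np.percentile(pseudo_power, FFT_PERCENTILES, axis=0),  # linear interpolation
        )
    )
    stats = np.fft.fftshift(stats, axes=(1,))   # order bins from -fs/2 to +fs/2
    bin_start, bin_end = FFT_SIZE // 7, FFT_SIZE - FFT_SIZE // 7
    stats = stats[:, bin_start:bin_end]         # keep the middle 10 MHz (125 bins)
    scale = (
        -10.0 * np.log10(IMPEDANCE_OHMS)               # pseudo-power to power
        + 27.0                                         # Watts to dBm (+30) and baseband to RF (-3)
        - 10.0 * np.log10(SAMPLE_RATE_HZ * FFT_SIZE)   # power to power spectral density
        + 20.0 * np.log10(window_ecf)                  # window energy correction
    )
    return 10.0 * np.log10(stats) + scale

# Example: white-noise capture spanning 64 FFTs
rng = np.random.default_rng(0)
iq = (rng.standard_normal(64 * FFT_SIZE) + 1j * rng.standard_normal(64 * FFT_SIZE)) * 1e-3
print(psd_detectors(iq, num_ffts=64).shape)  # (10, 125)

As in the patched run() method, the detectors and percentiles are applied to linear pseudo-power before the conversion to dB, and np.percentile interpolates linearly by default, matching the in-code comment; the returned row order (max, mean, median, 25%, ..., 99.99%) matches the ordering documented in patch 08.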