@@ -417,12 +417,7 @@ def __init__(self, params: dict, iir_sos: np.ndarray):
         self.apd_worker = AmplitudeProbabilityDistribution.remote(
             params[APD_BIN_SIZE_DB], params[APD_MIN_BIN_DBM], params[APD_MAX_BIN_DBM]
         )
-        self.workers = [
-            self.fft_worker,
-            self.pvt_worker,
-            self.pfp_worker,
-            self.apd_worker,
-        ]
+
         del params

     def run(self, iqdata: np.ndarray) -> list:
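For readers unfamiliar with the pattern above: calling .remote() on a Ray-decorated class starts an actor process and returns a handle, which is what AmplitudeProbabilityDistribution.remote(...) does. A minimal, self-contained sketch of that mechanism; the class and parameter names here are illustrative stand-ins, not code from this repository:

    import ray
    import numpy as np

    ray.init(ignore_reinit_error=True)

    @ray.remote
    class ExampleWorker:  # hypothetical stand-in for the FFT/PVT/PFP/APD workers
        def __init__(self, bin_size_db: float):
            self.bin_size_db = bin_size_db

        def run(self, iqdata: np.ndarray) -> np.ndarray:
            # Placeholder computation; the real workers produce PSD/PVT/PFP/APD results
            return np.abs(iqdata) ** 2

    # .remote() on the decorated class starts the actor and returns a handle,
    # mirroring the worker construction in the diff above.
    worker = ExampleWorker.remote(0.5)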
@@ -436,9 +431,11 @@ def run(self, iqdata: np.ndarray) -> list:
         # Filter IQ and place it in the object store
         iqdata = ray.put(sosfilt(self.iir_sos, iqdata))
         # Compute PSD, PVT, PFP, and APD concurrently.
-        # Do not wait until they finish. Yield references to their results.
-        yield [worker.run.remote(iqdata) for worker in self.workers]
-        del iqdata
+        fft_reference = self.fft_worker.run.remote(iqdata)
+        pvt_reference = self.pvt_worker.run.remote(iqdata)
+        pfp_reference = self.pfp_worker.run.remote(iqdata)
+        apd_reference = self.apd_worker.run.remote(iqdata)
+        return fft_reference, pvt_reference, pfp_reference, apd_reference


 class NasctnSeaDataProduct(Action):
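The run() method above fans the same filtered IQ array out to four actors: ray.put stores the array in the object store once, each .remote() call returns an ObjectRef immediately, and nothing blocks until a later ray.get. A minimal sketch of that fan-out pattern, with a toy task standing in for the workers:

    import ray
    import numpy as np

    ray.init(ignore_reinit_error=True)

    @ray.remote
    def toy_product(x: np.ndarray) -> float:  # stand-in for a worker's run()
        return float(np.abs(x).sum())

    iq = np.ones(1024, dtype=np.complex64)
    # ray.put serializes the array into the object store once; every remote
    # call below reads the same shared copy.
    iq_ref = ray.put(iq)
    refs = [toy_product.remote(iq_ref) for _ in range(4)]
    # The calls above returned immediately; ray.get is the first blocking point.
    print(ray.get(refs))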
@@ -541,7 +538,7 @@ def __call__(self, sensor: Sensor, schedule_entry: dict, task_id: int):
         logger.debug(f"Spawned {NUM_ACTORS} supervisor actors in {toc - tic:.2f} s")

         # Collect all IQ data and spawn data product computation processes
-        dp_procs, cpu_speed, reference_points = [], [], []
+        psd_refs, pvt_refs, pfp_refs, apd_refs, cpu_speed, reference_points = [], [], [], [], [], []
         capture_tic = perf_counter()

         for i, parameters in enumerate(self.iteration_params):
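A side note on the bookkeeping choice above: keeping four parallel reference lists is equivalent to collecting one 4-tuple per channel and unzipping it afterwards. A toy sketch of the equivalence, with integers standing in for Ray ObjectRefs:

    # Toy equivalence check: integers stand in for Ray ObjectRefs.
    ref_tuples = [(1, 2, 3, 4), (5, 6, 7, 8)]  # one (psd, pvt, pfp, apd) tuple per channel
    psd_refs, pvt_refs, pfp_refs, apd_refs = (list(t) for t in zip(*ref_tuples))
    assert psd_refs == [1, 5] and apd_refs == [4, 8]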
@@ -552,10 +549,12 @@ def __call__(self, sensor: Sensor, schedule_entry: dict, task_id: int):
             )
             # Start data product processing but do not block next IQ capture
             tic = perf_counter()
+            fft_reference, pvt_reference, pfp_reference, apd_reference = iq_processors[i % NUM_ACTORS].run.remote(measurement_result["data"])
+            psd_refs.append(fft_reference)
+            pvt_refs.append(pvt_reference)
+            pfp_refs.append(pfp_reference)
+            apd_refs.append(apd_reference)

-            dp_procs.append(
-                iq_processors[i % NUM_ACTORS].run.remote(measurement_result["data"])
-            )
             del measurement_result["data"]
             toc = perf_counter()
             logger.debug(f"IQ data delivered for processing in {toc - tic:.2f} s")
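Unpacking a single .remote() call into four references, as the capture loop above does, only works when the actor method is declared to return multiple objects, e.g. with Ray's @ray.method(num_returns=4) (presumably applied to run() elsewhere in this file; the decorator is not visible in this diff). A minimal sketch of that mechanism:

    import ray

    ray.init(ignore_reinit_error=True)

    @ray.remote
    class Processor:
        @ray.method(num_returns=4)
        def run(self, x: int):
            return x, x + 1, x + 2, x + 3

    p = Processor.remote()
    # With num_returns=4, the call yields four ObjectRefs that can be unpacked
    # directly, matching the fft/pvt/pfp/apd unpacking in the diff above.
    a, b, c, d = p.run.remote(10)
    print(ray.get([a, b, c, d]))  # [10, 11, 12, 13]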
@@ -585,49 +584,46 @@ def __call__(self, sensor: Sensor, schedule_entry: dict, task_id: int):
             [],
         )
         result_tic = perf_counter()
-        logger.debug(f"Have {len(dp_procs)} results")
-        for channel_data_process in dp_procs:
-            # Retrieve object references for channel data
-            channel_data_refs = ray.get(channel_data_process)
-            logger.debug(f"channel_data_refs is {type(channel_data_refs)}: {channel_data_refs}")
+        channel_count = len(psd_refs)
+        logger.debug(f"Have {channel_count} channel results")
+        for index in range(len(psd_refs)):
+            logger.debug(f"Working on channel {index}")
             channel_data = []
-            for i, data_ref in enumerate(channel_data_refs):
-                # Now block until the data is ready
-                logger.debug(f"{i} Requesting object {data_ref}")
-                data_products = ray.get(data_ref)
-                logger.debug(f"data products is {type(data_products)}")
-                for dp_num, data_product_reference in enumerate(data_products):
-                    logger.debug(f"Getting dp {dp_num}, {data_product_reference}")
-                    data_product = ray.get(data_product_reference)
-                    logger.debug(f"{dp_num} data product is: {type(data_product)}")
-                    if dp_num == 1:
-                        # Power-vs-Time results, a tuple of arrays
-                        logger.debug("splitting tuple")
-                        data, summaries = data_product  # Split the tuple
-                        logger.debug(f"data is {type(data)}: {data}")
-                        logger.debug(f"summaries is {type(summaries)}: {summaries}")
-                        max_max_ch_pwrs.append(DATA_TYPE(summaries[0]))
-                        med_mean_ch_pwrs.append(DATA_TYPE(summaries[1]))
-                        mean_ch_pwrs.append(DATA_TYPE(summaries[2]))
-                        median_ch_pwrs.append(DATA_TYPE(summaries[3]))
-                        del summaries
-                    elif dp_num == 3:  # Separate condition is intentional
-                        # APD result: append instead of extend,
-                        # since the result is a single 1D array
-                        logger.debug("appending data product")
-                        channel_data.append(data_product)
-                    else:
-                        # For 2D arrays (PSD, PVT, PFP)
-                        logger.debug(f"dp {dp_num} extending channel data")
-                        channel_data.extend(data_product)
+            # Now block until the data is ready
+            psd_data = ray.get(psd_refs[index])
+            logger.debug(f"PSD: {psd_data}")
+            channel_data.extend(psd_data)
+            pvt_data = ray.get(pvt_refs[index])
+            logger.debug(f"PVT DATA: {pvt_data}")
+            # Power-vs-Time results, a tuple of arrays
+            logger.debug("splitting tuple")
+            data, summaries = pvt_data  # Split the tuple
+            logger.debug(f"data is {type(data)}: {data}")
+            logger.debug(f"summaries is {type(summaries)}: {summaries}")
+            max_max_ch_pwrs.append(DATA_TYPE(summaries[0]))
+            med_mean_ch_pwrs.append(DATA_TYPE(summaries[1]))
+            mean_ch_pwrs.append(DATA_TYPE(summaries[2]))
+            median_ch_pwrs.append(DATA_TYPE(summaries[3]))
+            del summaries
+
+            pfp_data = ray.get(pfp_refs[index])
+            logger.debug(f"PFP: {pfp_data}")
+            channel_data.extend(pfp_data)
+
+            # APD result: append instead of extend,
+            # since the result is a single 1D array
+            apd_data = ray.get(apd_refs[index])
+            logger.debug(f"APD: {apd_data}")
+            channel_data.append(apd_data)

         toc = perf_counter()
         logger.debug(f"Waited {toc - tic} s for channel data")
         all_data.extend(NasctnSeaDataProduct.transform_data(channel_data))
+
         for ray_actor in iq_processors:
             ray.kill(ray_actor)
         result_toc = perf_counter()
-        del dp_procs, iq_processors, channel_data, channel_data_refs
+        del iq_processors, channel_data
         logger.debug(f"Got all processed data in {result_toc - result_tic:.2f} s")

         # Build metadata and convert data to compressed bytes
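The retrieval loop above is the fan-in side of the pipeline: each ray.get blocks until that channel's product is ready, and the actors are explicitly terminated once all results are in. A compact sketch of the same lifecycle, with a toy actor standing in for one of the iq_processors:

    import ray

    ray.init(ignore_reinit_error=True)

    @ray.remote
    class Worker:  # toy stand-in for one of the iq_processors actors
        def compute(self, x: int) -> int:
            return 2 * x

    workers = [Worker.remote() for _ in range(2)]
    refs = [w.compute.remote(i) for i, w in enumerate(workers)]
    # ray.get blocks until each referenced result is available.
    for index in range(len(refs)):
        print(ray.get(refs[index]))  # 0, then 2
    # Explicitly terminate the actors once their results are retrieved,
    # as the diff does with ray.kill(ray_actor).
    for w in workers:
        ray.kill(w)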