Skip to content

Commit

Permalink
Merge pull request #114 from NTIA/calibrate_to_antenna
Browse files Browse the repository at this point in the history
Separate on board calibrations, support additional loss to antenna, add disk usage to diagnostics
  • Loading branch information
jhazentia authored Mar 27, 2024
2 parents 07ef42b + f8c9316 commit 2763dab
Show file tree
Hide file tree
Showing 45 changed files with 1,586 additions and 981 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ repos:
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.0
rev: v3.15.2
hooks:
- id: pyupgrade
args: ["--py38-plus"]
Expand All @@ -30,12 +30,12 @@ repos:
types: [file, python]
args: ["--profile", "black", "--filter-files", "--gitignore"]
- repo: https://github.com/psf/black
rev: 23.12.1
rev: 24.3.0
hooks:
- id: black
types: [file, python]
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.38.0
rev: v0.39.0
hooks:
- id: markdownlint
types: [file, markdown]
Expand Down
1 change: 1 addition & 0 deletions sample_debug.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
This is a sample file showing how an action can be created and called for debugging purposes
using a mock signal analyzer.
"""

import json

from scos_actions.actions.acquire_single_freq_fft import SingleFrequencyFftAcquisition
Expand Down
2 changes: 1 addition & 1 deletion scos_actions/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "8.0.1"
__version__ = "9.0.0"
70 changes: 48 additions & 22 deletions scos_actions/actions/acquire_sea_data_product.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,11 @@
create_statistical_detector,
)
from scos_actions.signals import measurement_action_completed, trigger_api_restart
from scos_actions.utils import convert_datetime_to_millisecond_iso_format, get_days_up
from scos_actions.utils import (
convert_datetime_to_millisecond_iso_format,
get_days_up,
get_disk_usage,
)

env = Env()
logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -109,7 +113,6 @@
FFT_WINDOW = get_fft_window(FFT_WINDOW_TYPE, FFT_SIZE)
FFT_WINDOW_ECF = get_fft_window_correction(FFT_WINDOW, "energy")
IMPEDANCE_OHMS = 50.0
DATA_REFERENCE_POINT = "noise source output"
NUM_ACTORS = 3 # Number of ray actors to initialize

# Create power detectors
Expand Down Expand Up @@ -449,6 +452,7 @@ class NasctnSeaDataProduct(Action):
def __init__(self, parameters: dict):
super().__init__(parameters)
# Assume preselector is present
self.total_channel_data_length = None
rf_path_name = utils.get_parameter(RF_PATH, self.parameters)
self.rf_path = {self.PRESELECTOR_PATH_KEY: rf_path_name}

Expand Down Expand Up @@ -506,6 +510,7 @@ def __call__(self, sensor: Sensor, schedule_entry: dict, task_id: int):
action_start_tic = perf_counter()
# Ray should have already been initialized within scos-sensor,
# but check and initialize just in case.

if not ray.is_initialized():
logger.info("Initializing ray.")
logger.info("Set RAY_INIT=true to avoid initializing within " + __name__)
Expand All @@ -525,8 +530,6 @@ def __call__(self, sensor: Sensor, schedule_entry: dict, task_id: int):
self.iteration_params,
)
self.create_global_sensor_metadata(self.sensor)
self.create_global_data_product_metadata()

# Initialize remote supervisor actors for IQ processing
tic = perf_counter()
# This uses iteration_params[0] because
Expand All @@ -538,10 +541,15 @@ def __call__(self, sensor: Sensor, schedule_entry: dict, task_id: int):
logger.debug(f"Spawned {NUM_ACTORS} supervisor actors in {toc-tic:.2f} s")

# Collect all IQ data and spawn data product computation processes
dp_procs, cpu_speed = [], []
dp_procs, cpu_speed, reference_points = [], [], []
capture_tic = perf_counter()

for i, parameters in enumerate(self.iteration_params):
measurement_result = self.capture_iq(parameters)
if i == 0:
self.create_global_data_product_metadata(
measurement_result["reference"]
)
# Start data product processing but do not block next IQ capture
tic = perf_counter()

Expand All @@ -552,16 +560,22 @@ def __call__(self, sensor: Sensor, schedule_entry: dict, task_id: int):
toc = perf_counter()
logger.debug(f"IQ data delivered for processing in {toc-tic:.2f} s")
# Create capture segment with channel-specific metadata before sigan is reconfigured
tic = perf_counter()
self.create_capture_segment(i, measurement_result)
toc = perf_counter()
logger.debug(f"Created capture metadata in {toc-tic:.2f} s")
# Query CPU speed for later averaging in diagnostics metadata
cpu_speed.append(get_current_cpu_clock_speed())
# Append list of data reference points; later we require these to be identical
reference_points.append(measurement_result["reference"])
capture_toc = perf_counter()
logger.debug(
f"Collected all IQ data and started all processing in {capture_toc-capture_tic:.2f} s"
)

# Create data product metadata: requires all data reference points
# to be identical.
assert (
len(set(reference_points)) == 1
), "Channel data were scaled to different reference points. Cannot build metadata."

# Collect processed data product results
all_data, max_max_ch_pwrs, med_mean_ch_pwrs, mean_ch_pwrs, median_ch_pwrs = (
[],
Expand Down Expand Up @@ -630,14 +644,11 @@ def capture_iq(self, params: dict) -> dict:
nskip = utils.get_parameter(NUM_SKIP, params)
num_samples = int(params[SAMPLE_RATE] * duration_ms * 1e-3)
# Collect IQ data
measurement_result = self.sensor.signal_analyzer.acquire_time_domain_samples(
num_samples, nskip
measurement_result = self.sensor.acquire_time_domain_samples(
num_samples, nskip, cal_params=params
)
# Store some metadata with the IQ
measurement_result.update(params)
measurement_result[
"sensor_cal"
] = self.sensor.signal_analyzer.sensor_calibration_data
toc = perf_counter()
logger.debug(
f"IQ Capture ({duration_ms} ms @ {(params[FREQUENCY]/1e6):.1f} MHz) completed in {toc-tic:.2f} s."
Expand Down Expand Up @@ -778,6 +789,11 @@ def capture_diagnostics(
cpu_diag["ssd_smart_data"] = ntia_diagnostics.SsdSmartData(**smart_data)
except:
logger.warning("Failed to get SSD SMART data")
try: # Disk usage
disk_usage = get_disk_usage()
cpu_diag["disk_usage"] = disk_usage
except:
logger.warning("Failed to get disk usage")

# Get software versions
software_diag = {
Expand Down Expand Up @@ -978,7 +994,7 @@ def test_required_components(self):
trigger_api_restart.send(sender=self.__class__)
return None

def create_global_data_product_metadata(self) -> None:
def create_global_data_product_metadata(self, data_products_reference: str) -> None:
p = self.parameters
num_iq_samples = int(p[SAMPLE_RATE] * p[DURATION_MS] * 1e-3)
iir_obj = ntia_algorithm.DigitalFilter(
Expand Down Expand Up @@ -1023,7 +1039,7 @@ def create_global_data_product_metadata(self) -> None:
x_step=[p[SAMPLE_RATE] / FFT_SIZE],
y_units="dBm/Hz",
processing=[dft_obj.id],
reference=DATA_REFERENCE_POINT,
reference=data_products_reference,
description=(
"Results of statistical detectors (max, mean, median, 25th_percentile, 75th_percentile, "
+ "90th_percentile, 95th_percentile, 99th_percentile, 99.9th_percentile, 99.99th_percentile) "
Expand All @@ -1043,7 +1059,7 @@ def create_global_data_product_metadata(self) -> None:
x_stop=[pvt_x_axis__s[-1]],
x_step=[pvt_x_axis__s[1] - pvt_x_axis__s[0]],
y_units="dBm",
reference=DATA_REFERENCE_POINT,
reference=data_products_reference,
description=(
"Max- and mean-detected channel power vs. time, with "
+ f"an integration time of {p[TD_BIN_SIZE_MS]} ms. "
Expand All @@ -1070,7 +1086,7 @@ def create_global_data_product_metadata(self) -> None:
x_stop=[pfp_x_axis__s[-1]],
x_step=[pfp_x_axis__s[1] - pfp_x_axis__s[0]],
y_units="dBm",
reference=DATA_REFERENCE_POINT,
reference=data_products_reference,
description=(
"Channelized periodic frame power statistics reported over"
+ f" a {p[PFP_FRAME_PERIOD_MS]} ms frame period, with frame resolution"
Expand All @@ -1093,6 +1109,7 @@ def create_global_data_product_metadata(self) -> None:
y_start=[apd_y_axis__dBm[0]],
y_stop=[apd_y_axis__dBm[-1]],
y_step=[apd_y_axis__dBm[1] - apd_y_axis__dBm[0]],
reference=data_products_reference,
description=(
f"Estimate of the APD, using a {p[APD_BIN_SIZE_DB]} dB "
+ "bin size for amplitude values. The data payload includes"
Expand All @@ -1111,6 +1128,7 @@ def create_global_data_product_metadata(self) -> None:
+ pfp_length * len(PFP_M3_DETECTOR) * 2
+ apd_graph.length
)
logger.debug(f"Total channel length:{self.total_channel_data_length}")

def create_capture_segment(
self,
Expand All @@ -1126,18 +1144,26 @@ def create_capture_segment(
duration=measurement_result[DURATION_MS],
overload=measurement_result["overload"],
sensor_calibration=ntia_sensor.Calibration(
datetime=measurement_result["sensor_cal"]["datetime"],
gain=round(measurement_result["sensor_cal"]["gain"], 3),
noise_figure=round(measurement_result["sensor_cal"]["noise_figure"], 3),
temperature=round(measurement_result["sensor_cal"]["temperature"], 1),
reference=DATA_REFERENCE_POINT,
datetime=self.sensor.sensor_calibration_data["datetime"],
gain=round(measurement_result["applied_calibration"]["gain"], 3),
noise_figure=round(
measurement_result["applied_calibration"]["noise_figure"], 3
),
temperature=round(
self.sensor.sensor_calibration_data["temperature"], 1
),
reference=measurement_result["reference"],
),
sigan_settings=ntia_sensor.SiganSettings(
reference_level=self.sensor.signal_analyzer.reference_level,
attenuation=self.sensor.signal_analyzer.attenuation,
preamp_enable=self.sensor.signal_analyzer.preamp_enable,
),
)
if "compression_point" in measurement_result["applied_calibration"]:
capture_segment.sensor_calibration.compression_point = measurement_result[
"applied_calibration"
]["compression_point"]
self.sigmf_builder.add_capture(capture_segment)

def get_sigmf_builder(
Expand Down
20 changes: 6 additions & 14 deletions scos_actions/actions/acquire_single_freq_fft.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,6 @@
from numpy import float32, ndarray

from scos_actions.actions.interfaces.measurement_action import MeasurementAction
from scos_actions.hardware.mocks.mock_gps import MockGPS
from scos_actions.metadata.structs import ntia_algorithm
from scos_actions.signal_processing.fft import (
get_fft,
Expand Down Expand Up @@ -153,10 +152,6 @@ def __init__(self, parameters: dict):
self.classification = get_parameter(CLASSIFICATION, self.parameters)
self.cal_adjust = get_parameter(CAL_ADJUST, self.parameters)
assert isinstance(self.cal_adjust, bool)
if self.cal_adjust:
self.data_reference = "calibration terminal"
else:
self.data_reference = "signal analyzer input"
# FFT setup
self.fft_detector = create_statistical_detector(
"M4sDetector", ["min", "max", "mean", "median", "sample"]
Expand All @@ -169,7 +164,7 @@ def __init__(self, parameters: dict):
def execute(self, schedule_entry: dict, task_id: int) -> dict:
# Acquire IQ data and generate M4S result
measurement_result = self.acquire_data(
self.num_samples, self.nskip, self.cal_adjust
self.num_samples, self.nskip, self.cal_adjust, cal_params=self.parameters
)
# Actual sample rate may differ from configured value
sample_rate_Hz = measurement_result["sample_rate"]
Expand All @@ -178,22 +173,19 @@ def execute(self, schedule_entry: dict, task_id: int) -> dict:
# Save measurement results
measurement_result["data"] = m4s_result
measurement_result.update(self.parameters)
measurement_result[
"calibration_datetime"
] = self.sensor.signal_analyzer.sensor_calibration_data["datetime"]
measurement_result["task_id"] = task_id
measurement_result["classification"] = self.classification

# Build capture metadata
sigan_settings = self.get_sigan_settings(measurement_result)
logger.debug(f"sigan settings:{sigan_settings}")
measurement_result["duration_ms"] = round(
(self.num_samples / sample_rate_Hz) * 1000
)
measurement_result["capture_segment"] = self.create_capture_segment(
sample_start=0,
start_time=measurement_result["capture_time"],
center_frequency_Hz=self.frequency_Hz,
duration_ms=int(self.num_samples / sample_rate_Hz),
overload=measurement_result["overload"],
sigan_settings=sigan_settings,
measurement_result=measurement_result,
)

return measurement_result
Expand Down Expand Up @@ -270,7 +262,7 @@ def create_metadata(self, measurement_result: dict, recording: int = None) -> No
x_stop=[frequencies[-1]],
x_step=[frequencies[1] - frequencies[0]],
y_units="dBm",
reference=self.data_reference,
reference=measurement_result["reference"],
description=(
"Results of min, max, mean, and median statistical detectors, "
+ f"along with a random sampling, from a set of {self.nffts} "
Expand Down
13 changes: 4 additions & 9 deletions scos_actions/actions/acquire_single_freq_tdomain_iq.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@

from scos_actions import utils
from scos_actions.actions.interfaces.measurement_action import MeasurementAction
from scos_actions.hardware.mocks.mock_gps import MockGPS
from scos_actions.utils import get_parameter

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -84,24 +83,20 @@ def execute(self, schedule_entry: dict, task_id: int) -> dict:
# Use the sigan's actual reported sample rate instead of the requested one
sample_rate = self.sensor.signal_analyzer.sample_rate
num_samples = int(sample_rate * self.duration_ms * 1e-3)
measurement_result = self.acquire_data(num_samples, self.nskip, self.cal_adjust)
measurement_result = self.acquire_data(
num_samples, self.nskip, self.cal_adjust, cal_params=self.parameters
)
end_time = utils.get_datetime_str_now()
measurement_result.update(self.parameters)
measurement_result["end_time"] = end_time
measurement_result["task_id"] = task_id
measurement_result[
"calibration_datetime"
] = self.sensor.signal_analyzer.sensor_calibration_data["datetime"]
measurement_result["classification"] = self.classification
sigan_settings = self.get_sigan_settings(measurement_result)
logger.debug(f"sigan settings:{sigan_settings}")
measurement_result["capture_segment"] = self.create_capture_segment(
sample_start=0,
start_time=measurement_result["capture_time"],
center_frequency_Hz=self.frequency_Hz,
duration_ms=self.duration_ms,
overload=measurement_result["overload"],
sigan_settings=sigan_settings,
measurement_result=measurement_result,
)
return measurement_result

Expand Down
30 changes: 5 additions & 25 deletions scos_actions/actions/acquire_stepped_freq_tdomain_iq.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,39 +100,19 @@ def __call__(self, sensor: Sensor, schedule_entry: dict, task_id: int):
cal_adjust = get_parameter(CAL_ADJUST, measurement_params)
sample_rate = self.sensor.signal_analyzer.sample_rate
num_samples = int(sample_rate * duration_ms * 1e-3)
measurement_result = super().acquire_data(num_samples, nskip, cal_adjust)
measurement_result = super().acquire_data(
num_samples, nskip, cal_adjust, cal_params=measurement_params
)
measurement_result.update(measurement_params)
end_time = utils.get_datetime_str_now()
measurement_result["end_time"] = end_time
measurement_result["task_id"] = task_id
measurement_result["name"] = self.name
measurement_result["classification"] = self.classification
sigan_settings = self.get_sigan_settings(measurement_result)
capture_segment = CaptureSegment(
sample_start=0,
global_index=saved_samples,
frequency=measurement_params[FREQUENCY],
datetime=measurement_result["capture_time"],
duration=duration_ms,
overload=measurement_result["overload"],
sigan_settings=sigan_settings,
capture_segment = self.create_capture_segment(
0, sigan_settings, measurement_result
)
sigan_cal = self.sensor.signal_analyzer.sigan_calibration_data
sensor_cal = self.sensor.signal_analyzer.sensor_calibration_data
if sigan_cal is not None:
if "1db_compression_point" in sigan_cal:
sigan_cal["compression_point"] = sigan_cal.pop(
"1db_compression_point"
)
capture_segment.sigan_calibration = ntia_sensor.Calibration(**sigan_cal)
if sensor_cal is not None:
if "1db_compression_point" in sensor_cal:
sensor_cal["compression_point"] = sensor_cal.pop(
"1db_compression_point"
)
capture_segment.sensor_calibration = ntia_sensor.Calibration(
**sensor_cal
)
measurement_result["capture_segment"] = capture_segment

self.create_metadata(measurement_result, recording_id)
Expand Down
Loading

0 comments on commit 2763dab

Please sign in to comment.