Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions imap_processing/cdf/config/imap_hi_variable_attrs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -428,6 +428,26 @@ hi_de_nominal_bin:
VALIDMIN: 0
VALIDMAX: 89

# MET (seconds, float64) at which the ESA was stepped, one value per packet.
hi_de_esa_step_met:
  <<: *default_float64
  CATDESC: Mission Elapsed Time (MET) in seconds of when the ESA was stepped
  FIELDNAM: ESA Start MET
  LABLAXIS: ESA Start MET
  UNITS: seconds
  VAR_TYPE: support_data

# Per-packet quality-flag bitmask; bit 0 marks a full (664-event) packet.
hi_de_ccsds_qf:
  <<: *default_uint8
  CATDESC: CCSDS packet quality flag bitmask
  FIELDNAM: CCSDS Quality Flag
  FORMAT: I3
  LABLAXIS: CCSDS QF
  VALIDMIN: 0
  VALIDMAX: 255
  VAR_NOTES: >
    Bitwise quality flag for CCSDS packet. Bit 0 (value 1): packet was full
    (contained 664 events).

# ======= L1C PSET Section =======

# Define override values for epoch as defined in imap_constant_attrs.yaml
Expand Down
88 changes: 87 additions & 1 deletion imap_processing/hi/hi_l1b.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from imap_processing import imap_module_directory
from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
from imap_processing.cdf.utils import parse_filename_like
from imap_processing.hi.hi_l1a import HALF_CLOCK_TICK_S
from imap_processing.hi.hi_l1a import HALF_CLOCK_TICK_S, MILLISECOND_TO_S
from imap_processing.hi.utils import (
HIAPID,
CoincidenceBitmap,
Expand All @@ -20,6 +20,7 @@
create_dataset_variables,
parse_sensor_number,
)
from imap_processing.quality_flags import ImapHiL1bDeFlags
from imap_processing.spice.geometry import (
SpiceFrame,
instrument_pointing,
Expand Down Expand Up @@ -133,6 +134,8 @@ def annotate_direct_events(
att_manager_lookup_str="hi_de_{0}",
)
)
l1b_de_dataset.update(de_esa_step_met(l1b_de_dataset))
l1b_de_dataset.update(de_ccsds_qf(l1b_de_dataset))
l1b_de_dataset = l1b_de_dataset.drop_vars(
[
"src_seq_ctr",
Expand Down Expand Up @@ -531,3 +534,86 @@ def get_esa_to_esa_energy_step_lut(
matching_esa_energy["esa_energy_step"].values[0],
)
return esa_energy_step_lut


def de_esa_step_met(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
    """
    Compute esa_step_met for each CCSDS packet.

    The telemetry encodes the ESA stepping time as whole seconds plus a
    millisecond remainder; this combines the two fields into a single
    float64 MET value in seconds.

    Parameters
    ----------
    dataset : xarray.Dataset
        The L1A/B dataset containing esa_step_seconds and esa_step_milliseconds.

    Returns
    -------
    new_vars : dict[str, xarray.DataArray]
        Dictionary with "esa_step_met" key and float64 DataArray value.
    """
    # Allocate the output variable with the hi_de_esa_step_met CDF attributes.
    new_vars = create_dataset_variables(
        ["esa_step_met"],
        len(dataset.epoch),
        att_manager_lookup_str="hi_de_{0}",
    )

    whole_seconds = dataset["esa_step_seconds"].values.astype(np.float64)
    ms_remainder = dataset["esa_step_milliseconds"].values
    # Whole seconds plus the millisecond remainder converted to seconds.
    new_vars["esa_step_met"].values = whole_seconds + ms_remainder * MILLISECOND_TO_S

    return new_vars


def de_ccsds_qf(dataset: xr.Dataset) -> dict[str, xr.DataArray]:
    """
    Compute ccsds_qf quality flag for each CCSDS packet.

    The ccsds_qf is a quality flag bitmask indicating packet characteristics.
    Currently only the PACKET_FULL bit is defined: it is set when a packet
    contains the maximum number of direct events (664).

    Parameters
    ----------
    dataset : xarray.Dataset
        The L1A/B dataset containing ccsds_index for mapping events to packets.

    Returns
    -------
    new_vars : dict[str, xarray.DataArray]
        Dictionary with "ccsds_qf" key and uint8 DataArray value.
    """
    # A direct-event packet can contain at most this many events.
    max_events_per_packet = 664

    # Allocate the output variable with the hi_de_ccsds_qf CDF attributes.
    new_vars = create_dataset_variables(
        ["ccsds_qf"],
        len(dataset.epoch),
        att_manager_lookup_str="hi_de_{0}",
    )
    flags = new_vars["ccsds_qf"].values
    flags[:] = 0  # default: no quality issues flagged

    packet_count = len(dataset.epoch)
    # ccsds_index maps each direct event to its originating packet.
    packet_of_event = dataset["ccsds_index"].values

    # Keep only indices that refer to a real packet; this drops fill and
    # out-of-range values (e.g. the uint16 FILLVAL 65535).
    in_range = (packet_of_event >= 0) & (packet_of_event < packet_count)

    if np.any(in_range):
        # Tally events attributed to each packet and flag the full ones.
        events_per_packet = np.bincount(
            packet_of_event[in_range].astype(np.int64),
            minlength=packet_count,
        )
        flags[events_per_packet == max_events_per_packet] = (
            ImapHiL1bDeFlags.PACKET_FULL
        )

    # With no valid events, every packet keeps the default flag of 0.
    return new_vars
7 changes: 7 additions & 0 deletions imap_processing/quality_flags.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,3 +148,10 @@ class GLOWSL1bFlags(FlagNameMixin):
"""Glows L1b flags."""

NONE = CommonFlags.NONE


class ImapHiL1bDeFlags(FlagNameMixin):
    """IMAP Hi L1B Direct Event CCSDS packet quality flags."""

    # No quality issues detected.
    NONE = CommonFlags.NONE
    PACKET_FULL = 2**0  # bit 0, packet contained 664 events (max capacity)
155 changes: 153 additions & 2 deletions imap_processing/tests/hi/test_hi_l1b.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,9 @@
any_good_direct_events,
compute_coincidence_type_and_tofs,
compute_hae_coordinates,
de_ccsds_qf,
de_esa_energy_step,
de_esa_step_met,
de_nominal_bin_and_spin_phase,
get_esa_to_esa_energy_step_lut,
housekeeping,
Expand All @@ -23,6 +25,7 @@
EsaEnergyStepLookupTable,
HiConstants,
)
from imap_processing.quality_flags import ImapHiL1bDeFlags
from imap_processing.spice.geometry import SpiceFrame


Expand Down Expand Up @@ -66,7 +69,7 @@ def test_hi_annotate_direct_events(
l1b_datasets = annotate_direct_events(l1a_dataset, xr.Dataset(), esa_energies_csv)
assert len(l1b_datasets) == 1
assert l1b_datasets[0].attrs["Logical_source"] == "imap_hi_l1b_45sensor-de"
assert len(l1b_datasets[0].data_vars) == 15
assert len(l1b_datasets[0].data_vars) == 17


@pytest.mark.parametrize(
Expand Down Expand Up @@ -128,7 +131,10 @@ def test_annotate_direct_events_with_hk(
l1b_datasets = annotate_direct_events(l1a_dataset, hk_dataset, esa_energies_csv)
assert len(l1b_datasets) == 1
assert l1b_datasets[0].attrs["Logical_source"] == "imap_hi_l1b_90sensor-de"
assert len(l1b_datasets[0].data_vars) == 15
assert len(l1b_datasets[0].data_vars) == 17
# Verify new L1B variables exist
assert "esa_step_met" in l1b_datasets[0].data_vars
assert "ccsds_qf" in l1b_datasets[0].data_vars


@pytest.fixture
Expand Down Expand Up @@ -640,3 +646,148 @@ def test_cal_data(self, hi_l1_test_data_path):
# Check the generated lookup table
# We expect 1 dataframe entry per esa step in the range [1, 9]
np.testing.assert_array_equal(lut.df["esa_step"].values, np.arange(9) + 1)


class TestDeEsaStepMet:
    """Tests for de_esa_step_met function."""

    def test_computes_esa_step_met(self):
        """esa_step_met combines whole seconds with the millisecond remainder."""
        seconds = np.array([100, 200, 300], dtype=np.uint32)
        milliseconds = np.array([500, 250, 750], dtype=np.uint16)
        ds = xr.Dataset(
            coords={"epoch": [0, 1, 2], "event_met": [0.0, 1.0]},
            data_vars={
                "esa_step_seconds": (["epoch"], seconds),
                "esa_step_milliseconds": (["epoch"], milliseconds),
                "trigger_id": xr.DataArray(
                    [1, 2], dims=["event_met"], attrs={"FILLVAL": 0}
                ),
            },
        )

        result = de_esa_step_met(ds)

        np.testing.assert_array_almost_equal(
            result["esa_step_met"].values, np.array([100.5, 200.25, 300.75])
        )


class TestDeCcsdsQf:
    """Tests for de_ccsds_qf function."""

    @staticmethod
    def _make_dataset(n_packets, ccsds_indices, trigger_id=None, trigger_fillval=0):
        """Build a minimal DE dataset with the given per-event CCSDS indices."""
        n_events = len(ccsds_indices)
        if trigger_id is None:
            trigger_id = np.ones(n_events, dtype=np.uint8)
        return xr.Dataset(
            coords={
                "epoch": np.arange(n_packets),
                "event_met": np.arange(n_events, dtype=np.float64),
            },
            data_vars={
                "ccsds_index": (["event_met"], ccsds_indices),
                "trigger_id": xr.DataArray(
                    trigger_id,
                    dims=["event_met"],
                    attrs={"FILLVAL": trigger_fillval},
                ),
            },
        )

    def test_packet_full_flag_set(self):
        """Test that PACKET_FULL flag is set for packets with 664 events."""
        # Packet 0 has 664 events, packet 1 has 100, packet 2 has 664.
        ccsds_indices = np.concatenate(
            [
                np.zeros(664, dtype=np.uint16),
                np.ones(100, dtype=np.uint16),
                np.full(664, 2, dtype=np.uint16),
            ]
        )
        ds = self._make_dataset(3, ccsds_indices)

        result = de_ccsds_qf(ds)

        # Packets 0 and 2 are full; packet 1 keeps the default flag of 0.
        assert result["ccsds_qf"].values[0] == ImapHiL1bDeFlags.PACKET_FULL
        assert result["ccsds_qf"].values[1] == 0
        assert result["ccsds_qf"].values[2] == ImapHiL1bDeFlags.PACKET_FULL

    def test_no_full_packets(self):
        """Test that no flags are set when no packets are full."""
        ccsds_indices = np.concatenate(
            [
                np.zeros(100, dtype=np.uint16),
                np.ones(200, dtype=np.uint16),
            ]
        )
        ds = self._make_dataset(2, ccsds_indices)

        result = de_ccsds_qf(ds)

        assert result["ccsds_qf"].values[0] == 0
        assert result["ccsds_qf"].values[1] == 0

    def test_no_valid_direct_events_all_fill_trigger_id(self):
        """de_ccsds_qf returns all zeros when trigger_id is entirely FILLVAL."""
        n_packets = 3
        # Arbitrary in-range CCSDS indices that would normally map to packets.
        ccsds_indices = np.array([0, 0, 1, 1, 2, 2, 0, 1, 2], dtype=np.uint16)
        # Every trigger_id equals the FILLVAL (0): no valid direct events.
        ds = self._make_dataset(
            n_packets,
            ccsds_indices,
            trigger_id=np.zeros(len(ccsds_indices), dtype=np.uint8),
            trigger_fillval=0,
        )

        result = de_ccsds_qf(ds)

        # With no valid direct events, all CCSDS quality flags should be zero.
        assert "ccsds_qf" in result
        assert result["ccsds_qf"].shape[0] == n_packets
        assert np.all(result["ccsds_qf"].values == 0)

    def test_ccsds_index_fillvals_ignored(self):
        """de_ccsds_qf returns all zeros when ccsds_index includes FILLVALs (65535)."""
        n_packets = 2
        fillval = np.uint16(65535)
        # Events carrying the CCSDS-index FILLVAL should simply be ignored.
        ccsds_indices = np.array([fillval, fillval, 0, 0, 1, 1], dtype=np.uint16)
        ds = self._make_dataset(n_packets, ccsds_indices)

        result = de_ccsds_qf(ds)

        # No packet reaches the full-packet threshold;
        # FILLVAL indices must not cause errors.
        assert "ccsds_qf" in result
        assert result["ccsds_qf"].shape[0] == n_packets
        assert np.all(result["ccsds_qf"].values == 0)