Skip to content
Snippets Groups Projects
Commit 5c9e7adc authored by pswain's avatar pswain
Browse files

feat(Signal): returns None rather than error if data is missing

parent 02f7587b
No related branches found
No related tags found
No related merge requests found
...@@ -15,7 +15,6 @@ from agora.io.decorators import _first_arg_str_to_raw_df ...@@ -15,7 +15,6 @@ from agora.io.decorators import _first_arg_str_to_raw_df
from agora.utils.indexing import validate_lineage from agora.utils.indexing import validate_lineage
from agora.utils.kymograph import add_index_levels from agora.utils.kymograph import add_index_levels
from agora.utils.merge import apply_merges from agora.utils.merge import apply_merges
from postprocessor.core.reshapers.picker import Picker, PickerParameters
class Signal(BridgeH5): class Signal(BridgeH5):
...@@ -57,8 +56,11 @@ class Signal(BridgeH5): ...@@ -57,8 +56,11 @@ class Signal(BridgeH5):
"""Get Signal after merging and picking.""" """Get Signal after merging and picking."""
if isinstance(dset_name, str): if isinstance(dset_name, str):
dsets = self.get_raw(dset_name, **kwargs) dsets = self.get_raw(dset_name, **kwargs)
picked_merged = self.apply_merging_picking(dsets, **kwargs) if dsets is not None:
return self.add_name(picked_merged, dset_name) picked_merged = self.apply_merging_picking(dsets, **kwargs)
return self.add_name(picked_merged, dset_name)
else:
return None
else: else:
raise Exception("Error in Signal.get") raise Exception("Error in Signal.get")
...@@ -266,32 +268,37 @@ class Signal(BridgeH5): ...@@ -266,32 +268,37 @@ class Signal(BridgeH5):
try: try:
if isinstance(dataset, str): if isinstance(dataset, str):
with h5py.File(self.filename, "r") as f: with h5py.File(self.filename, "r") as f:
df = self.dataset_to_df(f, dataset).sort_index() df = self.dataset_to_df(f, dataset)
if in_minutes: if df is not None:
df = self.cols_in_mins(df) df = df.sort_index()
if in_minutes:
df = self.cols_in_mins(df)
# apply merging or picking or both or neither
df = self.apply_merging_picking(df, merges, picks)
# add mother label to data frame
if lineage:
mother_label = np.zeros(len(df), dtype=int)
lineage = self.lineage()
valid_lineage, valid_indices = validate_lineage(
lineage,
indices=np.array(df.index.to_list()),
how="daughters",
)
mother_label[valid_indices] = lineage[
valid_lineage, 1
]
df = add_index_levels(
df, {"mother_label": mother_label}
)
return df
elif isinstance(dataset, list): elif isinstance(dataset, list):
return [ return [
self.get_raw(dset, in_minutes=in_minutes, lineage=lineage) self.get_raw(dset, in_minutes=in_minutes, lineage=lineage)
for dset in dataset for dset in dataset
] ]
# apply merging or picking or both or neither
df = self.apply_merging_picking(df, merges, picks)
# add mother label to data frame
if lineage:
mother_label = np.zeros(len(df), dtype=int)
lineage = self.lineage()
valid_lineage, valid_indices = validate_lineage(
lineage,
indices=np.array(df.index.to_list()),
how="daughters",
)
mother_label[valid_indices] = lineage[valid_lineage, 1]
df = add_index_levels(df, {"mother_label": mother_label})
return df
except Exception as e: except Exception as e:
self._log(f"Could not fetch dataset {dataset}: {e}", "error") message = f"Signal could not find data {dataset}: {e}."
raise e self._log(message)
def load_merges(self): def load_merges(self):
"""Get merge events going up to the first level.""" """Get merge events going up to the first level."""
...@@ -318,21 +325,25 @@ class Signal(BridgeH5): ...@@ -318,21 +325,25 @@ class Signal(BridgeH5):
def dataset_to_df(self, f: h5py.File, path: str) -> pd.DataFrame: def dataset_to_df(self, f: h5py.File, path: str) -> pd.DataFrame:
"""Get data from h5 file as a dataframe.""" """Get data from h5 file as a dataframe."""
assert path in f, f"{path} not in {f}" if path not in f:
dset = f[path] message = f"{path} not in {f}."
values, index, columns = [], [], [] self._log(message)
index_names = copy(self.index_names) return None
valid_names = [lbl for lbl in index_names if lbl in dset.keys()] else:
if valid_names: dset = f[path]
index = pd.MultiIndex.from_arrays( values, index, columns = [], [], []
[dset[lbl] for lbl in valid_names], names=valid_names index_names = copy(self.index_names)
) valid_names = [lbl for lbl in index_names if lbl in dset.keys()]
columns = dset.attrs.get("columns", None) if valid_names:
if "timepoint" in dset: index = pd.MultiIndex.from_arrays(
columns = f[path + "/timepoint"][()] [dset[lbl] for lbl in valid_names], names=valid_names
values = f[path + "/values"][()] )
df = pd.DataFrame(values, index=index, columns=columns) columns = dset.attrs.get("columns", None)
return df if "timepoint" in dset:
columns = f[path + "/timepoint"][()]
values = f[path + "/values"][()]
df = pd.DataFrame(values, index=index, columns=columns)
return df
@property @property
def stem(self): def stem(self):
......
...@@ -192,7 +192,9 @@ class PostProcessor(ProcessABC): ...@@ -192,7 +192,9 @@ class PostProcessor(ProcessABC):
else: else:
raise ("Incorrect dataset") raise ("Incorrect dataset")
# run process on signal # run process on signal
if len(signal) and ( if signal is None:
return None
elif len(signal) and (
not isinstance(loaded_process, LineageProcess) not isinstance(loaded_process, LineageProcess)
or len(loaded_process.lineage) or len(loaded_process.lineage)
): ):
......
...@@ -128,7 +128,7 @@ class Grouper(ABC): ...@@ -128,7 +128,7 @@ class Grouper(ABC):
] ]
records = [record for record in records if record is not None] records = [record for record in records if record is not None]
if len(errors): if len(errors):
print("Warning: Positions contain errors {errors}") print(f"Warning: Positions ({errors}) contain errors.")
assert len(records), "All data sets contain errors" assert len(records), "All data sets contain errors"
# combine into one dataframe # combine into one dataframe
concat = pd.concat(records, axis=0) concat = pd.concat(records, axis=0)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment