diff --git a/core/functions/test_hdf.py b/core/functions/test_hdf.py
index bd7fd764345840e80c63bdd49f7aad4a8d5ad2ca..a9f8faca65a2c5830f8b751139079d890c3fe442 100644
--- a/core/functions/test_hdf.py
+++ b/core/functions/test_hdf.py
@@ -4,7 +4,13 @@ import h5py
 from core.cells import Cells
 import pandas as pd
 
-f = h5py.File("/home/alan/Documents/sync_docs/PhD/tmp/DO6MS2_003store.h5")
+# f = h5py.File("/home/alan/Documents/sync_docs/PhD/tmp/DO6MS2_003store.h5")
+f = h5py.File(
+    "/shared_libs/pipeline-core/scripts/data/20191026_ss_experiments_01/DO6MS2_003store.h5"
+)
 tracks = f["/extraction/general/None/area"][()]
-cell = Cells.from_source("/home/alan/Documents/sync_docs/PhD/tmp/DO6MS2_003store.h5")
+# point Cells at the same store as the handle opened above
+cell = Cells.from_source(
+    "/shared_libs/pipeline-core/scripts/data/20191026_ss_experiments_01/DO6MS2_003store.h5"
+)
 from postprocessor.core.picker import Picker
diff --git a/core/parameters.py b/core/parameters.py
new file mode 100644
index 0000000000000000000000000000000000000000..b9e67ef3b3a61a5a70bbc1d8bbb3dd96aa4cbf78
--- /dev/null
+++ b/core/parameters.py
@@ -0,0 +1,36 @@
+from abc import ABC, abstractmethod
+from pathlib import Path
+
+from yaml import dump, safe_load
+
+
+class ParametersABC(ABC):
+    """
+    Base class to add yaml functionality to parameters
+
+    """
+
+    def to_dict(self):
+        return self.__dict__
+
+    @classmethod
+    def from_dict(cls, d):
+        return cls(**d)
+
+    def to_yaml(self, path=None):
+        return dump(self.__dict__, path)
+
+    @classmethod
+    def from_yaml(cls, yam):
+        with open(Path(yam)) as f:
+            params = safe_load(f)
+        return cls(**params)
+
+    @classmethod
+    @abstractmethod
+    def default(cls):
+        pass
+
+    @abstractmethod
+    def run(self):
+        pass
diff --git a/core/picker.py b/core/picker.py
index 2618ae99a99b307918eebec00a1c8fb4b10dc728..eaed8e9014ab4b4451093c58d8f848bd7e66b4bc 100644
--- a/core/picker.py
+++ b/core/picker.py
@@ -1,18 +1,51 @@
 from typing import Tuple, Union, List
+from abc import ABC, abstractmethod
 
 import numpy as np
 import pandas as pd
 
 from core.cells import CellsHDF
+
+from postprocessor.core.parameters import ParametersABC
 from postprocessor.core.functions.signals import max_ntps, max_nonstop_ntps
 
 
-# def BasePicker(ABC):
-#     """
-#     Base class to add mother-bud filtering support
-#     """
-#     def __init__(self, branch=None, lineage=None):
-#         self.lineage = lineage
+class PickerParameters(ParametersABC):
+    """
+    Parameter set for Picker.
+
+    Subclasses ParametersABC (rather than only ABC.register-ing it) so
+    that to_dict/from_dict and the yaml helpers are actually inherited;
+    a virtual subclass created via register() would not provide them.
+    """
+
+    def __init__(
+        self,
+        condition: Tuple[str, Union[float, int]] = None,
+        lineage: str = None,
+        sequence: List[str] = None,
+    ):
+        self.condition = condition
+        self.lineage = lineage
+        # None sentinel avoids sharing one mutable default list across calls
+        self.sequence = ["lineage", "condition"] if sequence is None else sequence
+
+    @classmethod
+    def default(cls):
+        return cls.from_dict(
+            {
+                "condition": ("present", 0.8),
+                "lineage": None,
+                "sequence": ["lineage", "condition"],
+            }
+        )
+
+    # keep the original plural spelling available to callers
+    defaults = default
+
+    def run(self):
+        # concrete no-op: parameter containers carry no computation
+        pass
 
 
 class Picker:
@@ -29,16 +47,15 @@ class Picker:
         self,
         signals: pd.DataFrame,
         cells: CellsHDF,
-        condition: Tuple[str, Union[float, int]] = None,
-        lineage: str = None,
-        sequence: List[str] = ["lineage", "condition"],
+        parameters: PickerParameters,
     ):
         self.signals = signals
         self._index = signals.index
         self._cells = cells
-        self.condition = condition
-        self.lineage = lineage
-        self.sequence = sequence
+        self.parameters = parameters
+
+        for k, v in parameters.to_dict().items():  # access parameters directly
+            setattr(self, k, v)
 
     @staticmethod
     def mother_assign_to_mb_matrix(ma: List[np.array]):
diff --git a/core/processor.py b/core/processor.py
index c4e4acee964b24d7ad332d07583dc9f8d5e6f83b..81f302de27a2f1f5cc714b6f53e12d40a65dd042 100644
--- a/core/processor.py
+++ b/core/processor.py
@@ -2,18 +2,25 @@ import pandas as pd
 
 
 class Parameters:
-    def __init__(self, merger=None, picker=None, processes=None, branches=None):
+    """
+    Anthology of parameters used for postprocessing
+    """
+
+    def __init__(self, merger=None, picker=None, processes=None, datasets=None):
         self.merger = merger
         self.picker = picker
         self.processes = processes
-        self.branches = branches
+
+        self.datasets = datasets
 
     def __getitem__(self, item):
         return getattr(self, item)
 
 
 class PostProcessor:
-    def __init__(self, parameters, signals):
+    def __init__(self, fname, parameters, signals):
+        self._fname = fname  # reserved for the writer; not used yet
+        self._signals = signals
         self.parameters = parameters
 
         self.merger = Merger(parameters["merger"])
@@ -21,13 +28,16 @@
         self.processes = [
             self.get_process(process) for process in parameters["processes"]
         ]
-        self.branches = parameters["branches"]
+        self.datasets = parameters["datasets"]
 
     def run(self):
-        self.merger.run(signals.get_branch(self.branches["merger"]))
-        self.picker.run(signals.get_branch(self.branches["picker"]))
-        for process, branch in zip(self.processes, self.branches["processes"]):
-            process.run(signals.get_branch(branch))
+        self.merger.run(self._signals[self.datasets["merger"]])
+        self.picker.run(self._signals[self.datasets["picker"]])
+        for process, dataset in zip(self.processes, self.datasets["processes"]):
+            process_result = process.run(self._signals[dataset])
+            # NOTE(review): self.writer is never assigned in this class --
+            # confirm where the writer instance is meant to come from
+            self.writer.write(process_result, dataset)
 
 class Signals:
@@ -38,26 +44,13 @@ class Signals:
     def __init__(self, file):
         self._hdf = h5py.File(file, "r")
 
-    @staticmethod
-    def _if_ext_or_post(name):
-        if name.startswith("extraction") or name.startswith("postprocessing"):
-            if len(name.split("/")) > 3:
-                return name
-
-    @property
-    def branches(self):
-        return signals._hdf.visit(self._if_ext_or_post)
+    def __getitem__(self, dataset):
+        dset = self._hdf[dataset][()]
+        attrs = self._hdf[dataset].attrs
+        first_dataset = "/" + dataset.split("/")[0] + "/"
+        timepoints = self._hdf[first_dataset].attrs["processed_timepoints"]
 
-    def get_branch(self, branch):
-        return self._hdf[branch][()]
-
-    def branch_to_df(self, branch):
-        dset = self._hdf[branch][()]
-        attrs = self._hdf[branch].attrs
-        first_branch = "/" + branch.split("/")[0] + "/"
-        timepoints = self._hdf[first_branch].attrs["processed_timepoints"]
-
-        if "cell_label" in self._hdf[branch].attrs:
+        if "cell_label" in self._hdf[dataset].attrs:
             ids = pd.MultiIndex.from_tuples(
                 zip(attrs["trap"], attrs["cell_label"]), names=["trap", "cell_label"]
             )
@@ -66,5 +59,17 @@
 
         return pd.DataFrame(dset, index=ids, columns=timepoints)
 
+    @staticmethod
+    def _if_ext_or_post(name):
+        if name.startswith("extraction") or name.startswith("postprocessing"):
+            if len(name.split("/")) > 3:
+                return name
+
+    @property
+    def datasets(self):
+        # NOTE(review): h5py's visit() stops at the first non-None return,
+        # so this yields only a single matching name -- confirm a list is
+        # not wanted here
+        return self._hdf.visit(self._if_ext_or_post)
+
     def close(self):
         self._hdf.close()