diff --git a/aliby/io/metadata_parser.py b/aliby/io/metadata_parser.py
index 81938152ecdf00bf3d892d4231ca44e5b47d6635..86f42f8cd386d6ad9282ceb3fe870ad6cd34d810 100644
--- a/aliby/io/metadata_parser.py
+++ b/aliby/io/metadata_parser.py
@@ -15,21 +15,23 @@ from logfile_parser import Parser
 # then pare down on what specific information is really useful later.
 
 # Needed because HDF5 attributes do not support dictionaries
-def flatten_dict(nested_dict, separator='/'):
-    '''
+def flatten_dict(nested_dict, separator="/"):
+    """
     Flattens nested dictionary
-    '''
+    """
     df = pd.json_normalize(nested_dict, sep=separator)
-    return df.to_dict(orient='records')[0]
+    return df.to_dict(orient="records")[0]
+
 
 # Needed because HDF5 attributes do not support datetime objects
 # Takes care of time zones & daylight saving
-def datetime_to_timestamp(time, locale = 'Europe/London'):
-    '''
+def datetime_to_timestamp(time, locale="Europe/London"):
+    """
     Convert datetime object to UNIX timestamp
-    '''
+    """
     return timezone(locale).localize(time).timestamp()
 
+
 def find_file(root_dir, regex):
     file = glob.glob(os.path.join(str(root_dir), regex))
     if len(file) != 1:
@@ -37,33 +39,36 @@ def find_file(root_dir, regex):
     else:
         return file[0]
 
+
 # TODO: re-write this as a class if appropriate
 # WARNING: grammars depend on the directory structure of a locally installed
 # logfile_parser repo
-def parse_logfiles(root_dir,
-                   acq_grammar = 'multiDGUI_acq_format.json',
-                   log_grammar = 'multiDGUI_log_format.json'):
-    '''
+def parse_logfiles(
+    root_dir,
+    acq_grammar="multiDGUI_acq_format.json",
+    log_grammar="multiDGUI_log_format.json",
+):
+    """
     Parse acq and log files depending on the grammar specified, then merge into
     single dict.
-    '''
+    """
     # Both acq and log files contain useful information.
-    #ACQ_FILE = 'flavin_htb2_glucose_long_ramp_DelftAcq.txt'
-    #LOG_FILE = 'flavin_htb2_glucose_long_ramp_Delftlog.txt'
+    # ACQ_FILE = 'flavin_htb2_glucose_long_ramp_DelftAcq.txt'
+    # LOG_FILE = 'flavin_htb2_glucose_long_ramp_Delftlog.txt'
     log_parser = Parser(log_grammar)
     try:
-        log_file = find_file(root_dir, '*log.txt')
+        log_file = find_file(root_dir, "*log.txt")
     except:
-        raise ValueError('Experiment log file not found.')
-    with open(log_file, 'r') as f:
+        raise ValueError("Experiment log file not found.")
+    with open(log_file, "r") as f:
         log_parsed = log_parser.parse(f)
 
     acq_parser = Parser(acq_grammar)
     try:
-        acq_file = find_file(root_dir, '*[Aa]cq.txt')
+        acq_file = find_file(root_dir, "*[Aa]cq.txt")
     except:
-        raise ValueError('Experiment acq file not found.')
-    with open(acq_file, 'r') as f:
+        raise ValueError("Experiment acq file not found.")
+    with open(acq_file, "r") as f:
         acq_parsed = acq_parser.parse(f)
 
     parsed = {**acq_parsed, **log_parsed}
@@ -73,5 +78,9 @@ def parse_logfiles(root_dir,
             parsed[key] = datetime_to_timestamp(value)
 
     parsed_flattened = flatten_dict(parsed)
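+    # HDF5 attributes do not support None, so replace None entries in lists with 0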
+    for k, v in parsed_flattened.items():
+        if isinstance(v, list):
+            parsed_flattened[k] = [0 if el is None else el for el in v]
 
     return parsed_flattened