diff --git a/config.json b/config.json
index 2cadfef204b644a53e8204ead5a5123a825cbe4a..1dcbbd9c88fbcc31c2af6054a58c3f7c2b7699d4 100644
--- a/config.json
+++ b/config.json
@@ -1 +1,2 @@
-{"host": "sce-bio-c04287.bio.ed.ac.uk", "password": "***REMOVED***", "port": 4064, "user": "upload"}
+{"host": "sce-bio-c04287.bio.ed.ac.uk", "password": "***REMOVED***", "port": 4064,
+  "user": "upload", "experiment": 10932}
diff --git a/core/__init__.py b/core/__init__.py
index d28ccbde963b2ea17c6bef1cbaa6dad3d77cb8f0..df11ad372c38be5a11ca671ef182d9e0eafdb1e3 100644
--- a/core/__init__.py
+++ b/core/__init__.py
@@ -1 +1,24 @@
 import omero_py
+import logging
+from logging.handlers import RotatingFileHandler
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+console = logging.StreamHandler()
+console.setLevel(logging.WARNING)
+formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
+console.setFormatter(formatter)
+logger.addHandler(console)
+
+file_handler = RotatingFileHandler(filename='core.log',
+                                   maxBytes=100000,
+                                   backupCount=1)
+file_handler.setLevel(logging.DEBUG)
+file_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s '
+                                   '- %(message)s')
+file_handler.setFormatter(file_formatter)
+logger.addHandler(file_handler)
+
+logger.info('Set up the loggers.')
+
diff --git a/core/experiment.py b/core/experiment.py
index a1592fe87c67e6fc0b6fdf7fdd462534ac6a4f8c..1118b1c21a0d0774dab57823607dcd8c25313af3 100644
--- a/core/experiment.py
+++ b/core/experiment.py
@@ -4,14 +4,20 @@ import abc
 import re
 import glob
 from pathlib import Path
-
-from omero_metadata_parser.extract_acq_metadata import AcqMetadataParser
+import logging
+import imageio
+from tqdm import tqdm
+import numpy as np
+import json
 
 import omero
 from omero.gateway import BlitzGateway
+from omero_metadata_parser.extract_acq_metadata import AcqMetadataParser
 
 from timelapse import TimelapseOMERO, TimelapseLocal
 
+logger = logging.getLogger(__name__)
+
 
 class Experiment:
     """
@@ -26,7 +32,7 @@ class Experiment:
         self._current_position = None
 
     @staticmethod
-    def from_source(source, **kwargs):
+    def from_source(*args, **kwargs):
         """
         Factory method to construct an instance of an Experiment subclass (
         either ExperimentOMERO or ExperimentLocal).
@@ -37,8 +43,12 @@ class Experiment:
         arguments are required. If the data is stored locally keyword
         arguments are ignored.
         """
-
-        pass
+        if len(args) > 1:
+            logger.debug('ExperimentOMERO: {} {}'.format(args, kwargs))
+            return ExperimentOMERO(*args, **kwargs)
+        else:
+            logger.debug('ExperimentLocal: {} {}'.format(args, kwargs))
+            return ExperimentLocal(*args, **kwargs)
 
     @staticmethod
     def parse_metadata(filename):
@@ -83,17 +93,17 @@ class ExperimentOMERO(Experiment):
         connected = self.connection.connect()
         assert connected is True, "Could not connect to server."
         self.dataset = self.connection.getObject("Dataset", self.exptID)
+        self.name = self.dataset.getName()
 
         self._positions = {img.getName(): img.getId() for img in
                            self.dataset.listChildren()}
 
-        # Get annotation Acq file (cached as a tmp file)
+        # Get annotation Acq file
         try:
             acq_annotation = [item for item in self.dataset.listAnnotations()
-                              if (isinstance(item,
-                                             omero.gateway.FileAnnotationWrapper)
-                                  and item.getFileName().endswith('Acq.txt'))][
-                0]
+                              if (isinstance(
+                                    item, omero.gateway.FileAnnotationWrapper)
+                                and item.getFileName().endswith('Acq.txt'))][0]
         except IndexError as e:
+            raise IndexError("No acquisition file found for this experiment") from e
 
@@ -119,6 +129,78 @@ class ExperimentOMERO(Experiment):
         img = self.connection.getObject("Image", self._positions[position])
         return TimelapseOMERO(img)
 
+    def cache_locally(self, root_dir='./', positions=None, channels=None,
+                      timepoints=None, z_positions=None):
+        """
+        Save the experiment locally.
+
+        :param root_dir: The directory in which the experiment will be
+        saved. The experiment will be a subdirectory of "root_directory"
+        and will be named by its id.
+        """
+        logger.warning('Saving experiment {}; may take some time.'.format(
+            self.name))
+
+        if positions is None:
+            positions = self.positions
+        if channels is None:
+            channels = self.current_position.channels
+        if timepoints is None:
+            timepoints = range(self.current_position.size_t)
+        if z_positions is None:
+            z_positions = range(self.current_position.size_z)
+
+        save_dir = Path(root_dir) / self.name
+        if not save_dir.exists():
+            save_dir.mkdir()
+        logger.debug('Saving experiment to {}'.format(save_dir))
+        # Save the images
+        for pos_name in tqdm(positions):
+            pos = self.get_position(pos_name)
+            pos_dir = save_dir / pos_name
+            if not pos_dir.exists():
+                pos_dir.mkdir()
+            for channel in tqdm(channels):
+                for tp in tqdm(timepoints):
+                    for z_pos in tqdm(z_positions):
+                        ch_id = pos.get_channel_index(channel)
+                        image = pos.get_hypercube(x=None, y=None,
+                                                  width=None,
+                                                  height=None,
+                                                  channels=[ch_id],
+                                                  z_positions=[z_pos],
+                                                  timepoints=[tp])
+
+                        im_name = "{}_{:06d}_{}_{:03d}.png".format(self.name,
+                                                           tp + 1,
+                                                           channel,
+                                                           z_pos + 1)
+                        imageio.imwrite(str(pos_dir / im_name), np.squeeze(
+                            image))
+
+        # Save the file annotations
+        # Get annotation Acq file
+        try:
+            acq_annotation = [item for item in self.dataset.listAnnotations()
+                              if (isinstance(
+                                    item, omero.gateway.FileAnnotationWrapper)
+                                and item.getFileName().endswith('Acq.txt'))][0]
+        except IndexError as e:
+            raise IndexError("No acquisition file found for this experiment") from e
+
+        filepath = save_dir / acq_annotation.getFileName()
+        with open(str(filepath), 'wb') as acq_fd:
+            # Download the file
+            for chunk in acq_annotation.getFileInChunks():
+                acq_fd.write(chunk)
+
+        # Create caching log
+        cache_config = dict(positions=list(positions), channels=list(channels),
+                            timepoints=list(timepoints),
+
+        with open(str(save_dir / 'cache.config'), 'w') as fd:
+            json.dump(cache_config, fd)
+        logger.info('Downloaded experiment {}'.format(self.exptID))
 
 class ExperimentLocal(Experiment):
     """
@@ -127,9 +209,13 @@ class ExperimentLocal(Experiment):
     def __init__(self, root_dir):
         self.root_dir = Path(root_dir)
         self.exptID = self.root_dir.name
-        pos, acq_file = ExperimentLocal.parse_dir_structure(self.root_dir)
+        pos, acq_file, cache = ExperimentLocal.parse_dir_structure(self.root_dir)
         self._positions = pos
         self.metadata = Experiment.parse_metadata(acq_file)
+        if cache is not None:
+            with open(cache, 'r') as fd:
+                cache_config = json.load(fd)
+            self.cache_config(cache_config)
         self._current_position = self.get_position(self.positions[0])
 
 
@@ -154,8 +240,32 @@ class ExperimentLocal(Experiment):
                          f.is_dir())]
         acq_file = glob.glob(os.path.join(str(root_dir), '*[Aa]cq.txt'))[0]
 
-        return positions, acq_file
-
+        cache_file = glob.glob(os.path.join(str(root_dir), 'cache.config'))
+        if len(cache_file) == 1:
+            cache_file = cache_file[0]
+        else:
+            cache_file = None
+        return positions, acq_file, cache_file
+
+    def cache_config(self, cache):
+        self.metadata.positions = self.metadata.positions[
+                    self.metadata.positions.name.isin(cache['positions'])]
+        self.metadata.channels = self.metadata.channels[
+                    self.metadata.channels.names.isin(cache['channels'])]
+        ntimepoints = len(cache['timepoints'])
+        totalduration = ntimepoints*self.metadata.times['interval']
+        self.metadata.times.update(dict(ntimepoints=ntimepoints,
+                                        totalduration=totalduration))
+
+        diffs = np.unique([cache['z_positions'][i+1] - cache['z_positions'][i]
+                            for i in range(len(cache['z_positions']) - 1)])
+        if len(diffs) != 1:
+            self.metadata.zsections.spacing = np.nan
+        else:
+            self.metadata.zsections.spacing = \
+                self.metadata.zsections.spacing * diffs[0]
+
+        self.metadata.zsections.sections = len(cache['z_positions'])
 
     @property
     def positions(self):
diff --git a/core/timelapse.py b/core/timelapse.py
index 4aa97e033458680128f3009c9c310263e387c4d8..07259214392d3489b893c1af0ab087438b4dbebe 100644
--- a/core/timelapse.py
+++ b/core/timelapse.py
@@ -4,6 +4,10 @@ import numpy as np
 import imageio
 from operator import itemgetter
 
+import logging
+
+logger = logging.getLogger(__name__)
+
 class Timelapse:
     """
     Timelapse class contains the specifics of one position.
@@ -138,6 +142,7 @@ class TimelapseLocal(Timelapse):
         -- exptID_{timepointID}_{ChannelID}_{z_position_id}.png
 
     """
+
     def __init__(self, position, root_dir, metadata):
         self.pos_dir = root_dir / position
         assert self.pos_dir.exists()
@@ -167,29 +172,27 @@ class TimelapseLocal(Timelapse):
             # Check that the metadata was correct/we are not missing any images
             assert len(img_list) != 0, "Channel {} not available, incorrect " \
                                        "metadata"
-            img_mapper[channel] = [sorted(list((group)), key= lambda item:
-                                   item.stem.split('_')[-1])
+            img_mapper[channel] = [sorted(list((group)), key=lambda item:
+            item.stem.split('_')[-1])
                                    for _, group in
                                    itertools.groupby(sorted(img_list),
-                                   key=lambda img: img.stem.split('_')[-3])]
-
+                                                     key=lambda img:
+                                                     img.stem.split('_')[-3])]
 
-        # TODO just remove those sets that are incomplete and keep the rest
-        # Check that there are enough timepoints
         for ch, item in img_mapper.items():
-            #assert len(item) == self.size_t, "Not enough timepoints in " \
-            #                                 "channel {}: {} out of {}" \
-            #                                 "".format(ch, len(item),
-            #                                           self.size_t)
-            pass
+            if len(item) != int(self.size_t):
+                logger.warning("Not enough timepoints in position {}, "
+                               "channel {}: {} out of {}".format(self.id, ch,
+                                                                 len(item),
+                                                                 self.size_t))
         for ix, (ch, im_list) in enumerate(img_mapper.items()):
             for item in im_list:
-                #assert len(item) == self.size_z, "Not enough z-stacks for " \
-                #                                 "channel {}, tp {}; {" \
-                #                                 "} out of {}".format(
-                #                                 ch, ix, len(item),
-                #                                 self.size_z)
-                pass
+                if len(item) != int(self.size_z):
+                    logger.warning("Not enough z-stacks for position {}, " \
+                                   "channel {}, tp {}; {} out of " \
+                                   "{}".format(self.id, ch, ix, len(item),
+                                               self.size_z))
+
         self.image_mapper = img_mapper
 
     @property
@@ -221,13 +224,16 @@ class TimelapseLocal(Timelapse):
 
     def get_hypercube(self, x, y, width, height, z_positions, channels,
                       timepoints):
-        # TODO Ability to specify only one direction
-        if None in [x, y, width, height]:
-            # Get full tile
-            xmin, xmax, ymin, ymax = None, None, None, None
+        if None in [x, height]:
+            # Get full x direction
+            xmin, xmax = None, None
         else:
             xmin = x
             xmax = x + height
+        if None in [y, width]:
+            # Get full y direction
+            ymin, ymax = None, None
+        else:
             ymin = y
             ymax = y + width
 
@@ -245,7 +251,8 @@ class TimelapseLocal(Timelapse):
             txyz = []
             for t in timepoints:
                 xyz = map(imageio.imread, z_pos_getter(self.image_mapper[
-                                                self.channels[ch_id]][t]))
+                                                           self.channels[
+                                                               ch_id]][t]))
                 txyz.append(np.dstack(xyz)[xmin:xmax, ymin:ymax])
             ctxyz.append(np.stack(txyz))
         return np.stack(ctxyz)
diff --git a/test/config.json b/test/config.json
deleted file mode 100644
index 2cadfef204b644a53e8204ead5a5123a825cbe4a..0000000000000000000000000000000000000000
--- a/test/config.json
+++ /dev/null
@@ -1 +0,0 @@
-{"host": "sce-bio-c04287.bio.ed.ac.uk", "password": "***REMOVED***", "port": 4064, "user": "upload"}
diff --git a/test/create_experiment.py b/test/create_experiment.py
index a26b3a4b6579dca437abb4a9923bacc63cacbab3..d4178cc9838548d782e1509066f2ed07f68bf46c 100644
--- a/test/create_experiment.py
+++ b/test/create_experiment.py
@@ -1,15 +1,40 @@
 import os
-import sys 
+import sys
+
 sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
 import json
+import logging
+from logging.handlers import RotatingFileHandler
+
+from core.experiment import Experiment
+logger = logging.getLogger('core')
+logger.handlers = []
+logger.setLevel(logging.DEBUG)
+
+console = logging.StreamHandler()
+console.setLevel(logging.WARNING)
+formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
+console.setFormatter(formatter)
+logger.addHandler(console)
+
+file_handler = RotatingFileHandler(filename='test.log',
+                                   maxBytes=100000,
+                                   backupCount=1)
+
+file_handler.setLevel(logging.DEBUG)
+file_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s '
+                                   '- %(message)s')
+file_handler.setFormatter(file_formatter)
+logger.addHandler(file_handler)
+
+logger.debug('Set up the loggers as test.')
 
 with open('config.json', 'r') as fd:
     config = json.load(fd)
 
-from core.experiment import ExperimentOMERO
-
-expt = ExperimentOMERO(10421, config['user'], config['password'], config['host'],
-        config['port'])
+expt = Experiment.from_source(config['experiment'], config['user'],
+                              config['password'], config['host'],
+                              config['port'])
 
 print(expt.metadata.channels)
 print(expt.metadata.times)
@@ -17,8 +42,12 @@ print(expt.metadata.switch_params)
 print(expt.metadata.zsections)
 print(expt.metadata.positions)
 
-print(expt.get_hypercube(x=None, y=None, width=None, height=None,
-                         z_positions=[0], channels=[0], timepoints=[0]))
-
+# print(expt.get_hypercube(x=None, y=None, width=None, height=None,
+#                         z_positions=[0], channels=[0], timepoints=[0]))
+# expt.cache_locally(root_dir='/Users/s1893247/PhD/pipeline-core/data/',
+#                   positions=['pos001', 'pos002', 'pos003'],
+#                   channels=['Brightfield', 'GFP'],
+#                   timepoints=range(3),
+#                   z_positions=None)
 
 expt.connection.seppuku()
diff --git a/test/create_local_expt.py b/test/create_local_expt.py
index 363fe180a7ee2c77bd548906b23c0037a7ee4fd5..285db31cf30f34952c241bf6550a680aac67d4c8 100644
--- a/test/create_local_expt.py
+++ b/test/create_local_expt.py
@@ -1,25 +1,55 @@
 import os
 import sys 
 sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+import logging
+from logging.handlers import RotatingFileHandler
 
-root_directory = '/Users/s1893247/PhD/omero_connect_demo/test_data'
 
-from core.experiment import ExperimentLocal
+from core.experiment import Experiment
 
-expt = ExperimentLocal(root_directory)
+logger = logging.getLogger('core')
+logger.handlers = []
+logger.setLevel(logging.DEBUG)
 
-print(expt.metadata.channels)
-print(expt.metadata.times)
-print(expt.metadata.switch_params)
-print(expt.metadata.zsections)
-print(expt.metadata.positions)
+console = logging.StreamHandler()
+console.setLevel(logging.WARNING)
+formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
+console.setFormatter(formatter)
+logger.addHandler(console)
 
-print(expt.current_position.image_mapper.keys())
-print(map(lambda x: x.name, expt.current_position.image_mapper['GFP'][0]))
+file_handler = RotatingFileHandler(filename='test.log',
+                                   maxBytes=100000,
+                                   backupCount=1)
 
-print(expt.get_hypercube(x=0, y=0, width=100, height=None,
-                         z_positions=[0, 2, 4], channels=[0, 1], timepoints=[
-        0]).shape)
+file_handler.setLevel(logging.DEBUG)
+file_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s '
+                                   '- %(message)s')
+file_handler.setFormatter(file_formatter)
+logger.addHandler(file_handler)
 
 
-#expt.connection.seppuku()
+logger.debug('Set up the loggers as test.')
+
+if __name__ == "__main__":
+    #root_directory = '/Users/s1893247/PhD/pipeline-core/data
+    # /sga_glc0_1_Mig1Nhp_Maf1Nhp_Msn2Maf1_Mig1Mig1_Msn2Dot6_05'
+
+    root_directory = '/Users/s1893247/PhD/omero_connect_demo/test_data'
+    expt = Experiment.from_source(root_directory)
+
+    print(expt.metadata.channels)
+    print(expt.metadata.times)
+    print(expt.metadata.switch_params)
+    print(expt.metadata.zsections)
+    print(expt.metadata.positions)
+
+
+    print(expt.current_position.image_mapper.keys())
+    print(map(lambda x: x.name, expt.current_position.image_mapper['GFP'][0]))
+
+    print(expt.get_hypercube(x=0, y=0, width=100, height=None,
+                             z_positions=[0, 2, 4], channels=[0, 1],
+                             timepoints=[0]).shape)
+
+    expt.current_position = expt.positions[-1]
+    print(map(lambda x: x.name, expt.current_position.image_mapper['GFP'][0]))