Commit 2635e53d authored by Alán Muñoz

update imports

parent 82c54806
@@ -20,7 +20,7 @@ from baby.crawler import BabyCrawler
 from requests.exceptions import Timeout, HTTPError
 from requests_toolbelt.multipart.encoder import MultipartEncoder
-from core.utils import Cache, accumulate, get_store_path
+from pcore.utils import Cache, accumulate, get_store_path
 
 
 ################### Dask Methods ################################
 def format_segmentation(segmentation, tp):
...
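The only change in this hunk, and the theme of the whole commit, is swapping the `core` package prefix for `pcore`. A rename of this kind can be applied mechanically; the sketch below is illustrative only (the helper script and regex are hypothetical, not part of this commit):

import re
from pathlib import Path

# Hypothetical helper: rewrite `core` import prefixes to `pcore` in-place
# across all Python files under the current directory.
PATTERN = re.compile(r"\b(from|import)\s+core(\.|\b)")

for path in Path(".").rglob("*.py"):
    text = path.read_text()
    new_text = PATTERN.sub(lambda m: f"{m.group(1)} pcore{m.group(2)}", text)
    if new_text != text:
        path.write_text(new_text)
        print(f"updated {path}")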
@@ -4,7 +4,7 @@ from pathlib import Path
 import tensorflow as tf
 
-from core.io.writer import DynamicWriter
+from pcore.io.writer import DynamicWriter
 
 
 def initialise_tf(version):
@@ -16,29 +16,29 @@ def initialise_tf(version):
         return session
     # TODO this only works for TF2
     if version == 2:
-        gpus = tf.config.experimental.list_physical_devices('GPU')
+        gpus = tf.config.experimental.list_physical_devices("GPU")
         if gpus:
             for gpu in gpus:
                 tf.config.experimental.set_memory_growth(gpu, True)
-            logical_gpus = tf.config.experimental.list_logical_devices('GPU')
-            print(len(gpus), "Physical GPUs,", len(logical_gpus),
-                  "Logical GPUs")
+            logical_gpus = tf.config.experimental.list_logical_devices("GPU")
+            print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs")
         return None
 
 
 def timer(func, *args, **kwargs):
     start = perf_counter()
     result = func(*args, **kwargs)
-    print(f'Function {func.__name__}: {perf_counter() - start}s')
+    print(f"Function {func.__name__}: {perf_counter() - start}s")
     return result
 
 
 ################## CUSTOM OBJECTS ##################################
 
 
 class ModelPredictor:
     """Generic object that takes a NN and returns the prediction.
 
     Use for predicting fluorescence/other from bright field.
     This does not do instance segmentations of anything.
     """
@@ -49,15 +49,13 @@ class ModelPredictor:
     def get_data(self, tp):
         # Change axes to X,Y,Z rather than Z,Y,X
-        return self.tiler.get_tp_data(tp, self.bf_channel).swapaxes(1,
-                                                                     3).swapaxes(
-            1, 2)
+        return self.tiler.get_tp_data(tp, self.bf_channel).swapaxes(1, 3).swapaxes(1, 2)
 
     def format_result(self, result, tp):
-        return {self.name: result, 'timepoints': [tp] * len(result)}
+        return {self.name: result, "timepoints": [tp] * len(result)}
 
     def run_tp(self, tp, **kwargs):
-        """ Simulating processing time with sleep"""
+        """Simulating processing time with sleep"""
         # Access the image
         segmentation = self.model.predict(self.get_data(tp))
         return self._format_result(segmentation, tp)
@@ -66,13 +64,12 @@ class ModelPredictor:
 class ModelPredictorWriter(DynamicWriter):
     def __init__(self, file, name, shape, dtype):
         super.__init__(file)
-        self.datatypes = {name: (shape, dtype),
-                          'timepoint': ((None,), np.uint16)}
-        self.group = f'{self.name}_info'
+        self.datatypes = {name: (shape, dtype), "timepoint": ((None,), np.uint16)}
+        self.group = f"{self.name}_info"
 
 
 class Saver:
-    channel_names = {0: 'BrightField', 1: 'GFP'}
+    channel_names = {0: "BrightField", 1: "GFP"}
 
     def __init__(self, tiler, save_directory, pos_name):
         """This class straight up saves the trap data for use with neural networks in the future."""
@@ -96,5 +93,5 @@ class Saver:
             ch_dir = self.channel_dir(ch)
             data = self.get_data(tp, ch)
             for tid, trap in enumerate(data):
-                np.save(ch_dir / f'{self.name}_{tid}_{tp}.npy', trap)
+                np.save(ch_dir / f"{self.name}_{tid}_{tp}.npy", trap)
         return
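As a usage note for the helpers in this file (assuming it is pcore/haystack.py, which the pipeline imports further down suggest): for TF2, initialise_tf only enables GPU memory growth, and timer is a plain call wrapper rather than a decorator. A minimal sketch:

import numpy as np
from pcore.haystack import initialise_tf, timer  # assumed module path

# In the TF2 branch this enables GPU memory growth and returns None;
# a tf.Session is only created for the TF1 branch.
initialise_tf(2)

# timer(func, *args, **kwargs) calls func once, prints the elapsed time,
# and returns func's result unchanged.
sorted_values = timer(np.sort, np.random.rand(100_000))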
@@ -8,7 +8,7 @@ import h5py
 import pandas as pd
 from utils_find_1st import find_1st, cmp_larger
 
-from core.io.base import BridgeH5
+from pcore.io.base import BridgeH5
 
 
 class Signal(BridgeH5):
...
@@ -3,13 +3,10 @@ import json
 from time import perf_counter
 import logging
 
-from core.experiment import MetaData
 from pathos.multiprocessing import Pool
 from multiprocessing import set_start_method
 
 import numpy as np
-from extraction.core.functions.defaults import exparams_from_meta
-from core.io.signal import Signal
 
 # set_start_method("spawn")
@@ -21,13 +18,16 @@ import operator
 from baby.brain import BabyBrain
 
-from core.io.omero import Dataset, Image
-from core.haystack import initialise_tf
-from core.baby_client import DummyRunner
-from core.segment import Tiler
-from core.io.writer import TilerWriter, BabyWriter
-from core.utils import timed
+from pcore.experiment import MetaData
+from pcore.io.omero import Dataset, Image
+from pcore.haystack import initialise_tf
+from pcore.baby_client import DummyRunner
+from pcore.segment import Tiler
+from pcore.io.writer import TilerWriter, BabyWriter
+from pcore.utils import timed
+from pcore.io.signal import Signal
+from extraction.core.functions.defaults import exparams_from_meta
 from extraction.core.extractor import Extractor
 from extraction.core.parameters import Parameters
 from extraction.core.functions.defaults import get_params
@@ -278,9 +278,9 @@ def visualise_timing(timings: dict, save_file: str):
 strain = ""
 # exp = 18616
 # exp = 19232
-# exp = 19995
+exp = 19995
 # exp = 19993
-exp = 20191
+# exp = 20191
 # exp = 19831
 
 with Dataset(exp) as conn:
@@ -294,17 +294,17 @@ tps = int(meta["size_t"])
 config = dict(
     general=dict(
         id=exp,
-        distributed=4,
+        distributed=5,
         tps=tps,
         directory="../data/",
         strain=strain,
-        tile_size=117,
+        tile_size=96,
     ),
     # general=dict(id=19303, distributed=0, tps=tps, strain=strain, directory="../data/"),
     tiler=dict(),
     baby=dict(tf_version=2),
     earlystop=dict(
-        min_tp=200,
+        min_tp=300,
         thresh_pos_clogged=0.3,
         thresh_trap_clogged=7,
         ntps_to_eval=5,
...
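Since this script already imports json (see the first hunk of this file), one illustrative way to keep the run settings alongside the output is to dump the config dict after building it; the file path below is an assumption, not part of the commit:

import json

# Illustrative only: record tile_size, distributed, earlystop thresholds, etc.
with open("../data/pipeline_config.json", "w") as f:
    json.dump(config, f, indent=2)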