Commit 22318d06 authored by Alán Muñoz

add support for experiment restart and com prints

parent 8b0f55b5
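
This commit lets a pipeline run resume where an interrupted one left off: on startup it tries to load the tracker states persisted in the position's HDF5 file, seeds the BABY runner's crawler with them (the attribute rename in the second hunk, tracker_states, makes that seeding actually take effect), and starts the timepoint loop from the first unprocessed timepoint instead of deleting the file. A minimal sketch of that resume pattern, assuming StateReader from the diff; the function itself and the overwrite flag are illustrative, not the pipeline's actual API:

    from pathlib import Path

    # StateReader is the project's HDF5 state reader; its import path is
    # assumed to come from the pipeline's I/O module.

    def resolve_restart(filename, process_from, overwrite=False):
        # Returns (process_from, trackers_state, from_start): the first
        # timepoint to process, the saved per-trap tracker states (or
        # None), and whether this is a fresh run.
        trackers_state, from_start = None, True
        if not overwrite and Path(filename).exists():
            try:
                # Recover the states written by StateWriter at the last
                # completed timepoint; any failure falls back to a
                # fresh start.
                trackers_state = StateReader(filename).get_formatted_states()
                process_from += 1  # first timepoint not yet processed
                from_start = False
            except Exception:
                pass
        return process_from, trackers_state, from_start
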
@@ -234,13 +234,18 @@ class Pipeline(ProcessABC):
             trackers_state = StateReader(
                 filename
             ).get_formatted_states()
+            # print(
+            #     f"Loaded trackers_state with control values {trackers_state[20]['prev_feats']}"
+            # )
             tiler.n_processed = process_from
             process_from += 1
             from_start = False
-        except:
+        except Exception as e:
+            # print(e)
             pass
         if from_start:  # New experiment or overwriting
+            # print("Starting from scratch")
             try:
                 os.remove(filename)
             except:
@@ -265,7 +270,7 @@ class Pipeline(ProcessABC):
             BabyParameters.from_dict(config["baby"]), tiler
         )
         if trackers_state:
-            runner.crawler.trackers_state = trackers_state
+            runner.crawler.tracker_states = trackers_state
         bwriter = BabyWriter(filename)
         swriter = StateWriter(filename)
@@ -303,13 +308,18 @@ class Pipeline(ProcessABC):
         # Adjust tps based on how many tps are available on the server
         tps = min(general_config["tps"], image.data.shape[0])
         frac_clogged_traps = 0
+        # print(f"Processing from {process_from}")
         for i in tqdm(
             range(process_from, tps), desc=image.name, initial=process_from
         ):
+            # print(f"Processing timepoint {i}")
             if (
                 frac_clogged_traps < earlystop["thresh_pos_clogged"]
                 or i < earlystop["min_tp"]
             ):
+                # if tps - process_from > 2 and i == 2:
+                #     exit()
                 t = perf_counter()
                 trap_info = tiler.run_tp(i)
                 logging.debug(f"Timing:Trap:{perf_counter() - t}s")
@@ -317,6 +327,14 @@ class Pipeline(ProcessABC):
                 writer.write(trap_info, overwrite=[], tp=i)
                 logging.debug(f"Timing:Writing-trap:{perf_counter() - t}s")
                 t = perf_counter()
+                # try:
+                #     print(
+                #         f"Performing seg with control values {runner.crawler.tracker_states[20]['prev_feats']}"
+                #     )
+                # except:
+                #     pass
                 seg = runner.run_tp(i)
                 logging.debug(f"Timing:Segmentation:{perf_counter() - t}s")
                 # logging.debug(
@@ -324,6 +342,14 @@ class Pipeline(ProcessABC):
                 # )
                 t = perf_counter()
                 bwriter.write(seg, overwrite=["mother_assign"], tp=i)
+                print(
+                    f"Writing state in tp {i} with control values {runner.crawler.tracker_states[20]['prev_feats']}"
+                )
+                swriter.write(
+                    data=runner.crawler.tracker_states,
+                    overwrite=swriter.datatypes.keys(),
+                    tp=i,
+                )
                 logging.debug(f"Timing:Writing-baby:{perf_counter() - t}s")
                 # TODO add time-skipping for cases when the
@@ -355,22 +381,19 @@ class Pipeline(ProcessABC):
                 print("Frac clogged traps: ", frac_clogged_traps)
             # State Writer to recover interrupted experiments
-            swriter.write(
-                data=runner.crawler.tracker_states,
-                overwrite=swriter.datatypes.keys(),
-            )
             meta.add_fields({"last_processed": i})
-            import pickle as pkl
-            with open(
-                Path(bwriter.file).parent / f"{i}_live_state.pkl", "wb"
-            ) as f:
-                pkl.dump(runner.crawler.tracker_states, f)
-            with open(
-                Path(bwriter.file).parent / f"{i}_read_state.pkl", "wb"
-            ) as f:
-                pkl.dump(StateReader(bwriter.file).get_formatted_states(), f)
+            # import pickle as pkl
+            # with open(
+            #     Path(bwriter.file).parent / f"{i}_live_state.pkl", "wb"
+            # ) as f:
+            #     pkl.dump(runner.crawler.tracker_states, f)
+            # with open(
+            #     Path(bwriter.file).parent / f"{i}_read_state.pkl", "wb"
+            # ) as f:
+            #     pkl.dump(StateReader(bwriter.file).get_formatted_states(), f)
         # Run post processing
         meta.add_fields({"end_status": "Success"})