Swain Lab / aliby / alibylite

Commit 22318d06, authored 3 years ago by Alán Muñoz
add support for experiment restart and com prints
Parent: 8b0f55b5
Showing 1 changed file: aliby/pipeline.py, with 38 additions and 15 deletions.

--- a/aliby/pipeline.py
+++ b/aliby/pipeline.py
@@ -234,13 +234,18 @@ class Pipeline(ProcessABC):
                 trackers_state = StateReader(
                     filename
                 ).get_formatted_states()
+                # print(
+                #     f"Loaded trackers_state with control values {trackers_state[20]['prev_feats']}"
+                # )
                 tiler.n_processed = process_from
                 process_from += 1
                 from_start = False
-            except:
+            except Exception as e:
+                # print(e)
                 pass
         if from_start:  # New experiment or overwriting
+            # print("Starting from scratch")
             try:
                 os.remove(filename)
             except:
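For orientation, here is a minimal sketch of the resume path this hunk enables. The StateReader call, the tiler.n_processed assignment, and the process_from bookkeeping follow the diff; the file-existence check and the way process_from is recovered (reading back the "last_processed" field that the pipeline stores via meta.add_fields) are assumptions for illustration, not taken from this commit.

from pathlib import Path

# A minimal sketch, assuming filename, meta, and tiler are already set up
# as in Pipeline.run; the existence check and meta lookup are hypothetical.
process_from = 0
trackers_state = None
from_start = True
if Path(filename).exists():
    try:
        # assumption: "last_processed" was stored earlier via meta.add_fields()
        process_from = meta["last_processed"]
        trackers_state = StateReader(filename).get_formatted_states()
        tiler.n_processed = process_from
        process_from += 1  # resume at the first unprocessed time point
        from_start = False
    except Exception:
        pass  # anything missing or unreadable: fall back to a fresh start
if from_start:
    try:
        os.remove(filename)  # overwrite any partial output file
    except OSError:
        pass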
@@ -265,7 +270,7 @@ class Pipeline(ProcessABC):
             BabyParameters.from_dict(config["baby"]), tiler
         )
         if trackers_state:
-            runner.crawler.trackers_state = trackers_state
+            runner.crawler.tracker_states = trackers_state
         bwriter = BabyWriter(filename)
         swriter = StateWriter(filename)
@@ -303,13 +308,18 @@ class Pipeline(ProcessABC):
         # Adjust tps based on how many tps are available on the server
         tps = min(general_config["tps"], image.data.shape[0])
         frac_clogged_traps = 0
+        # print(f"Processing from {process_from}")
         for i in tqdm(
             range(process_from, tps), desc=image.name, initial=process_from
         ):
+            # print(f"Processing timepoint {i}")
             if (
                 frac_clogged_traps < earlystop["thresh_pos_clogged"]
                 or i < earlystop["min_tp"]
             ):
+                # if tps - process_from > 2 and i == 2:
+                #     exit()
                 t = perf_counter()
                 trap_info = tiler.run_tp(i)
                 logging.debug(f"Timing:Trap: {perf_counter() - t}s")
@@ -317,6 +327,14 @@ class Pipeline(ProcessABC):
                 writer.write(trap_info, overwrite=[], tp=i)
                 logging.debug(f"Timing:Writing-trap: {perf_counter() - t}s")
                 t = perf_counter()
+                # try:
+                #     print(
+                #         f"Performing seg with control values {runner.crawler.tracker_states[20]['prev_feats']}"
+                #     )
+                # except:
+                #     pass
                 seg = runner.run_tp(i)
                 logging.debug(f"Timing:Segmentation: {perf_counter() - t}s")
                 # logging.debug(
@@ -324,6 +342,14 @@ class Pipeline(ProcessABC):
                 # )
                 t = perf_counter()
                 bwriter.write(seg, overwrite=["mother_assign"], tp=i)
+                print(
+                    f"Writing state in tp {i} with control values {runner.crawler.tracker_states[20]['prev_feats']}"
+                )
+                swriter.write(
+                    data=runner.crawler.tracker_states,
+                    overwrite=swriter.datatypes.keys(),
+                    tp=i,
+                )
                 logging.debug(f"Timing:Writing-baby: {perf_counter() - t}s")
                 # TODO add time-skipping for cases when the
@@ -355,22 +381,19 @@ class Pipeline(ProcessABC):
                 print("Frac clogged traps: ", frac_clogged_traps)
         # State Writer to recover interrupted experiments
-        swriter.write(
-            data=runner.crawler.tracker_states,
-            overwrite=swriter.datatypes.keys(),
-        )
         meta.add_fields({"last_processed": i})
-        import pickle as pkl
-        with open(
-            Path(bwriter.file).parent / f"{i}_live_state.pkl", "wb"
-        ) as f:
-            pkl.dump(runner.crawler.tracker_states, f)
-        with open(
-            Path(bwriter.file).parent / f"{i}_read_state.pkl", "wb"
-        ) as f:
-            pkl.dump(StateReader(bwriter.file).get_formatted_states(), f)
+        # import pickle as pkl
+        # with open(
+        #     Path(bwriter.file).parent / f"{i}_live_state.pkl", "wb"
+        # ) as f:
+        #     pkl.dump(runner.crawler.tracker_states, f)
+        # with open(
+        #     Path(bwriter.file).parent / f"{i}_read_state.pkl", "wb"
+        # ) as f:
+        #     pkl.dump(StateReader(bwriter.file).get_formatted_states(), f)
         # Run post processing
         meta.add_fields({"end_status": "Success"})
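Taken together, these hunks move the tracker-state write from a single post-loop call into the per-time-point loop, which is what makes restart after an interruption possible: every processed tp leaves a recoverable state on disk. A rough sketch of the resulting loop shape, with error handling, early stopping, and the trap writing elided (all names as they appear in the diff):

# Sketch of the checkpointing loop after this commit; not the full
# Pipeline.run body.
for i in range(process_from, tps):
    trap_info = tiler.run_tp(i)  # tile/trap extraction for time point i
    seg = runner.run_tp(i)       # BABY segmentation + tracking
    bwriter.write(seg, overwrite=["mother_assign"], tp=i)
    # Checkpoint the tracker state at every time point so a crashed run
    # can later resume from the last written tp.
    swriter.write(
        data=runner.crawler.tracker_states,
        overwrite=swriter.datatypes.keys(),
        tp=i,
    )
meta.add_fields({"last_processed": i})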