Commit e66d0401 authored by Alan Munoz

add fns and testing

Former-commit-id: 50ad78200ba456b38cb6049c0bdec0824934d8f5
parent 7172de26
@@ -64,11 +64,23 @@ class PostProcessor:
        self.cells = self.cells.from_source(
            self.expt.current_position.annotation)

    def get_exp_mo_bud(self):
        # Gather mother-bud assignments for every position in the experiment,
        # then reset the current position to the first one.
        d = {}
        for pos in self.expt.positions:
            self.expt.current_position = pos
            d = {**d, **self.get_pos_mo_bud()}
        self.expt.current_position = self.expt.positions[0]
        return d
    def load_extraction(self, folder=None) -> None:
        # Load per-position extraction results, skipping positions whose
        # files are missing.
        if folder is None:
            folder = Path(self.expt.name + '/extraction')
        self.extraction = {}
        for pos in self.expt.positions:
            try:
                self.extraction[pos] = load(folder / Path(pos + '.gz'))
            except FileNotFoundError:
                print(pos, 'not found')
#!/usr/bin/env python3
from typing import Dict, List, Tuple, Union
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
from matplotlib import pyplot as plt
import seaborn as sns
from postprocessor.core.postprocessor import PostProcessor
from postprocessor.core.tracks import non_uniform_savgol
pp = PostProcessor(source=19831)
pp.load_tiler_cells()
f = '/home/alan/Documents/libs/extraction/extraction/examples/gluStarv_2_0_x2_dual_phl_ura8_00/extraction'
pp.load_extraction('/home/alan/Documents/libs/extraction/extraction/examples/' + pp.expt.name + '/extraction/')
tmp = pp.extraction['phl_ura8_002']
# prepare data: keep tracks with more than 30 non-NaN timepoints and smooth
# each one with a Savitzky-Golay filter that handles non-uniform sampling
test = tmp[('GFPFast', np.maximum, 'mean')]
clean = test.loc[test.notna().sum(axis=1) > 30]
window = 9
degree = 3


def savgol_on_srs(x):
    vals = x.dropna()
    return Series(non_uniform_savgol(vals.index, vals.values, window, degree),
                  index=vals.index)


smooth = clean.apply(savgol_on_srs, axis=1)
from random import randint

# sanity check: overlay a randomly chosen raw track (blue) and its smoothed
# version (red)
x = randint(0, len(smooth) - 1)
plt.plot(clean.iloc[x], 'b')
plt.plot(smooth.iloc[x], 'r')
plt.show()
diff_kernel = np.array([1, -1])  # first-difference kernel


def growth_rate(data: Series, alg=None,
                filt={'kind': 'savgol', 'window': 9, 'degree': 3}):
    # Estimate growth rate as the first difference of the (optionally
    # smoothed) track.
    if alg is None:
        alg = 'standard'
    if filt:  # TODO add support for multiple algorithms
        window = filt['window']
        degree = filt['degree']
        vals = data.dropna()
        data = Series(non_uniform_savgol(vals.index, vals.values, window, degree),
                      index=vals.index)
    return Series(np.convolve(data, diff_kernel, 'same'),
                  index=data.dropna().index)
gr = clean.apply(growth_rate, axis=1)
from postprocessor.core.tracks import non_uniform_savgol, clean_tracks
def sort_df(df, by='first', rev=True):
    nona = df.notna()
@@ -61,13 +21,6 @@ def sort_df(df, by='first', rev=True):
        idx = idx[::-1]
    return df.loc[idx]
test = tmp[('GFPFast', np.maximum, 'median')]
test2 = tmp[('pHluorin405', np.maximum, 'median')]
ph = test/test2
ph = ph.stack().reset_index(1)
ph.columns = ['tp', 'fl']
def m2p5_med(ext, ch, red=np.maximum):
    m2p5pc = ext[(ch, red, 'max2p5pc')]
    med = ext[(ch, red, 'median')]
@@ -82,3 +35,100 @@ def plot_avg(df):
    sns.relplot(x=df['tp'], y=df['val'], kind='line')
    plt.show()
def split_data(df: DataFrame, splits: Tuple[int, ...]):
    # Split the columns of df at the given timepoint indices.
    dfs = [df.iloc[:, i:j]
           for i, j in zip((0,) + splits, splits + (df.shape[1],))]
    return dfs
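# For example, with splits = (72, 108, 180) and a DataFrame of 200 columns,
# split_data returns four frames covering columns [0:72], [72:108],
# [108:180] and [180:200].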
def growth_rate(data: Series, alg=None,
                filt={'kind': 'savgol', 'window': 7, 'degree': 3}):
    if alg is None:
        alg = 'standard'
    if filt:  # TODO add support for multiple algorithms
        window = filt['window']
        degree = filt['degree']
        vals = data.dropna()
        data = Series(non_uniform_savgol(vals.index, vals.values, window, degree),
                      index=vals.index)
    diff_kernel = np.array([1, -1])
    return Series(np.convolve(data, diff_kernel, 'same'),
                  index=data.dropna().index)
pp = PostProcessor(source=19831)
pp.load_tiler_cells()
f = '/home/alan/Documents/sync_docs/libs/postproc/gluStarv_2_0_x2_dual_phl_ura8_00/extraction'
pp.load_extraction('/home/alan/Documents/sync_docs/libs/postproc/postprocessor/' + pp.expt.name + '/extraction/')
tmp = pp.extraction['phl_ura8_002']
def _check_bg(data):
    # Report any (channel, reduction, metric) key that is missing from a
    # position. Note: checks pp.extraction directly; `data` is currently unused.
    for k in list(pp.extraction.values())[0].keys():
        for p in pp.expt.positions:
            if k not in pp.extraction[p]:
                print(p, k)


# Concatenate each signal across positions (the last three positions are excluded)
data = {k: pd.concat([pp.extraction[pos][k] for pos in pp.expt.positions[:-3]])
        for k in list(pp.extraction.values())[0].keys()}
hmap = lambda df: sns.heatmap(sort_df(df), robust=True)
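# Possible usage sketch (not part of the original script): visualise one of the
# concatenated signals as a row-sorted heatmap, e.g. the cell area signal.
# hmap(data[('general', None, 'area')])
# plt.show()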
# from random import randint
# x = randint(0, len(smooth))
# plt.plot(clean.iloc[x], 'b')
# plt.plot(smooth.iloc[x], 'r')
# plt.show()
# data = tmp
df = data[('general', None, 'area')]
clean = clean_tracks(df, min_len=160)
clean = clean.loc[clean.notna().sum(axis=1) > 9]
gr = clean.apply(growth_rate, axis=1)
splits = (72,108,180)
gr_sp = split_data(gr, splits)
idx = gr.index
# Note: get_bg is defined further down in this file; this call assumes the
# helper functions below have already been evaluated (e.g. interactively).
bg = get_bg(data)
test = data[('GFPFast', np.maximum, 'median')]
test2 = data[('pHluorin405', np.maximum, 'median')]
ph = (test / test2).loc[idx]
c = pd.concat((ph.mean(1), gr.max(1)), axis=1)
c.columns = ['ph', 'gr_max']
# ph = ph.stack().reset_index(1)
# ph.columns = ['tp', 'fl']
ph_sp = split_data(ph, splits)
def get_bg(data):
    # Divide each fluorescence signal by its imBackground counterpart and
    # store the result under a '<metric>_BgSub' key.
    bg = {}
    fl_subkeys = [x for x in data.keys()
                  if x[0] in ['GFP', 'GFPFast', 'mCherry', 'pHluorin405']
                  and x[-1] != 'imBackground']
    for k in fl_subkeys:
        nk = list(k)
        bk = tuple(nk[:-1] + ['imBackground'])
        nk = tuple(nk[:-1] + [nk[-1] + '_BgSub'])
        tmp = []
        for i, v in data[bk].iterrows():
            if i in data[k].index:
                newdf = data[k].loc[i] / v
                newdf.index = pd.MultiIndex.from_tuples(
                    [(*i, c) for c in newdf.index])
                tmp.append(newdf)
        bg[nk] = pd.concat(tmp)
    return bg
def calc_ph(bg):
    # Ratio of the two pH-sensitive fluorescence channels for every
    # (reduction, metric) combination present in both.
    fl_subkeys = [x for x in bg.keys()
                  if x[0] in ['GFP', 'GFPFast', 'pHluorin405']]
    chs = list(set([x[0] for x in fl_subkeys]))
    assert len(chs) == 2, 'Expected exactly two channels'
    ch1 = [x[1:] for x in fl_subkeys if x[0] == chs[0]]
    ch2 = [x[1:] for x in fl_subkeys if x[0] == chs[1]]
    inter = list(set(ch1).intersection(ch2))
    ph = {}
    for red_fld in inter:
        ph[('ph',) + red_fld] = bg[(chs[0],) + red_fld] / bg[(chs[1],) + red_fld]
    return ph
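# Possible usage sketch (assumption, not part of the original script): chain
# get_bg and calc_ph to obtain background-corrected pH ratios, then plot one
# of them with the hmap helper defined above; the exact key is assumed here.
# bg = get_bg(data)
# ph_bgsub = calc_ph(bg)
# hmap(ph_bgsub[('ph', np.maximum, 'median_BgSub')])
# plt.show()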
# sns.heatmap(sort_df(data[('mCherry', np.maximum, 'max2p5pc_BgSub')] / data[('mCherry', np.maximum, 'median_BgSub')]), robust=True)
# from postprocessor.core.tracks import clean_tracks