Commit 57c57804 authored by Jiri Borovec's avatar Jiri Borovec

update STA

parent c2c9d7ae
@@ -14,6 +14,8 @@ import glob
import time
import numpy as np
import pandas as pd
from matplotlib.pyplot import axis
from skimage import io
from skimage import morphology as morph
from scipy import ndimage
@@ -21,33 +23,47 @@ import matplotlib.pyplot as plt
import multiprocessing as mproc
import logging
import src.segmentation.pipelines as segm
import src.own_utils.tool_experiments as tl_expt
logger = logging.getLogger(__name__)
b_cmp = False
if b_cmp:
    PATH_DATA = '/datagrid/Medical/microscopy/drosophila/real_segmentations'
    PATH_INPUT_IMAGES = os.path.join(PATH_DATA, 'orig')
    PATH_OUTPUT = '/datagrid/Medical/microscopy/drosophila/real_segmentations'
    PATH_OUTPUT_RESULTS = os.path.join(PATH_OUTPUT, 'segmentation_new')
    # PATH_OUTPUT = '/datagrid/temporary/Medical'
else:
    PATH_DATA = os.path.expanduser('~/Dropbox/Workspace/py_ImageProcessing/images')
    PATH_INPUT_IMAGES = os.path.join(PATH_DATA, 'drosophila_disc')
    PATH_OUTPUT = '/home/jirka/TEMP/APD_real_data'
    PATH_OUTPUT_RESULTS = os.path.join(PATH_OUTPUT, 'segmentation')

NB_THREADS = int(mproc.cpu_count() * .7)
DEFAULT_PARAMS = {
    'computer': os.uname(),
    'name': 'name',
    'sp_size': 35,
    'sp_regul': 0.25,
    'gc_regul': 0.15,
    'nb_lbs': 3,
    # 'fts': {'clr': ['mean', 'std', 'eng']},
    'fts': {'clr': ['mean']},
    'clr': 'rgb',
    # 'clr': 'lab',
    'path_in': PATH_INPUT_IMAGES,
    'path_out': PATH_OUTPUT_RESULTS,
    'path_visu': os.path.join(PATH_OUTPUT, 'visual'),
    'visu': True,
}
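
# Hypothetical usage sketch (not part of the original script): the defaults above
# can be copied and tweaked per run, e.g.
#
#   params = dict(DEFAULT_PARAMS)
#   params['sp_size'] = 25          # example value only
#   segment_image_folder(params=params, nb_jobs=NB_THREADS)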

def segments_sum_filled(seg):
    """ take each class and find the smallest compact class
    :param seg: np.array<w, h>
    :return: [int]
    """
    # plt.figure()
    lb_sum = {}
    for i, lb in enumerate(np.unique(seg)):
@@ -62,7 +78,69 @@ def segments_filled(seg):
    return lbs

def segment_values(img, seg):
    """ sort segment labels according to their mean image value
    :param img: np.array<w, h, 3> input image
    :param seg: np.array<w, h> segmentation
    :return: [int] labels sorted by mean value in descending order
    """
    img_vec = img.reshape(-1, img.shape[-1])
    seg_vec = seg.reshape(-1)
    lb_val = {}
    for i, lb in enumerate(np.unique(seg_vec)):
        im = img_vec[(seg_vec == lb)]
        lb_val[lb] = np.mean(im)
    lbs = sorted(lb_val, key=lambda x: lb_val[x], reverse=True)
    logger.debug('lb_val: {}'.format(lb_val))
    return lbs

def estim_lut(img, seg):
    """ estimate a relabeling LUT so labels are ordered by mean image value
    :param img: np.array<w, h, 3>
    :param seg: np.array<w, h>
    :return: [int] LUT mapping old label -> new label
    """
    # lbs = segments_sum_filled(seg)
    lbs = segment_values(img, seg)
    logger.debug('lbs: {} & seg in range {}:{}'.format(lbs, np.min(seg), np.max(seg)))
    lut = list(range(np.max(seg) + 1))
    for i, lb in enumerate(lbs):
        lut[lb] = i
    return lut
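
# Minimal illustrative sketch (not part of the pipeline; the arrays are made up):
# the LUT returned by estim_lut() is applied to a label image via NumPy fancy
# indexing, exactly as done later in segment_image(), e.g.
#
#   seg = np.array([[0, 1], [2, 2]])
#   lut = [2, 0, 1]                    # old label -> new label
#   seg_new = np.asarray(lut)[seg]     # -> [[2, 0], [1, 1]]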

def visual_pair_orig_segm(params, img, seg, n_img):
    """ visualise a pair of original image and its segmentation
    :param params: {str: ...}
    :param img: np.array<w, h, 3>
    :param seg: np.array<w, h>
    :param n_img: str image name
    :return:
    """
    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(24, 12))
    ax[0].imshow(img), ax[0].axis('off')
    ax[0].contour(seg, linewidths=2, cmap=plt.cm.Reds)
    ax[1].imshow(seg), ax[1].axis('off')
    # ax[0].imshow(seg_sml), plt.axis('off')
    p_fig = os.path.join(params['path_visu'], 'visu_' + n_img)
    fig.savefig(p_fig, bbox_inches='tight')
    plt.close(fig)
    return

def segment_image(params, p_im):
    """ segment an individual image
    :param params: {str: ...}
    :param p_im: str path to the input image
    :return:
    """
    n_img = os.path.basename(p_im)
    img = io.imread(p_im)
    logger.debug('image name: {}'.format(n_img))
@@ -70,40 +148,107 @@ def segment_image(params, p_im, p_out):
    t = time.time()
    seg = segm.pipe_color2d_spx_features_gmm_graphcut(img, nb_cls=params['nb_lbs'],
            clr=params['clr'], sp_size=params['sp_size'], sp_reg=params['sp_regul'],
            gc_reg=params['gc_regul'], ls_fts=params['fts'], gc_tp='w_edge')
    logger.info('execution time [s]: {}'.format(time.time() - t))

    # shift labels to start at zero and reorder them by mean image value
    seg -= np.min(seg)
    lut = estim_lut(img, seg)
    logger.debug('LUT: {}'.format(lut))
    seg = np.asarray(lut)[seg]

    p_img = os.path.join(params['path_out'], n_img)
    # io.imsave(p_img, np.array((seg == lbs[-1]) * 255, dtype=np.uint8))
    io.imsave(p_img, np.array(seg, dtype=np.uint8))
    return
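
# Note (added for clarity, not in the original): mproc.Pool.map passes a single
# argument to the worker and, in Python 2, the worker has to be a picklable
# module-level function, hence the small wrapper below that unpacks the
# (params, image path) tuple and forwards it to segment_image().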
def segment_image_pset(pset):
    segment_image(*pset)
    return

def segment_image_folder(params=DEFAULT_PARAMS, im_pattern='*.png', nb_jobs=1):
    """ segment a complete image folder
    :param params: {str: ...}
    :param im_pattern: str glob pattern for the input images
    :param nb_jobs: int number of parallel processes
    :return:
    """
    for n in ['path_in', 'path_out']:
        logger.info('"{}" dir: ({}) <- {}'.format(n, os.path.exists(params[n]),
                                                  params[n]))
    if any([not os.path.exists(params[n]) for n in ['path_in', 'path_out']]):
        return None
    # export the used configuration next to the results
    with open(os.path.join(params['path_out'], 'config.txt'), 'w') as f:
        f.write(tl_expt.string_dict(params))

    p_imgs = sorted(glob.glob(os.path.join(params['path_in'], im_pattern)))
    logger.info('found {} images'.format(len(p_imgs)))

    pset = [(params, p_im) for p_im in p_imgs]
    if nb_jobs > 1:
        logger.debug('perform_sequence in {} threads'.format(nb_jobs))
        mproc_pool = mproc.Pool(nb_jobs)
        mproc_pool.map(segment_image_pset, pset)
        mproc_pool.close()
        mproc_pool.join()
    else:
        for ps in pset:
            segment_image_pset(ps)
    return

def visual_pair_orig_segm_pset(pset):
    """ visualise segmentation together with the original image, version for mproc
    :param pset: (params, n_img)
    :return:
    """
    params, n_img = pset
    logger.debug('visual: "{}"'.format(n_img))
    p_img = os.path.join(params['path_in'], n_img)
    p_seg = os.path.join(params['path_out'], n_img)
    if not os.path.exists(p_img) or not os.path.exists(p_seg):
        logger.warning('image or segmentation does not exist')
        return
    img = io.imread(p_img)
    seg = io.imread(p_seg)
    visual_pair_orig_segm(params, img, seg, n_img)
    return

def show_folder_imgs_segm(params=DEFAULT_PARAMS, im_pattern='*.png'):
    """ visualise a complete folder
    :param params: {str: ...}
    :param im_pattern: str
    :return:
    """
    l_path = ['path_in', 'path_out', 'path_visu']
    if any([not os.path.exists(params[n]) for n in l_path]):
        return None
    p_segs = glob.glob(os.path.join(params['path_out'], im_pattern))
    logger.info('found {} segmentations'.format(len(p_segs)))
    logger.debug('segmentations: {} ...'.format(p_segs[:3]))
    pset = [(params, os.path.basename(p)) for p in p_segs]
    mproc_pool = mproc.Pool(NB_THREADS)
    mproc_pool.map(visual_pair_orig_segm_pset, pset)
    mproc_pool.close()
    mproc_pool.join()
    # for ps in pset:
    #     visual_pair_orig_segm_pset(ps)
    return

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    logger.info('running...')

    segment_image_folder(nb_jobs=NB_THREADS)
    show_folder_imgs_segm()

    logger.info('DONE')
    # plt.show()
@@ -11,10 +11,6 @@ Example run:
# matplotlib.use('Agg')
import os
import time
import gc
import copy
import traceback
import numpy as np
import pandas as pd
import multiprocessing as mproc
@@ -26,7 +22,6 @@ import generate_dataset as gen_data
import ptn_disctionary as ptn_dict
import src.own_utils.tool_experiments as tl_expt
import logging
import copy_reg
import types
logger = logging.getLogger(__name__)
@@ -42,15 +37,15 @@ def _reduce_method(m):
copy_reg.pickle(types.MethodType, _reduce_method)
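
# Hedged sketch (the body of _reduce_method is elided in this hunk): the usual
# Python 2 recipe registered above makes bound methods picklable, so instance
# methods can be dispatched through multiprocessing.Pool, roughly:
#
#   def _reduce_method(m):
#       # rebuild the method from its owner (instance or class) and its name
#       if m.im_self is None:
#           return getattr, (m.im_class, m.im_func.func_name)
#       return getattr, (m.im_self, m.im_func.func_name)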

b_cmp = True
if b_cmp:
    PATH_DATA_SYNTH = '/datagrid/Medical/microscopy/drosophila/synthetic_data'
    PATH_DATA_REAL = '/datagrid/Medical/microscopy/drosophila/real_segmentations'
    DEFAULT_PATH_OUTPUT = '/datagrid/temporary/Medical'
else:
    PATH_DATA_SYNTH = '/home/jirka/TEMP/APD_synthetic_data'
    PATH_DATA_REAL = ''
    DEFAULT_PATH_OUTPUT = '/home/jirka/TEMP'

DEFAULT_NB_THREADS = int(mproc.cpu_count() * .8)
DEFAULT_PATH_RESULTS = os.path.join(DEFAULT_PATH_OUTPUT, 'experiments_APD')
@@ -67,7 +62,7 @@ DEFAULT_PARAMS = {
    'overlap_mj': False,
}

# SYNTH_DATASET_VERSION = 'v1'
SYNTH_DATASET_VERSION = 'v2'
SYNTH_DATASET_TEMP = 'atomicPatternDictionary_'
SYNTH_DATASET_NAME = SYNTH_DATASET_TEMP + SYNTH_DATASET_VERSION
SYNTH_PATH_APD = os.path.join(PATH_DATA_SYNTH, SYNTH_DATASET_NAME)
@@ -309,8 +304,15 @@ class ExperimentLinearComb(ExperimentAPD_mp):
        return atlas_ptns, rct_imgs

    def estim_atlas_as_argmax(self, atlas_ptns):
        # take per pixel the pattern with the maximal absolute value
        ptn_used = np.sum(np.abs(self.fit_result), axis=0) > 0
        # filter just the used patterns
        atlas_ptns = atlas_ptns[ptn_used, :]
        # argmax on absolute values, labels starting from 1
        atlas = np.argmax(np.abs(atlas_ptns), axis=0) + 1
        atlas_sum = np.sum(np.abs(atlas_ptns), axis=0)
        # set pixels with negligibly small response to background (0)
        atlas[atlas_sum < 1e-3] = 0
        return atlas
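
    # Tiny made-up example of the argmax relabelling above (values illustrative
    # only): three patterns over four pixels,
    #   atlas_ptns = [[0.9,  0.0, 0.0, 0.0],
    #                 [0.0, -0.8, 0.0, 0.0],
    #                 [0.0,  0.0, 0.7, 0.0]]
    # np.argmax(np.abs(atlas_ptns), axis=0) + 1 gives [1, 2, 3, 1], and zeroing
    # pixels whose summed absolute response is below 1e-3 yields [1, 2, 3, 0].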

    def estim_atlas_as_unique_sum(self, atlas_ptns):
@@ -325,7 +327,8 @@ class ExperimentLinearComb(ExperimentAPD_mp):
        :param atlas_ptns: np.array<nb_patterns, w*h>
        :return:
        """
        atlas = self.estim_atlas_as_argmax(atlas_ptns)
        # atlas = self.estim_atlas_as_unique_sum(atlas_ptns)
        return atlas

    def _binarize_img_reconstruction(self, img_rct, thr=0.5):
@@ -389,7 +392,7 @@ class ExperimentSparsePCA(ExperimentLinearComb):
    def _estimate_linear_combination(self, imgs_vec):
        self.estimator = decomposition.SparsePCA(n_components=self.params.get('nb_lbs'),
                                                 max_iter=self.params.get('max_iter'))  # , n_jobs=-1
        self.fit_result = self.estimator.fit_transform(imgs_vec)
        self.components = self.estimator.components_
@@ -403,10 +406,9 @@ class ExperimentDictLearn(ExperimentLinearComb):
    def _estimate_linear_combination(self, imgs_vec):
        self.estimator = decomposition.DictionaryLearning(fit_algorithm='lars',
                                                          transform_algorithm='omp', split_sign=False,
                                                          n_components=self.params.get('nb_lbs'),
                                                          max_iter=self.params.get('max_iter'))  # , n_jobs=-1
        self.fit_result = self.estimator.fit_transform(imgs_vec)
        self.components = self.estimator.components_
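
        # Note (added for clarity): in scikit-learn, fit_transform() returns the
        # per-image codes, here of shape (nb_images, nb_patterns), while
        # components_ holds the learned basis patterns of shape (nb_patterns, w*h),
        # matching the np.array<nb_patterns, w*h> shape documented for atlas_ptns above.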