Biomedical Imaging Algorithms / APDL / Commits

Commit d1904cf4, authored Apr 20, 2016 by Jiri Borovec (parent 4f127a11)
Commit message: update

4 changed files with 76 additions and 63 deletions (+76 / -63)
expt_apd_sta.py      +4   -4
expt_apdl.py         +1   -1
expt_segm_disc.py    +65  -47
generate_dataset.py  +6   -11
expt_sta.py → expt_apd_sta.py (renamed)

```diff
@@ -56,7 +56,7 @@ DEFAULT_PARAMS = {
     'max_iter': 39,
     'gc_regul': 0.,
     'nb_lbs': 7,
-    'nb_runs': 300,  # 500
+    'nb_runs': 25,  # 500
     'gc_reinit': True,
     'ptn_split': True,
     'overlap_mj': False,
@@ -92,7 +92,7 @@ REAL_PARAMS.update({'name': 'real-data',
     'dataset': REAL_DATASET_NAME,
     'sub_dataset': REAL_SUB_DATASETS[0],
     'out_path': DEFAULT_PATH_RESULTS,
-    'max_iter': 49,
+    'max_iter': 99,
     'nb_runs': 9})
 # DEFAULT_PATH_OUTPUT = os.path.join('..','..','output')
@@ -430,7 +430,7 @@ def parameters_sta(dataset):
     params = SYNTH_PARAMS.copy()
     if type(dataset) is str:
         params.update({'dataset': dataset})
-    params['nb_runs'] = 30
+    params['nb_runs'] = 9
     params['max_iter'] = 999
     params['out_path'] += '-sta'
@@ -462,7 +462,7 @@ def experiments_synthetic_m_run(dataset=None):
     for params in l_params:
         for cls_expt in l_cls_exp:
             expt = cls_expt(params)
-            expt.run(it_var='case', it_vals=range(params['nb_runs']))
+            expt.run(it_var='case', it_vals=range(params['nb_qruns']))

 if __name__ == "__main__":
```
expt_apdl.py

```diff
@@ -25,7 +25,7 @@ import generate_dataset as gen_data
 import dictionary_learning as dl
 import ptn_disctionary as ptn_dict
 import ptn_weights as ptn_weigth
-import expt_sta as exp_sta
+import expt_apd_sta as exp_sta

 logger = logging.getLogger(__name__)

 # REQURED FOR MPROC POOL
```
expt_disc_segm.py → expt_segm_disc.py (renamed)

Module docstring (top of file):

```python
"""
run experiments with Stat-of-the-art methods
segmentation on disc images with specific configuration

* perform the segmentation and export results
* load results and does visualisation

Example run:
(no params)
"""
```
```diff
@@ -29,23 +31,16 @@ import src.own_utils.tool_experiments as tl_expt
 logger = logging.getLogger(__name__)

 b_debug = False
-v_data = 'stage'

 if b_debug:
     PATH_DATA = os.path.expanduser('~/Dropbox/Workspace/py_ImageProcessing/images')
     DATASET = 'drosophila_disc'
     PATH_INPUT_IMAGES = os.path.join(PATH_DATA, DATASET)
     # PATH_OUTPUT = os.path.expanduser('~/TEMP')
     PATH_OUTPUT = '/datagrid/Medical/microscopy/TEMPORARY'
-    NB_THREADS = 1
+    NB_THREADS = 3
 else:
-    if v_data == 'orig':
-        PATH_DATA = '/datagrid/Medical/microscopy/drosophila/real_segmentations'
-        DATASET = 'orig'
-    if v_data == 'stage':
-        PATH_DATA = '/datagrid/Medical/microscopy/drosophila/disc_stages'
-        DATASET = 'stage_{}'.format(3)
-    PATH_INPUT_IMAGES = os.path.join(PATH_DATA, DATASET)
+    DATASET = 'orig'
+    PATH_DATA = '/datagrid/Medical/microscopy/drosophila/real_segmentations'
     PATH_OUTPUT = '/datagrid/Medical/microscopy/drosophila/real_segmentations'
     NB_THREADS = int(mproc.cpu_count() * .8)
```
```diff
@@ -53,16 +48,16 @@ else:
 DEFAULT_PARAMS = {
     'computer': os.uname(),
     'name': 'name',
-    'sp_size': 2,
+    'sp_size': 20,
     'sp_regul': 0.15,
-    'gc_regul': 0.7,
+    'gc_regul': 0.9,  # 1.2
     'nb_lbs': 3,
     # 'fts': {'clr': ['mean', 'std', 'eng']},
     'fts': {'clr': ['mean', 'std', 'eng']},
     'clr': 'rgb',
     # 'clr': 'lab',
-    'path_in': PATH_INPUT_IMAGES,
     'dataset': DATASET,
+    'path_in': os.path.join(PATH_DATA, DATASET),
     'path_out': os.path.join(PATH_OUTPUT, DATASET + '_segm'),
     'path_visu': os.path.join(PATH_OUTPUT, DATASET + '_visu'),
     'visu': True,
```
```diff
@@ -90,7 +85,7 @@ def segments_sum_filled(seg):
         lb_sum[lb] = np.sum(im)
         # plt.subplot(1, 3, i+1), plt.imshow(im)
     lbs = sorted(lb_sum, key=lambda x: lb_sum[x], reverse=True)
-    logger.debug('lb_sum: {}'.format(lb_sum))
+    logger.debug('lb_sum: %s', repr(lb_sum))
     # plt.show()
     return lbs
```
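Most of the remaining edits in this file repeat the pattern above: eager `'...{}'.format(...)` calls are swapped for lazy `%`-style logging arguments, so the message string is only built when the record is actually emitted. A minimal standalone sketch of the two styles (logger setup and values are illustrative, not from the project):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

lb_sum = {0: 1200, 1: 340, 2: 85}  # illustrative label sums

# eager: the argument string is formatted even though DEBUG is filtered out here
logger.debug('lb_sum: {}'.format(lb_sum))

# lazy: formatting is deferred and skipped entirely for filtered records
logger.debug('lb_sum: %s', repr(lb_sum))
```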
```diff
@@ -109,7 +104,7 @@ def segment_values(img, seg):
         im = img_vec[(seg_vec == lb)]
         lb_val[lb] = np.median(im)
     lbs = sorted(lb_val, key=lambda x: lb_val[x], reverse=True)
-    logger.debug('lb_sum: {}'.format(lb_val))
+    logger.debug('lb_sum: %s', repr(lb_val))
     return lbs
```
```diff
@@ -122,8 +117,8 @@ def estim_lut(img, seg):
     """
     # lbs = segments_sum_filled(seg)
     lbs = segment_values(img, seg)
-    logger.debug('lbs: {} & seg in range {}:{}'.format(lbs, np.min(seg), np.max(seg)))
+    logger.debug('lbs: %s & seg in range %i:%i', repr(lbs), np.min(seg), np.max(seg))
     lut = range(np.max(seg) + 1)
     for i, lb in enumerate(lbs):
         lut[lb] = i
```
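The LUT assembled here is applied later in `postprocessing_segm3cls` via NumPy fancy indexing (`seg = np.asarray(lut)[seg]`). A small sketch of that relabelling idiom on toy data (labels and LUT values are made up for illustration):

```python
import numpy as np

seg = np.array([[0, 1, 1],
                [2, 2, 0]])   # toy segmentation with labels 0..2
lut = [2, 0, 1]               # new label assigned to each original label 0..2

# fancy indexing maps every pixel's old label to its new one in a single step
seg_relab = np.asarray(lut)[seg]
print(seg_relab)
# [[2 0 0]
#  [1 1 2]]
```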
```diff
@@ -165,7 +160,7 @@ def preprocessing_image(img):
     return img_new


-def postprocessing_segm3cls(img, seg):
+def postprocessing_segm3cls(img, seg, lb_disc=1, lb_act=2):
     """ postprocessing and some morpho operations

     :param img: np.array<h, w, 3>
```
```diff
@@ -179,15 +174,21 @@ def postprocessing_segm3cls(img, seg):
     logger.debug('LUT: {}'.format(lut))
     seg = np.asarray(lut)[seg]
     # DICKS
+    seg_fg = seg >= lb_disc
+    seg_act = seg == lb_act
     # correct the foreground layer
-    seg_fg = seg > 0
-    seg_fg = morph.binary_closing(seg_fg, morph.disk(25))
+    seg_fg = morph.binary_closing(seg_fg, morph.disk(15))
     seg_fg = ndimage.binary_fill_holes(seg_fg)
+    # # filter activation
+    seg_act = morph.binary_opening(seg_act, morph.disk(9))
     # take centered elements
     sg, _ = ndimage.label(seg_fg)
     m_seg = measure.regionprops(sg)
-    dist = [sum((0.5 * np.asarray(seg.shape) - np.asarray(m.centroid)) ** 2) for m in m_seg]
+    dist = [sum((0.5 * np.asarray(seg.shape) - np.asarray(m.centroid)) ** 2) for m in m_seg]
     lb_sel = m_seg[dist.index(min(dist))].label
     seg_sel = (sg == lb_sel)
```
```diff
@@ -195,8 +196,8 @@ def postprocessing_segm3cls(img, seg):
     # http://scikit-image.org/docs/dev/api/skimage.segmentation.html#skimage.segmentation.active_contour
     seg_new = np.zeros_like(seg)
-    seg_new[seg_sel] = 1
-    seg_new[np.logical_and(seg_sel, seg > 1)] = 2
+    seg_new[seg_sel] = lb_disc
+    seg_new[np.logical_and(seg_sel, seg_act)] = lb_act
     return seg_new
```
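The reworked post-processing above keeps only the foreground component nearest the image centre before writing the `lb_disc` / `lb_act` labels back. A rough standalone sketch of that "keep the most central component" step with the same SciPy and scikit-image calls (synthetic mask, arbitrary structuring-element radii):

```python
import numpy as np
from scipy import ndimage
from skimage import measure
from skimage import morphology as morph

# synthetic binary foreground with two blobs; only the central one should survive
seg_fg = np.zeros((100, 100), dtype=bool)
seg_fg[40:60, 40:60] = True   # blob near the centre
seg_fg[5:15, 5:15] = True     # blob near the corner

seg_fg = morph.binary_closing(seg_fg, morph.disk(5))  # smooth the mask
seg_fg = ndimage.binary_fill_holes(seg_fg)            # fill interior holes

sg, _ = ndimage.label(seg_fg)                         # label connected components
regions = measure.regionprops(sg)
centre = 0.5 * np.asarray(seg_fg.shape)
dist = [np.sum((centre - np.asarray(r.centroid)) ** 2) for r in regions]
lb_sel = regions[int(np.argmin(dist))].label
seg_sel = (sg == lb_sel)                              # mask of the most central blob
print('kept component:', lb_sel, 'with', int(seg_sel.sum()), 'pixels')
```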
```diff
@@ -209,7 +210,7 @@ def export_debug_images(p_out, n_img, d_debug):
     :return:
     """
     p_debug = os.path.join(p_out, 'debug_' + os.path.splitext(n_img)[0])
-    logger.debug('debug images: "{}"'.format(p_debug))
+    logger.debug('debug images: "%s"', repr(p_debug))
     if not os.path.exists(p_debug):
         os.mkdir(p_debug)
     for k in ['slic_mean', 'im_g_edges', 'im_g_seg']:
```
```diff
@@ -236,14 +237,14 @@ def segment_image(params, p_im):
     logger.info('perform the SEGMENTATION...')
     logger.debug('image name: {}'.format(n_img))
-    logger.debug('img values range: {} - {}'.format(np.min(img), np.max(img)))
+    logger.debug('img values range: %f - %f', np.min(img), np.max(img))
     d_debug = dict() if b_debug else None
     t = time.time()
     seg = segm.pipe_clr2d_spx_fts_gmm_gc(img, nb_cls=params['nb_lbs'], clr=params['clr'],
                                          sp_size=params['sp_size'], sp_reg=params['sp_regul'],
                                          gc_reg=params['gc_regul'], ls_fts=params['fts'],
                                          gc_tp='w_edge', d_debug=d_debug)
-    logger.info('execution time [s]: {}'.format(time.time() - t))
+    logger.info('execution time [s]: %f', (time.time() - t))
     if b_debug:
         d_debug['img'] = img
```
```diff
@@ -253,16 +254,16 @@ def segment_image(params, p_im):
     p_seg = os.path.join(params['path_out'], n_img)
     # io.imsave(p_img, np.array((seg == lbs[-1]) * 255, dtype=np.uint8))
-    logger.debug('image save to "{}"'.format(p_seg))
+    logger.debug('image save to "%s"', p_seg)
     io.imsave(p_seg, np.array(seg, dtype=np.uint8))


-def segment_image_pset(pset):
+def perform_mproc(pset):
     if b_debug:
         segment_image(*pset)
     else:
         try:
-            logging.debug('run segm. in try-catch mode...')
+            logging.debug('run segment. in try-catch mode...')
             segment_image(*pset)
         except:
             pass
```
```diff
@@ -280,25 +281,23 @@ def segment_image_folder(params=DEFAULT_PARAMS, im_pattern='*.png', nb_jobs=1):
         os.mkdir(params['path_out'])
     l_path = ['path_in', 'path_out']
     for n in l_path:
-        logger.info('"{}" dir: ({}) ' '<- {}'.format(n, os.path.exists(params[n]), params[n]))
+        logger.info('"%s" dir: (%i) <- %s', n, os.path.exists(params[n]), params[n])
     if any([not os.path.exists(params[n]) for n in l_path]):
         return None
     with open(os.path.join(params['path_out'], 'config.txt'), 'w') as f:
         f.write(tl_expt.string_dict(params))
     p_imgs = sorted(glob.glob(os.path.join(params['path_in'], im_pattern)))
-    logger.info('found {} images'.format(len(p_imgs)))
+    logger.info('found %i images', len(p_imgs))
-    pset = [(params, p_im) for p_im in p_imgs]
+    mp_set = [(params, p_im) for p_im in p_imgs]
     if nb_jobs > 1:
-        logger.debug('perform_sequence in {} threads'.format(nb_jobs))
+        logger.debug('perform_sequence in %i threads', nb_jobs)
         mproc_pool = mproc.Pool(nb_jobs)
-        mproc_pool.map(segment_image_pset, pset)
+        mproc_pool.map(perform_mproc, mp_set)
         mproc_pool.close()
         mproc_pool.join()
     else:
-        for ps in pset:
-            segment_image_pset(ps)
+        map(perform_mproc, mp_set)


 def visual_pair_orig_segm_pset(pset):
```
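This file and generate_dataset.py converge on the same multiprocessing pattern: build a list of argument tuples and hand one module-level worker to `Pool.map`, since only top-level functions pickle cleanly into worker processes. A hedged, generic sketch of that pattern (the worker name and toy task are illustrative, not the project's):

```python
import multiprocessing as mproc


def perform_task(task):
    """Unpack one (params, item) tuple; swallow failures so a single bad
    item does not kill the whole pool, mirroring perform_mproc."""
    params, item = task
    try:
        print('processing %s with sp_size=%s' % (item, params['sp_size']))
    except Exception:
        pass


if __name__ == '__main__':
    tasks = [({'sp_size': 20}, name) for name in ('im_0.png', 'im_1.png')]
    nb_jobs = 2
    if nb_jobs > 1:
        pool = mproc.Pool(nb_jobs)
        pool.map(perform_task, tasks)   # fan the tuples out to worker processes
        pool.close()
        pool.join()
    else:
        # the commit's single-thread branch uses a bare map(); that is eager on
        # Python 2 but lazy on Python 3, where an explicit loop is safer
        for task in tasks:
            perform_task(task)
```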
```diff
@@ -307,11 +306,11 @@ def visual_pair_orig_segm_pset(pset):
     :param pset: (params, n_img)
     """
     params, n_img = pset
-    logging.debug('vusial: "{}"'.format(n_img))
+    logging.debug('vusial: "%s"', n_img)
     p_img = os.path.join(params['path_in'], n_img)
     p_seg = os.path.join(params['path_out'], n_img)
     if not os.path.exists(p_img) or not os.path.exists(p_seg):
-        logging.warning('image of segm does not exist')
+        logging.warning('image of segment. does not exist')
         return
     img = io.imread(p_img)
     seg = io.imread(p_seg)
```
```diff
@@ -330,8 +329,8 @@ def show_folder_imgs_segm(params=DEFAULT_PARAMS, im_pattern='*.png'):
     if not os.path.exists(params['path_visu']):
         os.mkdir(params['path_visu'])
     p_segs = glob.glob(os.path.join(params['path_out'], im_pattern))
-    logger.info('found {} segmentation'.format(len(p_segs)))
-    logger.debug('segmentation: {} ...'.format(p_segs[:3]))
+    logger.info('found %i segmentation', len(p_segs))
+    logger.debug('segmentation: %s...', repr(p_segs[:3]))
     pset = [(params, os.path.basename(p)) for p in p_segs]
     mproc_pool = mproc.Pool(NB_THREADS)
```
```diff
@@ -342,13 +341,32 @@ def show_folder_imgs_segm(params=DEFAULT_PARAMS, im_pattern='*.png'):
         # visual_pair_orig_segm_pset(ps)


+def main_segment_samples():
+    segment_image_folder(nb_jobs=NB_THREADS)
+    show_folder_imgs_segm()
+
+
+def main_segment_stages():
+    for idx in range(1, 5):
+        logger
+        n_dataset = 'stage_{}'.format(idx)
+        p_data = '/datagrid/Medical/microscopy/drosophila/disc_stages'
+        param = DEFAULT_PARAMS.copy()
+        param.update({'dataset': n_dataset,
+                      'path_in': os.path.join(p_data, n_dataset),
+                      'path_out': os.path.join(PATH_OUTPUT, n_dataset + '_segm'),
+                      'path_visu': os.path.join(PATH_OUTPUT, n_dataset + '_visu'),})
+        segment_image_folder(params=param, nb_jobs=NB_THREADS)
+        show_folder_imgs_segm(params=param)
+
+
 if __name__ == "__main__":
     logging.basicConfig(level=logging.DEBUG)
     logger.info('running...')
-    segment_image_folder(nb_jobs=NB_THREADS)
+    main_segment_samples()
-    show_folder_imgs_segm()
+    main_segment_stages()
-    logger.info('DONE')
-    # plt.show()
+    logger.info('DONE')
\ No newline at end of file
```
generate_dataset.py
```diff
@@ -392,9 +392,7 @@ def dataset_load_images(name=DEFAULT_DATASET, path_base=DEFAULT_PATH_APD,
         mproc_pool.join()
     else:
         logger.debug('running in single thread...')
-        imgs = []
-        for i, p_im in enumerate(p_imgs):
-            imgs.append(load_image(p_im))
+        imgs = map(load_image, p_imgs)
     im_names = [os.path.splitext(os.path.basename(p))[0] for p in p_imgs]
     return imgs, im_names
```
```diff
@@ -455,26 +453,23 @@ def dataset_export_images(p_out, imgs, names=None, nb_jobs=1):
     if names is None:
         names = range(len(imgs))
+    mp_set = [(p_out, im, names[i]) for i, im in enumerate(imgs)]
     if nb_jobs > 1:
         logger.debug('running in {} threads...'.format(nb_jobs))
-        sp = [(p_out, im, names[i]) for i, im in enumerate(imgs)]
         mproc_pool = mproc.Pool(nb_jobs)
-        mproc_pool.map(export_image_sp, sp)
+        mproc_pool.map(perfom_mproc, mp_set)
         mproc_pool.close()
         mproc_pool.join()
     else:
         logger.debug('running in single thread...')
-        for i, im in enumerate(imgs):
-            export_image(p_out, im, names[i])
+        map(perfom_mproc, mp_set)
     p_npz = os.path.join(p_out, 'input_images.npz')
     np.savez(open(p_npz, 'w'), imgs)
     return


-def export_image_sp(sp):
-    p_out, im, name = sp
-    export_image(p_out, im, name)
-    return None
+def perfom_mproc(mp_set):
+    export_image(*mp_set)


 def dataset_convert_nifti(path_in, path_out, posix=DEFAULT_IM_POSIX):
```
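The export still ends by bundling all images into one NumPy archive via `np.savez`. A tiny sketch of writing and reading back such an archive (file name, key, and array contents are illustrative; `np.savez` also accepts a plain filename instead of an open file object):

```python
import numpy as np

imgs = [np.zeros((4, 4), dtype=np.uint8), np.ones((4, 4), dtype=np.uint8)]

p_npz = 'input_images.npz'
np.savez(p_npz, imgs=np.asarray(imgs))   # store the image stack under the key 'imgs'

with np.load(p_npz) as data:
    restored = data['imgs']
print(restored.shape)   # (2, 4, 4)
```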