Commit 9b62f02c by Jiri Borovec

generating 3D

parent 12c99fce
......@@ -21,9 +21,9 @@ logger = logging.getLogger(__name__)
DEFAULT_UNARY_BACKGROUND = 1
# TODO: init: spatial clustering
# TODO: init: use ICA
# TODO: init: greedy
# TRY: init: spatial clustering
# TRY: init: use ICA
# TRY: init: greedy
# It might rather be doable with a "greedy" method:
# take the first image at random, or conversely the "most active" one = the one
# with the largest energy. Then subtract a suitably chosen multiple of it from
# all the others and compute
......@@ -279,7 +279,7 @@ def alpe_initialisation(imgs, init_atlas, init_weights, out_dir, out_prefix):
if init_atlas is None:
max_nb_lbs = int(np.sqrt(len(imgs)))
logger.debug('... initialise Atlas with ')
# TODO: find better way of initialisation
# IDEA: find better way of initialisation
init_atlas = ptn_dict.initialise_atlas_mosaic(imgs[0].shape, max_nb_lbs)
export_visual_atlas(0, out_dir, atlas=init_atlas, prefix=out_prefix)
......
......@@ -34,7 +34,7 @@ def convert_weights_binary2indexes(weights):
"""
logger.debug('convert binary weights %s to list of indexes with True',
repr(weights.shape))
# if type(weights)==np.ndarray: weights = weights.tolist()
# if type(weights) is np.ndarray: weights = weights.tolist()
w_index = [None] * weights.shape[0]
for i in range(weights.shape[0]):
# find positions equal 1
......
......@@ -170,7 +170,7 @@ def load_segmentation(dict_params, img_name):
# return df_diff
# def mproc_wrapper(mp_tuple):
# def wrapper_export_image(mp_tuple):
# # encoding from the input csv
# return compute_reconstruction(*mp_tuple)
......
......@@ -208,7 +208,7 @@ def experiments_synthetic(dataset=None, nb_jobs=NB_THREADS,
logging.basicConfig(level=logging.INFO)
params = copy.deepcopy(dict_params)
if type(dataset) is str:
if isinstance(dataset, str):
params.update({'dataset': dataset})
l_params = [params]
......
......@@ -182,7 +182,7 @@ def parameters_sta(dataset, dict_params=experiment_apd.SYNTH_PARAMS,
:return:
"""
params = dict_params.copy()
if type(dataset) is str:
if isinstance(dataset, str):
params.update({'dataset': dataset})
params['nb_runs'] = 9
params['max_iter'] = 999
......
"""
The main script for generating synthetic datasets
Copyright (C) 2015-2016 Jiri Borovec <jiri.borovec@fel.cvut.cz>
"""
import os
import logging
import multiprocessing as mproc
from functools import partial
import dataset_utils as tl_dataset
......@@ -18,50 +20,55 @@ else:
# PATH_DATA_SYNTH = '/datagrid/Medical/microscopy/drosophila_segmOvary/'
DEFAULT_PATH_DATA = '/datagrid/temporary/Medical/'
NB_THREADS = int(mproc.cpu_count() * 0.7)
DEFAULT_DIR_APD = 'atomicPatternDictionary_vx'
DEFAULT_PATH_APD = os.path.join(DEFAULT_PATH_DATA, DEFAULT_DIR_APD)
NAME_WEIGHTS = 'combination.csv'
DATASET_TYPE = '2D'
IMAGE_SIZE = {
'2D': (128, 128),
'3D': (16, 128, 128),
}
NOISE_BINARY = 0.03
NOISE_PROB = 0.2
def generate_all(path_out=DEFAULT_PATH_APD, csv_name=NAME_WEIGHTS,
                 atlas_size=IMAGE_SIZE[DATASET_TYPE]):
    """ generate complete dataset containing dictionary of patterns and also
    input binary / probab. images with geometrical deformation and random noise

    :param path_out: str, path to the results directory
    :param csv_name: str, file name for the CSV with pattern combinations
    :param atlas_size: tuple, atlas dimensions, e.g. (128, 128) for 2D
        or (16, 128, 128) for 3D
    """
    assert os.path.exists(os.path.dirname(path_out))
    if not os.path.exists(path_out):
        os.mkdir(path_out)

    def path_dir(d):
        # sub-directory of the output root for one dataset variant
        return os.path.join(path_out, d)

    # im_dict = dictionary_generate_rnd_pattern()
    im_dict = tl_dataset.dictionary_generate_atlas(path_out, im_size=atlas_size)

    im_comb, df_comb = tl_dataset.dataset_binary_combine_patterns(
        im_dict, path_dir('datasetBinary_raw'))
    df_comb.to_csv(os.path.join(path_dir('datasetBinary_raw'), csv_name))

    # all remaining variants are derived by applying an image function in parallel
    ds_apply = partial(tl_dataset.dataset_apply_image_function,
                       nb_jobs=NB_THREADS)

    # binary variants: elastic deformation, noise, deformation + noise
    im_deform = ds_apply(im_comb, path_dir('datasetBinary_deform'),
                         tl_dataset.image_deform_elastic)
    ds_apply(im_comb, path_dir('datasetBinary_noise'),
             tl_dataset.add_image_binary_noise, NOISE_BINARY)
    ds_apply(im_deform, path_dir('datasetBinary_defNoise'),
             tl_dataset.add_image_binary_noise, NOISE_BINARY)

    # probabilistic variants derived from the binary images
    im_comb_prob = ds_apply(im_comb, path_dir('datasetProb_raw'),
                            tl_dataset.image_transform_binary2prob, 0.5)
    # NOTE(review): the deformed branch uses add_image_prob_noise with 0.5 while
    # the raw branch uses image_transform_binary2prob — confirm this asymmetry
    # is intended
    im_def_prob = ds_apply(im_deform, path_dir('datasetProb_deform'),
                           tl_dataset.add_image_prob_noise, 0.5)
    ds_apply(im_comb_prob, path_dir('datasetProb_noise'),
             tl_dataset.add_image_prob_noise, NOISE_PROB)
    ds_apply(im_def_prob, path_dir('datasetProb_defNoise'),
             tl_dataset.add_image_prob_noise, NOISE_PROB)
def convert_dataset_nifti(p_datasets=DEFAULT_PATH_APD):
......@@ -69,17 +76,13 @@ def convert_dataset_nifti(p_datasets=DEFAULT_PATH_APD):
os.path.join(p_datasets, 'datasetBinary_raw_nifti'))
def main(data_dim=DATASET_TYPE):
    """ run the complete synthetic dataset generation

    :param data_dim: str, dataset dimensionality, a key into IMAGE_SIZE
        ('2D' or '3D')
    """
    logging.basicConfig(level=logging.DEBUG)
    logger.info('running...')
    # test_Ellipse()

    generate_all(atlas_size=IMAGE_SIZE[data_dim])

    # convert_dataset_nifti()
......@@ -87,4 +90,4 @@ def main():
# script entry point: generate the 3D variant of the synthetic dataset
if __name__ == "__main__":
    main('3D')
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment