Commit 69f975ce authored by Jiri Borovec's avatar Jiri Borovec

set arg parser

parent 4ce25b77
......@@ -4,4 +4,6 @@ pandas
matplotlib
Pillow
scikit-learn
scikit-image
\ No newline at end of file
scikit-image
libtiff
tqdm
\ No newline at end of file
......@@ -8,6 +8,7 @@ import os
import logging
import inspect
import json
import argparse
import multiprocessing as mproc
from functools import partial
......@@ -15,14 +16,8 @@ import dataset_utils as tl_dataset
logger = logging.getLogger(__name__)
b_jirka = False
if b_jirka:
DEFAULT_PATH_DATA = '/jirka/b_jirka/TEMP/'
else:
# PATH_DATA_SYNTH = '/datagrid/Medical/microscopy/drosophila_segmOvary/'
DEFAULT_PATH_DATA = '/datagrid/temporary/Medical/'
NB_THREADS = int(mproc.cpu_count() * 0.7)
DEFAULT_PATH_DATA = '/datagrid/temporary/Medical/'
DEFAULT_DIR_APD = 'atomicPatternDictionary_vx'
DEFAULT_PATH_APD = os.path.join(DEFAULT_PATH_DATA, DEFAULT_DIR_APD)
NAME_WEIGHTS = 'combination.csv'
......@@ -32,12 +27,38 @@ IMAGE_SIZE = {
'2D': (128, 128),
'3D': (16, 128, 128),
}
NB_SAMPLES = 800
NB_ATM_PATTERNS = 9
NB_SAMPLES = 1500
NB_ATM_PATTERNS = 18
NOISE_BINARY = 0.03
NOISE_PROB = 0.2
def aparse_params(argv=None):
    """ parse and validate the command-line arguments of the generator script

    SEE: https://docs.python.org/3/library/argparse.html

    :param argv: [str] optional list of argument strings; None means sys.argv
        (keeping None as default preserves the original call `aparse_params()`)
    :return: argparse.Namespace with fields nb_samples, nb_patterns,
        path_out (absolute, user-expanded), image_size and nb_jobs
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--nb_samples', type=int, required=False, default=NB_SAMPLES,
                        help='number of samples to be generated in each dataset')
    parser.add_argument('--nb_patterns', type=int, required=False,
                        default=NB_ATM_PATTERNS,
                        help='number of atom. patterns in created dictionary')
    parser.add_argument('--path_out', type=str, required=False,
                        default=DEFAULT_PATH_APD,
                        help='path to the datasets ending '
                             'with name of datasets parent folder')
    parser.add_argument('--image_size', type=int, required=False, nargs='+',
                        default=IMAGE_SIZE[DATASET_TYPE],
                        help='dimensions of generated images in axis Z, X, Y')
    parser.add_argument('--nb_jobs', type=int, required=False, default=NB_THREADS,
                        help='number of processes in parallel')
    args = parser.parse_args(argv)
    # validate via parser.error (exits with usage message) instead of assert,
    # which would be silently stripped when running under `python -O`
    if len(args.image_size) not in (2, 3):
        parser.error('image_size expects 2 (2D) or 3 (3D) dimensions, got %r'
                     % args.image_size)
    # normalise the output path: expand '~' and make it absolute
    args.path_out = os.path.abspath(os.path.expanduser(args.path_out))
    return args
def view_func_params(frame=inspect.currentframe(), path_out=''):
args, _, _, values = inspect.getargvalues(frame)
logger.info('PARAMETERS: \n%s',
......@@ -49,12 +70,14 @@ def view_func_params(frame=inspect.currentframe(), path_out=''):
return values
def generate_all(path_out=DEFAULT_PATH_APD, csv_name=NAME_WEIGHTS,
atlas_size=IMAGE_SIZE[DATASET_TYPE], nb_patterns=NB_ATM_PATTERNS,
nb_samples=NB_SAMPLES):
def generate_all(path_out=DEFAULT_PATH_APD, atlas_size=IMAGE_SIZE[DATASET_TYPE],
nb_patterns=NB_ATM_PATTERNS, nb_samples=NB_SAMPLES, nb_jobs=NB_THREADS):
""" generate complete dataset containing dictionary od patterns and also
input binary / probab. images with geometrical deformation and random noise
:param atlas_size:
:param nb_samples:
:param nb_patterns:
:param csv_name: str
:param path_out: str, path to the results directory
"""
......@@ -69,9 +92,9 @@ def generate_all(path_out=DEFAULT_PATH_APD, csv_name=NAME_WEIGHTS,
im_comb, df_weights = tl_dataset.dataset_binary_combine_patterns(im_dict,
path_dir('datasetBinary_raw'), nb_samples)
df_weights.to_csv(os.path.join(path_out, csv_name))
df_weights.to_csv(os.path.join(path_out, NAME_WEIGHTS))
ds_apply = partial(tl_dataset.dataset_apply_image_function, nb_jobs=NB_THREADS)
ds_apply = partial(tl_dataset.dataset_apply_image_function, nb_jobs=nb_jobs)
im_deform = ds_apply(im_comb, path_dir('datasetBinary_deform'),
tl_dataset.image_deform_elastic)
......@@ -95,13 +118,16 @@ def convert_dataset_nifti(p_datasets=DEFAULT_PATH_APD):
os.path.join(p_datasets, 'datasetBinary_raw_nifti'))
def main(data_dim=DATASET_TYPE):
def main():
logging.basicConfig(level=logging.INFO)
logger.info('running...')
params = aparse_params()
# test_Ellipse()
generate_all(atlas_size=IMAGE_SIZE[data_dim])
generate_all(path_out=params.path_out, atlas_size=params.image_size,
nb_patterns=params.nb_patterns, nb_samples=params.nb_samples,
nb_jobs=params.nb_jobs)
# convert_dataset_nifti()
......@@ -109,4 +135,4 @@ def main(data_dim=DATASET_TYPE):
if __name__ == "__main__":
main('3D')
\ No newline at end of file
main()
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment