Commit d4f03b3f authored by Jiri Borovec's avatar Jiri Borovec

loading tiff

parent 9b62f02c
......@@ -17,7 +17,7 @@ import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import ndimage
from skimage import io, draw, transform, filters
from skimage import draw, transform, filters
from PIL import Image
import libtiff
import tqdm
......@@ -28,7 +28,6 @@ NB_THREADS = mproc.cpu_count()
IMAGE_SIZE_2D = (128, 128)
IMAGE_SIZE_3D = (16, 128, 128)
NB_ATM_PATTERNS = 9
# NB_SAMPLES = 1500
NB_SAMPLES = 50
IMAGE_POSIX = '.png'
IMAGE_PATTERN = 'pattern_{:03d}'
......@@ -217,9 +216,9 @@ def dictionary_generate_atlas(path_out, dir_name=DIR_NAME_DICTIONARY,
plt.imshow(atlas[int(atlas.shape[0] / 2)])
plt.show()
atlas_new, imgs_patterns = atlas_filter_larges_components(atlas)
export_image(out_dir, atlas_new, 'atlas')
for i, img in enumerate(imgs_patterns):
export_image(out_dir, img, i, temp_img_name)
export_image(out_dir, atlas_new, 'atlas')
return imgs_patterns
......@@ -455,9 +454,17 @@ def dataset_load_images(path_base, name, im_pattern='*', im_posix=IMAGE_POSIX,
def load_image(path_img):
    """Load a single image from disk and normalise it to the [0, 1] range.

    Multi-page TIFF volumes are read via libtiff into a 3D array; any other
    raster format is read through PIL as a 2D array.

    :param str path_img: path to an existing image file
    :return tuple(str, ndarray): (image name without extension, float image)
    """
    assert os.path.exists(path_img), 'missing image: %s' % path_img
    n_img, img_ext = os.path.splitext(os.path.basename(path_img))
    # accept both common TIFF suffixes, case-insensitively
    if img_ext.lower() in ('.tif', '.tiff'):
        # libtiff exposes a lazy tiff array; copy it plane by plane into a
        # real ndarray so downstream numpy code can operate on it directly
        # (the former np.array(img.tolist()) roundtrip was redundant)
        im = libtiff.TiffFile(path_img).get_tiff_array()
        img = np.empty(im.shape)
        for i in range(img.shape[0]):
            img[i, :, :] = im[i]
    else:
        # img = io.imread(path_img)
        img = np.array(Image.open(path_img))
    # normalise to [0, 1]; guard against division by zero on a blank image
    v_max = float(img.max())
    if v_max > 0:
        img = img / v_max
    return n_img, img
......
......@@ -6,6 +6,8 @@ Copyright (C) 2015-2016 Jiri Borovec <jiri.borovec@fel.cvut.cz>
import os
import logging
import inspect
import json
import multiprocessing as mproc
from functools import partial
......@@ -24,17 +26,32 @@ NB_THREADS = int(mproc.cpu_count() * 0.7)
DEFAULT_DIR_APD = 'atomicPatternDictionary_vx'
DEFAULT_PATH_APD = os.path.join(DEFAULT_PATH_DATA, DEFAULT_DIR_APD)
NAME_WEIGHTS = 'combination.csv'
NAME_CONFIG = 'config.json'
DATASET_TYPE = '2D'
IMAGE_SIZE = {
'2D': (128, 128),
'3D': (16, 128, 128),
}
NB_SAMPLES = 800
NB_ATM_PATTERNS = 9
NOISE_BINARY = 0.03
NOISE_PROB = 0.2
def view_func_params(frame=None, path_out=''):
    """Log the parameters of the calling function and optionally dump them
    to a ``config.json`` in the output directory.

    :param frame: stack frame to inspect; defaults to the caller's frame.
        (The previous default ``inspect.currentframe()`` was evaluated once
        at import time, so it captured the module frame, never the caller's.)
    :param str path_out: existing output directory for the JSON dump;
        silently skipped when the directory does not exist
    :return dict: mapping of the inspected argument names to their values
    """
    if frame is None:
        # resolve the caller's frame lazily, at call time
        frame = inspect.currentframe().f_back
    _, _, _, values = inspect.getargvalues(frame)
    # iteritems() is Python-2-only; items() works on both 2 and 3
    logger.info('PARAMETERS: \n%s',
                '\n'.join('"{}": \t {}'.format(k, v)
                          for k, v in values.items()))
    if os.path.exists(path_out):
        path_json = os.path.join(path_out, NAME_CONFIG)
        with open(path_json, 'w') as fp:
            # repr() fall-back keeps non-JSON-serialisable locals
            # (arrays, frames, ...) from crashing the dump
            json.dump(values, fp, default=repr)
    return values
def generate_all(path_out=DEFAULT_PATH_APD, csv_name=NAME_WEIGHTS,
atlas_size=IMAGE_SIZE[DATASET_TYPE]):
atlas_size=IMAGE_SIZE[DATASET_TYPE], nb_patterns=NB_ATM_PATTERNS,
nb_samples=NB_SAMPLES):
""" generate complete dataset containing dictionary od patterns and also
input binary / probab. images with geometrical deformation and random noise
......@@ -44,12 +61,14 @@ def generate_all(path_out=DEFAULT_PATH_APD, csv_name=NAME_WEIGHTS,
assert os.path.exists(os.path.dirname(path_out))
if not os.path.exists(path_out):
os.mkdir(path_out)
view_func_params(inspect.currentframe(), path_out)
path_dir = lambda d: os.path.join(path_out, d)
# im_dict = dictionary_generate_rnd_pattern()
im_dict = tl_dataset.dictionary_generate_atlas(path_out, im_size=atlas_size)
im_dict = tl_dataset.dictionary_generate_atlas(path_out, im_size=atlas_size,
nb_ptns=nb_patterns)
im_comb, df_comb = tl_dataset.dataset_binary_combine_patterns(im_dict,
path_dir('datasetBinary_raw'))
path_dir('datasetBinary_raw'), nb_samples)
df_comb.to_csv(os.path.join(path_dir('datasetBinary_raw'), csv_name))
ds_apply = partial(tl_dataset.dataset_apply_image_function, nb_jobs=NB_THREADS)
......@@ -77,7 +96,7 @@ def convert_dataset_nifti(p_datasets=DEFAULT_PATH_APD):
def main(data_dim=DATASET_TYPE):
logging.basicConfig(level=logging.DEBUG)
logging.basicConfig(level=logging.INFO)
logger.info('running...')
# test_Ellipse()
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment