Commit af197919 authored by Jiri Borovec

update

parent 08ac8497
import os
import numpy as np
import matplotlib.pyplot as plt
import logging
import skimage.segmentation as sk_image
import pattern_disctionary as ptn_dict
import pattern_weights as ptn_weight
import metric_similarity as sim_metric
import generate_dataset as gen_data
import matplotlib.pyplot as plt
import logging
logger = logging.getLogger(__name__)
@@ -393,5 +396,6 @@ def alpe_pipe_atlas_learning_ptn_weights(imgs, init_atlas=None, init_weights=Non
            logger.info('>> exiting while the atlas diff %f is smaller than %f',
                        step_diff, thr_step_diff)
            break
+    atlas = sk_image.relabel_sequential(atlas)[0]  # relabel_sequential returns a (relabeled, fw_map, inv_map) tuple
    w_bins = [ptn_weight.weights_image_atlas_overlap_major(img, atlas) for img in imgs]
    return atlas, np.array(w_bins)
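For context on the added relabelling step: skimage's relabel_sequential compacts the label values and returns a tuple, so only its first element is the relabelled atlas. A minimal, self-contained sketch with toy data (not the project's atlas):

import numpy as np
from skimage.segmentation import relabel_sequential

# toy atlas whose labels have a gap (pattern 2 vanished during learning)
atlas = np.array([[0, 1, 1],
                  [0, 3, 3],
                  [0, 3, 3]])

relabeled, fw_map, inv_map = relabel_sequential(atlas)
print(relabeled.max())  # 2 -- labels are now consecutive: 0 (background), 1, 2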
@@ -24,6 +24,7 @@ import src.own_utils.tool_data_io as tl_data
logger = logging.getLogger(__name__)

jirka = False
NB_THREADS = mproc.cpu_count()
if jirka:
    DEFAULT_PATH_DATA = '/jirka/jirka/TEMP/'
else:
@@ -395,7 +396,8 @@ def dataset_load_images(name=DEFAULT_DATASET, path_base=DEFAULT_PATH_APD,
    :return: [np.array], [str]
    """
    path_dir = os.path.join(path_base, name)
-    logger.info('loading folder (%i) <- "%s"', os.path.exists(path_dir), path_dir)
+    logger.info('loading folder (%s) <- "%s"', os.path.exists(path_dir), path_dir)
+    assert os.path.exists(path_dir)
    path_search = os.path.join(path_dir, im_ptn + im_posix)
    logger.debug('image search "%s"', path_search)
    path_imgs = glob.glob(path_search)
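A side note on the %i to %s switch here (and in check_create_dirs further below): both accept a bool, but %s prints True/False while %i renders it as 1/0. A tiny sketch:

import logging
import os

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

path_dir = '/tmp'  # any existing directory
logger.info('loading folder (%i) <- "%s"', os.path.exists(path_dir), path_dir)  # ... (1) <- "/tmp"
logger.info('loading folder (%s) <- "%s"', os.path.exists(path_dir), path_dir)  # ... (True) <- "/tmp"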
@@ -427,7 +429,7 @@ def load_image(path_img):
    n_img = os.path.splitext(os.path.basename(path_img))[0]
    # img = io.imread(path_img)
    img = np.array(Image.open(path_img))
-    img /= float(img.max())
+    img = img / float(img.max())
    return n_img, img
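The out-of-place division matters because Image.open typically yields an integer array; a minimal illustration of why the in-place form breaks (exact behaviour depends on the NumPy version):

import numpy as np

img = np.array([[0, 128, 255]], dtype=np.uint8)   # stand-in for a loaded 8-bit image
# img /= float(img.max())                         # in-place: recent NumPy raises a casting TypeError
img = img / float(img.max())                      # out-of-place: result is upcast to float64
print(img.dtype, img.min(), img.max())            # float64 0.0 1.0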
@@ -17,11 +17,10 @@ import pandas as pd
from PIL import Image
import matplotlib.pyplot as plt
# import src.segmentation.tool_superpixels as tl_spx
-# import pattern_weights as ptn_weight
+import pattern_weights as ptn_weight
logger = logging.getLogger(__name__)
-RUN_DEBUG = True
+RUN_DEBUG = False
VISUAL = True
PATH_BASE = '/datagrid/Medical/microscopy/drosophila/'
@@ -169,16 +168,16 @@ def load_segmentation(dict_params, img_name):
#     return df_diff
-def mproc_wrapper(mp_tuple):
-    # encoding from the input csv
-    return compute_reconstruction(*mp_tuple)
# def mproc_wrapper(mp_tuple):
-#     # recompute encoding and then does the reconstruction
-#     dict_params, path_out, img_atlas, weights, img_name = mp_tuple
-#     segm_orig = load_segmentation(dict_params, img_name)
-#     weights = ptn_weight.weights_image_atlas_overlap_major(segm_orig, img_atlas)
-#     return compute_reconstruction(dict_params, path_out, img_atlas, weights, img_name)
+#     # encoding from the input csv
+#     return compute_reconstruction(*mp_tuple)
+def mproc_wrapper(mp_tuple):
+    # recompute encoding and then does the reconstruction
+    dict_params, path_out, img_atlas, weights, img_name = mp_tuple
+    segm_orig = load_segmentation(dict_params, img_name)
+    weights = ptn_weight.weights_image_atlas_overlap_major(segm_orig, img_atlas)
+    return compute_reconstruction(dict_params, path_out, img_atlas, weights, img_name)

def export_fig_atlas(path_out, name, img_atlas):
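The mp_tuple packing in the wrapper above is the usual shape for multiprocessing.Pool.map, which passes only a single argument per task. A self-contained sketch of that pattern (the toy wrapper, paths and names below are illustrative, not the script's real driver):

import multiprocessing as mproc

NB_THREADS = mproc.cpu_count()

def toy_wrapper(mp_tuple):
    # unpack the same way as mproc_wrapper above; the "work" here is trivial
    dict_params, path_out, img_atlas, weights, img_name = mp_tuple
    return img_name, len(img_name)

if __name__ == '__main__':
    mp_tuples = [({}, '/tmp/out', None, None, name) for name in ('img_000', 'img_001')]
    pool = mproc.Pool(NB_THREADS)
    results = pool.map(toy_wrapper, mp_tuples)
    pool.close()
    pool.join()
    print(results)  # [('img_000', 7), ('img_001', 7)]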
@@ -412,7 +412,7 @@ def check_create_dirs(d_paths, l_req, l_crt):
        if not os.path.exists(d_paths[n]):
            os.mkdir(d_paths[n])
    for n in l_req + l_crt:
-        logger.info('"%s" dir: (%i) <- %s', n, os.path.exists(d_paths[n]), d_paths[n])
+        logger.info('"%s" dir: (%s) <- %s', n, os.path.exists(d_paths[n]), d_paths[n])
    if any([not os.path.exists(d_paths[n]) for n in l_req + l_crt]):
        raise Exception('one or more paths do not exist')
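On the check-then-mkdir pattern in check_create_dirs: it leaves a small race window between the exists() test and mkdir(); on Python 3, os.makedirs with exist_ok=True is the usual one-liner. A sketch only, with a made-up d_paths mapping:

import os

d_paths = {'out': '/tmp/apd_demo/out', 'debug': '/tmp/apd_demo/debug'}  # illustrative paths
for name, path in d_paths.items():
    os.makedirs(path, exist_ok=True)   # creates intermediate dirs, no error if it already exists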
@@ -7,14 +7,19 @@ Second it does mean activation on aggregated gene ids
import os
import sys
import glob
+import json
+import time
+import gc
import logging
import multiprocessing as mproc

import numpy as np
import pandas as pd

-sys.path.append(os.path.abspath(os.path.join('..','..')))  # Add path to root
+sys.path.append(os.path.abspath(os.path.join('..', '..')))  # Add path to root
import src.segmentation.tool_superpixels as tl_spx
+import src.atm_ptn_dict.generate_dataset as gen_data
+import src.atm_ptn_dict.pattern_weights as ptn_weight
import src.atm_ptn_dict.run_apd_reconstruction as r_reconst

NB_THREADS = int(mproc.cpu_count() * .8)
@@ -26,6 +31,7 @@ PREFIX_ATLAS = 'atlas_'
PREFIX_ENCODE = 'encoding_'
PREFIX_CONNECT = 'connectivity_'
POSIX_GENE = '_gene.csv'
+CONFIG_JSON = 'config.json'

logger = logging.getLogger(__name__)
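The new CONFIG_JSON constant feeds load_config_json and recompute_encoding below, which read the 'path_in', 'dataset' and 'sub_dataset' keys. A toy round-trip with invented values (only the key names match what recompute_encoding expects):

import json
import os
import tempfile

config = {'path_in': '/some/input/root', 'dataset': 'synthetic_data',
          'sub_dataset': 'dataset_v0'}

path_expt = tempfile.mkdtemp()
with open(os.path.join(path_expt, 'config.json'), 'w') as fp:
    json.dump(config, fp)
with open(os.path.join(path_expt, 'config.json'), 'r') as fp:
    print(json.load(fp) == config)   # True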
@@ -69,23 +75,48 @@ def export_atlas_connectivity(path_atlas):
    df_connect = pd.DataFrame(matrix_connect, columns=list_ptns, index=list_ptns)
    path_csv = path_atlas.replace(PREFIX_ATLAS, PREFIX_CONNECT).replace('.png', '.csv')
    df_connect.to_csv(path_csv)
+    return img_atlas


def mproc_wrapper(mp_tuple):
    return process_experiment(*mp_tuple)


+def load_config_json(path_expt, config_name=CONFIG_JSON):
+    path_config = os.path.join(path_expt, config_name)
+    if not os.path.exists(path_config):
+        logger.warning('missing particular json config "%s"', path_config)
+        return
+    with open(path_config, 'r') as fp:
+        config = json.load(fp)
+    return config

+def recompute_encoding(atlas, path_csv):
+    path_expt = os.path.dirname(path_csv)
+    config = load_config_json(path_expt)
+    path_in = os.path.join(config.get('path_in'), config.get('dataset'))
+    imgs, im_names = gen_data.dataset_load_images(config.get('sub_dataset'), path_in)
+    weights = [ptn_weight.weights_image_atlas_overlap_major(img, atlas) for img in imgs]
+    df = pd.DataFrame(data=np.array(weights), index=im_names)
+    df.columns = ['ptn {}'.format(lb + 1) for lb in df.columns]
+    df.index.name = 'image'
+    gc.collect()
+    time.sleep(1)
+    return df

def process_experiment(path_csv, df_main):
    logger.info(' -> %s', os.path.basename(path_csv))
-    df_encode = pd.DataFrame.from_csv(path_csv)
+    path_atlas = path_csv.replace(PREFIX_ENCODE, PREFIX_ATLAS).replace('.csv', '.png')
+    atlas = export_atlas_connectivity(path_atlas)
+    df_encode = recompute_encoding(atlas, path_csv)
+    # df_encode = pd.DataFrame.from_csv(path_csv)
    df_encode = extend_df(df_encode, df_main)
    if 'image' in df_encode.columns:
        df_encode = df_encode.set_index('image')
    df_encode.to_csv(path_csv)
    df_result = aggregate_encoding(df_encode)
    df_result.to_csv(path_csv.replace('.csv', '_gene.csv'))
-    path_atlas = path_csv.replace(PREFIX_ENCODE, PREFIX_ATLAS).replace('.csv', '.png')
-    export_atlas_connectivity(path_atlas)


def main(path_csv_main=PATH_CSV_MAIN, path_experiemnts=PATH_EXPERIMENTS):
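For reference, the encoding table that recompute_encoding writes back over the CSV is just a weight matrix with 'ptn k' columns and an 'image' index. A toy version, plus the modern replacement for the commented-out pd.DataFrame.from_csv (removed from recent pandas):

import numpy as np
import pandas as pd

weights = np.array([[1, 0, 1],
                    [0, 1, 1]])                      # toy per-image pattern weights
df = pd.DataFrame(weights, index=['img_000', 'img_001'])
df.columns = ['ptn {}'.format(lb + 1) for lb in df.columns]
df.index.name = 'image'
df.to_csv('encoding_demo.csv')

# pd.DataFrame.from_csv no longer exists in current pandas; the equivalent read is:
df_back = pd.read_csv('encoding_demo.csv', index_col='image')
print(df_back.equals(df))   # True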