Commit d09ea5ae authored by Jirka's avatar Jirka

pep8

parent 2dac32d9
import os
import numpy as np
import generate_dataset
import pattern_weights as ptnWeight
import similarity_metric as simMetric
import matplotlib.pyplot as plt
DEFAULT_UNARY_BACKGROUND = 1
import logging
logger = logging.getLogger(__name__)
def initialise_atlas_random(im_size, max_lb):
    """Initialise an atlas of the given size with uniformly random labels.

    :param im_size: tuple, shape of the atlas image
    :param max_lb: int, maximal label value (labels span 0..max_lb)
    :return: ndarray of int labels with shape ``im_size``
    """
    logger.debug('initialise atlas {} as random labeling'.format(im_size))
    nb_lbs = max_lb + 1
    im = np.random.randint(0, nb_lbs, im_size)
    # np.int was removed in NumPy 1.24; the builtin int is the correct dtype
    return np.array(im, dtype=int)
def initialise_atlas_mosaic(im_size, max_lb):
    """Initialise an atlas as a regular grid (mosaic) of randomly permuted labels.

    :param im_size: tuple (height, width) of the atlas
    :param max_lb: int, maximal label value (labels span 0..max_lb)
    :return: ndarray of int labels with shape ``im_size``
    """
    logger.debug('initialise atlas {} as grid labeling'.format(im_size))
    nb_lbs = max_lb + 1
    # the block shape must be integral: np.ones rejects float sizes, and the
    # np.float / np.int aliases were removed in NumPy 1.24
    block_size = np.ceil(np.array(im_size) / float(nb_lbs)).astype(int)
    block = np.ones(block_size)
    logger.debug('block size is {}'.format(block.shape))
    rows = []
    for _ in range(nb_lbs):
        # each row of blocks carries a random permutation of all labels
        idx = np.random.permutation(range(nb_lbs))
        row = np.hstack([block.copy() * idx[k] for k in range(nb_lbs)])
        rows.append(row)
    mosaic = np.vstack(rows)
    # original log call passed two args to a single '{}' placeholder
    logger.debug('generated mosaic {} with labeling {}'.format(
        mosaic.shape, np.unique(mosaic).tolist()))
    # crop the tiled mosaic back to the requested atlas size
    im = mosaic[:im_size[0], :im_size[1]]
    return np.array(im, dtype=int)
def initialise_atlas_deform_original(atlas):
    """Initialise an atlas by elastically deforming an existing one.

    :param atlas: 2D ndarray of int labels
    :return: ndarray of int labels, a deformed copy of the input
    """
    logger.debug('initialise atlas by deforming original one')
    res = generate_dataset.image_deform_elastic(atlas)
    # np.int was removed in NumPy 1.24; the builtin int is the correct dtype
    return np.array(res, dtype=int)
# TODO: spatial clustering
# TODO: init with ICA
# TODO: a "greedy" method might also work here:
# take the first image at random, or conversely the "most active" one, i.e.
# the one with the highest energy. Then subtract a suitably chosen multiple
# of it from all the others and compute the "residuals". From the residuals
# again take the one with the highest energy, and so on. The subtraction can
# be algebraic or logical; in either case keep only the positive values.
def initialise_weights_random(nb_imgs, nb_lbs, ratio_sel=0.2):
    """Randomly initialise binary weights: each entry is 1 with prob. ratio_sel.

    :param nb_imgs: int, number of images (rows)
    :param nb_lbs: int, number of labels/patterns (columns)
    :param ratio_sel: float, selection probability per entry
    :return: float ndarray (nb_imgs, nb_lbs) of 0/1 values
    """
    logger.debug('initialise weights for {} images and {} labels '
                 'as random selection'.format(nb_imgs, nb_lbs))
    prob = np.random.random((nb_imgs, nb_lbs))
    # binarise the sampled probabilities by the selection ratio
    return (prob <= ratio_sel).astype(float)
def compute_relative_penaly_images_weights(imgs, weights):
    """Compute per-pixel unary costs from images and their pattern weights.

    For each pixel and each label the cost is the absolute difference between
    the label's weight (with an implicit background label 0 prepended) and the
    pixel value, averaged over all images.

    :param imgs: list of 2D ndarrays, all of the same shape
    :param weights: 2D ndarray (nb_images, nb_labels)
    :return: ndarray of shape imgs[0].shape + (nb_labels + 1,)
    """
    logger.info('compute unary cost from images and related weights')
    assert len(imgs) == weights.shape[0]
    # extend the weights by the background label with value 0
    weights_ext = np.append(np.zeros((weights.shape[0], 1)), weights, axis=1)
    # vectorised over all pixels and images (replaces the former triple loop,
    # resolving the "make it as matrix ops" TODO)
    imgs_stack = np.asarray(imgs, dtype=float)  # (nb_imgs, h, w)
    # broadcast (nb_imgs, h, w, 1) against (nb_imgs, 1, 1, nb_lbs)
    cost = np.abs(imgs_stack[..., np.newaxis]
                  - weights_ext[:, np.newaxis, np.newaxis, :])
    pott_sum_norm = cost.sum(axis=0) / float(len(imgs))
    return pott_sum_norm
def compute_positive_cost_images_weights(imgs, weights):
    """Compute per-pixel unary potentials by counting active patterns.

    NOTE: kept for reference; no longer used by the pipeline.

    :param imgs: list of binary 2D ndarrays
    :param weights: binary ndarray (nb_images, nb_labels)
    :return: ndarray imgs[0].shape + (nb_labels + 1,)
    """
    logger.info('compute unary cost from images and related weights')
    w_idx = ptnWeight.convert_weights_binary2indexes(weights)
    nb_lbs = weights.shape[1] + 1
    assert len(imgs) == len(w_idx)
    pott_sum = np.zeros(imgs[0].shape + (nb_lbs,))
    # walk over all pixels in each image
    logger.debug('... walk over all pixels in each image')
    for i in range(pott_sum.shape[0]):
        for j in range(pott_sum.shape[1]):
            # per all images in the list
            for k in range(len(imgs)):
                # if the pixel is active, increment every pattern of image k
                if imgs[k][i, j] == 1:
                    # 'lb' instead of the former 'id' (shadowed the builtin)
                    for lb in w_idx[k]:
                        pott_sum[i, j, lb] += 1
            # set also the background value
            pott_sum[i, j, 0] = DEFAULT_UNARY_BACKGROUND
    return pott_sum
def get_edges_image_plane(im_size):
    """Build 4-connectivity edges between pixel indices of an image grid.

    :param im_size: tuple (height, width)
    :return: ndarray (nb_edges, 2) of index pairs, horizontal edges first,
        then vertical edges
    """
    # np.product was removed in NumPy 2.0; np.prod is the canonical name
    idxs = np.arange(np.prod(im_size)).reshape(im_size)
    # horizontal neighbours followed by vertical neighbours
    e_start = idxs[:, :-1].ravel().tolist() + idxs[:-1, :].ravel().tolist()
    e_end = idxs[:, 1:].ravel().tolist() + idxs[1:, :].ravel().tolist()
    edges = np.array([e_start, e_end]).transpose()
    logger.debug('edges for image plane are shape {}'.format(edges.shape))
    return edges
def estimate_atlas_graphcut_simple(imgs, encodding, coef=1.):
    """Estimate the atlas labelling with GraphCut on a grid Potts model.

    :param imgs: list of binary 2D ndarrays
    :param encodding: binary weights ndarray (nb_images, nb_labels)
    :param coef: float, pairwise (smoothness) coefficient
    :return: 2D ndarray of labels with the image shape
    """
    logger.info('estimate atlas via GraphCut from Potts model')
    # source: https://github.com/yujiali/pygco
    from src.wrappers.GraphCut.pygco import cut_grid_graph_simple
    labeling_sum = compute_positive_cost_images_weights(imgs, encodding)
    # negate so that higher counts become lower (better) unary costs
    unary_cost = np.array(-1 * labeling_sum, dtype=np.int32)
    logger.debug('graph unaries potentials {}: \n{}'.format(
        unary_cost.shape, np.histogram(unary_cost, bins=10)))
    # Potts pairwise term: zero on the diagonal, coef everywhere else
    pairwise_cost = np.array((1 - np.eye(labeling_sum.shape[-1])) * coef,
                             dtype=np.int32)
    logger.debug('graph pairwise coefs {}'.format(pairwise_cost.shape))
    # run GraphCut and fold the flat labels back onto the image plane
    labels = cut_grid_graph_simple(unary_cost, pairwise_cost,
                                   algorithm='expansion')
    labels = labels.reshape(labeling_sum.shape[:2])
    logger.debug('resulting labelling {}: \n{}'.format(labels.shape, labels))
    return labels
def estimate_atlas_graphcut_general(imgs, encoding, coef=1., init_lbs=None):
    """Estimate the atlas labelling with GraphCut on a general Potts graph.

    :param imgs: list of binary 2D ndarrays
    :param encoding: binary weights ndarray (nb_images, nb_labels)
    :param coef: float, pairwise (smoothness) coefficient
    :param init_lbs: optional initial labelling for the optimiser
    :return: 2D ndarray of labels with the image shape
    """
    logger.info('estimate atlas via GraphCut from Potts model')
    # source: https://github.com/yujiali/pygco
    from src.wrappers.GraphCut.pygco import cut_general_graph
    u_cost = compute_relative_penaly_images_weights(imgs, encoding)
    # flatten the per-pixel unary costs into (nb_pixels, nb_labels)
    unary_cost = np.array(u_cost, dtype=np.float64)
    unary_cost = unary_cost.reshape(-1, u_cost.shape[-1])
    logger.debug('graph unaries potentials {}: \n{}'.format(
        unary_cost.shape, np.histogram(unary_cost, bins=10)))
    edges = get_edges_image_plane(u_cost.shape[:2])
    logger.debug('edges for image plane are shape {}'.format(edges.shape))
    edge_weights = np.ones(edges.shape[0])
    logger.debug('edges weights are shape {}'.format(edge_weights.shape))
    # Potts pairwise term: zero on the diagonal, coef everywhere else
    pairwise_cost = np.array((1 - np.eye(u_cost.shape[-1])) * coef,
                             dtype=np.float64)
    logger.debug('graph pairwise coefs {}'.format(pairwise_cost.shape))
    # start from the per-pixel minimal unary cost unless an init is provided
    init_lbs = (np.argmin(unary_cost, axis=1) if init_lbs is None
                else init_lbs.ravel())
    logger.debug('graph initial labels {}'.format(init_lbs.shape))
    # run GraphCut and fold the flat labels back onto the image plane
    labels = cut_general_graph(edges, edge_weights, unary_cost, pairwise_cost,
                               algorithm='expansion', init_labels=init_lbs)
    labels = labels.reshape(u_cost.shape[:2])
    logger.debug('resulting labelling {}'.format(labels.shape))
    return labels
def export_visualization_image(img, i, out_dir, prefix='debug', name='',
                               ration=None, labels=None):
    """Save a matplotlib visualisation of the given image/matrix.

    :param img: 2D array to display
    :param i: int iteration number, encoded in the file name
    :param out_dir: str output directory
    :param prefix: str file-name prefix
    :param name: str visualisation name used in the file name
    :param ration: aspect ratio passed to imshow (e.g. 'auto')
    :param labels: pair [x_label, y_label] of axis labels
    """
    # a mutable list default is a shared-state hazard; use None as sentinel
    if labels is None:
        labels = ['', '']
    fig = plt.figure()
    plt.imshow(img, interpolation='none', aspect=ration)
    plt.xlabel(labels[0]), plt.ylabel(labels[1])
    p_out = os.path.join(out_dir, '{}_ALPE_{}_iter_{:04d}.png'.format(prefix,
                                                                      name, i))
    logger.debug('.. export Vusialization as "{}"'.format(p_out))
    fig.savefig(p_out, bbox_inches='tight', pad_inches=0.05)
    plt.close()
def export_visual_atlas(i, out_dir, atlas=None, weights=None, prefix='debug'):
    """Export debug visualisations of the atlas and/or weights.

    Only active when the logger runs at DEBUG level; silently skips a missing
    output directory.
    """
    # exporting is a debugging aid only
    if logger.getEffectiveLevel() != logging.DEBUG:
        return None
    if not os.path.exists(out_dir):
        logger.debug('output path "{}" does not exist'.format(out_dir))
        return None
    if atlas is not None:
        export_visualization_image(atlas, i, out_dir, prefix, 'atlas',
                                   labels=['X', 'Y'])
    if weights is not None:
        export_visualization_image(weights, i, out_dir, prefix, 'weights',
                                   'auto', ['patterns', 'images'])
    return None
def pipeline_estim_atlas_learning_ptn_weights(imgs, init_atlas=None,
        init_weights=None, c_compact=0.0, thr_step_diff=0.0, max_iter=99,
        stop=True, reinit=True, out_prefix='debug', out_dir=''):
    """Alternate estimation of an atlas and per-image pattern weights.

    Iterates: (1) derive binary weights per image from the current atlas,
    (2) re-estimate the atlas by GraphCut; stops when the adjusted-Rand
    difference between consecutive atlases drops below ``thr_step_diff``.

    :param imgs: list of 2D binary images
    :param init_atlas: optional initial atlas labelling
    :param init_weights: optional initial binary weights (images x patterns)
    :param c_compact: float, compactness (pairwise) coefficient for GraphCut
    :param thr_step_diff: float, stopping threshold on the atlas change
    :param max_iter: int, maximal number of iterations
    :param stop: bool, whether to apply the stopping criterion
    :param reinit: bool, reuse the previous atlas to initialise GraphCut
    :param out_prefix: str, prefix for exported debug images
    :param out_dir: str, output directory for debug images
    :return: tuple (atlas, w_bins) — final atlas and binary weights
    """
    logger.info('compute an Atlas and perform images w_bins')
    assert len(imgs) >= 0
    # assert initAtlas is not None or type(max_nb_lbs)==int
    # initialise: derive the atlas from the weights if only weights are given
    if init_weights is not None and init_atlas is None:
        logger.debug('... initialise Atlas from w_bins')
        init_atlas = estimate_atlas_graphcut_general(imgs, init_weights, 0.)
        export_visual_atlas(0, out_dir, weights=init_weights, prefix=out_prefix)
    if init_atlas is None:
        # heuristic label count: sqrt of the number of images
        max_nb_lbs = int(np.sqrt(len(imgs)))
        logger.debug('... initialise Atlas with ')
        # TODO: find better way of initialisation
        init_atlas = initialise_atlas_mosaic(imgs[0].shape, max_nb_lbs)
        export_visual_atlas(0, out_dir, atlas=init_atlas, prefix=out_prefix)
    atlas = init_atlas
    w_bins = init_weights
    # a single-label atlas cannot drive the weight estimation
    if len(np.unique(atlas)) == 1:
        logger.info('ERROR: the atlas does not contain '
                    'any label... {}'.format(np.unique(atlas)))
    export_visual_atlas(0, out_dir, atlas, w_bins, prefix=out_prefix)
    for i in range(max_iter):
        if len(np.unique(atlas)) == 1:
            logger.info('ERROR: the atlas does not contain '
                        'any label... {}'.format(np.unique(atlas)))
        # update w_bins from the current atlas
        logger.debug('... perform pattern Weights')
        # w_bins = [ptnWeight.weighs_image_atlas_overlap_major(img, atlas)
        #           for img in imgs]
        w_bins = [ptnWeight.weighs_image_atlas_overlap_partial(img, atlas)
                  for img in imgs]
        # add once for patterns that are not used at all
        w_bins = ptnWeight.fill_empty_patterns(np.array(w_bins))
        if np.sum(w_bins) == 0:
            logger.info('ERROR: the w_bins is empty... {}'.format(np.unique(atlas)))
        # update atlas by GraphCut, optionally warm-started from the old atlas
        logger.debug('... perform Atlas estimation')
        # atlasNew = estimate_atlas_graphcut_simple(imgs, w_bins)
        if reinit:
            atlasNew = estimate_atlas_graphcut_general(imgs, w_bins,
                                                       c_compact, atlas)
        else:
            atlasNew = estimate_atlas_graphcut_general(imgs, w_bins, c_compact)
        # measure the change between consecutive atlases
        step_diff = simMetric.compare_atlas_adjusted_rand(atlas, atlasNew)
        logger.info('-> iter. #{} with rltv Atlas diff {}'.format(i+1, step_diff))
        atlas = atlasNew
        export_visual_atlas(i + 1, out_dir, atlas, w_bins, prefix=out_prefix)
        # stopping criterion on the atlas change
        if stop and step_diff <= thr_step_diff:
            logger.info('>> exiting while theatlas diff {} is '
                        'smaller then {}'.format(step_diff, thr_step_diff))
            break
    return atlas, w_bins
# TODO: Matching Pursuit
# http://scikit-learn.org/stable/modules/generated/sklearn.linear_model.OrthogonalMatchingPursuit.html
# It seems it should be fairly easy to implement a Matching Pursuit variant
# where each iteration maximises the scalar product itself rather than its
# absolute value; that eliminates negative values. If coefficients strictly
# in the interval [0, 1] were required, one would determine per pattern the
# approximation error under that constraint — but that seems unnecessary here.
# Other: http://winsty.net/onndl.html
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
import os
import numpy as np
import generate_dataset
import pattern_weights as ptnWeight
import similarity_metric as simMetric
import matplotlib.pyplot as plt
DEFAULT_UNARY_BACKGROUND = 1
import logging
logger = logging.getLogger(__name__)
def initialise_atlas_random(im_size, max_lb):
    """Initialise an atlas of the given size with uniformly random labels.

    :param im_size: tuple, shape of the atlas image
    :param max_lb: int, maximal label value (labels span 0..max_lb)
    :return: ndarray of int labels with shape ``im_size``
    """
    logger.debug('initialise atlas {} as random labeling'.format(im_size))
    nb_lbs = max_lb + 1
    im = np.random.randint(0, nb_lbs, im_size)
    # np.int was removed in NumPy 1.24; the builtin int is the correct dtype
    return np.array(im, dtype=int)
def initialise_atlas_mosaic(im_size, max_lb):
    """Initialise an atlas as a regular grid (mosaic) of randomly permuted labels.

    :param im_size: tuple (height, width) of the atlas
    :param max_lb: int, maximal label value (labels span 0..max_lb)
    :return: ndarray of int labels with shape ``im_size``
    """
    logger.debug('initialise atlas {} as grid labeling'.format(im_size))
    nb_lbs = max_lb + 1
    # the block shape must be integral: np.ones rejects float sizes, and the
    # np.float / np.int aliases were removed in NumPy 1.24
    block_size = np.ceil(np.array(im_size) / float(nb_lbs)).astype(int)
    block = np.ones(block_size)
    logger.debug('block size is {}'.format(block.shape))
    rows = []
    for _ in range(nb_lbs):
        # each row of blocks carries a random permutation of all labels
        idx = np.random.permutation(range(nb_lbs))
        row = np.hstack([block.copy() * idx[k] for k in range(nb_lbs)])
        rows.append(row)
    mosaic = np.vstack(rows)
    # original log call passed two args to a single '{}' placeholder
    logger.debug('generated mosaic {} with labeling {}'.format(
        mosaic.shape, np.unique(mosaic).tolist()))
    # crop the tiled mosaic back to the requested atlas size
    im = mosaic[:im_size[0], :im_size[1]]
    return np.array(im, dtype=int)
def initialise_atlas_deform_original(atlas):
    """Initialise an atlas by elastically deforming an existing one.

    :param atlas: 2D ndarray of int labels
    :return: ndarray of int labels, a deformed copy of the input
    """
    logger.debug('initialise atlas by deforming original one')
    res = generate_dataset.image_deform_elastic(atlas)
    # np.int was removed in NumPy 1.24; the builtin int is the correct dtype
    return np.array(res, dtype=int)
# TODO: spatial clustering
# TODO: init with ICA
# TODO: a "greedy" method might also work here:
# take the first image at random, or conversely the "most active" one, i.e.
# the one with the highest energy. Then subtract a suitably chosen multiple
# of it from all the others and compute the "residuals". From the residuals
# again take the one with the highest energy, and so on. The subtraction can
# be algebraic or logical; in either case keep only the positive values.
def initialise_weights_random(nb_imgs, nb_lbs, ratio_sel=0.2):
    """Randomly initialise binary weights: each entry is 1 with prob. ratio_sel.

    :param nb_imgs: int, number of images (rows)
    :param nb_lbs: int, number of labels/patterns (columns)
    :param ratio_sel: float, selection probability per entry
    :return: float ndarray (nb_imgs, nb_lbs) of 0/1 values
    """
    logger.debug('initialise weights for {} images and {} labels '
                 'as random selection'.format(nb_imgs, nb_lbs))
    prob = np.random.random((nb_imgs, nb_lbs))
    # binarise the sampled probabilities by the selection ratio
    return (prob <= ratio_sel).astype(float)
def compute_relative_penaly_images_weights(imgs, weights):
    """Compute per-pixel unary costs from images and their pattern weights.

    For each pixel and each label the cost is the absolute difference between
    the label's weight (with an implicit background label 0 prepended) and the
    pixel value, averaged over all images.

    :param imgs: list of 2D ndarrays, all of the same shape
    :param weights: 2D ndarray (nb_images, nb_labels)
    :return: ndarray of shape imgs[0].shape + (nb_labels + 1,)
    """
    logger.info('compute unary cost from images and related weights')
    assert len(imgs) == weights.shape[0]
    # extend the weights by the background label with value 0
    weights_ext = np.append(np.zeros((weights.shape[0], 1)), weights, axis=1)
    # vectorised over all pixels and images (replaces the former triple loop,
    # resolving the "make it as matrix ops" TODO)
    imgs_stack = np.asarray(imgs, dtype=float)  # (nb_imgs, h, w)
    # broadcast (nb_imgs, h, w, 1) against (nb_imgs, 1, 1, nb_lbs)
    cost = np.abs(imgs_stack[..., np.newaxis]
                  - weights_ext[:, np.newaxis, np.newaxis, :])
    pott_sum_norm = cost.sum(axis=0) / float(len(imgs))
    return pott_sum_norm
def compute_positive_cost_images_weights(imgs, weights):
    """Compute per-pixel unary potentials by counting active patterns.

    NOTE: kept for reference; no longer used by the pipeline.

    :param imgs: list of binary 2D ndarrays
    :param weights: binary ndarray (nb_images, nb_labels)
    :return: ndarray imgs[0].shape + (nb_labels + 1,)
    """
    logger.info('compute unary cost from images and related weights')
    w_idx = ptnWeight.convert_weights_binary2indexes(weights)
    nb_lbs = weights.shape[1] + 1
    assert len(imgs) == len(w_idx)
    pott_sum = np.zeros(imgs[0].shape + (nb_lbs,))
    # walk over all pixels in each image
    logger.debug('... walk over all pixels in each image')
    for i in range(pott_sum.shape[0]):
        for j in range(pott_sum.shape[1]):
            # per all images in the list
            for k in range(len(imgs)):
                # if the pixel is active, increment every pattern of image k
                if imgs[k][i, j] == 1:
                    # 'lb' instead of the former 'id' (shadowed the builtin)
                    for lb in w_idx[k]:
                        pott_sum[i, j, lb] += 1
            # set also the background value
            pott_sum[i, j, 0] = DEFAULT_UNARY_BACKGROUND
    return pott_sum
def get_edges_image_plane(im_size):
    """Build 4-connectivity edges between pixel indices of an image grid.

    :param im_size: tuple (height, width)
    :return: ndarray (nb_edges, 2) of index pairs, horizontal edges first,
        then vertical edges
    """
    # np.product was removed in NumPy 2.0; np.prod is the canonical name
    idxs = np.arange(np.prod(im_size)).reshape(im_size)
    # horizontal neighbours followed by vertical neighbours
    e_start = idxs[:, :-1].ravel().tolist() + idxs[:-1, :].ravel().tolist()
    e_end = idxs[:, 1:].ravel().tolist() + idxs[1:, :].ravel().tolist()
    edges = np.array([e_start, e_end]).transpose()
    logger.debug('edges for image plane are shape {}'.format(edges.shape))
    return edges
def estimate_atlas_graphcut_simple(imgs, encodding, coef=1.):
    """Estimate the atlas labelling with GraphCut on a grid Potts model.

    :param imgs: list of binary 2D ndarrays
    :param encodding: binary weights ndarray (nb_images, nb_labels)
    :param coef: float, pairwise (smoothness) coefficient
    :return: 2D ndarray of labels with the image shape
    """
    logger.info('estimate atlas via GraphCut from Potts model')
    # source: https://github.com/yujiali/pygco
    from src.wrappers.GraphCut.pygco import cut_grid_graph_simple
    labeling_sum = compute_positive_cost_images_weights(imgs, encodding)
    # negate so that higher counts become lower (better) unary costs
    unary_cost = np.array(-1 * labeling_sum, dtype=np.int32)
    logger.debug('graph unaries potentials {}: \n{}'.format(
        unary_cost.shape, np.histogram(unary_cost, bins=10)))
    # Potts pairwise term: zero on the diagonal, coef everywhere else
    pairwise_cost = np.array((1 - np.eye(labeling_sum.shape[-1])) * coef,
                             dtype=np.int32)
    logger.debug('graph pairwise coefs {}'.format(pairwise_cost.shape))
    # run GraphCut and fold the flat labels back onto the image plane
    labels = cut_grid_graph_simple(unary_cost, pairwise_cost,
                                   algorithm='expansion')
    labels = labels.reshape(labeling_sum.shape[:2])
    logger.debug('resulting labelling {}: \n{}'.format(labels.shape, labels))
    return labels
def estimate_atlas_graphcut_general(imgs, encoding, coef=1., init_lbs=None):
    """Estimate the atlas labelling with GraphCut on a general Potts graph.

    :param imgs: list of binary 2D ndarrays
    :param encoding: binary weights ndarray (nb_images, nb_labels)
    :param coef: float, pairwise (smoothness) coefficient
    :param init_lbs: optional initial labelling for the optimiser
    :return: 2D ndarray of labels with the image shape
    """
    logger.info('estimate atlas via GraphCut from Potts model')
    # source: https://github.com/yujiali/pygco
    from src.wrappers.GraphCut.pygco import cut_general_graph
    u_cost = compute_relative_penaly_images_weights(imgs, encoding)
    # flatten the per-pixel unary costs into (nb_pixels, nb_labels)
    unary_cost = np.array(u_cost, dtype=np.float64)
    unary_cost = unary_cost.reshape(-1, u_cost.shape[-1])
    logger.debug('graph unaries potentials {}: \n{}'.format(
        unary_cost.shape, np.histogram(unary_cost, bins=10)))
    edges = get_edges_image_plane(u_cost.shape[:2])
    logger.debug('edges for image plane are shape {}'.format(edges.shape))
    edge_weights = np.ones(edges.shape[0])
    logger.debug('edges weights are shape {}'.format(edge_weights.shape))
    # Potts pairwise term: zero on the diagonal, coef everywhere else
    pairwise_cost = np.array((1 - np.eye(u_cost.shape[-1])) * coef,
                             dtype=np.float64)
    logger.debug('graph pairwise coefs {}'.format(pairwise_cost.shape))
    # start from the per-pixel minimal unary cost unless an init is provided
    init_lbs = (np.argmin(unary_cost, axis=1) if init_lbs is None
                else init_lbs.ravel())
    logger.debug('graph initial labels {}'.format(init_lbs.shape))
    # run GraphCut and fold the flat labels back onto the image plane
    labels = cut_general_graph(edges, edge_weights, unary_cost, pairwise_cost,
                               algorithm='expansion', init_labels=init_lbs)
    labels = labels.reshape(u_cost.shape[:2])
    logger.debug('resulting labelling {}'.format(labels.shape))
    return labels
def export_visualization_image(img, i, out_dir, prefix='debug', name='',
                               ration=None, labels=None):
    """Save a matplotlib visualisation of the given image/matrix.

    :param img: 2D array to display
    :param i: int iteration number, encoded in the file name
    :param out_dir: str output directory
    :param prefix: str file-name prefix
    :param name: str visualisation name used in the file name
    :param ration: aspect ratio passed to imshow (e.g. 'auto')
    :param labels: pair [x_label, y_label] of axis labels
    """
    # a mutable list default is a shared-state hazard; use None as sentinel
    if labels is None:
        labels = ['', '']
    fig = plt.figure()
    plt.imshow(img, interpolation='none', aspect=ration)
    plt.xlabel(labels[0]), plt.ylabel(labels[1])
    p_out = os.path.join(out_dir, '{}_ALPE_{}_iter_{:04d}.png'.format(prefix,
                                                                      name, i))
    logger.debug('.. export Vusialization as "{}"'.format(p_out))
    fig.savefig(p_out, bbox_inches='tight', pad_inches=0.05)
    plt.close()
def export_visual_atlas(i, out_dir, atlas=None, weights=None, prefix='debug'):
    """Export debug visualisations of the atlas and/or weights.

    Only active when the logger runs at DEBUG level; silently skips a missing
    output directory.
    """
    # exporting is a debugging aid only
    if logger.getEffectiveLevel() != logging.DEBUG:
        return None
    if not os.path.exists(out_dir):
        logger.debug('output path "{}" does not exist'.format(out_dir))
        return None
    if atlas is not None:
        export_visualization_image(atlas, i, out_dir, prefix, 'atlas',
                                   labels=['X', 'Y'])
    if weights is not None:
        export_visualization_image(weights, i, out_dir, prefix, 'weights',
                                   'auto', ['patterns', 'images'])
    return None
def pipeline_estim_atlas_learning_ptn_weights(imgs, init_atlas=None,
        init_weights=None, c_compact=0.0, thr_step_diff=0.0, max_iter=99,
        stop=True, reinit=True, out_prefix='debug', out_dir=''):
    """Alternate estimation of an atlas and per-image pattern weights.

    Iterates: (1) derive binary weights per image from the current atlas,
    (2) re-estimate the atlas by GraphCut; stops when the adjusted-Rand
    difference between consecutive atlases drops below ``thr_step_diff``.

    :param imgs: list of 2D binary images
    :param init_atlas: optional initial atlas labelling
    :param init_weights: optional initial binary weights (images x patterns)
    :param c_compact: float, compactness (pairwise) coefficient for GraphCut
    :param thr_step_diff: float, stopping threshold on the atlas change
    :param max_iter: int, maximal number of iterations
    :param stop: bool, whether to apply the stopping criterion
    :param reinit: bool, reuse the previous atlas to initialise GraphCut
    :param out_prefix: str, prefix for exported debug images
    :param out_dir: str, output directory for debug images
    :return: tuple (atlas, w_bins) — final atlas and binary weights
    """
    logger.info('compute an Atlas and perform images w_bins')
    assert len(imgs) >= 0
    # assert initAtlas is not None or type(max_nb_lbs)==int
    # initialise: derive the atlas from the weights if only weights are given
    if init_weights is not None and init_atlas is None:
        logger.debug('... initialise Atlas from w_bins')
        init_atlas = estimate_atlas_graphcut_general(imgs, init_weights, 0.)
        export_visual_atlas(0, out_dir, weights=init_weights, prefix=out_prefix)
    if init_atlas is None:
        # heuristic label count: sqrt of the number of images
        max_nb_lbs = int(np.sqrt(len(imgs)))
        logger.debug('... initialise Atlas with ')
        # TODO: find better way of initialisation
        init_atlas = initialise_atlas_mosaic(imgs[0].shape, max_nb_lbs)
        export_visual_atlas(0, out_dir, atlas=init_atlas, prefix=out_prefix)
    atlas = init_atlas
    w_bins = init_weights
    # a single-label atlas cannot drive the weight estimation
    if len(np.unique(atlas)) == 1:
        logger.info('ERROR: the atlas does not contain '
                    'any label... {}'.format(np.unique(atlas)))
    export_visual_atlas(0, out_dir, atlas, w_bins, prefix=out_prefix)
    for i in range(max_iter):
        if len(np.unique(atlas)) == 1:
            logger.info('ERROR: the atlas does not contain '
                        'any label... {}'.format(np.unique(atlas)))
        # update w_bins from the current atlas
        logger.debug('... perform pattern Weights')
        # w_bins = [ptnWeight.weighs_image_atlas_overlap_major(img, atlas)
        #           for img in imgs]
        w_bins = [ptnWeight.weighs_image_atlas_overlap_partial(img, atlas)
                  for img in imgs]
        # add once for patterns that are not used at all
        w_bins = ptnWeight.fill_empty_patterns(np.array(w_bins))
        if np.sum(w_bins) == 0:
            logger.info('ERROR: the w_bins is empty... {}'.format(np.unique(atlas)))
        # update atlas by GraphCut, optionally warm-started from the old atlas
        logger.debug('... perform Atlas estimation')
        # atlasNew = estimate_atlas_graphcut_simple(imgs, w_bins)
        if reinit:
            atlasNew = estimate_atlas_graphcut_general(imgs, w_bins,
                                                       c_compact, atlas)
        else:
            atlasNew = estimate_atlas_graphcut_general(imgs, w_bins, c_compact)
        # measure the change between consecutive atlases
        step_diff = simMetric.compare_atlas_adjusted_rand(atlas, atlasNew)
        logger.info('-> iter. #{} with rltv Atlas diff {}'.format(i+1, step_diff))
        atlas = atlasNew
        export_visual_atlas(i + 1, out_dir, atlas, w_bins, prefix=out_prefix)
        # stopping criterion on the atlas change
        if stop and step_diff <= thr_step_diff:
            logger.info('>> exiting while theatlas diff {} is '
                        'smaller then {}'.format(step_diff, thr_step_diff))
            break
    return atlas, w_bins
# TODO: Matching Pursuit
# http://scikit-learn.org/stable/modules/generated/sklearn.linear_model.OrthogonalMatchingPursuit.html
# It seems it should be fairly easy to implement a Matching Pursuit variant
# where each iteration maximises the scalar product itself rather than its
# absolute value; that eliminates negative values. If coefficients strictly
# in the interval [0, 1] were required, one would determine per pattern the
# approximation error under that constraint — but that seems unnecessary here.
# Other: http://winsty.net/onndl.html
if __name__ == "__main__":
    # smoke entry point: enable verbose logging and report completion
    logging.basicConfig(level=logging.DEBUG)
    logger.info('DONE')
\ No newline at end of file
import numpy as np
import logging
logger = logging.getLogger(__name__)
def convert_weights_binary2indexes(weights):
    """Convert binary weight rows to per-image arrays of active label indexes.

    Indexes are 1-based because label 0 is reserved for the background.

    :param weights: binary ndarray (nb_images, nb_labels)
    :return: list of 1D int ndarrays, one per image
    """
    logger.debug('convert binary weights {} '
                 'to list of indexes with True'.format(weights.shape))
    # one pass per row: positions of the 1s, shifted past the background label
    return [np.where(row == 1)[0] + 1 for row in weights]
def weighs_image_atlas_overlap_major(img, atlas):
    """Binary weights by majority overlap: fixed threshold of 0.5."""
    return weighs_image_atlas_overlap_threshold(img, atlas, 0.5)
def weighs_image_atlas_overlap_partial(img, atlas):
    """Binary weights with threshold 1/nb_labels — partial overlap suffices."""
    nb_lbs = len(np.unique(atlas).tolist())
    return weighs_image_atlas_overlap_threshold(img, atlas, 1. / nb_lbs)
def weighs_image_atlas_overlap_threshold(img, atlas, thr=0.5):
# logger.debug('weights input image according given atlas')
# simple weight
lbs = np.unique(atlas).tolist()
# logger.debug('weights image by atlas with labels: {}'.format(lbs))
i