Commit d6bbed61 authored by Jiri Borovec's avatar Jiri Borovec

refactoring and add basic visualization of unary costs

parent 44efb410
__author__ = 'Jiri Borovec'
import os, sys
import numpy as np
import generateDataset
import patternWeights as ptnWeight
import similarityMetric as simMetric
import matplotlib.pyplot as plt
DEFAULT_UNARY_BACKGROUND = 1
import logging
logger = logging.getLogger(__name__)
def initialiseAtlas_random (imSize, maxLb) :
logger.debug('initialise atlas {} as random labeling'.format(imSize))
nbLbs = maxLb + 1
im = np.random.randint(0, nbLbs, imSize)
return np.array(im, dtype=np.int)
def initialiseAtlas_mosaic (imSize, maxLb) :
logger.debug('initialise atlas {} as grid labeling'.format(imSize))
nbLbs = maxLb + 1
block = np.ones( np.ceil(imSize/np.array(nbLbs, dtype=np.float)) )
logger.debug('block size is {}'.format(block.shape))
for l in range(nbLbs) :
idx = np.random.permutation( range(nbLbs) )
for k in range(nbLbs) :
b = block.copy() * idx[k]
if k == 0 : row = b
else : row = np.hstack( (row, b) )
if l == 0 : mosaic = row
else : mosaic = np.vstack( (mosaic, row) )
logger.debug('generated mosaic {} with labeling {}'.format(mosaic.shape, np.unique(mosaic).tolist()))
im = mosaic[:imSize[0], :imSize[1]]
return np.array(im, dtype=np.int)
def initialiseAtlas_deformOriginal (atlas) :
logger.debug('initialise atlas by deforming original one')
res = generateDataset.deformImageElastic(atlas)
return np.array(res, dtype=np.int)
# TODO: spatial clustering
# TODO: init with ICA
# TODO: perhaps this could be done with a "greedy" method:
# take the first image at random, or conversely the "most active" one, i.e. the one with the largest energy. Then subtract a
# suitably chosen multiple of it from all the others and compute the "residuals". From the residuals again take the one with the
# largest energy, and so on. The subtraction can be algebraic or logical; in either case only positive values would be kept.
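# A minimal sketch of the greedy initialisation above; the function name, the
# energy measure (sum of remaining foreground) and the 50% threshold are
# assumptions, not part of the original code.
def initialiseAtlas_greedyResiduals (imgs, maxLb) :
    logger.debug('initialise atlas greedily from {} images'.format(len(imgs)))
    residuals = [np.array(im, dtype=np.float) for im in imgs]
    atlas = np.zeros(imgs[0].shape, dtype=np.int)
    for lb in range(1, maxLb + 1) :
        # take the residual with the largest energy
        energies = [np.sum(r) for r in residuals]
        k = int(np.argmax(energies))
        if energies[k] <= 0 : break
        # threshold the selected residual into a binary pattern
        ptn = residuals[k] > (0.5 * residuals[k].max())
        atlas[ptn] = lb
        # "logical" subtraction: remove the pattern support from all residuals
        for r in residuals :
            r[ptn] = 0
    return atlas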
def initialiseWeights_random (nbImgs, nbLbs, ratioSel=0.2) :
logger.debug('initialise weights for {} images and {} labels as random selection'.format(nbImgs, nbLbs))
prob = np.random.random( (nbImgs, nbLbs) )
weights = np.zeros_like(prob)
weights[prob <= ratioSel] = 1
return weights
def computeRelativePenalyFromImagesWeights (imgs, weights) :
logger.info('compute unary cost from images and related weights')
# weightsIdx = ptnWeight.convertWeightsBinary2indexes(weights)
nbLbs = weights.shape[1] + 1
assert len(imgs) == weights.shape[0]
pottSum = np.zeros( imgs[0].shape + (nbLbs,) )
# extend the weights by the background value 0
weightsExt = np.append(np.zeros((weights.shape[0], 1)), weights, axis=1)
# logger.debug(weightsExt)
# walk over all pixels in the image
logger.debug('... walk over all pixels in each image')
# TODO: make it as matrix ops
for i in range(pottSum.shape[0]) :
for j in range(pottSum.shape[1]) :
# per all images in list
for k in range(len(imgs)) :
cost = abs( weightsExt[k] - imgs[k][i,j] )
# logger.debug(cost)
pottSum[i,j] += cost
pottSumNorm = pottSum / float(len(imgs))
return pottSumNorm
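# A vectorised sketch of the per-pixel loop above (the TODO about matrix ops);
# it assumes all images share one shape and builds a temporary array of shape
# (nbImgs, h, w, nbLbs), trading memory for speed.
def computeRelativePenalyFromImagesWeights_vec (imgs, weights) :
    weightsExt = np.append(np.zeros((weights.shape[0], 1)), weights, axis=1)
    imgsStack = np.array(imgs, dtype=np.float)
    # broadcast |weight - pixel| over all pixels and labels, average over images
    diff = np.abs(weightsExt[:, None, None, :] - imgsStack[..., None])
    return diff.mean(axis=0)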
def computePositiveCostFromImagesWeights (imgs, weights) :
# not used any more...
logger.info('compute unary cost from images and related weights')
weightIdx = ptnWeight.convertWeightsBinary2indexes(weights)
nbLbs = weights.shape[1] + 1
assert len(imgs)==len(weightIdx)
pottSum = np.zeros( imgs[0].shape + (nbLbs,) )
# walk over all pixels in the image
logger.debug('... walk over all pixels in each image')
for i in range(pottSum.shape[0]) :
for j in range(pottSum.shape[1]) :
# per all images in list
for k in range(len(imgs)) :
# if pixel is active
if imgs[k][i,j] == 1 :
# increment all possible spots
for id in weightIdx[k] :
pottSum[i,j,id] += 1
# else :
# graphSum[i,j,0] += 1e-9
# set also the background values
pottSum[i,j,0] = DEFAULT_UNARY_BACKGROUND
# graph = 1. / (graphSum +1)
return pottSum
def getEdgesImagePlane (imSize) :
idxs = np.arange( np.product(imSize) )
idxs = idxs.reshape(imSize)
# logger.debug(idxs)
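# e.g. for imSize=(2, 2): idxs=[[0, 1], [2, 3]], giving horizontal pairs
# (0,1), (2,3) and vertical pairs (0,2), (1,3), i.e. 4-connectivity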
eA = idxs[:, :-1].ravel().tolist() + idxs[:-1, :].ravel().tolist()
eB = idxs[:, 1:].ravel().tolist() + idxs[1:, :].ravel().tolist()
edges = np.array( [eA, eB] ).transpose()
logger.debug('edges for image plane are shape {}'.format(edges.shape))
return edges
def estimateAtlasGraphCut_simple (imgs, encodding, pCoef=1.) :
logger.info('estimate atlas via GraphCut from Potts model')
# source: https://github.com/yujiali/pygco
from src.wrappers.GraphCut.pygco import cut_grid_graph_simple
labelingSum = computePositiveCostFromImagesWeights(imgs, encodding)
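# GraphCut minimises the total cost, so the positive overlap counts are
# negated: frequently supported labels become the cheapest ones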
unary_cost = np.array( -1 * labelingSum , dtype=np.int32)
logger.debug('graph unaries potentials {}: \n{}'.format(unary_cost.shape, np.histogram(unary_cost, bins=10)))
# original and the right way..
pairwise = (1 - np.eye(labelingSum.shape[-1])) * pCoef
pairwise_cost = np.array( pairwise , dtype=np.int32)
logger.debug('graph pairwise coefs {}'.format(pairwise_cost.shape))
# run GraphCut
labels = cut_grid_graph_simple(unary_cost, pairwise_cost, algorithm='expansion')
# reshape labels
labels = labels.reshape( labelingSum.shape[:2] )
logger.debug('resulting labelling {}: \n{}'.format(labels.shape, labels))
return labels
def estimateAtlasGraphCut_general (imgs, encodding, pCoef=1., initLbs=None) :
logger.info('estimate atlas via GraphCut from Potts model')
# source: https://github.com/yujiali/pygco
from src.wrappers.GraphCut.pygco import cut_general_graph
unaryCost = computeRelativePenalyFromImagesWeights(imgs, encodding)
# unaryCost = 1. / (labelingSum +1)
unary_cost = np.array( unaryCost , dtype=np.float64)
unary_cost = unary_cost.reshape( -1, unaryCost.shape[-1] )
logger.debug( 'graph unaries potentials {}: \n{}'.format(unary_cost.shape, np.histogram(unary_cost, bins=10)) )
edges = getEdgesImagePlane( unaryCost.shape[:2] )
logger.debug('edges for image plane are shape {}'.format(edges.shape))
edge_weights = np.ones( edges.shape[0] )
logger.debug('edges weights are shape {}'.format(edge_weights.shape))
# original and the right way...
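# Potts prior: zero cost for equal neighbouring labels, pCoef otherwise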
pairwise = (1 - np.eye(unaryCost.shape[-1])) * pCoef
pairwise_cost = np.array( pairwise , dtype=np.float64)
logger.debug('graph pairwise coefs {}'.format(pairwise_cost.shape) )
if initLbs is None :
initLbs = np.argmin(unary_cost, axis=1)
else :
initLbs = initLbs.ravel()
logger.debug('graph initial labels {}'.format(initLbs.shape))
# run GraphCut
labels = cut_general_graph(edges, edge_weights, unary_cost, pairwise_cost, algorithm='expansion', init_labels=initLbs)
# reshape labels
labels = labels.reshape( unaryCost.shape[:2] )
logger.debug('resulting labelling {}'.format(labels.shape))
return labels
def exportVisualizationImage (img, i, outDir, prefix='debug', name='', ration=None, labels=['','']) :
fig = plt.figure()
plt.imshow(img, interpolation='none', aspect=ration)
plt.xlabel(labels[0]), plt.ylabel(labels[1])
pOut = os.path.join(outDir,'{}_ALPE_{}_iter_{:04d}.png'.format(prefix, name, i))
logger.debug('.. export visualization as "{}"'.format(pOut))
fig.savefig(pOut)
plt.close()
def exportVisualAtlas (i, outDir, atlas=None, weights=None, prefix='debug') :
if logger.getEffectiveLevel()==logging.DEBUG :
if not os.path.exists(outDir) :
logger.debug('output path "{}" does not exist'.format(outDir))
return None
if atlas is not None :
exportVisualizationImage(atlas, i, outDir, prefix, 'atlas', labels=['X','Y'])
if weights is not None :
exportVisualizationImage(weights, i, outDir, prefix, 'weights', 'auto', ['patterns', 'images'])
return None
def pipeline_estimAtlasLearningPatternWeights (imgs, initAtlas=None, initWeights=None, coefCompact=0.0, thStepDiff=0.0, maxIter=99, stop=True, reInit=True, outPrefix='debug', outDir='') :
logger.info('compute an Atlas and estimate the binary pattern weights of images according to this Atlas')
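# the pipeline alternates two steps until the atlas stabilises:
# (1) re-estimate each image's binary pattern weights from the current atlas,
# (2) re-estimate the atlas from the images and the new weights via GraphCut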
assert len(imgs) > 0
# assert initAtlas is not None or type(maxNbLbs)==int
# initialise
if initWeights is not None and initAtlas is None :
logger.debug('... initialise Atlas from binWeights')
initAtlas = estimateAtlasGraphCut_general(imgs, initWeights, 0.)
exportVisualAtlas (0, outDir, weights=initWeights, prefix=outPrefix)
if initAtlas is None :
maxNbLbs = int( np.sqrt(len(imgs)) )
logger.debug('... initialise Atlas as mosaic with {} labels'.format(maxNbLbs))
# TODO: find better way of initialisation
initAtlas = initialiseAtlas_mosaic(imgs[0].shape, maxNbLbs)
exportVisualAtlas (0, outDir, atlas=initAtlas, prefix=outPrefix)
atlas = initAtlas
binWeights = initWeights
if len(np.unique(atlas)) == 1 :
logger.info('ERROR: the atlas contains only a single label... {}'.format(np.unique(atlas)))
exportVisualAtlas (0, outDir, atlas, binWeights, prefix=outPrefix)
for i in range(maxIter):
if len(np.unique(atlas)) == 1 :
logger.info('ERROR: the atlas contains only a single label... {}'.format(np.unique(atlas)))
# update binWeights
logger.debug('... update pattern weights')
# binWeights = np.array([ptnWeight.weighsImageByAtlas_overlapMajor(img, atlas) for img in imgs])
binWeights = np.array([ptnWeight.weighsImageByAtlas_overlapPartial(img, atlas) for img in imgs])
# ensure that patterns which are not used at all get assigned at least once
binWeights = ptnWeight.fillEmptyPatterns(binWeights)
if np.sum(binWeights) == 0 :
logger.info('ERROR: the binWeights is empty... {}'.format(np.unique(atlas)))
# update atlas
logger.debug('... perform Atlas estimation')
# atlasNew = estimateAtlasGraphCut_simple(imgs, binWeights)
if reInit :
atlasNew = estimateAtlasGraphCut_general(imgs, binWeights, coefCompact, atlas)
else :
atlasNew = estimateAtlasGraphCut_general(imgs, binWeights, coefCompact)
stepDiff = simMetric.compareAtlas_adjustedRand(atlas,atlasNew)
logger.info('-> iteration #{} with relative Atlas difference {}'.format(i+1, stepDiff))
atlas = atlasNew
exportVisualAtlas (i+1, outDir, atlas, binWeights, prefix=outPrefix)
# stopping criterion
if stop and stepDiff <= thStepDiff :
logger.info('>> exiting since the atlas diff {} is smaller than {}'.format(stepDiff, thStepDiff))
break
return atlas, binWeights
# TODO: Matching Pursuit
# http://scikit-learn.org/stable/modules/generated/sklearn.linear_model.OrthogonalMatchingPursuit.html
# it seems it should be quite easy to program a variant of Matching Pursuit where in each iteration one maximises
# the plain scalar product rather than its absolute value, which removes the negative values. If coefficients strictly
# within the interval [0, 1] were required, the approximation error under this constraint could be computed for each
# pattern; but for our case that seems unnecessary.
# Other: http://winsty.net/onndl.html
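# A minimal sketch of the Matching Pursuit variant described above, maximising
# the plain scalar product instead of its absolute value; the function name,
# the fixed iteration count and the stopping rule are assumptions.
def matchingPursuit_nonNegative (img, patterns, nbIter=10) :
    residual = np.array(img, dtype=np.float).ravel()
    atoms = np.array([np.asarray(p, dtype=np.float).ravel() for p in patterns])
    coefs = np.zeros(len(patterns))
    for _ in range(nbIter) :
        # pick the atom with the largest (signed) scalar product
        products = atoms.dot(residual)
        k = int(np.argmax(products))
        if products[k] <= 0 : break
        c = products[k] / np.sum(atoms[k] ** 2)
        coefs[k] += c
        residual -= c * atoms[k]
    return coefs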
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logger.info('DONE')
\ No newline at end of file
__author__ = 'Jiri Borovec'
import os, sys
import random, glob
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import ndimage
# from PIL import Image
from skimage import io, draw, transform, filters
sys.path.append(os.path.abspath(os.path.join('..','..'))) # Add path to root
import src.ownUtils.toolDataIO as tD
# DEFAULT_PATH_DATA = '/datagrid/Medical/microscopy/drosophila_segmOvary/'
DEFAULT_PATH_DATA = '/datagrid/temporary/Medical/'
# DEFAULT_PATH_DATA = 'C:\Users\IBM_ADMIN\Desktop'
# DEFAULT_PATH_DATA = '/home/jirka/TEMP/'
DEFAULT_DIR_APD = 'atomicPatternDictionary'
DEFAULT_PATH_APD = os.path.join(DEFAULT_PATH_DATA,DEFAULT_DIR_APD)
DEFAULT_PATH_TEMP = '/datagrid/temporary/Medical'
# DEFAULT_IM_SIZE = (512, 512)
# DEFAULT_IM_SIZE = (256, 256)
DEFAULT_IM_SIZE = (64, 64)
DEFAULT_NB_PTNS = 15
# DEFAULT_NB_SPLS = 1500
DEFAULT_NB_SPLS = 500
DEFAULT_DIR_DICT = 'dictionary'
DEFAULT_IM_POSIX = '.png'
DEFAULT_IM_PATTERN = 'pattern_{:03d}'
DEFAULT_IM_SEGM = 'sample_{:05d}'
import logging
logger = logging.getLogger(__name__)
# TODO:
# We assume several levels of difficulty:
# 1. simple combination of patterns
# 2. combination with elastic deformation
# 3. simple combination with noise in F/B probability
# 4. both 2) and 3)
def deformImageElastic (im, coef=0.5, gridSize=(20, 20)) :
logger.debug('deform image plane by elastic transform with grid {}'.format(gridSize))
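# jitter a regular grid of control points by up to +/- 0.5*coef of the grid
# cell size and warp the image with the resulting piecewise-affine transform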
# logger.debug(im.shape)
rows, cols = np.meshgrid(np.linspace(0, im.shape[0], gridSize[0]),
np.linspace(0, im.shape[1], gridSize[1]))
src = np.dstack([cols.flat, rows.flat])[0]
# logger.debug(src)
dst = src.copy()
for i in [0,1] :
dst[:,i] += (np.random.random((src.shape[0],1)) -0.5)[:,0] * (im.shape[i]/gridSize[i]*coef)
dst = filters.gaussian_filter(dst, 0.1)
# logger.debug(dst)
tform = transform.PiecewiseAffineTransform()
tform.estimate(src, dst)
img = transform.warp(im, tform, output_shape=im.shape, order=0, cval=im[0,0])
img = np.array(255 * img, dtype=np.uint8)  # uint8: values up to 255 overflow int8
return img
def drawEllipse (ratio=0.1, img=None, clr=255, imSize=DEFAULT_IM_SIZE) :
logger.debug('draw an ellipse to an image with value {}'.format(clr))
if img is None :
img = np.zeros(imSize, dtype=np.uint8)
imSize = img.shape
c, r = [0,0], [0,0]
for i in [0,1] :
size = imSize[i]
c[i] = random.randint( int(1.5*ratio*size), int((1.-1.5*ratio)*size) )
r[i] = random.randint( int(0.25*ratio*size), int(1.*ratio*size) )
r[i] += 0.03 * size
x, y = draw.ellipse(c[0], c[1], r[0], r[1])
img[x, y] = clr
# img = transform.rotate(img, angle=random.randint(0, 180), center=c, order=0, cval=img[0,0])
# img = transform.rotate(img, angle=random.randint(0, 180), center=np.array(imSize)/2, order=0, cval=img[0,0])
# TODO: add rotation
return img
def createCleanFolder (pDir) :
logger.info('create clean folder "{}"'.format(pDir))
if not os.path.exists(pDir) :
logger.debug('... creating new output folder: {}'.format(pDir))
os.mkdir(pDir)
else :
# remove all images
for p in glob.glob(os.path.join(pDir,'*')) :
os.remove(p)
return None
def extractImageLargestElement (im) :
labeled, nbObjects = ndimage.label(im)
areas = [(j, np.sum(labeled==j)) for j in np.unique(labeled)]
areas = sorted(areas, key=lambda x: x[1], reverse=True)
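# areas[0] is typically the background (label 0), so areas[1] is the
# largest real element and everything else gets removed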
logger.debug('... elements area: {}'.format(areas))
if len(areas) > 1 :
im[:] = 0
im[labeled==areas[1][0]] = 1
return im
def dictionary_generateAtlas (outPath=DEFAULT_PATH_APD, nb=DEFAULT_NB_PTNS, imSize=DEFAULT_IM_SIZE) :
logger.info('generate an Atlas composed from {} patterns and image size {}'.format(nb, imSize))
outDir = os.path.join(outPath,DEFAULT_DIR_DICT)
# clean the output folder before the first export (it may not exist yet)
createCleanFolder(outDir)
atlas = np.zeros(imSize, dtype=np.uint8)
for i in range(nb) :
atlas = drawEllipse(img=atlas, clr=i+1)
logger.debug(type(atlas))
atlas = deformImageElastic(atlas)
logger.debug(np.unique(atlas))
exportImage (outDir, atlas, 'atlas')
# in case run in DEBUG show atlas and wait till close
if logger.getEffectiveLevel() == logging.DEBUG :
logger.debug('labels: {}'.format(np.unique(atlas)))
plt.imshow(atlas)
plt.show()
atlasFinal = np.zeros(imSize, dtype=np.uint8)
# export the patterns to the dictionary (folder already cleaned above)
idx, lImgs = 0, []
logger.debug('... post-processing over generated patterns: {}'.format(np.unique(atlas).tolist()))
for id in np.unique(atlas) :
# skip the background label
if id == 0 : continue
im = np.zeros(imSize, dtype=np.uint8)
# create the binary pattern mask for this label
im[atlas==id] = 1
# remove all smaller unconnected elements
im = extractImageLargestElement(im)
if np.sum(im) == 0 : continue
lImgs.append(im)
exportImage (outDir, im, idx, DEFAULT_IM_PATTERN)
# add it to the final atlas
atlasFinal[im==1] = id
idx += 1
exportImage (outDir, atlasFinal, 'atlas')
return lImgs
def dictionary_GenerateRandomPattern (outPath=DEFAULT_PATH_APD, nb=DEFAULT_NB_PTNS, imSize=DEFAULT_IM_SIZE) :
logger.info('generate a Dictionary composed from {} patterns and image size {}'.format(nb, imSize))
outDir = os.path.join(outPath,DEFAULT_DIR_DICT)
createCleanFolder(outDir)
lImgs = []
for i in range(nb) :
im = drawEllipse(imSize=imSize)
im = deformImageElastic(im)
lImgs.append(im)
exportImage (outDir, im, i, DEFAULT_IM_PATTERN)
return lImgs
def datasetBinary_combinePatterns (imPtns, outDir, nb=DEFAULT_NB_SPLS, ptnRation=0.25) :
logger.info('generate a Binary dataset composed from {} samples with pattern occurrence ratio {}'.format(nb, ptnRation))
createCleanFolder(outDir)
dfCoding = pd.DataFrame()
imSpls = []
for i in range(nb) :
# random vector for thresholding
combine = np.random.random(len(imPtns)) < ptnRation
# if none is above the threshold, select one at random
if not any(combine) :
combine[np.random.randint(0,len(combine))] = True
logger.debug('combination vector is {}'.format(combine.tolist()))
dfCoding = dfCoding.append({'name': DEFAULT_IM_SEGM.format(i),
'combination': ';'.join(str(int(x)) for x in combine)},
ignore_index=True)
im = sum(np.asarray(imPtns)[combine])
# convert the sum to a union: set all values above 0 to 1
im[im>0.] = 1
imSpls.append(im)
exportImage (outDir, im, i)
dfCoding = dfCoding.set_index('name')
logger.debug(dfCoding)
return imSpls, dfCoding
def addImageBinaryNoise (im, ration=0.1) :
logger.debug('... add random noise to a binary image')
rnd = np.random.random(im.shape)
rnd = np.array(rnd < ration, dtype=np.int16)
imNoise = np.abs(np.asanyarray(im, dtype=np.int16) - rnd)
# plt.subplot(1,3,1), plt.imshow(im)
# plt.subplot(1,3,2), plt.imshow(rnd)
# plt.subplot(1,3,3), plt.imshow(im - rnd)
# plt.show()
return np.array(imNoise, dtype=np.int16)
def exportImage (outDir, im, imName, nTemplate=DEFAULT_IM_SEGM) :
if not type(imName)==str :
imName = nTemplate.format(imName)
pImg = os.path.join(outDir,imName+DEFAULT_IM_POSIX)
logger.debug('... saving pattern image "{}"'.format(pImg))
io.imsave(pImg, im/float(np.max(im))*1.)
# Image.fromarray(im).save(pImg)
return pImg
def datasetBinary_deformImages (imgs, outDir, coef=0.5) :
logger.info('generate a Binary dataset composed from {} samples and deformation coef. {}'.format(len(imgs), coef))
createCleanFolder(outDir)
imgsDef = []
for i, im in enumerate(imgs) :
imgsDef.append( deformImageElastic(im, coef) )
exportImage(outDir, imgsDef[-1], i)
return imgsDef
def dataset_addImageNiose (imgs, outDir, noiseFn, ratio) :
logger.info('generate a dataset composed from {} samples with noise ratio {}'.format(len(imgs), ratio))
createCleanFolder(outDir)
imgsNoise = []
for i, im in enumerate(imgs) :
imgsNoise.append( noiseFn(im, ratio) )
exportImage(outDir, imgsNoise[-1], i)
return imgsNoise
def imageTransformBinary2prob (im, coef=0.1) :
logger.debug('... transform binary image to probability')
imDist = ndimage.distance_transform_edt(im)
imDist -= ndimage.distance_transform_edt(1-im)
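# imDist is a signed distance: positive inside the object, negative outside;
# the logistic below maps it to a probability, e.g. with coef=0.1 a pixel
# 10 px inside gets 1/(1+exp(-1)) ~ 0.73 while a boundary pixel gets 0.5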
imProb = 1. / (1. + np.exp(-coef * imDist))
# plt.subplot(1,3,1), plt.imshow(im)
# plt.subplot(1,3,2), plt.imshow(imDist)
# plt.subplot(1,3,3), plt.imshow(imProb)
# plt.show()
return imProb
def addImageProbNoise (im, ration=0.1) :
logger.debug('... add smooth noise to a probability image')
rnd = 2* (np.random.random(im.shape) -0.5)
rnd[abs(rnd) > ration] = 0
imNoise = np.abs(im - rnd)
# plt.subplot(1,3,1), plt.imshow(im)
# plt.subplot(1,3,2), plt.imshow(rnd)
# plt.subplot(1,3,3), plt.imshow(im - rnd)
# plt.show()
return imNoise
def datasetProb_construct (imgs, outDir, coef=0.5) :
logger.info('generate a Probability dataset composed from {} samples and smoothness coef. {}'.format(len(imgs), coef))
createCleanFolder(outDir)
imgsProb = []
for i, im in enumerate(imgs) :
imgsProb.append( imageTransformBinary2prob(im, coef) )
exportImage(outDir, imgsProb[-1], i)
return imgsProb
def dataset_load (pDir, posix=DEFAULT_IM_POSIX) :
logger.info('load a dataset located in "{}" with samples of type "{}"'.format(pDir, posix))
pNames = glob.glob(os.path.join(pDir,'*'+posix))
pNames = sorted(pNames)
imgs = []
for pIm in pNames :
im = io.imread(pIm)
im = im / float(np.max(im))
imgs.append(im)
return imgs
def dataset_convertToNifti (inDir, outDir, posix=DEFAULT_IM_POSIX) :
logger.info('convert a dataset to Nifti')
pNames = glob.glob(os.path.join(inDir,'*'+posix))
createCleanFolder(outDir)
# pNames = sorted(pNames)
for pIm in pNames :
name = os.path.splitext(os.path.basename(pIm))[0]
pOut = os.path.join(outDir,name)
logger.debug('... converting "{}" -> "{}"'.format(pIm, pOut))
tD.convertImage_img2niftiGray(pIm, pOut)
return None
# def test_Ellipse (size=DEFAULT_IM_SIZE) :
# img = drawEllipse()
# img = deformImageElastic(img)
# plt.imshow(img)
# plt.show()
def generateAll (outPath=DEFAULT_PATH_APD) :
# imDict = dictionary_GenerateRandomPattern()
imDict = dictionary_generateAtlas()
imComb, dfComb = datasetBinary_combinePatterns(imDict, os.path.join(outPath, 'datasetBinary_raw'))
dfComb.to_csv(os.path.join(outPath,'combination.csv'))
imDeform = datasetBinary_deformImages(imComb, os.path.join(outPath, 'datasetBinary_deform'))
dataset_addImageNiose(imComb, os.path.join(outPath, 'datasetBinary_noise'), addImageBinaryNoise, 0.03)
dataset_addImageNiose(imDeform, os.path.join(outPath, 'datasetBinary_defNoise'), addImageBinaryNoise, 0.03)
imCombProb = datasetProb_construct(imComb, os.path.join(outPath, 'datasetProb_raw'))
imDefProb = datasetProb_construct(imDeform, os.path.join(outPath, 'datasetProb_deform'))
dataset_addImageNiose(imCombProb, os.path.join(outPath, 'datasetProb_noise'), addImageProbNoise, 0.2)
dataset_addImageNiose(imDefProb, os.path.join(outPath, 'datasetProb_defNoise'), addImageProbNoise, 0.2)
def convertDataset_Nifti (pDatasets=DEFAULT_PATH_APD) :
dataset_convertToNifti( os.path.join(pDatasets, 'datasetBinary_raw'), os.path.join(pDatasets, 'datasetBinary_raw_nifti') )
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logger.info('running...')
# test_Ellipse()
generateAll()
# convertDataset_Nifti()
logger.info('DONE')
__author__ = 'Jiri Borovec'
import os, sys
import numpy as np
import logging
logger = logging.getLogger(__name__)
def convertWeightsBinary2indexes (weights) :
logger.debug('convert binary weights {} to list of indexes with True'.format(weights.shape))
# if type(weights)==np.ndarray : weights = weights.tolist()
weighIdx = [None] * weights.shape[0]
for i in range(weights.shape[0]) :
# find positions equal to 1
# vec = [j for j in range(weights.shape[1]) if weights[i,j]==1]
vec = np.where(weights[i, :] == 1)[0]
weighIdx[i] = vec +1
# idxs = np.where(weights == 1)
# for i in range(weights.shape[0]) :
# weighIdx[i] = idxs[1][idxs[0]==i] +1
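# e.g. weights [[0,1,0],[1,1,0]] -> [array([2]), array([1, 2])];
# the +1 shift keeps index 0 reserved for the background label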
return weighIdx
def weighsImageByAtlas_overlapMajor (img, atlas) :
# logger.debug('weights input image according given atlas')
weights = weighsImageByAtlas_overlapTreshold(img, atlas, 0.5)
return weights
def weighsImageByAtlas_overlapPartial (img, atlas) :
# logger.debug('weights input image according given atlas')
lbs = np.unique(atlas).tolist()
weights = weighsImageByAtlas_overlapTreshold(img, atlas, (1. / len(lbs)))
return weights
def weighsImageByAtlas_overlapTreshold (img, atlas, thr=0.5) :
# logger.debug('weights input image according given atlas')