Commit b26ec07d authored by Jiri Borovec

examples

parent f4c973f5
@@ -4,6 +4,7 @@ import os, sys
import numpy as np
import generateDataset
import patternWeights as ptnWeight
import generateDataset as genAPD
import similarityMetric as simMetric
import matplotlib.pyplot as plt
@@ -12,12 +13,14 @@ DEFAULT_UNARY_BACKGROUND = 1
import logging
logger = logging.getLogger(__name__)

def initialiseAtlas_random (imSize, maxLb) :
    logger.debug('initialise atlas {} as random labeling'.format(imSize))
    nbLbs = maxLb + 1
    im = np.random.randint(0, nbLbs, imSize)
    return np.array(im, dtype=np.int)

def initialiseAtlas_mosaic (imSize, maxLb) :
    logger.debug('initialise atlas {} as grid labeling'.format(imSize))
    nbLbs = maxLb + 1
@@ -35,6 +38,7 @@ def initialiseAtlas_mosaic (imSize, maxLb) :
    im = mosaic[:imSize[0], :imSize[1]]
    return np.array(im, dtype=np.int)
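
# A minimal sketch (an assumption, since the body above is collapsed in the
# diff) of how such a grid/mosaic labeling can be built: tile constant-label
# blocks cyclically and crop to the requested image size.
def _sketch_mosaicAtlas(imSize, maxLb, block=10):
    nbLbs = maxLb + 1
    rows = int(np.ceil(imSize[0] / float(block)))
    cols = int(np.ceil(imSize[1] / float(block)))
    # label each block of the grid in a cyclic fashion
    grid = np.arange(rows * cols).reshape(rows, cols) % nbLbs
    mosaic = np.kron(grid, np.ones((block, block), dtype=int))
    return mosaic[:imSize[0], :imSize[1]]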

def initialiseAtlas_deformOriginal (atlas) :
    logger.debug('initialise atlas by deforming original one')
    res = generateDataset.deformImageElastic(atlas)
@@ -49,6 +53,7 @@ def initialiseAtlas_deformOriginal (atlas) :
# the chosen multiple and compute the "residuals". From the "residuals" I again take the one with the largest energy, etc. The subtraction can be algebraic
# or logical; in either case I would keep only positive values.
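
# A hedged sketch of the greedy idea in the comment above (a hypothetical
# helper, not part of this commit): repeatedly pick the pattern with the
# largest energy in the residual, subtract it, and keep only positive values.
def _sketch_initialiseWeights_greedy(img, ptnImgs):
    residual = img.astype(float)
    weights = np.zeros(len(ptnImgs))
    for _ in range(len(ptnImgs)):
        energies = [np.sum(residual * p) for p in ptnImgs]
        best = int(np.argmax(energies))
        if energies[best] <= 0 or weights[best] == 1:
            break
        weights[best] = 1
        residual = np.maximum(residual - ptnImgs[best], 0)  # keep positive values
    return weights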
def initialiseWeights_random (nbImgs, nbLbs, ratioSel=0.2) :
    logger.debug('initialise weights for {} images and {} labels as random selection'.format(nbImgs, nbLbs))
    prob = np.random.random( (nbImgs, nbLbs) )
@@ -56,6 +61,7 @@ def initialiseWeights_random (nbImgs, nbLbs, ratioSel=0.2) :
    weights[prob <= ratioSel] = 1
    return weights

def computeRelativePenalyFromImagesWeights (imgs, weights) :
    logger.info('compute unary cost from images and related weights')
    # weightsIdx = ptnWeight.convertWeightsBinary2indexes(weights)
@@ -78,6 +84,7 @@ def computeRelativePenalyFromImagesWeights (imgs, weights) :
    pottSumNorm = pottSum / float(len(imgs))
    return pottSumNorm
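
# A hedged sketch of the unary term this function appears to accumulate (the
# middle of the function is collapsed above, so this is an assumption): for
# every pixel and label, penalise disagreement between the binary image and
# the label being switched on/off by that image's weight vector, then
# normalise by the number of images.
def _sketch_relativePenalty(imgs, weights):
    h, w = imgs[0].shape
    nbLbs = weights.shape[1]
    pottSum = np.zeros((h, w, nbLbs))
    for im, ws in zip(imgs, weights):
        for lb in range(nbLbs):
            # penalty where the pixel value disagrees with the pattern weight
            pottSum[:, :, lb] += np.abs(im - ws[lb])
    return pottSum / float(len(imgs))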

def computePositiveCostFromImagesWeights (imgs, weights) :
    # not used any more...
    logger.info('compute unary cost from images and related weights')
@@ -103,6 +110,7 @@ def computePositiveCostFromImagesWeights (imgs, weights) :
    # graph = 1. / (graphSum +1)
    return pottSum

def getEdgesImagePlane (imSize) :
    idxs = np.arange( np.product(imSize) )
    idxs = idxs.reshape(imSize)
@@ -113,6 +121,7 @@ def getEdgesImagePlane (imSize) :
    logger.debug('edges for image plane are shape {}'.format(edges.shape))
    return edges
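
# A minimal self-contained sketch (an assumption for the collapsed middle of
# the function above) of 4-connected grid edges built from the index image:
# horizontal neighbours pair each pixel with the one to its right, vertical
# neighbours with the one below.
def _sketch_gridEdges(imSize):
    idxs = np.arange(np.product(imSize)).reshape(imSize)
    eHoriz = np.vstack([idxs[:, :-1].ravel(), idxs[:, 1:].ravel()]).T
    eVert = np.vstack([idxs[:-1, :].ravel(), idxs[1:, :].ravel()]).T
    return np.vstack([eHoriz, eVert])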

def estimateAtlasGraphCut_simple (imgs, encodding, pCoef=1.) :
    logger.info('estimate atlas via GraphCut from Potts model')
    # source: https://github.com/yujiali/pygco
@@ -135,6 +144,7 @@ def estimateAtlasGraphCut_simple (imgs, encodding, pCoef=1.) :
    return labels

def estimateAtlasGraphCut_general (imgs, encodding, pCoef=1., initLbs=None) :
    logger.info('estimate atlas via GraphCut from Potts model')
    # source: https://github.com/yujiali/pygco
@@ -148,7 +158,7 @@ def estimateAtlasGraphCut_general (imgs, encodding, pCoef=1., initLbs=None) :
     edges = getEdgesImagePlane( unaryCost.shape[:2] )
     logger.debug('edges for image plane are shape {}'.format(edges.shape))
-    edge_weights = np.ones( edges.shape[0] )
+    edge_weights = np.zeros( edges.shape[0] )
     logger.debug('edges weights are shape {}'.format(edge_weights.shape))
     # original and the right way...
@@ -170,6 +180,7 @@ def estimateAtlasGraphCut_general (imgs, encodding, pCoef=1., initLbs=None) :
    return labels
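
# A hedged usage sketch of the general graph cut, assuming the pygco wrapper
# linked above exposes cut_general_graph(edges, edge_weights, unary, pairwise)
# (the exact entry-point name is an assumption; check the installed wrapper).
# gco works on integer costs, hence the scaling before casting to int32.
def _sketch_graphCutPotts(unaryCost, edges, edge_weights, pCoef=1.):
    from pygco import cut_general_graph  # assumed entry point
    nbLbs = unaryCost.shape[-1]
    unary = (unaryCost.reshape(-1, nbLbs) * 1000).astype(np.int32)
    pairwise = (pCoef * 1000 * (1 - np.eye(nbLbs))).astype(np.int32)  # Potts
    return cut_general_graph(edges.astype(np.int32), edge_weights,
                             unary, pairwise, algorithm='expansion')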

def exportVisualizationImage (img, i, outDir, prefix='debug', name='', ration=None, labels=['','']) :
    fig = plt.figure()
    plt.imshow(img, interpolation='none', aspect=ration)
@@ -190,6 +201,7 @@ def exportVisualAtlas (i, outDir, atlas=None, weights=None, prefix='debug') :
        exportVisualizationImage(weights, i, outDir, prefix, 'weights', 'auto', ['patterns', 'images'])
    return None

def pipeline_estimAtlasLearningPatternWeights (imgs, initAtlas=None, initWeights=None, coefCompact=0.0, thStepDiff=0.0, maxIter=99, stop=True, reInit=True, outPrefix='debug', outDir='') :
    logger.info('compute an Atlas and estimate per-image binary weights according to this estimated Atlas')
    assert len(imgs) >= 0
...
@@ -10,6 +10,8 @@ from scipy import ndimage
from skimage import io, draw, transform, filters
sys.path.append(os.path.abspath(os.path.join('..','..'))) # Add path to root
import src.ownUtils.toolDataIO as tD
+import logging
+logger = logging.getLogger(__name__)
# DEFAULT_PATH_DATA = '/datagrid/Medical/microscopy/drosophila_segmOvary/'
DEFAULT_PATH_DATA = '/datagrid/temporary/Medical/'
@@ -28,15 +30,16 @@ DEFAULT_DIR_DICT = 'dictionary'
DEFAULT_IM_POSIX = '.png'
DEFAULT_IM_PATTERN = 'pattern_{:03d}'
DEFAULT_IM_SEGM = 'sample_{:05d}'
+DEFAULT_DATASET = 'datasetBinary_raw'
+DEFAULT_PATH_DATASET = os.path.join(DEFAULT_PATH_DATA,DEFAULT_DIR_APD)
-import logging
-logger = logging.getLogger(__name__)
# TODO:
# We assume several levels of difficulty:
# 3. simple combination with noise in F/B probability
# 4. both 2) and 3)

def deformImageElastic (im, coef=0.5, gridSize=(20, 20)) :
    logger.debug('deform image plane by elastic transform with grid {}'.format(gridSize))
    # logger.debug(im.shape)
@@ -55,6 +58,7 @@ def deformImageElastic (im, coef=0.5, gridSize=(20, 20)) :
    img = np.array(255 * img, dtype=np.int8)
    return img
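
# A hedged sketch of elastic-like deformation with the skimage tools imported
# above (an assumption for the collapsed body; it mirrors the scikit-image
# PiecewiseAffineTransform example): jitter a regular grid of control points
# and warp the image through a piecewise-affine transform.
def _sketch_deformElastic(im, coef=0.5, gridSize=(20, 20)):
    rows = np.linspace(0, im.shape[0], gridSize[0])
    cols = np.linspace(0, im.shape[1], gridSize[1])
    cc, rr = np.meshgrid(cols, rows)
    src = np.dstack([cc.flat, rr.flat])[0]
    # random shift of each control point, scaled by the deformation coef
    dst = src + coef * np.random.randn(*src.shape) * 5
    tform = transform.PiecewiseAffineTransform()
    tform.estimate(src, dst)
    return transform.warp(im, tform, output_shape=im.shape)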

def drawEllipse (ratio=0.1, img=None, clr=255, imSize=DEFAULT_IM_SIZE) :
    logger.debug('draw an ellipse to an image with value {}'.format(clr))
    if img is None :
@@ -73,6 +77,7 @@ def drawEllipse (ratio=0.1, img=None, clr=255, imSize=DEFAULT_IM_SIZE) :
    # TODO: add rotation
    return img
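
# A minimal sketch of the drawing step with skimage.draw (an assumption for
# the collapsed body above): a random centre and axes derived from the
# requested area ratio.
def _sketch_drawEllipse(ratio=0.1, clr=255, imSize=DEFAULT_IM_SIZE):
    img = np.zeros(imSize, dtype=np.uint8)
    r, c = [np.random.randint(s // 4, 3 * s // 4) for s in imSize]
    rAx = int(np.sqrt(ratio) * imSize[0] / 2)
    cAx = int(np.sqrt(ratio) * imSize[1] / 2)
    rr, cc = draw.ellipse(r, c, rAx, cAx, shape=img.shape)
    img[rr, cc] = clr
    return img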

def createCleanFolder (pDir) :
    logger.info('create clean folder "{}"'.format(pDir))
    if not os.path.exists(pDir) :
@@ -84,6 +89,7 @@ def createCleanFolder (pDir) :
        os.remove(p)
    return None

def extractImageLargestElement (im) :
    labeled, nbObjects = ndimage.label(im)
    areas = [(j, np.sum(labeled==j)) for j in np.unique(labeled)]
@@ -94,6 +100,7 @@ def extractImageLargestElement (im) :
        im[labeled==areas[1][0]] = 1
    return im
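
# A compact sketch of the same idea (a hypothetical helper): keep only the
# largest connected component, using bincount on the label image.
def _sketch_largestElement(im):
    labeled, nbObjects = ndimage.label(im)
    if nbObjects == 0:
        return im
    sizes = np.bincount(labeled.ravel())
    sizes[0] = 0  # ignore the background component
    return (labeled == np.argmax(sizes)).astype(im.dtype)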

def dictionary_generateAtlas (outPath=DEFAULT_PATH_APD, nb=DEFAULT_NB_PTNS, imSize=DEFAULT_IM_SIZE) :
    logger.info('generate an Atlas composed of {} patterns and image size {}'.format(nb, imSize))
    outDir = os.path.join(outPath,DEFAULT_DIR_DICT)
@@ -129,6 +136,7 @@ def dictionary_generateAtlas (outPath=DEFAULT_PATH_APD, nb=DEFAULT_NB_PTNS, imSize=DEFAULT_IM_SIZE) :
    exportImage (outDir, atlasFinal, 'atlas')
    return lImgs

def dictionary_GenerateRandomPattern (outPath=DEFAULT_PATH_APD, nb=DEFAULT_NB_PTNS, imSize=DEFAULT_IM_SIZE) :
    logger.info('generate a Dictionary composed of {} patterns and image size {}'.format(nb, imSize))
    outDir = os.path.join(outPath,DEFAULT_DIR_DICT)
@@ -141,6 +149,7 @@ def dictionary_GenerateRandomPattern (outPath=DEFAULT_PATH_APD, nb=DEFAULT_NB_PTNS, imSize=DEFAULT_IM_SIZE) :
        exportImage (outDir, im, i, DEFAULT_IM_PATTERN)
    return lImgs

def datasetBinary_combinePatterns (imPtns, outDir, nb=DEFAULT_NB_SPLS, ptnRation=0.25) :
    logger.info('generate a Binary dataset composed of {} samples and pattern occlusion ratio {}'.format(nb, ptnRation))
    createCleanFolder(outDir)
@@ -165,6 +174,7 @@ def datasetBinary_combinePatterns (imPtns, outDir, nb=DEFAULT_NB_SPLS, ptnRation=0.25) :
    logger.debug(dfCoding)
    return imSpls, dfCoding

def addImageBinaryNoise (im, ration=0.1) :
    logger.debug('... add random noise to a binary image')
    rnd = np.random.random(im.shape)
@@ -176,6 +186,7 @@ def addImageBinaryNoise (im, ration=0.1) :
    # plt.show()
    return np.array(imNoise, dtype=np.int16)
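
# A minimal sketch of the flipping step (an assumption for the collapsed body
# above): invert a random fraction `ration` of the binary pixels.
def _sketch_addBinaryNoise(im, ration=0.1):
    rnd = np.random.random(im.shape)
    imNoise = np.array(im, dtype=np.int16)
    flip = rnd < ration
    imNoise[flip] = 1 - imNoise[flip]  # flip the selected pixels
    return imNoise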

def exportImage (outDir, im, imName, nTemplate=DEFAULT_IM_SEGM) :
    if not type(imName)==str :
        imName = nTemplate.format(imName)
@@ -185,6 +196,7 @@ def exportImage (outDir, im, imName, nTemplate=DEFAULT_IM_SEGM) :
    # Image.fromarray(im).save(pImg)
    return pImg

def datasetBinary_deformImages (imgs, outDir, coef=0.5) :
    logger.info('generate a Binary dataset composed of {} samples and deformation coef. {}'.format(len(imgs), coef))
    createCleanFolder(outDir)
@@ -194,6 +206,7 @@ def datasetBinary_deformImages (imgs, outDir, coef=0.5) :
        exportImage(outDir, imgsDef[-1], i)
    return imgsDef

def dataset_addImageNiose (imgs, outDir, noiseFn, ratio) :
    logger.info('generate a Binary dataset composed of {} samples and noise ratio {}'.format(len(imgs), ratio))
    createCleanFolder(outDir)
@@ -203,6 +216,7 @@ def dataset_addImageNiose (imgs, outDir, noiseFn, ratio) :
        exportImage(outDir, imgsNoise[-1], i)
    return imgsNoise

def imageTransformBinary2prob (im, coef=0.1) :
    logger.debug('... transform binary image to probability')
    imDist = ndimage.distance_transform_edt(im)
@@ -214,6 +228,7 @@ def imageTransformBinary2prob (im, coef=0.1) :
    # plt.show()
    return imProb
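
# A hedged sketch of one plausible binary -> probability softening (the
# middle of the function is collapsed above, so this is an assumption):
# a signed distance to the object boundary squashed through a logistic
# function with slope coef.
def _sketch_binary2prob(im, coef=0.1):
    imDist = ndimage.distance_transform_edt(im)
    imDist -= ndimage.distance_transform_edt(1 - im)  # signed distance
    return 1. / (1. + np.exp(-coef * imDist))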

def addImageProbNoise (im, ration=0.1) :
    logger.debug('... add smooth noise to a probability image')
    rnd = 2* (np.random.random(im.shape) -0.5)
@@ -225,6 +240,7 @@ def addImageProbNoise (im, ration=0.1) :
    # plt.show()
    return imNoise

def datasetProb_construct (imgs, outDir, coef=0.5) :
    logger.info('generate a Probability dataset composed of {} samples and smoothness coef. {}'.format(len(imgs), coef))
    createCleanFolder(outDir)
@@ -234,16 +250,40 @@ def datasetProb_construct (imgs, outDir, coef=0.5) :
        exportImage(outDir, imgsProb[-1], i)
    return imgsProb

-def dataset_load (pDir, posix=DEFAULT_IM_POSIX) :
-    logger.info('load a dataset composed located "{}" with samples of "{}"'.format(pDir, posix))
-    pNames = glob.glob(os.path.join(pDir,'*'+posix))
-    pNames = sorted(pNames)
-    imgs = []
-    for pIm in pNames :
+def dataset_loadImages (name=DEFAULT_DATASET, pathBase=DEFAULT_PATH_DATASET, imPattern='', imPosix=DEFAULT_IM_POSIX, nbSamples=None) :
+    pFolder = os.path.join(pathBase,name)
+    logger.info('loading folder "{}" -> {}'.format(pFolder, os.path.exists(pFolder)))
+    pImages = glob.glob(os.path.join(pFolder,imPattern+'*'+imPosix))
+    logger.debug( 'number samples in dataset "{}" is: {}'.format(name, len(pImages)) )
+    pImages = sorted(pImages)[:nbSamples]
+    lImgs = []
+    for i, pIm in enumerate(pImages) :
         im = io.imread(pIm)
-        im = im / np.max(im)
-        imgs.append(imgs)
-    return imgs
+        lImgs.append( im / np.max(im) )
+    return lImgs

def dataset_loadEncoding (pathBase=DEFAULT_PATH_DATASET, fileName='combination.csv') :
    pFile = os.path.join(pathBase,fileName)
    df = pd.DataFrame().from_csv(pFile)
    coding = df['combination'].values.tolist()
    logger.debug('encoding of length: {}'.format(len(coding)))
    encoding = []
    for c in coding :
        vec = [int(x) for x in c.split(';')]
        encoding.append(vec)
    return np.array(encoding)
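
# A hedged side note: DataFrame.from_csv was later deprecated in pandas; the
# equivalent read with the modern API would look roughly like this
# (index_col=0 mirrors from_csv's default and is an assumption here):
# df = pd.read_csv(pFile, index_col=0)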

def dataset_createAtlas (name='dictionary', pathBase=DEFAULT_PATH_DATASET, imPattern='pattern_') :
    imgs = dataset_loadImages (name, pathBase, imPattern)
    assert len(imgs) > 0
    atlas = np.zeros_like( imgs[0] )
    for i, im in enumerate(imgs) :
        atlas[ im == 1 ] = i+1
    return np.array(atlas, dtype=np.uint8)
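
# Usage sketch: rebuild the label atlas from the stored binary patterns.
# Where patterns overlap, later patterns overwrite earlier labels, and the
# uint8 label image bounds the number of patterns at 255.
# atlas = dataset_createAtlas()   # uses the defaults above
# assert atlas.max() <= 255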

def dataset_convertToNifti (inDir, outDir, posix=DEFAULT_IM_POSIX) :
    logger.info('convert a dataset to Nifti')
@@ -264,6 +304,7 @@ def dataset_convertToNifti (inDir, outDir, posix=DEFAULT_IM_POSIX) :
    # plt.imshow(img)
    # plt.show()

def generateAll (outPath=DEFAULT_PATH_APD) :
    # imDict = dictionary_GenerateRandomPattern()
    imDict = dictionary_generateAtlas()
@@ -279,9 +320,11 @@ def generateAll (outPath=DEFAULT_PATH_APD) :
    dataset_addImageNiose(imCombProb, os.path.join(outPath, 'datasetProb_noise'), addImageProbNoise, 0.2)
    dataset_addImageNiose(imDefProb, os.path.join(outPath, 'datasetProb_defNoise'), addImageProbNoise, 0.2)

def convertDataset_Nifti (pDatasets=DEFAULT_PATH_APD) :
    dataset_convertToNifti( os.path.join(pDatasets, 'datasetBinary_raw'), os.path.join(pDatasets, 'datasetBinary_raw_nifti') )

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    logger.info('running...')
...