added Segmentation and Classification codes
fitushar committed Apr 26, 2021
1 parent 91b4a98 commit 93a45e2
Showing 3 changed files with 111 additions and 0 deletions.
Empty file added Segmentation/__init__.py
65 changes: 65 additions & 0 deletions Segmentation/config.ini
@@ -0,0 +1,65 @@
############################ input configuration sections
[ct]
path_to_search = /Local/Kidney_batch10_preprocessing_forsegmentation/img/tumor
filename_contains = CT_
spatial_window_size = (144, 144, 144)
interp_order = 1
axcodes = (A, R, S)

[label]
path_to_search = /Local/Segmentation/vignesh_correctedData/Preprocessed/lbl
filename_contains = label_
spatial_window_size = (144, 144, 144)
interp_order = 0
axcodes = (A, R, S)

[masking]
path_to_search = /Local/Segmentation/vignesh_correctedData/Preprocessed/lbl
filename_contains = label_
spatial_window_size = (144, 144, 144)
interp_order = 0
axcodes = (A, R, S)



############################## system configuration sections
[SYSTEM]
cuda_devices = 0
num_threads = 1
num_gpus = 1
model_dir = /Local/Segmentation/Final_DenseVnet/Diseased_weights
queue_length = 36

[NETWORK]
name = dense_vnet
batch_size = 3

# volume level preprocessing
volume_padding_size = 0
window_sampling = resize

[TRAINING]
sample_per_volume = 4
lr = 0.001
loss_type = dense_vnet_abdominal_ct.dice_hinge.dice
starting_iter = -1
save_every_n = 60
max_iter = 16000
rotation_angle = -10.0,10.0
scaling_percentage = -20.0,20.0
random_flipping_axes = 1,2

[INFERENCE]
border = (0, 0, 0)
inference_iter = 31800
output_interp_order = 0
spatial_window_size = (144, 144, 144)
save_seg_dir = /Local/Kidney_batch10_preprocessing_forsegmentation/prediction/Densevnet_42000/tumor
############################ custom configuration sections
[SEGMENTATION]
image = ct
label = label
sampler = label
label_normalisation = False
output_prob = False
num_classes = 31
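
The file above is a NiftyNet-style configuration: the [ct], [label] and [masking] sections declare the input images and reference segmentations, [SYSTEM]/[NETWORK]/[TRAINING]/[INFERENCE] control the DenseVNet run, and [SEGMENTATION] wires the inputs into the segmentation application (loss_type appears to point at the dice_hinge module added below). With NiftyNet installed, a file like this would typically be passed on the command line, e.g. net_segment train -c Segmentation/config.ini. As a quick sanity check, the key settings can also be read with Python's standard configparser; the relative path below is only an assumption about where the file sits in a checkout:

# Minimal sketch: print the main training/inference settings from config.ini.
import configparser

config = configparser.ConfigParser()
config.read("Segmentation/config.ini")  # assumed path within the repository

print("network:        ", config["NETWORK"]["name"])
print("batch size:     ", config.getint("NETWORK", "batch_size"))
print("learning rate:  ", config.getfloat("TRAINING", "lr"))
print("max iterations: ", config.getint("TRAINING", "max_iter"))
print("inference iter: ", config.getint("INFERENCE", "inference_iter"))
print("num classes:    ", config.getint("SEGMENTATION", "num_classes"))
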
46 changes: 46 additions & 0 deletions Segmentation/dice_hinge.py
@@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division

import numpy as np
import tensorflow as tf

from niftynet.layer.base_layer import Layer


def dice(prediction, ground_truth, weight_map=None):
    """Soft multi-class Dice loss with hinge penalties on low per-class scores.

    `prediction` is expected to be an [n_voxels, n_classes] tensor of class
    probabilities and `ground_truth` a matching vector of integer labels.
    """
    ground_truth = tf.to_int64(ground_truth)
    prediction = tf.cast(prediction, tf.float32)
    # Build a sparse one-hot encoding of the ground-truth labels.
    ids = tf.range(tf.to_int64(tf.shape(ground_truth)[0]), dtype=tf.int64)
    ids = tf.stack([ids, ground_truth], axis=1)
    one_hot = tf.SparseTensor(
        indices=ids,
        values=tf.ones_like(ground_truth, dtype=tf.float32),
        dense_shape=tf.to_int64(tf.shape(prediction)))
    # if weight_map is not None:
    #     weight_map_nclasses = tf.reshape(
    #         tf.tile(weight_map, [n_classes]), prediction.get_shape())
    #     dice_numerator = 2.0 * tf.sparse_reduce_sum(
    #         weight_map_nclasses * one_hot * prediction, reduction_axes=[0])
    # else:
    dice_numerator = 2.0 * tf.sparse_reduce_sum(
        one_hot * prediction, reduction_axes=[0])
    dice_denominator = \
        tf.reduce_sum(tf.square(prediction), reduction_indices=[0]) + \
        tf.sparse_reduce_sum(one_hot, reduction_axes=[0])
    epsilon_denominator = 0.00001

    # Per-class Dice coefficients.
    dice_score = dice_numerator / (dice_denominator + epsilon_denominator)
    # dice_score.set_shape([n_classes])
    # minimising (1 - dice_coefficients)
    [tf.summary.scalar('Dice{}'.format(i), d)
     for i, d in enumerate(tf.unstack(dice_score, 0))]
    dice_score = tf.Print(dice_score, [dice_score], summarize=10, message='dice')
    # Hinge terms: extra penalty for any class whose Dice falls below 0.1 / 0.01.
    h1 = tf.square(tf.minimum(0.1, dice_score) * 10 - 1)
    h2 = tf.square(tf.minimum(0.01, dice_score) * 100 - 1)

    return 1.0 - tf.reduce_mean(dice_score) + \
        tf.reduce_mean(h1) * 10 + \
        tf.reduce_mean(h2) * 10
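
For orientation, the loss above is a soft multi-class Dice loss with two hinge penalties: h1 adds an extra cost while a class's Dice score is below 0.1, and h2 an even larger one while it is below 0.01, so classes with near-zero overlap are penalised heavily. A minimal NumPy sketch of the same arithmetic (the per-class Dice values here are hypothetical, purely for illustration):

import numpy as np

# Hypothetical per-class Dice scores, for illustration only.
dice_score = np.array([0.0, 0.005, 0.05, 0.6, 0.9])

# Same hinge arithmetic as the TensorFlow code above.
h1 = np.square(np.minimum(0.1, dice_score) * 10 - 1)    # positive only while Dice < 0.1
h2 = np.square(np.minimum(0.01, dice_score) * 100 - 1)  # positive only while Dice < 0.01

loss = 1.0 - dice_score.mean() + h1.mean() * 10 + h2.mean() * 10
print(h1)
print(h2)
print(loss)
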


