import argparse
import multiprocessing
import os
from functools import partial

import numpy as np
import SimpleITK as sitk
import tensorflow as tf
from tensorflow.keras.models import load_model
from focal_loss import BinaryFocalLoss

from sfransen.utils_quintin import *
from sfransen.DWI_exp.helpers import *
from sfransen.DWI_exp.preprocessing_function import preprocess
from sfransen.DWI_exp.callbacks import dice_coef
from sfransen.FROC.blob_preprocess import *
from sfransen.FROC.cal_froc_from_np import *
from sfransen.load_images import load_images_parrallel

parser = argparse.ArgumentParser(
    description='Calculate the FROC metrics and store them in froc_metrics.yml')
parser.add_argument('-experiment',
                    help='Title of the experiment')
parser.add_argument('--series', '-s',
                    metavar='[series_name]', required=True, nargs='+',
                    help='List of series to include')
args = parser.parse_args()
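
# Example invocation (script and series names below are hypothetical):
#   python calculate_froc.py -experiment my_experiment --series t2 dwi adc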

######## CUDA ################
# Pin TensorFlow to a single GPU (device index 2) so the other GPUs stay free.
os.environ["CUDA_VISIBLE_DEVICES"] = "2"

######## constants #############
SERIES = args.series
series_ = '_'.join(args.series)
EXPERIMENT = args.experiment

MODEL_PATH = f'./../train_output/{EXPERIMENT}_{series_}/models/{EXPERIMENT}_{series_}.h5'
YAML_DIR = f'./../train_output/{EXPERIMENT}_{series_}'
IMAGE_DIR = f'./../train_output/{EXPERIMENT}_{series_}'

DATA_DIR = "./../data/Nijmegen paths/"
TARGET_SPACING = (0.5, 0.5, 3)  # target voxel spacing (x, y, z)
INPUT_SHAPE = (192, 192, 24, len(SERIES))  # one input channel per series
IMAGE_SHAPE = INPUT_SHAPE[:3]

DATA_SPLIT_INDEX = read_yaml_to_dict('./../data/Nijmegen paths/train_val_test_idxs.yml')
TEST_INDEX = DATA_SPLIT_INDEX['val_set0']

N_CPUS = 12
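# NOTE: all paths above are relative to the current working directory
# (they start with ./../), so the script is assumed to run one level
# below the project root.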

########## load images in parallel ##############
print_("> Loading images into RAM...")

# read the image paths per series from the txt files
image_paths = {}
for s in SERIES:
    with open(os.path.join(DATA_DIR, f"{s}.txt"), 'r') as f:
        image_paths[s] = [line.strip() for line in f]
with open(os.path.join(DATA_DIR, "seg.txt"), 'r') as f:
    seg_paths = [line.strip() for line in f]
num_images = len(seg_paths)

# create a pool of workers for parallel image loading
pool = multiprocessing.Pool(processes=N_CPUS)
partial_images = partial(load_images_parrallel,
                         seq='images',
                         target_shape=IMAGE_SHAPE,
                         target_space=TARGET_SPACING)
partial_seg = partial(load_images_parrallel,
                      seq='seg',
                      target_shape=IMAGE_SHAPE,
                      target_space=TARGET_SPACING)
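# For reference: pool.map(partial_images, paths) calls
# load_images_parrallel(p, seq='images', target_shape=IMAGE_SHAPE,
# target_space=TARGET_SPACING) for every path p, spread over N_CPUS workers.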

# load the images for each series and stack them along the channel axis
images = []
for s in SERIES:
    image_paths_seq = image_paths[s]
    image_paths_index = np.asarray(image_paths_seq)[TEST_INDEX]
    data_list = pool.map(partial_images, image_paths_index)
    data = np.stack(data_list, axis=0)
    images.append(data)

# (series, batch, x, y, z) -> (batch, x, y, z, series)
images_list = np.transpose(images, (1, 2, 3, 4, 0))

# load the ground-truth segmentations
seg_paths_index = np.asarray(seg_paths)[TEST_INDEX]
data_list = pool.map(partial_seg, seg_paths_index)
segmentations = np.stack(data_list, axis=0)

# all loading is done; release the worker pool
pool.close()
pool.join()

########### load model ##################
print(' >>>>>>> LOAD MODEL <<<<<<<<<')

# dice_coef is a custom metric, so it must be passed to load_model explicitly
dependencies = {
    'dice_coef': dice_coef
}
reconstructed_model = load_model(MODEL_PATH, custom_objects=dependencies)
# reconstructed_model.summary(line_length=120)

# make predictions on all TEST_INDEX
print(' >>>>>>> START prediction <<<<<<<<<')
predictions_blur = reconstructed_model.predict(images_list, batch_size=1)
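# NOTE (assumption): for a single-channel lesion-probability output head,
# predictions_blur has shape (n_test, 192, 192, 24, 1); the trailing
# channel axis is removed by the np.squeeze call further below.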

############# preprocess #################
# preprocess predictions by removing the blur and splitting them into individual blobs
print('>>>>>>>> START preprocess')

def move_dims(arr):
    # UMCG numpy dimensions convention: dims = (batch, width, height, depth)
    # Joeran numpy dimensions convention: dims = (batch, depth, height, width)
    arr = np.moveaxis(arr, 3, 1)
    arr = np.moveaxis(arr, 3, 2)
    return arr

# Joeran has his numpy arrays ordered differently.
predictions_blur = move_dims(np.squeeze(predictions_blur))
segmentations = move_dims(np.squeeze(segmentations))
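# Shape sanity check: move_dims maps (batch, width, height, depth) to
# (batch, depth, height, width), e.g. (n, 192, 192, 24) -> (n, 24, 192, 192).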

predictions = [preprocess_softmax(pred, threshold="dynamic")[0] for pred in predictions_blur]

# Remove the outer edges: zero out a 2-voxel border on every spatial axis
zeros = np.zeros(np.shape(predictions))
cropped = np.squeeze(predictions)[:, 2:-2, 2:-2, 2:-2]
zeros[:, 2:-2, 2:-2, 2:-2] = cropped
predictions = zeros

# perform FROC evaluation and save the metrics
metrics = evaluate(y_true=segmentations, y_pred=predictions)
dump_dict_to_yaml(metrics, YAML_DIR, "froc_metrics", verbose=True)

############## save image as example #################
# save image number 3 as an example
img_s = sitk.GetImageFromArray(predictions_blur[3].squeeze())
sitk.WriteImage(img_s, f"{IMAGE_DIR}/predictions_blur_001.nii.gz")

img_s = sitk.GetImageFromArray(predictions[3].squeeze())
sitk.WriteImage(img_s, f"{IMAGE_DIR}/predictions_001.nii.gz")

img_s = sitk.GetImageFromArray(segmentations[3].squeeze())
sitk.WriteImage(img_s, f"{IMAGE_DIR}/segmentations_001.nii.gz")