"""Export example NIfTI volumes for a trained DWI segmentation experiment.

Loads two test images per input series, runs the reconstructed Keras model,
post-processes the blurred softmax output into discrete blobs, zeroes a
2-voxel outer border, and writes example volumes (input T2, blurred
prediction, thresholded prediction, ground-truth segmentation) next to the
experiment's training output.
"""
import argparse
import multiprocessing
import os
from functools import partial
from os import path

import numpy as np
import SimpleITK as sitk
import tensorflow as tf
from tensorflow.keras.models import load_model

from sfransen.utils_quintin import *
from sfransen.DWI_exp import preprocess
from sfransen.DWI_exp.helpers import *
from sfransen.DWI_exp.callbacks import dice_coef
from sfransen.FROC.blob_preprocess import *
from sfransen.FROC.cal_froc_from_np import *
from sfransen.load_images import load_images_parrallel
from sfransen.Saliency.base import *
from sfransen.Saliency.integrated_gradients import *

parser = argparse.ArgumentParser(
    description='Calculate the froc metrics and store in froc_metrics.yml')
parser.add_argument('-experiment',
                    help='Title of experiment')
parser.add_argument('--series', '-s',
                    metavar='[series_name]', required=True, nargs='+',
                    help='List of series to include')
args = parser.parse_args()

######## CUDA ################
# NOTE(review): this is set *after* `import tensorflow`; to guarantee the
# device mask takes effect it should be set before TF initializes CUDA.
os.environ["CUDA_VISIBLE_DEVICES"] = "2"

######## constants #############
SERIES = args.series
series_ = '_'.join(args.series)
EXPERIMENT = args.experiment
MODEL_PATH = f'./../train_output/{EXPERIMENT}_{series_}/models/{EXPERIMENT}_{series_}.h5'
DATA_DIR = "./../data/Nijmegen paths/"
TARGET_SPACING = (0.5, 0.5, 3)
INPUT_SHAPE = (192, 192, 24, len(SERIES))
IMAGE_SHAPE = INPUT_SHAPE[:3]
# Indices (into the per-series path lists) of the test cases to export.
TEST_INDEX_image = [371, 12]
N_CPUS = 12

########## load images in parallel ##############
print_("> Loading images into RAM...")

# Read one path list per series, plus the segmentation paths, from txt files.
image_paths = {}
for s in SERIES:
    with open(path.join(DATA_DIR, f"{s}.txt"), 'r') as f:
        image_paths[s] = [l.strip() for l in f.readlines()]
with open(path.join(DATA_DIR, "seg.txt"), 'r') as f:
    seg_paths = [l.strip() for l in f.readlines()]
num_images = len(seg_paths)

# Workers resample each volume to the common shape/spacing.
partial_images = partial(load_images_parrallel,
                         seq='images',
                         target_shape=IMAGE_SHAPE,
                         target_space=TARGET_SPACING)
partial_seg = partial(load_images_parrallel,
                      seq='seg',
                      target_shape=IMAGE_SHAPE,
                      target_space=TARGET_SPACING)

# Context manager guarantees worker shutdown (original pool was never closed).
with multiprocessing.Pool(processes=N_CPUS) as pool:
    # Load images: one (batch, x, y, z) stack per series.
    images = []
    for s in SERIES:
        image_paths_seq = image_paths[s]
        image_paths_index = np.asarray(image_paths_seq)[TEST_INDEX_image]
        data_list = pool.map(partial_images, image_paths_index)
        data = np.stack(data_list, axis=0)
        images.append(data)
    # (series, batch, x, y, z) -> channels-last (batch, x, y, z, series).
    images_list = np.transpose(images, (1, 2, 3, 4, 0))

    # Load segmentations for the same test indices.
    seg_paths_index = np.asarray(seg_paths)[TEST_INDEX_image]
    data_list = pool.map(partial_seg, seg_paths_index)
    segmentations = np.stack(data_list, axis=0)

########### load model ##################
print(' >>>>>>> LOAD MODEL <<<<<<<<<')
dependencies = {
    'dice_coef': dice_coef
}
reconstructed_model = load_model(MODEL_PATH, custom_objects=dependencies)
# reconstructed_model.layers[-1].activation = tf.keras.activations.linear
predictions_blur = reconstructed_model.predict(images_list, batch_size=1)

############# preprocess #################
# Preprocess predictions by removing the blur and making individual blobs.
print('>>>>>>>> START preprocess')


def move_dims(arr):
    """Reorder axes from (batch, width, height, depth) to (batch, depth, height, width).

    UMCG numpy dimensions convention: (batch, width, height, depth).
    Joeran numpy dimensions convention: (batch, depth, height, width).
    """
    arr = np.moveaxis(arr, 3, 1)
    arr = np.moveaxis(arr, 3, 2)
    return arr


# Joeran has his numpy arrays ordered differently.
print("images_list:", np.shape(images_list))
print("predictions_blur:", np.shape(predictions_blur))
print("segmentations:", np.shape(segmentations))
predictions_blur = move_dims(np.squeeze(predictions_blur))
segmentations = move_dims(np.squeeze(segmentations))
# images_list = move_dims(np.squeeze(images_list))
predictions = [preprocess_softmax(pred, threshold="dynamic")[0]
               for pred in predictions_blur]
print("images_list:", np.shape(images_list))
print("predictions_blur:", np.shape(predictions_blur))
print("segmentations:", np.shape(segmentations))
print("predictions:", np.shape(predictions))

# Remove outer edges: zero a 2-voxel border (2:190 == 2:-2 on the 192 axes).
zeros = np.zeros(np.shape(np.squeeze(predictions)))
test = np.squeeze(predictions)[:, 2:-2, 2:190, 2:190]
zeros[:, 2:-2, 2:190, 2:190] = test
predictions = zeros

############## save image as example #################
print("images_list size ", np.shape(images_list[0, :, :, :, 0]))

img_s = sitk.GetImageFromArray(np.transpose(images_list[0, :, :, :, 0].squeeze()))
sitk.WriteImage(img_s, f"./../train_output/{EXPERIMENT}_{series_}/t2_002.nii.gz")

img_s = sitk.GetImageFromArray(predictions_blur[0].squeeze())
sitk.WriteImage(img_s, f"./../train_output/{EXPERIMENT}_{series_}/predictions_blur_002.nii.gz")

img_s = sitk.GetImageFromArray(predictions[0].squeeze())
sitk.WriteImage(img_s, f"./../train_output/{EXPERIMENT}_{series_}/predictions_002.nii.gz")

img_s = sitk.GetImageFromArray(segmentations[0].squeeze())
sitk.WriteImage(img_s, f"./../train_output/{EXPERIMENT}_{series_}/segmentations_002.nii.gz")