import SimpleITK as sitk
import tensorflow as tf
from tensorflow.keras.models import load_model
from focal_loss import BinaryFocalLoss
import numpy as np
import multiprocessing
from functools import partial
import os
from os import path
from tqdm import tqdm
import argparse

from sfransen.utils_quintin import *
from sfransen.DWI_exp.helpers import *
from sfransen.DWI_exp.preprocessing_function import preprocess
from sfransen.DWI_exp.callbacks import dice_coef
# from sfransen.FROC.blob_preprocess import *
from sfransen.FROC.cal_froc_from_np import *
from sfransen.load_images import load_images_parrallel
from sfransen.DWI_exp.losses import weighted_binary_cross_entropy
from umcglib.froc import *
from umcglib.binarize import dynamic_threshold

parser = argparse.ArgumentParser(
    description='Calculate the FROC metrics and store them in froc_metrics.yml')
parser.add_argument('-experiment',
    help='Title of the experiment')
parser.add_argument('--series', '-s',
    metavar='[series_name]', required=True, nargs='+',
    help='List of series to include')
parser.add_argument('-fold',
    default='',
    help='Fold to evaluate')
args = parser.parse_args()

# if __name__ == '__main__':
# The guard above would be needed to avoid the multiprocessing fork issue (Windows vs Linux).

######## CUDA ################
os.environ["CUDA_VISIBLE_DEVICES"] = "2"

######## constants #############
SERIES = args.series
series_ = '_'.join(args.series)
EXPERIMENT = args.experiment
# fold = args.fold

predictions_added = []
segmentations_added = []

for fold in range(0, 5):
    MODEL_PATH = f'./../train_output/{EXPERIMENT}_{series_}_{fold}/models/{EXPERIMENT}_{series_}_{fold}.h5'
    YAML_DIR = f'./../train_output/{EXPERIMENT}_{series_}_{fold}'
    IMAGE_DIR = f'./../train_output/{EXPERIMENT}_{series_}_{fold}'
    # MODEL_PATH = f'./../train_output/{EXPERIMENT}_{series_}/models/{EXPERIMENT}_{series_}.h5'
    # YAML_DIR = f'./../train_output/{EXPERIMENT}_{series_}'
    # IMAGE_DIR = f'./../train_output/{EXPERIMENT}_{series_}'
    DATA_DIR = "./../data/Nijmegen paths/"

    TARGET_SPACING = (0.5, 0.5, 3)
    INPUT_SHAPE = (192, 192, 24, len(SERIES))
    IMAGE_SHAPE = INPUT_SHAPE[:3]

    DATA_SPLIT_INDEX = read_yaml_to_dict(f'./../data/Nijmegen paths/train_val_test_idxs_{fold}.yml')
    # DATA_SPLIT_INDEX = read_yaml_to_dict('./../data/Nijmegen paths/train_val_test_idxs.yml')
    TEST_INDEX = DATA_SPLIT_INDEX['test_set0']

    N_CPUS = 12

    ########## test with old method #############
    print_("> Loading images into RAM...")

    # Collect the image paths per series and the segmentation paths.
    image_paths = {}
    for s in SERIES:
        with open(path.join(DATA_DIR, f"{s}.txt"), 'r') as f:
            image_paths[s] = [l.strip() for l in f.readlines()]
    with open(path.join(DATA_DIR, "seg.txt"), 'r') as f:
        seg_paths = [l.strip() for l in f.readlines()]
    num_images = len(seg_paths)

    images = []
    images_list = []
    segmentations = []

    # Read and preprocess each of the paths for each series, and the segmentations.
    for img_idx in tqdm(range(len(TEST_INDEX))):  # add [:40] to use fewer images
        # print('image number', [TEST_INDEX[img_idx]])
        img_s = {s: sitk.ReadImage(image_paths[s][TEST_INDEX[img_idx]], sitk.sitkFloat32)
                 for s in SERIES}
        seg_s = sitk.ReadImage(seg_paths[TEST_INDEX[img_idx]], sitk.sitkFloat32)
        img_n, seg_n = preprocess(img_s, seg_s,
                                  shape=IMAGE_SHAPE, spacing=TARGET_SPACING)
        for seq in img_n:
            images.append(img_n[seq])
        images_list.append(images)
        images = []
        segmentations.append(seg_n)

    # Reorder to (cases, width, height, depth, series), matching INPUT_SHAPE.
    images_list = np.transpose(images_list, (0, 2, 3, 4, 1))
    print('>>>>> size of images_list:', np.shape(images_list),
          '. Expected something like (5, 192, 192, 24, 3).')
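    # Optional sanity check (a sketch, not part of the original script): under the
    # assumption that preprocess() resamples every case to IMAGE_SHAPE and that every
    # series is present for each test case, the array should match the network input.
    # assert images_list.shape == (len(TEST_INDEX), *INPUT_SHAPE), \
    #     f"unexpected input shape: {images_list.shape}"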
    ########### load model ##################
    print(' >>>>>>> LOAD MODEL <<<<<<<<<')

    dependencies = {
        'dice_coef': dice_coef,
        'weighted_cross_entropy_fn': weighted_binary_cross_entropy
    }
    reconstructed_model = load_model(MODEL_PATH, custom_objects=dependencies)
    # reconstructed_model.summary(line_length=120)

    # Make predictions on all TEST_INDEX cases.
    print(' >>>>>>> START prediction <<<<<<<<<')
    predictions_blur = reconstructed_model.predict(images_list, batch_size=1)

    ############# preprocess #################
    # Preprocess the predictions by removing the blur and creating individual blobs.
    print('>>>>>>>> START preprocess')

    def move_dims(arr):
        # UMCG numpy dimension convention:   dims = (batch, width, height, depth)
        # Joeran numpy dimension convention: dims = (batch, depth, height, width)
        arr = np.moveaxis(arr, 3, 1)
        arr = np.moveaxis(arr, 3, 2)
        return arr

    # Joeran has his numpy arrays ordered differently.
    predictions_blur = move_dims(np.squeeze(predictions_blur))
    segmentations = move_dims(np.squeeze(segmentations))
    # predictions = [preprocess_softmax(pred, threshold="dynamic")[0] for pred in predictions_blur]
    predictions = predictions_blur
    print("the size of predictions is:", np.shape(predictions))

    # Zero out the outer edges of each prediction volume.
    zeros = np.zeros(np.shape(predictions))
    zeros[:, 2:-2, 2:190, 2:190] = np.squeeze(predictions)[:, 2:-2, 2:190, 2:190]
    predictions = zeros

    # # Perform the FROC analysis with Joeran's method.
    # metrics = evaluate(y_true=segmentations, y_pred=predictions)
    # dump_dict_to_yaml(metrics, YAML_DIR, "froc_metrics_focal_10_test", verbose=True)

    ############# save image as example #################
    # Save image number 6 as an example.
    # img_s = sitk.GetImageFromArray(predictions_blur[6].squeeze())
    # sitk.WriteImage(img_s, f"{IMAGE_DIR}/predictions_blur_006_dyn_0.6.nii.gz")
    # img_s = sitk.GetImageFromArray(predictions[6].squeeze())
    # sitk.WriteImage(img_s, f"{IMAGE_DIR}/predictions_006_dyn_0.6.nii.gz")
    # img_s = sitk.GetImageFromArray(segmentations[6].squeeze())
    # sitk.WriteImage(img_s, f"{IMAGE_DIR}/segmentations_006_dyn_0.6.nii.gz")
    # img_s = sitk.GetImageFromArray(np.transpose(images_list[6, :, :, :, 0].squeeze()))
    # sitk.WriteImage(img_s, f"{IMAGE_DIR}/t2_006_dyn_0.6.nii.gz")

    # Accumulate the predictions and segmentations over the folds.
    if fold == 0:
        segmentations_added = segmentations
        predictions_added = predictions
    else:
        segmentations_added = np.append(segmentations_added, segmentations, axis=0)
        predictions_added = np.append(predictions_added, predictions, axis=0)

# FROC with the umcglib method, on the predictions pooled over all folds.
stats = calculate_froc(y_true=segmentations_added,
                       y_pred=predictions_added,
                       preprocess_func=dynamic_threshold,
                       dynamic_threshold_factor=0.5,
                       minimum_confidence=0.1,
                       bootstrap=1000,
                       min_overlap=0.01,
                       overlap_function='dsc')

dump_stats_to_yaml(stats, YAML_DIR, "umcglib_stats_overlap_0.01", verbose=True)
quit()

plot_multiple_froc(
    sensitivities=[np.array(stats['sensitivity'])],
    fp_per_patient=[np.array(stats["fp_per_patient"])],
    ci_low=[np.array(stats['sens_95_boot_ci_low'])],
    ci_high=[np.array(stats["sens_95_boot_ci_high"])],
    model_names=["test only"],
    title="testtest",
    height=12, width=15,
    save_as="froc_test_0.5_conf_0.1_overlap_0.1_dsc.png",
    xlims=(0.1, 5)
)
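# ---------------------------------------------------------------------------
# Hedged sanity-check sketch (an illustration added here, not the umcglib FROC
# implementation and not part of the original pipeline): count how many
# ground-truth lesions are touched by a binarised prediction, using only numpy
# and scipy. The fixed threshold and the "any voxel overlap counts as a hit"
# rule are simplifying assumptions chosen for illustration only. Unreachable in
# normal runs because of the quit() above; call it manually when needed.
# ---------------------------------------------------------------------------
from scipy import ndimage

def lesion_hit_count(y_true, y_pred, threshold=0.5):
    """Return (hit lesions, total lesions) over all cases."""
    hits, total = 0, 0
    for gt, pred in zip(y_true, y_pred):
        labels, n_lesions = ndimage.label(gt > 0.5)  # connected components in the ground truth
        total += n_lesions
        pred_bin = pred > threshold                  # simple global threshold
        for lesion_idx in range(1, n_lesions + 1):
            if np.any(pred_bin[labels == lesion_idx]):
                hits += 1                            # any voxel overlap counts as a hit
    return hits, total

# Example usage:
# print(lesion_hit_count(segmentations_added, predictions_added))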