fast-mri/scripts/6.saliency_map.py

import argparse
import os
from os import path
import multiprocessing
from functools import partial
import SimpleITK as sitk
import tensorflow as tf
from tensorflow.keras.models import load_model
import numpy as np
from sfransen.utils_quintin import *
from sfransen.DWI_exp import preprocess
from sfransen.DWI_exp.helpers import *
from sfransen.DWI_exp.callbacks import dice_coef
from sfransen.DWI_exp.losses import weighted_binary_cross_entropy
from sfransen.FROC.blob_preprocess import *
from sfransen.FROC.cal_froc_from_np import *
from sfransen.load_images import load_images_parrallel
from sfransen.Saliency.base import *
from sfransen.Saliency.integrated_gradients import *
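# NOTE: the wildcard imports above supply helpers used below (e.g. print_ and
# read_yaml_to_dict); they must precede any code that calls those names.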
parser = argparse.ArgumentParser(
    description='Compute saliency maps for a trained model and store them as .npy files')
parser.add_argument('-experiment',
    help='Title of experiment')
parser.add_argument('--series', '-s',
    metavar='[series_name]', required=True, nargs='+',
    help='List of series to include')
args = parser.parse_args()
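# Example invocation (hypothetical experiment/series names):
#   python 6.saliency_map.py -experiment unet_dwi --series t2 dwi adc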
######## CUDA ################
os.environ["CUDA_VISIBLE_DEVICES"] = "2"
######## constants #############
SERIES = args.series
series_ = '_'.join(args.series)
EXPERIMENT = args.experiment
MODEL_PATH = f'./../train_output/{EXPERIMENT}_{series_}/models/{EXPERIMENT}_{series_}.h5'
YAML_DIR = f'./../train_output/{EXPERIMENT}_{series_}'
DATA_DIR = "./../data/Nijmegen paths/"
TARGET_SPACING = (0.5, 0.5, 3)  # voxel spacing to resample to (presumably mm)
INPUT_SHAPE = (192, 192, 24, len(SERIES))  # one input channel per series
IMAGE_SHAPE = INPUT_SHAPE[:3]
# froc_metrics = read_yaml_to_dict(f'{YAML_DIR}/froc_metrics.yml')
# top_10_idx = np.argsort(froc_metrics['roc_pred'])[-1 :]
DATA_SPLIT_INDEX = read_yaml_to_dict('./../data/Nijmegen paths/train_val_test_idxs.yml')
TEST_INDEX = DATA_SPLIT_INDEX['val_set0']
# TEST_INDEX_top10 = [TEST_INDEX[i] for i in top_10_idx]
TEST_INDEX_image = [371]  # index (into the full path lists) of the single case to visualise
N_CPUS = 12
########## load images in parallel ##############
print_("> Loading images into RAM...")
# read per-series image paths and the segmentation paths from the .txt listings
image_paths = {}
for s in SERIES:
    with open(path.join(DATA_DIR, f"{s}.txt"), 'r') as f:
        image_paths[s] = [l.strip() for l in f.readlines()]
with open(path.join(DATA_DIR, "seg.txt"), 'r') as f:
    seg_paths = [l.strip() for l in f.readlines()]
num_images = len(seg_paths)
# create pool of workers
pool = multiprocessing.Pool(processes=N_CPUS)
partial_images = partial(load_images_parrallel,
                         seq='images',
                         target_shape=IMAGE_SHAPE,
                         target_space=TARGET_SPACING)
partial_seg = partial(load_images_parrallel,
                      seq='seg',
                      target_shape=IMAGE_SHAPE,
                      target_space=TARGET_SPACING)
# load images
images = []
for s in SERIES:
    image_paths_seq = image_paths[s]
    image_paths_index = np.asarray(image_paths_seq)[TEST_INDEX_image]
    data_list = pool.map(partial_images, image_paths_index)
    data = np.stack(data_list, axis=0)
    images.append(data)
# move the series axis to the back: one channel per input series
images_list = np.transpose(images, (1, 2, 3, 4, 0))
# load segmentations
seg_paths_index = np.asarray(seg_paths)[TEST_INDEX_image]
data_list = pool.map(partial_seg, seg_paths_index)
segmentations = np.stack(data_list, axis=0)
pool.close()
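# Expected shapes: images_list -> (cases, 192, 192, 24, len(SERIES)),
# segmentations -> (cases, 192, 192, 24).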
########### load model ##################
print(' >>>>>>> LOAD MODEL <<<<<<<<<')
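# The .h5 was saved with a custom metric and loss, so load_model needs them
# registered under the exact names used at training time.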
dependencies = {
    'dice_coef': dice_coef,
    'weighted_cross_entropy_fn': weighted_binary_cross_entropy
}
reconstructed_model = load_model(MODEL_PATH, custom_objects=dependencies)
# reconstructed_model.layers[-1].activation = tf.keras.activations.linear
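# (Left disabled: swapping the final sigmoid for a linear activation is a
# common trick for gradient-based saliency; as written, the maps are computed
# on the model's original output activation.)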
######### Build Saliency heatmap ##############
print(' >>>>>>> Build saliency map <<<<<<<<<')
ig = IntegratedGradients(reconstructed_model)
saliency_map = []
for img_idx in range(len(images_list)):
    # add a batch dimension; the model expects (1, *INPUT_SHAPE)
    input_img = np.reshape(images_list[img_idx], (1, *INPUT_SHAPE))
    saliency_map.append(ig.get_mask(input_img).numpy())
print("size saliency map", np.shape(saliency_map))
np.save(f'{YAML_DIR}/saliency', saliency_map)
np.save(f'{YAML_DIR}/images_list', images_list)
np.save(f'{YAML_DIR}/segmentations', segmentations)
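# To inspect the outputs later (hypothetical downstream snippet; np.save
# appends the .npy extension):
#   saliency = np.load(f'{YAML_DIR}/saliency.npy')
#   print(np.shape(saliency))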