Automatically load in 2.frocs.py and clean up files

Stefan 2022-03-21 10:10:52 +01:00
parent 11591d5752
commit 8cd3d865da
5 changed files with 168 additions and 1 deletion

requirements.txt Executable file

@@ -0,0 +1,17 @@
gast==0.3.3
h5py==2.10.0
Keras-Preprocessing==1.1.0
matplotlib==3.4.3
numpy==1.17.3
pandas==0.25.3
Pillow==8.4.0
pyparsing==3.0.4
python-dateutil==2.8.2
rsa==4.0
scipy==1.4.1
SimpleITK==2.0.2
six==1.16.0
tensorboard==2.2.1
tensorboard-plugin-wit==1.6.0.post3
tensorflow==2.2.0
tensorflow-estimator==2.2.0


@@ -1,6 +1,6 @@
 import numpy as np
 import SimpleITK as sitk
-from helpers import *
+from sfransen.DWI_exp.helpers import *
 import tensorflow.keras.backend as K
 import tensorflow as tf
 from tensorflow.keras.callbacks import Callback

src/sfransen/Saliency/base.py Executable file

@@ -0,0 +1,81 @@
import numpy as np
import tensorflow as tf


class SaliencyMap():
    def __init__(self, model):
        """Constructs a Vanilla Gradient Map by computing dy/dx.
        Args:
            model: The TensorFlow model used to evaluate the Gradient Map.
                The model takes an image as input and outputs a probabilities vector.
        """
        self.model = model

    def get_top_predicted_idx(self, image):
        """Outputs the top predicted class for the input image.
        Args:
            image: numpy image array in NHWC format, pre-processed according
                to the defined model standard.
        Returns:
            Index of the top predicted class for the input image.
        """
        preds = self.model.predict(image)
        # top_pred_idx = tf.argmax(preds[0])
        top_pred_idx = 1
        return top_pred_idx

    def get_gradients(self, image):
        """Computes the gradients of the outputs w.r.t. the input image.
        Args:
            image: numpy image array in NHWC format, pre-processed according
                to the defined model standard.
        Returns:
            Gradients of the predictions w.r.t. the image (same shape as the input image).
        """
        image = tf.convert_to_tensor(image)
        top_pred_idx = self.get_top_predicted_idx(image)
        with tf.GradientTape() as tape:
            tape.watch(image)
            preds = self.model(image)
            print("get_gradients, size of preds", np.shape(preds))
            top_class = preds[:]
            print("get_gradients, size of top_class", np.shape(top_class))
        grads = tape.gradient(top_class, image)
        return grads

    def norm_grad(self, grad_x):
        """Normalizes the gradient to the range between 0 and 1
        (for visualization purposes).
        Args:
            grad_x: numpy gradients array.
        Returns:
            Normalized gradient map of shape (1, H, W, 1).
        """
        abs_grads = np.abs(grad_x)
        grad_max_ = np.max(abs_grads, axis=3)[0]
        arr_min, arr_max = np.min(grad_max_), np.max(grad_max_)
        normalized_grad = (grad_max_ - arr_min) / (arr_max - arr_min + 1e-18)
        normalized_grad = normalized_grad.reshape(1, grad_x.shape[1], grad_x.shape[2], 1)
        return normalized_grad

    def get_mask(self, image, tensor_format=False):
        """Returns a saliency mask specific to each method.
        Args:
            image: input image in NHWC format, not batched.
        """
        raise NotImplementedError('A derived class should implement get_mask()')
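The base class leaves get_mask() to subclasses. Below is a minimal sketch (not part of this commit) of a vanilla-gradient subclass built on these helpers; the VanillaGradients name, the model path, and the image shape are illustrative assumptions.

import numpy as np
import tensorflow as tf

from sfransen.Saliency.base import SaliencyMap  # package path assumed to match the file location


class VanillaGradients(SaliencyMap):
    """Plain dy/dx saliency: the normalized absolute gradient of the prediction."""

    def get_mask(self, image, tensor_format=False):
        # gradient of the model output w.r.t. the input batch (same NHWC shape)
        grads = self.get_gradients(image)
        # collapse the channel axis and min-max scale to [0, 1] for visualization
        mask = self.norm_grad(np.asarray(grads))
        return tf.convert_to_tensor(mask) if tensor_format else mask


# hypothetical usage with a trained Keras model and a random NHWC test batch
# model = tf.keras.models.load_model("path/to/model.h5")
# image = np.random.rand(1, 192, 192, 3).astype(np.float32)
# saliency = VanillaGradients(model).get_mask(image)   # shape (1, 192, 192, 1)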


@@ -0,0 +1,63 @@
import numpy as np
import tensorflow as tf
from tensorflow.keras.applications import densenet
from base import SaliencyMap


class IntegratedGradients(SaliencyMap):
    def get_mask(self, image, baseline=None, num_steps=2):
        """Computes Integrated Gradients for a predicted label.
        Args:
            image (ndarray): Original image
            baseline (ndarray): The baseline image to start with for interpolation
            num_steps: Number of interpolation steps between the baseline
                and the input used in the computation of integrated gradients. These
                steps alone determine the integral approximation error. By default,
                num_steps is set to 2.
        Returns:
            Integrated gradients w.r.t. the input image
        """
        # 1. If a baseline is not provided, start with a black image
        # of the same size as the input image.
        if baseline is None:
            img_size = image.shape
            baseline = np.zeros(img_size).astype(np.float32)
        else:
            baseline = baseline.astype(np.float32)
        print(">>>> step ONE completed")

        # 2. Interpolate linearly between the baseline and the input image.
        img_input = image
        top_pred_idx = self.get_top_predicted_idx(image)
        interpolated_image = [
            baseline + (i / num_steps) * (img_input - baseline)
            for i in range(num_steps + 1)
        ]
        interpolated_image = np.vstack(interpolated_image).astype(np.float32)
        print(">>>> step TWO completed")

        # 3. Compute the gradients for every interpolated image.
        grads = []
        for i, img in enumerate(interpolated_image):
            print("number of image:", i)
            print("size of image:", np.shape(img))
            img = tf.expand_dims(img, axis=0)
            grad = self.get_gradients(img)
            print("size of grad is:", np.shape(grad))
            grads.append(grad[0])
        grads = tf.convert_to_tensor(grads, dtype=tf.float32)
        print(">>>> step THREE completed")

        # 4. Approximate the integral using the trapezoidal rule
        grads = (grads[:-1] + grads[1:]) / 2.0
        avg_grads = tf.reduce_mean(grads, axis=0)
        # tf.reduce_mean(grads, axis=(0, 1, 2, 3))
        print(">>>> step FOUR completed")

        # 5. Calculate integrated gradients and return
        integrated_grads = (img_input - baseline) * avg_grads
        print(">>>> step FIVE completed")
        return integrated_grads
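For reference, a hedged sketch of driving this class end to end: interpolation, trapezoidal averaging, and scaling happen inside get_mask() as above, while the module path, model file, step count, and image shape below are assumptions for illustration, not taken from the commit.

import numpy as np
import tensorflow as tf

from sfransen.Saliency.integrated_gradients import IntegratedGradients  # module name assumed


def integrated_gradients_heatmap(model, image, num_steps=25):
    """Attribute one pre-processed NHWC image batch and return a [0, 1] heatmap."""
    ig = IntegratedGradients(model)
    # black-image baseline (the default) and more interpolation steps than the default of 2
    attribution = ig.get_mask(image, baseline=None, num_steps=num_steps)
    # reuse the base-class normalization: abs, max over channels, min-max scaling
    return ig.norm_grad(np.asarray(attribution))


# hypothetical call with a saved model and a random single-image batch
# model = tf.keras.models.load_model("path/to/model.h5")
# heatmap = integrated_gradients_heatmap(model, np.random.rand(1, 192, 192, 3).astype(np.float32))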


@@ -0,0 +1,6 @@
# from .csv2graph import *
# from .data_path_rename import *
# from .dump_params import *
# from .Make_overlay import *
# from .pred_label_images import *
# from .utils_quintin import *