import numpy as np
import tensorflow as tf

from sfransen.Saliency.base import SaliencyMap


class IntegratedGradients(SaliencyMap):
    def get_mask(self, image, baseline=None, num_steps=3):
        """Computes Integrated Gradients for the top predicted label.

        Args:
            image (ndarray): Original input image. The top predicted class
                index is looked up internally via `get_top_predicted_idx`.
            baseline (ndarray): Baseline image to start the interpolation
                from. Defaults to an all-zero (black) image of the same
                shape as `image`.
            num_steps (int): Number of interpolation steps between the
                baseline and the input used to approximate the path
                integral. More steps reduce the integral approximation
                error. Defaults to 3.

        Returns:
            Integrated gradients with the same shape as the input image.
        """
        # If no baseline is provided, start from a black image of the same
        # size as the input image.
        if baseline is None:
            baseline = np.zeros(image.shape).astype(np.float32)
        else:
            baseline = baseline.astype(np.float32)

        img_input = image
        # Determine the top predicted class; the gradients below are taken
        # w.r.t. this prediction.
        top_pred_idx = self.get_top_predicted_idx(image)

        # 1. Linearly interpolate between the baseline and the input.
        interpolated_image = [
            baseline + (i / num_steps) * (img_input - baseline)
            for i in range(num_steps + 1)
        ]
        # Stack the interpolation steps along the first axis.
        interpolated_image = np.vstack(interpolated_image).astype(np.float32)

        # 2. Compute the gradients for each interpolated image.
        grads = []
        for img in interpolated_image:
            img = tf.expand_dims(img, axis=0)
            grad = self.get_gradients(img)
            grads.append(grad[0])
        grads = tf.convert_to_tensor(grads, dtype=tf.float32)

        # 3. Approximate the integral using the trapezoidal rule.
        grads = (grads[:-1] + grads[1:]) / 2.0
        avg_grads = tf.reduce_mean(grads, axis=0)

        # 4. Scale the averaged gradients by the input difference and return.
        integrated_grads = (img_input - baseline) * avg_grads
        return integrated_grads
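

# --- Hypothetical usage sketch (not part of the module) ---
# A minimal sketch of how this class might be driven, assuming the
# SaliencyMap base class is constructed from a compiled Keras model and that
# `image` carries a leading batch axis of size 1 (which matches the
# np.vstack / tf.expand_dims handling in get_mask above). The constructor
# signature, model path, and input shape below are illustrative assumptions,
# not the documented sfransen API.
#
#     import numpy as np
#     from tensorflow.keras.models import load_model
#
#     model = load_model("path/to/trained_model.h5")       # hypothetical path
#     ig = IntegratedGradients(model)                       # assumed constructor
#     image = np.random.rand(1, 192, 192, 3).astype(np.float32)  # dummy input
#     mask = ig.get_mask(image, num_steps=50)               # more steps -> smaller error
#     print(mask.shape)                                     # same shape as `image`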