Added 10. in scripts, which calculates FROC with Chris's method and Optuna

This commit is contained in:
Stefan 2022-04-28 10:11:10 +02:00
parent cd86205896
commit dac43d7429
5 changed files with 455 additions and 102 deletions

.gitignore
View File

@@ -149,3 +149,5 @@ cython_debug/
 sqliteDB/
 *.db
 /train_output/
+/umcglib/
+/k2s_umcg/

View File

@@ -0,0 +1,318 @@
from umcglib.froc import calculate_froc, plot_multiple_froc, partial_auc
from umcglib.binarize import dynamic_threshold
import optuna
import sqlite3
from sfransen.utils_quintin import *
from os import path
from sfransen.DWI_exp.preprocessing_function import preprocess
import SimpleITK as sitk
import numpy as np
from sfransen.DWI_exp.callbacks import dice_coef
from sfransen.DWI_exp.losses import weighted_binary_cross_entropy
from tensorflow.keras.models import load_model
from tqdm import tqdm
from optuna.samplers import TPESampler
import argparse
import shutil
import os
parser = argparse.ArgumentParser(
description='Calculate the froc metrics and store in froc_metrics.yml')
parser.add_argument('-experiment',
help='Title of experiment')
parser.add_argument('-series', '-s',
metavar='[series_name]', required=True, nargs='+',
help='List of series to include')
parser.add_argument('-fold',
default='',
help='Fold suffix appended to the study database name')
args = parser.parse_args()
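# Helper: returns True if a table with the given name exists in the SQLite
# database at db_path. Used below to decide whether an Optuna study has
# already been created in the study database.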
def does_table_exist(tablename: str, db_path: str):
conn = sqlite3.connect(db_path)
c = conn.cursor()
#get the count of tables with the name
c.execute(f''' SELECT count(name) FROM sqlite_master WHERE type='table' AND name='{tablename}' ''')
does_exist = False
#if the count is 1, then table exists
if c.fetchone()[0] == 1:
print(f"Table '{tablename}' exists.")
does_exist = True
else:
print(f"Table '{tablename}' does not exist.")
#commit the changes to db
conn.commit()
#close the connection
conn.close()
return does_exist
def load_or_create_study(
is_new_study: bool,
study_dir: str,
):
# Create an Optuna study if it does not exist.
storage = f"sqlite:///{study_dir}/{DB_FNAME}"
if is_new_study:
print(f"Creating a NEW study. With name: {storage}")
study = optuna.create_study(storage=storage,
study_name=study_dir,
direction='maximize',
sampler=TPESampler(n_startup_trials=N_STARTUP_TRIALS))
else:
print(f"LOADING study {storage} from database file.")
study = optuna.load_study(storage=storage,
study_name=study_dir)
return study
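# Optuna objective: sample a dynamic-threshold factor and a minimum confidence,
# recompute the FROC curve with those settings and return the partial AUC
# between 0.1 and 2.5 false positives per patient (the value being maximized).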
def p_auc_froc_obj(trial, y_true_val, y_pred_val):
dyn_thresh = trial.suggest_float('dyn_thresh', 0.0, 1.0)
min_conf = trial.suggest_float('min_conf', 0.0, 1.0)
stats = calculate_froc(y_true=y_true_val,
y_pred=y_pred_val,
preprocess_func=dynamic_threshold,
dynamic_threshold_factor=dyn_thresh,
minimum_confidence=min_conf)
sens, fpp = stats['sensitivity'], stats['fp_per_patient']
p_auc_froc = partial_auc(sens, fpp, low=0.1, high=2.5)
print(f"dyn_threshold: {dyn_thresh}, min_conf: {min_conf}")
print(f"Trial {trial.number} pAUC FROC: {p_auc_froc}")
return p_auc_froc
def convert_np_to_list(flat_numpy_arr):
ans = []
for elem in flat_numpy_arr:
ans.append(float(elem))
return ans
# >>>>>>>>> main <<<<<<<<<<<<<
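# Experiment configuration: the Optuna database name, model path, YAML output
# directory and input shape are all derived from the -experiment, -series and
# -fold command-line arguments.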
# DB_FNAME = "calc_exp_t2_b1400_adc.db"
num_trials = 50
N_STARTUP_TRIALS = 10
SERIES = args.series
series_ = '_'.join(SERIES)
EXPERIMENT = args.experiment
DB_FNAME = f'{EXPERIMENT}_{series_}_{args.fold}.db'
MODEL_PATH = f'./../train_output/{EXPERIMENT}_{series_}/models/{EXPERIMENT}_{series_}.h5'
YAML_DIR = f'./../train_output/{EXPERIMENT}_{series_}'
DATA_DIR = "./../data/Nijmegen paths/"
TARGET_SPACING = (0.5, 0.5, 3)
INPUT_SHAPE = (192, 192, 24, len(SERIES))
IMAGE_SHAPE = INPUT_SHAPE[:3]
DATA_SPLIT_INDEX = read_yaml_to_dict(f'./../data/Nijmegen paths/train_val_test_idxs.yml')
TEST_INDEX = DATA_SPLIT_INDEX['val_set0']
print("test test_index",TEST_INDEX[:5])
############ load data and preprocess / old method
# print(">>>>> read images <<<<<<<<<<")
# image_paths = {}
# for s in SERIES:
# with open(path.join(DATA_DIR, f"{s}.txt"), 'r') as f:
# image_paths[s] = [l.strip() for l in f.readlines()]
# with open(path.join(DATA_DIR, f"seg.txt"), 'r') as f:
# seg_paths = [l.strip() for l in f.readlines()]
# num_images = len(seg_paths)
# images = []
# images_list = []
# segmentations = []
# # Read and preprocess each of the paths for each series, and the segmentations.
# for img_idx in tqdm(range(len(TEST_INDEX))): #[:40]): #for less images
# # print('images number',[TEST_INDEX[img_idx]])
# img_s = {f'{s}': sitk.ReadImage(image_paths[s][TEST_INDEX[img_idx]], sitk.sitkFloat32) for s in SERIES}
# seg_s = sitk.ReadImage(seg_paths[TEST_INDEX[img_idx]], sitk.sitkFloat32)
# img_n, seg_n = preprocess(img_s, seg_s,
# shape=IMAGE_SHAPE, spacing=TARGET_SPACING)
# for seq in img_n:
# images.append(img_n[f'{seq}'])
# images_list.append(images)
# images = []
# segmentations.append(seg_n)
# images_list = np.transpose(images_list, (0, 2, 3, 4, 1))
# print("shape of segmentations is",np.shape(segmentations))
# print('>>>>> size image_list nmr 2:', np.shape(images_list), '. equal to: (5, 192, 192, 24, 3)?')
# ########### load module ##################
# print(' >>>>>>> LOAD MODEL <<<<<<<<<')
# dependencies = {
# 'dice_coef': dice_coef,
# 'weighted_cross_entropy_fn':weighted_binary_cross_entropy
# }
# reconstructed_model = load_model(MODEL_PATH, custom_objects=dependencies)
# # reconstructed_model.summary(line_length=120)
# # make predictions on all TEST_INDEX
# print(' >>>>>>> START prediction <<<<<<<<<')
# predictions_blur = reconstructed_model.predict(images_list, batch_size=1)
# ############# preprocess #################
# # preprocess predictions by removing the blur and making individual blobs
# print('>>>>>>>> START preprocess')
# # def move_dims(arr):
# # # UMCG numpy dimensions convention: dims = (batch, width, heigth, depth)
# # # Joeran numpy dimensions convention: dims = (batch, depth, heigth, width)
# # arr = np.moveaxis(arr, 3, 1)
# # arr = np.moveaxis(arr, 3, 2)
# # return arr
# # # Joeran has his numpy arrays ordered differently.
# # predictions_blur = move_dims(np.squeeze(predictions_blur))
# # segmentations = move_dims(np.squeeze(segmentations))
# y_pred_val = np.squeeze(predictions_blur)
# y_true_val = segmentations
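# If no study database exists yet for this experiment/series/fold, start from a
# copy of the template database; a new study is created only when the copied
# database has no 'trials' table yet, otherwise the existing study is loaded.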
study_dir = f"./../sqliteDB/optuna_dbs"
check_for_file = path.isfile(f"{study_dir}/{DB_FNAME}")
if not check_for_file:
shutil.copyfile(f"{study_dir}/dyn_thres_min_conf_opt_OG.db", f"{study_dir}/{DB_FNAME}")
table_exists = does_table_exist('trials', f"{study_dir}/{DB_FNAME}")
study = load_or_create_study(is_new_study=not table_exists, study_dir=study_dir)
# # dyn_thresh = study.best_trial.params['dyn_thresh']
# # min_conf = study.best_trial.params['min_conf']
# dyn_thresh = 0.4
# min_conf = 0.01
# # print("step 1:",np.shape(y_pred_val))
# stats = calculate_froc(y_true=y_true_val,
# y_pred=y_pred_val,
# preprocess_func=dynamic_threshold,
# dynamic_threshold_factor=dyn_thresh,
# minimum_confidence=min_conf)
# sens, fpp = stats['sensitivity'], stats['fp_per_patient']
# p_auc = partial_auc(sens, fpp, low=0.1, high=2.5)
# print(f"the p_auc with old setting is: {p_auc}" )
# # Try to find the best value for the dynamic threshold and min_confidence
# opt_func = lambda trail: p_auc_froc_obj(trail, y_true_val, y_pred_val)
# study.optimize(opt_func, n_trials=num_trials)
dyn_thresh = study.best_trial.params['dyn_thresh']
min_conf = study.best_trial.params['min_conf']
print(f"done. best dyn_thresh: {dyn_thresh} . Best min_conf: {min_conf}")
########## dump dict to yaml of best froc curve #############
######## wrap this in a function ###############
DATA_SPLIT_INDEX = read_yaml_to_dict(f'./../data/Nijmegen paths/train_val_test_idxs.yml')
TEST_INDEX = DATA_SPLIT_INDEX['test_set0']
print("test test_index",TEST_INDEX[:5])
############ load data and preprocess / old method
print(">>>>> read images <<<<<<<<<<")
image_paths = {}
for s in SERIES:
with open(path.join(DATA_DIR, f"{s}.txt"), 'r') as f:
image_paths[s] = [l.strip() for l in f.readlines()]
with open(path.join(DATA_DIR, f"seg.txt"), 'r') as f:
seg_paths = [l.strip() for l in f.readlines()]
num_images = len(seg_paths)
images = []
images_list = []
segmentations = []
# Read and preprocess each of the paths for each series, and the segmentations.
for img_idx in tqdm(range(len(TEST_INDEX))): #[:40]): #for less images
# print('images number',[TEST_INDEX[img_idx]])
img_s = {f'{s}': sitk.ReadImage(image_paths[s][TEST_INDEX[img_idx]], sitk.sitkFloat32) for s in SERIES}
seg_s = sitk.ReadImage(seg_paths[TEST_INDEX[img_idx]], sitk.sitkFloat32)
img_n, seg_n = preprocess(img_s, seg_s,
shape=IMAGE_SHAPE, spacing=TARGET_SPACING)
for seq in img_n:
images.append(img_n[f'{seq}'])
images_list.append(images)
images = []
segmentations.append(seg_n)
images_list = np.transpose(images_list, (0, 2, 3, 4, 1))
print("shape of segmentations is",np.shape(segmentations))
print('>>>>> size image_list nmr 2:', np.shape(images_list), '. equal to: (5, 192, 192, 24, 3)?')
########### load module ##################
print(' >>>>>>> LOAD MODEL <<<<<<<<<')
dependencies = {
'dice_coef': dice_coef,
'weighted_cross_entropy_fn':weighted_binary_cross_entropy
}
reconstructed_model = load_model(MODEL_PATH, custom_objects=dependencies)
# reconstructed_model.summary(line_length=120)
# make predictions on all TEST_INDEX
print(' >>>>>>> START prediction <<<<<<<<<')
predictions_blur = reconstructed_model.predict(images_list, batch_size=1)
############# preprocess #################
# preprocess predictions by removing the blur and making individual blobs
print('>>>>>>>> START preprocess')
# def move_dims(arr):
# # UMCG numpy dimensions convention: dims = (batch, width, heigth, depth)
# # Joeran numpy dimensions convention: dims = (batch, depth, heigth, width)
# arr = np.moveaxis(arr, 3, 1)
# arr = np.moveaxis(arr, 3, 2)
# return arr
# # Joeran has his numpy arrays ordered differently.
# predictions_blur = move_dims(np.squeeze(predictions_blur))
# segmentations = move_dims(np.squeeze(segmentations))
y_pred_val = np.squeeze(predictions_blur)
y_true_val = segmentations
########### end of function ############
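# Recompute the FROC curve on the test set with the best thresholding
# parameters found by the Optuna study and collect the metrics below.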
stats = calculate_froc(y_true=y_true_val,
y_pred=y_pred_val,
preprocess_func=dynamic_threshold,
dynamic_threshold_factor=dyn_thresh,
minimum_confidence=min_conf)
subject_idxs = list(range(len(y_true_val)))
metrics = {
"num_patients": int(stats['num_patients']),
"auroc": float(stats['patient_auc']),
'tpr': convert_np_to_list(stats['roc_tpr']),
'fpr': convert_np_to_list(stats['roc_fpr']),
"roc_true": convert_np_to_list(stats['roc_patient_level_label'][s] for s in subject_idxs),
"roc_pred": convert_np_to_list(stats['roc_patient_level_conf'][s] for s in subject_idxs),
"num_lesions": int(stats['num_lesions']),
"thresholds": convert_np_to_list(stats['thresholds']),
"sensitivity": convert_np_to_list(stats['sensitivity']),
"FP_per_case": convert_np_to_list(stats['fp_per_patient']),
"precision": convert_np_to_list(stats['precision']),
"recall": convert_np_to_list(stats['recall']),
"AP": float(stats['average_precision']),
}
dump_dict_to_yaml(metrics, YAML_DIR, "froc_metrics_optuna_test", verbose=True)
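As a usage sketch (not part of this commit), the dumped metrics can be read back and the partial FROC AUC recomputed over the same 0.1-2.5 FP/patient window, using the helpers the scripts in this commit already import (assuming dump_dict_to_yaml wrote froc_metrics_optuna_test.yml into YAML_DIR as defined above):

    from sfransen.utils_quintin import read_yaml_to_dict
    from umcglib.froc import partial_auc

    metrics = read_yaml_to_dict(f"{YAML_DIR}/froc_metrics_optuna_test.yml")
    pfroc = partial_auc(metrics["sensitivity"], metrics["FP_per_case"], low=0.1, high=2.5)
    print(f"partial FROC AUC (0.1-2.5 FP/patient): {pfroc}")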

View File

@@ -7,7 +7,8 @@ import multiprocessing
 from functools import partial
 import os
 from os import path
+from tqdm import tqdm
+import argparse

 from sfransen.utils_quintin import *
 from sfransen.DWI_exp.helpers import *

View File

@@ -3,7 +3,7 @@ from sfransen.utils_quintin import *
 import matplotlib.pyplot as plt
 import argparse
 import matplotlib.ticker as tkr
-from sfransen.FROC.p_auc import partial_auc
+from umcglib.froc.p_auc import partial_auc

 parser = argparse.ArgumentParser(
     description='Visualise froc results')
@@ -15,6 +15,9 @@ parser.add_argument('--experiment', '-s',
     metavar='[series_name]', required=True, nargs='+',
     help='List of series to include, must correspond with' +
         "path files in ./data/")
+parser.add_argument('-yaml_metric',
+    help='Suffix of the froc_metrics_<suffix>.yml file ' +
+        'to load for each experiment')
 args = parser.parse_args()

 if args.comparison:
@@ -24,6 +27,7 @@ else:
     colors = ['r','b','g','k','y','c']
     plot_type = ['-','-','-','-','-','-']

+yaml_metric = args.yaml_metric
 experiments = args.experiment
 print(experiments)
 experiment_path = []
@@ -34,11 +38,11 @@ paufroc = []
 fig = plt.figure(1)
 ax = fig.add_subplot(111)
 for idx in range(len(args.experiment)):
-    experiment_path = f'./../train_output/{experiments[idx]}/froc_metrics_focal_10.yml'
+    experiment_path = f'./../train_output/{experiments[idx]}/froc_metrics_{yaml_metric}.yml'
     experiment_metrics = read_yaml_to_dict(experiment_path)
-    pfroc = partial_auc(experiment_metrics["sensitivity"],experiment_metrics["FP_per_case"])
-    paufroc.append(round(pfroc,2)
+    pfroc = partial_auc(experiment_metrics["sensitivity"],experiment_metrics["FP_per_case"],low=0.1, high=2.5)
+    paufroc.append(round(pfroc,2))

     plt.plot(experiment_metrics["FP_per_case"], experiment_metrics["sensitivity"],color=colors[idx],linestyle=plot_type[idx])
     ax.set(xscale="log")
@@ -50,7 +54,7 @@ for idx in range(len(args.experiment)):
     ax.axes.xaxis.set_major_locator(tkr.FixedLocator([0,0.1,1,3]))

 for idx in range(len(args.experiment)):
-    experiment_path = f'./../train_output/{experiments[idx]}/froc_metrics_focal_10.yml'
+    experiment_path = f'./../train_output/{experiments[idx]}/froc_metrics_{yaml_metric}.yml'
     experiment_metrics = read_yaml_to_dict(experiment_path)
     auroc.append(round(experiment_metrics['auroc'],3))

View File

@@ -1,12 +1,17 @@
 import numpy as np
 import SimpleITK as sitk
 import matplotlib.pyplot as plt

-######## load images #############
-path_b50 = '/data/pca-rad/datasets/radboud_new/pat0634/2016/diffusie_cro/b-50/nifti_image.nii.gz'
-path_b400 = '/data/pca-rad/datasets/radboud_new/pat0634/2016/diffusie_cro/b-400/nifti_image.nii.gz'
-path_b800 = '/data/pca-rad/datasets/radboud_new/pat0634/2016/diffusie_cro/b-800/nifti_image.nii.gz'
-path_b1400 = '/data/pca-rad/datasets/radboud_new/pat0634/2016/diffusie_cro/b-1400/nifti_image.nii.gz'
-path_adc = '/data/pca-rad/datasets/radboud_new/pat0634/2016/dADC/nifti_image.nii.gz'
+# ######## load images #############
+# path_b50 = '/data/pca-rad/datasets/radboud_new/pat0634/2016/diffusie_cro/b-50/nifti_image.nii.gz'
+# path_b400 = '/data/pca-rad/datasets/radboud_new/pat0634/2016/diffusie_cro/b-400/nifti_image.nii.gz'
+# path_b800 = '/data/pca-rad/datasets/radboud_new/pat0634/2016/diffusie_cro/b-800/nifti_image.nii.gz'
+# path_b1400 = '/data/pca-rad/datasets/radboud_new/pat0634/2016/diffusie_cro/b-1400/nifti_image.nii.gz'
+# path_adc = '/data/pca-rad/datasets/radboud_new/pat0634/2016/dADC/nifti_image.nii.gz'
+path_adc = '/data/pca-rad/datasets/anonymized_mri/only_nii_directory/110-M-110/2015NB/dADC_0-50-500-1000/702_.nii.gz'
+path_b0 = '/data/pca-rad/datasets/anonymized_mri/only_nii_directory/110-M-110/2015NB/Diffusie/b-0/701_.nii.gz'
+path_b800 = '/data/pca-rad/datasets/anonymized_mri/only_nii_directory/110-M-110/2015NB/Diffusie/b-800/701_.nii.gz'

 # path_b50 = 'X:/sfransen/train_output/adc_exp/b50_true.nii.gz'
 # path_b400 = 'X:/sfransen/train_output/adc_exp/b400_true.nii.gz'
@@ -20,14 +25,14 @@ path_adc = '/data/pca-rad/datasets/radboud_new/pat0634/2016/dADC/nifti_image.nii
 # path_b1400 = '/data/pca-rad/sfransen/train_output/adc_exp/b1400_true.nii.gz'
 # path_adc = '/data/pca-rad/sfransen/train_output/adc_exp/adc_true.nii.gz'

-b50_img = sitk.ReadImage(path_b50, sitk.sitkFloat32)
-b50 = sitk.GetArrayFromImage(b50_img)
-b400_img = sitk.ReadImage(path_b400, sitk.sitkFloat32)
-b400 = sitk.GetArrayFromImage(b400_img)
+b0_img = sitk.ReadImage(path_b0, sitk.sitkFloat32)
+b0 = sitk.GetArrayFromImage(b0_img)
+# b400_img = sitk.ReadImage(path_b400, sitk.sitkFloat32)
+# b400 = sitk.GetArrayFromImage(b400_img)
 b800_img = sitk.ReadImage(path_b800, sitk.sitkFloat32)
 b800 = sitk.GetArrayFromImage(b800_img)
-b1400_img = sitk.ReadImage(path_b1400, sitk.sitkFloat32)
-b1400_original = sitk.GetArrayFromImage(b1400_img)
+# b1400_img = sitk.ReadImage(path_b1400, sitk.sitkFloat32)
+# b1400_original = sitk.GetArrayFromImage(b1400_img)
 adc_img = sitk.ReadImage(path_adc, sitk.sitkFloat32)
 adc_original = sitk.GetArrayFromImage(adc_img)
@@ -39,21 +44,21 @@ def show_img(greyscale_img):
     fig.savefig(path, dpi=300, bbox_inches='tight')

 def calc_adc(b50, b400, b800):
+    "Calculates the adc based on b50, b400 and b800 DWI images/arrays."
     mean_dwi = (50 + 400 + 800) / 3
     mean_si = np.divide(np.add(np.add(np.log(b50), np.log(b400)), np.log(b800)), 3)
     denominator = np.multiply((50 - mean_dwi), np.subtract(np.log(b50), mean_si)) + np.multiply((400 - mean_dwi), np.subtract(np.log(b400), mean_si)) + np.multiply((800 - mean_dwi), np.subtract(np.log(b800), mean_si))
     numerator = np.power((50 - mean_dwi), 2) + np.power((400 - mean_dwi), 2) + np.power((800 - mean_dwi), 2)
     adc_with_zeros = np.divide(denominator, numerator) * -1000000
     adc = np.clip(adc_with_zeros,0,np.amax(adc_with_zeros))
     return adc

-def calc_adc_1(b50,b800):
-    mean_dwi = (50 + 800) / 2
-    mean_si = np.divide(np.add(np.log(b50), np.log(b800)), 2)
-    denominator = np.multiply((50 - mean_dwi), np.subtract(np.log(b50), mean_si)) + np.multiply((800 - mean_dwi), np.subtract(np.log(b800), mean_si))
-    numerator = np.power((50 - mean_dwi), 2) + np.power((800 - mean_dwi), 2)
+def calc_adc_1(b0,b800):
+    mean_dwi = (0 + 800) / 2
+    mean_si = np.divide(np.add(np.log(b0), np.log(b800)), 2)
+    denominator = np.multiply((0 - mean_dwi), np.subtract(np.log(b0), mean_si)) + np.multiply((800 - mean_dwi), np.subtract(np.log(b800), mean_si))
+    numerator = np.power((0 - mean_dwi), 2) + np.power((800 - mean_dwi), 2)
     adc_with_zeros = np.divide(denominator, numerator) * -1000000
     adc = np.clip(adc_with_zeros,0,np.amax(adc_with_zeros))
     return adc
@@ -78,92 +83,115 @@ def calc_adc_3(b400,b800):
     adc = np.clip(adc_with_zeros,0,np.amax(adc_with_zeros))
     return adc

-def calc_high_b(b_value_high,b_value,b_image,ADC_map):
-    high_b = np.multiply(b_image, np.exp(np.multiply(np.subtract(b_value,b_value_high), (np.divide(ADC_map,1000000)))))
+def calc_high_b(b_value_high,b_value,b_image,adc):
+    """
+    Calculates a high b-value image.
+    b_value_high = the required b-value integer
+    b_value = the b-value integer used as reference image
+    b_image = the corresponding image array
+    adc = the corresponding adc array
+    """
+    high_b = np.multiply(b_image, np.exp(np.multiply(np.subtract(b_value,b_value_high), (np.divide(adc,1000000)))))
     return high_b

-adc_50_400_800 = calc_adc(b50,b400,b800)
-adc_50_800 = calc_adc_1(b50,b800)
-adc_50_400 = calc_adc_2(b50,b400)
-adc_400_800 = calc_adc_3(b400,b800)
-
-high_b_1400_50 = calc_high_b(1400,50,b50,adc_50_800)
-high_b_1400_all = calc_high_b(1400,50,b50,adc_50_400_800)
-high_b_1400_400 = calc_high_b(1400,400,b400,adc_50_800)
-high_b_1400_800 = calc_high_b(1400,800,b800,adc_50_800)
-
-fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
-fig.suptitle('ADC calculated with b50 b400 b800')
-ax1.imshow(adc_50_400_800[:][:][13],cmap='gray')
-ax1.set_title('calculated b50 b400 b800')
-ax2.imshow(adc_original[:][:][13],cmap='gray')
-ax2.set_title('original')
-error_map = np.subtract(adc_original[:][:][13],adc_50_800[:][:][13])
-ax3.imshow(error_map,cmap='gray')
-ax3.set_title('error map')
-path = f"ADC_634.png"
-fig.savefig(path, dpi=300, bbox_inches='tight')
-
-fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
-fig.suptitle('ADC calculated with b50 b800')
-ax1.imshow(adc_50_800[:][:][13],cmap='gray')
-ax1.set_title('calculated b50 b800')
-ax2.imshow(adc_original[:][:][13],cmap='gray')
-ax2.set_title('original')
-error_map = np.subtract(adc_original[:][:][13],adc_50_800[:][:][13])
-ax3.imshow(error_map,cmap='gray')
-ax3.set_title('error map')
-path = f"ADC_634_1.png"
-fig.savefig(path, dpi=300, bbox_inches='tight')
-
-fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
-fig.suptitle('Difference between ADC calculation')
-ax1.imshow(adc_50_400_800[:][:][13],cmap='gray')
-ax1.set_title('calculated b50 b400 b800')
-ax2.imshow(adc_50_800[:][:][13],cmap='gray')
-ax2.set_title('calculated b50 b800')
-error_map = np.subtract(adc_50_800[:][:][13],adc_50_400_800[:][:][13])
-ax3.imshow(error_map,cmap='gray')
-ax3.set_title('error map')
-path = f"ADC_634_different_calculation_1.png"
-fig.savefig(path, dpi=300, bbox_inches='tight')
-
-fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
-fig.suptitle('Difference between ADC calculation')
-ax1.imshow(adc_50_800[:][:][13],cmap='gray')
-ax1.set_title('calculated b50 b800')
-ax2.imshow(adc_50_400[:][:][13],cmap='gray')
-ax2.set_title('calculated b50 b400')
-error_map = np.subtract(adc_50_800[:][:][13],adc_50_400[:][:][13])
-ax3.imshow(error_map,cmap='gray')
-ax3.set_title('error map')
-path = f"ADC_634_different_calculation_2.png"
-fig.savefig(path, dpi=300, bbox_inches='tight')
-
-fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
-fig.suptitle('High B calculated with b50 reference and ADC from b50&b800')
-ax1.imshow(high_b_1400_50[:][:][13],cmap='gray')
-ax1.set_title('calculated')
-ax2.imshow(b1400_original[:][:][13],cmap='gray')
-ax2.set_title('original')
-error_map = np.subtract(b1400_original[:][:][13],high_b_1400_50[:][:][13])
-ax3.imshow(error_map,cmap='gray')
-ax3.set_title('error map')
-path = f"HighB_b50_b800_634_1.png"
-fig.savefig(path, dpi=300, bbox_inches='tight')
-
-fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
-fig.suptitle('High B calculated with b50 reference and ADC from b50&b400&b800')
-ax1.imshow(high_b_1400_50[:][:][13],cmap='gray')
-ax1.set_title('calculated')
-ax2.imshow(b1400_original[:][:][13],cmap='gray')
-ax2.set_title('original')
-error_map = np.subtract(b1400_original[:][:][13],high_b_1400_50[:][:][13])
-ax3.imshow(error_map,cmap='gray')
-ax3.set_title('error map')
-path = f"HighB_b50_b800_634_2.png"
-fig.savefig(path, dpi=300, bbox_inches='tight')
+adc_0_800 = calc_adc_1(b0,b800)
+high_b_1400_0 = calc_high_b(1400,0,b0,adc_0_800)
+
+adc_0_800 = sitk.GetImageFromArray(adc_0_800)
+adc_0_800.CopyInformation(b0_img)
+sitk.WriteImage(adc_0_800, f"../train_output/test.nii.gz")
+
+adc_50_800 = sitk.GetImageFromArray(adc_50_800)
+adc_50_800.CopyInformation(adc_img)
+sitk.WriteImage(adc_50_800, f"../train_output/adc_exp/adc_copied_with_adc.nii.gz")
+
+#
+quit()
+
+# adc_50_400_800 = calc_adc(b50,b400,b800)
+# adc_50_800 = calc_adc_1(b50,b800)
+# adc_50_400 = calc_adc_2(b50,b400)
+# adc_400_800 = calc_adc_3(b400,b800)
+
+# high_b_1400_50 = calc_high_b(1400,50,b50,adc_50_800)
+# high_b_1400_all = calc_high_b(1400,50,b50,adc_50_400_800)
+
+# high_b_1400_400 = calc_high_b(1400,400,b400,adc_50_800)
+# high_b_1400_800 = calc_high_b(1400,800,b800,adc_50_800)
+
+# fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
+# fig.suptitle('ADC calculated with b50 b400 b800')
+# ax1.imshow(adc_50_400_800[:][:][13],cmap='gray')
+# ax1.set_title('calculated b50 b400 b800')
+# ax2.imshow(adc_original[:][:][13],cmap='gray')
+# ax2.set_title('original')
+# error_map = np.subtract(adc_original[:][:][13],adc_50_800[:][:][13])
+# ax3.imshow(error_map,cmap='gray')
+# ax3.set_title('error map')
+# path = f"ADC_634.png"
+# fig.savefig(path, dpi=300, bbox_inches='tight')
+
+# fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
+# fig.suptitle('ADC calculated with b50 b800')
+# ax1.imshow(adc_50_800[:][:][13],cmap='gray')
+# ax1.set_title('calculated b50 b800')
+# ax2.imshow(adc_original[:][:][13],cmap='gray')
+# ax2.set_title('original')
+# error_map = np.subtract(adc_original[:][:][13],adc_50_800[:][:][13])
+# ax3.imshow(error_map,cmap='gray')
+# ax3.set_title('error map')
+# path = f"ADC_634_1.png"
+# fig.savefig(path, dpi=300, bbox_inches='tight')
+
+# fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
+# fig.suptitle('Difference between ADC calculation')
+# ax1.imshow(adc_50_400_800[:][:][13],cmap='gray')
+# ax1.set_title('calculated b50 b400 b800')
+# ax2.imshow(adc_50_800[:][:][13],cmap='gray')
+# ax2.set_title('calculated b50 b800')
+# error_map = np.subtract(adc_50_800[:][:][13],adc_50_400_800[:][:][13])
+# ax3.imshow(error_map,cmap='gray')
+# ax3.set_title('error map')
+# path = f"ADC_634_different_calculation_1.png"
+# fig.savefig(path, dpi=300, bbox_inches='tight')
+
+# fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
+# fig.suptitle('Difference between ADC calculation')
+# ax1.imshow(adc_50_800[:][:][13],cmap='gray')
+# ax1.set_title('calculated b50 b800')
+# ax2.imshow(adc_50_400[:][:][13],cmap='gray')
+# ax2.set_title('calculated b50 b400')
+# error_map = np.subtract(adc_50_800[:][:][13],adc_50_400[:][:][13])
+# ax3.imshow(error_map,cmap='gray')
+# ax3.set_title('error map')
+# path = f"ADC_634_different_calculation_2.png"
+# fig.savefig(path, dpi=300, bbox_inches='tight')
+
+# fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
+# fig.suptitle('High B calculated with b50 reference and ADC from b50&b800')
+# ax1.imshow(high_b_1400_50[:][:][13],cmap='gray')
+# ax1.set_title('calculated')
+# ax2.imshow(b1400_original[:][:][13],cmap='gray')
+# ax2.set_title('original')
+# error_map = np.subtract(b1400_original[:][:][13],high_b_1400_50[:][:][13])
+# ax3.imshow(error_map,cmap='gray')
+# ax3.set_title('error map')
+# path = f"HighB_b50_b800_634_1.png"
+# fig.savefig(path, dpi=300, bbox_inches='tight')
+
+# fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
+# fig.suptitle('High B calculated with b50 reference and ADC from b50&b400&b800')
+# ax1.imshow(high_b_1400_50[:][:][13],cmap='gray')
+# ax1.set_title('calculated')
+# ax2.imshow(b1400_original[:][:][13],cmap='gray')
+# ax2.set_title('original')
+# error_map = np.subtract(b1400_original[:][:][13],high_b_1400_50[:][:][13])
+# ax3.imshow(error_map,cmap='gray')
+# ax3.set_title('error map')
+# path = f"HighB_b50_b800_634_2.png"
+# fig.savefig(path, dpi=300, bbox_inches='tight')

 # adc_50_400_800 = sitk.GetImageFromArray(adc_50_400_800)
 # adc_50_400_800.CopyInformation(b50_img)
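For reference, the ADC helpers above perform a least-squares fit of the log-signal against the b-values: with mean b-value b_mean and mean log-signal logS_mean, ADC = -1e6 * sum_i[(b_i - b_mean) * (ln(S_i) - logS_mean)] / sum_i[(b_i - b_mean)^2], i.e. minus the fitted slope of ln(S) versus b, scaled by 1e6 as in the code. calc_high_b then extrapolates a synthetic high b-value image from the mono-exponential DWI model: S(b_high) = S(b_ref) * exp((b_ref - b_high) * ADC / 1e6).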