adding kalman
parent 8196e39547
commit ccf35cea16
kalman/aorta.yaml (Executable file, 133 lines added)
@@ -0,0 +1,133 @@
mesh: '/home/yeye/Desktop/kalman/meshes/coaortaH1.h5'

# Physical parameters of the fluid
fluid:
  density: 1.2
  dynamic_viscosity: 0.035

io:
  write_path: 'results/aorta/'
  restart:
    path: ''        # './projects/nse_coa3d/results/test_restart2/'
    time: 0
  write_xdmf: True
  write_checkpoints: True
  write_hdf5_timeseries: False
  write_velocity: 'update'   # tentative

boundary_conditions:
  -
    id: 1
    type: 'dirichlet'
    value: ['0','0','0']
  -
    id: 2
    type: 'dirichlet'
    value: ['0','0','-U*sin(DOLFIN_PI*t/Th)*(t<=Th) + (Th<t)*(-3.67949466208*U*sin(9*DOLFIN_PI*t/Th)*exp(-t*10))']
    parameters:
      U: 30
      Th: 0.35
      t: 0
  -
    id: 3
    type: 'windkessel'
    value: [10,0.01,1000]
    p0: [47,1333.223874]
  -
    id: 4
    type: 'windkessel'
    value: [250,0.0001,8000]
    p0: [47,1333.223874]
  -
    id: 5
    type: 'windkessel'
    value: [250,0.0001,8000]
    p0: [47,1333.223874]
  -
    id: 6
    type: 'windkessel'
    value: [250,0.0001,8000]
    p0: [47,1333.223874]

timemarching:
  velocity_pressure_coupling: 'fractionalstep'   # monolithic, fractionalstep

  monolithic:
    timescheme: 'gmp'   # generalized midpoint, steady FIXME TODO
    theta: 1            # 1: Euler, 0.5: implicit midpoint rule (one-legged)
    nonlinear:
      method: 'constant_extrapolation'   # constant_extrapolation, linear_extrapolation, newton, picard, snes
      maxit: 20
      init_steps: 30
      use_aitken: 1   # 0: False, 1: Picard only, 2: all
      report: 1       # 0: None, 1: residuals, 2: residuals and energy (inflow/driving/forcing via ESSENTIAL Dbcs!)
      atol: 1.e-6     # note: dot required!!
      rtol: 1.e-16
      stol: 0.0

  fractionalstep:
    scheme: 'CT'   # CT, IPCS
    coupled_velocity: False   # False faster, True needed if robin_bc implicit
    robin_bc_velocity_scheme: 'implicit'   # explicit, semi-implicit, implicit
    transpiration_bc_projection: 'robin'   # robin, dirichlet
    flux_report_normalize_boundary: 1

  T: 0.8   # end time
  dt: 0.01
  write_dt: 0.04
  checkpoint_dt: 0.04   # <= 0: only last; else value + last
  report: 1   # 0: print nothing, 1: print time step and writeout, 2: 1 + flux

# solver setup
fem:
  velocity_space: p1   # p1 p1b/p1+ p2
  pressure_space: p1   # p1 p0/dg0 dg1

  strain_symmetric: False
  convection_skew_symmetric: True   # aka Temam term
  stabilization:
    forced_normal:
      enabled: True
      boundaries: [6]
      gamma: 10
    backflow_boundaries: [3,4,5,6]
    streamline_diffusion:
      enabled: False
      parameter: 'standard'    # standard, shakib, codina, klr
      length_scale: 'metric'   # average, max, metric
      parameter_element_constant: True
      Cinv: ~
    monolithic:
      infsup: 'pspg'   # pspg, pressure-stabilization
      graddiv: False
      consistent: False
      pressure_stab_constant: 1.

  fix_pressure: False
  fix_pressure_point: [0., 0., 0.]

linear_solver:
  method: 'lu'

estimation:
  boundary_conditions:
    -
      id: 2
      type: 'dirichlet'
      parameters: 'U'
      initial_stddev: 1

  measurements:
    -
      mesh: '/home/yeye/Desktop/kalman/meshes/coaortaH1.h5'
      fe_degree: 1
      xdmf_file: 'results/aorta/measurements/u_all.xdmf'
      file_root: 'results/aorta/measurements/u{i}.h5'
      indices: 0   # indices of checkpoints to be processed. 0 == all
      velocity_direction: ~
      noise_stddev: 3.5   # standard deviation of Gaussian noise

  roukf:
    particles: 'simplex'   # unique or simplex
    observation_operator: 'postprocessing'   # state or postprocessing
    reparameterize: True
kalman/channel2d.yaml (Executable file, 153 lines added)
@@ -0,0 +1,153 @@
mesh: '/home/yeye/Desktop/kalman/meshes/channel2d.h5'

# Physical parameters of the fluid
fluid:
  density: 1.2
  dynamic_viscosity: 0.035

io:
  write_path: 'results/channel2d/'
  restart:
    path: ''        # './projects/nse_coa3d/results/test_restart2/'
    time: 0
  write_xdmf: True
  write_checkpoints: True
  write_hdf5_timeseries: False
  write_velocity: 'update'   # update or tentative

boundary_conditions:
  -
    id: 1
    type: 'dirichlet'
    value: ['0','0']
  -
    id: 2
    type: 'dirichlet'
    value: ['U*(1- (x[1]-1)*(x[1]-1) )*sin(DOLFIN_PI*t/T)','0']
    parameters:
      U: 30
      T: 0.9
      t: 0
  -
    id: 3
    type: 'neumann'
    value: '0'

ale:
  type: 'manual'
  io:
    # read XDMF files from external solver
    read_checkpoints: False   # True, False
    # path where displacement files are stored
    read_path:
    # origin mesh path
    mesh_path:
    # fem space of solution files
    fem_type: p1

  # Settings of the time integration method
  timemarching:
    # Time step for reading the input file
    read_dt:   # 0.05
  fem:
    # function spaces for the displacement
    # options available: p1 p2
    displacement_space: p1
  lifting:
    type: 'elastic_element'   # 'harmonic', 'elastic', 'elastic_element'
    # when choosing 'harmonic', no parameters are required (ignored);
    # otherwise, they must be specified below.
    parameters:
      mu: 0.0   # float value e.g. 1., 0.1, 0.01
  # deformations will be read whenever 'manual' is chosen
  # if 'external' is chosen, the deformations list will be OMITTED!
  deformations:
    # The dashes separate the list entries
    -
      id: 1
      type: 'dirichlet'
      value: ['0.', '0.']

timemarching:
  velocity_pressure_coupling: 'fractionalstep'   # monolithic, fractionalstep

  monolithic:
    timescheme: 'gmp'   # generalized midpoint, steady FIXME TODO
    theta: 1            # 1: Euler, 0.5: implicit midpoint rule (one-legged)
    nonlinear:
      method: 'constant_extrapolation'   # constant_extrapolation, linear_extrapolation, newton, picard, snes
      maxit: 20
      init_steps: 30
      use_aitken: 1   # 0: False, 1: Picard only, 2: all
      report: 1       # 0: None, 1: residuals, 2: residuals and energy (inflow/driving/forcing via ESSENTIAL Dbcs!)
      atol: 1.e-6     # note: dot required!!
      rtol: 1.e-16
      stol: 0.0

  fractionalstep:
    scheme: 'CT'   # CT, IPCS
    coupled_velocity: False   # False faster, True needed if robin_bc implicit
    robin_bc_velocity_scheme: 'implicit'   # explicit, semi-implicit, implicit
    transpiration_bc_projection: 'robin'   # robin, dirichlet
    flux_report_normalize_boundary: 1

  T: 0.8   # end time
  dt: 0.01
  write_dt: 0.04
  checkpoint_dt: 0.04   # <= 0: only last; else value + last
  report: 1   # 0: print nothing, 1: print time step and writeout, 2: 1 + flux

# solver setup
fem:
  velocity_space: p1   # p1 p1b/p1+ p2
  pressure_space: p1   # p1 p0/dg0 dg1

  strain_symmetric: False
  convection_skew_symmetric: True   # aka Temam term
  stabilization:
    forced_normal:
      enabled: False
      boundaries: [6]
      gamma: 10
    backflow_boundaries: []
    streamline_diffusion:
      enabled: False
      parameter: 'standard'    # standard, shakib, codina, klr
      length_scale: 'metric'   # average, max, metric
      parameter_element_constant: True
      Cinv: ~
    monolithic:
      infsup: 'pspg'   # pspg, pressure-stabilization
      graddiv: False
      consistent: False
      pressure_stab_constant: 1.

  fix_pressure: False
  fix_pressure_point: [0., 0.]

linear_solver:
  method: 'lu'

estimation:
  boundary_conditions:
    -
      id: 2
      type: 'dirichlet'
      parameters: 'U'
      initial_stddev: 1

  measurements:
    -
      mesh: '/home/yeye/Desktop/kalman/meshes/channel2d.h5'
      fe_degree: 0
      xdmf_file: 'results/channel2d/measurements/u_all.xdmf'
      file_root: 'results/channel2d/measurements/u{i}.h5'
      indices: 0   # indices of checkpoints to be processed. 0 == all
      velocity_direction: ~
      noise_stddev: 1.0   # standard deviation of Gaussian noise

  roukf:
    particles: 'simplex'   # unique or simplex
    observation_operator: 'postprocessing'   # state or postprocessing
    reparameterize: True
kalman/channel3d.yaml (Executable file, 155 lines added)
@@ -0,0 +1,155 @@
mesh: '/home/yeye/Desktop/kalman/meshes/channel3d.h5'

# Physical parameters of the fluid
fluid:
  density: 1.2
  dynamic_viscosity: 0.035

io:
  write_path: 'results/channel3d/'
  restart:
    path: ''        # './projects/nse_coa3d/results/test_restart2/'
    time: 0
  write_xdmf: True
  write_checkpoints: True
  write_hdf5_timeseries: False
  write_velocity: 'update'   # update or tentative

ale:
  type: 'manual'
  io:
    # read XDMF files from external solver
    read_checkpoints: False   # True, False
    # path where displacement files are stored
    read_path:
    # origin mesh path
    mesh_path:
    # fem space of solution files
    fem_type: p1

  # Settings of the time integration method
  timemarching:
    # Time step for reading the input file
    read_dt:   # 0.05
  fem:
    # function spaces for the displacement
    # options available: p1 p2
    displacement_space: p1
  lifting:
    type: 'elastic_element'   # 'harmonic', 'elastic', 'elastic_element'
    # when choosing 'harmonic', no parameters are required (ignored);
    # otherwise, they must be specified below.
    parameters:
      mu: 0.0   # float value e.g. 1., 0.1, 0.01
  # deformations will be read whenever 'manual' is chosen
  # if 'external' is chosen, the deformations list will be OMITTED!
  deformations:
    # The dashes separate the list entries
    -
      id: 1
      type: 'dirichlet'
      value: ['0.', '0.', '0.']

boundary_conditions:
  -
    id: 1
    type: 'dirichlet'
    value: ['0','0','0']
  -
    id: 2
    type: 'dirichlet'
    value: ['0','0','U*(1-x[0]*x[0] - x[1]*x[1])*sin(DOLFIN_PI*t/T)']
    parameters:
      U: 30
      T: 0.9
      t: 0
  -
    id: 3
    type: 'neumann'
    value: '0'

timemarching:
  velocity_pressure_coupling: 'fractionalstep'   # monolithic, fractionalstep

  monolithic:
    timescheme: 'gmp'   # generalized midpoint, steady FIXME TODO
    theta: 1            # 1: Euler, 0.5: implicit midpoint rule (one-legged)
    nonlinear:
      method: 'constant_extrapolation'   # constant_extrapolation, linear_extrapolation, newton, picard, snes
      maxit: 20
      init_steps: 30
      use_aitken: 1   # 0: False, 1: Picard only, 2: all
      report: 1       # 0: None, 1: residuals, 2: residuals and energy (inflow/driving/forcing via ESSENTIAL Dbcs!)
      atol: 1.e-6     # note: dot required!!
      rtol: 1.e-16
      stol: 0.0

  fractionalstep:
    scheme: 'CT'   # CT, IPCS
    coupled_velocity: False   # False faster, True needed if robin_bc implicit
    robin_bc_velocity_scheme: 'implicit'   # explicit, semi-implicit, implicit
    transpiration_bc_projection: 'robin'   # robin, dirichlet
    flux_report_normalize_boundary: 1

  T: 0.8   # end time
  dt: 0.01
  write_dt: 0.04
  checkpoint_dt: 0.04   # <= 0: only last; else value + last
  report: 1   # 0: print nothing, 1: print time step and writeout, 2: 1 + flux

# solver setup
fem:
  velocity_space: p1   # p1 p1b/p1+ p2
  pressure_space: p1   # p1 p0/dg0 dg1

  strain_symmetric: False
  convection_skew_symmetric: True   # aka Temam term
  stabilization:
    forced_normal:
      enabled: False
      boundaries: [6]
      gamma: 10
    backflow_boundaries: []
    streamline_diffusion:
      enabled: False
      parameter: 'standard'    # standard, shakib, codina, klr
      length_scale: 'metric'   # average, max, metric
      parameter_element_constant: True
      Cinv: ~
    monolithic:
      infsup: 'pspg'   # pspg, pressure-stabilization
      graddiv: False
      consistent: False
      pressure_stab_constant: 1.

  fix_pressure: False
  fix_pressure_point: [0., 0., 0.]

linear_solver:
  method: 'lu'

estimation:
  boundary_conditions:
    -
      id: 2
      type: 'dirichlet'
      parameters: 'U'
      initial_stddev: 1

  measurements:
    -
      mesh: '/home/yeye/Desktop/kalman/meshes/channel3d.h5'
      fe_degree: 0
      xdmf_file: 'results/channel3d/measurements/u_all.xdmf'
      file_root: 'results/channel3d/measurements/u{i}.h5'
      indices: 0   # indices of checkpoints to be processed. 0 == all
      velocity_direction: ~
      noise_stddev: 1.0   # standard deviation of Gaussian noise

  roukf:
    particles: 'simplex'   # unique or simplex
    observation_operator: 'postprocessing'   # state or postprocessing
    reparameterize: True
kalman/compute_solution_errors.py (Normal file, 185 lines added)
@@ -0,0 +1,185 @@
from dolfin import *
import numpy as np
from common import inout
from pathlib import Path
import argparse


def get_indices_glob(path):
    path_all = list(Path().glob(path.format(i='*')))
    indices = sorted(int(str(s).split('/')[-2]) for s in path_all)
    return indices


def find_checkpoints(options, path_checkpoint):

    indices = options['estimation']['measurements'][0]['indices']

    # look for u.h5 checkpoints
    chkpt_root = str(Path(path_checkpoint).joinpath('{i}/u.h5'))

    # if indices were given in the input file, check for u.h5 or X0.h5 checkpoints
    if indices:
        if not Path(chkpt_root.format(i=indices[0])).is_file():
            chkpt_root = str(Path(path_checkpoint).joinpath('{i}/X0.h5'))
            if not Path(chkpt_root.format(i=indices[0])).is_file():
                raise Exception('No checkpoints found in folder ' + chkpt_root)

    else:

        # get indices from folder names of u.h5 checkpoints
        indices = get_indices_glob(chkpt_root)

        # if no indices were found, look for X0.h5 files
        if not indices:
            chkpt_root = str(Path(path_checkpoint).joinpath('{i}/X0.h5'))
            indices = get_indices_glob(chkpt_root)

        # still no indices? raise Exception
        if not indices:
            print(chkpt_root)
            raise Exception('No checkpoint indices found')

    dt = options['timemarching']['dt']
    times = dt*np.array(indices)

    if MPI.rank(MPI.comm_world) == 0:
        print('indices: \n')
        print('\t', indices)
        print('times: \n')
        print('\t', times)

    files = [chkpt_root.format(i=i) for i in indices]

    # check if all files are found
    for f in files:
        if not Path(f).is_file():
            raise FileNotFoundError(f)

    return indices, times, files


def get_h5_fun_name(file):
    if 'X0.h5' in file:
        fun = '/X'
    else:
        fun = '/u'

    return fun


def filter_indices(ind1, ind2, files1, files2):
    ''' Filter indices and files such that only matching files and indices
    remain.

    Args:
        ind1 (list): list of indices of dataset 1
        ind2 (list): list of indices of dataset 2
        files1 (list): list of files of dataset 1
        files2 (list): list of files of dataset 2
    '''
    ind = []
    files_filt1 = []
    files_filt2 = []
    for i in ind1:
        if i in ind2:
            ind.append(i)
            files_filt1.append(files1[list(ind1).index(i)])
            files_filt2.append(files2[list(ind2).index(i)])

    return ind, files_filt1, files_filt2


def compute_errors(inputfile, path_checkpoint_1,
                   path_checkpoint_2, relative=False):

    options = inout.read_parameters(inputfile)

    indices1, times1, files1 = find_checkpoints(options, path_checkpoint_1)
    indices2, times2, files2 = find_checkpoints(options, path_checkpoint_2)

    indices, files1, files2 = filter_indices(indices1, indices2, files1,
                                             files2)

    fun1 = get_h5_fun_name(files1[0])
    fun2 = get_h5_fun_name(files2[0])

    # assert np.allclose(indices1, indices2), 'Indices do not match!'
    # assert np.allclose(times1, times2), 'Time stamps do not match!'

    mesh, _, _ = inout.read_mesh(options['mesh'])

    if 'fluid' in options:
        assert options['fem']['velocity_space'] in ('p1', 'p2'), (
            'velocity space not supported, use p1 or p2')
        deg = int(options['fem']['velocity_space'][-1])

    elif 'material' in options:
        deg = options['solver']['fe_degree']

    V = VectorFunctionSpace(mesh, 'P', deg)

    u1 = Function(V)
    u2 = Function(V)

    err_l2 = []
    err_linf = []

    for i, (f1, f2) in enumerate(zip(files1, files2)):
        # file_ref = str(Path(path_fwd_tentative_checkpoint).joinpath(
        #     '{i}/u.h5'.format(i=i)))
        # file_roukf = str(Path(path_roukf_state_checkpoint).joinpath(
        #     '{i}/X0.h5'.format(i=i)))

        t0 = inout.read_HDF5_data(V.mesh().mpi_comm(), f1, u1, fun1)
        t1 = inout.read_HDF5_data(V.mesh().mpi_comm(), f2, u2, fun2)

        assert np.allclose(t0, t1), ('Timestamps do not match! {} vs {} '
                                     '(HDF5 files)'.format(t0, t1))

        if relative:
            u_l2 = norm(u1, 'l2')
            u_linf = norm(u1.vector(), 'linf')
            if u_l2 == 0:
                u_l2 = 1
                print('i = {} \t norm(u1) == 0, do not normalize!'.format(i))
            if u_linf == 0:
                u_linf = 1
                print('i = {} \t max(u1) == 0, do not normalize!'.format(i))
        else:
            u_l2 = u_linf = 1

        err_l2.append(errornorm(u1, u2, 'l2', degree_rise=0)/u_l2)
        err_linf.append(norm(u1.vector() - u2.vector(), 'linf')/u_linf)

        print('i = {} \t L2 error: {} \t Linf error: {}'.format(i, err_l2[-1],
                                                                err_linf[-1]))

    print('max L2 error: {}'.format(max(err_l2)))
    print('max Linf error: {}'.format(max(err_linf)))


def get_parser():
    parser = argparse.ArgumentParser(
        description='''
Compute errors between vector function checkpoints of two simulations.

Can be ROUKF checkpoints, but if forward and ROUKF
files are found in the same checkpoint folder (0/u.h5 and 0/X0.h5),
the forward file will be preferred.''',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('inputfile', type=str, help='path to yaml input file')
    parser.add_argument('path_checkpoint_1', type=str,
                        help='Path to checkpoints of simulation 1')
    parser.add_argument('path_checkpoint_2', type=str,
                        help='Path to checkpoints of simulation 2')
    parser.add_argument('-r', '--relative', action='store_true',
                        help='compute relative errors')
    return parser


if __name__ == '__main__':
    args = get_parser().parse_args()

    compute_errors(args.inputfile, args.path_checkpoint_1,
                   args.path_checkpoint_2, relative=args.relative)
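A minimal sketch of driving the error comparison above from Python rather than the command line; the input file and checkpoint directories below are placeholders, not paths taken from this commit, while compute_errors() itself is the function defined in kalman/compute_solution_errors.py.

    # hypothetical paths, for illustration only
    from compute_solution_errors import compute_errors

    compute_errors('kalman/aorta.yaml',
                   'results/aorta/checkpoint',        # checkpoints of simulation 1 ({i}/u.h5 or {i}/X0.h5)
                   'results/aorta_2/checkpoint',      # checkpoints of simulation 2
                   relative=True)                     # normalize errors by the norms of simulation 1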
kalman/gen_measurements_from_checkpoints.py (Normal file, 505 lines added)
@@ -0,0 +1,505 @@
from dolfin import *
import dolfin
import argparse
from common import inout
import numpy as np
import sys
# import ruamel.yaml as yaml
from pathlib import Path
import csv
import shutil

# parameters['allow_extrapolation'] = True
# careful with this option... can give nonsensical results outside


if '2017' in dolfin.__version__:
    # compatibility shim for dolfin 2017.x: expose MPI.comm_world
    class MPI(MPI):
        comm_world = MPI.comm_world


def solution_function_space(options):
    ''' Create function space to read in solution checkpoints, as specified in
    options.

    Args:
        options (dict): Options dictionary (from input file)

    Returns:
        FunctionSpace: Solution checkpoint function space
    '''
    mesh, _, _ = inout.read_mesh(options['mesh'])

    if 'fluid' in options:
        if options['fem']['velocity_space'].lower() == 'p2':
            Evel = VectorElement('P', mesh.ufl_cell(), 2)

        elif options['fem']['velocity_space'].lower() == 'p1':
            Evel = VectorElement('P', mesh.ufl_cell(), 1)

        else:
            raise Exception('Velocity space {} not yet implemented!'.format(
                options['fem']['velocity_space'].lower()))

    elif 'material' in options:
        if options['incompressibility']['mixed_formulation']:
            raise Exception('Only compressible hyperelasticity data is '
                            'supported here, for simplicity')

        deg = options['solver']['fe_degree']
        Evel = VectorElement('P', mesh.ufl_cell(), deg)

    V = FunctionSpace(mesh, Evel)

    return V


def measurement_function_spaces(options):
    ''' Create function spaces for measurements, as specified in
    options.

    Args:
        options (dict): Options dictionary (from input file)

    Returns:
        list((Vector)FunctionSpace): list of function spaces on measurement
            mesh, scalar if measurements are scalar, else VectorFunctionSpace
        list(VectorFunctionSpace): if scalar measurements, a list of
            corresponding VectorFunctionSpaces for interpolation, else empty
            list
    '''

    measurement_lst = options['estimation']['measurements']
    if not isinstance(measurement_lst, list):
        measurement_lst = [measurement_lst]

    meshes = [meas['mesh'] for meas in measurement_lst]

    if 'fe_degree' in options['estimation']['measurements'][0]:
        degree = options['estimation']['measurements'][0]['fe_degree']
    else:
        degree = 1

    for measurement in measurement_lst[1:]:
        if not degree == measurement['fe_degree']:
            raise Exception('fe_degree must be the same for all '
                            'measurements!')

    if degree in (1, 2):
        element_family = 'P'
    elif degree == 0:
        element_family = 'DG'
    else:
        raise Exception('Unsupported measurement FE degree: {}'
                        .format(degree))

    scalar = [False]*len(measurement_lst)
    for i, measurement in enumerate(measurement_lst):
        if 'velocity_direction' in measurement:
            direction = measurement['velocity_direction']
            if direction and None not in direction and sum(direction) > 0:
                scalar[i] = True

    V = []
    V_aux = []
    for scal, file in zip(scalar, meshes):
        mesh, _, _ = inout.read_mesh(file)
        if scal:
            V.append(FunctionSpace(mesh, element_family, degree))
            # 1. interpolate velocity vector onto measurement grid (V_aux)
            # 2. perform component projection in the measurement space
            # ---> need to store both scalar and vector spaces
            V_aux.append(VectorFunctionSpace(mesh, element_family, degree))
        else:
            V.append(VectorFunctionSpace(mesh, element_family, degree))

    return V, V_aux


def find_checkpoints(options):
    ''' Find all checkpoints of a simulation from the configuration in the
    options dictionary.

    Args:
        options (dict): Options dictionary (from YAML input file)

    Returns:
        list: List of found checkpoint indices
        list: List of found checkpoint times
        list: List of found checkpoint files
    '''
    if MPI.rank(MPI.comm_world) > 0:
        return

    chkpt_root = options['io']['write_path'] + '/checkpoint/{i}/u.h5'

    indices = options['estimation']['measurements'][0]['indices']
    if not indices:
        path_all = list(Path().glob(chkpt_root.format(i='*')))
        indices = sorted(int(str(s).split('/')[-2]) for s in path_all)

    # dt_meas = options['timemarching']['checkpoint_dt']
    # times = np.concatenate(([options['timemarching']['dt']],
    #                         dt_meas*np.array(indices[1:])))
    dt = options['timemarching']['dt']
    times = dt*np.array(indices)

    print('indices: \n')
    print('\t', indices)
    print('times: \n')
    print('\t', times)

    files = [chkpt_root.format(i=i) for i in indices]

    # check if all files are found
    for f in files:
        if not Path(f).is_file():
            raise FileNotFoundError(f)

    return indices, times, files


def generate(options, seed_lst, print_norms=False):
    ''' Generate measurements.

    Args:
        options (dict): Options dictionary (from input file)
        seed_lst (list): list of random seeds
        print_norms (bool): switch for printing norms of measurements
    '''

    V = solution_function_space(options)
    ndim = V.mesh().topology().dim()
    u = Function(V)

    V_meas_lst, V_aux_lst = measurement_function_spaces(options)
    u_meas_lst = [Function(V_meas, name='measurement') for V_meas in
                  V_meas_lst]
    u_meas_cpy = [Function(V_meas, name='measurement') for V_meas in
                  V_meas_lst]
    LI = LagrangeInterpolator
    if V_aux_lst:
        u_aux_lst = [Function(V_aux) for V_aux in V_aux_lst]
        comp_assigner = [FunctionAssigner([V]*ndim, V_aux) for V, V_aux in
                         zip(V_meas_lst, V_aux_lst)]

    measurement_lst = options['estimation']['measurements']
    noise_sd = [meas['noise_stddev'] for meas in measurement_lst]

    # if 'project' in options['estimation']['measurements']:
    #     project_switch = bool(options['estimation']['measurements']
    #                           ['project'])
    # else:
    #     project_switch = False
    project_switch = False

    indices, times, files = find_checkpoints(options)

    outfile_root_lst = [meas['file_root'] for meas in measurement_lst]
    xdmf_paths = [meas['xdmf_file'] for meas in measurement_lst]

    # don't automatically append seed{s} to the path anymore!
    # suppose seed{s} is included
    # if seed_lst[0] > 0:
    #     for i, (out, xdmf) in enumerate(zip(outfile_root_lst, xdmf_paths)):
    #         # if isinstance(out, str) and 'seed{s}' not in out:
    #         #     out_ = out.split('/')
    #         #     outfile_root_lst[i] = '/'.join(out_[:-1] + ['seed{s}',
    #         #                                   out_[-1]])
    #         if isinstance(xdmf, str) and 'seed{s}' not in xdmf:
    #             xdmf_ = xdmf.split('/')
    #             xdmf_paths[i] = '/'.join(xdmf_[:-1] + ['seed{s}', xdmf_[-1]])

    # if a list of seeds is given, check that {s} is included in the path
    if len(seed_lst) > 1:
        for out in outfile_root_lst:
            assert '{s}' in out, ('For a list of seeds, the string \'{s}\' '
                                  'must be included in the file_root and will '
                                  'be replaced by the seed number')

    if any(xdmf_paths):
        xdmf_lst = []
        for pth in xdmf_paths:
            if not isinstance(pth, str):
                raise TypeError('xdmf_file setting must be None or a string '
                                'indicating the target XDMF file. Got type {}'.
                                format(type(pth)))

            seed_dict = {}
            for seed in seed_lst:
                file = XDMFFile(pth.format(s=seed))
                file.parameters['rewrite_function_mesh'] = False
                seed_dict[seed] = file
            xdmf_lst.append(seed_dict)
    else:
        xdmf_lst = [None]*len(u_meas_lst)

    for count, (index, time, infile) in enumerate(zip(indices, times, files)):
        if MPI.rank(MPI.comm_world) == 0:
            print('Processing {} at t = {}'.format(infile, time))

        t_ = inout.read_HDF5_data(u.function_space().mesh().mpi_comm(), infile,
                                  u, '/u')

        assert np.allclose(time, t_), ('Timestamps do not match! {} vs {} '
                                       '(HDF5 file)'.format(time, t_))

        # interpolate u to measurement meshes
        for k, (u_meas, outfile_root, xdmf, sd) in enumerate(zip(
                u_meas_lst, outfile_root_lst, xdmf_lst, noise_sd)):

            if project_switch:
                u_meas.assign(project(u, u_meas.function_space()))

            else:
                if V_aux_lst:
                    if MPI.rank(MPI.comm_world) == 0:
                        print('- Scalar velocity component')
                    direction = (options['estimation']['measurements'][k]
                                 ['velocity_direction'])
                    if direction.count(0) == 2 and direction.count(1) == 1:
                        LI.interpolate(u_meas, u.sub(direction.index(1)))

                    else:
                        assert u_meas.value_shape() == []
                        # normalize projection direction
                        direction = np.array(direction, dtype=np.float64)
                        direction /= np.sqrt(np.dot(direction, direction))
                        if MPI.rank(MPI.comm_world) == 0:
                            print('- direction: {}'.format(direction))

                        LagrangeInterpolator.interpolate(u_aux_lst[k], u)

                        # This is faster than simply Xobs_aux.split(True) !
                        u_i = [u_meas] + [u_meas.copy(True) for j in
                                          range(ndim - 1)]

                        comp_assigner[k].assign(u_i, u_aux_lst[k])
                        u_meas.vector()[:] *= direction[0]
                        for ui, d in zip(u_i[1:], direction[1:]):
                            if d:
                                u_meas.vector().axpy(d, ui.vector())

                else:
                    if MPI.rank(MPI.comm_world) == 0:
                        print('- Full velocity vector')
                    LI.interpolate(u_meas, u)
            if sd:
                u_meas_cpy[k].assign(u_meas)

            # add noise
            for seed in seed_lst:
                if sd:
                    if seed > 0:
                        np.random.seed(seed + count)

                    if MPI.rank(MPI.comm_world) == 0:
                        print('- Add noise with stddev = {}, seed = {}'
                              .format(sd, seed + count))

                    noise = np.random.normal(0., sd,
                                             u_meas.vector().local_size())
                    u_meas.assign(u_meas_cpy[k])
                    u_meas.vector()[:] += noise

                if MPI.rank(MPI.comm_world) == 0:
                    if print_norms:
                        print('Writing file at t = {}\t |u_m| = {}'.format(
                            time, norm(u_meas)))
                    else:
                        print('Writing file at t = {}\t'.format(time))

                outfile = outfile_root.format(i=index, s=seed)
                inout.write_HDF5_data(
                    u_meas.function_space().mesh().mpi_comm(), outfile, u_meas,
                    '/u', time)

                if xdmf:
                    xdmf[seed].write(u_meas, time)

    # THIS IS OBSOLETE NOW
    # write indices and timesteps
    # write_timestamps(options, indices, times, seed_lst)


def write_timestamps(options, indices, times, seed_lst):
    ''' Write time stamps of measurements to a csv file.

    Args:
        options (dict): Options dictionary (from YAML input file)
        indices (list): List of checkpoint indices
        times (list): List of checkpoint times
        seed_lst (list): List of random seeds
    '''
    if MPI.rank(MPI.comm_world) > 0:
        return

    warning('timestamps.csv is OBSOLETE!')

    file_root_lst = [meas['file_root'] for meas in options['estimation']
                     ['measurements']]

    # if seed_lst[0]:
    #     for i, out in enumerate(file_root_lst):
    #         if isinstance(out, str) and 'seed{s}' not in out:
    #             out_ = out.split('/')
    #             file_root_lst[i] = '/'.join(out_[:-1]
    #                                         + ['seed{s}', out_[-1]])

    for file_root in file_root_lst:
        for seed in seed_lst:
            path = Path(file_root.format(s=seed, i=-1)).parent
            if seed > 0 and 'seed{s}'.format(s=seed) not in str(path):
                path = path.joinpath('seed{s}'.format(s=seed))
            path = path.joinpath('timestamps.csv')
            print('Writing timestamps to file: {}'.format(path))
            with path.open('w') as file:
                writer = csv.writer(file, delimiter=' ')
                for i, t in zip(indices, times):
                    writer.writerow((i, t))


def copy_inputfile(options, inputfile, seed_lst):
    ''' Copy the input file of the reference solution to the measurements
    directory.

    Args:
        options (dict): Options dictionary (from YAML input file)
        inputfile (str): Path of input file to be copied
        seed_lst (list): List of random seeds
    '''
    if MPI.rank(MPI.comm_world) > 0:
        return

    file_root_lst = [meas['file_root'] for meas in options['estimation']
                     ['measurements']]
    for file_root in file_root_lst:
        for seed in seed_lst:
            path = Path(file_root.format(s=seed, i=-1)).parent
            # if seed > 0 and 'seed{s}'.format(s=seed) not in str(path):
            #     path = path.joinpath('seed{s}'.format(s=seed))
            path = path.joinpath('input.yaml')
            # path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(str(inputfile), str(path))
            print('Copied input file to {}'.format(path))


def dump_example_options(path):
    ''' Dump example options to the given path and exit.

    Args:
        path (str): Path to input file
    '''
    example = '''\
# example fractional step inputfile for gen_measurements_from_checkpoints.py
mesh: './meshes/mesh.h5'

io:
  # Path containing checkpoints in a checkpoints/ folder
  write_path: './results/test/'

timemarching:
  # final time of simulation to be processed
  T: 0.4
  # simulation time step size (not dt_checkpoint)
  dt: 0.001

fem:
  # Velocity and pressure (for monolithic) function spaces of checkpoints
  velocity_space: 'p1'
  pressure_space: 'p1'

estimation:
  measurements:
    # List of measurements
    -
      # Mesh of 1. measurement set
      mesh: './measurements/mesh_meas.h5'

      # degree of finite element function space
      # (1 for linear, 0 for discontinuous piece-wise constant)
      fe_degree: 0

      # velocity measurement files to be written by this tool
      # {i} will be replaced by the corresponding index of the checkpoint
      # if given, {s} may be replaced by the seed number, for instance
      #   path/seed{s}/u{i}.h5
      file_root: './measurements/u{i}.h5'

      # XDMF file where measurements are optionally stored for
      # visualization (one file for all time steps)
      xdmf_file: './measurements/meas.xdmf'

      # Select a velocity component:
      #   * None (~) means "use complete vector"
      #   * else, use a scalar velocity by projecting onto the given
      #     direction vector, [x, y, (z)]
      velocity_direction: ~

      # (absolute) standard deviation of Gaussian noise
      noise_stddev: 10.

      # indices of checkpoints to be processed:
      #   * 0 == all
      #   * a list of integers
      indices: 0

    -
      # second measurement ...
    -
      # further measurements ...
'''

    with open(path, 'w') as f:
        f.write(example)
    print('Example options:\n\n')
    print(example)
    print('\ndumped example options to file: {}'.format(path))
    sys.exit(0)


def get_parser():
    parser = argparse.ArgumentParser(description='''\
Generate measurements from HDF5 checkpoints, generated by the Fractional-Step
or the monolithic Navier-Stokes solvers.

Reads options from the given input file.

See the example options for further explanations
(`gen_measurements_from_checkpoints.py -d example.yaml`).
''', formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('inputfile', type=str, help='path to yaml input file')
    # parser.add_argument('-s', '--seed', type=int, default=-1,
    #                     help='seed for random generator')
    parser.add_argument('-s', '--seed', nargs='+', help='seed or list of seeds'
                        ' for noise random number generator. '
                        'Will take the given seed for the first time step '
                        'and increment by +1 for all subsequent time steps, '
                        'so that noise is different for all times '
                        'but reproducible', default=[-1], type=int)
    parser.add_argument('-n', '--print_norms', action='store_true',
                        help='print norms')
    parser.add_argument('-d', '--dump', action='store_true',
                        help='dump minimal example parameters to inputfile')

    return parser


if __name__ == '__main__':
    args = get_parser().parse_args()

    if args.dump:
        dump_example_options(args.inputfile)

    seed = args.seed

    if seed[0] <= 0:
        assert len(seed) == 1, 'if multiple seeds are given, all should be > 0'

    try:
        options = inout.read_parameters(args.inputfile)
    except IOError:
        raise IOError('File could not be read: {}'.format(args.inputfile))

    generate(options, seed, args.print_norms)

    copy_inputfile(options, args.inputfile, seed)
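A corresponding sketch for driving the measurement generator from Python; it mirrors what kalman/other/batch_gen_measurements.py does further below, with a placeholder input file and a single noise seed.

    # hypothetical input file, for illustration only
    from common import inout
    from gen_measurements_from_checkpoints import generate, copy_inputfile

    options = inout.read_parameters('kalman/aorta.yaml')
    seeds = [42]   # noise seed; generate() offsets it by the checkpoint counter
    generate(options, seeds, print_norms=True)
    copy_inputfile(options, 'kalman/aorta.yaml', seeds)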
kalman/meshes/channel2d.h5 (BIN, Normal file, binary file not shown)
kalman/meshes/channel3d.h5 (BIN, Normal file, binary file not shown)
kalman/meshes/channel3d_leo.h5 (BIN, Executable file, binary file not shown)
kalman/meshes/coaortaH1.h5 (BIN, Executable file, binary file not shown)
kalman/other/batch_gen_measurements.py (Executable file, 137 lines added)
@@ -0,0 +1,137 @@
from common import inout
from gen_measurements_from_checkpoints import generate, copy_inputfile

path_chan2d = './projects/DA_testbench/input/channel2d/measurements/'
inputfiles_chan2d = [
    # 'chan2d_CT_Rtop0.9_slip0.332_no-pen_h0.05_noise0.yaml',
    # 'chan2d_CT_Rtop0.9_slip0.332_no-pen_h0.05_noise6.5.yaml',
    # # # 'chan2d_CT_Rtop0.9_slip0.332_no-pen_dt0.001_h0.05_noise0.yaml',
    # # # 'chan2d_CT_Rtop0.9_slip0.332_no-pen_dt0.001_h0.05_noise6.5.yaml',
    # 'chan2d_CT_Rtop0.9_slip0.332_trans3600_h0.05_noise0.yaml',
    'chan2d_CT_Rtop0.9_slip0.332_trans3600_h0.05_noise6.5.yaml',
    # 'chan2d_CT_R1_no-slip_h0.05_noise0.yaml',
    # 'chan2d_CT_R1_no-slip_h0.05_noise6.5.yaml',
    # # 'chan2d_CT_R1_no-slip_steady_h0.05_noise0.yaml',
    # # 'chan2d_CT_R1_no-slip_steady_h0.05_noise6.5.yaml',
    # 'chan2d_mono_Rtop0.9_slip0.332_no-pen_h0.05_noise0.yaml',
    # 'chan2d_mono_Rtop0.9_slip0.332_no-pen_h0.05_noise6.5.yaml',
    # # 'chan2d_mono_Rtop0.9_slip0.332_no-pen_dt0.001_h0.05_noise0.yaml',
    # # 'chan2d_mono_Rtop0.9_slip0.332_no-pen_dt0.001_h0.05_noise6.5.yaml',
    # 'chan2d_mono_Rtop0.9_slip0.332_trans3600_h0.05_noise0.yaml',
    # 'chan2d_mono_Rtop0.9_slip0.332_trans3600_h0.05_noise6.5.yaml',
    # 'chan2d_mono_R1_no-slip_h0.05_noise0.yaml',
    # 'chan2d_mono_R1_no-slip_h0.05_noise6.5.yaml',
    # # 'chan2d_mono_R1_no-slip_steady_h0.05_noise0.yaml',
    # # 'chan2d_mono_R1_no-slip_steady_h0.05_noise6.5.yaml',
    # #
    # # 'chan2d_CT_Rtop0.9_slip0.332_no-pen_h0.05_state_noise0.yaml',
]

path_coa2d = './projects/DA_testbench/input/coa2d/measurements/'
inputfiles_coa2d = [
    # CT H=h, DT=dt
    # 'coa2d_CT_d0.1_slip0.001_trans1000_h0.025_supg_noise0.yaml',
    # 'coa2d_CT_d0.1_slip0.332_trans3600_h0.025_supg_noise0.yaml',
    # 'coa2d_CT_d0.1_slip0.001_trans1000_h0.025_supg_noise10.yaml',
    # 'coa2d_CT_d0.1_slip0.332_trans3600_h0.025_supg_noise10.yaml',
    # 'coa2d_CT_d0_noslip_h0.025_supg_noise10.yaml',
    # 'coa2d_CT_d0_noslip_h0.025_supg_noise0.yaml',
    # MONO
    # 'coa2d/coa2d_mono_d0.1_slip0.001_trans1000_h0.025_supg.yaml',
    # 'coa2d/coa2d_mono_d0.1_slip0.332_trans3600_h0.025_supg.yaml',
    # 'coa2d/coa2d_mono_d0_noslip_h0.025_supg.yaml',
    # H, DT var
    # 'coa2d_CT_d0_noslip_h0.025_supg_d0.1_H0.1.yaml',
    # 'coa2d_CT_d0_noslip_h0.025_supg_DT0.01_H0.025.yaml',
    # 'coa2d_CT_d0_noslip_h0.025_supg_DT0.02_H0.025.yaml',
    # 'coa2d_CT_d0_noslip_h0.025_supg_DT0.02_H0.1.yaml',
    # PLUG FLOW
    # 'coa2d_CT_d0_noslip_h0.025_supg_plug_d0_H0.1.yaml',
    'coa2d_CT_d0_noslip_h0.025_supg_plug_d0.1_H0.1.yaml',
    # 'coa2d_CT_d0_noslip_h0.025_supg_plug_d0.2_H0.2.yaml',
]

path_pipe3d = './projects/DA_testbench/input/pipe3d/measurements/'
inputfiles_pipe3d = [
    'pipe3d_CT_R0.9_slip0.332_trans3600_h0.05_noise0.yaml',
    'pipe3d_CT_R0.9_slip0.332_trans3600_h0.05_noise10.yaml',
    'pipe3d_CT_R0.9_slip0.332_trans3600_h0.1_noise0.yaml',
    'pipe3d_CT_R0.9_slip0.332_trans3600_h0.1_noise10.yaml',
    'pipe3d_CT_R1_noslip_h0.1_noise10.yaml',
    'pipe3d_CT_R1_noslip_h0.05_noise10.yaml',
]

path_coa3d = './projects/DA_testbench/input/coa3d/measurements/'
inputfiles_coa3d = [
    # 'coa3d_CT_R0.9_slip0.001_trans1000_h0.025_noise0.yaml',
    # 'coa3d_CT_R0.9_slip0.001_trans1000_h0.025_noise10.yaml',
    # 'coa3d_CT_R1_noslip_h0.025_noise0.yaml',
    # 'coa3d_CT_R1_noslip_h0.025_noise10.yaml',
    # 'coa3d_CT_R1_noslip_h0.025_H0.1_DT0.001_noise10.yaml',
    # 'coa3d_CT_R1_noslip_h0.025_H0.1_DT0.01_noise10.yaml',
    # 'coa3d_CT_R1_noslip_h0.025_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_CT_R1_noslip_h0.025_H0.025_DT0.01_noise10.yaml',
    # 'coa3d_CT_R1_noslip_h0.025_H0.025_DT0.02_noise10.yaml',
    # 'coa3d_CT_R1_noslip_h0.025_H0.025_DT0.02_noise10.yaml',
    # 'coa3d_asym_CT_R1_noslip_h0.025_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_asym_CT_R1_noslip_h0.025_H0.2_DT0.02_noise10.yaml',
    # 'coa3d_bend_CT_R1_noslip_h0.025_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_bend_CT_R1_noslip_h0.025_H0.2_DT0.02_noise10.yaml',
    # PLUG FLOW
    # 'coa3d_bend_CT_R1_noslip_plug_h0.025_d0_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_bend_CT_R1_noslip_plug_h0.025_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_bend_CT_R1_noslip_plug_h0.025_H0.2_DT0.02_noise10.yaml',
    # slices
    # 'coa3d_CT_R1_noslip_plug_h0.025_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_CT_R1_noslip_plug_h0.025_H0.2_DT0.02_noise10.yaml',
    # 'coa3d_CT_R1_noslip_plug_h0.025_slices_isZY_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_CT_R1_noslip_plug_h0.025_slices_Z_P0_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_CT_R1_noslip_plug_h0.025_slices_Z_P0_H0.2_DT0.02_noise10.yaml',
    # 'coa3d_CT_R1_noslip_plug_h0.025_slices_isZY_P0_H0.1_DT0.02_noise10.yaml',
    # 'coa3d_CT_R1_noslip_plug_h0.025_slices_isZY_H0.2_DT0.02_noise10.yaml'
    # 'coa3d_bend_f0.3_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise48.yaml',
    # 'coa3d_bend_f0.3_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise0.yaml',
    # 'coa3d_bend_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise48.yaml',
    # 'coa3d_bend_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise0.yaml',
    # 'coa3d_bend_f0.0_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise0.yaml',  # <------ compute these!
    # 'coa3d_bend_f0.0_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise48.yaml',
    # 'coa3d_bend_f0.5_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise0.yaml',
    # 'coa3d_bend_f0.5_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise48.yaml',
    # 'coa3d_bend_f0.5_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise10.yaml',

    # 'coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noise0.yaml',
    # 'coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noiseVENC.yaml',
    # 'coa3d_bend_f0.5_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noiseVENC.yaml',
    # 'coa3d_bend_f0.6_CT_R1_noslip_plug_h0.025_slices_P0_H0.1-0.2_DT0.02_noiseVENC.yaml',

    'coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025_slices_par_P0_H0.1-0.2_DT0.02_noiseVENC.yaml',
    'coa3d_bend_f0.5_CT_R1_noslip_plug_h0.025_slices_par_P0_H0.1-0.2_DT0.02_noiseVENC.yaml',
    'coa3d_bend_f0.6_CT_R1_noslip_plug_h0.025_slices_par_P0_H0.1-0.2_DT0.02_noiseVENC.yaml',
]

seed = range(11, 51)
# seed = 2

path = path_coa3d
inputfiles = inputfiles_coa3d

if isinstance(seed, range):
    seed = list(seed)
elif isinstance(seed, int):
    seed = [seed]

assert isinstance(seed, list), ('type(seed) must be list, but is {}'.
                                format(type(seed)))

for inpfile in inputfiles:
    try:
        options = inout.read_parameters(path + inpfile)
    except IOError:
        raise IOError('File could not be read: {}'.format(path + inpfile))

    generate(options, seed, False)

    copy_inputfile(options, path + inpfile, seed)
@ -0,0 +1,173 @@
|
|||||||
|
# Set of default parameters for steady Navier-Stokes
|
||||||
|
mesh: './meshes/coa3d_bend_Lc2_L6.3_f0.4_d0_ns1_h0.025.h5'
|
||||||
|
density: 1.0
|
||||||
|
dynamic_viscosity: 0.035
|
||||||
|
stokes: False
|
||||||
|
|
||||||
|
io:
|
||||||
|
write_hdf5: False
|
||||||
|
write_hdf5_timeseries: False
|
||||||
|
write_xdmf: True
|
||||||
|
write_path: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/'
|
||||||
|
restart:
|
||||||
|
path: '' # './projects/nse_coa3d/results/test_restart2/'
|
||||||
|
time: 0
|
||||||
|
write_checkpoints: True
|
||||||
|
write_velocity: 'update'
|
||||||
|
log: True
|
||||||
|
|
||||||
|
boundary_conditions:
|
||||||
|
- id: 1
|
||||||
|
preset: 'sine_parabola_inlet'
|
||||||
|
value:
|
||||||
|
R: 1.0
|
||||||
|
U: 43.75
|
||||||
|
a: 2.5
|
||||||
|
flow_direction: 0 # (0, 1, 2)
|
||||||
|
symmetric: False
|
||||||
|
- id: 2
|
||||||
|
preset: 'outlet'
|
||||||
|
value: 0.
|
||||||
|
- id: 3
|
||||||
|
type: 'dirichlet'
|
||||||
|
value: [0, 0, 0]
|
||||||
|
|
||||||
|
timemarching:
|
||||||
|
velocity_pressure_coupling: 'fractionalstep' # monolithic, fractionalstep
|
||||||
|
|
||||||
|
monolithic:
|
||||||
|
timescheme: 'gmp' # generalized midpoint, steady FIXME TODO
|
||||||
|
theta: 1 # 1: Euler, 2: implicit midpoint rule (one-legged)
|
||||||
|
nonlinear:
|
||||||
|
method: 'constant_extrapolation' # constant_extrapolation, linear_extrapolation, newton, picard, snes
|
||||||
|
maxit: 20
|
||||||
|
init_steps: 30
|
||||||
|
use_aitken: 1 # 0: False, 1: Picard only, 2: all
|
||||||
|
report: 1 # 0: None, 1: residuals, 2: residuals and energy (inflow/driving/forcing via ESSENTIAL Dbcs!)
|
||||||
|
atol: 1.e-6 # note: dot required!!
|
||||||
|
rtol: 1.e-16
|
||||||
|
stol: 0.0
|
||||||
|
|
||||||
|
fractionalstep:
|
||||||
|
scheme: 'CT' # CT, IPCS
|
||||||
|
coupled_velocity: False
|
||||||
|
robin_bc_velocity_scheme: 'implicit' # explicit, semi-implicit, implicit
|
||||||
|
transpiration_bc_projection: 'robin' # robin, dirichlet
|
||||||
|
flux_report_normalize_boundary: 1
|
||||||
|
|
||||||
|
T: 0.4
|
||||||
|
dt: 0.001
|
||||||
|
write_dt: 0.001
|
||||||
|
checkpoint_dt: 0.02 # <= 0: only last; else value + last
|
||||||
|
report: 1 # 0: print nothing, 1: print time step and writeout, 2: 1 + flux
|
||||||
|
|
||||||
|
estimation:
boundary_conditions:
- id: 3
type: 'navierslip'
initial_stddev: 1
- id: 3
type: 'transpiration'
initial_stddev: 1
measurements:
# -
# mesh: './meshes/coa3d_bend_slice_XZ_H0.1.h5'
# fe_degree: 0
# xdmf_file: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_XZ_par_P0_H0.1_DT0.02_noise0/u_meas.xdmf'
# file_root: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_XZ_par_P0_H0.1_DT0.02_noise0/u{i}.h5'
# indices: [1, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360, 380, 400]
# noise_stddev: 0 # standard deviation of Gaussian noise
# # noise level 48 ==> 15% of max(u) = 320
# -
# mesh: './meshes/coa3d_bend_slice_XZ_H0.2.h5'
# fe_degree: 0
# xdmf_file: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_XZ_par_P0_H0.2_DT0.02_noise0/u_meas.xdmf'
# file_root: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_XZ_par_P0_H0.2_DT0.02_noise0/u{i}.h5'
# indices: [1, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360, 380, 400]
# noise_stddev: 0 # standard deviation of Gaussian noise
# # noise level 48 ==> 15% of max(u) = 320
-
mesh: './meshes/coa3d_bend_slice_inlet_H0.1.h5'
fe_degree: 0
xdmf_file: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inlet_par_P0_H0.1_DT0.02_noiseVENC/u_meas.xdmf'
file_root: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inlet_par_P0_H0.1_DT0.02_noiseVENC/u{i}.h5'
indices: [1, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360, 380, 400]
velocity_direction: [1, 0, 0]
noise_stddev: 6.5625 # standard deviation of Gaussian noise
# noise level 48 ==> 15% of max(u) = 320
-
mesh: './meshes/coa3d_bend_slice_inlet_H0.2.h5'
fe_degree: 0
xdmf_file: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inlet_par_P0_H0.2_DT0.02_noiseVENC/u_meas.xdmf'
file_root: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inlet_par_P0_H0.2_DT0.02_noiseVENC/u{i}.h5'
indices: [1, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360, 380, 400]
velocity_direction: [1, 0, 0]
noise_stddev: 6.5625 # standard deviation of Gaussian noise
# noise level 48 ==> 15% of max(u) = 320
# -
# mesh: './meshes/coa3d_bend_slice_inclined2_H0.1.h5'
# fe_degree: 0
# xdmf_file: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inclined2_par_P0_H0.1_DT0.02_noise0/u_meas.xdmf'
# file_root: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inclined2_par_P0_H0.1_DT0.02_noise0/u{i}.h5'
# indices: [1, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360, 380, 400]
# noise_stddev: 0 # standard deviation of Gaussian noise
# # noise level 48 ==> 15% of max(u) = 320
# -
# mesh: './meshes/coa3d_bend_slice_inclined2_H0.2.h5'
# fe_degree: 0
# xdmf_file: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inclined2_par_P0_H0.2_DT0.02_noise0/u_meas.xdmf'
# file_root: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inclined2_par_P0_H0.2_DT0.02_noise0/u{i}.h5'
# indices: [1, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360, 380, 400]
# noise_stddev: 0 # standard deviation of Gaussian noise
# # noise level 48 ==> 15% of max(u) = 320
-
mesh: './meshes/coa3d_bend_slice_inclined3_H0.1.h5'
fe_degree: 0
xdmf_file: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inclined3_par_P0_H0.1_DT0.02_noiseVENC/u_meas.xdmf'
file_root: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inclined3_par_P0_H0.1_DT0.02_noiseVENC/u{i}.h5'
indices: [1, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360, 380, 400]
velocity_direction: [0.98426389, 0., -0.17670481]
noise_stddev: 21 # standard deviation of Gaussian noise
# noise level 48 ==> 15% of max(u) = 320
-
mesh: './meshes/coa3d_bend_slice_inclined3_H0.2.h5'
fe_degree: 0
xdmf_file: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inclined3_par_P0_H0.2_DT0.02_noiseVENC/u_meas.xdmf'
file_root: './projects/DA_testbench/results/coa3d/coa3d_bend_f0.4_CT_R1_noslip_plug_h0.025/measurements/slice_inclined3_par_P0_H0.2_DT0.02_noiseVENC/u{i}.h5'
indices: [1, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360, 380, 400]
velocity_direction: [0.98426389, 0., -0.17670481]
noise_stddev: 21 # standard deviation of Gaussian noise
# noise level 48 ==> 15% of max(u) = 320
roukf:
particles: 'simplex'
observation_operator: 'postprocessing'
reparameterize: True

# solver setup
fem:
velocity_space: p1 # p1 p1b/p1+ p2
pressure_space: p1 # p1 p0/dg0 dg1

strain_symmetric: 0
convection_skew_symmetric: 1 # aka Temam term
stabilization:
backflow_boundaries: [2]
streamline_diffusion:
enabled: True
parameter: 'shakib' # standard, shakib, codina, klr
length_scale: 'metric' # average, max, metric
consistent: False # deprecated
Cinv: ~
monolithic:
infsup: False # pspg, pressure-stabilization
graddiv: False
consistent: False
pressure_stab_constant: 1.

fix_pressure: False
fix_pressure_point: [0., 0.]

linear_solver:
method: 'default'
# inputfile: './projects/nse_coa3d/input/pc/MUMPS_default.yaml'
# inputfile: './input/pc/fgmres_gamg_rtol1e-6.yaml'
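Each measurement entry above pairs a slice mesh with an HDF5 series whose {i} placeholder is replaced by a checkpoint index. A minimal sketch of how one entry resolves to concrete file names; the input-file name and the use of ruamel.yaml's safe_load are assumptions for illustration, not part of the solver:

import ruamel.yaml as yaml

# Sketch only: expand the first measurement entry of the configuration above.
with open('input_roukf.yaml') as f:        # hypothetical name of this input file
    opts = yaml.safe_load(f)

meas = opts['estimation']['measurements'][0]
files = [meas['file_root'].format(i=i) for i in meas['indices']]
print(files[0])   # .../slice_inlet_par_P0_H0.1_DT0.02_noiseVENC/u1.h5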
376
kalman/other/gen_measurements_from_checkpoints.py
Executable file
@ -0,0 +1,376 @@
from dolfin import *
import dolfin
import argparse
from common import inout
import numpy as np
import sys
# import ruamel.yaml as yaml
from pathlib import Path
import csv
import shutil

# parameters['allow_extrapolation'] = True
# careful with this option... can give nonsensical results outside the mesh

from mpi4py import MPI
comm = MPI.COMM_WORLD
size = comm.Get_size()
rank = comm.Get_rank()


def solution_function_space(options):
    mesh, _, _ = inout.read_mesh(options['mesh'])
    if options['fem']['velocity_space'].lower() == 'p2':
        Evel = VectorElement('P', mesh.ufl_cell(), 2)
    elif options['fem']['velocity_space'].lower() == 'p1':
        Evel = VectorElement('P', mesh.ufl_cell(), 1)
    else:
        raise Exception('Velocity space {} not yet implemented!'.format(
            options['fem']['velocity_space'].lower()))

    V = FunctionSpace(mesh, Evel)

    return V


def measurement_function_spaces(options):

    measurement_lst = options['estimation']['measurements']
    if not isinstance(measurement_lst, list):
        measurement_lst = [measurement_lst]

    meshes = [meas['mesh'] for meas in measurement_lst]

    if 'fe_degree' in options['estimation']['measurements'][0]:
        degree = options['estimation']['measurements'][0]['fe_degree']
    else:
        degree = 1

    for measurement in measurement_lst[1:]:
        if not degree == measurement['fe_degree']:
            raise Exception('fe_degree must be the same for all '
                            'measurements!')

    if degree == 1:
        element_family = 'P'
    elif degree == 0:
        element_family = 'DG'
    else:
        raise Exception('Unsupported measurement FE degree: {}'
                        .format(degree))

    scalar = [False]*len(measurement_lst)
    for i, measurement in enumerate(measurement_lst):
        if 'velocity_direction' in measurement:
            direction = measurement['velocity_direction']
            if direction and None not in direction and sum(direction) > 0:
                scalar[i] = True

    V = []
    V_aux = []
    for scal, file in zip(scalar, meshes):
        mesh, _, _ = inout.read_mesh(file)
        if scal:
            V.append(FunctionSpace(mesh, element_family, degree))
            # 1. interpolate velocity vector onto measurement grid (V_aux)
            # 2. perform component projection in the measurement space
            # ---> need to store both scalar and vector spaces
            V_aux.append(VectorFunctionSpace(mesh, element_family, degree))
        else:
            V.append(VectorFunctionSpace(mesh, element_family, degree))

    return V, V_aux


def find_checkpoints(options):

    chkpt_root = options['io']['write_path'] + '/checkpoint/{i}/u.h5'

    indices = options['estimation']['measurements'][0]['indices']

    if not indices:
        # the pathlib-based scan does not seem to work at the moment
        # path_all = list(Path().glob(chkpt_root.format(i='*')))
        # indices = sorted(int(str(s).split('/')[-2]) for s in path_all)
        from glob import glob
        chkpt_root2 = options['io']['write_path'] + '/checkpoint/*'
        paths = glob(chkpt_root2)
        pathsclean = paths
        pathff = []
        for l in range(len(paths)):
            pathsclean[l] = paths[l].replace(options['io']['write_path'] + '/checkpoint/', '')
            pathff.append(int(pathsclean[l]))

        indices = [int(x) for x in pathff]
        indices.sort()

    # dt_meas = options['timemarching']['checkpoint_dt']
    # times = np.concatenate(([options['timemarching']['dt']],
    #                         dt_meas*np.array(indices[1:])))
    dt = options['timemarching']['dt']
    times = dt*np.array(indices)

    if rank == 0:
        print('indices: \n')
        print('\t', indices)
        print('times: \n')
        print('\t', times)

    files = [chkpt_root.format(i=i) for i in indices]

    # check if all files are found
    # try:
    #     for f in files:
    #         # Path(f).resolve(strict=True)
    #         Path(f).is_file()
    # except FileNotFoundError:
    #     raise

    for f in files:
        if not Path(f).is_file():
            raise FileNotFoundError(f)

    return indices, times, files


def generate(options, seed_lst, print_norms=False):

    V = solution_function_space(options)
    ndim = V.mesh().topology().dim()
    u = Function(V)

    V_meas_lst, V_aux_lst = measurement_function_spaces(options)
    u_meas_lst = [Function(V_meas) for V_meas in V_meas_lst]
    u_meas_cpy = [Function(V_meas) for V_meas in V_meas_lst]
    # LI = LagrangeInterpolator()
    if V_aux_lst:
        u_aux_lst = [Function(V_aux) for V_aux in V_aux_lst]
        comp_assigner = [FunctionAssigner([V]*ndim, V_aux) for V, V_aux in
                         zip(V_meas_lst, V_aux_lst)]

    measurement_lst = options['estimation']['measurements']
    noise_sd = [meas['noise_stddev'] for meas in measurement_lst]

    # if 'project' in options['estimation']['measurements']:
    #     project_switch = bool(options['estimation']['measurements']
    #                           ['project'])
    # else:
    #     project_switch = False
    project_switch = False

    indices, times, files = find_checkpoints(options)

    outfile_root_lst = [meas['file_root'] for meas in measurement_lst]
    xdmf_paths = [meas['xdmf_file'] for meas in measurement_lst]

    if seed_lst[0] > 0:
        for i, (out, xdmf) in enumerate(zip(outfile_root_lst, xdmf_paths)):
            if isinstance(out, str) and 'seed{s}' not in out:
                out_ = out.split('/')
                outfile_root_lst[i] = '/'.join(out_[:-1] + ['seed{s}',
                                                            out_[-1]])
            if isinstance(xdmf, str) and 'seed{s}' not in xdmf:
                xdmf_ = xdmf.split('/')
                xdmf_paths[i] = '/'.join(xdmf_[:-1] + ['seed{s}', xdmf_[-1]])

    if any(xdmf_paths):
        xdmf_lst = []
        for pth in xdmf_paths:
            seed_dict = {}
            for seed in seed_lst:
                file = XDMFFile(pth.format(s=seed))
                file.parameters['rewrite_function_mesh'] = False
                seed_dict[seed] = file
            xdmf_lst.append(seed_dict)
    else:
        xdmf_lst = [None]*len(u_meas_lst)

    for i, t, infile in zip(indices, times, files):
        print('Processing {} at t = {}'.format(infile, t))

        t_ = inout.read_HDF5_data(u.function_space().mesh().mpi_comm(),
                                  infile, u, '/u')

        assert np.allclose(t, t_), ('Timestamps do not match! {} vs {} (HDF5 '
                                    'file)'.format(t, t_))

        # interpolate u to measurement meshes
        for k, (u_meas, outfile_root, xdmf, sd) in enumerate(zip(
                u_meas_lst, outfile_root_lst, xdmf_lst, noise_sd)):

            if project_switch:
                u_meas.assign(project(u, u_meas.function_space()))

            else:
                if V_aux_lst:
                    print('scalar')
                    direction = (options['estimation']['measurements'][k]
                                 ['velocity_direction'])
                    if direction.count(0) == 2 and direction.count(1) == 1:
                        LagrangeInterpolator.interpolate(u_meas, u.sub(direction.index(1)))

                    else:
                        assert u_meas.value_size() == 1
                        # normalize projection direction
                        direction = np.array(direction, dtype=np.float64)
                        direction /= np.sqrt(np.dot(direction, direction))
                        print('d = {}'.format(direction))

                        LagrangeInterpolator().interpolate(u_aux_lst[k], u)

                        # This is faster than simply Xobs_aux.split(True) !
                        u_i = [u_meas] + [u_meas.copy(True) for j in
                                          range(ndim - 1)]

                        comp_assigner[k].assign(u_i, u_aux_lst[k])
                        u_meas.vector()[:] *= direction[0]
                        for ui, d in zip(u_i[1:], direction[1:]):
                            if d:
                                u_meas.vector().axpy(d, ui.vector())

                else:
                    LagrangeInterpolator.interpolate(u_meas, u)

            if sd:
                u_meas_cpy[k].assign(u_meas)

            # add noise
            for seed in seed_lst:
                if sd:
                    # note: the seed is not actually used here,
                    # the noise is drawn independently for every time step
                    # if seed > 0:
                    #     np.random.seed(seed + i)
                    noise = np.random.normal(0., sd,
                                             u_meas.vector().local_size())
                    u_meas.assign(u_meas_cpy[k])
                    u_meas.vector()[:] += noise

                if rank == 0:
                    if print_norms:
                        print('Writing file at t = {}\t |u_m| = {}'.format(
                            t, norm(u_meas)))
                    else:
                        print('Writing file at t = {}\t'.format(t))

                outfile = outfile_root.format(i=i, s=seed)
                inout.write_HDF5_data(
                    u_meas.function_space().mesh().mpi_comm(), outfile,
                    u_meas, '/u', t)

                if xdmf:
                    xdmf[seed].write(u_meas, t)

    # write indices and timesteps
    write_timestamps(options, indices, times, seed_lst)


def write_timestamps(options, indices, times, seed_lst):
    file_root_lst = [meas['file_root'] for meas in options['estimation']
                     ['measurements']]

    # if seed_lst[0]:
    #     for i, out in enumerate(file_root_lst):
    #         if isinstance(out, str) and 'seed{s}' not in out:
    #             out_ = out.split('/')
    #             file_root_lst[i] = '/'.join(out_[:-1] + ['seed{s}', out_[-1]])

    for file_root in file_root_lst:
        for seed in seed_lst:
            path = Path(file_root.format(s=seed, i=-1)).parent
            if seed > 0 and 'seed{s}'.format(s=seed) not in str(path):
                path = path.joinpath('seed{s}'.format(s=seed))
            path = path.joinpath('timestamps.csv')
            if rank == 0:
                print('Writing timestamps to file: {}'.format(path))
                with path.open('w') as file:
                    writer = csv.writer(file, delimiter=' ')
                    for i, t in zip(indices, times):
                        writer.writerow((i, t))


def copy_inputfile(options, inputfile, seed_lst):
    if rank == 0:
        file_root_lst = [meas['file_root'] for meas in options['estimation']
                         ['measurements']]
        for file_root in file_root_lst:
            for seed in seed_lst:
                path = Path(file_root.format(s=seed, i=-1)).parent
                if seed > 0 and 'seed{s}'.format(s=seed) not in str(path):
                    path = path.joinpath('seed{s}'.format(s=seed))
                path = path.joinpath('input.yaml')
                shutil.copy2(str(inputfile), str(path))
                print('Copied input file to {}'.format(path))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='''\
Generate measurements from HDF5 checkpoints, generated by the Fractional-Step
or the monolithic Navier-Stokes solvers.

Reads options from the given input file.
''', formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('inputfile', type=str, help='path to yaml input file')
    # parser.add_argument('-s', '--seed', type=int, default=-1,
    #                     help='seed for random generator')
    parser.add_argument('-s', '--seed', nargs='+', type=int, default=[-1],
                        help='seed or list of seeds for repetitions of the '
                             'RNG; not really a seed, the noise is drawn '
                             'independently for all timesteps')
    parser.add_argument('-n', '--print_norms', action='store_true',
                        help='print norms')
    parser.add_argument('-d', '--dump', action='store_true',
                        help='dump minimal default parameters to inputfile')
    args = parser.parse_args()

    if args.dump:
        default = '''\
# default inputfile for gen_measurements_from_checkpoints.py
mesh: ./meshes/mesh.h5

io:
  write_path: ./results/test/  # path to checkpoints

timemarching:
  T: 0.4
  dt: 0.001

fem:
  velocity_space: p1  # p1, p2
  pressure_space: p1

estimation:
  measurements:
    - mesh: ./measurements/mesh_meas.h5  # measurement mesh
      fe_degree: 0  # 0 or 1
      file_root: ./measurements/u{{i}}.h5  # velocity measurements \
to be written by the program, {{i}} will be replaced by the corresponding \
index of the checkpoint
      xdmf_file: ./measurements/meas.xdmf  # path for optional XDMF output
      noise_stddev: 10.  # absolute standard deviation of Gaussian \
noise
      indices: 0  # indices of checkpoints to be processed. 0 == all
    - # second measurement ...
    - ...
'''

        with open(args.inputfile, 'w') as f:
            f.write(default)
        print('dumped default options to file: {}'.format(args.inputfile))
        sys.exit(0)

    seed = args.seed

    if seed[0] <= 0:
        assert len(seed) == 1, 'if multiple seeds are given, all should be > 0'

    try:
        options = inout.read_parameters(args.inputfile)
    except IOError:
        raise IOError('File could not be read: {}'.format(args.inputfile))

    generate(options, seed, args.print_norms)

    copy_inputfile(options, args.inputfile, seed)
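For reference, the measurement model that generate() applies at each checkpoint can be written with plain numpy. This is an illustration only; u_slice is a placeholder for the velocity interpolated onto a slice mesh, while the direction and standard deviation correspond to the velocity_direction and noise_stddev entries of the input file:

import numpy as np

rng = np.random.default_rng()
u_slice = np.zeros((100, 3))                         # placeholder nodal velocities on the slice
direction = np.array([0.98426389, 0.0, -0.17670481])
direction /= np.sqrt(np.dot(direction, direction))   # normalize, as in generate()
u_proj = u_slice @ direction                         # through-plane (scalar) component
u_meas = u_proj + rng.normal(0.0, 6.5625, u_proj.shape)  # add Gaussian noise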
22
kalman/other/input_meas.yaml
Executable file
@ -0,0 +1,22 @@
mesh: /home/yeye/Desktop/PhD/AORTA/MESH/aorta_fine2/aorta_fine2_marked.h5

io:
write_path: '/home/yeye/Desktop/PhD/AORTA/DATA/ct/aorta_fine/dt0.001' # path to checkpoints

timemarching:
T: 0.9
dt: 0.001

fem:
velocity_space: p1 # p1, p2
pressure_space: p1

estimation:
measurements:
- mesh: '/home/yeye/Desktop/PhD/AORTA/MESH/aorta_fine2/aorta_fine2_marked.h5' # measurement mesh
fe_degree: 1 # 0 or 1
file_root: '/home/yeye/Desktop/PhD/AORTA/DATA/ct/aorta_fine/dt0.001/measurements/u{i}.h5' # velocity measurements to be written by the program, {i} will be replaced by the corresponding index of the checkpoint
xdmf_file: /home/yeye/Desktop/PhD/AORTA/DATA/ct/aorta_fine/dt0.001/measurements/meas.xdmf # path for optional XDMF output
noise_stddev: 0 # absolute standard deviation of Gaussian noise
indices: 0 # indices of checkpoints to be processed. 0 == all
#velocity_direction: [0, 0, 1]
31
kalman/other/run_roukf.py
Executable file
@ -0,0 +1,31 @@
from roukf.roukf import *
from navierstokes.fractionalstep import *
from navierstokes import solver
from common import utils
from dolfin import *
import sys, os
import logging
logging.getLogger().setLevel(logging.INFO)

parameters["form_compiler"]["optimize"] = True
parameters["form_compiler"]["cpp_optimize"] = True
parameters["form_compiler"]["cpp_optimize_flags"] = "-O3 -xHost -ip" if \
    utils.on_cluster() else "-O3 -ffast-math -march=native"

# inpfile = '/home/yeye/Desktop/PhD/AORTA/CT_David/input/aorta_roukf.yaml'

if len(sys.argv) > 1:
    if os.path.exists(sys.argv[1]):
        inpfile = sys.argv[1]
        print('Found input file ' + inpfile)
    else:
        raise Exception('Command line arg given but input file does not exist:'
                        ' {}'.format(sys.argv[1]))
else:
    raise Exception('No input file given; pass a YAML input file as the first '
                    'command line argument')

# sol = solver(inpfile)
sol = solver.init(inpfile)

roukf = ROUKF(inpfile, sol)
roukf.solve()
42
kalman/other/theta_plot.py
Executable file
@ -0,0 +1,42 @@
import matplotlib.pyplot as plt
import numpy as np
import os

from matplotlib import rc
# rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
rc('text', usetex=True)


if 'Zion' in os.popen('hostname').read():
    user = 'yeye'
    np.set_printoptions(threshold=5)
if 'fwn-bborg-5-166' in os.popen('hostname').read():
    user = 'p283370'


Tf = 0.9
masterpath = '/home/' + user + '/Desktop/kalman/results/'
######################################################################

theta1 = np.loadtxt(masterpath + 'theta.txt')
t = np.linspace(0, Tf, theta1.size)
theta_real = t*0 - 60

# theta1 = 2**(theta1)
# theta2 = 2**(ltheta2)
# theta3 = 2**(ltheta3)

plt.figure(figsize=(10, 6), dpi=100)
plt.plot(t, theta1, '-', linewidth=2, label=r'$\theta_1 = $' + str(round(theta1[-1], 2)))
# plt.plot(t, theta1_s, 'o-', linewidth=2, label=r'$\theta_1 stokes = $' + str(round(theta1_s[-1], 2)))
# plt.plot(t, theta2, 'o-', linewidth=2, label=r'$\theta_2 = $' + str(round(theta2[-1], 2)))
# plt.plot(t, theta3, 'o-', linewidth=2, label=r'$\theta_3 = $' + str(round(theta3[-1], 2)))

plt.plot(t, theta_real, '-k', linewidth=2, label=r'$real = $' + str(theta_real[-1]))


plt.xlabel(r'$time \ \ \ (s)$', fontsize=20)
plt.ylabel(r'$ \theta $', fontsize=20)
plt.legend(fontsize=14)
# plt.title(r'$\sigma = 0.101 $', fontsize=20)
plt.show()
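theta_plot.py expects a plain-text theta.txt (one estimate per line) inside masterpath. A hypothetical way to produce a compatible file for testing the plot, with made-up values converging towards the reference value -60:

import numpy as np

t = np.linspace(0, 0.9, 200)
theta = -60 + 40*np.exp(-10*t)   # synthetic convergence curve, not real estimation output
np.savetxt('theta.txt', theta)   # place this file under masterpath before running theta_plot.py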
121
kalman/plot_roukf_parameters.py
Normal file
@ -0,0 +1,121 @@
import matplotlib.pyplot as plt
import numpy as np
from itertools import cycle
import argparse
import pickle


def is_ipython():
    ''' Check if script is run in IPython.

    Returns:
        bool: True if IPython, else False '''
    try:
        get_ipython()
        ipy = True
    except NameError:
        ipy = False

    return ipy


def load_data(file):
    ''' Load numpy data from file.

    Returns:
        dict: data dictionary
    '''
    dat = np.load(file)

    return dat


def plot_parameters(dat, deparameterize=False, ref=None):
    ''' Plot the parameters in separate subplots with uncertainties.

    Args:
        dat (dict): data dictionary
        deparameterize (bool): flag indicating if parameters should be
            deparameterized via 2**theta
        ref: reference value to be plotted with parameters
    '''
    if is_ipython():
        plt.ion()

    dim = dat['theta'].shape[-1]
    fig1, axes = plt.subplots(1, dim)

    if dim == 1:
        axes = [axes]

    axes[0].set_ylabel(r'$\theta$')

    t = dat['times']
    theta = dat['theta']
    P = dat['P_theta']

    col = cycle(['C0', 'C1', 'C0', 'C1'])
    ls = cycle(['-', '-', '--', '--', ':', ':', '-.', '-.'])

    col_ = next(col)
    ls_ = next(ls)
    for i, ax in enumerate(axes):
        if dim == 1:
            theta = theta.reshape((-1, 1))
            P = P.reshape((-1, 1, 1))

        if deparameterize:
            ax.plot(t, 2**theta[:, i], '-', c=col_, ls=ls_)
        else:
            ax.plot(t, theta[:, i], '-', c=col_, ls=ls_)
        ax.fill_between(t, theta[:, i] - np.sqrt(P[:, i, i]),
                        theta[:, i] + np.sqrt(P[:, i, i]), alpha=0.3,
                        color=col_)
        ax.set_xlabel(r'time')

    if ref:
        if isinstance(ref, (int, float)):
            ref = np.array(ref)

        for ax, ri in zip(axes, ref):
            if ri:
                # if deparameterize:
                ax.plot((t[0], t[-1]), (ri, )*2, '-.k', lw=2,
                        label=r'ground truth')
                # else:
                #     ax.plot((t[0], t[-1]), (np.log2(ri), )*2, '-.k', lw=2,
                #             label=r'ground truth')

    # print('theta_peak: \t {}'.format(theta[round(len(theta)/2), :]))
    print('Final value theta: \t {}'.format(theta[-1, :]))
    print('Deparameterized: 2^theta_end: \t {}'.format(2**theta[-1, :]))

    if not is_ipython():
        plt.show()


def get_parser():
    parser = argparse.ArgumentParser(
        description='''
Plot the time evolution of the ROUKF estimated parameters.

To execute in IPython::

    %run plot_roukf_parameters.py [-d] [-r N [N ...]] file
''',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('file', type=str, help='path to ROUKF stats file')
    parser.add_argument('-d', '--deparameterize', action='store_true',
                        help='deparameterize the parameters by 2**theta')
    parser.add_argument('-r', '--ref', metavar='N', nargs='+', default=None,
                        type=float, help='Reference values for parameters')
    return parser


if __name__ == '__main__':
    args = get_parser().parse_args()

    dat = load_data(args.file)

    plot_parameters(dat, deparameterize=args.deparameterize, ref=args.ref)
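plot_roukf_parameters.py only relies on the arrays 'times', 'theta' and 'P_theta' in the loaded file; assuming the stats file is an .npz archive (np.load then behaves like a dict), a synthetic file for testing the plot could be produced as follows (file name and values are hypothetical):

import numpy as np

n_steps, n_params = 100, 2
np.savez('roukf_stats.npz',
         times=np.linspace(0.0, 0.8, n_steps),
         theta=0.05*np.random.randn(n_steps, n_params).cumsum(axis=0),
         P_theta=np.tile(0.1*np.eye(n_params), (n_steps, 1, 1)))

It could then be inspected with, for example, python plot_roukf_parameters.py roukf_stats.npz -r 1 1.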
75
kalman/run.py
Normal file
@ -0,0 +1,75 @@
from roukf.core import ROUKF
import argparse
import importlib
from dolfin import *
import dolfin
import logging

logging.getLogger().setLevel(logging.INFO)

parameters['form_compiler']['optimize'] = True
parameters['form_compiler']['cpp_optimize'] = True
parameters['form_compiler']['cpp_optimize_flags'] = ('-O3 -ffast-math '
                                                     '-march=native')


def print_timing():
    if dolfin.__version__ >= '2018':
        list_timings(TimingClear.clear, [TimingType.wall])
    else:
        list_timings(TimingClear_clear, [TimingType_wall])


def main(fwd_solver_module, inputfile):
    ''' Run ROUKF parameter estimation.

    Imports the given forward solver module and initializes the forward solver
    with the specified input file.
    Creates a ROUKF solver from the same input file and the instantiated
    forward solver object and solves the optimization problem specified in the
    input file.

    Args:
        fwd_solver_module (str): forward solver module to be imported and
            passed to the ROUKF solver
        inputfile (str): YAML input file with configuration of both
            forward and ROUKF solver
    '''

    fwd_solver = importlib.import_module(fwd_solver_module)
    fwd_solver = fwd_solver.init(inputfile)
    fwd_solver.logger.setLevel(logging.WARNING)

    kf = ROUKF(inputfile, fwd_solver)
    kf.logger.setLevel(logging.INFO)
    kf.solve()

    print_timing()


def get_parser():
    parser = argparse.ArgumentParser(
        description='''\
Run ROUKF parameter estimation.

1. Imports the given forward solver module and initializes the forward solver
with the specified input file. The solver module is required to have a
method `init()` which handles the complete setup and returns self.
2. Creates a ROUKF solver from the same input file and the instantiated
forward solver object and solves the optimization problem specified in the
input file.''',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('fwd_solver', type=str,
                        help='Name of the forward solver module (see full'
                        ' documentation), such that it can be imported.\n'
                        'For example:\n navierstokes.solver or '
                        'hyperelasticity.solver')
    parser.add_argument('inputfile', type=str, help='path to YAML input file')
    return parser


if __name__ == '__main__':

    args = get_parser().parse_args()

    main(args.fwd_solver, args.inputfile)
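run.py can also be driven programmatically; assuming a forward solver package that exposes init() returning the solver object, as the docstring above requires, the following is equivalent to calling python run.py navierstokes.solver input.yaml on the command line (module and input-file names are examples, not fixed by the script):

# Sketch only; 'navierstokes.solver' and 'input.yaml' are placeholders.
from run import main

main('navierstokes.solver', 'input.yaml')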
42
kalman/run_forward.py
Normal file
@ -0,0 +1,42 @@
''' Run a forward simulation.
Detects FractionalStep and Hyperelasticity from input files.
'''
from dolfin import parameters
import ruamel.yaml as yaml
import argparse
import logging
logging.getLogger().setLevel(logging.INFO)

parameters['form_compiler']['optimize'] = True
parameters['form_compiler']['cpp_optimize'] = True
parameters['form_compiler']['cpp_optimize_flags'] = ('-O3 -ffast-math '
                                                     '-march=native')


def get_forward_solver(inputfile):
    ''' Get the forward solver from the input file. '''
    with open(inputfile, 'r') as f:
        options = yaml.load(f)

    if 'fluid' in options:
        from navierstokes import solver
    elif 'material' in options:
        parameters['form_compiler']['quadrature_degree'] = 6
        from hyperelasticity import solver
    else:
        raise Exception('Could not detect the solver type: expected a '
                        "'fluid' or 'material' section in the input file")

    return solver


def get_parser():
    parser = argparse.ArgumentParser(
        description='Run forward simulation',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('inputfile', type=str, help='path to YAML input file')
    return parser


if __name__ == '__main__':
    inputfile = get_parser().parse_args().inputfile

    solver = get_forward_solver(inputfile)
    sol = solver.init(inputfile)
    sol.solve()
@ -23,6 +23,7 @@
 \usepackage{listings,xcolor,caption, mathtools, wrapfig}
 \usepackage{amsfonts}
 \usepackage{amssymb,graphicx,enumerate}
+\usepackage{subcaption}
 \usepackage{hyperref}

 \usepackage[normalem]{ulem} % for strike out command \sout
@ -682,16 +683,36 @@ Experiments using real 4D flow data



 \begin{frame}
 \frametitle{Results}
 \footnotesize
+
-\begin{figure}[!hbtp]
-\begin{center}
-\includegraphics[height=0.5\textwidth]{images/phantom_cib.png}
-\caption{At peak systole: a) measurements b) corrector field c) corrected measurements: $\vec u_{meas} + \vec w$}
-\end{center}
-\end{figure}
+\begin{figure}
+\begin{subfigure}{.31\textwidth}
+\centering
+\includegraphics[trim=100 80 100 150, clip, width=1.0\textwidth]{images/u_15.png}
+\caption*{(a) $\vec{u}_{meas}$}
+\end{subfigure}
+\begin{subfigure}{.01\textwidth}
+\hfill
+\end{subfigure}
+\begin{subfigure}{.31\textwidth}
+\centering
+\includegraphics[trim=100 80 100 150, clip, width=1.0\textwidth]{images/w_15.png}
+\caption*{(b) $\vec{w}$}
+\end{subfigure}
+\begin{subfigure}{.01\textwidth}
+\hfill
+\end{subfigure}
+\begin{subfigure}{.31\textwidth}
+\centering
+\includegraphics[trim=100 80 100 150, clip, width=1.0\textwidth]{images/uc_15.png}
+\caption*{(c) $\vec{u}_{meas}+\vec{w}$}
+\end{subfigure}
+\caption{Measurements, corrector fields and corrected velocities for all the cases.}
+\label{fig:phantom_resolution}
+\end{figure}

 \end{frame}