Add sp (short pause) to the model.

parent f6e7c8eefa
commit ab3887c6ca

Binary file not shown.
@@ -25,13 +25,17 @@ make_label = 0 # it takes roughly 4800 sec on Surface pro 2.
 make_htk_files = 0
 extract_features = 0
 flat_start = 0
-train_model_without_sp = 1
+train_model_without_sp = 0
+add_sp = 0
+train_model_with_sp = 0
+


 # pre-defined values.

 dataset_list = ['devel', 'test', 'train']
 hmmdefs_name = 'hmmdefs'
+proto_name = 'proto39'

 lexicon_asr = os.path.join(default.fame_dir, 'lexicon', 'lex.asr')
 lexicon_oov = os.path.join(default.fame_dir, 'lexicon', 'lex.oov')
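This hunk switches train_model_without_sp off by default and introduces two new stage flags, add_sp and train_model_with_sp, so that each step of the pipeline can be toggled and re-run in isolation. It also lifts the previously hard-coded prototype name 'proto39' into proto_name, so the flat-start and re-estimation code below can share one definition.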
@@ -41,7 +45,8 @@ config_hcopy = os.path.join(config_dir, 'config.HCopy')
 config_train = os.path.join(config_dir, 'config.train')
 global_ded = os.path.join(config_dir, 'global.ded')
 mkphones_led = os.path.join(config_dir, 'mkphones.led')
-prototype = os.path.join(config_dir, 'proto39')
+sil_hed = os.path.join(config_dir, 'sil.hed')
+prototype = os.path.join(config_dir, proto_name)

 model_dir = os.path.join(default.htk_dir, 'model')

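The new sil_hed path points at an HHEd edit script whose contents are not part of this commit. Assuming it follows the standard HTK Book flat-start recipe, it ties the short-pause model's single emitting state to the centre state of sil and adds the extra transitions. A sketch of what such a file would contain (hypothetical; the actual sil.hed in config_dir may differ):

# Hypothetical sketch of a standard HTK Book 'sil.hed'.
sil_hed_lines = [
    'AT 2 4 0.2 {sil.transP}',              # extra forward skip in sil
    'AT 4 2 0.2 {sil.transP}',              # backward transition in sil
    'AT 1 3 0.3 {sp.transP}',               # make sp a tee model (direct 1->3 skip)
    'TI silst {sil.state[3],sp.state[2]}',  # tie sp's state to sil's centre state
]
with open('sil.hed', 'w') as f:             # path hypothetical
    f.write('\n'.join(sil_hed_lines) + '\n')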
@@ -55,6 +60,7 @@ lexicon_htk = os.path.join(lexicon_dir, 'lex.htk')

 phonelist_txt = os.path.join(config_dir, 'phonelist.txt')
 model0_dir = os.path.join(model_dir, 'hmm0')
+model1_dir = os.path.join(model_dir, 'hmm1')

 feature_dir = os.path.join(default.htk_dir, 'mfc')
 if not os.path.exists(feature_dir):
@@ -66,6 +72,13 @@ label_dir = os.path.join(default.htk_dir, 'label')
 if not os.path.exists(label_dir):
     os.makedirs(label_dir)

+## training
+hcompv_scp_train = os.path.join(tmp_dir, 'train.scp')
+mlf_file_train = os.path.join(label_dir, 'train_phone.mlf')
+
+## train without sp
+niter_max = 10
+

 ## ======================= make lexicon for HTK =======================
 if make_lexicon:
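The two new training inputs follow standard HTK formats: train.scp is a script file listing one feature file per line, and train_phone.mlf is a phone-level Master Label File. A minimal illustration (entries hypothetical, in the FAME-style naming this repo uses):

import os

# An HTK .scp file lists one feature file per line:
scp_example = (
    '/path/to/mfc/train/train_2002_gongfansaken_10347.mfc\n'
    '/path/to/mfc/train/train_2003_example_00001.mfc\n'
)

# A phone-level MLF starts with "#!MLF!#" and terminates each
# utterance's label list with a lone '.':
mlf_example = (
    '#!MLF!#\n'
    '"*/train_2003_example_00001.lab"\n'
    'sil\n'
    's\n'
    'o\n'
    'sil\n'
    '.\n'
)

def missing_features(scp_path):
    """Return entries in an HTK .scp whose feature files are missing on disk."""
    with open(scp_path) as f:
        return [p.strip() for p in f
                if p.strip() and not os.path.exists(p.strip())]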
@@ -95,6 +108,7 @@ if make_lexicon:

 ## ======================= make label files =======================
 if make_label:
+    # train_2002_gongfansaken_10347.lab is empty. should be removed.
     for dataset in dataset_list:
         timer_start = time.time()
         print("==== making label files on dataset {}".format(dataset))
@@ -216,45 +230,111 @@ if extract_features:

 ## ======================= flat start monophones =======================
 if flat_start:
-    hcompv_scp = os.path.join(tmp_dir, 'test.scp')

     timer_start = time.time()
     print('==== flat start ====')
-    pyhtk.flat_start(config_train, hcompv_scp, model0_dir, prototype)
+    pyhtk.flat_start(config_train, hcompv_scp_train, model0_dir, prototype)

     # allocate mean & variance to all phones in the phone list
+    print('>>> allocating mean & variance to all phones in the phone list...')
     pyhtk.create_hmmdefs(
-        os.path.join(model0_dir, 'proto39'),
+        os.path.join(model0_dir, proto_name),
         os.path.join(model0_dir, 'hmmdefs'),
         phonelist_txt)

+    # make macros
+    print('>>> making macros...')
+    with open(os.path.join(model0_dir, 'vFloors')) as f:
+        lines = f.read()
+    with open(os.path.join(model0_dir, 'macros'), 'wb') as f:
+        f.write(bytes('~o <MFCC_0_D_A> <VecSize> 39\n' + lines, 'ascii'))
+
     print("elapsed time: {}".format(time.time() - timer_start))
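pyhtk.flat_start presumably wraps HTK's HCompV, which computes a global mean and variance over the training data and clones them into every state of the prototype; with -f it also writes the vFloors file that the new "make macros" block prepends with the ~o options macro. A hedged sketch of the equivalent direct call (flags per the HTK Book; the wrapper's exact arguments may differ):

import subprocess

# Sketch only: variable names are the ones defined earlier in this script.
subprocess.run([
    'HCompV',
    '-C', config_train,       # HTK configuration file
    '-f', '0.01',             # write vFloors at 1% of the global variance
    '-m',                     # re-estimate means, not only variances
    '-S', hcompv_scp_train,   # list of training feature files
    '-M', model0_dir,         # output directory (hmm0)
    prototype,                # prototype HMM (proto39)
], check=True)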

-## ======================= estimate monophones =======================
+## ======================= train model without short pause =======================
 if train_model_without_sp:
-    hcompv_scp = os.path.join(tmp_dir, 'test.scp')
-    mlf_file = os.path.join(label_dir, 'test_phone.mlf')
-    output_dir = os.path.join(model_dir, 'hmm1')
-    fh.make_new_directory(output_dir)
+    fh.make_new_directory(model1_dir)

     print('==== train model without sp ====')
-    if not os.path.exists(os.path.join(output_dir, 'iter0')):
-        shutil.copytree(model0_dir, os.path.join(output_dir, 'iter0'))
+    if not os.path.exists(os.path.join(model1_dir, 'iter0')):
+        shutil.copytree(model0_dir, os.path.join(model1_dir, 'iter0'))

-    niter = 1
-    for niter in range(1, 5):
+    for niter in range(1, niter_max):
         timer_start = time.time()
         hmm_n = 'iter' + str(niter)
         hmm_n_pre = 'iter' + str(niter-1)
-        modeln_dir = os.path.join(output_dir, hmm_n)
-        modeln_dir_pre = os.path.join(output_dir, hmm_n_pre)
+        modeln_dir = os.path.join(model1_dir, hmm_n)
+        modeln_dir_pre = os.path.join(model1_dir, hmm_n_pre)

         # re-estimation
         fh.make_new_directory(modeln_dir)
         pyhtk.re_estimation(
             config_train,
-            os.path.join(modeln_dir_pre, 'proto39'),
+            os.path.join(modeln_dir_pre, 'macros'),
             os.path.join(modeln_dir_pre, hmmdefs_name),
             modeln_dir,
-            hcompv_scp, phonelist_txt,
+            hcompv_scp_train, phonelist_txt,
-            mlf_file=mlf_file)
+            mlf_file=mlf_file_train)
+        print("elapsed time: {}".format(time.time() - timer_start))
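pyhtk.re_estimation presumably wraps HERest, HTK's embedded Baum-Welch re-estimation tool: it reads the previous iteration's macros and hmmdefs and writes an updated model set into modeln_dir. A hedged sketch of the equivalent direct call (the pruning thresholds are the common HTK Book values, not values from this repo):

import os
import subprocess

# Sketch only: one HERest pass, using names defined in the script above.
subprocess.run([
    'HERest',
    '-C', config_train,                                # HTK configuration
    '-I', mlf_file_train,                              # phone-level MLF
    '-t', '250.0', '150.0', '1000.0',                  # pruning thresholds
    '-S', hcompv_scp_train,                            # training feature files
    '-H', os.path.join(modeln_dir_pre, 'macros'),      # input global options
    '-H', os.path.join(modeln_dir_pre, hmmdefs_name),  # input model set
    '-M', modeln_dir,                                  # output directory
    phonelist_txt,                                     # monophone list
], check=True)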

+## ======================= adding sp to the model =======================
+if add_sp:
+    print('==== adding sp to the model ====')
+
+    # make model with sp.
+    print('>>> modifying the last model in the previous step...')
+    modeln_dir_pre = os.path.join(model1_dir, 'iter'+str(niter_max-1))
+    modeln_dir = modeln_dir_pre.replace('iter' + str(niter_max-1), 'iter' + str(niter_max))
+    fh.make_new_directory(modeln_dir)
+    shutil.copy(
+        os.path.join(modeln_dir_pre, 'macros'),
+        os.path.join(modeln_dir, 'macros'))
+    shutil.copy(
+        os.path.join(modeln_dir_pre, hmmdefs_name),
+        os.path.join(modeln_dir, hmmdefs_name))
+
+    ## =======================
+    ## manually make changes to modeln_dir/hmmdefs
+    ## =======================
+    # add states 'sil'.
+    # http://www.f.waseda.jp/yusukekondo/htk.html#flat_start_estimation
+    #shutil.copy(
+    #    os.path.join(model_dir, 'hmmdefs.txt'),
+    #    os.path.join(modeln_dir, hmmdefs_name))
+
+    #hmmdefs_file_pre = os.path.join(modeln_dir_pre, hmmdefs_name)
+    hmmdefs_file = os.path.join(modeln_dir, hmmdefs_name)
+    macros_file = os.path.join(modeln_dir, 'macros')
+    #with open(hmmdefs_file_pre) as f:
+    #    lines = f.read()
+    #lines_ = lines.split('~h ')
+    #sil_model = [line for line in lines_ if line.split('\n')[0].replace('"', '') == 'sil'][0]
+
+    # update hmmdefs and macros.
+    print('>>> updating hmmdefs and macros...')
+    modeln_dir_pre = modeln_dir
+    modeln_dir = modeln_dir.replace('iter' + str(niter_max), 'iter' + str(niter_max+1))
+    fh.make_new_directory(modeln_dir)
+    pyhtk.include_sil_in_hmmdefs(macros_file, hmmdefs_file, modeln_dir, sil_hed, phonelist_txt)
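pyhtk.include_sil_in_hmmdefs is handed the sil.hed script, which suggests the standard HTK recipe: append an 'sp' definition to hmmdefs whose single emitting state is a copy of the centre state of 'sil', then run HHEd to tie the two states and adjust the transitions. A hedged sketch of the HHEd step it presumably boils down to (assuming phonelist_txt now lists 'sp'; the wrapper may differ):

import subprocess

# Sketch only: edit the copied model set in place and write it to iter11.
subprocess.run([
    'HHEd',
    '-H', macros_file,    # global options (~o ... plus variance floors)
    '-H', hmmdefs_file,   # model set that now also contains 'sp'
    '-M', modeln_dir,     # output directory for the edited models
    sil_hed,              # AT/TI edit commands (see the sil.hed sketch above)
    phonelist_txt,        # phone list, which must include 'sp'
], check=True)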

+
+## ======================= train model with short pause =======================
+if train_model_with_sp:
+    print('==== train model with sp ====')
+    for niter in range(niter_max+1, niter_max*2+1):
+        timer_start = time.time()
+        hmm_n = 'iter' + str(niter)
+        hmm_n_pre = 'iter' + str(niter-1)
+        modeln_dir = os.path.join(model1_dir, hmm_n)
+        modeln_dir_pre = os.path.join(model1_dir, hmm_n_pre)
+
+        # re-estimation
+        fh.make_new_directory(modeln_dir)
+        pyhtk.re_estimation(
+            config_train,
+            os.path.join(modeln_dir_pre, 'macros'),
+            os.path.join(modeln_dir_pre, hmmdefs_name),
+            modeln_dir,
+            hcompv_scp_train, phonelist_txt,
+            mlf_file=mlf_file_train)
         print("elapsed time: {}".format(time.time() - timer_start))
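For orientation, with niter_max = 10 the stages above lay out the model directories as follows: training without sp produces model/hmm1/iter1 through iter9 (range(1, niter_max) stops at 9), add_sp copies iter9 into iter10 and writes the sp-augmented set into iter11, and train_model_with_sp then iterates niter = 11..20. Note that its first pass reads iter10 and writes iter11, the same directory include_sil_in_hmmdefs just produced.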