ref: 219fbff4e6adee83c3b43a745c413259477df8f9
parent: edee9cd8f25ba3710d018b4b8fd85face2345b4a
author: Jean-Marc Valin <jmvalin@jmvalin.ca>
date: Sat Mar 23 23:48:26 EDT 2019
Making it easier to adapt (or not) a model
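With adaptation enabled, the script loads an existing model, keeps the sparsification pattern enforced from the first batch, and fine-tunes with a lower learning rate and no decay; when training from scratch, sparsification ramps up between batches 2000 and 40000 and the learning rate starts higher with a small decay.

A minimal usage sketch, not part of the patch, assuming Sparsify(start, end, interval, densities) ramps the recurrent-weight sparsification between batches `start` and `end` and re-applies it every `interval` batches (the checkpoint name is the one used in the patch; substitute your own model file):

    # Sketch only: fine-tuning an existing LPCNet model on new data by
    # flipping the new `adaptation` switch in train_lpcnet.py.
    adaptation = True
    if adaptation:
        # Start from an already-trained model and keep its sparsity
        # pattern enforced on every batch.
        model.load_weights('lpcnet24c_384_10_G16_120.h5')
        sparsify = lpcnet.Sparsify(0, 0, 1, (0.05, 0.05, 0.2))
        lr, decay = 0.0001, 0
    else:
        # Train from scratch: ramp sparsification in gradually, with a
        # higher initial learning rate and a small decay.
        sparsify = lpcnet.Sparsify(2000, 40000, 400, (0.05, 0.05, 0.2))
        lr, decay = 0.001, 5e-5
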
--- a/dnn/train_lpcnet.py
+++ b/dnn/train_lpcnet.py
@@ -105,6 +105,20 @@
# dump models to disk as we go
 checkpoint = ModelCheckpoint('lpcnet24g_384_10_G16_{epoch:02d}.h5')
-model.load_weights('lpcnet24c_384_10_G16_120.h5')
-model.compile(optimizer=Adam(0.0001, amsgrad=True), loss='sparse_categorical_crossentropy')
-model.fit([in_data, features, periods], out_exc, batch_size=batch_size, epochs=nb_epochs, validation_split=0.0, callbacks=[checkpoint, lpcnet.Sparsify(0, 0, 1, (0.05, 0.05, 0.2))])
+#Set this to True to adapt an existing model (e.g. on new data)
+adaptation = False
+
+if adaptation:
+    #Adapting from an existing model
+    model.load_weights('lpcnet24c_384_10_G16_120.h5')
+    sparsify = lpcnet.Sparsify(0, 0, 1, (0.05, 0.05, 0.2))
+    lr = 0.0001
+    decay = 0
+else:
+    #Training from scratch
+    sparsify = lpcnet.Sparsify(2000, 40000, 400, (0.05, 0.05, 0.2))
+    lr = 0.001
+    decay = 5e-5
+
+model.compile(optimizer=Adam(lr, amsgrad=True, decay=decay), loss='sparse_categorical_crossentropy')
+model.fit([in_data, features, periods], out_exc, batch_size=batch_size, epochs=nb_epochs, validation_split=0.0, callbacks=[checkpoint, sparsify])
--