ref: 8e7080903dcac51ebbddc541429c338259a4d031
parent: d15be43af425013e27ef872ac672700e0b642ac1
author: Jean-Marc Valin <jmvalin@amazon.com>
date: Sun Jul 23 14:25:14 EDT 2023
Make float_weights optional: when a named float_weights array is absent
from the weight blob, set the pointer to NULL and continue instead of
failing; still treat a present array with a mismatched size as an error.
--- a/dnn/parse_lpcnet_weights.c
+++ b/dnn/parse_lpcnet_weights.c
@@ -88,6 +88,13 @@
else return NULL;
}
+static const void *opt_array_check(const WeightArray *arrays, const char *name, int size, int *error) {
+ const WeightArray *a = find_array_entry(arrays, name);
+ *error = (a != NULL && a->size != size);
+ if (a && a->size == size) return a->data;
+ else return NULL;
+}
+
static const void *find_idx_check(const WeightArray *arrays, const char *name, int nb_in, int nb_out, int *total_blocks) {
int remain;
const int *idx;
@@ -124,6 +131,7 @@
int nb_inputs,
int nb_outputs)
{
+ int err;
layer->bias = NULL;
layer->subias = NULL;
layer->weights = NULL;
@@ -144,7 +152,8 @@
if ((layer->weights = find_array_check(arrays, weights, SPARSE_BLOCK_SIZE*total_blocks*sizeof(layer->weights[0]))) == NULL) return 1;
}
if (float_weights != NULL) {
- if ((layer->float_weights = find_array_check(arrays, float_weights, SPARSE_BLOCK_SIZE*total_blocks*sizeof(layer->float_weights[0]))) == NULL) return 1;
+ layer->float_weights = opt_array_check(arrays, float_weights, SPARSE_BLOCK_SIZE*total_blocks*sizeof(layer->float_weights[0]), &err);
+ if (err) return 1;
}
} else {
if (weights != NULL) {
@@ -151,7 +160,8 @@
if ((layer->weights = find_array_check(arrays, weights, nb_inputs*nb_outputs*sizeof(layer->weights[0]))) == NULL) return 1;
}
if (float_weights != NULL) {
- if ((layer->float_weights = find_array_check(arrays, float_weights, nb_inputs*nb_outputs*sizeof(layer->float_weights[0]))) == NULL) return 1;
+ layer->float_weights = opt_array_check(arrays, float_weights, nb_inputs*nb_outputs*sizeof(layer->float_weights[0]), &err);
+ if (err) return 1;
}
}
if (diag != NULL) {
--