shithub: opus

ref: 299e38cab774fa4bd9708581210af8b09c6b5e4e
parent: 4f311a1ad44f1b7bd60e32984ca0604c46b6c593
author: Jan Buethe <jbuethe@amazon.de>
date: Mon Dec 18 07:19:55 EST 2023

Updated LACE and NoLACE models to version 2

--- a/autogen.sh
+++ b/autogen.sh
@@ -9,7 +9,7 @@
 srcdir=`dirname $0`
 test -n "$srcdir" && cd "$srcdir"
 
-dnn/download_model.sh caca188
+dnn/download_model.sh 88477f4
 
 echo "Updating build configuration files, please wait...."
 
--- a/dnn/nndsp.c
+++ b/dnn/nndsp.c
@@ -340,7 +340,8 @@
     float *x_out,
     const float *x_in,
     const float *features,
-    const LinearLayer *alpha1,
+    const LinearLayer *alpha1f,
+    const LinearLayer *alpha1t,
     const LinearLayer *alpha2,
     int feature_dim,
     int frame_size,
@@ -350,6 +351,7 @@
 {
     float in_buffer[ADASHAPE_MAX_INPUT_DIM + ADASHAPE_MAX_FRAME_SIZE];
     float out_buffer[ADASHAPE_MAX_FRAME_SIZE];
+    float tmp_buffer[ADASHAPE_MAX_FRAME_SIZE];
     int i, k;
     int tenv_size;
     float mean;
@@ -389,7 +391,8 @@
 #ifdef DEBUG_NNDSP
     print_float_vector("alpha1_in", in_buffer, feature_dim + tenv_size + 1);
 #endif
-    compute_generic_conv1d(alpha1, out_buffer, hAdaShape->conv_alpha1_state, in_buffer, feature_dim + tenv_size + 1, ACTIVATION_LINEAR, arch);
+    compute_generic_conv1d(alpha1f, out_buffer, hAdaShape->conv_alpha1f_state, in_buffer, feature_dim, ACTIVATION_LINEAR, arch);
+    compute_generic_conv1d(alpha1t, tmp_buffer, hAdaShape->conv_alpha1t_state, tenv, tenv_size + 1, ACTIVATION_LINEAR, arch);
 #ifdef DEBUG_NNDSP
     print_float_vector("alpha1_out", out_buffer, frame_size);
 #endif
@@ -396,7 +399,8 @@
     /* compute leaky ReLU by hand. ToDo: try tanh activation */
     for (i = 0; i < frame_size; i ++)
     {
-        in_buffer[i] = out_buffer[i] >= 0 ? out_buffer[i] : 0.2f * out_buffer[i];
+        float tmp = out_buffer[i] + tmp_buffer[i];
+        in_buffer[i] = tmp >= 0 ? tmp : 0.2f * tmp;
     }
 #ifdef DEBUG_NNDSP
     print_float_vector("post_alpha1", in_buffer, frame_size);
--- a/dnn/nndsp.h
+++ b/dnn/nndsp.h
@@ -71,7 +71,8 @@
 
 
 typedef struct {
-    float conv_alpha1_state[ADASHAPE_MAX_INPUT_DIM];
+    float conv_alpha1f_state[ADASHAPE_MAX_INPUT_DIM];
+    float conv_alpha1t_state[ADASHAPE_MAX_INPUT_DIM];
     float conv_alpha2_state[ADASHAPE_MAX_FRAME_SIZE];
 } AdaShapeState;
 
@@ -130,7 +131,8 @@
     float *x_out,
     const float *x_in,
     const float *features,
-    const LinearLayer *alpha1,
+    const LinearLayer *alpha1f,
+    const LinearLayer *alpha1t,
     const LinearLayer *alpha2,
     int feature_dim,
     int frame_size,
--- a/dnn/osce.c
+++ b/dnn/osce.c
@@ -155,7 +155,7 @@
         &hLACE->layers.lace_fnet_tconv,
         output_buffer,
         input_buffer,
-        ACTIVATION_LINEAR,
+        ACTIVATION_TANH,
         arch
     );
 
@@ -426,7 +426,7 @@
         &hNoLACE->layers.nolace_fnet_tconv,
         output_buffer,
         input_buffer,
-        ACTIVATION_LINEAR,
+        ACTIVATION_TANH,
         arch
     );
 
@@ -633,7 +633,8 @@
             x_buffer2 + i_subframe * NOLACE_AF1_OUT_CHANNELS * NOLACE_FRAME_SIZE + NOLACE_FRAME_SIZE,
             x_buffer2 + i_subframe * NOLACE_AF1_OUT_CHANNELS * NOLACE_FRAME_SIZE + NOLACE_FRAME_SIZE,
             feature_buffer + i_subframe * NOLACE_COND_DIM,
-            &layers->nolace_tdshape1_alpha1,
+            &layers->nolace_tdshape1_alpha1_f,
+            &layers->nolace_tdshape1_alpha1_t,
             &layers->nolace_tdshape1_alpha2,
             NOLACE_TDSHAPE1_FEATURE_DIM,
             NOLACE_TDSHAPE1_FRAME_SIZE,
@@ -688,7 +689,8 @@
             x_buffer1 + i_subframe * NOLACE_AF2_OUT_CHANNELS * NOLACE_FRAME_SIZE + NOLACE_FRAME_SIZE,
             x_buffer1 + i_subframe * NOLACE_AF2_OUT_CHANNELS * NOLACE_FRAME_SIZE + NOLACE_FRAME_SIZE,
             feature_buffer + i_subframe * NOLACE_COND_DIM,
-            &layers->nolace_tdshape2_alpha1,
+            &layers->nolace_tdshape2_alpha1_f,
+            &layers->nolace_tdshape2_alpha1_t,
             &layers->nolace_tdshape2_alpha2,
             NOLACE_TDSHAPE2_FEATURE_DIM,
             NOLACE_TDSHAPE2_FRAME_SIZE,
@@ -739,7 +741,8 @@
             x_buffer2 + i_subframe * NOLACE_AF3_OUT_CHANNELS * NOLACE_FRAME_SIZE + NOLACE_FRAME_SIZE,
             x_buffer2 + i_subframe * NOLACE_AF3_OUT_CHANNELS * NOLACE_FRAME_SIZE + NOLACE_FRAME_SIZE,
             feature_buffer + i_subframe * NOLACE_COND_DIM,
-            &layers->nolace_tdshape3_alpha1,
+            &layers->nolace_tdshape3_alpha1_f,
+            &layers->nolace_tdshape3_alpha1_t,
             &layers->nolace_tdshape3_alpha2,
             NOLACE_TDSHAPE3_FEATURE_DIM,
             NOLACE_TDSHAPE3_FRAME_SIZE,
@@ -884,7 +887,7 @@
         if (ret == 0) {ret = init_lace(&model->lace, list);}
 #endif
 
-#ifndef DISABLE_LACE
+#ifndef DISABLE_NOLACE
         if (ret == 0) {ret = init_nolace(&model->nolace, list);}
 #endif
 
@@ -898,7 +901,7 @@
         if (ret == 0) {ret = init_lace(&model->lace, lacelayers_arrays);}
 #endif
 
-#ifndef DISABLE_LACE
+#ifndef DISABLE_NOLACE
         if (ret == 0) {ret = init_nolace(&model->nolace, nolacelayers_arrays);}
 #endif
 
--- a/dnn/osce_config.h
+++ b/dnn/osce_config.h
@@ -41,7 +41,7 @@
 
 #define OSCE_PREEMPH 0.85f
 
-#define OSCE_PITCH_HANGOVER 8
+#define OSCE_PITCH_HANGOVER 0
 
 #define OSCE_CLEAN_SPEC_START 0
 #define OSCE_CLEAN_SPEC_LENGTH 64
--- a/dnn/osce_features.c
+++ b/dnn/osce_features.c
@@ -296,6 +296,7 @@
 static int pitch_postprocessing(OSCEFeatureState *psFeatures, int lag, int type)
 {
     int new_lag;
+    int modulus;
 
 #ifdef OSCE_HANGOVER_BUGFIX
 #define TESTBIT 1
@@ -303,6 +304,9 @@
 #define TESTBIT 0
 #endif
 
+    modulus = OSCE_PITCH_HANGOVER;
+    if (modulus == 0) modulus++;
+
     /* hangover is currently disabled to reflect a bug in the python code. ToDo: re-evaluate hangover */
     if (type != TYPE_VOICED && psFeatures->last_type == TYPE_VOICED && TESTBIT)
     /* enter hangover */
@@ -311,7 +315,7 @@
         if (psFeatures->pitch_hangover_count < OSCE_PITCH_HANGOVER)
         {
             new_lag = psFeatures->last_lag;
-            psFeatures->pitch_hangover_count = (psFeatures->pitch_hangover_count + 1) % OSCE_PITCH_HANGOVER;
+            psFeatures->pitch_hangover_count = (psFeatures->pitch_hangover_count + 1) % modulus;
         }
     }
     else if (type != TYPE_VOICED && psFeatures->pitch_hangover_count && TESTBIT)
@@ -318,7 +322,7 @@
     /* continue hangover */
     {
         new_lag = psFeatures->last_lag;
-        psFeatures->pitch_hangover_count = (psFeatures->pitch_hangover_count + 1) % OSCE_PITCH_HANGOVER;
+        psFeatures->pitch_hangover_count = (psFeatures->pitch_hangover_count + 1) % modulus;
     }
     else if (type != TYPE_VOICED)
     /* unvoiced frame after hangover */
@@ -376,11 +380,7 @@
     /* smooth bit count */
     psFeatures->numbits_smooth = 0.9f * psFeatures->numbits_smooth + 0.1f * num_bits;
     numbits[0] = num_bits;
-#ifdef OSCE_NUMBITS_BUGFIX
     numbits[1] = psFeatures->numbits_smooth;
-#else
-    numbits[1] = num_bits;
-#endif
 
     for (n = 0; n < num_samples; n++)
     {
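
Note on the hangover change: with OSCE_PITCH_HANGOVER set to 0 in osce_config.h, the counter update above would divide by zero without the modulus guard. A condensed Python sketch of the guarded update (function name is illustrative):

    def update_hangover_count(count, hangover):
        # hangover == 0 disables the feature; clamping the modulus to 1
        # keeps (count + 1) % modulus well defined in that case
        modulus = hangover if hangover > 0 else 1
        return (count + 1) % modulus

    assert update_hangover_count(3, 0) == 0
    assert update_hangover_count(3, 8) == 4
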
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/__init__.py
@@ -1,0 +1,2 @@
+from . import quantization
+from . import sparsification
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/quantization/__init__.py
@@ -1,0 +1,1 @@
+from .softquant import soft_quant, remove_soft_quant
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/quantization/softquant.py
@@ -1,0 +1,113 @@
+import torch
+
+@torch.no_grad()
+def compute_optimal_scale(weight):
+    with torch.no_grad():
+        n_out, n_in = weight.shape
+        assert n_in % 4 == 0
+        if n_out % 8:
+            # pad the number of rows up to the next multiple of 8
+            pad = 8 - (n_out % 8)
+            weight = torch.cat((weight, torch.zeros((pad, n_in), dtype=weight.dtype, device=weight.device)), dim=0)
+
+        weight_max_abs, _ = torch.max(torch.abs(weight), dim=1)
+        weight_max_sum, _ = torch.max(torch.abs(weight[:, : n_in : 2] + weight[:, 1 : n_in : 2]), dim=1)
+        scale_max = weight_max_abs / 127
+        scale_sum = weight_max_sum / 129
+
+        scale = torch.maximum(scale_max, scale_sum)
+
+    return scale[:n_out]
+
+@torch.no_grad()
+def q_scaled_noise(module, weight):
+    if isinstance(module, torch.nn.Conv1d):
+        w = weight.permute(0, 2, 1).flatten(1)
+        noise = torch.rand_like(w) - 0.5
+        noise[w == 0] = 0 # ignore zero entries from sparsification
+        scale = compute_optimal_scale(w)
+        noise = noise * scale.unsqueeze(-1)
+        noise = noise.reshape(weight.size(0), weight.size(2), weight.size(1)).permute(0, 2, 1)
+    elif isinstance(module, torch.nn.ConvTranspose1d):
+        i, o, k = weight.shape
+        w = weight.permute(2, 1, 0).reshape(k * o, i)
+        noise = torch.rand_like(w) - 0.5
+        noise[w == 0] = 0 # ignore zero entries from sparsification
+        scale = compute_optimal_scale(w)
+        noise = noise * scale.unsqueeze(-1)
+        noise = noise.reshape(k, o, i).permute(2, 1, 0)
+    elif len(weight.shape) == 2:
+        noise = torch.rand_like(weight) - 0.5
+        noise[weight == 0] = 0 # ignore zero entries from sparsification
+        scale = compute_optimal_scale(weight)
+        noise = noise * scale.unsqueeze(-1)
+    else:
+        raise ValueError('unknown quantization setting')
+
+    return noise
+
+class SoftQuant:
+    names: list
+
+    def __init__(self, names: list, scale: float) -> None:
+        self.names = names
+        self.quantization_noise = None
+        self.scale = scale
+
+    def __call__(self, module, inputs, *args, before=True):
+        if not module.training: return
+
+        if before:
+            self.quantization_noise = dict()
+            for name in self.names:
+                weight = getattr(module, name)
+                if self.scale is None:
+                    self.quantization_noise[name] = q_scaled_noise(module, weight)
+                else:
+                    self.quantization_noise[name] = \
+                        self.scale * (torch.rand_like(weight) - 0.5)
+                with torch.no_grad():
+                    weight.data[:] = weight + self.quantization_noise[name]
+        else:
+            for name in self.names:
+                weight = getattr(module, name)
+                with torch.no_grad():
+                    weight.data[:] = weight - self.quantization_noise[name]
+            self.quantization_noise = None
+
+    @staticmethod
+    def apply(module, names=['weight'], scale=None):
+        fn = SoftQuant(names, scale)
+
+        for name in names:
+            if not hasattr(module, name):
+                raise ValueError(f"module {module} has no attribute {name}")
+
+        fn_before = lambda *x : fn(*x, before=True)
+        fn_after = lambda *x : fn(*x, before=False)
+        setattr(fn_before, 'sqm', fn)
+        setattr(fn_after, 'sqm', fn)
+
+
+        module.register_forward_pre_hook(fn_before)
+        module.register_forward_hook(fn_after)
+
+
+        return fn
+
+
+def soft_quant(module, names=['weight'], scale=None):
+    fn = SoftQuant.apply(module, names, scale)
+    return module
+
+def remove_soft_quant(module, names=['weight']):
+    # collect keys first so the hook dicts are not mutated while iterating
+    pre_keys = [k for k, hook in module._forward_pre_hooks.items()
+                if hasattr(hook, 'sqm') and isinstance(hook.sqm, SoftQuant) and hook.sqm.names == names]
+    for k in pre_keys:
+        del module._forward_pre_hooks[k]
+    post_keys = [k for k, hook in module._forward_hooks.items()
+                 if hasattr(hook, 'sqm') and isinstance(hook.sqm, SoftQuant) and hook.sqm.names == names]
+    for k in post_keys:
+        del module._forward_hooks[k]
+
+    return module
\ No newline at end of file
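
For reference, a minimal usage sketch of the soft-quantization hooks above (module and shapes chosen arbitrarily):

    import torch
    from dnntools.quantization import soft_quant, remove_soft_quant

    layer = torch.nn.Linear(64, 64)
    soft_quant(layer)                 # installs the noise-injection hooks
    layer.train()
    y = layer(torch.randn(8, 64))     # forward pass runs on noisy weights
    remove_soft_quant(layer)          # strips the hooks again
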
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/relegance/__init__.py
@@ -1,0 +1,2 @@
+from .relegance import relegance_gradient_weighting, relegance_create_tconv_kernel, relegance_map_relevance_to_input_domain, relegance_resize_relevance_to_input_size
+from .meta_critic import MetaCritic
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/relegance/meta_critic.py
@@ -1,0 +1,85 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import torch
+
+class MetaCritic():
+    def __init__(self, normalize=False, gamma=0.9, beta=0.0, joint_stats=False):
+        """ Class for assessing relevance of discriminator scores
+
+        Args:
+            normalize (bool, optional): if true, scores are normalized by tracked discriminator statistics. Defaults to False.
+            gamma (float, optional): update rate for tracking discriminator stats. Defaults to 0.9.
+            beta (float, optional): minimum confidence-related threshold. Defaults to 0.0.
+            joint_stats (bool, optional): if true, statistics are tracked jointly across discriminators. Defaults to False.
+        """
+        self.normalize = normalize
+        self.gamma = gamma
+        self.beta = beta
+        self.joint_stats = joint_stats
+
+        self.disc_stats = dict()
+
+    def __call__(self, disc_id, real_scores, generated_scores):
+        """ calculates relevance from normalized scores
+
+        Args:
+            disc_id (any valid key): id for tracking discriminator statistics
+            real_scores (torch.tensor): scores for real data
+            generated_scores (torch.tensor): scores for generated data; expecting device to match real_scores.device
+
+        Returns:
+            torch.tensor: output-domain relevance
+        """
+
+        if self.normalize:
+            real_std = torch.std(real_scores.detach()).cpu().item()
+            gen_std  = torch.std(generated_scores.detach()).cpu().item()
+            std = (real_std**2 + gen_std**2) ** .5
+            mean = torch.mean(real_scores.detach()).cpu().item() - torch.mean(generated_scores.detach()).cpu().item()
+
+            key = 0 if self.joint_stats else disc_id
+
+            if key in self.disc_stats:
+                self.disc_stats[key]['std'] =  self.gamma * self.disc_stats[key]['std'] + (1 - self.gamma) * std
+                self.disc_stats[key]['mean'] =  self.gamma * self.disc_stats[key]['mean'] + (1 - self.gamma) * mean
+            else:
+                self.disc_stats[key] = {
+                    'std': std + 1e-5,
+                    'mean': mean
+                }
+
+            std = self.disc_stats[key]['std']
+            mean = self.disc_stats[key]['mean']
+        else:
+            mean, std = 0, 1
+
+        relevance = torch.relu((real_scores - generated_scores - mean) / std + mean - self.beta)
+
+        return relevance
\ No newline at end of file
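
A short usage sketch for MetaCritic (the scores are random placeholders):

    import torch
    from dnntools.relegance import MetaCritic

    critic = MetaCritic(normalize=True)
    real_scores = torch.randn(16)   # discriminator scores on real data
    gen_scores  = torch.randn(16)   # discriminator scores on generated data
    relevance = critic('disc0', real_scores, gen_scores)  # non-negative weights
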
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/relegance/relegance.py
@@ -1,0 +1,449 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import torch
+import torch.nn.functional as F
+
+
+def view_one_hot(index, length):
+    vec = length * [1]
+    vec[index] = -1
+    return vec
+
+def create_smoothing_kernel(widths, gamma=1.5):
+    """ creates a truncated gaussian smoothing kernel for the given widths
+
+        Parameters:
+        -----------
+        widths: list[Int] or torch.LongTensor
+            specifies the shape of the smoothing kernel, entries must be > 0.
+
+        gamma: float, optional
+            decay factor for gaussian relative to kernel size
+
+        Returns:
+        --------
+        kernel: torch.FloatTensor
+    """
+
+    widths = torch.LongTensor(widths)
+    num_dims = len(widths)
+
+    assert(widths.min() > 0)
+
+    centers = widths.float() / 2 - 0.5
+    sigmas  = gamma * (centers + 1)
+
+    vals = [((torch.arange(widths[i]) - centers[i]) / sigmas[i]) ** 2 for i in range(num_dims)]
+    vals = sum([vals[i].view(view_one_hot(i, num_dims)) for i in range(num_dims)])
+
+    kernel = torch.exp(- vals)
+    kernel = kernel / kernel.sum()
+
+    return kernel
+
+
+def create_partition_kernel(widths, strides):
+    """ creates a partition kernel for mapping a convolutional network output back to the input domain
+
+        Given a fully convolutional network with a receptive field of shape widths and the given strides, this
+        function constructs an interpolation kernel whose translations by multiples of the given strides form
+        a partition of unity on the input domain.
+
+        Parameters:
+        -----------
+        widths: list[Int] or torch.LongTensor
+            shape of receptive field
+
+        strides: list[Int] or torch.LongTensor
+            total strides of convolutional network
+
+        Returns:
+        --------
+        kernel: torch.FloatTensor
+    """
+
+    num_dims = len(widths)
+    assert num_dims == len(strides) and num_dims in {1, 2, 3}
+
+    convs = {1 : F.conv1d, 2 : F.conv2d, 3 : F.conv3d}
+
+    widths = torch.LongTensor(widths)
+    strides = torch.LongTensor(strides)
+
+    proto_kernel = torch.ones(torch.minimum(strides, widths).tolist())
+
+    # create interpolation kernel eta
+    eta_widths = widths - strides + 1
+    if eta_widths.min() <= 0:
+        print("[create_partition_kernel] warning: receptive field does not cover input domain")
+        eta_widths = torch.maximum(eta_widths, torch.ones_like(eta_widths))
+
+
+    eta = create_smoothing_kernel(eta_widths).view(1, 1, *eta_widths.tolist())
+
+    padding = torch.repeat_interleave(eta_widths - 1, 2, 0).tolist()[::-1] # ordering of dimensions for padding and convolution functions is reversed in torch
+    padded_proto_kernel = F.pad(proto_kernel, padding)
+    padded_proto_kernel = padded_proto_kernel.view(1, 1, *padded_proto_kernel.shape)
+    kernel = convs[num_dims](padded_proto_kernel, eta)
+
+    return kernel
+
+
+def receptive_field(conv_model, input_shape, output_position):
+    """ estimates boundaries of receptive field connected to output_position via autograd
+
+        Parameters:
+        -----------
+        conv_model: nn.Module or autograd function
+            function or model implementing fully convolutional model
+
+        input_shape: List[Int]
+            input shape ignoring batch dimension, i.e. [num_channels, dim1, dim2, ...]
+
+        output_position: List[Int]
+            output position for which the receptive field is determined; the function raises an exception
+            if output_position is out of bounds for the given input_shape.
+
+        Returns:
+        --------
+        low: List[Int]
+            start indices of receptive field
+
+        high: List[Int]
+            stop indices of receptive field
+
+    """
+
+    x = torch.randn((1,) + tuple(input_shape), requires_grad=True)
+    y = conv_model(x)
+
+    # collapse channels and remove batch dimension
+    y = torch.sum(y, 1)[0]
+
+    # create mask
+    mask = torch.zeros_like(y)
+    index = [torch.tensor(i) for i in output_position]
+    try:
+        mask.index_put_(index, torch.tensor(1, dtype=mask.dtype))
+    except IndexError:
+        raise ValueError('output_position out of bounds')
+
+    (mask * y).sum().backward()
+
+    # sum over channels and remove batch dimension
+    grad = torch.sum(x.grad, dim=1)[0]
+    tmp = torch.nonzero(grad, as_tuple=True)
+    low  = [t.min().item() for t in tmp]
+    high = [t.max().item() for t in tmp]
+
+    return low, high
+
+def estimate_conv_parameters(model, num_channels, num_dims, width, max_stride=10):
+    """ attempts to estimate receptive field size, strides and left paddings for given model
+
+
+        Parameters:
+        -----------
+        model: nn.Module or autograd function
+            fully convolutional model for which parameters are estimated
+
+        num_channels: Int
+            number of input channels for model
+
+        num_dims: Int
+            number of input dimensions for model (without channel dimension)
+
+        width: Int
+            width of the input tensor (a hyper-square) on which the receptive fields are derived via autograd
+
+        max_stride: Int, optional
+            assumed maximal stride of the model in any dimension; when set too low, the function may fail
+            for any value of width
+
+        Returns:
+        --------
+        receptive_field_size: List[Int]
+            receptive field size in all dimensions
+
+        strides: List[Int]
+            stride in all dimensions
+
+        left_paddings: List[Int]
+            left padding in all dimensions; this is relevant for aligning the receptive field on the input plane
+
+        Raises:
+        -------
+        ValueError, KeyError
+
+    """
+
+    input_shape = [num_channels] + num_dims * [width]
+    output_position1 = num_dims * [width // (2 * max_stride)]
+    output_position2 = num_dims * [width // (2 * max_stride) + 1]
+
+    low1, high1 = receptive_field(model, input_shape, output_position1)
+    low2, high2 = receptive_field(model, input_shape, output_position2)
+
+    widths1 = [h - l + 1 for l, h in zip(low1, high1)]
+    widths2 = [h - l + 1 for l, h in zip(low2, high2)]
+
+    if not all([w1 - w2 == 0 for w1, w2 in zip(widths1, widths2)]) or not all([l1 != l2 for l1, l2 in zip(low1, low2)]):
+        raise ValueError("[estimate_strides]: widths to small to determine strides")
+
+    receptive_field_size = widths1
+    strides              = [l2 - l1 for l1, l2 in zip(low1, low2)]
+    left_paddings        = [s * p - l for l, s, p in zip(low1, strides, output_position1)]
+
+    return receptive_field_size, strides, left_paddings
+
+def inspect_conv_model(model, num_channels, num_dims, max_width=10000, width_hint=None, stride_hint=None, verbose=False):
+    """ determines size of receptive field, strides and padding probabilistically
+
+
+        Parameters:
+        -----------
+        model: nn.Module or autograd function
+            fully convolutional model for which parameters are estimated
+
+        num_channels: Int
+            number of input channels for model
+
+        num_dims: Int
+            number of input dimensions for model (without channel dimension)
+
+        max_width: Int
+            maximum width of the input tensor (a hyper-square) on which the receptive fields are derived via autograd
+
+        width_hint: Int, optional
+            hint at the width of the receptive field, used as a starting point for the search
+
+        stride_hint: Int, optional
+            hint at the total stride, used as a starting point for the search
+
+        verbose: bool, optional
+            if true, the function prints parameters for individual trials
+
+        Returns:
+        --------
+        receptive_field_size: List[Int]
+            receptive field size in all dimensions
+
+        strides: List[Int]
+            stride in all dimensions
+
+        left_paddings: List[Int]
+            left padding in all dimensions; this is relevant for aligning the receptive field on the input plane
+
+        Raises:
+        -------
+        ValueError
+
+    """
+
+    max_stride = max_width // 2
+    stride = max_stride // 100
+    width = max_width // 100
+
+    if width_hint is not None: width = 2 * width_hint
+    if stride_hint is not None: stride = stride_hint
+
+    did_it = False
+    while width < max_width and stride < max_stride:
+        try:
+            if verbose: print(f"[inspect_conv_model] trying parameters {width=}, {stride=}")
+            receptive_field_size, strides, left_paddings = estimate_conv_parameters(model, num_channels, num_dims, width, stride)
+            did_it = True
+        except (ValueError, KeyError):
+            pass
+
+        if did_it: break
+
+        width *= 2
+        if width >= max_width and stride < max_stride:
+            stride *= 2
+            width = 2 * stride
+
+    if not did_it:
+        raise ValueError(f'could not determine conv parameters with given max_width={max_width}')
+
+    return receptive_field_size, strides, left_paddings
+
+
+class GradWeight(torch.autograd.Function):
+
+    @staticmethod
+    def forward(ctx, x, weight):
+        ctx.save_for_backward(weight)
+        return x.clone()
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        weight, = ctx.saved_tensors
+
+        grad_input = grad_output * weight
+
+        return grad_input, None
+
+
+# API
+
+def relegance_gradient_weighting(x, weight):
+    """
+
+    Args:
+        x (torch.tensor): input tensor
+        weight (torch.tensor or None): weight tensor for gradients of x; if None, no gradient weighting will be applied in backward pass
+
+    Returns:
+        torch.tensor: x with unchanged values; gradients are multiplied by weight in the backward pass
+
+    """
+    if weight is None:
+        return x
+    else:
+        return GradWeight.apply(x, weight)
+
+
+
+def relegance_create_tconv_kernel(model, num_channels, num_dims, width_hint=None, stride_hint=None, verbose=False):
+    """ creates parameters for mapping back output domain relevance to input tomain
+
+    Args:
+        model (nn.Module or autograd.Function): fully convolutional model
+        num_channels (int): number of input channels to model
+        num_dims (int): number of input dimensions of model (without channel and batch dimension)
+        width_hint(int or None): optional hint at maximal width of receptive field
+        stride_hint(int or None): optional hint at maximal stride
+
+    Returns:
+        dict: contains kernel, kernel dimensions, strides and left paddings for transposed convolution
+    """
+
+    max_width = int(100000 / (10 ** num_dims))
+
+    did_it = False
+    try:
+        receptive_field_size, strides, left_paddings = inspect_conv_model(model, num_channels, num_dims, max_width=max_width, width_hint=width_hint, stride_hint=stride_hint, verbose=verbose)
+        did_it = True
+    except ValueError:
+        # try once again with larger max_width
+        max_width *= 10
+
+    # crash if exception is raised
+    try:
+        if not did_it: receptive_field_size, strides, left_paddings = inspect_conv_model(model, num_channels, num_dims, max_width=max_width, width_hint=width_hint, stride_hint=stride_hint, verbose=verbose)
+    except ValueError:
+        raise RuntimeError("could not determine parameters within given compute budget")
+
+    partition_kernel = create_partition_kernel(receptive_field_size, strides)
+    partition_kernel = torch.repeat_interleave(partition_kernel, num_channels, 1)
+
+    tconv_parameters = {
+        'kernel': partition_kernel,
+        'receptive_field_shape': receptive_field_size,
+        'stride': strides,
+        'left_padding': left_paddings,
+        'num_dims': num_dims
+    }
+
+    return tconv_parameters
+
+
+
+def relegance_map_relevance_to_input_domain(od_relevance, tconv_parameters):
+    """ maps output-domain relevance to input-domain relevance via transpose convolution
+
+    Args:
+        od_relevance (torch.tensor): output-domain relevance
+        tconv_parameters (dict): parameter dict as created by relegance_create_tconv_kernel
+
+    Returns:
+        torch.tensor: input-domain relevance. The tensor is left-aligned, i.e. the all-zero index of the output corresponds to the all-zero index of the
+                      discriminator input. However, the size of the output tensor need not match the size of the discriminator input. Use
+                      relegance_resize_relevance_to_input_size for a convenient way to adjust the output to the correct size.
+
+    Raises:
+        ValueError: if number of dimensions is not supported
+    """
+
+    kernel       = tconv_parameters['kernel'].to(od_relevance.device)
+    rf_shape     = tconv_parameters['receptive_field_shape']
+    stride       = tconv_parameters['stride']
+    left_padding = tconv_parameters['left_padding']
+
+    num_dims = len(kernel.shape) - 2
+
+    # repeat boundary values
+    od_padding = [rf_shape[i//2] // stride[i//2] + 1 for i in range(2 * num_dims)]
+    padded_od_relevance = F.pad(od_relevance, od_padding[::-1], mode='replicate')
+    od_padding = od_padding[::2]
+
+    # apply mapping and left trimming
+    if num_dims == 1:
+        id_relevance = F.conv_transpose1d(padded_od_relevance, kernel, stride=stride)
+        id_relevance = id_relevance[..., left_padding[0] + stride[0] * od_padding[0] :]
+    elif num_dims == 2:
+        id_relevance = F.conv_transpose2d(padded_od_relevance, kernel, stride=stride)
+        id_relevance = id_relevance[..., left_padding[0] + stride[0] * od_padding[0] :, left_padding[1] + stride[1] * od_padding[1]:]
+    elif num_dims == 3:
+        id_relevance = F.conv_transpose3d(padded_od_relevance, kernel, stride=stride)
+        id_relevance = id_relevance[..., left_padding[0] + stride[0] * od_padding[0] :, left_padding[1] + stride[1] * od_padding[1]:, left_padding[2] + stride[2] * od_padding[2] :]
+    else:
+        raise ValueError(f'[relegance_map_to_input_domain] error: num_dims = {num_dims} not supported')
+
+    return id_relevance
+
+
+def relegance_resize_relevance_to_input_size(reference_input, relevance):
+    """ adjusts size of relevance tensor to reference input size
+
+    Args:
+        reference_input (torch.tensor): discriminator input tensor for reference
+        relevance (torch.tensor): input-domain relevance corresponding to input tensor reference_input
+
+    Returns:
+        torch.tensor: resized relevance
+
+    Raises:
+        ValueError: if number of dimensions is not supported
+    """
+    resized_relevance = torch.zeros_like(reference_input)
+
+    num_dims = len(reference_input.shape) - 2
+    with torch.no_grad():
+        if num_dims == 1:
+            resized_relevance[:] = relevance[..., : min(reference_input.size(-1), relevance.size(-1))]
+        elif num_dims == 2:
+            resized_relevance[:] = relevance[..., : min(reference_input.size(-2), relevance.size(-2)), : min(reference_input.size(-1), relevance.size(-1))]
+        elif num_dims == 3:
+            resized_relevance[:] = relevance[..., : min(reference_input.size(-3), relevance.size(-3)), : min(reference_input.size(-2), relevance.size(-2)), : min(reference_input.size(-1), relevance.size(-1))]
+        else:
+            raise ValueError(f'[relegance_resize_relevance_to_input_size] error: num_dims = {num_dims} not supported')
+
+    return resized_relevance
\ No newline at end of file
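
Putting the relegance pieces together, a rough end-to-end sketch with a toy one-dimensional discriminator (all names and shapes are illustrative; the kernel estimation probes the model via autograd internally):

    import torch
    from dnntools.relegance import (relegance_create_tconv_kernel,
        relegance_gradient_weighting, relegance_map_relevance_to_input_domain,
        relegance_resize_relevance_to_input_size)

    disc = torch.nn.Conv1d(1, 1, 15, stride=4)         # toy discriminator
    params = relegance_create_tconv_kernel(disc, num_channels=1, num_dims=1)

    x = torch.randn(1, 1, 1024, requires_grad=True)
    od_relevance = torch.rand(1, 1, disc(x).size(-1))  # e.g. from MetaCritic
    id_relevance = relegance_map_relevance_to_input_domain(od_relevance, params)
    weight = relegance_resize_relevance_to_input_size(x, id_relevance)

    y = disc(relegance_gradient_weighting(x, weight))
    y.sum().backward()  # gradients w.r.t. x are scaled by the relevance weights
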
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/sparsification/__init__.py
@@ -1,0 +1,6 @@
+from .gru_sparsifier import GRUSparsifier
+from .conv1d_sparsifier import Conv1dSparsifier
+from .conv_transpose1d_sparsifier import ConvTranspose1dSparsifier
+from .linear_sparsifier import LinearSparsifier
+from .common import sparsify_matrix, calculate_gru_flops_per_step
+from .utils import mark_for_sparsification, create_sparsifier
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/sparsification/base_sparsifier.py
@@ -1,0 +1,58 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+class BaseSparsifier:
+    def __init__(self, task_list, start, stop, interval, exponent=3):
+
+        # just copying parameters...
+        self.start      = start
+        self.stop       = stop
+        self.interval   = interval
+        self.exponent   = exponent
+        self.task_list  = task_list
+
+        # ... and setting counter to 0
+        self.step_counter = 0
+
+    def step(self, verbose=False):
+        # compute current interpolation factor
+        self.step_counter += 1
+
+        if self.step_counter < self.start:
+            return
+        elif self.step_counter < self.stop:
+            # update only every self.interval-th step
+            if self.step_counter % self.interval:
+                return
+
+            alpha = ((self.stop - self.step_counter) / (self.stop - self.start)) ** self.exponent
+        else:
+            alpha = 0
+
+        self.sparsify(alpha, verbose=verbose)
\ No newline at end of file
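
Concrete sparsifiers derive from BaseSparsifier and implement sparsify(alpha). A minimal hypothetical subclass for torch.nn.Linear weights (ToySparsifier is illustrative only, not part of the patch):

    import torch
    from dnntools.sparsification.base_sparsifier import BaseSparsifier
    from dnntools.sparsification.common import sparsify_matrix

    class ToySparsifier(BaseSparsifier):
        def sparsify(self, alpha, verbose=False):
            for linear, (target_density, block_size) in self.task_list:
                # interpolate between dense (alpha=1) and target density (alpha=0)
                density = alpha + (1 - alpha) * target_density
                with torch.no_grad():
                    linear.weight[:] = sparsify_matrix(linear.weight, density, block_size)

    linear = torch.nn.Linear(16, 16)
    sparsifier = ToySparsifier([(linear, (0.25, [4, 4]))], start=0, stop=100, interval=10)
    for _ in range(200):
        sparsifier.step()
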
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/sparsification/common.py
@@ -1,0 +1,123 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import torch
+
+debug = True
+
+def sparsify_matrix(matrix : torch.tensor, density : float, block_size, keep_diagonal : bool=False, return_mask : bool=False):
+    """ sparsifies matrix with specified block size
+
+        Parameters:
+        -----------
+        matrix : torch.tensor
+            matrix to sparsify
+        density : float
+            target density in [0, 1]
+        block_size : [int, int]
+            block size dimensions
+        keep_diagonal : bool
+            If true, the diagonal will be kept. This option requires block_size[0] == block_size[1] and defaults to False
+        return_mask : bool
+            If true, the binary mask is returned together with the sparsified matrix. Defaults to False
+    """
+
+    m, n   = matrix.shape
+    m1, n1 = block_size
+
+    if m % m1 or n % n1:
+        raise ValueError(f"block size {(m1, n1)} does not divide matrix size {(m, n)}")
+
+    # extract diagonal if keep_diagonal = True
+    if keep_diagonal:
+        if m != n:
+            raise ValueError("Attempting to sparsify non-square matrix with keep_diagonal=True")
+
+        to_spare = torch.diag(torch.diag(matrix))
+        matrix   = matrix - to_spare
+    else:
+        to_spare = torch.zeros_like(matrix)
+
+    # calculate energy in sub-blocks
+    x = torch.reshape(matrix, (m // m1, m1, n // n1, n1))
+    x = x ** 2
+    block_energies = torch.sum(torch.sum(x, dim=3), dim=1)
+
+    number_of_blocks = (m * n) // (m1 * n1)
+    number_of_survivors = round(number_of_blocks * density)
+
+    # masking threshold
+    if number_of_survivors == 0:
+        # density rounds to zero: mask every block
+        threshold = float('inf')
+    else:
+        threshold = torch.sort(torch.flatten(block_energies)).values[-number_of_survivors]
+
+    # create mask
+    mask = torch.ones_like(block_energies)
+    mask[block_energies < threshold] = 0
+    mask = torch.repeat_interleave(mask, m1, dim=0)
+    mask = torch.repeat_interleave(mask, n1, dim=1)
+
+    # perform masking
+    masked_matrix = mask * matrix + to_spare
+
+    if return_mask:
+        return masked_matrix, mask
+    else:
+        return masked_matrix
+
+def calculate_gru_flops_per_step(gru, sparsification_dict=dict(), drop_input=False):
+    input_size = gru.input_size
+    hidden_size = gru.hidden_size
+    flops = 0
+
+    input_density = (
+        sparsification_dict.get('W_ir', [1])[0]
+        + sparsification_dict.get('W_in', [1])[0]
+        + sparsification_dict.get('W_iz', [1])[0]
+    ) / 3
+
+    recurrent_density = (
+        sparsification_dict.get('W_hr', [1])[0]
+        + sparsification_dict.get('W_hn', [1])[0]
+        + sparsification_dict.get('W_hz', [1])[0]
+    ) / 3
+
+    # input matrix vector multiplications
+    if not drop_input:
+        flops += 2 * 3 * input_size * hidden_size * input_density
+
+    # recurrent matrix vector multiplications
+    flops += 2 * 3 * hidden_size * hidden_size * recurrent_density
+
+    # biases
+    flops += 6 * hidden_size
+
+    # activations estimated by 10 flops per activation
+    flops += 30 * hidden_size
+
+    return flops
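
Usage sketch for the helpers above (sizes chosen arbitrarily):

    import torch
    from dnntools.sparsification import sparsify_matrix, calculate_gru_flops_per_step

    w = torch.randn(16, 16)
    w_sparse, mask = sparsify_matrix(w, density=0.25, block_size=[4, 4], return_mask=True)
    print(mask.mean())    # roughly 0.25 of the 4x4 blocks survive

    gru = torch.nn.GRU(64, 128)
    print(calculate_gru_flops_per_step(gru))  # dense FLOP estimate per step
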
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/sparsification/conv1d_sparsifier.py
@@ -1,0 +1,133 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import torch
+
+from .base_sparsifier import BaseSparsifier
+from .common import sparsify_matrix, debug
+
+
+class Conv1dSparsifier(BaseSparsifier):
+    def __init__(self, task_list, start, stop, interval, exponent=3):
+        """ Sparsifier for torch.nn.GRUs
+
+            Parameters:
+            -----------
+            task_list : list
+                task_list contains a list of tuples (conv1d, params), where conv1d is an instance
+                of torch.nn.Conv1d and params is a tuple (density, [m, n]),
+                where density is the target density in [0, 1], [m, n] is the shape sub-blocks to which
+                sparsification is applied.
+
+            start : int
+                training step after which sparsification will be started.
+
+            stop : int
+                training step after which sparsification will be completed.
+
+            interval : int
+                sparsification interval for steps between start and stop. After stop sparsification will be
+                carried out after every call to Conv1dSparsifier.step()
+
+            exponent : float
+                Interpolation exponent for sparsification interval. In step i sparsification will be carried out
+                with density (alpha + target_density * (1 - alpha)), where
+                alpha = ((stop - i) / (stop - start)) ** exponent
+
+            Example:
+            --------
+            >>> import torch
+            >>> conv = torch.nn.Conv1d(8, 16, 8)
+            >>> params = (0.2, [8, 4])
+            >>> sparsifier = Conv1dSparsifier([(conv, params)], 0, 100, 50)
+            >>> for i in range(100):
+            ...         sparsifier.step()
+        """
+        super().__init__(task_list, start, stop, interval, exponent=exponent)
+
+        self.last_mask = None
+
+
+    def sparsify(self, alpha, verbose=False):
+        """ carries out sparsification step
+
+            Call this function after optimizer.step in your
+            training loop.
+
+            Parameters:
+            ----------
+            alpha : float
+                density interpolation parameter (1: dense, 0: target density)
+            verbose : bool
+                if true, densities are printed out
+
+            Returns:
+            --------
+            None
+
+        """
+
+        with torch.no_grad():
+            for conv, params in self.task_list:
+                # reshape weight
+                if hasattr(conv, 'weight_v'):
+                    weight = conv.weight_v
+                else:
+                    weight = conv.weight
+                o, i, k = weight.shape # Conv1d weight shape: (out_channels, in_channels, kernel_size)
+                w = weight.permute(0, 2, 1).flatten(1)
+                target_density, block_size = params
+                density = alpha + (1 - alpha) * target_density
+                w, new_mask = sparsify_matrix(w, density, block_size, return_mask=True)
+                w = w.reshape(o, k, i).permute(0, 2, 1)
+                weight[:] = w
+
+                if self.last_mask is not None:
+                    if not torch.all(self.last_mask * new_mask == new_mask) and debug:
+                        print("weight resurrection in conv.weight")
+
+                self.last_mask = new_mask
+
+                if verbose:
+                    print(f"conv1d_sparsier[{self.step_counter}]: {density=}")
+
+
+if __name__ == "__main__":
+    print("Testing sparsifier")
+
+    import torch
+    conv = torch.nn.Conv1d(8, 16, 8)
+    params = (0.2, [8, 4])
+
+    sparsifier = Conv1dSparsifier([(conv, params)], 0, 100, 5)
+
+    for i in range(100):
+        sparsifier.step(verbose=True)
+
+    print(conv.weight)
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/sparsification/conv_transpose1d_sparsifier.py
@@ -1,0 +1,134 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import torch
+
+
+from .base_sparsifier import BaseSparsifier
+from .common import sparsify_matrix, debug
+
+
+class ConvTranspose1dSparsifier(BaseSparsifier):
+    def __init__(self, task_list, start, stop, interval, exponent=3):
+        """ Sparsifier for torch.nn.GRUs
+
+            Parameters:
+            -----------
+            task_list : list
+                task_list contains a list of tuples (conv_transpose1d, params), where conv_transpose1d is an
+                instance of torch.nn.ConvTranspose1d and params is a tuple (density, [m, n]),
+                where density is the target density in [0, 1] and [m, n] is the shape of the sub-blocks to which
+                sparsification is applied.
+
+            start : int
+                training step after which sparsification will be started.
+
+            stop : int
+                training step after which sparsification will be completed.
+
+            interval : int
+                sparsification interval for steps between start and stop. After stop sparsification will be
+                carried out after every call to ConvTranspose1dSparsifier.step()
+
+            exponent : float
+                Interpolation exponent for sparsification interval. In step i sparsification will be carried out
+                with density (alpha + target_density * (1 - alpha)), where
+                alpha = ((stop - i) / (stop - start)) ** exponent
+
+            Example:
+            --------
+            >>> import torch
+            >>> conv = torch.nn.ConvTranspose1d(8, 16, 8)
+            >>> params = (0.2, [8, 4])
+            >>> sparsifier = ConvTranspose1dSparsifier([(conv, params)], 0, 100, 50)
+            >>> for i in range(100):
+            ...         sparsifier.step()
+        """
+
+        super().__init__(task_list, start, stop, interval, exponent=exponent)
+
+        self.last_mask = None
+
+    def sparsify(self, alpha, verbose=False):
+        """ carries out sparsification step
+
+            Call this function after optimizer.step in your
+            training loop.
+
+            Parameters:
+            ----------
+            alpha : float
+                density interpolation parameter (1: dense, 0: target density)
+            verbose : bool
+                if true, densities are printed out
+
+            Returns:
+            --------
+            None
+
+        """
+
+        with torch.no_grad():
+            for conv, params in self.task_list:
+                # reshape weight
+                if hasattr(conv, 'weight_v'):
+                    weight = conv.weight_v
+                else:
+                    weight = conv.weight
+                i, o, k = weight.shape
+                w = weight.permute(2, 1, 0).reshape(k * o, i)
+                target_density, block_size = params
+                density = alpha + (1 - alpha) * target_density
+                w, new_mask = sparsify_matrix(w, density, block_size, return_mask=True)
+                w = w.reshape(k, o, i).permute(2, 1, 0)
+                weight[:] = w
+
+                if self.last_mask is not None:
+                    if not torch.all(self.last_mask * new_mask == new_mask) and debug:
+                        print("weight resurrection in conv.weight")
+
+                self.last_mask = new_mask
+
+                if verbose:
+                    print(f"convtrans1d_sparsier[{self.step_counter}]: {density=}")
+
+
+if __name__ == "__main__":
+    print("Testing sparsifier")
+
+    import torch
+    conv = torch.nn.ConvTranspose1d(8, 16, 4, 4)
+    params = (0.2, [8, 4])
+
+    sparsifier = ConvTranspose1dSparsifier([(conv, params)], 0, 100, 5)
+
+    for i in range(100):
+        sparsifier.step(verbose=True)
+
+    print(conv.weight)
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/sparsification/gru_sparsifier.py
@@ -1,0 +1,178 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import torch
+
+from .base_sparsifier import BaseSparsifier
+from .common import sparsify_matrix, debug
+
+
+class GRUSparsifier(BaseSparsifier):
+    def __init__(self, task_list, start, stop, interval, exponent=3):
+        """ Sparsifier for torch.nn.GRUs
+
+            Parameters:
+            -----------
+            task_list : list
+                task_list contains a list of tuples (gru, sparsify_dict), where gru is an instance
+                of torch.nn.GRU and sparsify_dict is a dictionary with keys in {'W_ir', 'W_iz', 'W_in',
+                'W_hr', 'W_hz', 'W_hn'} corresponding to the input and recurrent weights for the reset,
+                update, and new gate. The values of sparsify_dict are tuples (density, [m, n], keep_diagonal),
+                where density is the target density in [0, 1], [m, n] is the shape of the sub-blocks to which
+                sparsification is applied and keep_diagonal is a bool variable indicating whether the diagonal
+                should be kept.
+
+            start : int
+                training step after which sparsification will be started.
+
+            stop : int
+                training step after which sparsification will be completed.
+
+            interval : int
+                sparsification interval for steps between start and stop. After stop sparsification will be
+                carried out after every call to GRUSparsifier.step()
+
+            exponent : float
+                Interpolation exponent for sparsification interval. In step i sparsification will be carried out
+                with density (alpha + target_density * (1 - alpha)), where
+                alpha = ((stop - i) / (stop - start)) ** exponent
+
+            Example:
+            --------
+            >>> import torch
+            >>> gru = torch.nn.GRU(10, 20)
+            >>> sparsify_dict = {
+            ...         'W_ir' : (0.5, [2, 2], False),
+            ...         'W_iz' : (0.6, [2, 2], False),
+            ...         'W_in' : (0.7, [2, 2], False),
+            ...         'W_hr' : (0.1, [4, 4], True),
+            ...         'W_hz' : (0.2, [4, 4], True),
+            ...         'W_hn' : (0.3, [4, 4], True),
+            ...     }
+            >>> sparsifier = GRUSparsifier([(gru, sparsify_dict)], 0, 100, 50)
+            >>> for i in range(100):
+            ...         sparsifier.step()
+        """
+        super().__init__(task_list, start, stop, interval, exponent=exponent)
+
+        self.last_masks = {key : None for key in ['W_ir', 'W_in', 'W_iz', 'W_hr', 'W_hn', 'W_hz']}
+
+    def sparsify(self, alpha, verbose=False):
+        """ carries out sparsification step
+
+            Call this function after optimizer.step in your
+            training loop.
+
+            Parameters:
+            ----------
+            alpha : float
+                density interpolation parameter (1: dense, 0: target density)
+            verbose : bool
+                if true, densities are printed out
+
+            Returns:
+            --------
+            None
+
+        """
+
+        with torch.no_grad():
+            for gru, params in self.task_list:
+                hidden_size = gru.hidden_size
+
+                # input weights
+                for i, key in enumerate(['W_ir', 'W_iz', 'W_in']):
+                    if key in params:
+                        if hasattr(gru, 'weight_ih_l0_v'):
+                            weight = gru.weight_ih_l0_v
+                        else:
+                            weight = gru.weight_ih_l0
+                        density = alpha + (1 - alpha) * params[key][0]
+                        if verbose:
+                            print(f"[{self.step_counter}]: {key} density: {density}")
+
+                        weight[i * hidden_size : (i+1) * hidden_size, : ], new_mask = sparsify_matrix(
+                            weight[i * hidden_size : (i + 1) * hidden_size, : ],
+                            density, # density
+                            params[key][1], # block_size
+                            params[key][2], # keep_diagonal (might want to set this to False)
+                            return_mask=True
+                        )
+
+                        if self.last_masks[key] is not None:
+                            if not torch.all(self.last_masks[key] * new_mask == new_mask) and debug:
+                                print("weight resurrection in weight_ih_l0_v")
+
+                        self.last_masks[key] = new_mask
+
+                # recurrent weights
+                for i, key in enumerate(['W_hr', 'W_hz', 'W_hn']):
+                    if key in params:
+                        if hasattr(gru, 'weight_hh_l0_v'):
+                            weight = gru.weight_hh_l0_v
+                        else:
+                            weight = gru.weight_hh_l0
+                        density = alpha + (1 - alpha) * params[key][0]
+                        if verbose:
+                            print(f"[{self.step_counter}]: {key} density: {density}")
+                        weight[i * hidden_size : (i+1) * hidden_size, : ], new_mask = sparsify_matrix(
+                            weight[i * hidden_size : (i + 1) * hidden_size, : ],
+                            density,
+                            params[key][1], # block_size
+                            params[key][2], # keep_diagonal (might want to set this to False)
+                            return_mask=True
+                        )
+
+                        if self.last_masks[key] is not None:
+                            if not torch.all(self.last_masks[key] * new_mask == new_mask) and verbose:
+                                print("weight resurrection in weight_hh_l0_v")
+
+                        self.last_masks[key] = new_mask
+
+
+
+if __name__ == "__main__":
+    print("Testing sparsifier")
+
+    gru = torch.nn.GRU(10, 20)
+    sparsify_dict = {
+        'W_ir' : (0.5, [2, 2], False),
+        'W_iz' : (0.6, [2, 2], False),
+        'W_in' : (0.7, [2, 2], False),
+        'W_hr' : (0.1, [4, 4], True),
+        'W_hz' : (0.2, [4, 4], True),
+        'W_hn' : (0.3, [4, 4], True),
+    }
+
+    sparsifier = GRUSparsifier([(gru, sparsify_dict)], 0, 100, 10)
+
+    for i in range(100):
+        sparsifier.step(verbose=True)
+
+    print(gru.weight_hh_l0)
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/sparsification/linear_sparsifier.py
@@ -1,0 +1,128 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import torch
+
+from .base_sparsifier import BaseSparsifier
+from .common import sparsify_matrix
+
+
+class LinearSparsifier(BaseSparsifier):
+    def __init__(self, task_list, start, stop, interval, exponent=3):
+        """ Sparsifier for torch.nn.GRUs
+
+            Parameters:
+            -----------
+            task_list : list
+                task_list contains a list of tuples (linear, params), where linear is an instance
+                of torch.nn.Linear and params is a tuple (density, [m, n]),
+                where density is the target density in [0, 1] and [m, n] is the shape of the sub-blocks to which
+                sparsification is applied.
+
+            start : int
+                training step after which sparsification will be started.
+
+            stop : int
+                training step after which sparsification will be completed.
+
+            interval : int
+                sparsification interval for steps between start and stop. After stop, sparsification will be
+                carried out after every call to LinearSparsifier.step()
+
+            exponent : float
+                Interpolation exponent for the sparsification interval. In step i sparsification will be carried out
+                with density (alpha + target_density * (1 - alpha)), where
+                alpha = ((stop - i) / (stop - start)) ** exponent
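+                (e.g. with start=0, stop=100 and exponent=3, step i=50 yields
+                alpha = 0.125 and density 0.125 + 0.875 * target_density)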
+
+            Example:
+            --------
+            >>> import torch
+            >>> linear = torch.nn.Linear(8, 16)
+            >>> params = (0.2, [8, 4])
+            >>> sparsifier = LinearSparsifier([(linear, params)], 0, 100, 50)
+            >>> for i in range(100):
+            ...         sparsifier.step()
+        """
+
+        super().__init__(task_list, start, stop, interval, exponent=exponent)
+
+        self.last_mask = None
+
+    def sparsify(self, alpha, verbose=False):
+        """ carries out sparsification step
+
+            Call this function after optimizer.step in your
+            training loop.
+
+            Parameters:
+            ----------
+            alpha : float
+                density interpolation parameter (1: dense, 0: target density)
+            verbose : bool
+                if true, densities are printed out
+
+            Returns:
+            --------
+            None
+
+        """
+
+        with torch.no_grad():
+            for linear, params in self.task_list:
+                if hasattr(linear, 'weight_v'):
+                    weight = linear.weight_v
+                else:
+                    weight = linear.weight
+                target_density, block_size = params
+                density = alpha + (1 - alpha) * target_density
+                weight[:], new_mask = sparsify_matrix(weight, density, block_size, return_mask=True)
+
+                if self.last_mask is not None:
+                    if not torch.all(self.last_mask * new_mask == new_mask) and verbose:
+                        print("weight resurrection in conv.weight")
+
+                self.last_mask = new_mask
+
+                if verbose:
+                    print(f"linear_sparsifier[{self.step_counter}]: {density=}")
+
+
+if __name__ == "__main__":
+    print("Testing sparsifier")
+
+    import torch
+    linear = torch.nn.Linear(8, 16)
+    params = (0.2, [4, 2])
+
+    sparsifier = LinearSparsifier([(linear, params)], 0, 100, 5)
+
+    for i in range(100):
+        sparsifier.step(verbose=True)
+
+    print(linear.weight)
--- /dev/null
+++ b/dnn/torch/dnntools/dnntools/sparsification/utils.py
@@ -1,0 +1,64 @@
+import torch
+
+from dnntools.sparsification import GRUSparsifier, LinearSparsifier, Conv1dSparsifier, ConvTranspose1dSparsifier
+
+def mark_for_sparsification(module, params):
+    setattr(module, 'sparsify', True)
+    setattr(module, 'sparsification_params', params)
+    return module
+
+def create_sparsifier(module, start, stop, interval):
+    sparsifier_list = []
+    for m in module.modules():
+        if hasattr(m, 'sparsify'):
+            if isinstance(m, torch.nn.GRU):
+                sparsifier_list.append(
+                    GRUSparsifier([(m, m.sparsification_params)], start, stop, interval)
+                )
+            elif isinstance(m, torch.nn.Linear):
+                sparsifier_list.append(
+                    LinearSparsifier([(m, m.sparsification_params)], start, stop, interval)
+                )
+            elif isinstance(m, torch.nn.Conv1d):
+                sparsifier_list.append(
+                    Conv1dSparsifier([(m, m.sparsification_params)], start, stop, interval)
+                )
+            elif isinstance(m, torch.nn.ConvTranspose1d):
+                sparsifier_list.append(
+                    ConvTranspose1dSparsifier([(m, m.sparsification_params)], start, stop, interval)
+                )
+            else:
+                print(f"[create_sparsifier] warning: module {m} marked for sparsification but no suitable sparsifier exists.")
+
+    def sparsify(verbose=False):
+        for sparsifier in sparsifier_list:
+            sparsifier.step(verbose)
+
+    return sparsify
+
+
+def count_parameters(model, verbose=False):
+    total = 0
+    for name, p in model.named_parameters():
+        count = torch.ones_like(p).sum().item()
+
+        if verbose:
+            print(f"{name}: {count} parameters")
+
+        total += count
+
+    return total
+
+def estimate_nonzero_parameters(module):
+    """ estimates the number of non-zero weights in a module marked for
+        sparsification (returns 0 for unmarked modules) """
+    num_nonzero_parameters = 0
+    if hasattr(module, 'sparsify'):
+        params = module.sparsification_params
+        if isinstance(module, torch.nn.Conv1d) or isinstance(module, torch.nn.ConvTranspose1d):
+            # params = (density, block_size)
+            num_nonzero_parameters = torch.ones_like(module.weight).sum().item() * params[0]
+        elif isinstance(module, torch.nn.GRU):
+            # params maps sub-matrix names to (density, block_size, keep_diagonal)
+            num_nonzero_parameters = module.input_size * module.hidden_size * (params['W_ir'][0] + params['W_iz'][0] + params['W_in'][0])
+            num_nonzero_parameters += module.hidden_size * module.hidden_size * (params['W_hr'][0] + params['W_hz'][0] + params['W_hn'][0])
+        elif isinstance(module, torch.nn.Linear):
+            num_nonzero_parameters = module.in_features * module.out_features * params[0]
+        else:
+            raise ValueError(f'unknown sparsification method for module of type {type(module)}')
+
+    return num_nonzero_parameters
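+
+
+if __name__ == "__main__":
+    print("Testing sparsification utils")
+
+    # Minimal smoke test (a sketch: layer sizes, densities and the schedule
+    # below are arbitrary choices, not part of the library).
+    net = torch.nn.Sequential(
+        mark_for_sparsification(torch.nn.Linear(16, 32), (0.5, [8, 4])),
+        torch.nn.ReLU(),
+        mark_for_sparsification(torch.nn.Linear(32, 16), (0.5, [8, 4])),
+    )
+
+    sparsify = create_sparsifier(net, start=0, stop=50, interval=5)
+
+    for _ in range(60):
+        sparsify(verbose=True)
+
+    print(f"total parameters: {count_parameters(net)}")
+    print(f"estimated non-zero parameters in first layer: {estimate_nonzero_parameters(net[0])}")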
--- /dev/null
+++ b/dnn/torch/dnntools/requirements.txt
@@ -1,0 +1,1 @@
+torch
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/dnntools/setup.py
@@ -1,0 +1,48 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+#!/usr/bin/env python
+import os
+from setuptools import setup
+
+lib_folder = os.path.dirname(os.path.realpath(__file__))
+
+with open(os.path.join(lib_folder, 'requirements.txt'), 'r') as f:
+    install_requires = list(f.read().splitlines())
+
+print(install_requires)
+
+setup(name='dnntools',
+      version='1.0',
+      author='Jan Buethe',
+      author_email='jbuethe@amazon.de',
+      description='Non-Standard tools for deep neural network training with PyTorch',
+      packages=['dnntools', 'dnntools.sparsification', 'dnntools.quantization'],
+      install_requires=install_requires
+      )
--- a/dnn/torch/osce/adv_train_model.py
+++ b/dnn/torch/osce/adv_train_model.py
@@ -111,7 +111,7 @@
 if has_git:
     working_dir = os.path.split(__file__)[0]
     try:
-        repo = git.Repo(working_dir)
+        repo = git.Repo(working_dir, search_parent_directories=True)
         setup['repo'] = dict()
         hash = repo.head.object.hexsha
         urls = list(repo.remote().urls)
@@ -407,6 +407,10 @@
             (gen_loss + lambda_feat * loss_feat + lambda_reg * loss_reg).backward()
 
             optimizer.step()
+
+            # sparsification
+            if hasattr(model, 'sparsifier'):
+                model.sparsifier()
 
             running_model_grad_norm += get_grad_norm(model).detach().cpu().item()
             running_adv_loss += gen_loss.detach().cpu().item()
--- a/dnn/torch/osce/adv_train_vocoder.py
+++ b/dnn/torch/osce/adv_train_vocoder.py
@@ -111,7 +111,7 @@
 if has_git:
     working_dir = os.path.split(__file__)[0]
     try:
-        repo = git.Repo(working_dir)
+        repo = git.Repo(working_dir, search_parent_directories=True)
         setup['repo'] = dict()
         hash = repo.head.object.hexsha
         urls = list(repo.remote().urls)
--- a/dnn/torch/osce/engine/engine.py
+++ b/dnn/torch/osce/engine/engine.py
@@ -46,6 +46,10 @@
             # update learning rate
             scheduler.step()
 
+            # sparsification
+            if hasattr(model, 'sparsifier'):
+                model.sparsifier()
+
             # update running loss
             running_loss += float(loss.cpu())
 
@@ -72,8 +76,6 @@
         with tqdm(dataloader, unit='batch', file=sys.stdout) as tepoch:
 
             for i, batch in enumerate(tepoch):
-
-
 
                 # push batch to device
                 for key in batch:
--- a/dnn/torch/osce/export_model_weights.py
+++ b/dnn/torch/osce/export_model_weights.py
@@ -43,6 +43,7 @@
 from utils.layers.limited_adaptive_comb1d import LimitedAdaptiveComb1d
 from utils.layers.limited_adaptive_conv1d import LimitedAdaptiveConv1d
 from utils.layers.td_shaper import TDShaper
+from utils.misc import remove_all_weight_norm
 from wexchange.torch import dump_torch_weights
 
 
@@ -58,30 +59,30 @@
     'nolace': [
         ('pitch_embedding', dict()),
         ('feature_net.conv1', dict()),
-        ('feature_net.conv2', dict(quantize=True, scale=None)),
-        ('feature_net.tconv', dict(quantize=True, scale=None)),
-        ('feature_net.gru', dict()),
+        ('feature_net.conv2', dict(quantize=True, scale=None, sparse=True)),
+        ('feature_net.tconv', dict(quantize=True, scale=None, sparse=True)),
+        ('feature_net.gru', dict(quantize=True, scale=None, recurrent_scale=None, input_sparse=True, recurrent_sparse=True)),
         ('cf1', dict(quantize=True, scale=None)),
         ('cf2', dict(quantize=True, scale=None)),
         ('af1', dict(quantize=True, scale=None)),
-        ('tdshape1', dict()),
-        ('tdshape2', dict()),
-        ('tdshape3', dict()),
+        ('tdshape1', dict(quantize=True, scale=None)),
+        ('tdshape2', dict(quantize=True, scale=None)),
+        ('tdshape3', dict(quantize=True, scale=None)),
         ('af2', dict(quantize=True, scale=None)),
         ('af3', dict(quantize=True, scale=None)),
         ('af4', dict(quantize=True, scale=None)),
-        ('post_cf1', dict(quantize=True, scale=None)),
-        ('post_cf2', dict(quantize=True, scale=None)),
-        ('post_af1', dict(quantize=True, scale=None)),
-        ('post_af2', dict(quantize=True, scale=None)),
-        ('post_af3', dict(quantize=True, scale=None))
+        ('post_cf1', dict(quantize=True, scale=None, sparse=True)),
+        ('post_cf2', dict(quantize=True, scale=None, sparse=True)),
+        ('post_af1', dict(quantize=True, scale=None, sparse=True)),
+        ('post_af2', dict(quantize=True, scale=None, sparse=True)),
+        ('post_af3', dict(quantize=True, scale=None, sparse=True))
     ],
     'lace' : [
         ('pitch_embedding', dict()),
         ('feature_net.conv1', dict()),
-        ('feature_net.conv2', dict(quantize=True, scale=None)),
-        ('feature_net.tconv', dict(quantize=True, scale=None)),
-        ('feature_net.gru', dict()),
+        ('feature_net.conv2', dict(quantize=True, scale=None, sparse=True)),
+        ('feature_net.tconv', dict(quantize=True, scale=None, sparse=True)),
+        ('feature_net.gru', dict(quantize=True, scale=None, recurrent_scale=None, input_sparse=True, recurrent_sparse=True)),
         ('cf1', dict(quantize=True, scale=None)),
         ('cf2', dict(quantize=True, scale=None)),
         ('af1', dict(quantize=True, scale=None))
@@ -140,6 +141,7 @@
     checkpoint = torch.load(checkpoint_path, map_location='cpu')
     model = model_dict[checkpoint['setup']['model']['name']](*checkpoint['setup']['model']['args'], **checkpoint['setup']['model']['kwargs'])
     model.load_state_dict(checkpoint['state_dict'])
+    remove_all_weight_norm(model, verbose=True)
 
     # CWriter
     model_name = checkpoint['setup']['model']['name']
--- a/dnn/torch/osce/models/lace.py
+++ b/dnn/torch/osce/models/lace.py
@@ -41,6 +41,12 @@
 from models.silk_feature_net import SilkFeatureNet
 from .scale_embedding import ScaleEmbedding
 
+import sys
+sys.path.append('../dnntools')
+
+from dnntools.sparsification import create_sparsifier
+
+
 class LACE(NNSBase):
     """ Linear-Adaptive Coding Enhancer """
     FRAME_SIZE=80
@@ -60,7 +66,12 @@
                  numbits_embedding_dim=8,
                  hidden_feature_dim=64,
                  partial_lookahead=True,
-                 norm_p=2):
+                 norm_p=2,
+                 softquant=False,
+                 sparsify=False,
+                 sparsification_schedule=[10000, 30000, 100],
+                 sparsification_density=0.5,
+                 apply_weight_norm=False):
 
         super().__init__(skip=skip, preemph=preemph)
 
@@ -85,7 +96,7 @@
 
         # feature net
         if partial_lookahead:
-            self.feature_net = SilkFeatureNetPL(num_features + pitch_embedding_dim + 2 * numbits_embedding_dim, cond_dim, hidden_feature_dim)
+            self.feature_net = SilkFeatureNetPL(num_features + pitch_embedding_dim + 2 * numbits_embedding_dim, cond_dim, hidden_feature_dim, softquant=softquant, sparsify=sparsify, sparsification_density=sparsification_density, apply_weight_norm=apply_weight_norm)
         else:
             self.feature_net = SilkFeatureNet(num_features + pitch_embedding_dim + 2 * numbits_embedding_dim, cond_dim)
 
@@ -92,11 +103,14 @@
         # comb filters
         left_pad = self.kernel_size // 2
         right_pad = self.kernel_size - 1 - left_pad
-        self.cf1 = LimitedAdaptiveComb1d(self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, overlap_size=40, use_bias=False, padding=[left_pad, right_pad], max_lag=pitch_max + 1, gain_limit_db=comb_gain_limit_db, global_gain_limits_db=global_gain_limits_db, norm_p=norm_p)
-        self.cf2 = LimitedAdaptiveComb1d(self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, overlap_size=40, use_bias=False, padding=[left_pad, right_pad], max_lag=pitch_max + 1, gain_limit_db=comb_gain_limit_db, global_gain_limits_db=global_gain_limits_db, norm_p=norm_p)
+        self.cf1 = LimitedAdaptiveComb1d(self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, overlap_size=40, use_bias=False, padding=[left_pad, right_pad], max_lag=pitch_max + 1, gain_limit_db=comb_gain_limit_db, global_gain_limits_db=global_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
+        self.cf2 = LimitedAdaptiveComb1d(self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, overlap_size=40, use_bias=False, padding=[left_pad, right_pad], max_lag=pitch_max + 1, gain_limit_db=comb_gain_limit_db, global_gain_limits_db=global_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
 
         # spectral shaping
-        self.af1 = LimitedAdaptiveConv1d(1, 1, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p)
+        self.af1 = LimitedAdaptiveConv1d(1, 1, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
+
+        if sparsify:
+            self.sparsifier = create_sparsifier(self, *sparsification_schedule)
 
     def flop_count(self, rate=16000, verbose=False):
 
--- a/dnn/torch/osce/models/no_lace.py
+++ b/dnn/torch/osce/models/no_lace.py
@@ -27,10 +27,14 @@
 */
 """
 
+import numbers
+
 import torch
 from torch import nn
 import torch.nn.functional as F
+from torch.nn.utils import weight_norm
 
+
 import numpy as np
 
 from utils.layers.limited_adaptive_comb1d import LimitedAdaptiveComb1d
@@ -43,6 +47,11 @@
 from models.silk_feature_net import SilkFeatureNet
 from .scale_embedding import ScaleEmbedding
 
+import sys
+sys.path.append('../dnntools')
+from dnntools.quantization import soft_quant
+from dnntools.sparsification import create_sparsifier, mark_for_sparsification
+
 class NoLACE(NNSBase):
     """ Non-Linear Adaptive Coding Enhancer """
     FRAME_SIZE=80
@@ -64,11 +73,15 @@
                  partial_lookahead=True,
                  norm_p=2,
                  avg_pool_k=4,
-                 pool_after=False):
+                 pool_after=False,
+                 softquant=False,
+                 sparsify=False,
+                 sparsification_schedule=[100, 1000, 100],
+                 sparsification_density=0.5,
+                 apply_weight_norm=False):
 
         super().__init__(skip=skip, preemph=preemph)
 
-
         self.num_features           = num_features
         self.cond_dim               = cond_dim
         self.pitch_max              = pitch_max
@@ -81,6 +94,11 @@
         self.hidden_feature_dim     = hidden_feature_dim
         self.partial_lookahead      = partial_lookahead
+        # assumed default: plain (non-residual) feature transform; the flag is
+        # referenced in feature_transform() below but otherwise never set
+        self.residual_in_feature_transform = False
 
+        if isinstance(sparsification_density, numbers.Number):
+            sparsification_density = 10 * [sparsification_density]
+
+        norm = weight_norm if apply_weight_norm else lambda x, name=None: x
+
         # pitch embedding
         self.pitch_embedding = nn.Embedding(pitch_max + 1, pitch_embedding_dim)
 
@@ -89,7 +107,7 @@
 
         # feature net
         if partial_lookahead:
-            self.feature_net = SilkFeatureNetPL(num_features + pitch_embedding_dim + 2 * numbits_embedding_dim, cond_dim, hidden_feature_dim)
+            self.feature_net = SilkFeatureNetPL(num_features + pitch_embedding_dim + 2 * numbits_embedding_dim, cond_dim, hidden_feature_dim, softquant=softquant, sparsify=sparsify, sparsification_density=sparsification_density, apply_weight_norm=apply_weight_norm)
         else:
             self.feature_net = SilkFeatureNet(num_features + pitch_embedding_dim + 2 * numbits_embedding_dim, cond_dim)
 
@@ -96,30 +114,46 @@
         # comb filters
         left_pad = self.kernel_size // 2
         right_pad = self.kernel_size - 1 - left_pad
-        self.cf1 = LimitedAdaptiveComb1d(self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, overlap_size=40, padding=[left_pad, right_pad], max_lag=pitch_max + 1, gain_limit_db=comb_gain_limit_db, global_gain_limits_db=global_gain_limits_db, norm_p=norm_p)
-        self.cf2 = LimitedAdaptiveComb1d(self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, overlap_size=40, padding=[left_pad, right_pad], max_lag=pitch_max + 1, gain_limit_db=comb_gain_limit_db, global_gain_limits_db=global_gain_limits_db, norm_p=norm_p)
+        self.cf1 = LimitedAdaptiveComb1d(self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, overlap_size=40, padding=[left_pad, right_pad], max_lag=pitch_max + 1, gain_limit_db=comb_gain_limit_db, global_gain_limits_db=global_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
+        self.cf2 = LimitedAdaptiveComb1d(self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, overlap_size=40, padding=[left_pad, right_pad], max_lag=pitch_max + 1, gain_limit_db=comb_gain_limit_db, global_gain_limits_db=global_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
 
         # spectral shaping
-        self.af1 = LimitedAdaptiveConv1d(1, 2, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, use_bias=False, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p)
+        self.af1 = LimitedAdaptiveConv1d(1, 2, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, use_bias=False, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
 
         # non-linear transforms
-        self.tdshape1 = TDShaper(cond_dim, frame_size=self.FRAME_SIZE, avg_pool_k=avg_pool_k, pool_after=pool_after)
-        self.tdshape2 = TDShaper(cond_dim, frame_size=self.FRAME_SIZE, avg_pool_k=avg_pool_k, pool_after=pool_after)
-        self.tdshape3 = TDShaper(cond_dim, frame_size=self.FRAME_SIZE, avg_pool_k=avg_pool_k, pool_after=pool_after)
+        self.tdshape1 = TDShaper(cond_dim, frame_size=self.FRAME_SIZE, avg_pool_k=avg_pool_k, pool_after=pool_after, softquant=softquant, apply_weight_norm=apply_weight_norm)
+        self.tdshape2 = TDShaper(cond_dim, frame_size=self.FRAME_SIZE, avg_pool_k=avg_pool_k, pool_after=pool_after, softquant=softquant, apply_weight_norm=apply_weight_norm)
+        self.tdshape3 = TDShaper(cond_dim, frame_size=self.FRAME_SIZE, avg_pool_k=avg_pool_k, pool_after=pool_after, softquant=softquant, apply_weight_norm=apply_weight_norm)
 
         # combinators
-        self.af2 = LimitedAdaptiveConv1d(2, 2, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, use_bias=False, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p)
-        self.af3 = LimitedAdaptiveConv1d(2, 2, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, use_bias=False, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p)
-        self.af4 = LimitedAdaptiveConv1d(2, 1, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, use_bias=False, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p)
+        self.af2 = LimitedAdaptiveConv1d(2, 2, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, use_bias=False, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
+        self.af3 = LimitedAdaptiveConv1d(2, 2, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, use_bias=False, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
+        self.af4 = LimitedAdaptiveConv1d(2, 1, self.kernel_size, cond_dim, frame_size=self.FRAME_SIZE, use_bias=False, padding=[self.kernel_size - 1, 0], gain_limits_db=conv_gain_limits_db, norm_p=norm_p, softquant=softquant, apply_weight_norm=apply_weight_norm)
 
         # feature transforms
-        self.post_cf1 = nn.Conv1d(cond_dim, cond_dim, 2)
-        self.post_cf2 = nn.Conv1d(cond_dim, cond_dim, 2)
-        self.post_af1 = nn.Conv1d(cond_dim, cond_dim, 2)
-        self.post_af2 = nn.Conv1d(cond_dim, cond_dim, 2)
-        self.post_af3 = nn.Conv1d(cond_dim, cond_dim, 2)
+        self.post_cf1 = norm(nn.Conv1d(cond_dim, cond_dim, 2))
+        self.post_cf2 = norm(nn.Conv1d(cond_dim, cond_dim, 2))
+        self.post_af1 = norm(nn.Conv1d(cond_dim, cond_dim, 2))
+        self.post_af2 = norm(nn.Conv1d(cond_dim, cond_dim, 2))
+        self.post_af3 = norm(nn.Conv1d(cond_dim, cond_dim, 2))
 
+        if softquant:
+            self.post_cf1 = soft_quant(self.post_cf1)
+            self.post_cf2 = soft_quant(self.post_cf2)
+            self.post_af1 = soft_quant(self.post_af1)
+            self.post_af2 = soft_quant(self.post_af2)
+            self.post_af3 = soft_quant(self.post_af3)
 
+
+        if sparsify:
+            mark_for_sparsification(self.post_cf1, (sparsification_density[4], [8, 4]))
+            mark_for_sparsification(self.post_cf2, (sparsification_density[5], [8, 4]))
+            mark_for_sparsification(self.post_af1, (sparsification_density[6], [8, 4]))
+            mark_for_sparsification(self.post_af2, (sparsification_density[7], [8, 4]))
+            mark_for_sparsification(self.post_af3, (sparsification_density[8], [8, 4]))
+
+            self.sparsifier = create_sparsifier(self, *sparsification_schedule)
+
     def flop_count(self, rate=16000, verbose=False):
 
         frame_rate = rate / self.FRAME_SIZE
@@ -141,9 +175,12 @@
         return feature_net_flops + comb_flops + af_flops + feature_flops + shape_flops
 
     def feature_transform(self, f, layer):
-        f = f.permute(0, 2, 1)
-        f = F.pad(f, [1, 0])
-        f = torch.tanh(layer(f))
+        f0 = f.permute(0, 2, 1)
+        f = F.pad(f0, [1, 0])
+        if self.residual_in_feature_transform:
+            f = torch.tanh(layer(f) + f0)
+        else:
+            f = torch.tanh(layer(f))
         return f.permute(0, 2, 1)
 
     def forward(self, x, features, periods, numbits, debug=False):
--- a/dnn/torch/osce/models/silk_feature_net_pl.py
+++ b/dnn/torch/osce/models/silk_feature_net_pl.py
@@ -26,36 +26,74 @@
    SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
 """
+import sys
+sys.path.append('../dnntools')
+import numbers
 
 
 import torch
 from torch import nn
 import torch.nn.functional as F
+from torch.nn.utils import weight_norm
 
 from utils.complexity import _conv1d_flop_count
 
+from dnntools.quantization.softquant import soft_quant
+from dnntools.sparsification import mark_for_sparsification
+
 class SilkFeatureNetPL(nn.Module):
     """ feature net with partial lookahead """
     def __init__(self,
                  feature_dim=47,
                  num_channels=256,
-                 hidden_feature_dim=64):
+                 hidden_feature_dim=64,
+                 softquant=False,
+                 sparsify=True,
+                 sparsification_density=0.5,
+                 apply_weight_norm=False):
 
         super(SilkFeatureNetPL, self).__init__()
 
+        if isinstance(sparsification_density, numbers.Number):
+            sparsification_density = 4 * [sparsification_density]
+
         self.feature_dim = feature_dim
         self.num_channels = num_channels
         self.hidden_feature_dim = hidden_feature_dim
+        # assumed default: repeat-upsampling variant disabled; the flag is
+        # referenced below but otherwise never set
+        self.repeat_upsamp = False
 
-        self.conv1 = nn.Conv1d(feature_dim, self.hidden_feature_dim, 1)
-        self.conv2 = nn.Conv1d(4 * self.hidden_feature_dim, num_channels, 2)
-        self.tconv = nn.ConvTranspose1d(num_channels, num_channels, 4, 4)
+        norm = weight_norm if apply_weight_norm else lambda x, name=None: x
 
-        self.gru = nn.GRU(num_channels, num_channels, batch_first=True)
+        self.conv1 = norm(nn.Conv1d(feature_dim, self.hidden_feature_dim, 1))
+        self.conv2 = norm(nn.Conv1d(4 * self.hidden_feature_dim, num_channels, 2))
+        self.tconv = norm(nn.ConvTranspose1d(num_channels, num_channels, 4, 4))
+        gru_input_dim = num_channels + self.repeat_upsamp_dim if self.repeat_upsamp else num_channels
+        self.gru   = norm(norm(nn.GRU(gru_input_dim, num_channels, batch_first=True), name='weight_hh_l0'), name='weight_ih_l0')
 
+        if softquant:
+            self.conv2 = soft_quant(self.conv2)
+            if not self.repeat_upsamp: self.tconv = soft_quant(self.tconv)
+            self.gru = soft_quant(self.gru, names=['weight_hh_l0', 'weight_ih_l0'])
+
+
+        if sparsify:
+            mark_for_sparsification(self.conv2, (sparsification_density[0], [8, 4]))
+            if not self.repeat_upsamp: mark_for_sparsification(self.tconv, (sparsification_density[1], [8, 4]))
+            mark_for_sparsification(
+                self.gru,
+                {
+                    'W_ir' : (sparsification_density[2], [8, 4], False),
+                    'W_iz' : (sparsification_density[2], [8, 4], False),
+                    'W_in' : (sparsification_density[2], [8, 4], False),
+                    'W_hr' : (sparsification_density[3], [8, 4], True),
+                    'W_hz' : (sparsification_density[3], [8, 4], True),
+                    'W_hn' : (sparsification_density[3], [8, 4], True),
+                }
+            )
+
+
     def flop_count(self, rate=200):
         count = 0
-        for conv in self.conv1, self.conv2, self.tconv:
+        for conv in [self.conv1, self.conv2] if self.repeat_upsamp else [self.conv1, self.conv2, self.tconv]:
             count += _conv1d_flop_count(conv, rate)
 
         count += 2 * (3 * self.gru.input_size * self.gru.hidden_size + 3 * self.gru.hidden_size * self.gru.hidden_size) * rate
@@ -82,7 +120,7 @@
         c = torch.tanh(self.conv2(F.pad(c, [1, 0])))
 
         # upsampling
-        c = self.tconv(c)
+        c = torch.tanh(self.tconv(c))
         c = c.permute(0, 2, 1)
 
         c, _ = self.gru(c, state)
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/create_input_data.sh
@@ -1,0 +1,25 @@
+#!/bin/bash
+
+
+INPUT="dataset/LibriSpeech"
+OUTPUT="testdata"
+OPUSDEMO="/local/experiments/ietf_enhancement_studies/bin/opus_demo_patched"
+BITRATES=( 6000 7500 ) # 9000 12000 15000 18000 24000 32000 )
+
+
+mkdir -p $OUTPUT
+
+for fn in $(find $INPUT -name "*.wav")
+do
+    name=$(basename ${fn%*.wav})
+    sox $fn -r 16000 -b 16 -e signed-integer ${OUTPUT}/tmp.raw
+    for br in ${BITRATES[@]}
+    do
+        folder=${OUTPUT}/"${name}_${br}.se"
+        echo "creating ${folder}..."
+        mkdir -p $folder
+        cp ${OUTPUT}/tmp.raw ${folder}/clean.s16
+        (cd ${folder} && $OPUSDEMO voip 16000 1 $br clean.s16 noisy.s16)
+    done
+    rm -f ${OUTPUT}/tmp.raw
+done
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/env.rc
@@ -1,0 +1,7 @@
+#!/bin/bash
+
+export PYTHON=/home/ubuntu/opt/miniconda3/envs/torch/bin/python
+export LACE="/local/experiments/ietf_enhancement_studies/checkpoints/lace_checkpoint.pth"
+export NOLACE="/local/experiments/ietf_enhancement_studies/checkpoints/nolace_checkpoint.pth"
+export TESTMODEL="/local/experiments/ietf_enhancement_studies/opus/dnn/torch/osce/test_model.py"
+export OPUSDEMO="/local/experiments/ietf_enhancement_studies/bin/opus_demo_patched"
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/evaluate.py
@@ -1,0 +1,113 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import os
+import argparse
+
+
+from scipy.io import wavfile
+from pesq import pesq
+import numpy as np
+from moc import compare
+from moc2 import compare as compare2
+#from warpq import compute_WAPRQ as warpq
+from lace_loss_metric import compare as laceloss_compare
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('folder', type=str, help='folder with processed items')
+parser.add_argument('metric', type=str, choices=['pesq', 'moc', 'moc2', 'laceloss'], help='metric to be used for evaluation')
+
+
+def get_bitrates(folder):
+    with open(os.path.join(folder, 'bitrates.txt')) as f:
+        x = f.read()
+
+    bitrates = [int(y) for y in x.rstrip('\n').split()]
+
+    return bitrates
+
+def get_itemlist(folder):
+    with open(os.path.join(folder, 'items.txt')) as f:
+        lines = f.readlines()
+
+    items = [x.split()[0] for x in lines]
+
+    return items
+
+
+def process_item(folder, item, bitrate, metric):
+    fs, x_clean  = wavfile.read(os.path.join(folder, 'clean', f"{item}_{bitrate}_clean.wav"))
+    fs, x_opus   = wavfile.read(os.path.join(folder, 'opus', f"{item}_{bitrate}_opus.wav"))
+    fs, x_lace   = wavfile.read(os.path.join(folder, 'lace', f"{item}_{bitrate}_lace.wav"))
+    fs, x_nolace = wavfile.read(os.path.join(folder, 'nolace', f"{item}_{bitrate}_nolace.wav"))
+
+    x_clean  = x_clean.astype(np.float32) / 2**15
+    x_opus   = x_opus.astype(np.float32) / 2**15
+    x_lace   = x_lace.astype(np.float32) / 2**15
+    x_nolace = x_nolace.astype(np.float32) / 2**15
+
+    if metric == 'pesq':
+        result = [pesq(fs, x_clean, x_opus), pesq(fs, x_clean, x_lace), pesq(fs, x_clean, x_nolace)]
+    elif metric == 'moc':
+        result = [compare(x_clean, x_opus), compare(x_clean, x_lace), compare(x_clean, x_nolace)]
+    elif metric == 'moc2':
+        result = [compare2(x_clean, x_opus), compare2(x_clean, x_lace), compare2(x_clean, x_nolace)]
+    # elif metric == 'warpq':
+        # result = [warpq(x_clean, x_opus), warpq(x_clean, x_lace), warpq(x_clean, x_nolace)]
+    elif metric == 'laceloss':
+        result = [laceloss_compare(x_clean, x_opus), laceloss_compare(x_clean, x_lace), laceloss_compare(x_clean, x_nolace)]
+    else:
+        raise ValueError(f'unknown metric {metric}')
+
+    return result
+
+def process_bitrate(folder, items, bitrate, metric):
+    results = np.zeros((len(items), 3))
+
+    for i, item in enumerate(items):
+        results[i, :] = np.array(process_item(folder, item, bitrate, metric))
+
+    return results
+
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+
+    items = get_itemlist(args.folder)
+    bitrates = get_bitrates(args.folder)
+
+    results = dict()
+    for br in bitrates:
+        print(f"processing bitrate {br}...")
+        results[br] = process_bitrate(args.folder, items, br, args.metric)
+
+    np.save(os.path.join(args.folder, f'results_{args.metric}.npy'), results)
+
+    print("Done.")
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/lace_loss_metric.py
@@ -1,0 +1,330 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+"""STFT-based Loss modules."""
+
+import torch
+import torch.nn.functional as F
+from torch import nn
+import numpy as np
+import torchaudio
+
+
+def get_window(win_name, win_length, *args, **kwargs):
+    window_dict = {
+        'bartlett_window'   : torch.bartlett_window,
+        'blackman_window'   : torch.blackman_window,
+        'hamming_window'    : torch.hamming_window,
+        'hann_window'       : torch.hann_window,
+        'kaiser_window'     : torch.kaiser_window
+    }
+
+    if win_name not in window_dict:
+        raise ValueError(f"unknown window {win_name}")
+
+    return window_dict[win_name](win_length, *args, **kwargs)
+
+
+def stft(x, fft_size, hop_size, win_length, window):
+    """Perform STFT and convert to magnitude spectrogram.
+    Args:
+        x (Tensor): Input signal tensor (B, T).
+        fft_size (int): FFT size.
+        hop_size (int): Hop size.
+        win_length (int): Window length.
+        window (str): Window function type.
+    Returns:
+        Tensor: Magnitude spectrogram (B, fft_size // 2 + 1, #frames).
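+
+    Example (illustrative; assumes the default centered STFT):
+        >>> x = torch.randn(2, 16000)
+        >>> stft(x, 512, 256, 512, 'hann_window').shape  # (2, 257, 63)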
+    """
+
+    win = get_window(window, win_length).to(x.device)
+    x_stft = torch.stft(x, fft_size, hop_size, win_length, win, return_complex=True)
+
+
+    return torch.clamp(torch.abs(x_stft), min=1e-7)
+
+def spectral_convergence_loss(Y_true, Y_pred):
+    dims=list(range(1, len(Y_pred.shape)))
+    return torch.mean(torch.norm(torch.abs(Y_true) - torch.abs(Y_pred), p="fro", dim=dims) / (torch.norm(Y_pred, p="fro", dim=dims) + 1e-6))
+
+
+def log_magnitude_loss(Y_true, Y_pred):
+    Y_true_log_abs = torch.log(torch.abs(Y_true) + 1e-15)
+    Y_pred_log_abs = torch.log(torch.abs(Y_pred) + 1e-15)
+
+    return torch.mean(torch.abs(Y_true_log_abs - Y_pred_log_abs))
+
+def spectral_xcorr_loss(Y_true, Y_pred):
+    Y_true = Y_true.abs()
+    Y_pred = Y_pred.abs()
+    dims=list(range(1, len(Y_pred.shape)))
+    xcorr = torch.sum(Y_true * Y_pred, dim=dims) / torch.sqrt(torch.sum(Y_true ** 2, dim=dims) * torch.sum(Y_pred ** 2, dim=dims) + 1e-9)
+
+    return 1 - xcorr.mean()
+
+
+
+class MRLogMelLoss(nn.Module):
+    def __init__(self,
+                 fft_sizes=[512, 256, 128, 64],
+                 overlap=0.5,
+                 fs=16000,
+                 n_mels=18
+                 ):
+
+        self.fft_sizes  = fft_sizes
+        self.overlap    = overlap
+        self.fs         = fs
+        self.n_mels     = n_mels
+
+        super().__init__()
+
+        self.mel_specs = []
+        for fft_size in fft_sizes:
+            hop_size = int(round(fft_size * (1 - self.overlap)))
+
+            n_mels = self.n_mels
+            if fft_size < 128:
+                n_mels //= 2
+
+            self.mel_specs.append(torchaudio.transforms.MelSpectrogram(fs, fft_size, hop_length=hop_size, n_mels=n_mels))
+
+        for i, mel_spec in enumerate(self.mel_specs):
+            self.add_module(f'mel_spec_{i+1}', mel_spec)
+
+    def forward(self, y_true, y_pred):
+
+        loss = torch.zeros(1, device=y_true.device)
+
+        for mel_spec in self.mel_specs:
+            Y_true = mel_spec(y_true)
+            Y_pred = mel_spec(y_pred)
+            loss = loss + log_magnitude_loss(Y_true, Y_pred)
+
+        loss = loss / len(self.mel_specs)
+
+        return loss
+
+def create_weight_matrix(num_bins, bins_per_band=10):
+    m = torch.zeros((num_bins, num_bins), dtype=torch.float32)
+
+    r0 = bins_per_band // 2
+    r1 = bins_per_band - r0
+
+    for i in range(num_bins):
+        i0 = max(i - r0, 0)
+        j0 = min(i + r1, num_bins)
+
+        m[i, i0: j0] += 1
+
+        if i < r0:
+            m[i, :r0 - i] += 1
+
+        if i > num_bins - r1:
+            m[i, num_bins - r1 - i:] += 1
+
+    return m / bins_per_band
+
+def weighted_spectral_convergence(Y_true, Y_pred, w):
+
+    # calculate sfm based weights
+    logY = torch.log(torch.abs(Y_true) + 1e-9)
+    Y = torch.abs(Y_true)
+
+    avg_logY = torch.matmul(logY.transpose(1, 2), w)
+    avg_Y = torch.matmul(Y.transpose(1, 2), w)
+
+    sfm = torch.exp(avg_logY) / (avg_Y + 1e-9)
+
+    weight = (torch.relu(1 - sfm) ** .5).transpose(1, 2)
+
+    loss = torch.mean(
+        torch.mean(weight * torch.abs(torch.abs(Y_true) - torch.abs(Y_pred)), dim=[1, 2])
+        / (torch.mean( weight * torch.abs(Y_true), dim=[1, 2]) + 1e-9)
+    )
+
+    return loss
+
+def gen_filterbank(N, Fs=16000):
+    in_freq = (np.arange(N+1, dtype='float32')/N*Fs/2)[None,:]
+    out_freq = (np.arange(N, dtype='float32')/N*Fs/2)[:,None]
+    #ERB from B.C.J Moore, An Introduction to the Psychology of Hearing, 5th Ed., page 73.
+    ERB_N = 24.7 + .108*in_freq
+    delta = np.abs(in_freq-out_freq)/ERB_N
+    center = (delta<.5).astype('float32')
+    R = -12*center*delta**2 + (1-center)*(3-12*delta)
+    RE = 10.**(R/10.)
+    norm = np.sum(RE, axis=1)
+    RE = RE/norm[:, np.newaxis]
+    return torch.from_numpy(RE)
+
+def smooth_log_mag(Y_true, Y_pred, filterbank):
+    Y_true_smooth = torch.matmul(filterbank, torch.abs(Y_true))
+    Y_pred_smooth = torch.matmul(filterbank, torch.abs(Y_pred))
+
+    loss = torch.abs(
+        torch.log(Y_true_smooth + 1e-9) - torch.log(Y_pred_smooth + 1e-9)
+    )
+
+    loss = loss.mean()
+
+    return loss
+
+class MRSTFTLoss(nn.Module):
+    def __init__(self,
+                 fft_sizes=[2048, 1024, 512, 256, 128, 64],
+                 overlap=0.5,
+                 window='hann_window',
+                 fs=16000,
+                 log_mag_weight=0,
+                 sc_weight=0,
+                 wsc_weight=0,
+                 smooth_log_mag_weight=2,
+                 sxcorr_weight=1):
+        super().__init__()
+
+        self.fft_sizes = fft_sizes
+        self.overlap = overlap
+        self.window = window
+        self.log_mag_weight = log_mag_weight
+        self.sc_weight = sc_weight
+        self.wsc_weight = wsc_weight
+        self.smooth_log_mag_weight = smooth_log_mag_weight
+        self.sxcorr_weight = sxcorr_weight
+        self.fs = fs
+
+        # weights for SFM weighted spectral convergence loss
+        self.wsc_weights = torch.nn.ParameterDict()
+        for fft_size in fft_sizes:
+            width = min(11, int(1000 * fft_size / self.fs + .5))
+            width += width % 2
+            self.wsc_weights[str(fft_size)] = torch.nn.Parameter(
+                create_weight_matrix(fft_size // 2 + 1, width),
+                requires_grad=False
+            )
+
+        # filterbanks for smooth log magnitude loss
+        self.filterbanks = torch.nn.ParameterDict()
+        for fft_size in fft_sizes:
+            self.filterbanks[str(fft_size)] = torch.nn.Parameter(
+                gen_filterbank(fft_size//2),
+                requires_grad=False
+            )
+
+
+    def __call__(self, y_true, y_pred):
+
+
+        lm_loss = torch.zeros(1, device=y_true.device)
+        sc_loss = torch.zeros(1, device=y_true.device)
+        wsc_loss = torch.zeros(1, device=y_true.device)
+        slm_loss = torch.zeros(1, device=y_true.device)
+        sxcorr_loss = torch.zeros(1, device=y_true.device)
+
+        for fft_size in self.fft_sizes:
+            hop_size = int(round(fft_size * (1 - self.overlap)))
+            win_size = fft_size
+
+            Y_true = stft(y_true, fft_size, hop_size, win_size, self.window)
+            Y_pred = stft(y_pred, fft_size, hop_size, win_size, self.window)
+
+            if self.log_mag_weight > 0:
+                lm_loss = lm_loss + log_magnitude_loss(Y_true, Y_pred)
+
+            if self.sc_weight > 0:
+                sc_loss = sc_loss + spectral_convergence_loss(Y_true, Y_pred)
+
+            if self.wsc_weight > 0:
+                wsc_loss = wsc_loss + weighted_spectral_convergence(Y_true, Y_pred, self.wsc_weights[str(fft_size)])
+
+            if self.smooth_log_mag_weight > 0:
+                slm_loss = slm_loss + smooth_log_mag(Y_true, Y_pred, self.filterbanks[str(fft_size)])
+
+            if self.sxcorr_weight > 0:
+                sxcorr_loss = sxcorr_loss + spectral_xcorr_loss(Y_true, Y_pred)
+
+
+        total_loss = (self.log_mag_weight * lm_loss + self.sc_weight * sc_loss
+                + self.wsc_weight * wsc_loss + self.smooth_log_mag_weight * slm_loss
+                + self.sxcorr_weight * sxcorr_loss) / len(self.fft_sizes)
+
+        return total_loss
+
+
+def td_l2_norm(y_true, y_pred):
+    dims = list(range(1, len(y_true.shape)))
+
+    loss = torch.mean((y_true - y_pred) ** 2, dim=dims) / (torch.mean(y_pred ** 2, dim=dims) ** .5 + 1e-6)
+
+    return loss.mean()
+
+
+class LaceLoss(nn.Module):
+    def __init__(self):
+        super().__init__()
+
+
+        self.stftloss = MRSTFTLoss(log_mag_weight=0, sc_weight=0, wsc_weight=0, smooth_log_mag_weight=2, sxcorr_weight=1)
+
+
+    def forward(self, x, y):
+        specloss = self.stftloss(x, y)
+        phaseloss = td_l2_norm(x, y)
+        total_loss = (specloss + 10 * phaseloss) / 13
+
+        return total_loss
+
+    def compare(self, x_ref, x_deg):
+        # trim items to same size
+        n = min(len(x_ref), len(x_deg))
+        x_ref = x_ref[:n].copy()
+        x_deg = x_deg[:n].copy()
+
+        # pre-emphasis
+        x_ref[1:] -= 0.85 * x_ref[:-1]
+        x_deg[1:] -= 0.85 * x_deg[:-1]
+
+        device = next(iter(self.parameters())).device
+
+        x = torch.from_numpy(x_ref).to(device)
+        y = torch.from_numpy(x_deg).to(device)
+
+        with torch.no_grad():
+            dist = 10 * self.forward(x, y)
+
+        return dist.cpu().numpy().item()
+
+
+lace_loss = LaceLoss()
+device = 'cuda' if torch.cuda.is_available() else 'cpu'
+lace_loss.to(device)
+
+def compare(x, y):
+
+    return lace_loss.compare(x, y)
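+
+# Usage sketch: load reference and degraded signals as float32 numpy arrays
+# scaled to [-1, 1] (16 kHz mono, e.g. via scipy.io.wavfile) and call
+# compare(x_ref, x_deg); lower values indicate a closer match. See
+# evaluate.py for an example.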
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/make_boxplots.py
@@ -1,0 +1,116 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import os
+import argparse
+
+import numpy as np
+import matplotlib.pyplot as plt
+from prettytable import PrettyTable
+from matplotlib.patches import Patch
+
+parser = argparse.ArgumentParser()
+parser.add_argument('folder', type=str, help='path to folder with pre-calculated metrics')
+parser.add_argument('--metric', choices=['pesq', 'moc', 'warpq', 'nomad', 'laceloss', 'all'], default='all', help='default: all')
+parser.add_argument('--output', type=str, default=None, help='alternative output folder, default: folder')
+
+def load_data(folder):
+    data = dict()
+
+    if os.path.isfile(os.path.join(folder, 'results_moc.npy')):
+        data['moc'] = np.load(os.path.join(folder, 'results_moc.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_moc2.npy')):
+        data['moc2'] = np.load(os.path.join(folder, 'results_moc2.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_pesq.npy')):
+        data['pesq'] = np.load(os.path.join(folder, 'results_pesq.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_warpq.npy')):
+        data['warpq'] = np.load(os.path.join(folder, 'results_warpq.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_nomad.npy')):
+        data['nomad'] = np.load(os.path.join(folder, 'results_nomad.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_laceloss.npy')):
+        data['laceloss'] = np.load(os.path.join(folder, 'results_laceloss.npy'), allow_pickle=True).item()
+
+    return data
+
+def plot_data(filename, data, title=None):
+    compare_dict = dict()
+    for br in data.keys():
+        compare_dict[f'Opus {br/1000:.1f} kb/s'] = data[br][:, 0]
+        compare_dict[f'LACE {br/1000:.1f} kb/s'] = data[br][:, 1]
+        compare_dict[f'NoLACE {br/1000:.1f} kb/s'] = data[br][:, 2]
+
+    plt.rcParams.update({
+        "text.usetex": True,
+        "font.family": "Helvetica",
+        "font.size": 32
+    })
+
+    black = '#000000'
+    red = '#ff5745'
+    blue = '#007dbc'
+    colors = [black, red, blue]
+    legend_elements = [Patch(facecolor=colors[0], label='Opus SILK'),
+                    Patch(facecolor=colors[1], label='LACE'),
+                    Patch(facecolor=colors[2], label='NoLACE')]
+
+    fig, ax = plt.subplots()
+    fig.set_size_inches(40, 20)
+    bplot = ax.boxplot(compare_dict.values(), showfliers=False, notch=True, patch_artist=True)
+
+    for i, patch in enumerate(bplot['boxes']):
+        patch.set_facecolor(colors[i%3])
+
+    ax.set_xticklabels(compare_dict.keys(), rotation=290)
+
+    if title is not None:
+        ax.set_title(title)
+
+    ax.legend(handles=legend_elements)
+
+    fig.savefig(filename, bbox_inches='tight')
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+    data = load_data(args.folder)
+
+
+    metrics = list(data.keys()) if args.metric == 'all' else [args.metric]
+    folder = args.folder if args.output is None else args.output
+    os.makedirs(folder, exist_ok=True)
+
+    for metric in metrics:
+        print(f"Plotting data for {metric} metric...")
+        plot_data(os.path.join(folder, f"boxplot_{metric}.png"), data[metric], title=metric.upper())
+
+    print("Done.")
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/make_boxplots_moctest.py
@@ -1,0 +1,109 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import os
+import argparse
+
+import numpy as np
+import matplotlib.pyplot as plt
+from prettytable import PrettyTable
+from matplotlib.patches import Patch
+
+parser = argparse.ArgumentParser()
+parser.add_argument('folder', type=str, help='path to folder with pre-calculated metrics')
+parser.add_argument('--metric', choices=['pesq', 'moc', 'warpq', 'nomad', 'laceloss', 'all'], default='all', help='default: all')
+parser.add_argument('--output', type=str, default=None, help='alternative output folder, default: folder')
+
+def load_data(folder):
+    data = dict()
+
+    if os.path.isfile(os.path.join(folder, 'results_moc.npy')):
+        data['moc'] = np.load(os.path.join(folder, 'results_moc.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_pesq.npy')):
+        data['pesq'] = np.load(os.path.join(folder, 'results_pesq.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_warpq.npy')):
+        data['warpq'] = np.load(os.path.join(folder, 'results_warpq.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_nomad.npy')):
+        data['nomad'] = np.load(os.path.join(folder, 'results_nomad.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_laceloss.npy')):
+        data['laceloss'] = np.load(os.path.join(folder, 'results_laceloss.npy'), allow_pickle=True).item()
+
+    return data
+
+def plot_data(filename, data, title=None):
+    compare_dict = dict()
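+    # three boxes per bitrate: column 0 is Opus, column 1 LACE trained with MOC loss only,
+    # column 2 LACE trained with MOC + TD loss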
+    for br in data.keys():
+        compare_dict[f'Opus {br/1000:.1f} kb/s'] = data[br][:, 0]
+        compare_dict[f'LACE (MOC only) {br/1000:.1f} kb/s'] = data[br][:, 1]
+        compare_dict[f'LACE (MOC + TD) {br/1000:.1f} kb/s'] = data[br][:, 2]
+
+    plt.rcParams.update({
+        "text.usetex": True,
+        "font.family": "Helvetica",
+        "font.size": 32
+    })
+    colors = ['pink', 'lightblue', 'lightgreen']
+    legend_elements = [Patch(facecolor=colors[0], label='Opus SILK'),
+                       Patch(facecolor=colors[1], label='MOC loss only'),
+                       Patch(facecolor=colors[2], label='MOC + TD loss')]
+
+    fig, ax = plt.subplots()
+    fig.set_size_inches(40, 20)
+    bplot = ax.boxplot(compare_dict.values(), showfliers=False, notch=True, patch_artist=True)
+
+    for i, patch in enumerate(bplot['boxes']):
+        patch.set_facecolor(colors[i%3])
+
+    ax.set_xticklabels(compare_dict.keys(), rotation=290)
+
+    if title is not None:
+        ax.set_title(title)
+
+    ax.legend(handles=legend_elements)
+
+    fig.savefig(filename, bbox_inches='tight')
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+    data = load_data(args.folder)
+
+
+    metrics = list(data.keys()) if args.metric == 'all' else [args.metric]
+    folder = args.folder if args.output is None else args.output
+    os.makedirs(folder, exist_ok=True)
+
+    for metric in metrics:
+        print(f"Plotting data for {metric} metric...")
+        plot_data(os.path.join(folder, f"boxplot_{metric}.png"), data[metric], title=metric.upper())
+
+    print("Done.")
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/make_tables.py
@@ -1,0 +1,124 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import os
+import argparse
+
+import numpy as np
+import matplotlib.pyplot as plt
+from prettytable import PrettyTable
+from matplotlib.patches import Patch
+
+parser = argparse.ArgumentParser()
+parser.add_argument('folder', type=str, help='path to folder with pre-calculated metrics')
+parser.add_argument('--metric', choices=['pesq', 'moc', 'moc2', 'warpq', 'nomad', 'laceloss', 'all'], default='all', help='default: all')
+parser.add_argument('--output', type=str, default=None, help='alternative output folder, default: folder')
+
+def load_data(folder):
+    data = dict()
+
+    if os.path.isfile(os.path.join(folder, 'results_moc.npy')):
+        data['moc'] = np.load(os.path.join(folder, 'results_moc.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_moc2.npy')):
+        data['moc2'] = np.load(os.path.join(folder, 'results_moc2.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_pesq.npy')):
+        data['pesq'] = np.load(os.path.join(folder, 'results_pesq.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_warpq.npy')):
+        data['warpq'] = np.load(os.path.join(folder, 'results_warpq.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_nomad.npy')):
+        data['nomad'] = np.load(os.path.join(folder, 'results_nomad.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_laceloss.npy')):
+        data['laceloss'] = np.load(os.path.join(folder, 'results_laceloss.npy'), allow_pickle=True).item()
+
+    return data
+
+def make_table(filename, data, title=None):
+
+    # per-bitrate mean (standard deviation) for each condition
+    tbl = PrettyTable()
+    tbl.field_names = ['bitrate (bps)', 'Opus', 'LACE', 'NoLACE']
+    for br in data.keys():
+        opus = data[br][:, 0]
+        lace = data[br][:, 1]
+        nolace = data[br][:, 2]
+        tbl.add_row([br, f"{float(opus.mean()):.3f} ({float(opus.std()):.2f})", f"{float(lace.mean()):.3f} ({float(lace.std()):.2f})", f"{float(nolace.mean()):.3f} ({float(nolace.std()):.2f})"])
+
+    with open(filename + ".txt", "w") as f:
+        f.write(str(tbl))
+
+    with open(filename + ".html", "w") as f:
+        f.write(tbl.get_html_string())
+
+    with open(filename + ".csv", "w") as f:
+        f.write(tbl.get_csv_string())
+
+    print(tbl)
+
+
+def make_diff_table(filename, data, title=None):
+
+    # per-bitrate mean (standard deviation) of per-item differences vs. Opus
+    tbl = PrettyTable()
+    tbl.field_names = ['bitrate (bps)', 'LACE - Opus', 'NoLACE - Opus']
+    for br in data.keys():
+        opus = data[br][:, 0]
+        lace = data[br][:, 1] - opus
+        nolace = data[br][:, 2] - opus
+        tbl.add_row([br, f"{float(lace.mean()):.3f} ({float(lace.std()):.2f})", f"{float(nolace.mean()):.3f} ({float(nolace.std()):.2f})"])
+
+    with open(filename + ".txt", "w") as f:
+        f.write(str(tbl))
+
+    with open(filename + ".html", "w") as f:
+        f.write(tbl.get_html_string())
+
+    with open(filename + ".csv", "w") as f:
+        f.write(tbl.get_csv_string())
+
+    print(tbl)
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+    data = load_data(args.folder)
+
+    metrics = list(data.keys()) if args.metric == 'all' else [args.metric]
+    folder = args.folder if args.output is None else args.output
+    os.makedirs(folder, exist_ok=True)
+
+    for metric in metrics:
+        print(f"Plotting data for {metric} metric...")
+        make_table(os.path.join(folder, f"table_{metric}"), data[metric])
+        make_diff_table(os.path.join(folder, f"table_diff_{metric}"), data[metric])
+
+    print("Done.")
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/make_tables_moctest.py
@@ -1,0 +1,121 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import os
+import argparse
+
+import numpy as np
+import matplotlib.pyplot as plt
+from prettytable import PrettyTable
+from matplotlib.patches import Patch
+
+parser = argparse.ArgumentParser()
+parser.add_argument('folder', type=str, help='path to folder with pre-calculated metrics')
+parser.add_argument('--metric', choices=['pesq', 'moc', 'warpq', 'nomad', 'laceloss', 'all'], default='all', help='default: all')
+parser.add_argument('--output', type=str, default=None, help='alternative output folder, default: folder')
+
+def load_data(folder):
+    data = dict()
+
+    if os.path.isfile(os.path.join(folder, 'results_moc.npy')):
+        data['moc'] = np.load(os.path.join(folder, 'results_moc.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_pesq.npy')):
+        data['pesq'] = np.load(os.path.join(folder, 'results_pesq.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_warpq.npy')):
+        data['warpq'] = np.load(os.path.join(folder, 'results_warpq.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_nomad.npy')):
+        data['nomad'] = np.load(os.path.join(folder, 'results_nomad.npy'), allow_pickle=True).item()
+
+    if os.path.isfile(os.path.join(folder, 'results_laceloss.npy')):
+        data['laceloss'] = np.load(os.path.join(folder, 'results_laceloss.npy'), allow_pickle=True).item()
+
+    return data
+
+def make_table(filename, data, title=None):
+
+    # per-bitrate mean (standard deviation) for each condition
+    tbl = PrettyTable()
+    tbl.field_names = ['bitrate (bps)', 'Opus', 'LACE', 'NoLACE']
+    for br in data.keys():
+        opus = data[br][:, 0]
+        lace = data[br][:, 1]
+        nolace = data[br][:, 2]
+        tbl.add_row([br, f"{float(opus.mean()):.3f} ({float(opus.std()):.2f})", f"{float(lace.mean()):.3f} ({float(lace.std()):.2f})", f"{float(nolace.mean()):.3f} ({float(nolace.std()):.2f})"])
+
+    with open(filename + ".txt", "w") as f:
+        f.write(str(tbl))
+
+    with open(filename + ".html", "w") as f:
+        f.write(tbl.get_html_string())
+
+    with open(filename + ".csv", "w") as f:
+        f.write(tbl.get_csv_string())
+
+    print(tbl)
+
+
+def make_diff_table(filename, data, title=None):
+
+    # per-bitrate mean (standard deviation) of per-item differences vs. Opus
+    tbl = PrettyTable()
+    tbl.field_names = ['bitrate (bps)', 'LACE - Opus', 'NoLACE - Opus']
+    for br in data.keys():
+        opus = data[br][:, 0]
+        lace = data[br][:, 1] - opus
+        nolace = data[br][:, 2] - opus
+        tbl.add_row([br, f"{float(lace.mean()):.3f} ({float(lace.std()):.2f})", f"{float(nolace.mean()):.3f} ({float(nolace.std()):.2f})"])
+
+    with open(filename + ".txt", "w") as f:
+        f.write(str(tbl))
+
+    with open(filename + ".html", "w") as f:
+        f.write(tbl.get_html_string())
+
+    with open(filename + ".csv", "w") as f:
+        f.write(tbl.get_csv_string())
+
+    print(tbl)
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+    data = load_data(args.folder)
+
+    metrics = list(data.keys()) if args.metric == 'all' else [args.metric]
+    folder = args.folder if args.output is None else args.output
+    os.makedirs(folder, exist_ok=True)
+
+    for metric in metrics:
+        print(f"Plotting data for {metric} metric...")
+        make_table(os.path.join(folder, f"table_{metric}"), data[metric])
+        make_diff_table(os.path.join(folder, f"table_diff_{metric}"), data[metric])
+
+    print("Done.")
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/moc.py
@@ -1,0 +1,182 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import numpy as np
+import scipy.signal
+
+def compute_vad_mask(x, fs, stop_db=-70):
+
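+    # energy-based VAD: split x into 20 ms frames (frame_length = ceil(fs / 50)); frames whose
+    # smoothed energy falls below frame_energy.max() * 10**(stop_db / 20) are marked inactive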
+    frame_length = (fs + 49) // 50
+    x = x[: frame_length * (len(x) // frame_length)]
+
+    frames = x.reshape(-1, frame_length)
+    frame_energy = np.sum(frames ** 2, axis=1)
+    frame_energy_smooth = np.convolve(frame_energy, np.ones(5) / 5, mode='same')
+
+    max_threshold = frame_energy.max() * 10 ** (stop_db/20)
+    vactive = np.ones_like(frames)
+    vactive[frame_energy_smooth < max_threshold, :] = 0
+    vactive = vactive.reshape(-1)
+
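+    # smooth the binary activity track with a normalized half-sine window, one frame long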
+    filter = np.sin(np.arange(frame_length) * np.pi / (frame_length - 1))
+    filter = filter / filter.sum()
+
+    mask = np.convolve(vactive, filter, mode='same')
+
+    return x, mask
+
+def convert_mask(mask, num_frames, frame_size=160, hop_size=40):
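+    # resample the sample-level VAD mask to one averaged value per analysis frame,
+    # padding or truncating to the expected signal length first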
+    num_samples = frame_size + (num_frames - 1) * hop_size
+    if len(mask) < num_samples:
+        mask = np.concatenate((mask, np.zeros(num_samples - len(mask))), dtype=mask.dtype)
+    else:
+        mask = mask[:num_samples]
+
+    new_mask = np.array([np.mean(mask[i*hop_size : i*hop_size + frame_size]) for i in range(num_frames)])
+
+    return new_mask
+
+def power_spectrum(x, window_size=160, hop_size=40, window='hamming'):
+    num_spectra = (len(x) - window_size - hop_size) // hop_size
+    window = scipy.signal.get_window(window, window_size)
+    N = window_size // 2
+
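+    # stack windowed frames at hop_size stride and keep the one-sided power spectrum (N + 1 bins)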
+    frames = np.concatenate([x[np.newaxis, i * hop_size : i * hop_size + window_size] for i in range(num_spectra)]) * window
+    psd = np.abs(np.fft.fft(frames, axis=1)[:, :N + 1]) ** 2
+
+    return psd
+
+
+def frequency_mask(num_bands, up_factor, down_factor):
+
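+    # up_mask spreads energy from lower bands upward with per-band decay up_factor, down_mask
+    # spreads it downward with decay down_factor; their product combines both directions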
+    up_mask = np.zeros((num_bands, num_bands))
+    down_mask = np.zeros((num_bands, num_bands))
+
+    for i in range(num_bands):
+        up_mask[i, : i + 1] = up_factor ** np.arange(i, -1, -1)
+        down_mask[i, i :] = down_factor ** np.arange(num_bands - i)
+
+    return down_mask @ up_mask
+
+
+def rect_fb(band_limits, num_bins=None):
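+    # rectangular filter bank: row i is an indicator of bins [band_limits[i], band_limits[i+1])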
+    num_bands = len(band_limits) - 1
+    if num_bins is None:
+        num_bins = band_limits[-1]
+
+    fb = np.zeros((num_bands, num_bins))
+    for i in range(num_bands):
+        fb[i, band_limits[i]:band_limits[i+1]] = 1
+
+    return fb
+
+
+def compare(x, y, apply_vad=False):
+    """ Modified version of opus_compare for 16 kHz mono signals
+
+    Args:
+        x (np.ndarray): reference input signal scaled to [-1, 1]
+        y (np.ndarray): test signal scaled to [-1, 1]
+
+    Returns:
+        float: perceptually weighted error
+    """
+    # filter bank: bark scale with minimum-2-bin bands and cutoff at 7.5 kHz
+    band_limits = [0, 2, 4, 6, 7, 9, 11, 13, 15, 18, 22, 26, 31, 36, 43, 51, 60, 75]
+    num_bands = len(band_limits) - 1
+    fb = rect_fb(band_limits, num_bins=81)
+
+    # trim samples to same size
+    num_samples = min(len(x), len(y))
+    x = x[:num_samples] * 2**15
+    y = y[:num_samples] * 2**15
+
+    psd_x = power_spectrum(x) + 100000
+    psd_y = power_spectrum(y) + 100000
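+    # the additive energy floor keeps the log spectral ratio bounded in near-silent bins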
+
+    num_frames = psd_x.shape[0]
+
+    # average band energies
+    be_x = (psd_x @ fb.T) / np.sum(fb, axis=1)
+
+    # frequency masking
+    f_mask = frequency_mask(num_bands, 0.1, 0.03)
+    mask_x = be_x @ f_mask.T
+
+    # temporal masking: one-pole recursion carries masking energy across frames
+    for i in range(1, num_frames):
+        mask_x[i, :] += 0.5 * mask_x[i-1, :]
+
+    # apply mask
+    masked_psd_x = psd_x + 0.1 * (mask_x @ fb)
+    masked_psd_y = psd_y + 0.1 * (mask_x @ fb)
+
+    # 2-frame average
+    masked_psd_x = masked_psd_x[1:] +  masked_psd_x[:-1]
+    masked_psd_y = masked_psd_y[1:] +  masked_psd_y[:-1]
+
+    # distortion metric
+    re = masked_psd_y / masked_psd_x
+    im = np.log(re) ** 2
+    Eb = ((im @ fb.T) / np.sum(fb, axis=1))
+    Ef = np.mean(Eb, axis=1)
+
+    if apply_vad:
+        _, mask = compute_vad_mask(x, 16000)
+        mask = convert_mask(mask, Ef.shape[0])
+    else:
+        mask = np.ones_like(Ef)
+
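+    # pool over voice-active frames with a cubic power mean and a final square root:
+    # err = (mean(Ef**3))**(1/6), so frames with large distortion dominate the score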
+    err = np.mean(np.abs(Ef[mask > 1e-6]) ** 3) ** (1/6)
+
+    return float(err)
+
+if __name__ == "__main__":
+    import argparse
+    from scipy.io import wavfile
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('ref', type=str, help='reference wav file')
+    parser.add_argument('deg', type=str, help='degraded wav file')
+    parser.add_argument('--apply-vad', action='store_true')
+    args = parser.parse_args()
+
+
+    fs1, x = wavfile.read(args.ref)
+    fs2, y = wavfile.read(args.deg)
+
+    if fs1 != 16000 or fs2 != 16000:
+        raise ValueError('error: encountered sampling frequency different from 16 kHz')
+
+    x = x.astype(np.float32) / 2**15
+    y = y.astype(np.float32) / 2**15
+
+    err = compare(x, y, apply_vad=args.apply_vad)
+
+    print(f"MOC: {err}")
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/moc2.py
@@ -1,0 +1,190 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import numpy as np
+import scipy.signal
+
+def compute_vad_mask(x, fs, stop_db=-70):
+
+    frame_length = (fs + 49) // 50
+    x = x[: frame_length * (len(x) // frame_length)]
+
+    frames = x.reshape(-1, frame_length)
+    frame_energy = np.sum(frames ** 2, axis=1)
+    frame_energy_smooth = np.convolve(frame_energy, np.ones(5) / 5, mode='same')
+
+    max_threshold = frame_energy.max() * 10 ** (stop_db/20)
+    vactive = np.ones_like(frames)
+    vactive[frame_energy_smooth < max_threshold, :] = 0
+    vactive = vactive.reshape(-1)
+
+    filter = np.sin(np.arange(frame_length) * np.pi / (frame_length - 1))
+    filter = filter / filter.sum()
+
+    mask = np.convolve(vactive, filter, mode='same')
+
+    return x, mask
+
+def convert_mask(mask, num_frames, frame_size=160, hop_size=40):
+    num_samples = frame_size + (num_frames - 1) * hop_size
+    if len(mask) < num_samples:
+        mask = np.concatenate((mask, np.zeros(num_samples - len(mask))), dtype=mask.dtype)
+    else:
+        mask = mask[:num_samples]
+
+    new_mask = np.array([np.mean(mask[i*hop_size : i*hop_size + frame_size]) for i in range(num_frames)])
+
+    return new_mask
+
+def power_spectrum(x, window_size=160, hop_size=40, window='hamming'):
+    num_spectra = (len(x) - window_size - hop_size) // hop_size
+    window = scipy.signal.get_window(window, window_size)
+    N = window_size // 2
+
+    frames = np.concatenate([x[np.newaxis, i * hop_size : i * hop_size + window_size] for i in range(num_spectra)]) * window
+    psd = np.abs(np.fft.fft(frames, axis=1)[:, :N + 1]) ** 2
+
+    return psd
+
+
+def frequency_mask(num_bands, up_factor, down_factor):
+
+    up_mask = np.zeros((num_bands, num_bands))
+    down_mask = np.zeros((num_bands, num_bands))
+
+    for i in range(num_bands):
+        up_mask[i, : i + 1] = up_factor ** np.arange(i, -1, -1)
+        down_mask[i, i :] = down_factor ** np.arange(num_bands - i)
+
+    return down_mask @ up_mask
+
+
+def rect_fb(band_limits, num_bins=None):
+    num_bands = len(band_limits) - 1
+    if num_bins is None:
+        num_bins = band_limits[-1]
+
+    fb = np.zeros((num_bands, num_bins))
+    for i in range(num_bands):
+        fb[i, band_limits[i]:band_limits[i+1]] = 1
+
+    return fb
+
+
+def _compare(x, y, apply_vad=False, factor=1):
+    """ Modified version of opus_compare for 16 kHz mono signals
+
+    Args:
+        x (np.ndarray): reference input signal scaled to [-1, 1]
+        y (np.ndarray): test signal scaled to [-1, 1]
+        apply_vad (bool): if True, restrict error averaging to voice-active regions
+        factor (int): resolution factor scaling band limits, window size and hop size (1 = native)
+
+    Returns:
+        float: perceptually weighted error
+    """
+    # filter bank: bark scale with minimum-2-bin bands and cutoff at 7.5 kHz
+    band_limits = [factor * b for b in [0, 2, 4, 6, 7, 9, 11, 13, 15, 18, 22, 26, 31, 36, 43, 51, 60, 75]]
+    window_size = factor * 160
+    hop_size = factor * 40
+    num_bins = window_size // 2 + 1
+    num_bands = len(band_limits) - 1
+    fb = rect_fb(band_limits, num_bins=num_bins)
+
+    # trim samples to same size
+    num_samples = min(len(x), len(y))
+    x = x[:num_samples].copy() * 2**15
+    y = y[:num_samples].copy() * 2**15
+
+    psd_x = power_spectrum(x, window_size=window_size, hop_size=hop_size) + 100000
+    psd_y = power_spectrum(y, window_size=window_size, hop_size=hop_size) + 100000
+
+    num_frames = psd_x.shape[0]
+
+    # average band energies
+    be_x = (psd_x @ fb.T) / np.sum(fb, axis=1)
+
+    # frequency masking
+    f_mask = frequency_mask(num_bands, 0.1, 0.03)
+    mask_x = be_x @ f_mask.T
+
+    # temporal masking: one-pole recursion with leak 0.5**factor, matching the scaled frame rate
+    for i in range(1, num_frames):
+        mask_x[i, :] += (0.5 ** factor) * mask_x[i-1, :]
+
+    # apply mask
+    masked_psd_x = psd_x + 0.1 * (mask_x @ fb)
+    masked_psd_y = psd_y + 0.1 * (mask_x @ fb)
+
+    # 2-frame average
+    masked_psd_x = masked_psd_x[1:] +  masked_psd_x[:-1]
+    masked_psd_y = masked_psd_y[1:] +  masked_psd_y[:-1]
+
+    # distortion metric
+    re = masked_psd_y / masked_psd_x
+    #im = re - np.log(re) - 1
+    im = np.log(re) ** 2
+    Eb = ((im @ fb.T) / np.sum(fb, axis=1))
+    Ef = np.mean(Eb ** 1, axis=1)
+
+    if apply_vad:
+        _, mask = compute_vad_mask(x, 16000)
+        mask = convert_mask(mask, Ef.shape[0])
+    else:
+        mask = np.ones_like(Ef)
+
+    err = np.mean(np.abs(Ef[mask > 1e-6]) ** 3) ** (1/6)
+
+    return float(err)
+
+def compare(x, y, apply_vad=False):
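+    # l2 combination of multi-resolution scores; only the factor=1 (native 16 kHz) resolution is active here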
+    err = np.linalg.norm([_compare(x, y, apply_vad=apply_vad, factor=1)], ord=2)
+    return err
+
+if __name__ == "__main__":
+    import argparse
+    from scipy.io import wavfile
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('ref', type=str, help='reference wav file')
+    parser.add_argument('deg', type=str, help='degraded wav file')
+    parser.add_argument('--apply-vad', action='store_true')
+    args = parser.parse_args()
+
+
+    fs1, x = wavfile.read(args.ref)
+    fs2, y = wavfile.read(args.deg)
+
+    if fs1 != 16000 or fs2 != 16000:
+        raise ValueError('error: encountered sampling frequency different from 16 kHz')
+
+    x = x.astype(np.float32) / 2**15
+    y = y.astype(np.float32) / 2**15
+
+    err = compare(x, y, apply_vad=args.apply_vad)
+
+    print(f"MOC: {err}")
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/process_dataset.sh
@@ -1,0 +1,98 @@
+#!/bin/bash
+
+if [ ! -f "$PYTHON" ]
+then
+    echo "PYTHON variable does not link to a file. Please point it to your python executable."
+    exit 1
+fi
+
+if [ ! -f "$TESTMODEL" ]
+then
+    echo "TESTMODEL variable does not link to a file. Please point it to your copy of test_model.py"
+    exit 1
+fi
+
+if [ ! -f "$OPUSDEMO" ]
+then
+    echo "OPUSDEMO variable does not link to a file. Please point it to your patched version of opus_demo."
+    exit 1
+fi
+
+if [ ! -f "$LACE" ]
+then
+    echo "LACE variable does not link to a file. Please point it to your copy of the LACE checkpoint."
+    exit 1
+fi
+
+if [ ! -f "$NOLACE" ]
+then
+    echo "LACE variable does not link to a file. Please point it to your copy of the NOLACE checkpoint."
+    exit 1
+fi
+
+case $# in
+    2) INPUT=$1; OUTPUT=$2;;
+    *) echo "process_dataset.sh <input folder> <output folder>"; exit 1;;
+esac
+
+if [ -d $OUTPUT ]
+then
+    echo "output folder $OUTPUT exists, aborting..."
+    exit 1
+fi
+
+mkdir -p $OUTPUT
+
+if [ "$BITRATES" == "" ]
+then
+    BITRATES=( 6000 7500 9000 12000 15000 18000 24000 32000 )
+    echo "BITRATES variable not defined. Proceeding with default bitrates ${BITRATES[@]}."
+fi
+
+
+echo "LACE=${LACE}" > ${OUTPUT}/info.txt
+echo "NOLACE=${NOLACE}" >>  ${OUTPUT}/info.txt
+
+ITEMFILE=${OUTPUT}/items.txt
+BITRATEFILE=${OUTPUT}/bitrates.txt
+
+FPROCESSING=${OUTPUT}/processing
+FCLEAN=${OUTPUT}/clean
+FOPUS=${OUTPUT}/opus
+FLACE=${OUTPUT}/lace
+FNOLACE=${OUTPUT}/nolace
+
+mkdir -p $FPROCESSING $FCLEAN $FOPUS $FLACE $FNOLACE
+
+echo "${BITRATES[@]}" > $BITRATEFILE
+
+for fn in $(find $INPUT -type f -name "*.wav")
+do
+    UUID=$(uuid)
+    echo "$UUID $fn" >> $ITEMFILE
+    PIDS=(  )
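+    # decode at every bitrate; LACE/NoLACE enhancement runs as background jobs that are
+    # all awaited before moving on to the next item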
+    for br in ${BITRATES[@]}
+    do
+        # run opus
+        pfolder=${FPROCESSING}/${UUID}_${br}
+        mkdir -p $pfolder
+        sox $fn -c 1 -r 16000 -b 16 -e signed-integer $pfolder/clean.s16
+        (cd ${pfolder} && $OPUSDEMO voip 16000 1 $br clean.s16 noisy.s16)
+
+        # copy clean and opus
+        sox -c 1 -r 16000 -b 16 -e signed-integer $pfolder/clean.s16 $FCLEAN/${UUID}_${br}_clean.wav
+        sox -c 1 -r 16000 -b 16 -e signed-integer $pfolder/noisy.s16 $FOPUS/${UUID}_${br}_opus.wav
+
+        # run LACE
+        $PYTHON $TESTMODEL $pfolder $LACE $FLACE/${UUID}_${br}_lace.wav &
+        PIDS+=( "$!" )
+
+        # run NoLACE
+        $PYTHON $TESTMODEL $pfolder $NOLACE $FNOLACE/${UUID}_${br}_nolace.wav &
+        PIDS+=( "$!" )
+    done
+    for pid in ${PIDS[@]}
+    do
+        wait $pid
+    done
+done
--- /dev/null
+++ b/dnn/torch/osce/stndrd/evaluation/run_nomad.py
@@ -1,0 +1,138 @@
+"""
+/* Copyright (c) 2023 Amazon
+   Written by Jan Buethe */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+"""
+
+import os
+import argparse
+import tempfile
+import shutil
+
+import pandas as pd
+from scipy.spatial.distance import cdist
+from scipy.io import wavfile
+import numpy as np
+
+from nomad_audio.nomad import Nomad
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('folder', type=str, help='folder with processed items')
+parser.add_argument('--full-reference', action='store_true', help='use NOMAD as full-reference metric')
+parser.add_argument('--device', type=str, default=None, help='device for Nomad')
+
+
+def get_bitrates(folder):
+    with open(os.path.join(folder, 'bitrates.txt')) as f:
+        x = f.read()
+
+    bitrates = [int(y) for y in x.rstrip('\n').split()]
+
+    return bitrates
+
+def get_itemlist(folder):
+    with open(os.path.join(folder, 'items.txt')) as f:
+        lines = f.readlines()
+
+    items = [x.split()[0] for x in lines]
+
+    return items
+
+
+def nomad_wrapper(ref_folder, deg_folder, full_reference=False, ref_embeddings=None, device=None):
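+    # by default NOMAD scores deg_folder against a non-matching reference set; in full-reference
+    # mode both folders are embedded and each degraded file is scored by the distance to its
+    # matching reference embedding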
+    model = Nomad(device=device)
+    if not full_reference:
+        results = model.predict(nmr=ref_folder, deg=deg_folder)[0].to_dict()['NOMAD']
+        return results, None
+    else:
+        if ref_embeddings is None:
+            print(f"Computing reference embeddings from {ref_folder}")
+            ref_data = pd.DataFrame(sorted(os.listdir(ref_folder)))
+            ref_data.columns = ['filename']
+            ref_data['filename'] = [os.path.join(ref_folder, x) for x in ref_data['filename']]
+            ref_embeddings = model.get_embeddings_csv(model.model, ref_data).set_index('filename')
+
+        print(f"Computing degraded embeddings from {deg_folder}")
+        deg_data = pd.DataFrame(sorted(os.listdir(deg_folder)))
+        deg_data.columns = ['filename']
+        deg_data['filename'] = [os.path.join(deg_folder, x) for x in deg_data['filename']]
+        deg_embeddings = model.get_embeddings_csv(model.model, deg_data).set_index('filename')
+
+        dist = np.diag(cdist(ref_embeddings, deg_embeddings)) # wasteful: full distance matrix computed, only matched pairs (diagonal) used
+        test_files = [x.split('/')[-1].split('.')[0] for x in deg_embeddings.index]
+
+        results = dict(zip(test_files, dist))
+
+        return results, ref_embeddings
+
+
+
+
+def nomad_process_all(folder, full_reference=False, device=None):
+    bitrates = get_bitrates(folder)
+    items = get_itemlist(folder)
+    with tempfile.TemporaryDirectory() as dir:
+        cleandir  = os.path.join(dir, 'clean')
+        opusdir   = os.path.join(dir, 'opus')
+        lacedir   = os.path.join(dir, 'lace')
+        nolacedir = os.path.join(dir, 'nolace')
+
+        # stage files into per-condition folders with matching basenames so scores can be paired by key
+        for d in [cleandir, opusdir, lacedir, nolacedir]: os.makedirs(d)
+        for br in bitrates:
+            for item in items:
+                for cond in ['clean', 'opus', 'lace', 'nolace']:
+                    shutil.copyfile(os.path.join(folder, cond, f"{item}_{br}_{cond}.wav"), os.path.join(dir, cond, f"{item}_{br}.wav"))
+
+        nomad_opus, ref_embeddings   = nomad_wrapper(cleandir, opusdir, full_reference=full_reference, ref_embeddings=None)
+        nomad_lace, ref_embeddings   = nomad_wrapper(cleandir, lacedir, full_reference=full_reference, ref_embeddings=ref_embeddings)
+        nomad_nolace, ref_embeddings = nomad_wrapper(cleandir, nolacedir, full_reference=full_reference, ref_embeddings=ref_embeddings)
+
+    results = dict()
+    for br in bitrates:
+        results[br] = np.zeros((len(items), 3))
+        for i, item in enumerate(items):
+            key = f"{item}_{br}"
+            results[br][i, 0] = nomad_opus[key]
+            results[br][i, 1] = nomad_lace[key]
+            results[br][i, 2] = nomad_nolace[key]
+
+    return results
+
+
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+
+    items = get_itemlist(args.folder)
+    bitrates = get_bitrates(args.folder)
+
+    results = nomad_process_all(args.folder, full_reference=args.full_reference, device=args.device)
+
+    np.save(os.path.join(args.folder, 'results_nomad.npy'), results)
+
+    print("Done.")
--- /dev/null
+++ b/dnn/torch/osce/stndrd/presentation/endoscopy.py
@@ -1,0 +1,205 @@
+""" module for inspecting models during inference """
+
+import os
+
+import yaml
+import matplotlib.pyplot as plt
+import matplotlib.animation as animation
+
+import torch
+import numpy as np
+
+# stores entries {key : {'fid' : fid, 'fs' : fs, 'dim' : dim, 'dtype' : dtype}}
+_state = dict()
+_folder = 'endoscopy'
+
+def get_gru_gates(gru, input, state):
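+    """ recomputes reset, update and new gate activations of a single-layer torch GRU for one time step """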
+    hidden_size = gru.hidden_size
+
+    direct = torch.matmul(gru.weight_ih_l0, input.squeeze())
+    recurrent = torch.matmul(gru.weight_hh_l0, state.squeeze())
+
+    # reset gate
+    start, stop = 0 * hidden_size, 1 * hidden_size
+    reset_gate = torch.sigmoid(direct[start : stop] + gru.bias_ih_l0[start : stop] + recurrent[start : stop] + gru.bias_hh_l0[start : stop])
+
+    # update gate
+    start, stop = 1 * hidden_size, 2 * hidden_size
+    update_gate = torch.sigmoid(direct[start : stop] + gru.bias_ih_l0[start : stop] + recurrent[start : stop] + gru.bias_hh_l0[start : stop])
+
+    # new gate
+    start, stop = 2 * hidden_size, 3 * hidden_size
+    new_gate = torch.tanh(direct[start : stop] + gru.bias_ih_l0[start : stop] + reset_gate * (recurrent[start : stop] +  gru.bias_hh_l0[start : stop]))
+
+    return {'reset_gate' : reset_gate, 'update_gate' : update_gate, 'new_gate' : new_gate}
+
+
+def init(folder='endoscopy'):
+    """ sets up output folder for endoscopy data """
+
+    global _folder
+    _folder = folder
+
+    if not os.path.exists(folder):
+        os.makedirs(folder)
+    else:
+        print(f"warning: endoscopy folder {folder} exists. Content may be lost or inconsistent results may occur.")
+
+def write_data(key, data, fs):
+    """ appends data to previous data written under key """
+
+    global _state
+
+    # convert to numpy if torch.Tensor is given
+    if isinstance(data, torch.Tensor):
+        data = data.detach().numpy()
+
+    if key not in _state:
+        _state[key] = {
+            'fid'   : open(os.path.join(_folder, key + '.bin'), 'wb'),
+            'fs'    : fs,
+            'dim'   : tuple(data.shape),
+            'dtype' : str(data.dtype)
+        }
+
+        with open(os.path.join(_folder, key + '.yml'), 'w') as f:
+            f.write(yaml.dump({'fs' : fs, 'dim' : tuple(data.shape), 'dtype' : str(data.dtype).split('.')[-1]}))
+    else:
+        if _state[key]['fs'] != fs:
+            raise ValueError(f"fs changed for key {key}: {_state[key]['fs']} vs. {fs}")
+        if _state[key]['dtype'] != str(data.dtype):
+            raise ValueError(f"dtype changed for key {key}: {_state[key]['dtype']} vs. {str(data.dtype)}")
+        if _state[key]['dim'] != tuple(data.shape):
+            raise ValueError(f"dim changed for key {key}: {_state[key]['dim']} vs. {tuple(data.shape)}")
+
+    _state[key]['fid'].write(data.tobytes())
+
+def close(folder='endoscopy'):
+    """ clean up """
+    for key in _state.keys():
+        _state[key]['fid'].close()
+
+
+def read_data(folder='endoscopy'):
+    """ retrieves written data as numpy arrays """
+
+
+    keys = [name[:-4] for name in os.listdir(folder) if name.endswith('.yml')]
+
+    return_dict = dict()
+
+    for key in keys:
+        with open(os.path.join(folder, key + '.yml'), 'r') as f:
+            value = yaml.load(f.read(), yaml.FullLoader)
+
+        with open(os.path.join(folder, key + '.bin'), 'rb') as f:
+            data = np.frombuffer(f.read(), dtype=value['dtype'])
+
+        value['data'] = data.reshape((-1,) + value['dim'])
+
+        return_dict[key] = value
+
+    return return_dict
+
+def get_best_reshape(shape, target_ratio=1):
+    """ calculated the best 2d reshape of shape given the target ratio (rows/cols)"""
+
+    if len(shape) > 1:
+        pixel_count = 1
+        for s in shape:
+            pixel_count *= s
+    else:
+        pixel_count = shape[0]
+
+    if pixel_count == 1:
+        return (1,)
+
+    num_columns = int((pixel_count / target_ratio)**.5)
+
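+    # step down from ~sqrt(pixel_count / target_ratio) to the nearest divisor of pixel_count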
+    while (pixel_count % num_columns):
+        num_columns -= 1
+
+    num_rows = pixel_count // num_columns
+
+    return (num_rows, num_columns)
+
+def get_type_and_shape(shape):
+
+    # can happen if data is one dimensional
+    if len(shape) == 0:
+        shape = (1,)
+
+    # calculate pixel count
+    if len(shape) > 1:
+        pixel_count = 1
+        for s in shape:
+            pixel_count *= s
+    else:
+        pixel_count = shape[0]
+
+    if pixel_count == 1:
+        return 'plot', (1, )
+
+    # stay with shape if already 2-dimensional
+    if len(shape) == 2:
+        if (shape[0] != pixel_count) or (shape[1] != pixel_count):
+            return 'image', shape
+
+    return 'image', get_best_reshape(shape)
+
+def make_animation(data, filename, start_index=80, stop_index=-80, interval=20, half_signal_window_length=80):
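+    """ renders a synchronized animation of all recorded streams: scalar streams are drawn as
+        sliding waveform plots, multi-dimensional streams as per-frame images """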
+
+    # determine plot setup
+    num_keys = len(data.keys())
+
+    num_rows = int((num_keys * 3/4) ** .5)
+
+    num_cols = (num_keys + num_rows - 1) // num_rows
+
+    fig, axs = plt.subplots(num_rows, num_cols)
+    fig.set_size_inches(num_cols * 5, num_rows * 5)
+
+    display = dict()
+
+    fs_max = max([val['fs'] for val in data.values()])
+
+    num_samples = max([val['data'].shape[0] for val in data.values()])
+
+    keys = sorted(data.keys())
+
+    # inspect data
+    for i, key in enumerate(keys):
+        axs[i // num_cols, i % num_cols].title.set_text(key)
+
+        display[key] = dict()
+
+        display[key]['type'], display[key]['shape'] = get_type_and_shape(data[key]['dim'])
+        display[key]['down_factor'] = data[key]['fs'] / fs_max
+
+    start_index = max(start_index, half_signal_window_length)
+    while stop_index < 0:
+        stop_index += num_samples
+
+    stop_index = min(stop_index, num_samples - half_signal_window_length)
+
+    # actual plotting
+    frames = []
+    for index in range(start_index, stop_index):
+        ims = []
+        for i, key in enumerate(keys):
+            feature_index = int(round(index * display[key]['down_factor']))
+
+            if display[key]['type'] == 'plot':
+                ims.append(axs[i // num_cols, i % num_cols].plot(data[key]['data'][index - half_signal_window_length : index + half_signal_window_length], marker='P', markevery=[half_signal_window_length], animated=True, color='blue')[0])
+
+            elif display[key]['type'] == 'image':
+                ims.append(axs[i // num_cols, i % num_cols].imshow(data[key]['data'][index].reshape(display[key]['shape']), animated=True))
+
+        frames.append(ims)
+
+    ani = animation.ArtistAnimation(fig, frames, interval=interval, blit=True, repeat_delay=1000)
+
+    if not filename.endswith('.mp4'):
+        filename += '.mp4'
+
+    ani.save(filename)
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/osce/stndrd/presentation/lace_demo.ipynb
@@ -1,0 +1,313 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "import matplotlib.animation\n",
+    "from scipy.io import wavfile\n",
+    "import scipy.signal\n",
+    "import torch\n",
+    "\n",
+    "from playback import make_playback_animation\n",
+    "from endoscopy import read_data\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.rcParams.update({\n",
+    "    \"text.usetex\": True,\n",
+    "    \"font.family\": \"Helvetica\",\n",
+    "    \"font.size\": 24\n",
+    "})"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD7CAYAAABzGc+QAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Wa4l2ZIliK3danOa25mZm7u/96JLZFaABQKVVTWDyK/6DRIcQQyhwBkQzAEQqJhBoXIGGTMosv4IssDMiMx4z5+7m7nd5nSqult+iMjWcz1ek5UR8IoATAGHu5vdexrVvWWLLFlriaq14vP1+fp8fb4+X//0Lv2/9wf4fH2+Pl+fr8/Xf971OYB/vj5fn6/P1z/R63MA/3x9vj5fn69/otfnAP75+nx9vj5f/0SvzwH88/X5+nx9vv6JXp8D+Ofr8/X5+nz9E71+bwBXSv25Uurf/p6//zOl1F/8w360z9fn6/P1+fp8/a7r9wbwWuu/+W1/p5T6c/6Zv+L//7N/uI/2+fp8fb4+X5+v33X9fSGU/xbAX/N//zWAf/n3fL3P1+fr8/X5+nz9J1727/n7tz/6/4cf/wBDK38BAE65//re3iFXOjmsrlAAlAJKBXJVqBWo4H8qkGtFAeCUglaAUQBQ6Wf5ZxT/uUaFVvS7uSpoVeFUhWhN6fcrSgXonemq/P/y3koBChVWVSgFaFWgAGhVUaCQi0Kpit+D3hf8mbWq0Eq+V6V/5L9R+X0UQrbIlV4L/Lnk0qgwurbPY3WBUQUF9OVy1ShVtW8Qi0aBQioAoK7enz5Uu5+g+64VfU7D30upilToNUNR9D4AfWdUvh90Ffnz9fZBvnyqCrEARgP2+r7zz5cqz2G9+xVA4Wcpz2ewgd5TVdSikKpBLgqpaHoeqEhVt3WklLyS/DdQivrROlJQav1cIkBenzf9TKmA0+uzV/xepQKnBNRaMVoNc/X9wGvQXj1Deg/Fa6B9vPXi9yz8npnXoDwPd/VaBQqxKFhV4XThZ1f5vqn2Xcr1mgaQ+R7oq/er/JyMquhMfvWBaqXXeCXObp+TPl+pr7+K5vdKhe6v1wVaAU5n+XX+PQXFv5nqutZyXdeF07Q2vJY9VFGrQiy67RF5Ddl/sh7ks7Vnys9Vvousi+vfx9XPpKvncP3zwBpb5I+sBu9l+m6J73OueHXvZI/IMzCq/p3nrhVajJCfbc+QP1etwKf84Yda61v86Pr7BvBnAPe/6wdqrX8J4C8B4I39ov53N/8XbCzgdcXelbbQ5CqgjZQrEIvCMQJzBh46YGsrvCkoVeHDrBEK2k2wCugMcOcLACAUhY0tePAR3mSMJiFXjVwVLsliLhp7FzGahFg1alWYs0Gu62cZTIbRBRpAbxIexjNy1ViSxZwszsnB64LRxrZ5ehPR2YRUDG0w3tyDCzC6IGaDUjSm5DAni28uG2gAd92CVDSO0WGwCbd+gdMFVmfs+hmjX5CyQa4a3x/3OEaP0SQYXfC89MhVwWm6N3M20Kqi5w1Km0XjnCx6k7F1ERsbMdjYvuslelyyxf/6ssVjULj1FERiVdAAvClwqmKw8u+EfHXPQtaYi8Ixatz6gluX2mvPWSPWNTB4TQeUlH/yuUPRuPEB/+Xb7+BsgnMR52nEh+Mel+Rwig63fsGuW9prT8khZY3n0CEWOkpTVfh28igVuPUZna7tWXb8XrkqHJPF02L5kK50MAN46BJ2NqGAgvDHxeMYNf79Eaio+D/eVexswdbR/cuF11XWCEVjShpWV/SmYDQFg004J4spabzpAzY2IfB9u/C/Na/7Y9JwquIXmwlGFb5/Fs/RodP0nqNJ2PoF5+gxJbduxuAxZ43RZigA380eU6bgr1Fx1xX0umDvI3qTsfeBk4WCOVmcosclWZyTWddF1khFwfKhdusTrC5IRSMWjU+LpQRLV74XtO++HicoPmisKvAmI2SDpRjM2SAVjV9PHZ6DRm8qnK546BI6vSZLG5tgVIHTBalqTMliygaPwdHnVvS+vckUEAH+XAreFDpkeT97TfdS1kfIGgUUN1KlA2gpGqeoYDUw2tL27zkpXLJCb+j97nzGaEpbH7EoxEprPxSFQ3wdl0YLxEJxbGOBW18wZ4U5K9x3GTtb8BIN5iwJITBY+vyyR/6vf/P/+I+/Kb7+fQP4/4w1C/9jAL+12QlwIDPAzlJW/cNi2oluFG3sUBQuib7o1lZEPt0kuwmZvtLGVox1PUHnomAUaIOCbo5RFbd+gdGUbYYM5Gow2oQRdPLFqnGKDkvR7ZTcuQgrD1wWRtUImW6XAmVCEkABYOMCeheQskEqBt+etzhFhzf9jA0Hb6szYqbN0VnaCH9kMioHlFw0FD+0wD+nVEVMFsvV56HN5mBUheOMmxZ8hFIVPlto/ozWFHidUKEQi3mVISzZYuFKIBWNVBUeuoStUy0bzZmyyF7TJpMNlorGnA0eg6XPoSv2LuPLgYKrBnDJBiFraAX0umBjE5yq7ZA+RYdUgaUYWAU+WBKsybA2wdmE3gfs+wldShhtxOAiehcwR4+QDZzOMKrgjjcrZasaMwceyWSnbGCKRr7KYAFKGEJWmLPGQ5fxxlMQAdCC1JwpW7zxgILCxkQMJmM0GbkqvGSDOWu8BIvOFDx0CYpfX4LIyD/f6YJaFc7JtmDrVMUxGdSqcMfv35v1NQoUOl4PZ37uWwCDpeRkSRaxaHg+sCVY0XtpLJkCzIfZoDf0POZsMCULy4daLHT4RF7vGoDRFV2lwN2bCsvP0OuMWHVbr5L05KIQNa2Xx+BhJYDrgi4XXDIF795k9Cbj63HG+54+t+zvVBW+nzoUKAw2wSrA6IIlGXxaPJ6CxV+fgJ1T+HIoQAZeoqFgp4ApaSwFuPcUBGOhtfwYLFJRmDNntaBM2KoKo4HBVORK1WOnab3HCsyZgvdzqBitgtcKsVg4XeE4ex75sBhtQVcUjKL3DPQYEAq912gpqJ+Twq0v+HqkZ2042YpF4+NiMWeFO13Qm0L78HfE1N8bwLkx+d8opf5cGppKqX9ba/1XtdZ/o5T67/lnbqWZ+dsuKq2ApYCzN/Ai5wxaA70p2DsKVKnSafWgKKOOnJXnClwSBf6ek4VYwEGAFsc/20U4nV9lmU4X9DXhEDqcEwUepSpi0chFYXQUVL3OUIo2MH0+CpC1AnN2+GEaKKiZDF00jK5ts4VsEIvBaBJ6k/ngMK1k611ELhrHueeKgN575qAr2USuCufk8Bw63GaDfdVwfGB4zqLphF4Dd28TZbJXwT9ljTn16G3C3s8N1pmja9mbVB9zNvy91pM/29JghVgU/sO5Q6n0rKyu2NiMUDSeg8G9Bx663DLSzGUyrxkAljMyuuf/34NDyMBDV7F1Fe/6GTu/oO8WOJdgOcPdZQ29VIRsYFRph2YuGt4keFNxO1ygVcUldJiTxffTgKkq/PpsYRR9rk5TQInt/hqEorC1BV8PuT3DpVBFJusjFIKG/mib4XWBN1Q5Fr7HvckIWeOUFJRS8LwpnS
5t89VKMMIl2XaAjDa3YJsrHaSeIYNLtshcNeWqsBTVnsloNcOGFbpmnCtVT05VdFfJx0NHWWVvDBKvPwmqGiuM4A3tE6MrnuYeIXvsXMLGrlXUMTpE/mypKrwEj8QQVgFwTqZVzaMtuL3K8qlateh0wciBail0AAjUVEBZt1UVf7C90F6wifZh1gjZ4JgMCoD3Az0byWLPSeA3Sux6QwfiSzRwXOlFTkh6AxhVsLEFuQLHZAgGqpLNyxpQOCWNl0AZ+UOncIrAU6x87xW+Hit2lqpSSv4UslbI1QCo2FiuSngtETREcEysCj/MDnuXsbF0TwhCpmcSq4Lhg/8aXv3x9XsDOAflux/92b+6+u9/zf/5O4M3AIRc8ctzxc4phFLx7RwAAJ02uLEGdaBFHgvhSbkCe6cIOuHNR5tJMGgg8c8eYkW0nEmqiiUbhKIxZ4uQDaa8loWSFeSyZhF0sNBNvPULLZ4rRMroisEFdCXB64RL9Pi09BhsglcRRlGGXXijakN4+ae5R8iGYBwXYDlL6myizEw
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAXiUlEQVR4nO3dy24kSZaf8c/M3D2CwUuyqlrdDQ0wmgvUAjQbLbWZrZ5GL6G9VtLTaDfaSS8gAcLoAkGDGVR3VVZlJsmIcHc7WkRkVndVw50RhXQBru9Xi6wkg+QJMpJ/mPuxYykikCTpc8v/rwuQJP3/wcCRJC3CwJEkLcLAkSQtwsCRJC3CwJEkLaKZemdKTUACfto6HTGkz1WUJGl9Zlc4ibJEHZKklZsNnFLul6hDkrRy3sORJC1iNnAi6hJ1SJJWbv4eTprsK5Ak6VVmA6fGcYk6JEkrNxs4OXVL1CFJWrnZwOkau9QkST/fbOAM436JOiRJK/eKezj9EnVIklbuFV1qbtWRJP1885MG8maJOiRJKzd/Sa0OS9QhSVq5V6xwbIuWJP18s4EzVjd+SpJ+PjsCJEmLmA2cttwuUYckaeXmAyffLFGHJGnlZgNnk++WqEOStHLzK5y0XaIOSdLKzbdF43k4kqSfbzZwtvFD00DJbz5rMZKk9ZoNnJvY/fBgN4FKkq40Gzi3sSWd7+NE1M9ekCRpnWYD54UDKbUARDhXTZJ0nfkjpsnU+h4oFPfkSJKuNBs4d5yOJ2jKg1MHJElXmw2c+9KS8z1d84aSbJGWJF1nvmmgyeTU2TAgSfpZ5vfhlERbbsmppc27uYdLkvRHzTcNJAgqm3LHJjlXTZJ0nfmNnwU25QGALu1IjrqRJF3hFZfUgk2+I5G5rXcE4xJ1SZJWZjZwIuBlfPv7b/mM5UiS1mo2cL49JoZ6oEkbNrEB0gJlSZLWZjZwnoYgp5Yu7XjKT7jCkSRdYzZwxghKagjchyNJut5s4OzHyrE+0ceeLjpycryNJOlys4HTRzCMT2QKbbSQZj9EkqSfmE2Pt/FMRKVJGzKZ7Dw1SdIVZgMnkeiaN2QyL+mFsb4sUZckaWXm9+EQNHnDU7ylT0ci+iXqkiStzGzgHFPPGD19faFSwUkDkqQrzHeppWcA2nzDe7757AVJktZpNnDe8y3DuCdT2Nd3Du+UJF1lNj1ueeRtbvgwfA1AMHz2oiRJ6zO/DycdADx8TZL0s8wGzi7uOfZvqdEbOpKkq83PUmMg4sBQD5TULlGTJGmFZgPnkF4IBvrxiaHul6hJkrRCr5g0UEg0NOWGJm+XqEmStEKzgdNEQ0obEplEBsoCZUmS1uZVXWo5dwSV/fgOJw1Ikq4xP2kgPlDrkVoHwrCRJF3pFcM7K8FISpkabvqUJF1nNnC6tCOiJ6XMcfh+iZokSSv0yuM7R2odICqQPmtBkqR1mt/4eT7/JhgpZQfE565JkrRCs4FTUktKWyIqyZZoSdKVXrXCidgDlRqHBUqSJK3RbODUcyv0pvmSnDafvSBJ0jq9qksNYKgvrnAkSVebPYDtJm6BxDA+LVCOJGmtXjUtGuI03iYGnKUmSbrGK/fhQE4tQY+z1CRJ15gNnGO8ADDWFyIMG0nSdeaPJzh3pg3jO5JTBiRJV5rf+MnpWOmS7+jaX372giRJ6/SqA9gAcu7IabapTZKkP2o2cHZxBxS2zePnr0aStFqzS5agUvIdx/GJ3uMJJElXelVbdFAZ64Fw0oAk6UqvmKVWz+fgQOCJn5Kk67yiS62hxtN5yoAkSdeZP57gvKppyu1nL0aStF6vmKW2B2AYn0ip++wFSZLW6ZWz1Ao1DqTzJlBJki41Gzg3sQOqh69Jkn6WVzUNQJDSqwdLS5L0E7Mp8iG9AxI1BpKjbSRJV5oNnJYOCKK+2BotSbrabOBsYkvO9wQjTblfoiZJ0grNr3CiIafzKsfTPiVJV3plJ0AFCiVvgPI565EkrdT8JTVaaj2SUstYD+AqR5J0hdm2s0yiKffU6JeoR5K0UvP3cFJm09wTMdDkmyVqkiSt0Pzwzqi89N9S44WcWuepSZKu8ooVTiFiIKUN2WkDkqQrTSZIoiEBOXckMjm1RBwXKk2StCbTTQOpoc3noEmViBFIQCxSnCRpPWaukVXGiHM7NOd7OE6NliRdbjJwmvKGPio5nc7BKan1mAJJ0lUmA6cttxQSJZ8605q0odgaLUm6wmTg1Bi4awpDfQGg0FLjsEhhkqR1mWwaGMYnbptMk2+oMXCIDwzj26VqkyStyHRb9PnAtSZvzn/PnLrUJEm6zGTgbJtHMlCjsin3PPJrmvK4TGWSpFWZDJwu39FkqNGTUqHQ2DQgSbrKZOD09Zldc7qUVqPnOb1fqi5J0spMNg309YUxYKxHahoAiKiLFCZJWpfZtmg438tJDYV2kaIkSeszGTgld2xzUFJLovDl+BU5z57ZJknST0wHTmq5bYL9+D1tvuEOGwYkSdeZHm2TdzQ5GOuRm/wGgDR/hI4kST8xcw9nZKiJu/ZXALxwpLUtWpJ0hcnACSqHmmjShpGeDS2BXWqSpMtNB06MjAHbdMcQBx5yy3F8Wqo2SdKKTAbOGANthjY2JDI3pVCjX6o2SdKKzATOgb5CJjNGTxAkylK1SZJWZDJwds1X7EqQyGzyHV1O54nRkiRdZjI98vl4guf0nkxhWwwcSdJ1pvfhsGVbKsP5lM9t+SGEJEm6xPTxBOmGJgf3fPnpbSU5T02SdLnZ5crLmNnGjmM+khOUtFmiLknSykyucJ7iLd8cCh/SO7roqAGH8d1StUmSVmRyhZMplAQtHbf1lmP94cgCSZIuMbnCSWQyQc+RQuGmQFdul6pNkrQi08cT0NJHYsOWICjew5EkXWl2eGdJkCPTUngZcbSNJOkqs23RzwMc0p5KcByXKkuStDaTgbOPDzT5NEutOc9Q68rdIoVJktZlMnAO9QNNgjY6WjKNU20kSVeanRYNMKSBgSBwH44k6TqTgXMcnxgDbmJHz0Di1CotSdKlZk78rARwEx0f0hNPQ5CS5+FIki43vQ8nd5QET2lPTZV3/eglNUnSVabbossd900wMtBGxxDBrvlqqdokSSsyGThDPdDkYBc79ukZgNts4EiSLjfdFj18B0BHQyLT5szopAFJ0hVmWs4qJQWFfD5mGg7xYZHCJEnrMn3EdLkngJHKXbwBcIUjSbrKZOB8PPvmwClk+gia7LRoSdLlpjd+Dr/jOJ4mRVcqJSWGeliqNknSiswcwNZSgUoQVCKC7MZPSdIVpu/hNG+4KZVK8JKeSCkx1P1StUmSVmQycJp8w65UNrS0saFJiaAuVZskaUVmJ3HetwMtmXx+qEdMS5Ku8arRzyNBoaEkKKn93DVJklZopi361A59oOeQ9jyPlZfx7SKFSZLWZXaFM0aiEoz0jBH049MSdUmSVmYycHbNV9yUkYe0BeCxbbhtf7VIYZKkdZneh5MyOQXbXBijpyTY5S+Wqk2StCKTgXObTuEy/t6Gzy
13n78qSdLqzN7DeRoaMnAbb+gr7HFatCTpcpOB08eBvmaGCADu2+TxBJKkqzRT7xw4Derc5MyBPdsCx2rgSJIuN70Ph5GUTsNscmT2I9RwtI0k6XKzTQPHMdPXehpuk+C2+Wqp2iRJKzIZOCVa+kgcY6RS6TJ0abdUbZKkFZkMnO/jH8hABVo6coIa4zKVSZJWZfrEz/qBCuxy82la9Hg+blqSpEvMnPiZ2eSgPzcKJGAIj5iWJF1u+sTPvKMG7GOg5wgYOJKk68zOUns/FF448Mw7jvW06pEk6VKT6RFReRoSNZ1mqdXAI6YlSVeZbotOLY9d5Ta2bOOWMWCoXlKTJF1uMnAO9QPbXNmmhkJDX6HJm6VqkyStyOwNmWNNjFEZGWgS9PV5ibokSSszPS26PvM8ZhKJTKZyuq8jSdKlJgNn33/DGImeShsdxzFo881StUmSVmT2klpJQUumpkoFtuXNAmVJktZmpi36QEQip/Tpwfvx+yXqkiStzGTgNOUNKQX7GBgZTufipHah0iRJazI92qb
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZoAAAEeCAYAAACzJ9OtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz915NdWZbmif3WFkdc7QpwIIBAqIyMzKgU1WXV1TPd0z091jY9zTHylQ804x/IR9JIMxqNYobFtu4qyy7RlaIyMkNBC1dXHrX34sM+fgEEgAhEZAhEln9mqMiCO/yee/yevdS3vk9UlQtc4AIXuMAFvimY7/oCLnCBC1zgAn/cuAg0F7jABS5wgW8UF4HmAhe4wAUu8I3iItBc4AIXuMAFvlFcBJoLXOACF7jAN4qLQHOBC1zgAhf4RuE+74si7p8o91mAl3vrgkNMSYyLb/aSLnCBC1zgFYdqJ8/7e/m8PZp/uoHmZWD7/4an/lYkw9kpMTaEePbtX9YFLnCBC3xHeFGg+dyK5gKfhycDjGBkgDEZAKrxIshc4AIXuECPixnN1wJFxGFNCRhirL7rC7rABS5wgVcGF60zUrvrHKrN1/Zznd3DiKfp7vOyM58LfDFEMqwZE+LiC35fLz9ru8AFLvCH46J19jlQbfmiA8nIkKgVqWUmiOSovqhyEayZMMmvYcTR+l2itiyq333h61zg82FkiDEF1uSEuHrBd1nS7+niXn+XEMm+1sTtAt9fXAQa4PkHklBm1/GmpLBTHDmreMSmPSZqTQifxzJTYlwRNSAYVANR4zd18V8KggMxgPmcQPnqIuoKDTUxvjjQC3IRYr4RpAQqxCWfJcE8DxdB5gLnuAg0z4XFmAFKRInkMmKgY6z15GZE0Jagbfq6BpRIGzfE2NGG03QAisGIRcQgYjFEPi/DFlyqkmhRDbzMg/xlkLLLgBIQBTE5qvXnXtOrCqVDtXvh173b778voBpR7Yhafy8D67cNZ/fwdkgbFnRhzpOfQyMDnB2mz33coLz4d3CBCzyJi0DzHHi3SxfOqJpbVECbbdj1b5IxYIcrqEQa0xBo2egZbdzQdg8I8ayfH0zxboyTHIvHGEsk8KIaSKQg9/tkZkgdFjTdMWl09ocFG2umPUkhB6Bu7wDpoDYYjAyI+qL20/cX1mR4U1LaHZTIJpzQxZpN8+l3fWmvMCzWjIjaUHcNaETEPvU5dHaMMyVN++AiyFzgS+Ei0DwHRjy5v4RgiNpRNbe509zCuwN28jdZtPfYNJ8iOKblexRmgitygrY0cUnTLTDicORY8bRa0cX6ha+n2m6DmjFjRvl1AKJGurjZBoiXRxqCn1OsQywwknM+uxDJ8G7c07ALojZ/VAunUTvqsGDTHaPa0YVTvo+V27cJQYi6eW6765zU4mzRf7ODz6koL3CBz+J7wzpLzDCDiEe1/tr6v87u4OyQEOv+UHq6XZBg+++dkbsZkNoyTnJG9hJecnItsX3cjkQCHbVs6LTmtL3ZtyJOnv/ecE9liIP8DQSLFUenNZv61h+cQYoUT7WOhvnb1N3xZ67pfIj+qkNwdvY5AeT78j6+nzhfSm67Yy7u8wWexPeedebt7rbnHrTd/r01U4zJUO3QfuCuxJfK0EUKMjfFSU4DhAjPe3AEociuoqQKQ8RgJbWjGtYEbdkwT9eDx0lOrUtOq08TcSCu+rnL82A/kyFa6vakn//UoBHtmW5pKbRIc58++CnhC1UIUjuvJHOHFHZK7K9F3ZQQFk8EsVf/0Dg/5Eq3S23KF1R7n1Fr6OdfUdf8sVQ2In11od233sZSDf3n79X/vFzg1cD3ItAYM0bE9If10+ytEJdEtRgpMSZLlYDJ6GJB2z184juf3qlIFVJkXX+8fQ1nhnh3sB00qyYyABqp2yPK7JCRP0iBjIDBkskAi8NrjsXR0tBS9z/TQQRjc5Twmes5vw6Ls2Pa7nwwH7ZBQ6RIDDFNVF1jMnI3o+5OacNDBEeeHeLtkKrpXniQqjZ0oenvgqUJc7rupH+NnMRA+34whAp/SGZHFGZCpy9uR4JlmL+BFdcnCDWb5o9nHpW5XVQjUdu+snvxoS9SINgn6PlfHSIZRkqUgDFjYlzyxxK8L/DN4XsRaGJcUL+wQgmoBoI2hNi3odxOX9Y/hrO7WJPjTEkXN7RhgcbNU6/R9K8hOKwds1f+EEdOwQhPRq4Fecw4McfcD78FwFqPFbf9+tosOG4/IWq7bceliqblsxDJmBbvkpkBXXaVqIE6zGnDAmtKhv6ALlbMq39M16gdVhyhv06lo27uYcyQIjvAmwFnm3/k8WEiCBZjhogYunDyTPvu89hbrxqsmTL0B1g8Fr+tYF+ENqwIxuFNicgfkwiGJbdjOq1pw8vQjFuQ83v1/CXWRHt3X8jM055xGaLhyaB1zlar2nsvmbRcVET/lPC9CDRfBkpH150gYgG7pQp34YguAP5qHwDaF7YclI4QN2zCCZkZMZV9hnFIJNJKR6kDrtj3aKRhqY+o45KVnGAlHX6lm6EaaeOaoB2btgONGDPEmpKoLTFWGFMkCjSGgdlJlZG5gfcZsadWd7YjG45o4yax2MRjindp4nJ7vYJl6PZSdTXwrLsjjDgyM0KJ1GFOFzbPfa+fhZEh3k1fSWZR7CuYc9r5i4NHOsSa7h4AtRRkbpfMHb6wsvw+QcTjTIHGSPdSB3b4nNZtgjFDjMkI0afApF2iwktO7vcRDCE2fRs3wZmS3E4wYvFSbn8vXVgR4uYLgtYXBxkjwzSPfcU+hxf48vgeB5oXy4soChr6YPM0Qtx8IQupyK4RY8eyvomIJysHRLPPze7vWNcfk7lD3sz/AqeOdXdE0y2IsUK1Zlq+xxvyE4wKRgxRIptyQ0vDsd5i2dynC0cAxLDiZH2GMUNiXKEEhvlbvOZ+TCRSy4Zalzxa/S2PH0xL5g7Yyd8gkwEjnWLUsGFNS82mO6FqbmHMmCwfAVC3j17w0KehupEcb4c4k6dKTNte+eBVe8AjUTta3dDFmqo9eu53fXZhU7Wi6Y639Nxna8vvF4zkxH4JOB38f3h1EOJZP6O0GCmIpM+LakXV3AKEYf4WhZ1sA3wXa4LWBIUogagtVXOb82dLnjhevjhYCEl68aLK+WPE9yjQPP0wieQYyXF2iBH3VHarGl/I8lKNODvb/u8Q53x2dlPYGbkfMWYPT4ZRg8FQ2h0ae0bhpgDkWnDD/SnRRVZyRqNrHDkn8pBcS6ZxhsUSiXTScbb5+JmhvaKJyIACStU+4ja/worDmYIufravnjL1h3FF7nbw/n1yKYh9a8SbksZMETGs20c9geJFR6tu71EbMgTf/214JZcbnZ0iGLpY04T5CwkfYkr0M19TrVJFy7MMvG8eqYX5dWXmIS6pwild2BDCGV/v4Rye2a0SKbCmZOQOKGWadsd0w7K5u02aHttmPBHi5Ytnf+dKFSmxaZ+qvERcCqSvXMJzgS+L70WgMWZM7na2bKuoLV3cEML6KdZR7q/Sdmefu4QY4h
lGhozyawBUXWplWZMnXbKw4qz6HdYMoYBMBuzpZXL17JirkMPI7DEJEwxC2z8EM6Y4LKcy55h7INDKiEhkziPW4eS5zDBnZwyzy9u2mMVvd2+qOKeNT7a8BGNGxLggxgWbZsF98YzcAZ6ib70N8G6MlZyyp2LXbpayz9gQtX5BAG5QXm1CwJOMw8R6erqqdXaHc0Hy501vVCtUy88EGdt/9zc50NY+yNh+CfIPvc+hrzJeFmkZ07sxRhxNtyDEFdaUeDvuWZSeoO2WHANpJubskHF2hYwBe3qIj54ou0QiD4sRDzb/ALDdyzp/HkUyBtl12rD6XFHZ80Dy+J6kKjv2ag5fLchciKm+avheBJoYF3SxoHAzcjth1T58os+eskXEfWGQMZKG5k5yMjvqD98NUWtG2RWm9pBH7Ues6iNCjFRxDibtxQBYHLmM8JqozYHIsT1CCVwL15nZnKnuc5191qHjnnlALRtEDYWZsJDh9vrOh68Q2bTHXC1+xkG4hEEwCMfmlJvxHiE2WDNFtUP6yk23Gbm
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 511.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# comparison opus@6kb/s vs. LACE@6kb/s\n",
+    "for name in ['opus_6kbps', 'lace_6kbps']:\n",
+    "    fs, y = wavfile.read(f'lace_demo/{name}.wav')\n",
+    "    spec, freqs, t, im = plt.specgram(y, NFFT=512, cmap='inferno', noverlap=256 + 128, pad_to=4096)\n",
+    "    spec = 10*np.log10(spec)\n",
+    "    \n",
+    "    make_playback_animation(f'lace_demo/{name}_spec.mp4', spec, len(y)/16)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "data = read_data('lace_demo/endoscopy/')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def get_adaconv_impz(data, prefix, frame_size=80, overlap_size=40):\n",
+    "    win1 = .5 + .5 * np.cos((np.arange(overlap_size) + 0.5) * torch.pi / overlap_size)\n",
+    "    win1 = win1[:, np.newaxis]\n",
+    "    win2 = win1[::-1]\n",
+    "    kernels = data[prefix + '_kernels']['data'][0] * data[prefix + '_gains']['data'][0, :, np.newaxis]\n",
+    "    kernels = kernels[:, ::-1]\n",
+    "    num_frames = len(kernels)\n",
+    "    impz = np.repeat(kernels, frame_size, 0)\n",
+    "    for i in range(num_frames - 2, 0, -1):\n",
+    "        idx = i * frame_size\n",
+    "        impz[idx : idx + overlap_size] = win2 * impz[idx : idx + overlap_size] + win1 * impz[idx - overlap_size : idx]\n",
+    "        \n",
+    "    return impz"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def get_adacomb_impz(data, prefix, frame_size=80, overlap_size=40):\n",
+    "    win1 = .5 + .5 * np.cos((np.arange(overlap_size) + 0.5) * torch.pi / overlap_size)\n",
+    "    win1 = win1[:, np.newaxis]\n",
+    "    win2 = win1[::-1]\n",
+    "    kernels = data[prefix + '_kernels']['data'][0]\n",
+    "    gg = data[prefix + '_global_conv_gains']['data'][0]\n",
+    "    g  = data[prefix + '_gains']['data'][0]\n",
+    "    lags = data[prefix + '_lags']['data'][0]\n",
+    "    \n",
+    "    num_frames = len(kernels)\n",
+    "    max_lag = int(lags.max())\n",
+    "    kernel_size = kernels.shape[1]\n",
+    "    padding = kernel_size // 2\n",
+    "    \n",
+    "    impz = np.zeros((num_frames, max_lag + padding + 1))\n",
+    "    for i in range(num_frames):\n",
+    "        p = int(lags[i])\n",
+    "        impz[i, 0] = gg[i]\n",
+    "        impz[i, p - padding : p - padding + kernel_size] = gg[i] * g[i] * kernels[i, ::-1]\n",
+    "    \n",
+    "    impz = np.repeat(impz, frame_size, 0)\n",
+    "    for i in range(num_frames - 2, 0, -1):\n",
+    "        idx = i * frame_size\n",
+    "        impz[idx : idx + overlap_size] = win2 * impz[idx : idx + overlap_size] + win1 * impz[idx - overlap_size : idx]\n",
+    "    \n",
+    "    return impz"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "impz_comb1 = get_adacomb_impz(data, 'limited_adaptive_comb1d_1')\n",
+    "impz_comb2 = get_adacomb_impz(data, 'limited_adaptive_comb1d_2')\n",
+    "impz_conv1 = get_adaconv_impz(data, 'limited_adaptive_conv1d_1')\n",
+    "\n",
+    "phi = data['hidden_features']['data'][0, 0]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA18AAAUFCAYAAADog3nCAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9eZxcV33n/7+P9l2tXbJsSW7LYBODjdw2qwPBEknYkoBsE5IQMhCZkIRkMgFDZibJI5kELPhmkgwkWITsmYwXGJjwCwHJLGY1luQVvEuytVhWS+rWbsmSzu+PT3245966Veqlupbu1/Px6EdV3drOraruPu/6nHNuiDEKAAAAADCyxrW6AQAAAAAwFhC+AAAAAKAJCF8AAAAA0ASELwAAAABoAsIXAAAAADQB4QsAAAAAmmBCqxswEubPnx9XrFjR6mYAABpsy5Yt+2OMC1rdjkbgfxUAjE71/leNyvC1YsUKbd68udXNAAA0WAjhqVa3oVH4XwUAo1O9/1UMOwQAAACAJiB8AQAAAEATEL4AAAAAoAkIXwAAAADQBIQvAAAAAGgCwhcAAAAANAHhCwAAAACagPAFAAAAAE1A+AIAAACAJiB8AQAAAEATEL4AAAAAoAkIXwAAAADQBIQvAAAAAGgCwhcAAAAANAHhCwCABgkh9IUQNoYQPtjqtgAA2s+EVjcAAIBR5LoY46ZWNwIA0J6ofAEA0DhdIYTuVjcCANCeCF8AADTOXEkHQwi3lF0ZQlgXQtgcQtjc29vb5KYBAFqN8AUAQIPEGDfEGPsl9YcQ1ta4vifG2LNgwYLmNxAAhun4cenkyVa3onMRvgAAaIBKVWtVq9sBAI2wf391yDpzRrr4Yuktb6m+/YMPSg880Jy2Dde3vy29+93SwYP57Y8+Kh06NLLPTfgCAKAxbpMkr3jFGO9obXMAYGj+7u+kBQukdevy2z/3OWnPHukrX5H27s1fd/XV0uWXS1u2NK+dQ3H2rPTqV0t/+7fS5z+fv+7aa6Xf+q2RfX7CFwAADRBj7I8xbo0x3hFjvKnV7QHQXr7yFWn37urtzz4r/emfSqdONb9N//7v0jveUV3h+o//sNNNhbVbv/Wt7PyXvpSdf+op6bnn7PwXv5i/z7Fj0qc/XV1laoa/+Atp48b8th07svN33ZWdf+45e3+6R3jJJMIXAAAAMIJ27ZJ+8iel88+XjhzJX/e610n/9b9mgccdOCB96EONGQZ39mwWjlI33ST9679a+Ett3Wqne/ZY290TT0g/9mPSuHHS9u3Z9q99LTv/gx/kH+ud77QK2l/+5fD2YbD27ZN++7el17/e2u3uv99Oly7Nh0nfn4suGtl2Eb4AAACABti82TrvX/hCfntaQdq8OTvf1yf98Id2vlgx+sxnpJtvlt7znurnOXCgOsRJUowWtIp+8zeluXPz1bUTJ6Qnn7TzX/96tv3QIQsrPq8rbe/jj0uXXmohMg1fjz0mTZggvfnN0kMP5Z/bA853vlPdrpGUVrXuvjs7f//9Fh6vv96qYGfO2HZ/LcZM+AohrA0hbKxzfV8IYWMI4YPNbBcAAAAwEB/8oLRtmw2zS331q9n5NJw88oidTpiQD0CSdOeddvqVr1iocvfcI82fL11xRf72MUrvepdtT29/5Ij0V39lYSutrt1/v22bO9fCk3v8cTt905vsdOdOOz192gLXxRdLF16YD1+7d0vnnSe9+MX2WB7y9u2zH0n63veyoCNJvb3S8uXSbbdpWM6elX7qp6R//uf89rvukiZOtPMecCVr9/nnSytXWnv8qB9jLnwNYGLydTHGNTHG9U1pEAAAADBAMWZD2r797XzQeOIJG144d24+fD36qJ2+6U0Wcjw0nT2bVW4OH7Z5Ye6b37TTbduyYCNZuPnHf7RVB9Nhdt/7XnY+rcj5cMKf/ElbPOPwYbvsz3XZZRYKfWGNnTstgF10kbRiRX7u1K5dNoxvxQrbb38M39e3v91CoAc5SVq/Xnr6aekP/kDD8p3vSF/+svRLv2Ttc9u32xDJSy6RHn44275vn7R4sYVFyYZWSrY/06dbsB1JbRO+BqArhDDCU+AAAACA+t72Nunnfi6/7ZlnbFGJK6+U+vuzSopk4eSCCywMpHOiHnnEqjPXXGNzsvbvt+3799tlrz6llan77svO33tvdv7BB7Pz6RwsD0lLl+YDky/+8brX2akHQQ9OS5ZIixbZfklZhWjRIgtZe/ZkFa7du+3x/fCFflsPPatXZ69RsY3bt9uiHO7MGZsj9t3vKidG6Y//OL//kq3A6NIhknv2WMC69NJ8+Hr2WWnhwurwtX+/bQ9BI6qTwtdcSQdDCLe0uiEAAAAYmx55xDr8n/98/rhWHn7e+EY79XBz5ox18M8/34bZpSsePvGEra534YV22StDtYKRZNW1V77SzvvCGJIFtClTrLp2zz3Z9h07rIL18pdbpcnt2iVNnmxLxPvtpCx8LVpkFSIPTAcO2Om8eRZSYrSwGaM91vnnV4evZ5+1MONDJNPl6bdvtyrTyZP5oLpxow3bXFs4TP3mzdLv/7700z+d3/7DH0rTpuX3QcrC17JlWcCSrPKVhi9/rQ8etNdupHVM+Ioxbogx9kvq92OopCoHt9wcQtjc6+84AAAA0EDpwhj//u/ZeQ8Q115rp96pf/ZZC2AeTtJuam+vhZwLLrDLHr58SOArXiFNmpQNI4zRgtgrXmHBaNu27LEee8zmYy1fnq8w7dhhj9/dbY/vC3Ls3m1t8mF2Hq6efVaaOVOaOtWqXx6Y0vA1Z46d7+uz4YrHjpVXvnp77fZLl9plb1d/v4Wda67Jv1ZSNndrz578So//9E92unevPa/budOO2+X7Ktnww2eftYA1f7618dQpe/08fC1ebMHQgxnhK1EJVqvq3aYSznpijD0L/J0HAABAW4lR+vjH80PE3Fe+YhWap55qfrsG6oknrJO+YEF+0Qmff3X55XbqAcpPPXwdO2YLXUgWaObNqw5fHkaWLbPw4MMRjxyxStGiRRYg0jlfHr6WLKkOXytW2GOdPJkFI5+nNW9e1hbJQsuiRXY+faxa4cvbsGhRdfjav9/av2CBrTDoj+Wvm4evdDn7tMqXDrdMh1Wmwwh37bJ5XfPm5at3MWbhy9t/6JD0/PP22k2YYO1Kw6W/FiOpbcJXCGG1pJ60qpWsfnhb5fJaaUCLcwAAAKANfelL0gc+UD1nKkZb/OHuu8tXwPva16oP+juSTp/OhwK3bZstOtHdna887dtnnffZs6WurixA+WnZsLw0nEyYkIWTXbuk8eMt0Myblx2g2O+3cGF1+Nq92wJWMXw9/bRVw4oBz4flTZ5sC02Uha/Fi+05zpyxNoRg++YVooMHs+qU7/eECfnK14IFti8LF2ZBx8PXK19pj5lWvp5+2ubN+evgdu60RUDS7YcP288FF1jA9NDu1aw0fO3fnw2pXLgwa7O3f8xVvmKMm2KMc9JgFWNcUzntjzFujTHeEWO8qXWtBAAAwEB84hN2fKmiv/
s7O921Kz8HKF0J78tfzt/ne9+z+U9r1mRVo5G2fr116r///fz2J58sD18eNCSrKHmg8CCyYEHW6e/ttbDp1ZZx4ywI9Pfb9bt3W/AZP94CgQej9LHSIYynTklHj9pjLVmSDXWUrDo1b171AhP9/VnYmDcvew4flidZmIrRHvvAAat4jR+fr3yl4SsECzvF8CXlQ6G34cILLej5a3XypD3/K15hlz1k+byy4va0qrh8efm8tTR8eVhNw9fhwzYUs69vjIUvAAAAjA5Hjljw+sQnpC1b8tc9+qh16qX8ynW+LPnll1cHHl9eXcovJjFSYpT++q/tfLoU+vPPW3Wlu9t+nnoqW968tzfr1C9dmgWMNJx4ENm3zzr9p09nQ93SKowHJqm88uVBzsOEz4GaO9dC29mzdtvnn7dhjl1d2Wt+6JDt3+HD0qxZ2XP40MbDh60tUnb94cMWvjyc1Apf3ray8JUOn/SQ2dVlr1UxTL30pTbXzS/39lowu+wyq9LVGtLpAdKXzZ89O3sd9+/P2uqvxaxZtu3
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 864x1296 with 8 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# LACE animation\n",
+    "\n",
+    "fig, axs = plt.subplots(4, 2)\n",
+    "frame_duration=40\n",
+    "fig.set_size_inches(12, 18)\n",
+    "axs[0, 0].set_xlabel('Tap')\n",
+    "axs[0, 0].set_ylabel('Amplitude')\n",
+    "axs[0, 1].set_xlabel('Frequency (kHz)')\n",
+    "axs[0, 1].set_ylabel('Amplitude (dB)')\n",
+    "axs[1, 0].set_xlabel('Tap')\n",
+    "axs[1, 0].set_ylabel('Amplitude')\n",
+    "axs[1, 1].set_xlabel('Frequency (kHz)')\n",
+    "axs[1, 1].set_ylabel('Amplitude (dB)')\n",
+    "axs[2, 0].set_xlabel('Tap')\n",
+    "axs[2, 0].set_ylabel('Amplitude')\n",
+    "axs[2, 1].set_xlabel('Frequency (kHz)')\n",
+    "axs[2, 1].set_ylabel('Amplitude (dB)')\n",
+    "axs[3, 0].set_xlabel('Dim')\n",
+    "axs[3, 1].set_visible(False)\n",
+    "fig.tight_layout()\n",
+    "\n",
+    "frames = []\n",
+    "for i in range(12800, 14000, 4):\n",
+    "    f = impz_comb1[i]\n",
+    "    w, h = scipy.signal.freqz(f, fs=16000)\n",
+    "    frame = axs[0, 0].plot(f, 'b') + axs[0, 1].plot(w/1000, 10 * np.log10(np.abs(h)), 'b')\n",
+    "    \n",
+    "    f = impz_comb2[i]\n",
+    "    w, h = scipy.signal.freqz(f, fs=16000)\n",
+    "    frame += axs[1, 0].plot(f, 'b') + axs[1, 1].plot(w/1000, 10 * np.log10(np.abs(h)), 'b')\n",
+    "    \n",
+    "    f = impz_conv1[i]\n",
+    "    w, h = scipy.signal.freqz(f, fs=16000)\n",
+    "    frame += axs[2, 0].plot(f, 'b') + axs[2, 1].plot(w/1000, 10 * np.log10(np.abs(h)), 'b')\n",
+    "    \n",
+    "    frame += axs[3, 0].plot(phi[i//80], 'b')\n",
+    "    \n",
+    "    frames.append(frame)\n",
+    "    \n",
+    "ani = matplotlib.animation.ArtistAnimation(fig, frames, blit=True, interval=frame_duration)\n",
+    "ani.save('lace_demo/responses2.mp4', dpi=720)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzkAAATiCAYAAACZTC17AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOzdeZgcZbU/8O/JRgJZOhOCbEIyYV+TSQBBUZYEuSp7JgGViwrMIG73KswE1HtdroRMQH5eFZgJbihCSIJ6UQEzAZGdJJMgi2wZArJJSDKB7CQ5vz9OFd3T00t1d1XX0t/P88zTM93V1W8qPdN16pz3vKKqICIiIiIiSop+YQ+AiIiIiIjITwxyiIiIiIgoURjkEBERERFRojDIISIiIiKiRGGQQ0REREREicIgh4iIiIiIEmVA2AMgs+uuu+qYMWPCHgYRkSdLly59W1VHhz0O8h8/j4goLgp9FjHIiYgxY8ZgyZIlYQ+DiMgTEXk57DFQMPh5RERxUeiziOVqRERERESUKAxyiIiIiIgoURjkEBERERFRojDIISIiIiKiRGGQQ0REREREicIgh4iIiIiIEoVBDhERERERJQqDnATp7g57BEREVGu6u4G//Q146ilg48awR0NEZBjkJMSyZcC4ccATT4Q9EiIiqiU33wx87GPA4YcDI0YARx8NXHMN8K9/hT0yIqplDHIS4u237Xb16nDHQUREteXCC4HOTuDWW4GWFkAEuPxyYMwYu+3pCXuERFSLGOQkxPbtvW+JiIiq4YMfBE4+GTj3XOAHPwAeewz4xz+AadOAH/4QOOww4J57wh4lEdUaBjkJwSCHiIii4qCDgF/9ygKeESOAU08Frr4aUA17ZERUKwaEPQDyhxvcbNsW7jiIiCh8ItIEIAWgB8A4AItVdX61xzFpErBkiZW0XXEF8NJLwA03AP14iZWIAsYgJyGYySEiIgAQkVkAVqtqW+Z9IlKfeV+1DBkC3HKLzdGZOdOyOTfeyECHiILFICchGOQQEZGI1ANoUtWRmferaquIrBWRDlXtqf64bL4OYIHOBz4AfP/71R4FEdUSXkdJCAY5REQEoBlAZ57HugE0VXEsvbiBzhe+APzP/wC/+U1YIyGiWsBMjsOv+mURaQEwCkCDc1d7NeqgGeQQERGAySgc5EwBUPWSNZeIzclZsQJoagIaGoBDDglrNESUZMzk4P365ZSqtqlqh6q2AjjKCVhK3U+Hqraq6hQArQDmiMi8AIbdC4McIiKCXWDLt2JaN4BJVRxLToMGAbfdBgwdam2nN28Oe0RElEQ1H+Rk1C/3urLlBDpXiEjK436aYFmbnox9dAFoBDBVRKb6Nugc2F2NiIg8SIU9AADYfXdrMf3kk8C3vx32aIgoiWo+yIF/9cvjVLU7+05V7YSVwDWXNTqPmMkhIqptGRflekIchmf/9m/ARRcB110HLFsW9miIKGkY5Fj9cp/gxOHWL3vRUqAsbQkCLhFgkENEROUSkSYRWSIiS1atWlW1121rA3bd1ebn8POLiPzEIMe/+uVOAGvyPFZX4DFf7Nhht/yQICKqTRnl0qkyntuhqpNUddLo0aN9HVchI0cCP/yhLRj6619X7WWJqAYwyCku5WUjVZ2iqvlK0hqQvyTOF35lcu66C/jDHyofDxERRU4KESxlO+884JhjgG99C9i4MezREFFS1HSQU436ZachAQDMyvWYX+UBfgU5bW3ArD4jJSKimOiELYOQSx2sfDpSRIBrrgFee83m5xAR+aGmg5ygOUHULACNeZoS+FYe4Fd3tc2b06VvREQUO10A6vM8Vg8g8CUNyvGRjwCnnw5cey3wzjthj4aIkqCmg5xK6pc9mgegNU6LgW7axCCHiCjG2pFjLqlz0a0BwO3VHpBX3/42sHatLRZKRFSpmg5yPEihzFI2Z2HQeara4eeA8vEryNm8mc0LiIjiyqka6HA+gzLNgl1066n+qLyZNAk49VTL5mzYEPZoiCjuGOQEUL8sIi0AVlQrwAH8DXKYySEiii9nMesVItLizP1sAbAwe9HrKPrmN4FVq4Cbbw57JEQUdwPCHkAEdMFS+LnUw1L/nonIVADd2SVqIjLZWRg0EAxyiIjIVc2LbH768Icto/O//wtccok1JSAiKgczOWXUL4tIzkmdIjIZAPLMwckXSPnCzzk5LFcjIqIwiABf/Srw7LNAZ6ALLxBR0tV8kFNq/bKILIWVATRk3d8AoNn5fmrGl1sqkK8kzhfM5BARURJMmwbstptlc4iIysVyNVj9ckYw0gNrOLAwT0amE7kbEixy7p+a52VafRhqXn60kN6xA9i6lUEOERGFZ6edgIsuAq6+2tbO2WuvsEdERHHEIMfhtX7ZmdDZJ2BR1ZG+D6oEfmRytmypfB9ERESV+tzngKuuAn79a2DGjLBHQ0RxVPPlaknhR5CzaZPdMpNDRERh2n9/WyD0F78AVMMeDRHFEYOchPAjyNm82W4Z5BARUdg+/3ng+eeBRx8NeyREFEcMchLCzyCH5WpEVAtEZLiInC0iN4jIYhFZLSLbM75eEJF7RORCERkT9nhrTWMjMGQI8JvfhD0SIoojBjkJwUwOEZE3IjJeROYCWAtgHqwz5kQAIwGsA/ASAIF1xZwCYA6sq+bzInJhOKOuPcOGAZ/4BLBgAS++EVHpGOQkhB/d1RjkEFGSicgYEVkMYCmARlhXzBkAJqpqP+erTlX3c3+GBTqNAG4CsCuAOU7Gh8FOFTQ2Av/6F/Dgg2GPhIjihkFOQvjZeIBXzIgoaURkJoAVsAzNJQBGquopqjpbVZfle56qvqSqC1S1WVXrAJwC4F5YsPM4y9iC9clPAoMHA/PmhT0SIoobBjkJwXI1IqKCGgFMU9VJqjpHVdeVsxNV7VTVRgD7AVgJYLKPY6QsQ4eyZI2IysMgJyEY5BAR5eeUoC3wcX/dqjpNVW/ya5+U2znnAG++CSxZEvZIiChOGOQkBLurERFREn3840C/fsCf/xz2SIgoThjkJAQXAyUiqpzTnOAGp3X0PSJyFefdhGvUKOBDH2KQQ0SlYZCTEOyuRkRUnIjMdNa/cdfCuc0NYkRkAqw5QROsdfQUAK2w9tHfCG3QhE9+0srV/vWvsEdCRHHBICchWK5GRFSY0z66BdYWWpyvaQBeFJHxsDVzBMAyWHDT6nwvANpE5MgQhk2w5gMAcPfd4Y6DiOKDQU5CsPEAEVF+InI5bMFPAdDmfD8FwGzYZ+EiAGMBzHM6sM12vibBFgMVALNCGTzhyCOBPfdkyRoReTcg7AGQP9zAhHNyiIhymg5AAbSq6jUZ9y8SkdUArnYevzjHc1uc+8cFPkrKSQQ4+WTgnnsAVfuZiKgQZnISguVqREQF1Tu3HTkea3e/UdV3sh/MWFOnPvsxqp4TTgDeegv4xz/CHgkRxQGDnITwM8hRtS8iogRJAUWDmEJ6fB4PleiEE+z2r38NcxREFBcMchLCzyAHYJBDRETRMnYs8MEPMsghIm8Y5CSEHy2k3Tk5mf
sjIiKKAhHgxBMtyOGFOCIqhkFOQvidyWHzASIiipoTTgBWrQKefjrskRBR1LG7WkIwyCEiKk5EhsHaQZf6eArWfY1CdPzxdvvww8Bhh4U7FiKKNmZyEsLvIIflakSUUD0A1ub40iKPUwSMGweMGgU8/njYIyGiqGMmJyH8CHIy5+Qwk0NECcTVVWJOBDj6aOCxx8IeCRFFHYOchGC5GhFRQSPDHgD54+ijgbvvBt59Fxg2LOzRxIsqsHUrMGgQF1Sl5GOQkxB+dFdjuRoRJZXHtXAoBo45xk7WlyyxbmtU2DvvAL/8JTB/PtDVBWzYAAwcCBx0EHDSScBFF3F+U1ypAk88YR0HX34Z6NcPqK8HJk8GDjww7NGFj0FOQjCTQ0REteDoo+32sceqE+S89x7w858Dt90GPP88sPPO1gDhy18GGhqCf/1yqQK//S3wn/9pHenGjwcuvBAYPdqyYMuWAe3twI9+BJx+ut2OGRP2qMmru+8GrrgCWL7cfh461M4B3akHxx8PzJwJfPjDoQ0xdAxyEoJ
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 864x1296 with 8 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# static plot for Jean-Marc's on-site presentation\n",
+    "\n",
+    "fig, axs = plt.subplots(4, 2)\n",
+    "frame_duration=40\n",
+    "fig.set_size_inches(12, 18)\n",
+    "axs[0, 0].set_xlabel('Tap')\n",
+    "axs[0, 0].set_ylabel('Amplitude')\n",
+    "axs[0, 1].set_xlabel('Frequency (kHz)')\n",
+    "axs[0, 1].set_ylabel('Amplitude (dB)')\n",
+    "axs[1, 0].set_xlabel('Tap')\n",
+    "axs[1, 0].set_ylabel('Amplitude')\n",
+    "axs[1, 1].set_xlabel('Frequency (kHz)')\n",
+    "axs[1, 1].set_ylabel('Amplitude (dB)')\n",
+    "axs[2, 0].set_xlabel('Tap')\n",
+    "axs[2, 0].set_ylabel('Amplitude')\n",
+    "axs[2, 1].set_xlabel('Frequency (kHz)')\n",
+    "axs[2, 1].set_ylabel('Amplitude (dB)')\n",
+    "axs[3, 0].set_xlabel('Tap')\n",
+    "axs[3, 0].set_ylabel('Amplitude')\n",
+    "axs[3, 1].set_xlabel('Frequency (kHz)')\n",
+    "axs[3, 1].set_ylabel('Amplitude (dB)')\n",
+    "fig.tight_layout()\n",
+    "i=10*80\n",
+    "\n",
+    "f = impz_comb1[i]\n",
+    "w, h = scipy.signal.freqz(f, fs=16000)\n",
+    "axs[0, 0].plot(f, 'b')\n",
+    "axs[0, 1].plot(w/1000, 10 * np.log10(np.abs(h)), 'b')\n",
+    "\n",
+    "f = impz_comb2[i]\n",
+    "w, h = scipy.signal.freqz(f, fs=16000)\n",
+    "axs[1, 0].plot(f, 'b')\n",
+    "axs[1, 1].plot(w/1000, 10 * np.log10(np.abs(h)), 'b')\n",
+    "\n",
+    "f = impz_conv1[i]\n",
+    "w, h = scipy.signal.freqz(f, fs=16000)\n",
+    "axs[2, 0].plot(f, 'b')\n",
+    "axs[2, 1].plot(w/1000, 10 * np.log10(np.abs(h)), 'b')\n",
+    "\n",
+    "f = np.convolve(np.convolve(impz_comb1[i], impz_comb2[i], mode='full'), impz_conv1[i])\n",
+    "w, h = scipy.signal.freqz(f, fs=16000)\n",
+    "axs[3, 0].plot(f, 'b')\n",
+    "axs[3, 1].plot(w/1000, 10 * np.log10(np.abs(h)), 'b')\n",
+    "\n",
+    "fig.savefig('plots/lace_snapshot_unvoiced.png')"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "torch",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.12"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
--- /dev/null
+++ b/dnn/torch/osce/stndrd/presentation/linear_prediction.ipynb
@@ -1,0 +1,320 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "import matplotlib.animation\n",
+    "from scipy.io import wavfile\n",
+    "import scipy.signal\n",
+    "import torch\n",
+    "\n",
+    "from playback import make_playback_animation"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def load_lpcnet_features(feature_file, version=2):\n",
+    "    if version == 2 or version == 3:\n",
+    "        layout = {\n",
+    "            'cepstrum': [0,18],\n",
+    "            'periods': [18, 19],\n",
+    "            'pitch_corr': [19, 20],\n",
+    "            'lpc': [20, 36]\n",
+    "            }\n",
+    "        frame_length = 36\n",
+    "\n",
+    "    elif version == 1:\n",
+    "        layout = {\n",
+    "            'cepstrum': [0,18],\n",
+    "            'periods': [36, 37],\n",
+    "            'pitch_corr': [37, 38],\n",
+    "            'lpc': [39, 55],\n",
+    "            }\n",
+    "        frame_length = 55\n",
+    "    else:\n",
+    "        raise ValueError(f'unknown feature version: {version}')\n",
+    "\n",
+    "\n",
+    "    raw_features = torch.from_numpy(np.fromfile(feature_file, dtype='float32'))\n",
+    "    raw_features = raw_features.reshape((-1, frame_length))\n",
+    "\n",
+    "    features = torch.cat(\n",
+    "        [\n",
+    "            raw_features[:, layout['cepstrum'][0]   : layout['cepstrum'][1]],\n",
+    "            raw_features[:, layout['pitch_corr'][0] : layout['pitch_corr'][1]]\n",
+    "        ],\n",
+    "        dim=1\n",
+    "    )\n",
+    "\n",
+    "    lpcs = raw_features[:, layout['lpc'][0]   : layout['lpc'][1]]\n",
+    "    if version < 3:\n",
+    "        periods = (0.1 + 50 * raw_features[:, layout['periods'][0] : layout['periods'][1]] + 100).long()\n",
+    "    else:\n",
+    "        periods = torch.round(torch.clip(256./2**(raw_features[:, layout['periods'][0] : layout['periods'][1]] + 1.5), 32, 256)).long()\n",
+    "\n",
+    "    return {'features' : features, 'periods' : periods, 'lpcs' : lpcs}\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def run_lpc(signal, lpcs, frame_length=160):\n",
+    "    num_frames, lpc_order = lpcs.shape\n",
+    "\n",
+    "    prediction = np.concatenate(\n",
+    "        [- np.convolve(signal[i * frame_length : (i + 1) * frame_length + lpc_order - 1], lpcs[i], mode='valid') for i in range(num_frames)]\n",
+    "    )\n",
+    "    error = signal[lpc_order :] - prediction\n",
+    "\n",
+    "    return prediction, error"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lpcnet_features = load_lpcnet_features('lp/features.f32')\n",
+    "\n",
+    "features = lpcnet_features['features'].numpy()\n",
+    "periods = lpcnet_features['periods'].squeeze(-1).numpy()\n",
+    "lpcs = lpcnet_features['lpcs'].numpy()\n",
+    "\n",
+    "x = np.fromfile('data/a3_short.pcm', dtype=np.int16).astype(np.float32) / 2**15\n",
+    "x = np.concatenate((np.zeros(80), x, np.zeros(320)))\n",
+    "x_preemph = x.copy()\n",
+    "x_preemph[1:] -= 0.85 * x_preemph[:-1]\n",
+    "\n",
+    "num_frames = features.shape[0]\n",
+    "x = x[:160 * num_frames]\n",
+    "x_preemph = x_preemph[:160 * num_frames]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# short-term prediction\n",
+    "pred, error = run_lpc(np.concatenate((np.zeros(16), x_preemph)), lpcs)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# long-term prediction\n",
+    "offset = 256\n",
+    "padded_error = np.concatenate((np.zeros(offset), error))\n",
+    "ltp_error = padded_error.copy()\n",
+    "for i, p in enumerate(list(periods)):\n",
+    "    t0 = i * 160 + offset\n",
+    "    t1 = t0 + 160\n",
+    "    \n",
+    "    past = padded_error[t0 - p : t1 - p]\n",
+    "    current = padded_error[t0 : t1]\n",
+    "    \n",
+    "    gain = np.dot(past, current) / (np.dot(past, past) + 1e-6)\n",
+    "    ltp_error[t0 : t1] -= gain * past\n",
+    "    \n",
+    "    \n",
+    "ltp_error = ltp_error[offset:]\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Taxl25YeCH1j/qy19t7nRMS9972XmZW2VUXJSCDogFQWqo4lKMnQcQ/K1QKhyg5FA9EBCYFFqzo0ypIplJQKAw0sWsgNC2iWVCpQ0gGBG8i4oJz+yffy5o0bEWfvvdb8GTTGGHPOtc6JuPGKZyJtxXy6LyL22Wf9zjnmGN/4xjeImfF1fB1fx9fxdfzTP9yXvoCv4+v4Or6Or+M3M74a9K/j6/g6vo5/RsZXg/51fB1fx9fxz8j4atC/jq/j6/g6/hkZXw361/F1fB1fxz8j46tB/zq+jq/j6/hnZPykQSeif5eIfklE//eP/JyI6K8R0d8lov8bEf3nfvOX+XV8HV/H1/F1/NT4HA/9bwD4S5/4+X8ZwJ/X/34PwL/9//tlfR1fx9fxdXwdv+74SYPOzP8egD/5xFf+MoD/Fcv4PwF4Q0S/85u6wK/j6/g6vo6v4/NG+A0c43cB/P3h33+on/2j4xeJ6PcgXjwA95/3bpHPARA8HDw8ezgQiKj/3uE4DKByRaGKioKKot9z8Ajw7OGJdrsVH/4cP7dqWR7ORUR6Xf/xBg/Hlv+N93I4tp7LDeft98kozHqvFQDgQHDs4MjJ73zGPcpf+Plnz77Lu+dqV+7g4BDgWf5Gu+smOAKI5FqYgcws/1FGRda7dvD6jj0RxjtlQO6OeXdtdh4cP3vh75/67Pl9ju/HrsDezvNRwWCq8qe+BwLJc2H3bM5+9LztfHIVpL/nATii/gz1Ivt19itvn/WDtnv6nGH3y2Rzs+pP+uzcPwk7Z8X4dkhnArHbz2tde/b3l+5/HOP37N6Z5aoq61np43cnP3r+7hho9yhrx+ayh2MH/8L6GefF8ZkBz6/9eB/2DNywjp9/Zzzmfh5+zvjAf/zHzPzzl372mzDonz2Y+fcB/D4AvPG/4L94+a/1CyHC7IHJEaIDPAGVgczAWoCtMlL96ZsmQIz58OQKy+8mZiSuqPqyChgFFRkFZVikEwImeJycR3Ru9xLsuMfXdbyy8ef2O1UN3a0U3LkgIaOCEeCxUMDZeZy9w+RlQWS2a5c/i163TRZHalT+Y+w6qTKeSsFbvuGD+4AbXQEAMy94rI94wIyLj5iHg9s9jefz1J+318+ZgcSs76226zazUQ+Tt+q/KvPuOUbyWJzD5BxmL+dnMEqV46fan4kZEU/07P3LRrm/7spAYfuzG8RUK1auSFzb+ymoeHJPeE8/4Fp/wFo+AABO4Q1+i/95/C6+w8/niMco58hV3l17b3rjiRlrZWy1IrHMOLlmBw9CIGr3Ic/54xvMS+/8pfczPoNxZJY5W/n5RmBzaxz1IwvvpTlo3x3fpZ0n6zyuw1z2w7uzY3oiTE5sQtB3V9UW3AvjWirWKi6HHJ9R2pbTz5tR2mcVFQ4OMyLOFHH2Hicv52fIfH0qBdeasSKhoMLDISLgRAGLc4iO2jw0m1KYkbm2+3GDUf/YOxg3uvFzW9uzIyyeMHkgHl7e//SP/tr/5+W38Zsx6P8AwJ8d/v1n9LNPDgaw1tr+Xcl8VPGUit5vqkCurAa9NqNQdOcu3D/zIMzOY3YOExG8TgTPMjkAgNnvrkO8gD75KpuxqLjWjFxlMhAIAYRIz73/8Vg27CUnVN0uxHBHOJxdwM/ChMnNbaIyy+9nBlLuR7LF4ogwgwajaIaIkdm++/ICs2sdjVp0hMUHfIdHMD/281FfUGYI2zt7YVHbprPV/aJ3JBNx9q4ZGjPG9jv2dTOiGwpWJGQqcEyIHJBKxJkDCjs49dLEEKthsPMxyxw6eKv2nIpGDPa5Gf6of7b7dw7EhMgOhT0qGA6EX+CCiX4bMRL8tH/OXv8rPPynz8OeSR2uZWW5T9vIxBsV7x+MZngCe0SJaSQKQt/konNYvMxxZjFy73PBj3XDE92RsInx4hkLJpwoIA4bht3/4vucKBVtw7FNzTacSE7Wletz3zsgEiG4YTOHrFlZr4RU9+/DQQ3daNAOhs+e07VUlK1v2u3nhy3IQe7l1DZz0uPI/Ei1agQovxfJYdHnNzvSS5FZGshjqQ6JI46jQp6P3U0gwjLMH5sL0REmh+aYMoBrBt4nxvuc8cQJWVGFAN9siyfXrs+RwwR5v+HXcNh+Ewb9bwH4N4jobwL4CwB+ZOZncMtxEGQyjGOrFff68vcBnYRA82wyAyAHGl74vWZc9Ri2GB2AicTbjuq9NUMJmZh9EgCuAsyExC341eMBmSsq9ruwJ/GuPA1eJAO3Iou16O/bS7zWjHSAhSy8xHBGT4TZOSxEmHTxAmIwUmWg0m6ys/6M9PyALlayqKcfA+ibyLihEXqE5AkIrnuzqfYNtm2iOoHP+jsvhZQ2ChMSyUWaUbb7PJPHAg9gHq5Pw24A11J2xxy9euh7iHDiIREGIy3OwMoVKxcUfQczBywuYHb92bRnWKk9F3uGDkBwhEAyXwC0SOGa95GIJ0J0ulk48cbEG5b3Lf9Rm58RETN5zOSeRYQvDXMCrpnb3+3cEzyIT2As+pwk0lg5tYfnQAi6TURqAFpzkDK4OSEAEDlgvCp7tH2j3r9rR7K2aTD0Nmcr5FnZsT0I0TnMTpwu21zWIt78WgvuGivZ6BCK3Y943RN7zM4jtusgRALOPuycE5sf0e2jkeiAyoSH4V55eM+p9rVCkLlAQ4Tar0eiNFuDFrEWZrkmdvBwbYOe9P5He0gkz+vt9tIM+Pj4SYNORP8bAH8RwM+I6A8B/I8AeWbM/D8D8LcB/FcA/F0AVwD/jc89+bgz++Ehm6EbvRoLjz81MjOupeADr+KlkDyNmRdceMGlRkTv2+Ktw4Me9xEHYHIOy+Bd2ngJWolOjJoZQYBQGHioDlsNKDy1kLMO3mJRY1/BL4aMXr3EzA4z024DIILCEG63qFOV52DRjOHyoRI88bOw/OiFA4xS8OKwveO451bzSIcIgIAWIbXP9IIKAxEEp8ez927ent3PVsVo3UrByhJeO9hmTM0zs1EUUkPdG3oHggfhTAGOYj/n4F0Z/GCGpwzzwr5L1N8zAJCD3LRnBHK76AkQhyMXfaY6N3/kO966H/FEPyLxHZEWvK7f4Tt+gwcf8BDcRyE0MybdA67DObnBDwGEqu/Zf4T3EBXmic7tvlHhdSPde6gO4ulmfg4VjOvYDKRtZvZ8bW3MTKje7WCZwuL53sv+fliPvai3zGAkVGzIyFTa+nbskFGwccBWQzOWBGqRRXQyBwB9x2DcX5jnz9dDh87MVuy/Sz3qODyD0XbY71dmeHVC986gbv4/uZ1/evykQWfmv/ITP2cA/61f98SOCKfBXTxi02bMtyqh17VkbCjIKHAgzAiYyLeXZYmcqBPAs0di8fYMA2
svdtiRLbX1/Pr2/64snulasXuxngiFCczA5A3WkesXrLDjh/Z9u94jtr5WxW4Vj/M4vHD9fqrAViru1bzOjv9HOARyLXrILHjOXTeN0RMPJBHL6BXK+dAW4/HJHCf7CBWx3u/4Ll96jpXxbBFUXWSp7L8bCLgEjxP7ZnQTGywgeYj2zjRuCseFZMdDfxERDifn5bcc7Td0knSlvIM+UpV7vB+inBGPHo8xDkcQzBYX/IzPYP5tPQAQvUPQTe34rOQr/cAG5ciGSbv3ZrHN0bjY+cfNfHSgHA2JWOwdJ1ZjeyuMDyXjWjdkVI0sAs4UcPK+QTEG2Y0RMGDzo0M6eTSMY94AYiRjm/P9OLamBLs+t3Vibldw/XfsPsVJ6PdZGzxV8WPZ8IFuWOku7xcbEq3Y+IaiYGnEgkd8i2/qN/jGnXDxfndfI4w5vrej8+OI4FkgnjHys+uzXIFOiQbbvTS3PjX+/5oUHYcn4DKAQ8fQxhbKUyn4od7wg/sBH/AWGSs8ImZ6wGN9hXM5YS6yg4/rIYAQEJv
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "spec_x, freqs, t, im = plt.specgram(x, NFFT=512, cmap='inferno')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9S4x125bnB/3maz32IyK+xzn33Hszb6ZdlOUyBQIjYSE6SIBk6Lhru2WEVELCNBAdGggQLTo0QEJGZWQV0MCi6UZJ0EGiA5JpYAyWgXRVVmbeuuee8z0iYj/Waz5ojDnnWju+7557UqQ4VaVvSqGI2LFj7fWYc8wx/uM//kOllPgyvowv48v4Mv7xH/qnPoEv48v4Mr6ML+OvZnwx6F/Gl/FlfBn/hIwvBv3L+DK+jC/jn5DxxaB/GV/Gl/Fl/BMyvhj0L+PL+DK+jH9CxheD/mV8GV/Gl/FPyPi9Bl0p9W8ppb5TSv0/fsfflVLqf6aU+hOl1P9dKfXP/9Wf5pfxZXwZX8aX8fvGj/HQ/w7wL/7A3/8rwF/PX38L+Df+fz+tL+PL+DK+jC/jLzt+r0FPKf2fgA8/8JZ/CfhfJxn/F+BBKfXzv6oT/DK+jC/jy/gyftywfwXH+CXw55vf/yK/9puXb1RK/S3Ei0dh/jONPta/mWQwSmOVwihQn/mgUtOaEkTAp0RIkagiiYRJGqMMVimsAr05SEry/+VLAUZBoxNOR4yOaCIJhY+aJWp8VPgkB9H5va3xOBMwJqBUIiVFjJoUFTEpUj7zlH+OSX5ebwIoUj43eXdCEaIiJPmK61vRCrRKGBJKpfq6Ugmt8qepBPl/fdT4pAn5c8tnmfx+nY+RUKSEfF5S9d7KfUk3709ASHI/lnye5R5anWh0otEBm+9JjAofDXM0LFHho9qcdz6XfA3lU1N+wD+mblnOC4yK9T7GpFmiYomaJULIFdAqzwWjElatn1muPwExlbPL82MzV7bzLaRETImY/6JRWKVxGqxKt/ONT39O+b7FzfF1fsZOJYxOGBXznJD3xzKH8vnFBD4pfL7GMlcMYLWsHVPmxe84F8U6txSpLrYQFXNUjCEyqomQlt/5DBQKq1oaHK1WN9dfnvO6GrbnoG7OR+VXlAK9+Uu57pjXQ8rPKNZnAXFT5a6V2lzT7fWW57m+l3qfPr1X6ub95TnFpOpz+31Dbb+rT1/fjjK3tnNObf5Xfeb/vvffvUspffW5z/6rMOg/eqSU/jbwtwF+1vws/atv/2VAHqTV0OpEb2Od3CkppqgYvOYaZLKFdf7Vh1cmUjmOVWJoivFyKmF1oi3GOz/Ig1t40195tTuz311xbmGeGx7PR37zfM9vrns+zBYfFZ2JvGkXvu6vvNld2LdjNeghajEowcgiTIolWIbFcVkc12AJSaGB3nr2buHQTLR2wejI7C2XueXj2PNhbhi8ISS5hs5E9tbTmYDTEU3C6IjVkcYEjI4YFUlJsUTD5C1jsCxRE9K6uawLIxsGYA6GOWqmbKTLvepMpC+fqcRkzNFw9ZYhmPp+pxJ3zvNVN/C6v3LXX9E64oPhPPZ8HHse55bB27pZLVGeqd98JmQjlBdZebaNjrQ60ZlAo6NsYHkzMCpidEJvNrrPjZA0czBMUb6Xz9R5DpQvgCUpnmfH+9ny/ah4XlI24rLQfEqkjRE9WsNXneKXvefrbubgxAD6qBmDyV+6btJzVFy85uoVcxSj1Bl420Z+uRv5+e7CQ3/F6EiImsVbBu+Yg2GJhlifpxi87bNMSWVDr+sGHZNiSYoxaMYgG3JEHJOjDTw0Cw/NRO8WFKneqzHYm3tVPqPMpe0zM6qsqZjfJ89Vzll/cr+tjnUNbo8XP3PsxgScDtj87EPUXL3jvDieF8cY5Jp13gyKkS72wGeHpTgVEdlAy7ramUhnAkbJMy3PbYqKOWpiko262I4mnzvAEjVDMJy85uI1U7jdrGUzWjeA1Tm7tVnlb9vhdGJnxBbubaDV8ebv/43/17/5D/gd46/CoP8a+MPN73+QX/vBERNcvfyslcKVi/eaZCIqyU45Rc0YFWOQRbC9YSnJQyvDaUVnYG/zw9XFyJcHGPIDjCgFO7vgjN942gaAxnhedQNaJR6ahpAUVkd2duHYTlgTgOxx5a+4mYxaJZzxGBXp3SILLJ+nyh43wOItC2J0YlI0JnDvFjoTSEnVxdLoUKMIVSevTHL9wmcQzyZ7+1Hn16i/bydYYwJ7628WpNOR1np6t9QN5+XYLrzVyCjOU1d/DknTWc8bnQhRZYOjmYJsDGMUjzplozMHxRAUU5DoQrwoI1GRthgtnpRM9sjBLRz1QpOjAv0Zox6TOAMXb3k/OR4XwxjkfDuT2NvI0cqitlo2xZ0NGJ141eRIZPPcSmRRFqNRS34+stB9vt/FiJm8OZb7n5JGAUuUuS9RhDgpRiV6t7BvR6z1LIvjkhRxabh6x+BtNao/hJPaF89rD5g21ohIZ2+4NZ7GBHEKyqYdDHMw+Gyspnw9RiVcMbAm1GPJ/FyPoVWqUeoSDT7oOh/LfSnzcOv7v7yeCIzB8P3UMnjNFPOcAIxO1UHrjPi05Vw0ayRann/YfMX8LJ1ON8+tGHSrxGhvo+SytkOZp/k5yHyWzVHZyN6WOUE+bqLVa7R1WiyPi+H9pDgtiTmmHOWInTIZmQBodJmjZQOKP5qO+Fdh0P8d4F9XSv3bwL8APKWUPoFbXg6tYPfi05cc0p/87enHJDeqM+vvIX+puIYtc0w8zolLCFzTwoxHo7hTHV+3Lb/YJd40gb2VXdlHjdWBzi10SRHj1iORiRey52MRA3hoR+4PJ7pePHS/WK5Dz3XsGb14NmUU461VQmmZTeJZFuOcqudhdciLw9VjFG/cKPleglWdIxirV2MWk0InvTH0kIqRS0r+J29EcmzqsYvnr/Ki3buZfTvSdyPOyq673biKQV+/63rvyvt8NCzBsHhbN6w5GFJqGJXJ4f1qMO5c5K2R6KxcT11E2cjHHA4vUTHlezQG84mBcfnebhfjFDVjUFx92SzkfK2O9Dn6ATE6TYbcls3GZTfefIVt8rHHYLi8MLhKJWy+t+W4i15hHR8TS0q4AmElJc6H9dVh6LK3bVRk76zM8xyJzVHX+Vk+a/V+V+NaPO5LMCz5c2SjCTkKE8MOMHnLaWl4XBzPi0QXgERsJnLI16bz5hcAoiHkeVfnVr4/1qyvhagIOXKcsvdenn1jxAttdEApWc9XLDqf8xgEYpI1tUKg5em4bNz3NlRD/TKa2zpSZW1tI7saseXotzgyKd/DYXE18i0GfY069YsNQOboHGyFU8agmQPZKVH5/BWtgd6Ig9HqFRLUiDP7flao+ceb6d/7TqXU/xb4LwBvlVJ/AfwPACcnn/4XwN8F/qvAnwBX4L/2oz89j+I1OC073jZsComKL0eou2z93/y+mGAMimetCFNiCIpAIKFZUmAMltOicEpCKpcN7F1jUIixa9qZ4A1q7AhRM0VZqD6JAZmD4bI09OdjXQ
RGRxrjae3Cw/6MMYGUNN4b5mCZFofP3gpAUBodU10ASkmoO3lbJ03xjLZGd+t5FLjB6TXaCFExBcvz3PK0OC7eZJhnjVDaPFnFy8ze5gsPPyTN6C3QMQcruQWV0CrSOI8xHmc9xkRiVIRgWLzGB0PMhntr+HXJTRRIJxvb7XAqcXSeOzfTO0+jPYlsjDKENG/C9zKmYKphtzpHYDm6cMaL8dKRwVtaHTFKV0NQFt0YNBqL01EMYBR4YoyykUSKpy0GY2eC4MVAzPhrgQ8U6yZX8hllvi5J8X4y/PoKfz5d+a3+jlFduB/fcPU/42gbfr538tx1gR0jbTNzl6G8Au/5aJi9xWcYJkTF4B3Pc8tvh56z18xRIL7GiNfZlc0yz6fOBFot96vM5a3BcyrVDbcYxpgUQ4aSEmJwBA6Vz4tJ1vDRBl41nodmYu9KlGcwMedO8jFTXtM+6gwZ6pvXGx151STu3OosLFE2+SXDr7IRyvOc8xqboq5zfAvzFDexGOGyaae8KbZaorXWBFrj6+ayxBU+2hr
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "spec_stp_error, freqs, t, im = plt.specgram(error, NFFT=512, cmap='inferno')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9S6xtWZamCX3ztR577/O418zcw8MzIjMyyZIo0QEkSogOEiAldLJL0QIhZaeKBqKDEAJEiw4NkFChBJUSaFCqZjZSgiYdkBLRQFQhRFJRmRHpHm7mZvc89t5rrbnmg8aYc661z73mZi6CsozUndLROWeffdZejznHHOMf//iHyjnzeXwen8fn8Xn81R/6pz6Bz+Pz+Dw+j8/jL2d8Nuifx+fxeXwe/5KMzwb98/g8Po/P41+S8dmgfx6fx+fxefxLMj4b9M/j8/g8Po9/ScZng/55fB6fx+fxL8n4QYOulPq3lVJfK6X+H9/zd6WU+p8rpf6JUur/rpT6T/3ln+bn8Xl8Hp/H5/FD48d46P8A+Du/4+//ZeBvl6+/B/xb/7+f1ufxeXwen8fn8fuOHzToOef/E/Dd73jL3wX+t1nG/wV4VEr94i/rBD+Pz+Pz+Dw+jx837F/CMX4J/Nnu9z8vr/367RuVUn8P8eKxuP/0vXnX/qYVGAVGZYzKqDf/m29+VqQMMStihpTr8eUYVmWMAqUylPeFrPAJQs6knACFUQqrFE5z85lKgWJ3Dgo0cl7WRIxOaJ3kXLIiJU0sX/JfGV2uQ5f3KnVbkbv/PWdFToqYDDFr+T3Le5QCTWo/y+lkUNv3m+NkRSrf63u1zhgd0SahdKKcIjlrcpLzr+//1MhZsUbDHCxTVMwpElVCZ82gDaPJHGygsytKZ3JWhGhYo8UnTUzyzFR5Ltsz3t2D8lxvHvTuOajd89Eqo1RC6/zRfWzf83aM7XD7z1PtPtcj7z8/1/+QKUTMmpgUIUMq71PlWmy9HlWf/s2T3o5X/rf+nMtfjcoYndEqoVVqx2jnuDsvpWRuaZVQeruHKWuZh1mTktqOrZPMWRPRJqJULtf98bNXZJSWOYva7m1OmhiNfCVNyqqtETnn8t42r5RcZ67nL0dP+7/VZ0m9nrwdJ99ee30G9RipzPH00V2ua3c3p3K9l/Kzotqa3O7723m4P2JMCp80c5n3XnkyCY3FZUentdiPtibL11sbsjvs/vy2a307b8r7FB8d48+Wb36bc/7q7aHhL8eg/+iRc/77wN8H+OvDV/m/9zf+LkBZ6JleJzoT6UxEk0koQtQsyTBHQyiTqQ79CSP5doGBPLzeBDoTsTphdCJnRUyKNclxY9IkZKKEpFmTFmNUPq83kS+GmZ+dXnh//8x4vGK0LCptA8ZGjAvtM2MwhLljmQfCaknJFCOU0Cahy8YAEIJluo5crgdepgNTcKSsMCrhTMKZgNu9XytZpNYGrIlonUhJ49eOyzxwXgam4IhJ0ZnI43jli/tn3n3xHcPjGW0icbWs04C/DszTwLq6j+7l7bNTco/K4s5ZDLS1AWcDttwDpTIxGK7TyHk68DqP7VxyVuW+GtakbxakURkNbVFroDOR0a4cnad3azNmVke6zuNswJiEUqkZnBAN6+qISYLPuqT2m1y9vmoY62fGpFmC4+p7rqtjDrYZpuoUxN1xrE7cuZWTWzj1C50NAKzRsATHEiy+GkHKtUeZy75cf68TD53ny/HKu8OZ0+GKLvOzfsWd0dVazteYWDY0uYvr6piXnsn3+GCJWWN15G6YeHx45u79M/39GW0TqISyMgeVjVANdzSkxRHnjrh0pCifG+aO6/nI5XpgmgdCMrv1JutJlXsom7/FB4uPlhB1eS/t2dV7X9+filNU73U9rjURZ2L7n5wVvhx7Ca7Zg/p89mt4r2iS6qbajn1rD6odScUmxCwbY7Mz5HbO7ZjFTizRiv0ozyfU15MmJE0o/2LVrX2CzVC3jWy3FgYTGUxgsAGr9ysF/s4//nf/Kd8z/jIM+j8H/mj3+18rr/3OkVHMcZsYVt0akGrQ16RlAewWAcjDCW3Hl+FUZjCJsdwEpxOajDWJTgc6G+lMwOgkDyQaCOJ91YmgVebofNtYjNoma2cDnQksvmMNtj2I+rVfYLlMiJQ2A5Czaee+35jEy5e/HfqFofNt8r01OvV+mWLg60SvnxWr0Qi2GZ812GIUxCtDPurm3NvmZxLGBGy3Ym1E69iupy7wGLfryFmRoiYESwh28/ZU5tAv9HZtxjVlzRoNa7CsyYiRRzbVy+q4BMscZIEolcvz7BjM0J6F0ZnRrpzCzNgtDP2CUrcGryv3L2dNjGKkZ9+VeyLnYlSis2IsXPFcq0H30bBEi0+mPSejE8NuTtXXOhNxRubTWu5LTBpNbueciqc5l/vj62ZT5oFRmc4EjuPE4XjF2kAIltU7pnnA+w4fZalaHRn6BecCrvNoE8nlfoVosMGyKnPj0Sud0SaibUI52XRIipQsedlt5HV+AMqKo0DSJBex3YrzgdXEtn5j0lzXXgx3Ma66GGNZP4HRpRvDXZ2CsDOyMjc2I5yyJmNZF9MM7P7YRpfvKmFKZNCpt/4t7b7HtlmUjYXiFNhtff+uEZIhRM2aTIsw6vGtSmiT2zw5lg1O7E9EKYkMLmvHi+95WR1Pq2Gt82oXLdQtpDNiQ5yWWMaVDe3HjL8Mg/4PgX9TKfXvAP8a8Jxz/ghueTsUsgvtRyi72iV8+rS63U4Vs0JlRUxAuTlrVsyr5VtviUkMvQYGkzi5yINbOTmPe7PjgXiDsO3GivqQNGRZSADWRpwNuOIxtuupxlZnclKsq8EXr2kJ4jEanRidZxxmjscr/bCgdSQGyzwNnC8npqljXjtSVjgd6d1K1630nW9eWUqfhkesDQzOAzDY0I5xGib6bkWpRI6GCJA0SidMtzIArpf/0ypjbJSowwV0mVw5KVLxzPNukxLDYEhxi55SlN/X1bUwHSQSAcoGowjJkFBoMvfdwmM/fwSL1MUY034DFC/YRYNfu+ahN5inGO5cohxrIod+4X68NqjsUyNnRW9XRme5i3PzROu80CW6q/dJNk+LD4ZpHZuXJqG8LGprUnNOYtJcgCVprlGzJsVgwBcPvEY8pnj6KWmsiazFkGXUR1HpfphyfvUcXYVaSiSJur32HI08y3pv30QwymSyziif2/0Rb1i393Q20JW5dnMexVDWc4lJE5pB5Gbjr5uqKQYso/DBMq0d19WxpuKBZ0VEYXJGo3ee/makjd4+c//c6n3bRwc1uqivG5PQOraot17zGizz0re5VV+vMFzdcOo1xazEKSjvTcjvsTzjTmdMgVSdFsdFNv/bKNVHw4c0opcfL6D4gwZdKfW/B/7zwJdKqT8H/oeAk4vK/0vgHwH/FeCfAFfgv/GjPlndhvU1FLFadt36ALYQ6GPcbHxzyLoZPK+Ga1T4MlHHJJhfrxOdNjf/U3fcOiF1ubmdifR2pbMBrTI+WD5cj/yT777kyXdMQY5z7wK/OF
74aw/f8cUX3zEcrwCkYFnmHmcDQ7DNAzcmYEwUo+8dShuCd1yuB54uRz5MB17XrhijzGACo1vpbcDqKAZXCwzTuYCztxtLxUxBPAhnZIK2yCGp5tHmYmiVzhjeLPasiKslxY8nU8Vuc1K3kEbSbdHLtZZNskAIIMY8ofBJ4AiQzfRoVw7dwuA81obmXYdo8NHewA71/C7LwLUYG60kv9HbleMwt8giBIsPjsl3LMXQ743PflQP0JqE1XKsm3lSFncd1UjXZ2LKfaj3w0cD1WvPmsvqePEdT95yDmLQD0ZzMCV6SLsw30ikoXViGOe2iaZkWFfL+XIgvNwJdr6DZip8ARBVlqgs74w2oFRCuYwaF8HKdYVcNHHpiNeetFpiMOSkCUtH8G7LE+2jxzInf9dGCZSoWO/uWzXocu4hmRb51PsVkypwQ2j3NGaBQwXOUG2duB1c68qzV5TcRNY3nvjeGFcvXpHpbCwbum8OW0qGECqE5gjFcanOxlryFvV69hBdmwtsRr7CM4kWKLd8m0R/2xDo8fd
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "spec_ltp_error, freqs, t, im = plt.specgram(ltp_error, NFFT=512, cmap='inferno')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "x_in = (x * (2 ** 15)).astype(np.int16)\n",
+    "x_stp = (error * (2 ** 15)).astype(np.int16)\n",
+    "x_ltp = (ltp_error * (2 ** 15)).astype(np.int16)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "wavfile.write('lp/x_in.wav', 16000, x_in)\n",
+    "wavfile.write('lp/x_stp.wav', 16000, x_stp)\n",
+    "wavfile.write('lp/x_ltp.wav', 16000, x_ltp)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9S6xk2bYlCI0112d/zOyc45/wiHvve48sxCsJRAskSogOEiAldKoJBR0QUnYoGogODQSIFh0alIQKJaiUQIMSzWykBE06IBUtBEUnhYrMfO/dGxHufs6xY2Z7ry+NOefa+0TEjXtRvSIuT74kl7ufj9m2vdeanzHHHNO01vBlfVlf1pf1Zf3//6Jf+gK+rC/ry/qyvqy/nfXFoH9ZX9aX9WX9HVlfDPqX9WV9WV/W35H1xaB/WV/Wl/Vl/R1ZXwz6l/VlfVlf1t+R9cWgf1lf1pf1Zf0dWX/QoBtj/i1jzLfGmP/77/m+Mcb8G8aYf2qM+b8ZY/4Tf/uX+WV9WV/Wl/Vl/aH1x0To/wjA3/+Z7/8XAfyl/PkHAP7Nf/+X9WV9WV/Wl/Vl/X+7/qBBb639nwB8+pkf+VcB/G8ar/8LgAdjzK/+ti7wy/qyvqwv68v645b7W3iN3wD457v//wv52t/88AeNMf8AHMXDwf8n37g3MAYwAMg0GAD7vtVSDVIDrOE/Bg1kgNIMWuOfbWCvZAxQ5Zf1ZwGgwaDyP2AMf5+M/GADKgyK/ld+hq+H/zZogNk8XwVQm8GtNNTWMFgCyXvyu+mH3V6/7d5fP2Pll+2vbdD690vjV9LXKu315zS799Hrrw3IrcEZA0/y2miojT//9rtyMfIvfW27e90mr7l/Fvp+ZPqtA6HBme2bfB0Gue0+n3zbGL1H+9dq/bVaM6iN729p6Pf09TXL83z1nPiaazPyqeT/ct/1Opzhf7Qmn9Dw867bE+vPW99Dr0eXNlUH4vtgd9e//1z6jPU56vOprYGMkXvNP2hNexVV6Z4u1WzPxTR5722v6nPQa9jfx/2rGfnMei/2905/s77aA9vn6N9/dX8g95d/jwz/ae0H+0j3iVz3D89g38M/2BP9ffrz2c6jvkdpfB/067m9Prt63vQM5/57/J6eDO8vav2M8vnhfaefaf98q9xXtUG6ojwnL8+JsF2j3ltguza9Pv0sejb3r6nvsdm4/QkAvk3fft9a+wo/sf42DPofvVpr/xDAPwSAb8LX7b/2/r+MtRoM1PB+LLCGDUSqBtdCyNVgrXpzt81fwZs0yQYdSDc9f3BHDWRaNyCx8IaK1cjh5tdIlW98rK+NOCAGAPwgyACB+PVi5c30GPnvB99g6fWG9NQw2obSgKXwcWWD2uCpIRbCUoClGqwFeDc0HFztB/lTJMQKTJavw4kB8aahNIPvVwNHwJ3n97hkA2eA2bX+PrEaLMXAG74+L/dvrQa5bp+ryGlNjQ3NQyj8OYvBUg0+rnzPZsf34OAacjV4yQazazi5gtQMUuXfJ9NwyYSlGIyW75kaIS836Vr4AHniz/N55edh+73mP0fX+j3TA1WaGH/ws98bOH2GtQG3wtd97yssNZRqUBpfNxl+b/15bxqCbUjVIFaDczK4ZODggNFuBjNWvm4rv/8QKrxcX27A9ysfp5H4d3TPnRPvFTVC1gCOeB8OtmGkhqfEe0LfTe+Fk79JjKR+zkHew9J2fUmfOfGzKo2/5uTZF7lvVs5Ga7z/P0U+A4H4fQ+Oz6GeKT2D3gBrBZ6TQSC+N/pH7506h9o2h1kbsBT+/IPFj/ZqawZL4T14yXwdb0ODo4bZVnHy22tfZX95avCE7jCTGNdAbBBTNUgV+LiyQzx4vhee+P7dh4rSDG6Fn9F3S8NggUAGb0PDm6FuQUrlQOXgat+3vM9Izjnvt6/HDGOAWyYYA4y2ojX+bEshnBOfk0HOhjUNS+F7N9mGgRomt+0rY9hexGrwz68OSwH+0cd/4//9+2zs34ZB/ysAf777/5/J1352tQbkxsbcktx8iaqumfAYDQYLTLYhV2BpBmvhm+rkQKTKD/LgeKOM1GCpiRE1CMQPcq1sDEZig/b9anoENVngXahY5TDkBuS6RQF3HtuGBV8nAHwzVnEc/DqfVgsywJ1nC7kUg0smfLs0BDLyOmwIgq0YLHCU92jN4Faob9iBGgYCgmVH4HaH1qJhdqa/h6eGr8fC9xS8+dRweHFKpQLe8uE/2QZCw1OyiNXg4CqsAWbLDtXLeyUyODSDkehVhD9QBRmDsRGsachiaNXIWdOQ5DUOsjGzRIix8GupsTkn/t37wNHROfEzveYGawwu2WCwvNHVOATiTb9kg2sRQ1D5NUZqCHKPZ8fP4ZIJKRMume/JycvnqwbWNNl/fE8gxme0TYIENmTXzIbhq7FitvuYnZ99kkOt0de18KGd5d56iSL54AK3DNwK7937YADH7zNaINbNQNUmN16MijXAu6H2+0y7a7gWwjkRfndrOHqDdwM7tWtWB7KFHAdxzpwFNdx5fo3nxPfzJtf/JvDnWeT/o68YLb/eQByEGAnCFhi0RjgXwjXzfZ5cg6cqWafphq+1Lbpnh8nPe6kcnKQKPCUjwRbBms3ZeNJgBdA7rntjtE2cn2QVYvDvPX/uJBG6OjhPnAIYGMy24ZuJX0ONtjcNl2xxyaZfb2nUA0V9Lyv3wprNmRzGxEFnNYhizK+ZHdbJs13ToDOJI/y4suN4MxBm23DnC4KtPaD9Y9bfhkH/xwD+dWPMvw3gXwHw1Fr7Edzyw6UPM1YOqS6ZumcfbcPfO5Zu9EvjqPLkXqfAqaIb7Es2qBZA4aiSjTFvRD6q/J6x8k0dLEfXwUI8KR/uazG4wcDLzw+ySTR1J4mhSjOodTN074bC0WbkYxbESLwJRg4F5HMQUuPIXJfCGxo9jJY34smVHuFWGNwyIe8ivcE2eMNR/LUQHiN1788OBTg64OQrAm3G+sfPgh0dGYNr2aLWCokAq0GSz/uSLKxEgACQq0GwDQdbegSo7xXE8JyjRZSMBODNbA3w67lICiyOd+BI5DkZcarsXBfwc3RGP9fmRN6GH2dIpRmkzPf9kiUjke/dyganjNbA+opUDc6FdtEvR44a8ekfhYH0eVwyG/JS1dnxe1Q1Ks2ADB/21HT3GETig6dQYql8z5wBztmgFuAhtB6FVwC3bOQ9DRh6MSBsn13P018cNeOo8GQwWo4+z4kdRqDNgOjnO7jWs8omEbvuS87atlOX5Plw9gKgGWQA52xxTvx6jrbPVgUeNXJPOJIHatafMz3LGKnhmxF9j+t1DpavUR3dJQPn1FC9wew2GIrvN2eZ+0CiVMlM9vCa/NNTw8lX3DJhrXz+ZlvZYRrgwRecHLBWkmyYs+eT43tsxTasEqzcxM0q/KIB5qPc09nx88nVwBggAd2J3Aoh1Q1Gek4WNhMmV0EAPoycMeDjTx5jAH+EQTfG/O8A/GcBvDfG/AsA/0MAHgBaa/8LAP8EwH8JwD8FcAXw3/hDrwnwJs1Vo7otDR
xt7ZHH2gzWQj1q3n53i6S8bUBGT/sb2MgT+OHog+FUlyEDP5vuTbM4A/X+g6Rj1nCK+5wITxm4D028rxyGBpimsAIfvtIaRmt6umkJuLcbbKBrKQRrDJ4Tb8yDMz06i3nDXQ/OwKL1zZTkJe78do90YwZquPcNqW0R1ZvARukxUvfye8POhpQjoiRQUqp8KE6OoQoDzhCs4PGwW3QTq8E165UQUuUIY7AcmWsUOdkKbwySQA9snPU+NDi51wdXMDfTD9Bzog6rXCSq5boAcPJ8zxzx51gKP3/F79XpHV2DsVuEmGQfKYylUA7AhrXC4NbQg4gGhrbeBIZlKrYgIxb+98nXvoc1EwHY2VUYnOU9NZtbCmdtui+D5X3PhtigVMMZo6Th3jScm54D02GpAoOXuL22PmNjGyYDoGdKjBsr/Fbkec+udbjJmi3SfQDfd2d4z/32xp9Ho/OD4zP1lKxkmHw/H0LFWaAjxaDXQkhtywhOvr6CS6/FAGUz+qNtGOSRFIGlrJxjhXUMGBZpANayZb5l9yw
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZoAAAEeCAYAAACzJ9OtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAVyElEQVR4nO3dS28kWXre8ec9Jy6ZTF6K1aWamsaMpYYg22NJEKTRQjAsbwRo4y9gw/BSn8qAoW+glfbaCtDGlmzAtmSNZjTTM9PqupHMS0Sc83oRLBbbqInIKDlDqMD/12gwWRkkX5JJPDiXeI+5uwAAOJXwT10AAGDZCBoAwEkRNACAkyJoAAAnRdAAAE6KoAEAnFQx9KRZ8Qv3Prt39v+/HADA0jCiAQCcFEEDADgpggYAcFIEDQDgpAgaAMBJETQAgJMiaAAAJ0XQAABOiqABAJwUQQMAOCmCBgBwUgQNAOCkCBoAwEkRNACAkyJoAAAnRdAAAE6KoAEAnBRBAwA4qdGgMavmqAMAsFCjQVPGp3PUAQBYqNGgqYuLOeoAACzUpDUaU3GqOgAACzUaNBfxxRx1AAAWajRonvnn798xRjQAgGlGg+bSNw+Pz+vvnrQYAMDyjO86u78khAvV8fLkBQEAlmU0aNYhvn9sVyctBgCwPKNB81ndr8t43qn29ckLAgAsy2jQVPdXuFyf5c9OXQ8AYGFGg2bzsNEs69JWp60GALA4R9+wGcK57JSVAAAWaXzXWXBJUgwr1YFmzwCAacZ7nQVJiqripSqCBgAw0WhytC6ZRZVhrWD0OwMATDMaNNmlYGtFK3XTJbm6OeoCACzEaNDskyn7Qa48Rz0AgIUZDZq7TnLfa9e9UnKfoyYAwIKMBk2T+3Bp041WbAYAAEw0vr3Z+rtngtWMaAAAk40GzeF+RFPElW4zGwEAANOMBs0u9UGTvdNOzckLAgAsy2jQvOr6cKnCRi6mzgAA04wGTbrf1lyEleLxrdEAAJB0RNC8C5c2b09eDABgecbXaO7XZdq809Z2Jy8IALAs450BbC9Jaro3+pn93ckLAgAsy2jQ2P0lnncqVJ+8IADAsowGzdZuJYlmmgCAjzIaNG/11f2jqEZsCAAATHPEDZuv7h8ludPBGQAwzfjBZ/n9TrMm3560GADA8owGTRU2D4+7fDhpMQCA5Rlvqplu5qgDALBQk3rKcMomAGCqSUFj9DoDAEw03lTz0bpMl2lBAwCYZjRosr8/gybl/UmLAQAsz2jQuLdz1AEAWKgjgubRlmZu2AQATHTE6v77S1yMbgAA0xzRvdmmXA4AwDeMB429PxqA9RoAwFRH7DqjYzMA4OMdMRfmD4++OY0GAMC4iS1oOPwMADANq/sAgJMiaAAAJ3XErrPVHHUAABbqiPto4hx1AAAWanzqzJhdAwB8vPH7aDL30QAAPt4RazTlHHUAABZqNGhi2MxRBwBgoSYdfAYAwFSs9AMATmo0aIJVc9QBAFio0aAp4nqOOgAACzUaNOflt+aoAwCwUKNBU+lsjjoAAAs1GjQ36adz1AEAWKjRoCkDIxoAwMcbDZqzcD1HHQCAhRoNmgsnaAAAH280aBLHNwMA/hHGe52pmKMOAMBCjXcGcLrUAAA+3nhTTctz1AEAWKjxFjTO1BkA4OONb2/2973OjAabAICJxtdoZO8fGw02AQDTjHcGeLTrLARGNACAaUaD5rp4Hy4x1NKjEQ4AAGNGg+ZxrLhnSX66agAAizPpJpkqbk5VBwBgoQaDxlQoPBrSrCN9zwAA0wyPaKyQPZo8q+381PUAABZmMGjc998c0fhGwZg+AwAcb/wo50dJE1XIlU5aEABgWYbXaGylq0e3zmRlmZWnrgkAsCCjI5pVeLed2XSZLxVoQwMAmGCkY2ZW5yZTIVlQrVKBEQ0AYIKREU1QYa4YL+TeqpApeztPZQCARRgMmmC1skxmhSRXlpTzfp7KAACLMBg0q/KZovl96xmptigZJ24CAI43mBpl6I8FsPtwiWaKYXX6qgAAizEydRYlSaYoujYDAD7G6DxYcru/STPoSRnvjwoAAOA4w1NnttZFkVWEtaSkJxWjGgDANINBs0uvZNav1cRwpWhSyoe5agMALMDgDZtFWKkMrlW8Ulvs5JLcu5lKAwAswejUWXbJFFSEWiY9bHUGAOAYg0Gz0bWCSUmtsmftk5Tz3Vy1AQAWYDBo9rrVPpl23Ws16a02hVSVz+eqDQCwAINB0+kgd1PyfgPAKvrDzZsAABxjMDWavH1oQbMqnqjJpra7mas2AMACDE+dpddKbgpWKFqtIFcInEcDADje6DxYEfrpsjbvFJk1AwBMNHgfTRU2itbfR5O8VZuN7c0AgEkGxyht7m/SzJ5kCqqCq0svZyoNALAEgyOapnujJvU3a0aVqoPLrJY7h58BAI4zOKKJYa19kiqdSZKypCJezFEXAGAhhs+jCYWq+P79Jtt9J2cAAI5z9D6yqFL7ZMo01QQATDAYNKagJkmNtiqtVptpqgkAmGYwaFJu1LopqlTptZosWtAAACYZTA1X0jq6XFkH2+mudQ4+AwBMMhw0nlUHV6FarqwqmoKVc9UGAFiAkaDplNyU1Cqq0CpK6/LpXLUBABZgZHtzpUPud5xJ0tvG1TF1BgCYYHREUwf1O878oG3K6vJurtoAAAswvL3ZClUxKyur0VbBjM0AAIBJRjcDtNkUFB6mz9jeDACYYmREExQkRS8VFNW5y+kMAACYYKQzQFQRXK0dlNTK3RWsnqs2AMACjN6wWQZX54f+4DN3FXE1V20AgAUYDJoqXiq5qbD+hs1oJlMc+hAAAL5hMGia9Faluc68P4OmDmwEAABMM5gcZdzosuwUFGQKMvXTaQAAHGs4aMKZgrmCB8X7HmdNdzNLYQCAZRgMmrvmSx1yUFBQk7dq3ZW9mas2AMACDAZN9k5B/QFo2fvtzQAATDHcVNMKBXMl9TdpliFInLAJAJhgMGiKuFERsjrr5OoDJvvdLIUBAJZhpDNAUMpBrqwm3alj6gwAMNHwiCbUanJQFtNlAICPMzqiMXPVvpJZYDMAAGCykc0ApQpzXfq5otWq6AwAAJhoMDnOwrXaHNSpP1nzqiRoAADTDHcGsFp3XVC6bzsTyRkAwESj0WEmBQUFK5RZogEATHTUGCUrqwxrVYxoAAATjUZHk02ttWrzTizRAACmGu51pqyUTV/bT9UmujYDAKYbDJqD3+quMzXaShIjGgDAZIPRsUuvJEkru5BZMUtBAIBlGQyaNu/UuimqUMp7bbu5ygIALMV4CxpJe7+Ve6sdpzgDACYaXXUJJkWVimF93F5oAAAeGcwOV1aQ60rPFMNaBUkDAJhoOGg8y0xK6uSeRWMAAMBUo0FTB+lgO2U/aJ+IGgDANINB0+U7mbnO/EIxrHVgMwAAYKLhzgD5tr/Ig2KolDj4DAAw0cjyfpC7qbXm0RqNzVAWAGApRoIm6XVr6tTfqbkKhAwAYJrRDcvRpLWfyZX7D7CzkxcFAFiO0a
CpQj9hlvJBrbtiJGgAAMcbDZr6/opgpV61rdxpeAYAON4RRzm7OutUxY1cri69nqEsAMBSDDfVtEptNrnyfYNNk+gPAACYYKQzQKOzog+Z5J3OY6Fgm7lqAwAswOjUWWmutZ8pWFAVgjb153PUBQBYiNGguSyT1l6pCufq3HUen89RFwBgIQaDJthGwVxZrmilblOnwuq5agMALMDIGs1Br5tChaI6P+iqKBQ4/gwAMMFwalhQNGllhQqrtY5BpjhTaQCAJRgMmiJe6UnVqrag2s4lSZ0OsxQGAFiG0YPPNkWnzl21r9VlVyHWaAAAxyuGnsze6K4rdOMH1Vqpc6nx7Vy1AQAWYHiNxvuOzaX6TQB1lKKVc9QFAFiI0V1n66JTbVG111pH05lfzFUbAGABBqfOYrzSZdVoEzeydKYySHf2Zq7aAAALMHLDZqnz6qB3B2tml5K3c9QFAFiI4aAJ/YCntD5pysAaDQBgmtHtzW/2K62iySVtCo4IAABMM7K9uVWb+04AW2/1pDKmzgAAkwwffKa
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 511.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZoAAAEeCAYAAACzJ9OtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAXfklEQVR4nO3dS49sWXqX8WetfYmIvJxbVbkad5tu1IBBboRAYsIAiTmfAT4ZI8ZMkb8DE0zbHtk0st3u7uquOpfMjMu+rJdBZOU5hd2xIwy5EVvPTyqdk5kRed7MjNS/1l7vfleKCCRJei75/3UBkqRlM2gkSc/KoJEkPSuDRpL0rAwaSdKzMmgkSc+qPvXBlOrf2vscMaT/++VIkpbGFY0k6VkZNJKkZ2XQSJKelUEjSXpWBo0k6VkZNJKkZ2XQSJKelUEjSXpWBo0k6VkZNJKkZ2XQSJKelUEjSXpWBo0k6VlNBk1K7Rx1SJIWajJomurNHHVIkhZqMmherL4/Rx2SpIWaDJpXfG+OOiRJCzUdNOU1UB0fnG+fux5J0sJMBk1Fpq5eAfBm84+fux5J0sJMBE3ie/U1bX1LU3/BdXpNlV/OU5kkaREmgiZzU2c21Wuumi9YxzXr5vN5KpMkLcLJoKnyDV0JbvJnNHnDm/KKJm/mqk2StAAng+bl+ke8ahNXcUuJgUJQop+rNknSApwMmjF6XrdBEy11WtFQk1I1V22SpAWY7Dq7rgsAr/iSz+sVdVo9e1GSpOU4GTRDOZDT8e/ruOK6zlzl13PUJUlaiJNBU6JnnQt1VASFdQUVzVy1SZIW4GTQrKoXvF513LBhl7bkBC/Kq5lKkyQtweQezSoXRgqFwmeroA6bASRJ5zsZNLvhG9bVwDrVjKnnth7pk+3NkqTznQyafryjkOijcBW3pAQV9Vy1SZIW4GTQRPS01ciBniZaMrBN9zOVJklagpPLk7Z+Q5sHrriBgE09sor1XLVJkhbg5Iomp5qu1NQps3psa34RN7MUJklahpMrmuA4FaDNmVKChyGzYztLYZKkZZhsb94PNX0pjFGIgEM6zFGXJGkhTq5ouv4t933DN7ElkeijIU9nkyRJTyYunY30JT+tYtb5ivJ4OU2SpHOcXJ6k1JBT0ERDIjMGDGmYqzZJ0gKcDhoyJRJ96tnEioehYpvu5qpNkrQAJ4Omrq55GCoOaU8h2I6JJjyPRpJ0vpNB0w3f8K6vSGQ2tACU5B6NJOl8p1vIYmCMxCauaFImH9sDZipNkrQEp2edMdKkYGTgECOAl84kSReZGEFzxb4kutSxp2M7JkaPCZAkXWDiPpqe/Zh4SO/JOfPV/pYR25slSeebOCagA6BmxYE9TUqM4YpGknS+yXkyhxHaaBlTT5WhC4dqSpLONxk090OwSw8kKg7jx4nOkiSdY/Jc5och6GJLpiIItsPXc9QlSVqIyaDpS2GTXjLScyjBWDwmQJJ0vsmgSSkBMEZPEM9ekCRpWSb3aLbjwG15wba8JaXEONoMIEk632TQNCnTpY7D+IExXNFIki5z1nGZD+k9Q9kBUFVXz1qQJGlZJoNmH+PxXJrSMUYQYXuzJOl8k0FTCG7iJVXesIuB8jgtQJKkc0wGTRA00TKWHZlEOIJGknSByaDZcSAo1NU1hSAnjwmQJJ3vrGaAIQ3kNHnLjSRJf8N0ezM1ObIzziRJfyfnBQ2ZVfUCgLq6fvaiJEnLMX1MAD1DGijRk0nOOpMkXWQ6aFLHNt2x679hpJBTM0ddkqSFmAyaNhq62AGFh7RnVd/OUJYkaSnO6joDGB9H0HiUsyTpEmc0A1TUaUVKDTkS/fgwR12SpIWYDJoVNZu4ZlW/5pAOjGU/R12SpIU4Y9YZFAopZSpqIuw6kySd76zzaIY0ABAUEnadSZLON93eHCN11CQqhjSQHEUjSbrAZNDUJPp0YIwDhULO7Rx1SZIW4oxLZxVVNE8TATz4TJJ0icmg6WPkA18RMTAyEDHMUZckaSEmg6ZKmRIDkB+7z9yjkSSdbzI1mpRp2NBU1zS04HEBkqQLnLFHk1inG1LK1FGf8xRJkp5MpsaLpmJkoM03VNROb5YkXWR6BE0Fu/Kevuz4kN8RjHPUJUlaiMmg+eZQGKNnKDsOcU+dN3PUJUlaiOkVTU5UqaFET51Wjx1okiSdZzJorptEV+4ppTvOOks2A0iSzjc9giZB/3jo2T7uaLx0Jkm6wGTQjAGJTFDYDe/sOpMkXWQyaHLiaV+mciqAJOlCk8lR4vjnt6uaftw9d02SpAU5a2c/4uPxAF46kyRd4qwbNnOuiShcVZ/NUZMkaUHO6jrLqabKK655zaZ+NUNZkqSlmAyaIeCq/ow6b6ipyVRz1CVJWojpoCmQqSjRk8gkpzdLki4wmRqfr4JNevF0TECbruaoS5K0ECeDJqWWlII+DiQqhjRwiPu5apMkLcAZkwESQWF4HEPzof/rZy9KkrQcJ4MmYmQM6Nmzqd9wW27nqkuStBAngyanNYnjCJoqNaxouK6/mKk0SdISnAyaElvq9HH8TEvFVX49V22SpAWY2KMJ2qpQpYZEZiQY4jBPZZKkRZgcqrnOQUUDCUrEHDVJkhbkZNAkapoc1GkFAVeppnKopiTpAqe7zgiGgE1cH5sBcqZmNVdtkqQFmLhh8zjXrImWLrZUKTHSz1KYJGkZJtqbN0QkAIY4MEawLW9nKUyStAwng6auro9dZ49bOW3O7tFIki5yMmiq3BKROKQ9Y/TkBKt0M1dtkqQFOB00qWEIyBxXMhkIykylSZKWYGIyQKErHx/yok3PXpAkaVkmVjQ1bS400TKUPS8ab9iUJF3mZNAcxjvqBKto2VSvaTKUGOeqTZK0AJPNAG0u1GTW6Ybe7RlJ0oVOBk2bbxgf76NpYkWVYMChmpKk850Mmk1+SUpB4bg3sx8TXdnOUpgkaRlO79HEPd2YeZff88A7rurgZfW9uWqTJC3ARDPAPWMk9ukBgE1VKN5HI0m6wMljAkr0VCmoomGVKrqS6MJLZ5Kk850MmsPwljESNTUpMhmOh6BJknSmk0GTU01KQZc6gpGUID8eHSBJ0jlOn7CZajKwijUjA11J9LGfqTRJ0hKcPmEzBgAO6RguTQp2o+fRSJLOd3JFM4wf6EqijZZ1XLGuCk2+mqs2SdICTJywueZQMjfl9ul4gE1+OUthkqRlOBk0pEyTP05s3o+ZNPEUSZI+NZkaJWBII4nMdvQ8GknSZU4GTSLTlcTIwCpaupI8YVOSdJHJrrMxEhU1NZm+wBj9XLVJkhbgdNBwHEHTp46ekf0IIwaNJOl8k3s0V4+DNLvU02Zok+3NkqTznQ6aKPRx3KOpo6LJsI7rmUqTJC3B6WaAvKEbM6tYA3DfQ588YVOSdL6JZoADVQoaWvrUcz8EXezmqk2StACTezR1DkYGHtI9dYIeh2pKks43GTRDSTTRUD+ORfM8GknSJU7v0aTV8QwajqNnCjCEezSSpPNNdp01OUgcO8/GCOq0mqk0SdISnDUh85
A6hnQ8m8b7aCRJlzh9TEBeMz4Obx44UMIRNJKky5wMmrHc0ebgOtZU1FzVicZLZ5KkC0wcfLbhTftxBZNIVGHXmSTpfCeDpq6uSel47ayKhjGCe76epTBJ0jKcDJpD/ysehppMZs89fbG9WZJ0mYmus5FVLlRkgsIQ4VHOkqSLTJywWZOA8fFUzXVOVMk9GknS+U4GTVW9pMqFgZHyGDd3/S9nKk2StAQT18EKY8mMFPrYUQKG4vRmSdL5JoImsy+ZLh1bnIOgyt5HI0k638RQzcwqF9poqNPqaUqAJEnnOhk0TXVNlYKXaUNFQ5sTXf92rtokSQtwMmjW1UtKJPoojPS8ahMk25slSeebaG/O3A0VhaCioUqQUz1XbZKkBZjcozk+KB3/TFDlzfNXJUlajJNBU2IgPz6oTRuq9DF8JEk6x2Rq9JHoKYwMNClsBpAkXWRyj2aMxLv0gW15SyERePCZJOl8J4NmP37grs98yG9JZNocJA8+kyRd4GTQdOMHAJpYPQ3TXDefPX9VkqTFOBk0EQP
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 511.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZoAAAEeCAYAAACzJ9OtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz96bck13XlCf7OHWzy6c0xBwCCACdRIqVUKrMqOyuzKmv1l1pd/Vf2H9DfenWvVdXd2aszlS2lRIniBBIEAjG+eJOPZnan/nDNPSKAwEASoQyScbQkhIAX/szdze45Z5+995GUEm/iTbyJN/Em3sSrCvXf+gLexJt4E2/iTfxhx5tE8ybexJt4E2/ilcabRPMm3sSbeBNv4pXGm0TzJt7Em3gTb+KVxptE8ybexJt4E2/ilcabRPMm3sSbeBNv4pWG+aL/KGL+oLjPdXGXffsWXVqy8qfE6PFxBUBp9in0mFX/GB/OEKk4qL+DiOKy/RU+zAEQBGuO2C/fxkrFKM1QKM64zyZc0PpLvL8AIJGA8DtetVDaGxhVs+kfEdPq5T8lFZU9YWSPmXJCy5JleMLGn+P86e94DZ8No/fZ1ikhrkip/dp/x+8aRh/+BtemEbHDnyMp9a/y0v6bh0iFVjWFmRFiT+cefOHPKzXhev0DxuyxkRVdWtLFJd3wXGgpMarkHfkz9tOIJ3LBqdxDUBTSkIj0aU0f11xu/vGf4y2+if8GkZKXl/37L0w0f0ihZISIIuDwqcOHDSl5YlwDgU2/YEM+QK+N/hUaS8WYKJHeLmmlpPePSSR6/4iz5DC6prXX0Fjm7gG9u0DEoPWMEFfwtRy+iRA7lBhS6j7/p1JL2z9AiWVk92njnEX7SxIB0MNP/a5J71mEuBl+sSfhv7bX/V1DqxlNcUJMkZhc/h6+UgRS+vo+n9c9UmrxoSXEDYJGqxkxdZ+blGNc4OkQFBqLodzhIZWa8s34HSoxNEqjRcDvYymYyyVP/a8AGOkjxvqItblO7x/9c73VN/EaxB9Nokmpw4UVG7lg1T8ixKuX/pwPFzxe/Se0mnFS/wmaXOVqVSDo4VDVNPYIJZaYPC5taPtPhl/09V97TD0xlUOH9PlhzRGlHuc/S01d3ibEHh9WOal+TjcEIFKg1YSYelLqECxKVUAkhMVnksnr2MEAhDhn3YOIwegaJSXhNb3W3z0EJQ0xrflNbzzBIENHAxCjh8hLPyvBUNgTxhxSpopOWgIOAKtqrFT0eCQJKgpWhJ5AJy0KxUgfEQkEHC619K+gw34Tr3f88SQaPM6fo8Qi8uWjqRCvWMcLjJT0cZU7oN1hG7CqoZCGUvLB7so1bX/6Wz30XyVicnxZR5KToSISCTh6v8iJI24+t7MRDFpPaOwJhR4Tkyckt4MQ8+vO0KoeDojXvepPuyIixtHwffwhhkZJhTUzYmpw/pyv+t2IVBTmAK0KrGrwsaOPV8S4fOnPV8VNGnNISUWRLGWqCOIIyRFwrOMF/7X/ewDerv47rsUTPtH3eNL/nFJPOVB3iBK5DPfp4/IrX+eb+MOJP5pEkyPg/NUXVvYATfk2RkqsqvGxe9atPBdtmIOGCQdoDGNzgqBxYYWPK1JyX1vVn1+r+NKf2/Qf4+MGKRVrf7ZLFCCAQsjw6fNpMOHx4YKNGEQ0G3eOC+cvzCgSEatHVGaGiKb1l/T+HIjDD8TXCj4Tef6z+oMaMz4XgYQjxA4fzvlN3mdK7W4mI1KQkvvCv7/pP6Z1jxiPDimlIuB3ScanjlLGfKf8D5SpZBIrSqW5He4wNjM2suYqPYIEtZpRqxkurF7J3PBNvL7xR5Zo+MI5x7OfiShlGck+UUfa4ja9vyLGxe5ntJhdZxSJrPwZm/4RCfe1H7zyFbsw0GhVYqmwqkGpCZCJDkosLqyIqcOHi8/8TR+ucHo0JMkXB+FKDCF1aDEYKUkpvrbQWQ6FkhKtakI0nwuT/iGEEotS4xfuzWchfFkC+iqkB6UmWD2hTDUqCVoMVkpCcri4RmvLODVUWBptsCKEZPFpBMBKShKRQurhqvQX/bo38QcYf3SJxpojYuoIcTNUcp9t43t/hRJLpUYUqcDa79MXGz5Z/j/ZPrhWNRQ0RCIRT0yOhBsGyl8ODeSqe0geXzJQl6/IQi/tNUbmmJopohSpCCQiWkqUaIwqSSmweEky0WqCVQ26sIR0yLr7aPdefbjAhws6KVBSo1RBVdzO1bS/eK26GcgVe0gt1kyI6Q+Vwa8RcgGipNj2lp+Kz08y2xmNUSNC3HxhMq7sIbXep0wVFoNJuftV6Nzhhit+lP4jWgzfiH/OIWPOWPJUP8RQchLvEIks5YpNmv/GRADBvHb32Jv4zeL3NtEIBkT9hjRUTWVm9HGV2TaiAZsrrKFj2B7qiYBCISgUCo2htDfo3AOUmuwYbGvmJCKb/h6/EUyTIshXqSgFpQrUjnr7eT9l0KpAi0WRr23jz0kpUpo9tBhC8iTCS39nCFe4OMkMJLE05VvE5EkDeyulfJSJKEJYv8bQh6DVFBFDSvE3YJ3lmUdK3TDPer0hN0FIOHr/dCiYnv9vX34wJ1IGVJUhURBenqkA2HSfEGzPpriJTRYnPQFHHAoqJZZa72OlogwWLYJKgqBIAykgEpH8FP3G7zW9men83sfvbaJJ+N/oLFBqQmn28/BzB58ptKoBRQhXJDxGH7JXvU1BA0Arax76n9L5c2LyiFQoMWz8JUaVTPQJ8VPXUZjrX1olftXrFzRWj7CqIaWID4uXzpgSntadYVUD6hiFRokdOhozkCC+ALIQ9Rk9RVO+nbugoV7uw4oQuy+dcf23DEFT2v0hQfrfgLIcXuv39enY3j+C4dM30ler/gMxLuj6DSIlRh8CPDfXe/H1ev+UtlxRS4PHk4gkIoKiVjPei9+mUYbKKLQIyY9R4SYLteCMPOOcphMapjTl26y7e3x1UsDrnfTfxJfH722i+U1jO5sxUg4HUJ4xNPYtap11J33IrJuVO6VXNcrkg1mLwegRvb8a/t6ImblJxZhp3MMkTTUas2FOl5a04QqlDBLM7yxmTHhiygeH0RUiis61fPYhFUbFdRq1jybPUko9QUTTqH2M5EpSoYiVY9H+Yvf3csIdDbOpycBSS59zGLxUj/XaRMKz7j5CSYM1M5RqPmd+8YcRv2u1r9QIEYPVIxIR/9KXE7SaoIfjIkokpNzRxORwtFzInE2s2E81tdZskmehFiQie1zPTEhx9HhC/MMWw76Jz8YfTaIBhVEllYyxxTfp7U00lj1OsKnI56eBR/IhTzc/wasNlZ4RCazdk8EZID+FIS6wUlKmijJZDHoHV7Xhik2fMeisNP8CTOIrRtbBxC9Ub5f2BrXe3w3r1/GCefszQFON/pJ6GMwCuK3YEmBwL3gZQeDFyJCUVjUxdcTYDzOp1/HQSMS0IqbmM7DS58V2ZvZ5c7vXN367al+kGkgmhhhbNi/pZD79O+LwP/A8xBzpwpyP+BFGVbyb/hQTG5ayYc5TCqmZxUMSkU
s5w6WWEDt+vz7jN/G7xh9Noklpy/v3xOfglI6WIH73EI05ZFz/GxQKS4GjZy73gWe2HaU5IBLpaJkrUEmx4pIuLtFiqewJnTv7WippwVCYGVY1hLjBh0tedrj07gnnsWNa3qaUOmt87E1EspLb4wniiAM54PN+28teW6sZShVoVWYGWsyHjA+vp0Yli09HGFUPcOOXd5SvZ8J8dZFSS0rt55RBmk8nAh/OWMcLxjLDJAPSQIKoPKWMuRvfpU6WmSkolHDdTxnHb7Gm5Uw/IRIpU03NiLU9Y5X6P+hO8028GH80iQYirTvjdFDK5/mJ0JRvYVXDxp3j4wol+UCtzQF35XuM0oip/R+JReJMPWYeHmFVpmkGcXQJgnLM3SNa9xSjRxR6RNATev81YP6iKNSYRu0zqobK0N1j03/8wo9tcXnZEhdkzMgeIygmHGSRXfIZ9jA3B5bcs2NGUHnQH3tC7BBRgxNCyHMhv8q/QRRZk6N5XbHzlHpitBibYdKXw0F/yKGzZkrUl7Ig88yxxJpMhc+ase3
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 511.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "for y, name in [(x_in, 'x_in'), (x_stp, 'x_stp'), (x_ltp, 'x_ltp')]:\n",
+    "    spec, freqs, t, im = plt.specgram(y, NFFT=512, cmap='inferno', noverlap=256 + 128, pad_to=4096)\n",
+    "    spec = 10*np.log10(spec)\n",
+    "    \n",
+    "    make_playback_animation(f'lp/{name}_spec.mp4', spec, len(y)/16)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "torch",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.12"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
--- /dev/null
+++ b/dnn/torch/osce/stndrd/presentation/playback.py
@@ -1,0 +1,25 @@
+import matplotlib
+import matplotlib.pyplot as plt
+import matplotlib.animation
+
+def make_playback_animation(savepath, spec, duration_ms, vmin=20, vmax=90):
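+    # Draw the full spectrogram in every frame and overlay a moving white cursor
+    # marking the playback position; the first and last frames omit the cursor.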
+    fig, axs = plt.subplots()
+    axs.set_axis_off()
+    fig.set_size_inches((duration_ms / 1000 * 5, 5))
+    frames = []
+    frame_duration=20
+    num_frames = int(duration_ms / frame_duration + .99)
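+    # adding .99 before int() rounds up so the video spans the full duration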
+
+    spec_height, spec_width = spec.shape
+    for i in range(num_frames):
+        xpos = (i - 1) / (num_frames - 3) * (spec_width - 1)
+        new_frame = axs.imshow(spec, cmap='inferno', origin='lower', aspect='auto', vmin=vmin, vmax=vmax)
+        if i in {0, num_frames - 1}:
+            frames.append([new_frame])
+        else:
+            line = axs.plot([xpos, xpos], [0, spec_height-1], color='white', alpha=0.8)[0]
+            frames.append([new_frame, line])
+
+
+    ani = matplotlib.animation.ArtistAnimation(fig, frames, blit=True, interval=frame_duration)
+    ani.save(savepath, dpi=720)
\ No newline at end of file
--- /dev/null
+++ b/dnn/torch/osce/stndrd/presentation/postfilter.ipynb
@@ -1,0 +1,275 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "import matplotlib.animation\n",
+    "from scipy.io import wavfile\n",
+    "import scipy.signal\n",
+    "import torch\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.rcParams.update({\n",
+    "    \"text.usetex\": True,\n",
+    "    \"font.family\": \"Helvetica\",\n",
+    "    \"font.size\": 20\n",
+    "})"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def load_lpcnet_features(feature_file, version=2):\n",
+    "    if version == 2 or version == 3:\n",
+    "        layout = {\n",
+    "            'cepstrum': [0,18],\n",
+    "            'periods': [18, 19],\n",
+    "            'pitch_corr': [19, 20],\n",
+    "            'lpc': [20, 36]\n",
+    "            }\n",
+    "        frame_length = 36\n",
+    "\n",
+    "    elif version == 1:\n",
+    "        layout = {\n",
+    "            'cepstrum': [0,18],\n",
+    "            'periods': [36, 37],\n",
+    "            'pitch_corr': [37, 38],\n",
+    "            'lpc': [39, 55],\n",
+    "            }\n",
+    "        frame_length = 55\n",
+    "    else:\n",
+    "        raise ValueError(f'unknown feature version: {version}')\n",
+    "\n",
+    "\n",
+    "    raw_features = torch.from_numpy(np.fromfile(feature_file, dtype='float32'))\n",
+    "    raw_features = raw_features.reshape((-1, frame_length))\n",
+    "\n",
+    "    features = torch.cat(\n",
+    "        [\n",
+    "            raw_features[:, layout['cepstrum'][0]   : layout['cepstrum'][1]],\n",
+    "            raw_features[:, layout['pitch_corr'][0] : layout['pitch_corr'][1]]\n",
+    "        ],\n",
+    "        dim=1\n",
+    "    )\n",
+    "\n",
+    "    lpcs = raw_features[:, layout['lpc'][0]   : layout['lpc'][1]]\n",
+    "    if version < 3:\n",
+    "        periods = (0.1 + 50 * raw_features[:, layout['periods'][0] : layout['periods'][1]] + 100).long()\n",
+    "    else:\n",
+    "        periods = torch.round(torch.clip(256./2**(raw_features[:, layout['periods'][0] : layout['periods'][1]] + 1.5), 32, 256)).long()\n",
+    "\n",
+    "    return {'features' : features, 'periods' : periods, 'lpcs' : lpcs}"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "lpcnet_features = load_lpcnet_features('lp/features.f32')\n",
+    "\n",
+    "features = lpcnet_features['features'].numpy()\n",
+    "periods = lpcnet_features['periods'].squeeze(-1).numpy()\n",
+    "lpcs = lpcnet_features['lpcs'].numpy()\n",
+    "\n",
+    "x = np.fromfile('data/critical_3.pcm', dtype=np.int16).astype(np.float32) / 2**15\n",
+    "x = np.concatenate((np.zeros(80), x, np.zeros(320)))\n",
+    "x_preemph = x.copy()\n",
+    "x_preemph[1:] -= 0.85 * x_preemph[:-1]\n",
+    "\n",
+    "num_frames = features.shape[0]\n",
+    "x = x[:160 * num_frames]\n",
+    "x_preemph = x_preemph[:160 * num_frames]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def stp(A, alpha=0.8, beta=0.5):\n",
+    "    A_num = A * (beta ** np.arange(len(A)))\n",
+    "    A_den = A * (alpha ** np.arange(len(A)))\n",
+    "    \n",
+    "    return A_num, A_den"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "frame_idx = 31"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[<matplotlib.lines.Line2D at 0x7ff3342887f0>]"
+      ]
+     },
+     "execution_count": 11,
+     "metadata": {},
+     "output_type": "execute_result"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAA1Q0lEQVR4nO3dd3yV5f3/8deVnJM9TiaQEAhJ2EtCUAS3Ea2rDtx+62hFq9bR1lprtfZXO7S21iqK1Lo3VKrVagUHslQSQKYQEghk771zrt8f9wnLBJKck3Of8Xk+HufByck55/6QnLzPda51K601QgghvE+A2QUIIYQYHAlwIYTwUhLgQgjhpSTAhRDCS0mACyGEl5IAF0IIL2Vx58Hi4+N1amqqOw8phBBeLzc3t0prnXDk7W4N8NTUVHJyctx5SCGE8HpKqcLebh9QF4pSar5Sankvt2UrpRY4U6AQQoiBGVCAa62XHvq1Umq+4/YVjq+zXVeaEEKIo3F2EHMWUOC4XgBkOvl8Qggh+snZALcd8XXckXdQSi1QSuUopXIqKyudPJwQQogezgZ4HRB7tDtorRdrrbO01lkJCd8ZRBVCCDFIzgb4eg62wtOA5X3fVQghhCsNdBZKNpB1yODlUiDNcbutZzBTCCHE0BvQPHBHQMcccdujjqumhPfOskb+tmIX3z8umXOmDDejBCGEMIVbF/IMhZfW7eXDrWVUNLZLgAsh/IrX74WyOq8KgC1F9bR1dptcjRBCuI9XB3hlYzv7alo4fkwsHd12tpc2mF2SEEK4jVcHeH5lEwDzJg0DoKi21cxyhBDCrbw6wPdUNQNw0th4AErqJMCFEP7DqwO8oLKJYEsA4xIjiQqxUCoBLoTwI14e4M2MiQ8nIECRZAuluK7N7JKEEMJtvDvAq5pJSwgHIMkWKl0oQgi/4rUB3tFlZ19NC2PijQCPjwiiprnD5KqEEMJ9vDbA99e20G3XpMVHABATHkRNSwdaa5MrE0II9/DaAC+oNGag9HShxIYF0dFlp7lDFvMIIfyDFwe4MQe8pwUeGx4EQK10owgh/IQXB3gzceFBRIdZgYMBLv3gQgh/4bUBvueQGShg9IED1LRIgAsh/IPXBnhBVdOB7hMw+sBBulCEEP7DKwO8vrWTqqaO3lvgEuBCCD/hlQHeM4DZMwccICrEQmCAola6UIQQfsJLA7xnCuHBLhSlFDFhQdQ0d5pVlhBCuJVXBviuikaCAgMYHRd22O2x4VbpAxdC+A2vDPDtJQ2MHRaBNfDw8mPCgmQWihDCb3hlgO8obWTiiKjv3B4bHiQtcCGE3/C6AK9obKOqqb3XAI8Jlw2thBD+w+sCfEdpIwCTemuBhwVR29KB3S4bWgkhfJ/TAa6Umq+UylZKLXBFQceyw3Hi4t4C3BZmxa6hsa3LHaUIIYSpnApwpVQ2UKC1XgEUKKUyXVNW37YU15NsCz2wB8qhYhyrMetapRtFCOH7nG2B5wBLHMGdprXe4IKa+qS15us9NWSlxvT6fZsj1OtaZC64EML3ORXgWus64FlgCTCzt/sopRYopXKUUjmVlZXOHI49Vc1UNrZzwpi4Xr9/IMBbJcCFEL7PFV0oK7TW6UCdUmr+kffRWi/WWmdprbMSEhKcORyf7KgA4OSx8b1+PzrU0YUic8GFEH7A2S6UzEO6Tf4IxDr5fEf1wZZSJo2IIiU2rNfvx0gXihDCj1icfPxix+yTAow+8MUuqKlXuYW1bNpfx4PnT+rzPtGhEuBCCP/hVIA7+sCHLLR7fLS1jN+9v52EyGAun5XS5/0sgQFEBltkFooQwi94xUKejftrCbEGsOjamUQEH/09JzrMSr20wIUQfsDZLhS3uDt7HPeePYGAAHXM+9rCrDILRQjhF7wiwEOsgf2+b4xjOb0QQvg6r+hCGYjoUOlCEUL4B58LcOlCEUL4C98L8NAg6mRHQiGEH/C9AHfsSNjUITsSCiF8mw8GuLGcXvrBhRC+zvcC3LEaU2aiCCF8ne8FuOyHIoTwE74b4DITRQjh43wwwHv6wKULRQjh23wuwGVHQiGEv/C5ALcGBhARbKFWAlwI4eN8LsDBaIXLlrJCCF/nkwFuky1lhRB+wCcDPCYsSGahCCF8nk8GeHSYVU5sLITweT4Z4LZQq8xCEUL4PN8McMeWslrLjoRCCN/lmwEeGkS3XdPULjsSCiF8l28GuOyHIoTwAz4a4I7l9DITRQjhw5w+qbFSKhNIA9BaL3W6IheQFrgQwh+4ogV+nyO4Y5VSaS54PqfJnuBCCH/gVAtcKbUAWK+UStNaL3ZRTU7r6UKRueBCCF/mbAs8HYgDapRSzyqlbEfeQSm1QCmVo5TKqaysdPJw/RPj6EKpbpYAF0L4Lld0oeRrreuAXGDBkd/UWi/WWmdprbMSEhJccLhjswQGYAuzUiMBLoTwYc4G+PpDrtuAOiefz2XiwoOobpIAF0L4LqcC3DF4aVNKZTu+9ph+8LjwYKqb280uQwghhozT0wi11o86rq5w9rlcKTY8iPzKJrPLEEKIIeOTC3kA4iKCpA9cCOHTfDfAw4Ooaemg2y4bWgkhfJPvBnhEMFrLXHAhhO/y2QCPDTcW88hccCGEr/LZAI/rCXCZSiiE8FG+G+ARwQAylVAI4bOcnkboqXq6UGQmiuut31vDc6sKiAkL4qfzxpEYGWJ2SUL4JZ8N8JgwK0pBlXShuNRn31bwo5dziAmz0tDWxYZ9tbx3+0mEWAPNLk0Iv+OzXSiWwABsoVZqpAsFrTXvby7hTx9+y/aShkE/T21zBz99exMThkfy2c9P4x8/yGJXeRP/XL3HhdUKIfrLZwMcjG4U6UKBRSsLuP31jSxamc/3F65mxfbyQT3PIx99S0NbF3+5fDqRIVZOHZfAqeMSeH71Hjq67C6uWghxLD4d4HERwX7fhVJS18rjK3ZxzuTh5P46mwnDo7jrrU3sq24Z0PPk7K3hzfX7+dFJY5gwPOrA7dfPSaW6uYNPvx3cm4IQYvB8O8ClBc4rXxbSbdf8+vyJxEUE8/Q1mQQouOPNjXR196/V3Nlt5/5lW0m2hXJn9tjDvnfy2HjiI4L475ayoShfCHEUvh3gEUFUN/lvH7jdrlm2oZjTxiUwMiYMgJTYMB6+eCqb9tf1u+/6+dV72FneyEMXTiYs6PBxb0tgAKeMS+CLvErZtkAIN/PpAI8ND6autbPfLU1fs6OsgbKGNs6dOuKw2y+YNoJ5k4bxl+W7jrljY1FtC39bkcdZk4Zx1qRhvd7ntPGJ1LV0sml/natKF0L0g08HeFx4EFpDrZ+enX51XhUAJ42NP+x2pRQPXzyFUGsgv1i6uc+Ws92uuWfJZgIUPHTh5D6Pc8rYeAIUrNxZ4brihRDH5NsBHtGzH4p/dqOs3l3F2MQIhkV9d6FNYmQIv7lgErmFtfx1+c5eH7/oi3zWFVTz4AWTSLaF9nkcW1gQx6XYWL27ymW1CyGOzacDPMGxnL6y0f8CvK2zm6/31Hyn9X2oi2ckc9XxKS
z8LJ8X1hzeH74kZz+PfrST86eN4PKslGMeb1ZqLFuLG2jr7Ha6diFE//jsSkzgQMuzvMH/AnxLcT3tXXZOTIvr8z5KKX574RQqGzv47X+2s35vDaeOS2BtfjXvbiphTnocj102HaXUMY83c3QMz35RwJbiemalxrryvyKE6INPB3hilNECr2hsM7kS99tcVA/AcSm2o94vyBLAs/83k6c+3c0/VhXw3y1lhFoDuf30DO7MHos1sH8f0maOjgEgt7BWAlwIN/HpAA8LshAZbKHCD1vgW4vrGRYVTGIv/d9HCgxQ3Jk9lltOS6OioZ2EyOAB720SFxFMWnw4OXtr4dTBVi2EGAif7gMHoxXuny3wOqYm2wb0mGBLICmxYYPemGrm6Bg27KtFa5kPLoQ7+H6AR4b4XR94Y1snBVXNTBsZ7dbjThsZTU1zByX1/veGKYQZfD7Ah0UFU97gX4GyraQBrWGqmwN8SrJxvK3F9W49rhD+ymUBrpR6xFXP5UqJUSFUNLb71cf6HaXGlrGTR0Qd456uNXF
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "\n",
+    "A = np.concatenate((np.ones(1), lpcs[frame_idx]))\n",
+    "A_num, A_den = stp(A)\n",
+    "w, h = scipy.signal.freqz([1], A, fs=16000)\n",
+    "w, h_stp = scipy.signal.freqz(A_num, A_den, fs=16000)\n",
+    "plt.plot(w/1000, 10*np.log10(np.abs(h)))\n",
+    "plt.plot(w/1000, 10*np.log10(np.abs(h_stp)))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "x_frame = x_preemph[frame_idx * 160 - 80: (frame_idx + 1) * 160 + 80]\n",
+    "\n",
+    "window = scipy.signal.get_window('hamming', 320)\n",
+    "spec = np.fft.fft(x_frame, n=1024)\n",
+    "\n",
+    "log_mag = 10*np.log10(np.abs(spec[:512]) + 1e-6)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 61,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkQAAAGECAYAAADX4D9zAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAADbwklEQVR4nOydeXwcdf3/X5+Z2SP3JmnTu02TcrSUKwlXQa4mqCAImoKgfhWVxK/HVwRt4fv7CvpVKQ3yVb+eifpV8KQNlygCSUEQKLRpetCTtul9pLk212Z35/j8/pj5fHZmdjfZtEmTtJ/n45FHdmdnZj8zuzuf17xPQimFQCAQCAQCwZmMNNYDEAgEAoFAIBhrhCASCAQCgUBwxiMEkUAgEAgEgjMeIYgEAoFAIBCc8QhBJBAIBAKB4IxHCCKBQCAQCARnPMpYD2A8M2nSJFpYWDjWwxAIBAKBQDACrF+/vp1SOjnRa0IQDUJhYSGamprGehgCgUAgEAhGAELI/mSvCZeZQCAQCASCMx4hiAQCgUAgEJzxCEEkEAgEAoHgjEcIIoFAIBAIBGc8QhAJBAKBQCA44xGCSCAQCAQCwRmPEEQCgUAgEAjOeIQgEggEAoFAcMYjCjMKBIIzkkgkgs7OTvT29kLX9bEejkAgGCayLCMrKwt5eXnw+XwnvT8hiAQCwRlHJBLBgQMHkJubi8LCQng8HhBCxnpYAoEgRSilUFUVPT09OHDgAGbPnn3Soki4zAQCwRlHZ2cncnNzMWnSJHi9XiGGBIIJBiEEXq8XkyZNQm5uLjo7O096n0IQTRCimoGIJsz6AsFI0Nvbi+zs7LEehkAgGAGys7PR29t70vsRgmiC8PgrO/HlPzaDUjrWQxEIJjy6rsPj8Yz1MAQCwQjg8XhGJA5QCKIJwu7jfaAU2HgwONZDEQhOC4SbTCA4PRip37IQRBOENK8MAHh378n7SQUCgUAgEDgRgmgCoOkGBqKmObA/oo3xaAQCgUAgOP0QgmgC0BuOiaD+iAisFggEAoFgpBGCaALQE1YBAD6PhFBUWIgEAsGZQ2NjI4qLi9HY2DjWQxGc5ghBNAHoHjAF0bScNPRHhYVIIBCcWbS0tCAYDI71MASnOUIQTQB6Bkyr0LQcPwaimki9FwgEAoFghBGCaALALERTc/ygFBhQhZVIIBCcPPX19SgtLQUhBLm5uaioqIhzTS1ZsgR1dXVoaWnBkiVLkJubi+LiYixZsiSp1aampobvt7i4GMuWLUu4bjAYRHV1NYqLi/n719fX89crKipQUVHBx2FPr2ZjAYDq6moQQlBXV8e3Y6+5IYSgurqaP6+urkZubm7cWNjxBYNBLFmyBMXFxSCEoKKiAi0tLUOfXMGEQwiiCUBPWIXfKyOQ5gUAhITbTCAQnCQ1NTVYsmQJWlpaUFlZibKyMjQ2NqKiogLNzc18vcbGRqxatQqlpaVobm5GeXk5AoEA6uvrMXfu3DhxUFpaimXLlgEAqqqqEAgEuECyi6JgMIi5c+eirq4ORUVFuP3227noYtsvW7YMS5cu5ftatWpV3HFUV1dj5cqVqKqqQllZ2Qmdi2AwiMWLF6OpqQmVlZUoKipCfX09Fi9ejNLSUn6OysvL+TkSnH6I5q4TgL6whkyvgnSfWYsoFNGBzDEelEBwmvLntQdwoDM01sMYlNl56bjz0tkntY/ly5ejqKgIe/bs4cvq6+uxZMkS1NbWora2li9vbGxEeXk5Ghoa+LK6ujpUV1ejurqaL6+pqUFzczNWrFjBhYx93XvuuYeLmnvuuQfBYBANDQ0oLy/n65aWlqKmpgYPPvggX15TU4OKigpUVlY6jiEYDKKxsRFdXV0ndS4AoKioyCG4cnNzuQC0HzcThsFgEIFA4KTfVzB+EBaiCUBUN+BVJGR4Tf3aLzLNBALBSZLIhVVZWYk9e/ZgxYoVca/ZBRJgWmyYxYTti4ksuxhi65aUlHB3WDAYRH19PcrLyx1iCAAefPBBlJSUpOyWSjTWE8G9n9tvvz3hcjbekWgmKhhfCAvRBEC1BFG6Va1apN4LBKPHyVpeJgpMzBQXF6O6uholJSUoLy9HUVFR3LpFRUUJl7OYo6amJpSXlyMYDHJ3k5u8vDwAzoyxRK6nysrKOEvQYJSUlKS87mCw8TGY9cd93Pn5+SPyfoLxhxBEEwBVN6DIBBk+y0IkijMKBIKTpKGhATU1NaitreUxO4BpzVmxYoXDHZTMNcTEQjAY5Bad5uZmLFmyZND3ZusmElnDZTj7GCx1X7i/BMJlNgFQdQqvLCxEAoFgZFm6dCn27NmDPXv2oLa2FiUlJairq8PixYsd6yUTEkzYBAIBbmGpqqoCpTTpX1FRERcfpzpbS7i5BIMhBNEEIKoZ8MgSfIoESSLCQiQQCE6KlpYWLFu2jGeTFRUVoaqqCuvXr0dJSQkPGravn0i8sGBjJnICgQCampoSvmdNTQ1qamoAgGeDrVu3Lm69+vp6EEISut2GQyIRZ8+eEwjcCEE0AVB1UxARQuBTJER1Y6yHJBAIJjg1NTUOVxmDCQm3C8leuwcwhQvLPmNuq6qqKjQ3N3Phw6irq8OyZct4RlsgEEB5eTnfh53ly5cDOLnYIDZ2twBi+xYIEiFiiCYApiAyC5IpEoEqBJFAIDgJioqKHEHV5eXl6Ozs5Blj7swqZvlh67a0tKCxsRGBQMCRfbZixQrU19dj2bJleOqpp1BWVuZY177f2tpalJaWoqKigouqxsZGtLS0YOnSpXGxQcuXL+evDcUdd9zB6whVVVUBMAWc3V0nELgRFqIJgKpTeBXzo1JkCaouWncIBIKTo6GhgYuLuro6NDY28lo8btFRVlaG9evXo6ioCCtXruQFDNkyO3v27HHst6WlBVVVVdi7d69DjBQVFWHv3r2orKxES0sL6urquMCyC6fy8nK+TqoWnsrKStTW1iIvLw81NTVobGxEZWUlGhoa8OCDD4rCioKEENEXKzllZWU0mT/8VPLlPzXjqnmTcOels/HgM5tRmJ+B6muKx3pYAsGEZfv27Zg/f/5YD2NCkJubi7KyMkdxQoFgvJHqb5oQsp5SmrCkubAQTQBUK6gaABRJgmYIESsQCAQCwUgiBNE4xzAodIPyGCKPLEETLjOBQCAQCEYUIYjGOaphBlB7LQuRRybQDBFULRAIBALBSHJaZJkRQlYBWE4pbXYtLwKwCkAtgCYARQAqANS61x2vsABq7jKTiQiqFggEp4yRaJwqEEwEJqwgssTOMgBBAOUwRU8iAgBWWP8bASybKGIIMIsyAoBHicUQRVRRqVogEAgEgpFkwgoiSmkLgGoAIIQM1glwyUQSQG40q+aQqEMkEAgEAsHoIWKIxjkRzRlDpMgSVJFlJhAIBALBiDJhLUTDgRASgBk/FLQsSxMGlmLvsQdVCwuRQCAQCAQjyplgIboDQBmAFgABQsgqSyAlhBBSRQhpIoQ0tbW1naoxJoXHEPE6RETUIRIIBAKBYIQ5rQWRZQ1aTiltpJQGrViiBg
C/GmSbOkppGaW0bPLkyadsrMlg8UJexYohEnWIBAKBQCAYcU5rQQQAlNKga1EjgMrBrETjCVV3Woi8siTqEAkEAoFAMMKc1oKIEFKVYHGn9b8owWvjDlGHSCAQCASC0ee0FURWnaJa638iJkRwtdtCpMgSDIPCEHFEAoHgBKmrqwMhBHV1dUOu29zcDEJIwr/c3FxUVFSguTl5ZZPm5mYsWbIEpaWlIISguLgYFRUVaGxsHMlDEghOmtNWELE6RQmyym4H0JzAlTYuiequtHvJjCVShdtMIBCcQgKBACorKx1/eXl5aGxsRGlpKerr6+O2WbZsGX8tGAyisrISgUAAjY2NqKioQHV19RgcyejR2NiI4uJiIfYmKKd72n0nIaSIiSIrbqgawD1jOqphoPJK1bHCjACg6RS+0/3TEwgE44by8nKsWrUqbnl9fT2WLFmCe+65B5WVsRq5dXV1qKmpQUlJCVavXo1AIMBfa2lpwZIlS1BXV4eKigrHdhOdlpYWBIPBsR6G4ASYsBYiQkiAELKCEFI
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 648x432 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "fig = plt.figure(figsize=(9, 6))\n",
+    "\n",
+    "plt.plot(w/1000, log_mag - log_mag.mean(), alpha=0.7, label='spectrum')\n",
+    "plt.plot(w/1000, 10*np.log10(np.abs(h)), \"r--\", label='LPC spectrum', linewidth=3, alpha=0.9)\n",
+    "plt.plot(w/1000, 10*np.log10(np.abs(h_stp)), \"k--\", label='short-term postfilter', linewidth=3, alpha=0.9)\n",
+    "plt.legend()\n",
+    "plt.xlabel('kHz')\n",
+    "plt.ylabel('Amplitude (dB)')\n",
+    "plt.show()\n",
+    "fig.savefig('plots/short_term_postfilter.png')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 62,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "periods[frame_idx]\n",
+    "\n",
+    "p = int(periods[frame_idx])\n",
+    "ltp_num = np.zeros(p+1)\n",
+    "ltp_den = np.zeros(p+1)\n",
+    "\n",
+    "ltp_num[0] = ltp_den[0] = 1\n",
+    "ltp_num[p] = 0.25\n",
+    "ltp_den[p] = -0.25\n",
+    "\n",
+    "w, h_ltp = scipy.signal.freqz(ltp_num, ltp_den, fs=16000)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 63,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkQAAAGECAYAAADX4D9zAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAADmGklEQVR4nOz9eXQc2X3niX5vROSCJYHEDiT2BJdikSxWkaySZEuWZZFud4+7xwsptc+Me+y2RboXu909dtFqPb/j7rG7hpyesWy/tkzacsvbSCrSslq2ZUmE9iqpqgiyWCyuIJHYdyCRyESusdz3x417M3JPVJEESN7POTgAMiMybsZ2v/FbCaUUEolEIpFIJE8yylYPQCKRSCQSiWSrkYJIIpFIJBLJE48URBKJRCKRSJ54pCCSSCQSiUTyxCMFkUQikUgkkiceKYgkEolEIpE88WhbPYDtTGtrKx0YGNjqYUgkEolEIrkPXL58eYVS2lbsPSmIyjAwMICRkZGtHoZEIpFIJJL7ACFkstR70mUmkUgkEonkiUcKIolEIpFIJE88UhBJJBKJRCJ54pGCSCKRSCQSyROPFEQSiUQikUieeKQgkkgkEolE8sQjBZFEIpFIJJInHimIJBKJRCKRPPHIwowSiURShHQ6jXA4jFgsBtM0t3o4EokkD1VV4fP50NzcDI/H864/TwoiiUQiySOdTmNqagpNTU0YGBiAy+UCIWSrhyWRSGwopdB1HdFoFFNTU+jr63vXoki6zCQSiSSPcDiMpqYmtLa2wu12SzEkkWwzCCFwu91obW1FU1MTwuHwu/5MKYgeETKGhbQhzfYSycMgFouhoaFhq4chkUiqoKGhAbFY7F1/jhREjwj/99fu4N/81RVQSrd6KBLJY49pmnC5XFs9DIlEUgUul+u+xPlJQfSIcG9pA5QCV6cjWz0UieSJQLrJJJJHg/t1rUpB9IhQ41YBAK+Pv3s/qUQikUgkklykIHoEMEwLyQwzB8bTxhaPRiKRSCSSxw8piB4BYqmsCIqnZWC1RCKRSCT3GymIHgGiKR0A4HEpSGSkhUgikUg4w8PDGBoawvDw8FYPRfKIIwXRI8B6kgmirsYaxDPSQiSRSCROQqEQIpHIVg9D8ogjBdEjQDTJrEJdjV4kM4ZMvZdIJBKJ5D4jBdEjALcQdTZ6QSmQ1KWVSCKRPHguXLiAQ4cOgRCCpqYmHD16tMA1dfz4cZw7dw6hUAjHjx9HU1MThoaGcPz48ZJWmzNnzojPHRoawqlTp4ouG4lEcPLkSQwNDYntX7hwQbx/9OhRHD16VIzDmX7NxwIAJ0+eBCEE586dE+vx9/IhhODkyZPi/5MnT6KpqalgLPz7RSIRHD9+HENDQyCE4OjRowiFQpV3rmTbIQXRI0A0pcPrVuGvcQMAEtJtJpFIHjBnzpzB8ePHEQqFcOzYMRw+fBjDw8M4evQorly5IpYbHh7G+fPncejQIVy5cgVHjhyB3+/HhQsXMDg4WCAODh06hFOnTgEATpw4Ab/fLwSSUxRFIhEMDg7i3LlzCAaD+MhHPiJEF1//1KlTePHFF8VnnT9/vuB7nDx5Ei+//DJOnDiBw4cPv6N9EYlE8OEPfxgjIyM4duwYgsEgLly4gA9/+MM4dOiQ2EdHjhwR+0jy6CGbuz4CbKQM1Ls11HpYLaJE2gTqt3hQEskTymffmMJUOLHVwyhLX3MtfuaFvnf1GS+99BKCwSDGxsbEaxcuXMDx48dx9uxZnD17Vrw+PDyMI0eO4OLFi+K1c+fO4eTJkzh58qR4/cyZM7hy5QpOnz4thIxz2Y997GNC1HzsYx9DJBLBxYsXceTIEbHsoUOHcObMGXz84x8Xr585cwZHjx7FsWPHcr5DJBLB8PAw1tbW3tW+AIBgMJgjuJqamoQAdH5vLgwjkQj8fv+73q7k4SEtRI8AGdOCW1NQ52b6NS4zzSQSyQOmmAvr2LFjGBsbw+nTpwvecwokgFlsuMWEfxYXWU4xxJc9ePCgcIdFIhFcuHABR44cyRFDAPDxj38cBw8erNotVWys74T8z/nIRz5S9HU+3vvRbFTycJEWokcA3RZEtXa1apl6L5FsHe/W8vKowMXM0NAQTp48iYMHD+LIkSMIBoMFywaDwaKv85ijkZERHDlyBJFIRLib8mlubgaQmzFWzPV07NixAktQOQ4ePFj1suXg4+Nw60/+925pabkv25M8fKQgegTQTQuaSlDnsS1EsjijRCJ5wFy8eBFnzpzB2bNnRcwOwKw5p0+fznEHlXINcbEQiUSERefKlSs4fvx42W3zZYuJrM2ymc8ol7ov3V+PP9Jl9gigmxRuVVqIJBLJw+XFF1/E2NgYxsbGcPbsWRw8eBDnzp3Dhz/84ZzlSgkJLmz8fr+wsJw4cQKU0pI/wWBQiI+Hna0l3VxPNlIQPQJkDAsuVYFHU6AoRFqIJBLJAyUUCuHUqVMimywYDOLEiRO4fPkyDh48KIKGncsXEy882JiLHL/fj5GRkaLbPHPmDM6cOQMAIhvs0qVLBctduHABhJCibrfNUEzEObPnJE8eUhA9AugmE0SEEHg0BRnT2uohSSSSx5wzZ87kuMo4XEjku5CctXsAJlx49hl3W504cQJXrlwRwodz7tw5nDp1SmS0+f1+HDlyRHyGk5deegnAu4sN4mPPF0D8syVPJjKG6BGACSJWcExTCHQpiCQSyQMkGAzmBFUfOXIE4XBYZIzlZ1Zxyw9fNhQKYXh4GH6/Pyf77PTp07hw4QJOnTqFz3/+8zh8+HDOss7PPXv2LA4dOoSjR48KUTU8PIxQKIQXX3yxIDbopZdeEu9V4qMf/aioI3TixAkATMA53XWSJw9pIXoE0E0Kt8YOlaYq0E3ZukMikTxYLl68KMTFuXPnMDw8LGrx5IuOw4cP4/LlywgGg3j55ZdFAUP+mpOxsbGczw2FQjhx4gTGx8dzxEgwGMT4+DiOHTuGUCiEc+fOCYHlFE5HjhwRy1Rr4Tl27BjOnj2L5uZmnDlzBsPDwzh27BguXryIj3/847Kw4hMKkX2xSnP48GFayt/9MPk3/+8VvH9HK37mhT58/AvXMNBSh5MfHNrqYUkkjy23bt3Cnj17tnoYjwRNTU04fPhwTnFCieRhU+01Swi5TCktWrJcWogeAXQ7qBoANEWBYUkRK5FIJBLJ/UQKom2OZVGYFhUxRC5VgSFdZhKJRCKR3FekINrm6BYLoHbbFiKXSmBYMqhaIpFIJJL7yWORZUYIOQ/gJUrplbzXgwDOAzgLYARAEMBRAGfzl92u8ABq4TJTiQyqlkgk24b70ThVItkOPLKCyBY7pwBEABwBEz3F8AM4bf8eBnDqURFDACvKCAAuLRtDlNZlpWqJRCKRSO4nj6wgopSGAJwEAEJIuU5/xx8lAZSPYdccknWIJBKJRCJ5cMgYom1O2siNIdJUBbrMMpNIJBKJ5L7yyFqINgMhxA8WPxSxLUuPDDzF3uUMqpYWIolEIpFI7itPgoXoowAOAwgB8BNCzt
sCqSiEkBOEkBFCyMjy8vLDGmNJRAyRqENEZB0iiUQikUjuM4+1ILKtQS9RSocppRE7lugigD8us845SulhSunhtra2hzbWUvB4IbdmxxDJOkQSiUQikdx3HmtBBACU0kjeS8MAjpWzEm0ndDPXQuRWFVmHSCKRSCSS+8xjLYgIISeKvBy2fweLvLftkHWIJBKJRCJ58Dy2gsiuU3TW/l2MRyK4Ot9CpKkKLIvCknFEEonkAXH8+HEQQrZ6GJIt5MyZM2hqaso5DwghOH78uPj/cTtPHltBxOsUFckq+wiAK0VcaduSjJmXdq+wk0+XbjOJRCIBAAwPD2NoaAjDw8NbPZRHilL77cqVKzh16hSam5vx4osvvuvPe1R43NPuw4SQIBdFdtzQSQAf29JRbQJdVKrOFmYEAMOk8DzuR08ikUiqJBQKIRKJbPUwHjmK7bdQiNkRTp8+jWPHsnWPq2nT8igfh0d2SrXFzcfBWnIEAZwmhAwDuEgpHQYASukFQsgxRyXrIbDK1Y+EuwwojCHiv2WmmUQ
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 648x432 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "fig = plt.figure(figsize=(9, 6))\n",
+    "plt.plot(w/1000, log_mag - log_mag.mean(), alpha=0.7, label='spectrum')\n",
+    "plt.plot(w/1000, 10*np.log10(np.abs(h_ltp)), \"k\", alpha=1, label='long-term postfilter', linewidth=1.5)\n",
+    "plt.legend()\n",
+    "plt.xlabel('kHz')\n",
+    "plt.ylabel('Amplitude (dB)')\n",
+    "plt.show()\n",
+    "fig.savefig('plots/long_term_postfilter.png')"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "torch",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.12"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
--- /dev/null
+++ b/dnn/torch/osce/stndrd/presentation/spectrogram.ipynb
@@ -1,0 +1,173 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "04cba77c",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/tmp/ipykernel_252797/4199785080.py:9: WavFileWarning: Chunk (non-data) not understood, skipping it.\n",
+      "  fs, x = wavfile.read(f'data/a3_short_opus_{br}bps.wav')\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9O6wu25YmCH3zHRH/Y639OI/Mm1VZBZQBQhgg0UI4SIBU4LQJjYQEQiqHxkA4GAgQVjsYgFCjArUKMGhhllESmDggNRaCtkoIqvNmZZ5z9t5rrf8REfOJMcaYEWufx01U2ewitae077l7r3/FHzFjzjHH+MY3vqFaa/g6vo6v4+v4Ov7/f+gvfQNfx9fxdXwdX8dfz/hq0L+Or+Pr+Dr+hoyvBv3r+Dq+jq/jb8j4atC/jq/j6/g6/oaMrwb96/g6vo6v42/I+GrQv46v4+v4Ov6GjD9o0JVS/5ZS6gel1P/9V36ulFL/U6XUP1VK/d+UUv/xv/7b/Dq+jq/j6/g6/tD4q3jo/wjA3/+Nn/8XAPw9/vMPAPyb/+K39XV8HV/H1/F1/H87/qBBb639nwB8/I2P/KsA/jeNxv8FwKNS6o/+um7w6/g6vo6v4+v4qw3713CN3wH493Z//zP+t3/++QeVUv8A5MXDKfefeGffwCiqVJV6Va2A2oDcFOQHRjUYRZ9pAEpTqA0wij7QQJ/VaIDaTim5hubvULs/BUCV7wBQGtAaYDX6Pcn9KDQA9J2pKlS+Dw3A6e3+1e7+K90K5IsV6N/o/23XVwCM5n9t2zOC/16xPatWrV+zYZun0gCn6X7kM7VfS/F9tf7NDUBr9Deltjlp/ffUz+ZEq4ba6FpW8e8p+pIKuodYtvlUSuaO5m//PWr3EJWfc3tX27woNGi1vfPtnfB62BU57+udG/+PfOfno6+jqqDU9r7l2dv+XfB/raLrWdX4HuW9K77f7e+1Abmq/l2KH0juxfA1FOSdqN3cb/MkozTVn2//LuWdVLx+h/uRKt2P5U1R23ad/f3JvPf117a/y702KLTdvG73RO937x3K88gnlGrQr36H10GT72ifXXMbFQqpKt5zrc/2/j5lfvfXSBWorfH3A0G//h65JrDtZbmv2mTNbc+2n28ZBrQPGr8jWs805+D10ud19+/bXuB3yb9b2+t3gQYUnne515/yDz+11r7BL4y/DoP+Vx6ttX8I4B8CwHfuu/Zf/fa/hPchw+mGQVc0AGvVKFVhqbpPqtPtlZFNVaE01TeG/CxWMjgnV9Cawiqbin9VjGPbXUODJlVertdkLCJvhAdXYBSwVIXaVN+oudEmP9iC1BTuWWOyFSdbyaADWIpGrAqTqfC6Yamav5fuZzD0XEE35AY8R4uKbWMo1XhuGkoDUtsvcxqF72cwFUoBL8mgNGDka2slm4Cefyn0zEaLEaP5dWr7/F8sFk9R9Y3+/Vj6PQCA1Y0XOM8rv6e1fm54tu9eq0auin5XNSxFozRgKduBZVXDwTYY3TAa2jbyjgB6/jlrOuB1699R2mactaJnEUPdmurvzqiGxu/FqYajK30eb9ngKWqab9N4XaA/sxyohq/74DIU0N/pUnR/D0vR+LCavjZTVYgVGAy988FUONX4MKZ3UrblDfBzDbrxOlJwGnh0BZXXuVMNo62Ys8ZzMphs7Z/fj/2hJ3NSmsIt6/7dRuad19vnB6zMrxirgy19DVTQus0NmGzludTdmBd2OAbT8OhKf2ajGqxqeEoG96wwWVrnR1cQ9HbTpSnkRu+s8XoVoxyrwkvS8Lrh5Gq/x6XQASAj8+W+HcqrdfXDYhEr8ByBYIC/eyzwusLphlg0blnDm4bJVCjeH0uhdbxWWr8nW+HNNuuyVitozm5F1ge9w7MrfU5k/cp3DrrC6drn+Sk63IvGD4tGrMC7QOvv3/iz/9n/G78y/joM+u8B/K3d3/+E/+03Rz8hm0KpwKUatKaQ5BQDsBaNe1HQkA1KE3DP9JIN/9u7QC9i4NmMRXfPD6CF3Br6xF6zwtHSIpCRqsJa6I9W4EOEFnZuwCXRxd96MvDe0AKSzXVyFUY11AZcssE17Z7FA0pVDLpiMjsvnz3gK2+uxM90sKV7vY43Wa0KpdA1SwMOtmI0FdrQnORGi2AwtS968mxpAk6WjIFR9F2x0GePPsPyYlV8MP5ONbwPtPFao0NOK0Dxd2Q2noF/xyjAoEGx6y3G3qiGtSrEqvuhPPMCdxpwCiiqIYMM3sKHglHAydG15qzgDW0EjQZvKu5Z4xY1BtN6ZKJUw8FUBN36ofn72aE04ME1vhcNpRq8pjkl46X6hgco0vGa7vOeFB59pQPZVGhskcJadV9ra1X4aTVwqiEMdEB/N+TuQLwkjWtW/eArTcPx/KhdBLJ3WkoFPmYNo4BvhgKnGh/aDSfIdegguWa6ZjFycIMPCQWvG0cXZIAPltbpZCo7Iga1KcwFMEoh19YPRsMHslaAM60b8cGQ4aFDWsPqBsVG3+iG9yF3w1b4QAXoQHb83eIMDKWhaLrfW1a48IHtPgODZW7ogFf9Xk6W7mvQDfemsGSFyTQMnvYQdnMx8vqQdS17/P2pIeiKIzuCS9G4F42npDHU1r1mjYbU6LAQJ03WtdO1759SNeZi6MCJGkoBJ1f7OxQHIFeaG3FZHn3ByRZ2WLZD5GAbfFW451+KN1+Pvw6D/o8B/OtKqX8bwL8C4Lm19jO45WdfrBsefcElb2+OTlrymjSAZCoOu5NWNshg6O9iuFNVSJUmsLQtbBFv27F3Gflao6FN5wSKUQ0nB0wWuGc6eQdNXqBAKHtPpzTgw2rhdKMTlz2wVBWWRr/vDeAlDG8K12SQdt4eQCGheENyiIjnpUGLOFZaXLEo3PmU93ozSrHQ6U2HBx1YDcBo0Deu0WR4NXu7DQ1WiweqkDkaMrxJACDohueksRaNhQ1P0HWDS3jexNNaisFztBhtxVufkJtCLBozezoyjrbijSePRDy8UhVekkHi+wfIkCsFjJYm7ZoMrwGKjB4cbWSn26uoSyKv0tD/JI5uLknBaYXvhwylaP3IdxoFfDfkvs5kaH5GMSgfV4tYFRtGdE/wezbgy857bn3zNxQnh13jdQI8OtrgpZHhHgxdU9blys9ySRpaASNHQLGQ0fNsZN+H2teOzNElWdwyEAwfkBZQur16F0oBfzxmlAbci+4OledDb1vv7FE2AIqMWikGC8/fLZOBPHsyWq+81J13X5pC1QRMSKQz2grPG6I28u5z3WBWiao+rOZVJDeADuJYFSYAxjY8uIJH3/oha/kascNoBKFYXaGUwmg0WiMHymlax+B3PZiKydBeXqtGQ0MFHQpn1/ozPSfTDwiAHA8A+BAtUkV3RAHa50vR0AAmU1G0wsBrM/Ee+BQN3vmyuydyOnOlee5Q9K+MP2jQlVL/OwD/GQDvlVJ/BuB/AMABQGvtfwHgnwD4LwL4pwDuAP7rf+iaMmr3IFqHHyQkTByuXJJG0EAwFS9JYykKb33FYCrAXnJjz+
clbQbbsEGn76FJPbvyyoDKIisM1TjV8NbTCxH4QKKFN4EmVkLr/emseQUrWqsQJG8wDV7XvsAXDtNeou7wiQIt0NwUflwUDlbh0ZM7LCHxS9IYdMO7QPfW2rbpHXtHl2gQC3CwZHxvmVC6wt66eO1O0cIUr1BgITESSqF7pJOheW58r3svVfIJif2LCppfySl0TJJRDTmEKNw3fcEa1WBMw3v+njt78LKxP0WaPdkUtdEzPrjSD/i5aMSCHhGdHD3no9/C61SByAFZhYJB2w5/kBf3FC177mRIHj1BP657dao7CeK9HvjwFeMVi0ZVCsgc3dXNUy48Z043eE0GPOiG4At78gaxKThXGP5SyBW4Zoa/eCVVAGtReEq6h+1nVzEw5GFAUeuDUz2iEKhL8Nujrd1rr/xz8fpLowN0rWQwR0NrOfLa/hRpHZ85Kg2aruF3h3RrG4x0sBWlocMt96I7hFXZGI4MGeWqoNVmFwTacKohQeGWAacVRlthoVB2h604GORkoM8VYf4Nl7zBKBoNJ0uL814
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 432x288 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAWDElEQVR4nO3dO48kWXre8ec9Jy55qazq2/bs7oy4JCRKWEEguYAkyJBsegQEyJAhS4a+inw5cvkFCJr0+AEIOqIgR9ASQ2lmZ7Rz666uyktEnPPKyLr1zGxEZcxWCAj+f1Z2Z1b1m1U58+Dc3mPuLgAAnlr4/10AAODvBwIHADAJAgcAMAkCBwAwCQIHADAJAgcAMImi70mz4jfumXbv7LdfDgBgrhjhAAAmQeAAACZB4AAAJkHgAAAmQeAAACZB4AAAJkHgAAAmQeAAACZB4AAAJkHgAAAmQeAAACZB4AAAJkHgAAAmQeAAACZB4AAAJkHgAAAmQeAAACZB4AAAJvGIwIlPXwUAYPYGA6eI51PUAQCYucHAWZavpqgDADBzJ63hmIqnqgMAMHODgXMWX09RBwBg5gYD55X/9O6xWf2kxQAA5mt4hOOru8fr+qc9rwQA4DcbDJyFyuMLba1lfP7kBQEA5ml4l1q4P4ezMgIHADDOQOBEPa+OO9PcD6q8mqAkAMAcDU+p3QxwXK6X+cVT1wMAmKmBwElaF/ePN+xSAwCM9OiDnyFsVJg9ZS0AgBkbDJwyuCQphoUigQMAGGkwcOogSVFVPNcicpsBAGCcwQRpXTKLKsNSJXkDABhpMEKyS8GWChb1pklT1AQAmKHhEU42Zd9NUQsAYMYGA+eqk9wb7bpv1LlPURMAYIYGA+dwM4vWpmstI9dNAwDGGQyccLMTuohLZUY4AICRHrGGcwyZMiy1z/nJCwIAzNNg4OzSMXC6fNDW2ycvCAAwT4OBc9kdQ6YMS3ViWzQAYJzhKTUdp9GClQqitQ0AYJzhTQM3IdPmnTqxhgMAGGd4W7SOU2rJD2qte/KCAADzNBg4Wzt2GTi0X+pX9ssnLwgAME+DgWM3L3FvVdnqyQsCAMzTcGsbu7x5xA41AMB4w4GjryRJpkKNb5+8IADAPA0Gzj69lSS5OmVGOQCAkR7RvPPdg8eXPa8EAOA3G75iOm7uHida2wAARjpphGPiegIAwDiDgfNQZoQDABhpMHDc79vZBCuftBgAwHwNBs7DUU3yw5MWAwCYr0dMqd2PcA7tl09YCgBgzgYDJ+Xd/YutftJiAADz9Yg1HKbRAAA/3COm1O5f4tyHAwAY6RHdoh/c8ukEDgBgnOHACcu7xy7O4QAAxhlew3mwacCd5p0AgHGGA0f310qb0doGADDOIzYN3IeMe/OEpQAA5uwRgcM0GgDghzupeScAAGMNBk6w9RR1AABmbniEYwyCAAA/3CMOfhI4AIAf7hHNO6+mqAMAMHPDazhhNUUdAICZGwycGBZT1AEAmLnhGz8zhz0BAD/cI1rb0CEaAPDDDQZOGTdT1AEAmLlHrOFUU9QBAJi5wcC5KD6cog4AwMxx8BMAMInBNLnOX01RBwBg5gYDp7B6ijoAADM3GDi1nU1RBwBg5gYDZ6MXU9QBAJi54ead6qaoAwAwc8MHP8U5HADAD8eeZwDAJIabd9JLDQDwWzC8LdqLKeoAAMzcYOAsfXn32Iz1HADAOMPncFTePY5h/aTFAADma7hb9IOXmDG9BgAYZyBwTJv4cIRDmxsAwDi9gWNWqjS7/zO7qAEAIw0kSFByv/tTwQgHADBSb+CEb3WKruP5kxYDAJivgSm1oDLcT6ktbPPkBQEA5qk3cHJuFB+s4VS+lImdagCA0/UGjqvVIt4HTq2FZGwcAACcrjc9Yljr/H5XNH3VAACjDazhFCrvXhF1ns8VbNn3JQAAfK/BNZx0syvaLKpWKWNKDQAwwvAuNXOFsJF7q0Imd6bVAACnGzyHk2UKVklyRQty58ppAMDpegNnWb5QkEs3mwWiTK40RV0AgJnpDZxFeL+zQDD7TvcBAAAe45E7AIJMhaIZmwYAAKMMpkeW3a3bXJSBEQ4AYJT+KTU707rIKuJark7nJWs4AIBxegNn55eqgquKa8VwoWhSStupagMAzEj/ORwFBZPKsFJZbJRcjHAAAKP0Bk5lK2WXCqsVrdaDPp4AAJykN3BWfrz/pvODXEltltz3kxQGAJiX3sBJ6tRk0677Rk33VqtCCoFL2AAAp+sNnIPt1GZT9k7BalXBZY89ugMAwAO96dH4VmU4toteFBdqsinlq0kKAwDMS2/gvOs+lyS5Z5VhpSBXsMUkhQEA5mVgW3SUJMVQqclXKphNAwCMVPQ9uSyeKZpUhuMtn12W3A+TFAYAmJfewGnzTtLxts9opWKQXNyHAwA4Xf8utXSpfTJFK1XbmRY3GwgAADjVwI2fhZpsqu1MpqDWjXM4AIBRegMnWqlFPI5qXFltPoYQAACn6u804K2yH5t4ZiXt0vEQKAAApxoc4SSXOh20sDN1WQpWTVUbAGBGegOnywftkykrKXqpzsUV0wCAUXrTw5W1KlzuWclaXbd+d900AACnGByulHa8FyepUxVN7nmKugAAM9M/pZZ2ypKSWhWqtYzSonw1UWkAgDnp3zQQaiWXCtVKarVLXDENABinfw3H87GXmtVK3urrJqlN11PVBgCYkd5TnCEUqoKr8Z2SWtWBNRwAwDgD1xMEuUuF1cpKSrRSAwCM1DvCcR1HM6XX6rRQdlfO+0kKAwDMy2Dzziq6WjsoqVWWFAI3fgIATjd4Dieaq/GtmrxVm7OKuJyiLgDAzPQGThlWSm6qbCVJKoxNAwCAcXoDp81bleaqfSlX1iLSRw0AMM5gt+hN2SmqUGG1TKbs7VS1AQBmpDdwqnCmImQVXihaKZerS2+nqg0AMCO926Kvuy/UpKigcNw0YC53WtsAAE430LzzWtFcpqDkByV3mcWpagMAzMhA886lXFLS8Q6cMgSJXWoAgBF6A2dRPFMVsg62V5cPMkkuLmADAJxucJdaclNQUJd3Sk4zNQDAOIPNO7fpfs0mmj15QQCAeXrEFdOu2hcKVorxDQBgrMEpNUla+ELBChWMcAAAI/UGzsqeK0tKSuryThcVgQMAGKe/04BX+upwHOUEKxXJGwDASP1TaipUh6ygoGC9TQkAAOjVGzhJncykrKwi1GKAAwAYa3BbdJtNBzuoSdeq6WoDABip/z4cNeqy6Rv7tdruLSMcAMBovYGz06W2ybT3S5kVWkRO4gAAxukNnINfKblpFZ5LEgc/AQCj9QZO9qTkx91q2Q+67phUAwCM0xs4rqzs0t6vJO+0pVE0AGCkwV5qdXBFlYrxQoEBDgBgpP4Rzs110md6piIuVQ3GEwAA36//4KcfD35KUs6dMrsGAAAjDQTOQYvo2tm1unytQ5qqLADA3PQHTj5Ikpa+VhHWOjDEAQCM1B84aaug47Zos6A2T1QVAGB2+rcBWFDrplaN3LOco58AgJH6D3
7md+8d9qzYFw0AGOlRG51LVYqhkrsk0TIaAHC6/usJrFJpruBBbbpW664QVlPVBgCYkYGDn42WhSsoqIhLbVOWnJ0DAIDTDY5wJKmzTsEKHXJS9utJCgMAzEtv4MSwVnJTVla0kl1qAIDRBg5+XmsZ76fQLopK4t5PAMAIg2s4dXCt81rZs6pgKuKLqWoDAMxI/xqOCm3KTlFR0Qold13UvzNVbQCAGelfw4kXym4KMhVhoX3Kqoxt0QCA0w1MqXV61x0PenZ5r3URZY87KwoAwHsGtkUHBUlnVmsRL7SKbBgAAIzTGzhl3OhZ1aq2oEK1Opc6HaaqDQAwI4MjnGXRKUuqbKXkzhoOAGCU3sBpundqUtQ2d1r6SsGkvb+bqjYAwIwM7ADIavNxHefYbYA1HADAOP334XinTdkqWtAmb7SM0pm9mqo2AMCMFL1PhrXWZaNNXCuklcogHfxqqtoAADPSO8IJodCmPtxNpSWXWu0nKQwAMC/9nQaslrspmNR5VhWkwI2fAIAR+kc4FrRrS1XhOMI
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAVy0lEQVR4nO3dTY8kWZbW8efca2b+GpGRWVnVSauYXgyC0bRoMSMkJCS+AJoFOxZ8GDZ8BCQkPgRbdqxYIVatkUaoRTMw09PVXdmVL5Hu4W5m9x4W5hnhOV1pFm5TYUjG/7fo8si0yDwR4dmPrtm555q7CwCApxb+XxcAAPj/A4EDAJgEgQMAmASBAwCYBIEDAJgEgQMAmETR95tmxWd7pt1b++HLAQDMFSscAMAkCBwAwCQIHADAJAgcAMAkCBwAwCQIHADAJAgcAMAkCBwAwCQIHADAJAgcAMAkCBwAwCQIHADAJAgcAMAkCBwAwCQIHADAJAgcAMAkHhE4nLMGAPi7GwycqvjRFHUAAGZuMHDavJuiDgDAzA0GTrDq7CNurwEAxhkMnFX54v61KT5pMQCA+XrECqe8f+1KT1oMAGC+BgNnHZ6ffeRPWAoAYM4GAyeerXBMxZMWAwCYr8HAsbNLYrx60mIAAPM13DTgm/vXRdz0XAkAwOcNBs7S1/evF/H6SYsBAMzXYOB84Q+30T5tIAAA4PEGA+d5+dA0cKUXPVcCAPB5w89wYneJqdB1fvbkBQEA5mkwcBan4QJmi6euBQAwY4/Yh9P915UUOT4HADDSI/bhdGJY6dqWT1wOAGCuho8ncEmKkoKWgRUOAGCcwQTJLplMRVzpXWqmqAkAMEODgbNrXa5WbdrprX2YoiYAwAwNBs5t2x1J4J517Yy2AQCMMxg4ybsjCVxZK2NaNABgnMHAabxb4cSwVOY8HADASI9oi+4ao3Ou9c7vnrwgAMA8DQZOfTpWOvtRu7B78oIAAPM0GDjfhbeSpDLeqHCe4QAAxhkMnA/2rrswFMrKT14QAGCehod3+kqSlHOr1tonLwgAME+DgRNOl7R5p0b1kxcEAJinwcA5WteZlvOt3vrfPHlBAIB5esQ+nOP967v2zZMWAwCYr+FJA3oY2FmG1ZMWAwCYr+Eutfbb+9dm8UmLAQDM1/DGz/T+/rVx4icAYKTh83DyQ2easw8HADDS8Cy1swnR2TmADQAwziPukT2saprM8E4AwDjDGz9tcf+6TQQOAGCcwcDx07RoSSojJ34CAMZ5xAqnvH9dhEXPlQAAfN7wCscfnuHcNa+ftBgAwHwNt0WfjbY5b5EGAOASw6NtzjrTQqietBgAwHwNt0X7+Rk4TBoAAIzziC41v39tRuAAAMYZTJAYtvevUz48aTEAgPl6xGibs0ucWWoAgHEecY+M22gAgL+7i2apnU8dAADgEhctX5xp0QCAkR6x8fO8LZpnOACAcR4x2uZ4/tETlgIAmLNHDO9cnX1kT1gKAGDOLmqLtrPJ0QAAXOKiZzjudKkBAMZ5xC214uwjmgYAAOMMBk4Zr84+omkAADDOYOBwyicA4IcwfB4Omz0BAD+A4S41xSnqAADM3PDxBGdNA2bLJy0GADBfw00DYX32EV1qAIBxLnqGwz4cAMBYj7il9jBdgEkDAICxLjtd7ZPJ0QAAPN5g4CxsO0UdAICZe8Rom4e26PjJ1AEAAB5vMHAqfzieIBhTBwAA4wwGzsof2qJddKkBAMYZDJyjHe5f51w/aTEAgPkaPg+HzZ4AgB/A8LRonY+2KXquBADg84ZH23h1/7qIq54rAQD4vOHA0UPg5MzGTwDAOMMbP/1hnE2bbp+0GADAfA23ReshcELgeAIAwDiDgdPK71+XcfOkxQAA5mswcNZnnWlloGkAADDOI2ap2f1rM46bBgCMMxw4Z685gA0AMNYjDmCzoUsAABg0vA8nPAROFTkbBwAwzmDgbIqHS6I4YhoAMM4jbqk9vH6mL5+yFgDAjA0GTvKhKwAAGPaIZzhnr88GeQIAcInewAnhStXZFStn4ycAYJzewFmVXym7ZNatbLZaTFIUAGB+egPHlU7PcLrLog3egQMA4Hv1JkjKtZJL7gdJ0t7rSYoCAMxPb+CYhU+61K5sIYnJAwCAy/UGThlWui6lsuj233y5KIc+BQCA79WbHk2+UzAphoWkqDIwMRoAME5/W7QVqkJ3T+0haFjhAAAu15se0UolN5mCpKB1IZlY4QAALtcbOFfxlYJcwUpJWYckZd9NVBoAYE4GVziSlPwos4WqIJmKvk8BAOB79QbO0T9on0zuWWW80qZwic2fAIAR+jd+eiNJcs+q4kZ3yeRs/gQAjNAbOLv2WyWXYqgUrPzkbBwAAC4x8AynG9ZZhpWO6f0kBQEA5mkgcAqVQSrDWm260zFNVRYAYG4GnuG0KqybqbYqX2jJFhwAwEi9gXNoXqvOUvZWVdiqDJw3DQAYp3dTTfajDqkbcRPVTR0AAGCM3sAxK7UupDYf5dZNGgAAYIzeW2pF2Cia5MqKVhI4AIDR+qdFh24BZAqqbC3nEQ4AYKT+83DaWx2TtAhbFVqocRcnfgIAxugNHPdWRZDu0ht98NfatS6JZQ4A4HIDt9SWqoJUhKXafDyNtmGFAwC43MDo56zsXVv0Imy1ikF2GncDAMAlegPHrFCwblq0K+suZflpgjQAAJfoH22T9pKkdXiuOu/1pmkk0RsNALhc/y01C/fjbLI3WkdO+wQAjDPQpXZUk02urDKs1eTMEdMAgFEGAicp+8PJn0kuVztJYQCAeelvi7al1oXLFFTnD4q0RAMARhrYh1MpSEpq1OajghE4AIBxBtuiJam0pYrA/hsAwHi9gdOmW62KrKhCVdhqGQb2iQIA8BkDK5xS12WrtV+pyfvT8E4AAC43eB5OYa7gQcFKJXdJcaLSAABz0j9pIN/pkKKCgoJFBUlGpxoAYITeXZyuLKk7gC15o4Z9OACAkfqf4SjIzNVYrWN6r03kdhoAYJzewCmLK0VzlV4p2kKRfTgAgJEGRtt0t9SSWiU/qs55kqIAAPPTGzgxVNq1UY3VyrnVkcABAIzUHzhW3t9Sk6QFGz8BACMNDO8sVQZXqUohFFpGAgcAME5vgizDtdxN6dQKXQWaBgAA4wwuWQ7JVFstU1BB3gAARhoMnH0KalSrzXeq2IYDABipv2lApdy7ZY07EwYAAOMNdqllScm6I6ZLegYAACP1b/w8zVKr/U7ZW0We4QAARuoNnLv8TvvWlNTIvVFpnIcDABinP3DSG7mkQgu5H9Q6SxwAwDgD5+HUyi5VtpYk1Uy2AQCM1Bs4VdwomBS8u+y2maQmAMAMDZ+Ho49dapGNnwCA0XoDp847HdLDxwVt0QCAkfoDp30nl5SVFWwpetQAAGMNHMB2VBmk2vdyJe1bIgcAMM7AM5xSJmllzyRJu5Y2NQDAOL2Bk32vMjx0qXHiJwBgrME2gHx2F20dGRcNABhnIHBc72opKKiMN1pwABsAYKTBFc4iSlGFzO
iJBgCMN5giV6Wr8ELuWV2TGqscAMDlhlc4oVvhhFBol5LEbhwAwAiDgdO6dLSDoi2UnbABAIwzEDhdV1qjWmVYsbYBAIw2EDhJZXCVquTK2sQoUzFNZQCAWRm8pbaOWZu8UZP3imaK8dkUdQEAZmYwcIrg3T6csNYhZ63Ll1PUBQCYmd7ACba5PwMnWqlDTn2XAwDwWf3TopV02wZFBQUrdFOUU9UFAJiZgWnRUauYtVKpqFLLaDJjnhoA4HL9gWOFnpWtlqFQaUu12RWNVQ4A4HIDB7C1Wsbuuc3SN2pdKmwxSWEAgHkZPPFz3xZKpwkDhUnJm0kKAwDMy0DTgCu5ydTNU6uiMTUaADDKYHqsirb7r6+1ilIUz3AAAJfrDZwYtnqxOGgRgr7QdqqaAAAz1Bs4RdzoanFUGUylBbmk1o8TlQYAmJPBW2opB1XB1HhWpGkAADBSb+CkfNSbw0rJpeyuF5UrsPETADBC71kDMSwUzJXc9UFHPasCKxwAwCgDwzsLPV/eKbsUZKpzkDsDPAEAl+t
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAWBklEQVR4nO3dy24kSZbe8e+Y+SUieK3MqupS9/SgIcxAEuYKSIAWWmopYbTSSg8jQK+glRZ6DEE7PcAAAgSoly2NemZ60KhLZ2WSjJu72dHCI0nWVKVbhnfTBbj+vw2dpJM8GRHkl+Z+zMzcXQAAvLTw/7oAAMD/HwgcAMAsCBwAwCwIHADALAgcAMAsCBwAwCyqsU+aVR/smXbv7XdfDgBgqRjhAABmQeAAAGZB4AAAZkHgAABmQeAAAGZB4AAAZkHgAABmUQwcG5+qAwDARykHTljPUQcAYOG4pAYAmAWBAwCYBYEDAJhFMXBiWM1RBwBg4YqB497PUQcAYOGKgZPS3Rx1AAAWjrZoAMAsioETrJmjDgDAwhUDJ+f9HHUAABau3DSg9Ow9dpUGAEzzEW3RF8+Or1+0GADAchUDp61uH4+zH16yFgDAghUDp37WpfZ8tAMAwDnKl9Ssfjr52TEAAOcoBs5leP14nPLuRYsBACzXWYt3hsCcHADANB+x42f8wWMAAM5RDJzKn7aYjoxwAAATFQPndX66h7OpXo+cCQDAhxUD59qe9sO5Cp+/aDEAgOUqBs5t/XRJbe2bFy0GALBcHzHx8/1RVHB2pAYATPMREz+HBTureK3W2xcvCACwTB85whlCZ6PV6LkAAHxIeT8clyRX9l6B7QkAABMVAye5JJliWOmg7uUrAgAsUjFwHvosKci9V1KeoSQAwBIVA+fr7igpKeUHGZfUAAATFQPn/WU098Q9HADAZOWmAbkkyWSPxwAAnKsYOHvbDwcWdLDjS9cDAFio8sRPDUvbuCftbfviBQEAlql8D+c0wgm2egwfAADOVQycO3sznBhYZQAAMN1HTPwcutSyH9WJezgAgGk+okstPR7v7eFFiwEALNdHNA3UkiTPO+397sULAgAsU3lpm/yNJMnVP4YPAADnOmtHtcTinQCAiYqBY89OcWfxTgDANGeNcCpjx08AwDTFwPFnWxL0fnjRYgAAy1VeS63/9vH4of/qJWsBACxYMXCqsH48jlxSAwBMVAycY3r3eNzn3YsWAwBYrvJKA/lpOZvstEUDAKYpt0Xb0wrRTbx+0WIAAMtVDJzr9vcej6OxPQEAYJpy08CzRoHk/YsWAwBYrnLTQGaXTwDAb++slQb82VYFAACcoxg4weLjsSmOnAkAwIedNcJhHg4AYKryPBx/uozWJ3b8BABM8xGB8zTZ0+ysAREAAI8+4h7O0y6f8dm6agAAnKMYOIktCQAAvwPFwGmfLWeTaBoAAExUDJyop0tqmZUGAAATle/h6Pk8HJoGAADTFBNkm988nRyaFy0GALBcxcDpnq2l5lxSAwBMVAycVbx5PKZpAAAw1VkjHCm/YCkAgCX7iHk4TysNxHDxosUAAJbrvO0JnBEOAGCaYuA04fLx2NhiGgAwUXmL6dA+e48RDgBgmmLgPL+MlvPxRYsBACxXMXCeb0nAJTUAwFRnNQ1UkS41AMA05YmfdvV4HFnaBgAwUblLzZ82XWO1aADAVMXAqfU0qgncwwEATFQMnIPt56gDALBwH3FJ7WmEwyU1AMBUxcC5zE9NAyxtAwCYqhg4u8CWBACA3175Ho6e7uHQNAAAmKoYOGvfPB4/X3UAAIBzFBOk9afFO2kaAABMVQ4c1Y/HDRuwAQAm+ojA4b4NAOC39xFNA0+X0Zp4OXImAAAfVgycqPjs5DhyJgAAH1YMnJv4tNJAfHY/BwCAc5Tv4QR7PG5sPXImAAAfVggcU/Kn9zZ+9eFTAQAYccZMTlNwJn4CAKYZTRBT1OrxkpprZ9sZSgIALNH4kMWCYpDsNBcnnDMgAgDgmdEEieFC2SU/zcV5vvsnAADnGA2clHfaVPY4wqmcVQcAANOMBk6wVibJbFjAM7LMDQBgovHACY1WUQphJUmqnJUGAADTFLvU6iDlPGzCtuIeDgBgotHAyX7QPkmuTqZKl4GlbQAA0xT6nIOiDd1qsoqmaADAZKMZUscLRZOqeCF5r1UkcgAA04wmSJ93CiZ1/Z1kQbXZ2OkAAHzQaOB0/VcKcsWwljyriQQOAGCawkoDNzpkUx0vFMKFVlGSCB0AwPlGA6epbtRlqQqtqnhxihru4wAAzjeaHq4sSYpWyyyoCpKUZigLALA0hbXUDqeQkUxB6+hjpwMA8EGFwNkru9TnvYLVomcAADDV+CW1vFM+DWr6vNN9T+IAAKYZX0strIeTrFbKBzX0CwAAJhqNkLb6RMmHLrV1/UrHPFdZAIClKXapbU5b4FzFL5iBAwCYbDRwjv1bHZJUa6VgUYkmNQDARKNbeAarlFxK6nTMWz30c5UFAFia8UtqPty0MQW5MmsMAAAmK8zDuX88rqzVIXNNDQAwzegltbp6pU0lBUVVtlGgawAAMNH4PBxFmaS1rpW8O300vnxVAIDFGQ2c7J2CSck6PeRv1GXJaI4GAExQuIfzoGjDfZvsnY7Z5awWDQCYYPySmtXDCEe91vETVWYyLqkBACYotEUf1GWTKaj3g1yMcAAA0xQCp9M+DUvc9Hmv+z5JojUaAHC+wlxOV5el7EnH/KCts9QAAGCaQuCYXreu2lrVYa17281TFQBgcYpNA3VwZWVFa3XpazEPBwAwRWGEExRNqr3VIb3TJ7GRGYEDADjfaOBU8UrhNA/n0H0jSTLVL18VAGBxikvbHFPQXvcyG5Zdc3VjXwIAwA8aDZw+P6gKrk/8c100Xyi5l74EAIAfVJz4WQfXxjdK3itLimE9U2kAgCUZH654VnZpb3tl73RVRaVMazQA4HzjIxwltWFoGqhCq+Qu9/0shQEAlqU4D2cVs0xB0drT0jYAAJxvNHDq+EqrOIRMl3faRObgAACmGQ2cGBolN7XeSGLZTgDAdIUdP3vt09OoZpe4pAYAmGY0cFbVrbps2tvQKFAZ20sDAKYZDZwubyVJtdfq0oPqwKRPAMA0xaVtsqSNVpIY4QAAphsNnHV1q9qGjaXreKGrmsABAExTWNoma5+HU+qwViRvAAATjQbOPr3TXRd0VK/snZJLEqkDADjfaOAEC+qz1KhS7wcds4vZOACAKcYnflqrKkg7HSRJa66pAQAmKlxS+1aVSbuwkylqU81VFgBgaUYDJ+WDtsmUlZW9UxNc3MMBAEwxvrRNPuqhl1xZfd7poTdxDwcAMEVhLbXh3k1UpWC1HvpZagIALFBxP5zbxpVObdHHxOgGADBNoS26UhNctTcKVmufuYcDAJimuNLAXRdUq1H27rSWGqMcAMD5xkc4Ydh4LamXe9YqmiR2/QQAnK/QFr1XG10Pdq9g9fuPzlAWAGBpihvcHNLTPZs9TQMAgImKTQPBpFYruZJ6J3AAAN
MURzh1eAqZTOAAACYaDZw+vdO2N3U6KlitOphoiwYATFEY4SS5pLVvZBZkoi0aADBNIXBMlUmmIFNQciZ+AgCmKd7D2VSug+3lyjrmrBAu56gLALAwhcAZLp9t7U6moINned7NUBYAYGmKW6oFSUFB1/WPdRkqOfdwAAATFC+pmbk2fqWoWpdVUOSSGgBggmLgBEmtr+TKcrnM2GcaAHC+YuDcNkkXvlK0Wvvk2tSfzlEXAGBhim3R0Vy9krInve17mbFaNADgfIXACdqmoCBTUqerWKmydp7KAACLMr7FtExtcLWqVNtKdTB1eTtXbQCABRkf4VjQOiZFC2p8rSApeT9PZQCARSlsT7B+PK7VKAapjbRFAwDONxo47r2OOZxODMrM+QQATFQInIPqkLUKQ2faRWWycic1AADfMx446nXTHNUGU+2NbhunSw0AMEnhHs6FrpuDVtH0hW7UZVMQ83AAAOcbDZwqXqmJSV0e3r/vpW1+M0ddAICFGZ+HY0GHvlIThk3XmiAd0rtZCgMALMto4MT
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAV50lEQVR4nO3dy44kSXbe8e+Yucc1L3XpmmbPUAIHJAgShERAEEBoozW14VKAXomANtppp9eQFnoEaaFZ8j4DTnOmq+uWmRHh7mZHC4+8FKfbLcN7wgW4/r9FZVSVR+XJzIj6YObHzMzdBQDAuYX/1wUAAP7/QOAAACZB4AAAJkHgAAAmQeAAACZB4AAAJlEN/aVZ9b090+6d/fbLAQDMFSMcAMAkCBwAwCSeETjx/FUAAGavGDhmBA4A4IcrBk6w9RR1AABmrhg4rjxFHQCAmSsGTs6fpqgDADBzdKkBACZB4AAAJkHgAAAmQeAAACZB4AAAJlFe+Dm8vycAAM9SDJwYr6eoAwAwc+WFn95NUQcAYOYIHADAJMp7qYXVFHUAAGauGDhVZPNOAMAPVwycOjwGjhmjHQDAOM8InM2T37FzNABgnHLgPDkPx709azEAgPk6cacBP08VAIDZKwZO0uOoJtj2rMUAAOarGDhX+tEUdQAAZq4YOEs9dqaZsa8aAGCcYuCs82PTwKJiXzUAwDjlnQaeXLKIF2ctBgAwX8XAeWmP63Ci1WctBgAwX8XAWYX+ErPFZ2tyAAA4RTFwFg+Bszx7MQCA+XrG1jb9+ptglV7oyylqAgDMUHlKLZqkviV67ZvC1QAAfLdyl5pJrqQqrLV1dosGAIxTDJzb1mWKSrlRZrdoAMBIxcD51CW5krIfdGN3U9QEAJihYuDkh18DIxwAwGjP2Gmgb4mu4/azXQcAADhFMUEOnuXeKlitzropagIAzFAxcO68kXvbd6o5u0UDAMZ51uadZrWyM7oBAIxXDJxvwrdyb9V0H5hSAwCMVgycvd1K6u/j3NmnCUoCAMxRMXBaP0hyiZZoAMAPUAycja76B55ZhwMAGO3ZJ35aWKvzw9kLAgDMU7kt2j5J6neMvsvvzl0PAGCmioHzrv25JFfOn+SeJigJADBHxcBJT6bRmFIDAIz1jPNwHncXSLk5azEAgPkqBo77Y2faIm7PWgwAYL5O2v65y0ypAQDGKQbOMl49PGY/NQDAWOUptSeLPZ9OrwEAcIpi4FS2fLw4cDwBAGCckwLnov7yrMUAAObr2U0DIVzSNAAAGK18PEH++PB413171mIAAPNVDJxotSTJ805mJ3VRAwDw4Nk7Dbg62WnLdgAAeFBMkF33/uExW9sAAMYqn/iZbh8eZ2/PWgwAYL7KJ37WXzw87tL7c9YCAJixE2/KsNMAAGCcclv0k1FNFV+csRQAwJydNMJZVNfnqgMAMHPFwNlWbx4em+JZiwEAzNdJu0U/7VgDAOAUz7iH8+HhcXb2UgMAjHPSbtEAAIx10pQaJ34CAMZ6xl5q9cNj9lIDAIxVTJBluHh47IxwAAAjle/h6PEejiudtRgAwHwVA6fTY2daDOuzFgMAmK9y4HzWCs09HADAOMUEqe1xVBMDLdIAgHHKR0zrsUuN83AAAGOVA+dJW3SXPp21GADAfJ208LOKl2ctBgAwX8XAebrYM+X9WYsBAMzXSSOcGFZnLQYAMF/FwFn4Y5daHbdnLQYAMF/FwMn2OMJxzwNXAgDw/YqBk/TYCm3Gwk8AwDjlhZ/+uNjzaYs0AACnKO8WrcdGgS5z4icAYJyT5sjadHuuOgAAM1duGvhs4Se7RQMAximfh+PVw+Nt9easxQAA5usZm3dWpUsAACgqBs7al5KiJNqiAQDjlduiVUnHo6UP6eO56wEAzFS5Ldriw2PjxE8AwEjFBDl4eni8jV+ctRgAwHydNGRZ6+pcdQAAZq4YOK+rx61tknHENABgnPLxBMEeHje+O2sxAID5KgbOKprMFlPUAgCYscFVnVV8rSDJrJ9WO/jNFDUBAGZocIQTw1Jmknsr96SV0TQAABhnMHC6dKvkkryTlLTSxTRVAQBmZzBwUr5RdsnVSZIq9lUDAIw0GDhmUasoxXAtyY5HFdjQUwAA+E6FwOmbBdw7Sa7X+bUkn6AsAMDcPGungRD6Y6ZfBg5gAwCMMxg47gfF4wxaCJds3QkAGG0wQ+r4SssoZW/kfr+tDfdwAACnK9zDCTL1xxK4H/RyUSnYZqLSAABz8qxZshjWMluqyS4XG3gCAE43GDjr6pXqcOxK807xuOsAAACnGgycYFFtNsWwkKzSZc39GwDAOIOBU9lSyaU6rBWOj1mHAwAYYzBw9unDQ1v0fQMBXWoAgDGKTQN1cLmyJGkZJUY4AIAxhhd+KiuY1KTb0qUAAAwanlLr3ut9Y6rCWmZBFbNpAICRBs8bMEXVQbqufqy7sFaTpyoLADA3gyOcy8VXkqROBy3DhYJJNA0AAMYYHuFYn0eHfCNTP6Vmig8HsgEA8FyDI5y77q3a3O+lVoXj2Th0qQEARhgMnKb7pLtOilb3v8+SHR8DAHCKwSm1Kq61qaR1e63krVqaBgAAIw2OcOLxiOns6WGbG1OcpDAAwLwUFn4mLY5XrH0r9/7PAAA41eCUWpf2CibVtlSjRh39AgCAkQZHOF2+VTQpeq07f6cuu9z3U9UGAJiRwnk4Cy2Cq7GdKi1FzwAAYKzCjpxZrr5pwJW1CCbRNAAAGGEwcHJulF1a6UJJrZrsMiNwAACnK545sE+mnT4qK+m2S3JvpqgLADAzw/dwwkJ16I+aDoqKxsadAIBxCk0DS0VzBQXl4/qbKr6epDAAwLwMBk7KOy2DtPKtsnfa56zMlBoAYIThpgHfaRmzgoLafKdF4JhpAMA4hQQJcknB+8tWIcj9cP6qAACzM7yXmre67fpLoi31qevoUgMAjFI48bPWVZ2VLSsoqmZKDQAw0mCCmKI2MSt40CHdaBWCQricqjYAwIwMB45Vyq6+LdpbNTlzDwcAMErhPJysLKmzTq6sYCZ3zsMBAJxuMHAW1bU2MWvpK0Vb6pCzxAFsAIARntUFUHnUMl6cuxYAwIwNBk60pW67qM76Uc2SLjUAwEiFnQZa9ZNonbInRZMkNvAEAJyuEDjdw+M2746P/Jz1AABmajBwlvFS0Vy115KkRNYAAEYaDJw271SZFBXV5Z12iQ41AMA4heMJGkVzJVqhAQA/UGGngaB9CuosaV290o/X9VR1AQBmZjBw1tUrfWyjlr5QsKhVnKosAMDcDAZOFZbKx8emoEBbNABgpGJbdHLpYI2afKPs/Q7SAACcqnA8QVBt/SaeXT6oyZKrG3oKAADfaTBwmnyjLGlnd5KYTAMAjFdch7NPpqysYLWWzKYBAEYqTqk1Wf0R0xbU5v5PAQA4VbFpYBGk4EFt3im5ZMZaHADA6Yq7RSeXKlVK+aD3TZZ7O1VtAIAZGT5i2jvtUz+1Zopqs4vdogEAYxQCJytafx6OJK0iB7ABAMYpjnCyS3
u7lbOBJwDgByjcw7lTHaRKS9Vxq33KQ5cDAPC9njVHtvCFTFH7TOAAAMYpBI7Ldb9xZ9AicA8HADBOMUG63O+lJkmrEMTCTwDAGM8asmRl7bv3/RPCxTnrAQDMVDFwLirXxvuQcTkLPwEAoxQDZ3HcsPP+HBzOwwEAjFEInP5+zcH2quJKycV6HADAKMUuteR9W/Q6vpSJEQ4AYJziCKc2qfZ+h+hVNJlVE5QFAJib4fNwbKllzAoKMusvreJ2ksIAAPNS2EutVR1ctaK6fNAu+UPwAABwisH5MbOoVcy6sKWCojp32fOW7gAA8JniCKdJ/SWmcNxpAACA0xVGOH2zQDTTQhttKmNKDQAwynDTgGqtq6QgKVqt5K54DCEAAE5RHK5sYr/QM6rSXedKbG0DABihcA/noDYHRTOZooJJq3g9VW0AgBkZHuFYpWCuaKbgQdsqKLDwEwAwwmDgxLDWRd1PoV35C13V0kKbSQoDAMzLYOBk77SpWrmkK98omHSX301UGgBgTorrcKqQFU1aWlSbpSb
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAWkElEQVR4nO3dy44kSXbe8e+Y+S0ueamqnu6hhiNxKAEkQVBXrrXRRoLWWumZtNETCFroGQRttNEbEAIBCYQu7GkO2d2srMpLXNzNjhYemZWlbrpn+Ey6ANf/t+mozsjqU1He+cHMjpmZuwsAgNcW/l8XAAD4/wOBAwCYBYEDAJgFgQMAmAWBAwCYBYEDAJhFMfRFs+Jv7Jl27+w3Xw4AYKlGRzjBNnPUAQBYuNHAyX4/Rx0AgIVjDQcAMAsCBwAwi9HAseG+AgAAXmQ0cFwc7gkA+PWNj3CsnKMOAMDCvaAtup6jDgDAwtE0AACYxQv24ezmqAMAsHDjTQN+nKMOAMDCMaUGAJgF+3AAALNgHw4AYBbjbdFhPUcdAICFe8E+nGqOOgAACzcaOFVxMUcdAICFGw+csJ2jDgDAwr2gaSDPUQcAYOFe0BbNVh0AwK9vNE0ip0UDAH4DRgOn+Oy06PiKpQAAlmw0cFZ29fTaZK9aDABguV7QNJCeXht34wAAJhoNnNpXn94cmlctBgCwXC8InE8hUxfXr1kLAGDBXhA4j9NopiIwpQYAmObFm2yCrdWEy9esBQCwYKOB81Xsj7YJgUM8AQDTjQbORREkmbJ36vwwQ0kAgCUaDZwmmoKtFazQRfhyjpoAAAv0gsCRZEFmhS4yazgAgGlGAyefbpg2Ra2f7ckBAOAco4FzzJJ7q+ytShVz1AQAWKDRwLnvXO6tirhS4Cw1AMBEo4Hz7fEgKSlyjhoA4NfwggvY+lFNl3dq1b16QQCAZRoNnFZZkinlnQ5qZygJALBEo4Gz11HB1irjhQLXTQMAJhpNkJvwXu4HmQVl5TlqAgAs0GjgBAW5OmXv9Kvw53PUBABYoNHAiae9N+5ZRz28ekEAgGUaP2ng2TTaxt68ajEAgOUaDZyD7SVJ2elQAwBMN360jffTaMFKtVxPAACYaDRwOvUhk/JBNx1NAwCAacYD5zSq6dKN3GmLBgBMc8ZOzixnHw4AYKIXB45ZKeOkAQDARKMJkk7dae7p1YsBACzXC2787APHLL56MQCA5XrxCMdUqgjciQMAmGY0cKqwlSRlf1Cbd69eEABgmUYD51NnWlCw4pXLAQAs1fg+nNzvw+m71FjHAQBMMxo4j+s2psh5agCAycan1J61Q6d8fNViAADLNbooU4a1JMn9oC6z8RMAMM1o4Dw2Dbg6idOiAQATnTVkqYovXqsOAMDCjQbOrnsvs0pSlBlTagCAaV6UIO5HmZU6tN+9dj0AgIU6Y+OnFMPqVYsBACzXi+fI3Pcq4uY1awEALNho4DTxWpJkKpRz99r1AAAWajRwVuFKkuRyJQ7vBABMdEbbWVLKd69XCQBg0UYDp9X+2a+49RMAMM0LzlL71KVm4wcTAADwo8YvYLP16VWU2PgJAJjojCm1JDOumAYATDMaOMf0qVEghuZViwEALNeLTxro9+FwHw4AYJrRwHm8D0f6/JgbAADOMRo4wfrONFenYNWrFwQAWKbxwFF8eh0DTQMAgGlGA8ee3mKvXAoAYMlGA2ejN6dX/sqlAACWbPzwTv/UNMCNnwCAqUYTJFvW43RalzgtGgAwzWjgfNCna6UZ4QAAphpNkKRWrN8AAH5d4zd+2vbpdRFWr1oMAGC5RgPnTf7J0+vsXDENAJjmBV1qn04XyN6+ajEAgOUaDZydPR7YGWXPTh0AAOAcL2ga6CRFSVl1cfH6FQEAFml8H46yYthKclVhO/Z2AAB+1GjgrH2t7Ic5agEALNh404AqmZWSojqCBwAw0Wjg1PbYKJCenRwNAMB5RhOk9SQ/jWyC0aUGAJhmNHAO6iTvr5ZmhAMAmGokQUxXoZHLJZlqo0sNADDNSOC4NjGemgZca13OUhQAYHkGA8dUKJjkvj+9OYirpgEAUwwGjispPsuXg+3EVQUAgCmGRzhWqw594gTbaO0cbQMAmGYkcErFIJk1kgUFDzIVc9UGAFiQwcAJVqkKUgwb5fygUpVcaa7aAAALMhg4XbpR+fSOx6BhDQcAcL6REU6j0lzBSplV2ngzV10AgIUZDJyyuJKZFEMls1obK+eqCwCwMIMdAGZByU1lWCmFowozmQq5urnqAwAsxMgazk4fW+mY7yVJd7kjbAAAkwyOcFK+lSQduw8KVijTMAAAmGhwhFPEKzVRiqFvFrgMrOEAAKYZ6VIr1QRXEVaKYaVNERVsM1dtAIAFGQyclHcqwuPrg7Lr6TI2AADOMRo4u2QqQq1gpdaFcdIAAGCS0Ss8S3Ot4zuFUHx2cjQAAOcYDJy6/EJmUmmNghVqs8TRNgCAKQYDJ1qt5CZTULRSTZS4gA0AMMVg4DTxUkGuoKAqbE8HeTLCAQCcbzBw2vygu87U+kHb8E7RJLNqrtoAAAsyeNJAm3eSpMrWKrxQGST3dpbCAADLMrrxc1u4Nnmrzjpll5hSAwBMMTyllu6fXlf+OJVG0wAA4HzDXWqh0iGbDrbv32yScScOAGCC4cM7Qy2XlJUlSfed5H6coy4AwMIMBk72rCa4SlXq1GnPqTYAgIlG13BaNxX+vJmNNRwAwPkGA+fxoM6P4b2StUruMqtnKQwAsCzDgeOdTJIrK3rfLEDTAABgiuE1nHzULklrv+h/7Z9u/wQA4BwjU2qfThUoVMi9H/UAAHCuwaNtpL5FYGf3Cgpq3fWCK3QAAPiBwcAxlQomRfXrNvuUZUbgAADONzqlVgbpIl9Kkjp3uedZCgMALMtIl1q/hnOwvY7+oCRXoEsNADDByPyYK5p0sJ0e8ntFGVNqAIBJRtMjyFV6reytklzdsxOkAQB4qdHAKYIUVagIjYJM2Q9z1AUAWJiRwDE10VV5JVNQaSaxDwcAMMHoGo77aYRjtZoYJBvdugMAwA8MBo6pUBFcq9Ntn8klibZoAMD5RvbhdCrM1SopqlTnLhNt0QCA8402DTTRZTLt/aPcXYHDOwEAE4w2DeyTqVBQ8lZNDOzDAQBMMpgeRbzWIQd1ylqHN1rFoJyPc9UGAFiQkaNtstYxy+WSpOSu7LtZCgMALMvojZ+XZadSsX+ziSumAQCTvHhBJlqp7FLOd69ZDwBgoQYDJ8Z1/09FNb6R+yw1AQAWaDBwVsVb3XdRB7UqVamKJonUAQCcbzBwknfa5yCX68FuVdERDQCYaOSkgaQqZF3ZSqaoOs5VFgBgaYbXcKxUkBRkuvPvlL0/Xw0AgHMNBk72rGM23fhOXT6ozeK0aADAJCNNA9fap6AHe1D2tv8G9uEAACYYPWngIZm2vpFZVDTJGOEAACYYDJzOD3KXCkV1+aDkUpe+n6s2AMCCjKzhtLqssjol1XF7uoDN5qkMALAowyOcfFCb+4Cpba
v7zse+BQCAHzWYHm261X0X1KrTLn8gagAAk402DQS5OksqQq0YJGNKDQAwwWDgVMWVJOlge7lnpSyJGz8BABOMTKndq3VT4X0rdJbkzo2fAIDzjXSpHZVcypZ1332r5C6zaq7aAAALMnyWWmhkkkqvVIWNqmCK4WKm0gAASzLaNNBEV+FRwUq1ub92GgCAcw0GTrBSZehv/KzCWp07gQMAmGQwcMyCquAqTm9rc56lKADA8oxOqe2T6aPdqfODyhBoiwYATDLSpXaQS9rZgySpMFOgSw0AMMHIlFqhJriu8xslb7liGgAw2ej82LZMutRqjloAAAs2GDgpPajLpoM6RSu1T66qYB8OAOB8I2s49/rQRh3UKlihKphMzKsBAM43OqUWrN+HI0nRTJErpgEAE4wGzir2e2/cswqTzBjhAADONxo4F2VSrVLRSrHtEwAw1WjglOa6DKVWdqmPbVZh9Rx1AQAWZmQfTqVjDrrLnUqvZZKilTOVBgBYkpERTlB26aBWUYXqYNqnD/NUBgBYlJHAybo
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAWnUlEQVR4nO3dTY8kWZbW8efcay/uHm+ZlVVF1jAaEGpAzdBiwxohFrBErFnxvViwY82GLXvEEs1IwwLNTA/dVVNVWZkR4e72cu9hYR6Rkd3ZZuHWHYZk/H9SqSIjzCNOunvkI7t27FxzdwEA8NLC/+sCAAD/fyBwAACLIHAAAIsgcAAAiyBwAACLIHAAAIsoxr5oVvzOnmn33v7w5QAA1oozHADAIiYDx6xaog4AwMpNBo57t0QdAICVe8YZTr1EHQCAlZu+huP9AmUAANZueklNaYk6AAAr94wuNaZJAwB+f7RFAwAWQeAAABZB4AAAFkHgAAAWQeAAABYxGTgx3CxRBwBg5Z5xH05eog4AwMpNBk6w0R0MAAB4lmcEDrPUAAC/v8nASfmwRB0AgJWbDJyqoGkAAPD7e8aSWrlEHQCAlZsMnJ4lNQDAH8Bk4BRhu0QdAICVmwyci+LNEnUAAFZuMnC2dr1EHQCAlWOWGgBgEZOBU/vHazghXL1oMQCA9ZoMnI3vTh9FFeHihcsBAKzV9DUcH0bbmEWVkcABAMwzfeOnTJIUw4Wydy9eEABgnSYDZ2fV6doN/QUAgPkmU+QqFgpWqIhb7bgnBwAw0zNmqQ1bFKTcqLbLJWoCAKzQZOCUZo8fv/G3L1oMAGC9JrfzrKLJLEgu1c5mbACAeSYDJ9owwDNYUFZeoiYAwApNLql96LKSNyrDww2gNno8AACfMxk433etUm50TO8VFCT5AmUBANZmMnCSsvp0q+y9jLMbAMBMk4HzEDKmoGr6kg8AAJ81GTiNOpmVMgtqxGgbAMA8000D4YPkWVfFW/WWlqgJALBCk4FztHu5OgVFdWqXqAkAsEKTgdN5I3nWPr/TwfZL1AQAWKHpDdh0KVlQ9k7JuIYDAJhnMnCyZcmz+tzIFJeoCQCwQpOB40py9erzQXf+/RI1AQBW6HnXcE6y9y9aDABgvZ6xPcEwIdq9V2FMiwYAzDMZOLVvhwOtVhb34QAA5pkMnL3dShqu5QAAMNdk4LQ+3HuT8lHu7IcDAJhnMnAeeD7omD68ZC0AgBWbDJzeG5kK2WnXTwAA5nheglhQES7U5cMLlwMAWKvpM5x8lHurPt3Knr8CBwDAJyYTpAgbSVL2vVw0DQAA5pmepeZJUpTkyplJAwCAeSYDJ3kjKcmsUn4y5gYAgHNMBs7DdZsYrrgPBwAw27O7AIKVqoqbl6wFALBiz24aiKFST1s0AGCmZ5/hdOn2JesAAKzcM7vUpD69e/FiAADrNRk40cqHj9T1f/vC5QAA1uoZbdHdw0cy27xwOQCAtZreYvpJo0Bgx08AwEzP6FL7GDKMtgEAzDUZOJtw/fixM2kAADDT9BbT6Ycl6gAArNwZ13CixGgbAMBMk4FThQtJUgyXMpoGAAAzTQZOGXaShoYBs+LFCwIArNOzb/zM+ZYuNQDAbNNt0XrSFs3wTgDATJOB0+uhFdrkYsdPAMA8z96ATfIXLgUAsGbTTQNP5qeZaBoAAMzzzGs4Nhx8apEGAOBck4HT+l4Py2m0RQMA5prenkDd48fZ2xctBgCwXtP34ah8/DhY9aLFAADWazJwniri9qXqAACs3PT2BLpcog4AwMpNBk44HRLsQmXgDAcAMM+zA0cW1Gc2YAMAzDMZOLUPN366d+qZpQYAmOnZTQPunap4PX0gAACfMRk4l747fZQU7KymNgAAHj3/Go7ENRwAwGyTgdOok51u+AxWThwNAMDnPWPSQJB82OkzezdxNAAAnzcZODtVMht2/UyZWWoAgHmmAycWinFoHCjiZuJoAAA+bzJw9mnYVtpsIz8trQEAcK5n9Tn3/TuZlYy2AQDMNhE4ptKCXL3cO5VhJykuUhgAYF1GA8esVmF2Wk7rtAuvFYzrOACA800ETqlopjK+kpQVFOSiNRoAcL7RwKmKG5XBlH2YMBC9lLPNNABghtHAybnXqyrIrJAkFSpkLKkBAGYYDxzvVAUphlrBdmrsIInWaADA+UYDZwgayT0rhEpJPUtqAIBZRgMnnTZcS7lR9l6JWWoAgJkmznC2yi5lb5XzrYrTTDUAAM413qUWL5T8459f569euh4AwEpNNA1kmaQiXEiSLpwONQDAPKOB40oqgxRDJbNKWT52OAAAv9No4BRWK9qwD05dfq1SxVJ1AQBWZjRBunxQYa7L6u8oeadCJrOK1mgAwNkmruEMe+HUdin3pItYPu7+CQDAOUYDZ1O8Uu8mOx22CUE53y5SGABgXUYDp8+N2iz5aZxNYWKWGgBglonAOWgTJVOQWVQRTEW8Wqo2AMCKjHepnSYNbHSpMmy1KyQ/XdcBAOAco4FzXbxVcilbfmwgeJivBgDAOUYDR5KSS0mdCqs/GXMDAMA5RgOn9b2qIAUF7ey1ujxsOw0AwLlGb/ys7VJ1dG38Qkm9giRnSQ0AMMP4GY722vempF6lKkmSi6YBAMD5RgMnKCq51Fuv/HAvTny9SGEAgHUZDZwm3ymYVHmlre/Uu5Qzc9QAAOcb357AkySp9ErBTX2mTQ0AMM/4pAFv1KThPpxDOKh3KQRG2wAAzjc5LXpXSAfbP36OSQMAgDlGA8cUVNqwjHawvTbRFikKALA+k1t4dm5yJWVlHVJ+nBwNAMA5Rs9wdsUbZZeyhqC569Pj3jgAAJxjND2Sd4omRZVqfa8yBIVQLVUbAGBFRpfU2nynOroqrxT0RoWZ3FlSAwCcbzRwUm61Ca7rfKPWOjU506UGAJhlfEktH5QlNdbox/CtopnMuIYDADjfs9KjsaOyJ5bUAACzTd74GW0YbbPTtfYpKTvbEwAAzjd5huMu7Xyn3np1nmRiAzYAwPnGA+fJ8pkr6SqWEtdwAAAzjKeHBcXgugt36rzRLgYF4z4cAMD5xjdgs0JdHnb8DBbVuyRG2wAAZphYHwsqg6u1VlGFjjkrcx8OAGCGyQsyr8qkV/kLtX5QacaSGgBglmd1ANRe6i59J0nc+AkAmGU0PWKoJUmForbxtdqcldJ+7CEAAHzW5GgbM5fJVNvlabTN5BY6AAD8lvHASbcKklzDrp9lMLYnAADMMho4ZfHl8H8F7fxqkYIAAOs0cQ2nUpuDerlcWdsYGN4JAJhl4sbPUnd9UJCptXaInXxcqjYAwIpMBM7w5axh188uS65ukcIAAOsyeYZTBldS0sHuVQbJvV2qNgDAioxfw7FhK4KNSvXeaBNNZptFCgMArMto4DTpTm02dcrKSqqCVBVfLFUbAGBFJufUlOaqLarzgw7M7QQAzDR+H07YqnOTJNV2KUnq0u3LVwUAWJ3RwNmGG912QcmzKtvKTHKnSw0AcL7x0TbqFE06qFPrh6Et2rkPBw
BwvslrOHVwNdbJFNRnX6ImAMAKjQZO742abOrUKlqpbWGSbKHSAABrMh44+agyuA62V++N3CWJsxwAwPlGA6fLB0lSawd1Oqp3iTMcAMAc44GT7tVm08YvhoNNKuKrJeoCAKzMRNNAVnapVCX3rCAp5ftFCgMArMvkFtO3nSl4kGvYB4fhnQCAOSbPcK7KoUkgWKHbPstULFAWAGBtRgPHPcldqr1WVKljyoqRraYBAOcbDRyTqYpSqUK9N6pCUAzbpWoDAKzI5KSBdLrtxiyc9sOZfAgAAL9lfElNvZpk6tRrazdKLtl0RgEA8Fsm0yOYVKtU1LD7Z3Y2xQEAnO9ZS2pBQY3fLVEPAGClJgLHZJLu7ajW92qzcw0HADDLs5bUpKFpoMlZ0eqXrgkAsEITgePaRNelb7WxK9UhKHCGAwCYYTI9roqkyzA0DET7OEEaAIBzTAaOPSypKajJrpzpUgMAnG8ycI7p4yFdzoqhetGCAADr9KwLMp1nRZUqQ1AVL1+6JgDACk3MUitUh6wg09Z3uox
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAAXIUlEQVR4nO3dy24lyXbe8W9FRGbuG1n3dh9BBmzoWBBwYA89kSd+C7+FAD+IhnoEz+0XMOwX8MwQBEuApaOjdvdp1YXc3JfMiOVB7mJ1S30yubPFNJD+/3rQZDGLXNxM1kJkrFjL3F0AADy38P86AADA/x9IOACAWZBwAACzIOEAAGZBwgEAzIKEAwCYRRr6oFn6nTXT7p3904cDAFiqJ6xw4vNHAQBYvNGEY0bCAQD8fKMJJ9h6jjgAAAtH0QAAYBajCcdV5ogDALBw43s4LIIAAP8EWOEAAGYxvnxxEg4A4Od7wgqnnSMOAMDCPWGDhj0cAMDPN77C8dMccQAAFu4JnQaaOeIAACzcaMKp4ssZwgAALN1owomhniMOAMDCjSacXM5zxAEAWDhWOACAWTyhaICyaADAzze+wrFqjjgAAAs3mnDKj1rbMIwNADDNExLOl9Y2xmoHADDRaMJp4u3j28YKBwAw0XjCCbsvF1OxBgCY6Kp5OIUzOQCAiUYTzsZefbmYFQ4AYKLRhPOyvL68FRXD+pnDAQAs1WjCWTurGgDAzzeacG4u4wnMohIrHADARKMJJ5nJlNRUX2n1gxJpAACuMT4PJwTJktwLfdUAAJONZpBtMsWwVvFWgYOfAICJntC805TLQev0WkmMmwYATDM+gM1dUlEdd1ppN3Y5AAA/aTTh1MFk1igo6nV5M0dMAIAFGk04+84Vw0pRlXY8UgMATPSEhJMlSUVZnfzZAwIALNP4PBxJXb5T5yc96DhDSACAJRpNOKeSHwevhfHLAQD4SU+YhxO1qt5oHV6o4hwOAGCi0YTTepF7UVRSpTRHTACABRp/pOZZfikZaNXNERMAYIFGE84n26vLexXPKlSpAQAmGu80oE6lnPWx+43ubS+xjwMAmOBJZWeurGhJJ6MsGgAwzfgjtfBeZpVW8YWKiqQ8Q1gAgKUZTThBQaag7K32ej9HTACABXpCp4GiUvY65k9q/TBHTACABXpa6wALWsVbZW+fORwAwFKNJpyN38is0bncK15a3AAAcK2nrXC89BcbnQYAANOMV6npt5L1hQNtYQ8HADDNEw5+tjIFdX6aIx4AwEKNJpykRu6dolVqwm6OmAAAC/TkATefz+IAADDFaMJ5KO9V/MgjNQDAzzJ+8NNbSVlt3pN0AACTjdY513EnU1LODzqFT3PEBABYoNEVzspu+m7RcSO/nMcBAOBaTywacOVyJOEAACYbP4dzqUxzb9Xlu2cPCACwTKMJxy6XNNVbhbB69oAAAMs0mnCO3hcKRGsUQ/PsAQEAlulJj9TM6kt5NAAA04xXqYVbSUHFO+XCORwAwDSjCSeqkryT2ZO74AAA8I+MZpFOJ7lcbXfHHg4AYLLRTgP50tqm+EFt5hwOAGCa656TcfATADDRExOOyb1VCPXzRgMAWKzRhLMOLyS5JFcM6+ePCACwSE9ubRNsq8gKBwAw0WjCaXXsLwwrdfn47AEBAJbpaedwJM7hAAB+lqcnHEUVJn4CACZ68rLFLKjLTPwEAEwzXjSgvmggl5NM9uwBAQCWaTThVNbPwHHvZJRFAwAmuqoSIDKADQAw0ROKBvp2azGs5d49e0AAgGUaTTgH/yTJVPwkp5caAGCi0YRzLg+SXLnsOYsDAJhsNIPs4ltJUcHWKjxSAwBM9MQ9nKIY1qrTixlCAgAs0WjCWflWktTmD+zhAAAmG004yZOkIPeTcjnMEBIAYIlGE05QULDV43sAAEwxXqVmZxXfK4ZbpUinAQDANKMJx9Xv2+Ryr2jNswcEAFimJxQNbC5vZUVLzxwOAGCpRhNO7dXj22bxWYMBACzX6JJlp0aSyaySe54hJADAEg2ucIJtVVmUWSP3szo/ycZzFAAA/8jwIzUL2sYoXVrarOILuWhvAwC43uByxf2kTTKl9EqlnGWcwwEATDSYQUyVjtllimqqV0rWKISbuWIDACzI6JLFZKriVqa+Qo2pnwCAKUYTThWkaEnZT3IVdd37OeICACzMaMLZJFN3mfYZVclFaTQA4HrDZdFhpWhS8U513Oq1f60UX84UGgBgSZ70SC1cWtpEJRntbQAAEwwmnBTXSiYla9SVk472IGfMNABggsGE4160Sa4UVmrzXkVM/AQATDOYcHI56ZyltjzILGjtG6Z+AgAmGUw4MTRq3ZS91Sq91Cu/lVjlAAAmGN7DCWvVoe80EKxSZWHsrwAA8JPGR0wXU/FW53yvk2fFwJhpAMD1RhNOW/pzOGZBLpc7j9QAANcbeaTWKFh/DqcOO70INedwAACTDCactjxoFVx13KkOG61ikBl7OACA6w1XqVmlOkqb8EqmoFU0Dn4CACYZGTFdKXvfaeBU7tUW8UgNADDJ6AonF8lV1ISdsjsrHADAJE9arkQlFVWKZuIcDgBgisHsUdlaKUiVN+p00jaZ6DQAAJhiuHnnJbl01imq0jaxhwMAmGYw4RRluaSsVrWt5RJ7OACASQaXK0FRx2xqvG9nU1wK1swSGABgWYbHE3irY5Y2vpMknYvkyrMEBgBYltGE0xYpXP6LNldYAIClGTn4GbWK0oPda+M71aEfWQAAwLVGVzjFv7x/LlKb988dEwBggZ5U43ywvaKS9p0rxbU6tnEAAFcabW2TghRVKavr93M4hwMAmGB0PEEdpORJja9UBakOu7liAwAsyGDCMQXVwXWyg0xBxaVT/jRXbACABRkpGuiUvW9xc7Kj3KWKKjUAwASjrZ+zm278lSTJTGrL4dmDAgAsz2gFQFukymtldfrUZnUkHADABMPNO73VJrke7F7F+s7RVdzOEhgAYFlGEk6ndXQFBWV1qkJQtGqu2AAACzL4SK3Nd+qKqfJaxYqK+9DlAAD8TsMD2PwkqR8x3eqsaEbRAABgktEqtSYWuYqiklxSl0k4AIDrDR/8tEZNcHXWKSiI6QQAgKlGHqm1uq067UrfzqYKpBwAwDRPOviZFHRvH1WcsmgAwDTD3aLDWn9/TipyVd4omVRHmncCAK43MvGz38MJMgUFnYqreDtXbACABRlu3lkOuqk6baxWUVEVTJmEAwCYYHiFE2rVoTyucFbRmIcDAJhkZB5OfHx77Ruds3PwEwAwycg5nCAzVzRTVNJ9Lmrzfq7YAAALMppwgqRVCDrYg1YhyJVnCg0AsCSDCaeJt7qpztqkfg8nmJTLca7YAAALMtJpIKstQcfsanwlSYphNUtgAIBlGTmHU+mb41r3XVZR0UMuSmE9V2wAgAUZnIeTQqN9FxXMddJR4fJnAABca3TiZ38ORzrZUdsU1JXTTKEBAJZkMOEk61czh5KVPGmTjLJoAMAkIwc/g86lv2TjO21TXyoNAMC1Br
PHIb9Xccnlau2s4qJoAAAwyXDzTu/UBFe0oAe7U1soGgAATDNSFh0UzVXctfEbVaF/zAYAwLVGz+HUseioVq3OcklmceivAADwkwbP4ZiC9l1UVNHR9iouRavmig0AsCCDK5y2HPSpjXK5imdllw7d+7liAwAsyOiGTFvUT/u0Rscs5XKeIy4AwMKMFg0Ek/qHakXFOYcDAJhmMHscuw8qLmVlNb5WV1zBBrd9AAD4SYMJp463aotpb0ed7KA6GiscAMAkg9mjKwcF698++4OyO1VqAIBJhhNO3ivItfWVoqp+D4eDnwCACYYnfqponVyNknZ6KUmqwmaOuAAACzNSpZa0iq7K4qVztMtV5ooNALAgwyscL2qLXS4MOhXXKd/PEhgAYFmGVzih1iGbDt4peVITTK48V2wAgAUZGcDWN+ps1amoqA72OAUUAIBrjJac1cFVKSlcLg2URQMAJhgewFYO2sQil+t7+0buc4UFAFia0RWOmXRWJ1dR51ITdnPEBQBYmMGEU/ykQxe0Vi1T0LEU1cY
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZwAAAEeCAYAAAC+OaPqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9969lWXbniX3WNsdc+2zEi4iMjMysLJtsks0iOT3d49SSBoPRYAD9Kkj/pYCBBAgczPT0tNqIpkiWYWVW2nAvnrv2uL2Xftjn3vA+sjKSjAUkEIl3zbnH7LXXWl8jqsq7eBfv4l28i3fxbYf5rg/gXbyLd/Eu3sU/jXiXcN7Fu3gX7+Jd/F7iXcJ5F+/iXbyLd/F7iXcJ5128i3fxLt7F7yXeJZx38S7exbt4F7+XeJdw3sW7eBfv4l38XsI9648i7q3FTAsOpXuFd1pyf5lp9h6OnIIRQVruNL+iac+IunzqN3p3gLdDvBmQmxED2eVyuIzHMRCHE2EdAwHlV+YX3F3+++ceS5ldQ8QQYoMRx3X/R+zHPZZSUcmKOaccV78CItaUANTtzVf43Y/8Gslwdkrp9lg0N4lx/tqf+W3Hn5X/D0bkDKzDAJ/HE+7JTe6s/uoZ1+1dPC+K7D0Ku8OiuUUXToHXf+yNGSMYoq5B4ys+qy8WZfY+mR1hxSMY5s0tmu4OIjmZ26MN85e4vy0iHhH/vXgm3sZQ7eRpf3tmwnk7wwIRJbzi+wN1e5O77U2MDDka/AkGm/4k5qFnzZgxqi2qFaB4O2THXcdLjteclppPzd9h8exwmVwLEDAYHDm5v0rd3uLpD3CgDUtEDDE2iDjw4HFk6mnFIRggErVDtH3F3/xwZO6I0u8hGIxYMjelat7+hyulckVVaYGZOWPW3iZqtX1N7q9iTQaAaqTpLohx+a0ueP9YwohHJEe1hVd+vlJkbooRh5VLRI2s27vEuOBNJLNHY918SW3GeDvGmgxvh+TuY7pY04Y5Gtcv8WkB1fTbX31T+y6eFt/DhPN6D0IKizEDvB0TaDFYdv0Nog/Mu9vU7RnWlBRuijMFY3OIVU8rNUFbblV/S9PdRiRjnH9IZkf9kXWcym2W4R4iltLtYcSxbr4EQKTASE6IF9sj6cI51ozYL39MKRNGYUxEEQSvGUs9oQtnAMQ4TztHKYC4/QzV5vHfJwXGFIQ4f+zvUWuasCBqR4g1MVa87SH9rdr1C1afhglaI+JRDYhk7GTvk8mAoC2BloV4mpDRdsff4dF/t7GpNkTM9l56UoS47jdXrx91ew/BYkxG1O5brxZinFM/8B0i2ROeixePdB4E7w7pwvyNnZd/6vG9STjO7mPEY4yjC0u6MCMtuq+yYwrbG3RhSqw4Vu09YqxQWlQDUWui1ng7ZpwdYjAEbWlY0Xb3gLTQr9p7KBFnHCMd4nmf1hyxlhUVCzIzwJmcoC1tWBJjg5Hh9nsgEOIFd5f/CRHPovwpUznC4fCaYcU/cNyCxvVDuy7vDlHtCCE9bMYMEXHkboe8T4RRA9onKNVA1V3QdBeEuODNJPBvP7w7YELJ0HrGzmBF+Lj6iH13mZk/Z8ZdPAVj9ogauRO+ou7mOFtQuj0G/rBv8bTUYf6SbZbvcwiCQbUj6pOvdReWGLfPtPgAgHV3xrr5hte5N1QrFIjhu2l1vk6ygftJWrV7l2zeYLzVCUdwiCkp/D7eDB4odQ0ijq47e8GSV/pd8OYmTBWONQXelASt6cLJQ+9QjYS+iLgItzFiWbbH6eE0wwde19HGNa1taDUHUkutkTXn7VfbKkK1owvnbBKkPHbqUym/aO8gmaGUafqdeh/XIZJjJMeYDCMeazKc5DRxuf1s1XQ+lvXnLAk4u8ul4g+w4rF4lMApkaoLRPXbc/o2h+CY5NeYmozSCkMnGIGRdYQw4NycMqu+oXBTJvYAi9men8yMyMyAUiZM4z61qTizt1iHM+bVP4WEow9V1E+KqN0D/w60Yc6Tk43wshs8I0OMKbAmR8RQNV+/1Ptf9ru8mz6njf2sEASLtWOsKb831f/3Kd7qhKN0aJyzrtes+/9/lbBmQu53cZIjYrfDRSc5Q3aJEskGI5q4YF79w/bbUwVlcCbHU1CbBaoRkdSeqJvbKB2hueCOnVDbq+Ra4jUj05KxO8KQvq/VipPq131lFlA6RDIKfw0l0oXUZy7dHpZU1bQ028oEQLUlErGUZHZIF2vW4RTVDpGU7OiPzZpU3WRuihWPwdBR0+iKefVbvo1e+rcdAhhJRx4UFqHjTGYogVF2mYHZZSdMAQjmQ2q7xmCw6nHqCHQEOjpNrdF/TPE68wZvxxhxtHFNF2tU799zm51+SlrPu2cEIwOsHVC4HazkjOwBgqHRFYGWGDua7vYj70tz2de5J53dZZhdITcj1nZI3c0Jcf2SVbyidHTh7Jmtx3fx6vFWJ5xNvO7gzpoSJzmd1umG79FoRobYwZ/gyXGSg0lzFtUKkYKjwc/JZcRBOMRiWNmr1K7iTvyUWfXZQ8fVhAULOeFOd0zbzdktP+b9+GM6CaxYpNWynzxsZjlRa9bNN4hYMneANVlKPlqn3y2RZfPg7CH0g/A7NN0dnvSAqlZ9ikq7tTZkrN0FBtsDECD3V2i7i+8NskvpWLR3WLqAYmn79XBBzdzMECxjc8hAx4wkxyLk8Yialr/X/5159Q8YM2aS36DuZtuZ2u8/LG9jC9OZEoOlCQvq7qwHDVgEQbV+aivu8VCiLtEQqLF4G9ngcWbtzb6N+6Rq6/XOibO7lP6AwkzwUjCLN5+Q1N7F2xDfi4TzutF29zDGoRpRjduFNuqSVTzDmZx1d04X1wgWRXB2/Njn5JpjcXhTIuJxdrzdyeVmhMHS2QmCIWOAx4GCEbMd2qbWXk3Qis1w37sp17I/otQBngyjwm35ipPmM5rudPv9IhlGyue2SFIoSkitvFjTxjXL+lMgwWB3yx9xXn32gp/13UfVfM2ZXVF3OYVYFPgN/4mLxd9jzJjMTZn4q+zrLh5DTUstLesmnb8Y58zrrx6qGH9fYcwYawqcKana2689X3hSPGtT9rzqJ2pLpzWZHeFMThOWNN1pml28StGhXQIgEJjJLQDq9uRb2+CoRpqwxIgnmFS5vi5o4F18O/G9TziCA3HPHeyFWGNNjjUZ1ryPEjHiUCLr7ny7GG+i7Y651SOb7uYfUNgph/IB4zjmPX7EleIjOumoqWilZhlPiBo4NB8yYUqrLSfmhE661E7QFtfzaO4jpgKZ32Xg9tmPBwzJsGIwIizZY25uU2ZHRO36mVLdz2g2O2VBJEc220hAtUZRBvl1dtx1MhkwjhM617HwNwjabhfdzE1ZN98f4EAtNV4tBRYBDs2H2IGnjSuasKTTmpqWgGFm5qxl9dBsLsTZd3DU9j5c1wzo4vT3jph7HoVg3XxJ1K6velcYGZD7A0KsX+FYBSUQ4owQDSFWGHEU2SFO3mNeffrGocYhpsqp6U4p/CUAcn/pW50XvYtXi+9dwnl0t6Z0oM++gZ
WOtjvmwa79uPhhj0IJhL6F9bSommNau6TIJ2Bgxj2W4R6l3eWy3sBFyyFH6cX9Bvqe3OTu+m/TMUs6zQmi/PDsIMaONq44dndZ6IDLcY/Ceq6EA4byZ3xjP+PO8t898biMDCizo20F1WnNqv58e8wLyRnZS3jdp9CCESMCgW/MZ8y721TNTb4vycaaKQMtKfDkpm8NxgIvJcY6rOQUMuk5OoGFXLDWR6u372JuFR6orNstwvH3G8/+3amN/GDlvyK3N8DCGksX5i9RnWy+KwFzRtlVzANIy2+L1yI4yuyIwk6pQkJhvou3L743CUekSDt5MegbIJCtmrt4N2bq36O0u3RhTQgXT3ggLDvlxxQyYcweuRYMZWfbm74nN/Em51K4QoajpqWTwJg9pPxDlEijKzqtWTYQ4vKhaqzpjunCnDauycwQY/6AGHZo6bBYMhk80B7YVDIpSShtagOKwYij6e6jrqIumVW/ZsavYfhf4iUnkKCxi+6Yppuj3yPgwKY6MSJ4IwjCIAwozZR5vJvg6T7SyFW
\ No newline at end of file
+      "text/plain": [
+       "<Figure size 513.2x360 with 1 Axes>"
+      ]
+     },
+     "metadata": {
+      "needs_background": "light"
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "from scipy.io import wavfile\n",
+    "import scipy.signal\n",
+    "from playback import make_playback_animation\n",
+    "\n",
+    "# coding noise demonstration\n",
+    "for br in [\"inf\", 24000, 15000, 12000, 9000, 8000, 7000, 6000]:\n",
+    "    fs, x = wavfile.read(f'data/a3_short_opus_{br}bps.wav')\n",
+    "    spec, freqs, t, im = plt.specgram(x, NFFT=512, cmap='inferno', noverlap=256 + 128, pad_to=4096)\n",
+    "    spec = 10*np.log10(spec)\n",
+    "    \n",
+    "    make_playback_animation(f'animations/opus_{br}bps.mp4', spec, len(x)/16)\n",
+    "    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b8408ca1",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.12"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
--- a/dnn/torch/osce/train_model.py
+++ b/dnn/torch/osce/train_model.py
@@ -27,9 +27,13 @@
 */
 """
 
+seed = 1888
+
 import os
 import argparse
 import sys
+import random
+random.seed(seed)
 
 import yaml
 
@@ -40,9 +44,12 @@
     has_git = False
 
 import torch
+torch.manual_seed(seed)
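+# cudnn benchmarking selects kernels by timing, which can vary between runs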
+torch.backends.cudnn.benchmark = False
 from torch.optim.lr_scheduler import LambdaLR
 
 import numpy as np
+np.random.seed(seed)
 
 from scipy.io import wavfile
 
@@ -54,7 +61,7 @@
 
 
 from utils.silk_features import load_inference_data
-from utils.misc import count_parameters
+from utils.misc import count_parameters, count_nonzero_parameters
 
 from losses.stft_loss import MRSTFTLoss, MRLogMelLoss
 
@@ -71,6 +78,7 @@
 args = parser.parse_args()
 
 
+
 torch.set_num_threads(4)
 
 with open(args.setup, 'r') as f:
@@ -98,7 +106,7 @@
         reply = input('continue? (y/n): ')
 
     if reply == 'n':
-        os._exit()
+        os._exit(0)
 else:
     os.makedirs(args.output, exist_ok=True)
 
@@ -109,7 +117,7 @@
 if has_git:
     working_dir = os.path.split(__file__)[0]
     try:
-        repo = git.Repo(working_dir)
+        repo = git.Repo(working_dir, search_parent_directories=True)
         setup['repo'] = dict()
         hash = repo.head.object.hexsha
         urls = list(repo.remote().urls)
@@ -117,6 +125,8 @@
 
         if is_dirty:
             print("warning: repo is dirty")
+            with open(os.path.join(args.output, 'repo.diff'), "w") as f:
+                f.write(repo.git.execute(["git", "diff"]))
 
         setup['repo']['hash'] = hash
         setup['repo']['urls'] = urls
@@ -292,6 +302,6 @@
     torch.save(checkpoint, os.path.join(checkpoint_dir, checkpoint_prefix + f'_last.pth'))
 
 
-    print()
+    print(f"non-zero parameters: {count_nonzero_parameters(model)}\n")
 
 print('Done')
--- a/dnn/torch/osce/train_vocoder.py
+++ b/dnn/torch/osce/train_vocoder.py
@@ -107,7 +107,7 @@
 if has_git:
     working_dir = os.path.split(__file__)[0]
     try:
-        repo = git.Repo(working_dir)
+        repo = git.Repo(working_dir, search_parent_directories=True)
         setup['repo'] = dict()
         hash = repo.head.object.hexsha
         urls = list(repo.remote().urls)
--- a/dnn/torch/osce/utils/layers/limited_adaptive_comb1d.py
+++ b/dnn/torch/osce/utils/layers/limited_adaptive_comb1d.py
@@ -32,6 +32,7 @@
 import torch.nn.functional as F
 
 from utils.endoscopy import write_data
+from utils.softquant import soft_quant
 
 class LimitedAdaptiveComb1d(nn.Module):
     COUNTER = 1
@@ -47,6 +48,8 @@
                  gain_limit_db=10,
                  global_gain_limits_db=[-6, 6],
                  norm_p=2,
+                 softquant=False,
+                 apply_weight_norm=False,
                  **kwargs):
         """
 
@@ -97,17 +100,22 @@
         else:
             self.name = name
 
+        norm = torch.nn.utils.weight_norm if apply_weight_norm else lambda x, name=None: x
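+        # when weight norm is disabled, norm is an identity wrapper so the layer definitions below stay uniform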
+
         # network for generating convolution weights
-        self.conv_kernel = nn.Linear(feature_dim, kernel_size)
+        self.conv_kernel = norm(nn.Linear(feature_dim, kernel_size))
 
+        if softquant:
+            self.conv_kernel = soft_quant(self.conv_kernel)
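+            # soft_quant (from utils.softquant) wraps the layer for training-time soft quantization of its weights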
 
+
         # comb filter gain
-        self.filter_gain = nn.Linear(feature_dim, 1)
+        self.filter_gain = norm(nn.Linear(feature_dim, 1))
         self.log_gain_limit = gain_limit_db * 0.11512925464970229
         with torch.no_grad():
             self.filter_gain.bias[:] = max(0.1, 4 + self.log_gain_limit)
 
-        self.global_filter_gain = nn.Linear(feature_dim, 1)
+        self.global_filter_gain = norm(nn.Linear(feature_dim, 1))
         log_min, log_max = global_gain_limits_db[0] * 0.11512925464970229, global_gain_limits_db[1] * 0.11512925464970229
         self.filter_gain_a = (log_max - log_min) / 2
         self.filter_gain_b = (log_max + log_min) / 2
--- a/dnn/torch/osce/utils/layers/limited_adaptive_conv1d.py
+++ b/dnn/torch/osce/utils/layers/limited_adaptive_conv1d.py
@@ -34,8 +34,8 @@
 from utils.endoscopy import write_data
 
 from utils.ada_conv import adaconv_kernel
+from utils.softquant import soft_quant
 
-
 class LimitedAdaptiveConv1d(nn.Module):
     COUNTER = 1
 
@@ -51,6 +51,8 @@
                  gain_limits_db=[-6, 6],
                  shape_gain_db=0,
                  norm_p=2,
+                 softquant=False,
+                 apply_weight_norm=False,
                  **kwargs):
         """
 
@@ -100,12 +102,16 @@
         else:
             self.name = name
 
+        norm = torch.nn.utils.weight_norm if apply_weight_norm else lambda x, name=None: x
+
         # network for generating convolution weights
-        self.conv_kernel = nn.Linear(feature_dim, in_channels * out_channels * kernel_size)
+        self.conv_kernel = norm(nn.Linear(feature_dim, in_channels * out_channels * kernel_size))
+        if softquant:
+            self.conv_kernel = soft_quant(self.conv_kernel)
 
         self.shape_gain = min(1, 10**(shape_gain_db / 20))
 
-        self.filter_gain = nn.Linear(feature_dim, out_channels)
+        self.filter_gain = norm(nn.Linear(feature_dim, out_channels))
         log_min, log_max = gain_limits_db[0] * 0.11512925464970229, gain_limits_db[1] * 0.11512925464970229
         self.filter_gain_a = (log_max - log_min) / 2
         self.filter_gain_b = (log_max + log_min) / 2
--- a/dnn/torch/osce/utils/layers/td_shaper.py
+++ b/dnn/torch/osce/utils/layers/td_shaper.py
@@ -3,6 +3,7 @@
 import torch.nn.functional as F
 
 from utils.complexity import _conv1d_flop_count
+from utils.softquant import soft_quant
 
 class TDShaper(nn.Module):
     COUNTER = 1
@@ -12,7 +13,9 @@
                  frame_size=160,
                  avg_pool_k=4,
                  innovate=False,
-                 pool_after=False
+                 pool_after=False,
+                 softquant=False,
+                 apply_weight_norm=False
     ):
         """
 
@@ -45,16 +48,22 @@
         assert frame_size % avg_pool_k == 0
         self.env_dim = frame_size // avg_pool_k + 1
 
+        norm = torch.nn.utils.weight_norm if apply_weight_norm else lambda x, name=None: x
+
         # feature transform
-        self.feature_alpha1 = nn.Conv1d(self.feature_dim + self.env_dim, frame_size, 2)
-        self.feature_alpha2 = nn.Conv1d(frame_size, frame_size, 2)
+        self.feature_alpha1_f = norm(nn.Conv1d(self.feature_dim, frame_size, 2))
+        self.feature_alpha1_t = norm(nn.Conv1d(self.env_dim, frame_size, 2))
+        self.feature_alpha2 = norm(nn.Conv1d(frame_size, frame_size, 2))
 
+        if softquant:
+            # note: only the feature branch of alpha1 is soft-quantized here
+            self.feature_alpha1_f = soft_quant(self.feature_alpha1_f)
+
         if self.innovate:
-            self.feature_alpha1b = nn.Conv1d(self.feature_dim + self.env_dim, frame_size, 2)
-            self.feature_alpha1c = nn.Conv1d(self.feature_dim + self.env_dim, frame_size, 2)
+            self.feature_alpha1b = norm(nn.Conv1d(self.feature_dim + self.env_dim, frame_size, 2))
+            self.feature_alpha1c = norm(nn.Conv1d(self.feature_dim + self.env_dim, frame_size, 2))
 
-            self.feature_alpha2b = nn.Conv1d(frame_size, frame_size, 2)
-            self.feature_alpha2c = nn.Conv1d(frame_size, frame_size, 2)
+            self.feature_alpha2b = norm(nn.Conv1d(frame_size, frame_size, 2))
+            self.feature_alpha2c = norm(nn.Conv1d(frame_size, frame_size, 2))
 
 
     def flop_count(self, rate):
@@ -61,7 +70,7 @@
 
         frame_rate = rate / self.frame_size
 
-        shape_flops = sum([_conv1d_flop_count(x, frame_rate) for x in (self.feature_alpha1, self.feature_alpha2)]) + 11 * frame_rate * self.frame_size
+        shape_flops = sum([_conv1d_flop_count(x, frame_rate) for x in (self.feature_alpha1_f, self.feature_alpha1_t, self.feature_alpha2)]) + 11 * frame_rate * self.frame_size
 
         if self.innovate:
             inno_flops = sum([_conv1d_flop_count(x, frame_rate) for x in (self.feature_alpha1b, self.feature_alpha2b, self.feature_alpha1c, self.feature_alpha2c)]) + 22 * frame_rate * self.frame_size
@@ -110,9 +119,10 @@
         tenv = self.envelope_transform(x)
 
         # feature path
-        f = torch.cat((features, tenv), dim=-1)
-        f = F.pad(f.permute(0, 2, 1), [1, 0])
-        alpha = F.leaky_relu(self.feature_alpha1(f), 0.2)
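+        # transform features and temporal envelope separately, then fuse by summation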
+        f = F.pad(features.permute(0, 2, 1), [1, 0])
+        t = F.pad(tenv.permute(0, 2, 1), [1, 0])
+        alpha = self.feature_alpha1_f(f) + self.feature_alpha1_t(t)
+        alpha = F.leaky_relu(alpha, 0.2)
         alpha = torch.exp(self.feature_alpha2(F.pad(alpha, [1, 0])))
         alpha = alpha.permute(0, 2, 1)
 
--- a/dnn/torch/osce/utils/misc.py
+++ b/dnn/torch/osce/utils/misc.py
@@ -28,6 +28,7 @@
 """
 
 import torch
+from torch.nn.utils import remove_weight_norm
 
 def count_parameters(model, verbose=False):
     total = 0
@@ -41,7 +42,17 @@
 
     return total
 
+def count_nonzero_parameters(model, verbose=False):
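+    """Return the total number of non-zero parameter entries (tracks sparsification)."""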
+    total = 0
+    for name, p in model.named_parameters():
+        count = torch.count_nonzero(p).item()
 
+        if verbose:
+            print(f"{name}: {count} non-zero parameters")
+
+        total += count
+
+    return total
+
 def retain_grads(module):
     for p in module.parameters():
         if p.requires_grad:
@@ -62,4 +73,23 @@
             weight = torch.exp(alpha * (sr[-1] - sg[-1]))
             weights.append(weight)
 
-    return weights
\ No newline at end of file
+    return weights
+
+
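+# torch's weight_norm replaces a parameter <name> with <name>_g and <name>_v;
+# the presence of <name>_v identifies weights that are currently weight-normed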
+def _get_candidates(module: torch.nn.Module):
+    candidates = []
+    for key in module.__dict__.keys():
+        if hasattr(module, key + '_v'):
+            candidates.append(key)
+    return candidates
+
+def remove_all_weight_norm(model: torch.nn.Module, verbose=False):
+    for name, m in model.named_modules():
+        candidates = _get_candidates(m)
+
+        for candidate in candidates:
+            try:
+                remove_weight_norm(m, name=candidate)
+                if verbose:
+                    print(f'removed weight norm on weight {name}.{candidate}')
+            except ValueError:
+                # remove_weight_norm raises ValueError when the candidate is not weight-normed
+                pass
--- /dev/null
+++ b/dnn/torch/osce/utils/softquant.py
@@ -1,0 +1,110 @@
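+# Soft quantization: registered forward hooks add uniform noise, matched to the
+# quantization step size, to selected weights during training so that the model
+# becomes robust to the weight rounding applied at export time.
+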
+import torch
+
+@torch.no_grad()
+def compute_optimal_scale(weight):
+    n_out, n_in = weight.shape
+    assert n_in % 4 == 0
+    if n_out % 8:
+        # pad the rows up to a multiple of 8
+        pad = 8 - (n_out % 8)
+        weight = torch.cat((weight, torch.zeros((pad, n_in), dtype=weight.dtype, device=weight.device)), dim=0)
+
+    # per-row int8 scale: |w| must fit into [-127, 127]; the /129 bound on sums of
+    # adjacent weight pairs presumably keeps the pairwise 16-bit accumulation used
+    # by the C inference kernels from overflowing
+    weight_max_abs, _ = torch.max(torch.abs(weight), dim=1)
+    weight_max_sum, _ = torch.max(torch.abs(weight[:, : n_in : 2] + weight[:, 1 : n_in : 2]), dim=1)
+    scale_max = weight_max_abs / 127
+    scale_sum = weight_max_sum / 129
+
+    scale = torch.maximum(scale_max, scale_sum)
+
+    return scale[:n_out]
+
+@torch.no_grad()
+def q_scaled_noise(module, weight):
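+    """Draw uniform noise in [-scale/2, scale/2) per output row, emulating the
+    rounding error of quantization with the row's optimal scale."""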
+    if isinstance(module, torch.nn.Conv1d):
+        w = weight.permute(0, 2, 1).flatten(1)
+        noise = torch.rand_like(w) - 0.5
+        scale = compute_optimal_scale(w)
+        noise = noise * scale.unsqueeze(-1)
+        noise = noise.reshape(weight.size(0), weight.size(2), weight.size(1)).permute(0, 2, 1)
+    elif isinstance(module, torch.nn.ConvTranspose1d):
+        i, o, k = weight.shape
+        w = weight.permute(2, 1, 0).reshape(k * o, i)
+        noise = torch.rand_like(w) - 0.5
+        scale = compute_optimal_scale(w)
+        noise = noise * scale.unsqueeze(-1)
+        noise = noise.reshape(k, o, i).permute(2, 1, 0)
+    elif len(weight.shape) == 2:
+        noise = torch.rand_like(weight) - 0.5
+        scale = compute_optimal_scale(weight)
+        noise = noise * scale.unsqueeze(-1)
+    else:
+        raise ValueError('unknown quantization setting')
+
+    return noise
+
+class SoftQuant:
+    names: list
+
+    def __init__(self, names: list, scale: float) -> None:
+        self.names = names
+        self.quantization_noise = None
+        self.scale = scale
+
+    def __call__(self, module, inputs, *args, before=True):
+        if not module.training: return
+
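+        # pre-hook (before=True): sample noise and add it to the weights in place;
+        # post-hook (before=False): subtract the same noise to restore the weights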
+        if before:
+            self.quantization_noise = dict()
+            for name in self.names:
+                weight = getattr(module, name)
+                if self.scale is None:
+                    self.quantization_noise[name] = q_scaled_noise(module, weight)
+                else:
+                    self.quantization_noise[name] = \
+                        self.scale * weight.abs().max() * (torch.rand_like(weight) - 0.5)
+                with torch.no_grad():
+                    weight.data[:] = weight + self.quantization_noise[name]
+        else:
+            for name in self.names:
+                weight = getattr(module, name)
+                with torch.no_grad():
+                    weight.data[:] = weight - self.quantization_noise[name]
+            self.quantization_noise = None
+
+    @staticmethod
+    def apply(module, names=['weight'], scale=None):
+        fn = SoftQuant(names, scale)
+
+        for name in names:
+            if not hasattr(module, name):
+                raise ValueError("")
+
+        fn_before = lambda *x : fn(*x, before=True)
+        fn_after = lambda *x : fn(*x, before=False)
+        setattr(fn_before, 'sqm', fn)
+        setattr(fn_after, 'sqm', fn)
+
+        module.register_forward_pre_hook(fn_before)
+        module.register_forward_hook(fn_after)
+
+        return fn
+
+
+def soft_quant(module, names=['weight'], scale=None):
+    SoftQuant.apply(module, names, scale)
+    return module
+
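+# Usage sketch (layer shapes are illustrative):
+#
+#   layer = soft_quant(torch.nn.Linear(64, 32))  # register the noise hooks
+#   layer.train()                                 # noise is only added in training mode
+#   y = layer(x)                                  # weights are perturbed for this call only
+#   remove_soft_quant(layer)                      # strip the hooks, e.g. before export
+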
+def remove_soft_quant(module, names=['weight']):
+    # iterate over copies of the hook dicts, since entries are deleted while scanning
+    for k, hook in list(module._forward_pre_hooks.items()):
+        if hasattr(hook, 'sqm'):
+            if isinstance(hook.sqm, SoftQuant) and hook.sqm.names == names:
+                del module._forward_pre_hooks[k]
+    for k, hook in list(module._forward_hooks.items()):
+        if hasattr(hook, 'sqm'):
+            if isinstance(hook.sqm, SoftQuant) and hook.sqm.names == names:
+                del module._forward_hooks[k]
+
+    return module
\ No newline at end of file
--- a/dnn/torch/osce/utils/templates.py
+++ b/dnn/torch/osce/utils/templates.py
@@ -50,7 +50,11 @@
             'pitch_embedding_dim': 64,
             'pitch_max': 300,
             'preemph': 0.85,
-            'skip': 91
+            'skip': 91,
+            'softquant': True,
+            'sparsify': False,
+            'sparsification_density': 0.4,
+            'sparsification_schedule': [10000, 40000, 200]
         }
     },
     'data': {
@@ -63,7 +67,7 @@
         'num_bands_clean_spec': 64,
         'num_bands_noisy_spec': 18,
         'noisy_spec_scale': 'opus',
-        'pitch_hangover': 8,
+        'pitch_hangover': 0,
     },
     'training': {
         'batch_size': 256,
@@ -106,7 +110,11 @@
             'pitch_embedding_dim': 64,
             'pitch_max': 300,
             'preemph': 0.85,
-            'skip': 91
+            'skip': 91,
+            'softquant': True,
+            'sparsify': False,
+            'sparsification_density': 0.4,
+            'sparsification_schedule': [10000, 40000, 200]
         }
     },
     'data': {
@@ -119,7 +127,7 @@
         'num_bands_clean_spec': 64,
         'num_bands_noisy_spec': 18,
         'noisy_spec_scale': 'opus',
-        'pitch_hangover': 8,
+        'pitch_hangover': 0,
     },
     'training': {
         'batch_size': 256,
@@ -160,7 +168,11 @@
             'pitch_embedding_dim': 64,
             'pitch_max': 300,
             'preemph': 0.85,
-            'skip': 91
+            'skip': 91,
+            'softquant': True,
+            'sparsify': False,
+            'sparsification_density': 0.4,
+            'sparsification_schedule': [0, 0, 200]
         }
     },
     'data': {
@@ -173,7 +185,7 @@
         'num_bands_clean_spec': 64,
         'num_bands_noisy_spec': 18,
         'noisy_spec_scale': 'opus',
-        'pitch_hangover': 8,
+        'pitch_hangover': 0,
     },
     'discriminator': {
         'args': [],
--- a/dnn/torch/weight-exchange/wexchange/c_export/common.py
+++ b/dnn/torch/weight-exchange/wexchange/c_export/common.py
@@ -282,7 +282,8 @@
                        bias : np.ndarray,
                        scale=1/128,
                        format : str = 'torch',
-                       quantize=False):
+                       quantize=False,
+                       sparse=False):
 
 
     if format == "torch":
@@ -290,7 +291,7 @@
         weight = np.transpose(weight, (2, 1, 0))
 
     lin_weight = np.reshape(weight, (-1, weight.shape[-1]))
-    print_linear_layer(writer, name, lin_weight, bias, scale=scale, sparse=False, diagonal=False, quantize=quantize)
+    print_linear_layer(writer, name, lin_weight, bias, scale=scale, sparse=sparse, diagonal=False, quantize=quantize)
 
 
     writer.header.write(f"\n#define {name.upper()}_OUT_SIZE {weight.shape[2]}\n")
@@ -369,7 +370,8 @@
                        bias : np.ndarray,
                        stride: int,
                        scale=1/128,
-                       quantize=False):
+                       quantize=False,
+                       sparse=False):
 
     in_channels, out_channels, kernel_size = weight.shape
 
@@ -377,7 +379,7 @@
     linear_weight = weight.transpose(2, 1, 0).reshape(kernel_size * out_channels, in_channels).transpose(1, 0)
     linear_bias = np.repeat(bias[np.newaxis, :], kernel_size, 0).flatten()
 
-    print_linear_layer(writer, name, linear_weight, linear_bias, scale=scale, quantize=quantize)
+    print_linear_layer(writer, name, linear_weight, linear_bias, scale=scale, quantize=quantize, sparse=sparse)
 
     writer.header.write(f"\n#define {name.upper()}_KERNEL_SIZE {kernel_size}\n")
     writer.header.write(f"\n#define {name.upper()}_STRIDE {stride}\n")
--- a/dnn/torch/weight-exchange/wexchange/torch/torch.py
+++ b/dnn/torch/weight-exchange/wexchange/torch/torch.py
@@ -153,7 +153,7 @@
         np.save(where, 'weight_global_gain.npy', w_global_gain)
         np.save(where, 'bias_global_gain.npy', b_global_gain)
 
-def dump_torch_tdshaper(where, shaper, name='tdshaper'):
+def dump_torch_tdshaper(where, shaper, name='tdshaper', quantize=False, scale=1/128):
 
     if isinstance(where, CWriter):
         where.header.write(f"""
@@ -165,7 +165,8 @@
 """
         )
 
-    dump_torch_conv1d_weights(where, shaper.feature_alpha1, name + "_alpha1")
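+    # only the feature branch is exported with quantization, matching soft_quant in TDShaper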
+    dump_torch_conv1d_weights(where, shaper.feature_alpha1_f, name + "_alpha1_f", quantize=quantize, scale=scale)
+    dump_torch_conv1d_weights(where, shaper.feature_alpha1_t, name + "_alpha1_t")
     dump_torch_conv1d_weights(where, shaper.feature_alpha2, name + "_alpha2")
 
     if shaper.innovate:
@@ -274,7 +275,7 @@
             dense.bias.set_(torch.from_numpy(b))
 
 
-def dump_torch_conv1d_weights(where, conv, name='conv', scale=1/128, quantize=False):
+def dump_torch_conv1d_weights(where, conv, name='conv', scale=1/128, quantize=False, sparse=False):
 
     w = conv.weight.detach().cpu().numpy().copy()
     if conv.bias is None:
@@ -284,7 +285,7 @@
 
     if isinstance(where, CWriter):
 
-        return print_conv1d_layer(where, name, w, b, scale=scale, format='torch', quantize=quantize)
+        return print_conv1d_layer(where, name, w, b, scale=scale, format='torch', quantize=quantize, sparse=sparse)
     else:
         os.makedirs(where, exist_ok=True)
 
@@ -304,7 +305,7 @@
                 conv.bias.set_(torch.from_numpy(b))
 
 
-def dump_torch_tconv1d_weights(where, conv, name='conv', scale=1/128, quantize=False):
+def dump_torch_tconv1d_weights(where, conv, name='conv', scale=1/128, quantize=False, sparse=False):
 
     w = conv.weight.detach().cpu().numpy().copy()
     if conv.bias is None:
@@ -314,7 +315,7 @@
 
     if isinstance(where, CWriter):
 
-        return print_tconv1d_layer(where, name, w, b, conv.stride[0], scale=scale, quantize=quantize)
+        return print_tconv1d_layer(where, name, w, b, conv.stride[0], scale=scale, quantize=quantize, sparse=sparse)
     else:
         os.makedirs(where, exist_ok=True)
 
--