shithub: aubio

ref: 92a6af0d4379e0818f09eb067a00c0a33fb9d110
parent: 7165fc56c4aaa5eefb8c04f6eb9ca3c0980904dc
parent: adf09b28c9435d9048ee91180530a4d7b7afd620
author: Paul Brossier <piem@piem.org>
date: Mon Oct 29 11:39:50 EDT 2018

Merge branch 'fix/slicing' into feature/docstrings

--- a/python/lib/aubio/cmd.py
+++ b/python/lib/aubio/cmd.py
@@ -247,6 +247,9 @@
                 metavar = "<slices>",
                 action = "store", dest = "cut_until_nslices", default = None,
                 help="how many extra slices should be added at the end of each slice")
+        self.add_argument("--create-first",
+                action = "store_true", dest = "create_first", default = False,
+                help="always include first slice")
 
 # some utilities
 
--- a/python/lib/aubio/cut.py
+++ b/python/lib/aubio/cut.py
@@ -101,7 +101,7 @@
 
     s = source(source_uri, samplerate, hopsize)
     if samplerate == 0:
-        samplerate = s.get_samplerate()
+        samplerate = s.samplerate
         options.samplerate = samplerate
 
     if options.beat:
@@ -150,7 +150,8 @@
         slice_source_at_stamps(options.source_uri,
                 timestamps, timestamps_end = timestamps_end,
                 output_dir = options.output_directory,
-                samplerate = options.samplerate)
+                samplerate = options.samplerate,
+                create_first = options.create_first)
 
 def main():
     parser = aubio_cut_parser()
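
A rough sketch of the updated call path in cut.py, assuming a hypothetical input file "loop.wav" and hand-picked cut points; it only relies on names visible in the diff (source, source.samplerate, slice_source_at_stamps, create_first).

    from aubio import source
    from aubio.slicing import slice_source_at_stamps

    uri = "loop.wav"              # hypothetical input file
    s = source(uri, 0, 256)       # samplerate=0: use the file's own rate
    samplerate = s.samplerate     # attribute access, as in the patched cut.py

    # two arbitrary cut points, in samples; audio before the first cut is
    # only written out because create_first is set
    slice_source_at_stamps(uri, [22050, 44100],
            output_dir="slices",
            samplerate=samplerate,
            create_first=True)
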
--- a/python/lib/aubio/slicing.py
+++ b/python/lib/aubio/slicing.py
@@ -6,19 +6,22 @@
 _max_timestamp = 1e120
 
 def slice_source_at_stamps(source_file, timestamps, timestamps_end=None,
-                           output_dir=None, samplerate=0, hopsize=256):
+                           output_dir=None, samplerate=0, hopsize=256,
+                           create_first=False):
     """ slice a sound file at given timestamps """
 
     if timestamps is None or len(timestamps) == 0:
         raise ValueError("no timestamps given")
 
-    if timestamps[0] != 0:
+    if timestamps[0] != 0 and create_first:
         timestamps = [0] + timestamps
         if timestamps_end is not None:
             timestamps_end = [timestamps[1] - 1] + timestamps_end
 
     if timestamps_end is not None:
-        if len(timestamps_end) != len(timestamps):
+        if len(timestamps_end) == len(timestamps) - 1:
+            timestamps_end = timestamps_end + [_max_timestamp]
+        elif len(timestamps_end) != len(timestamps):
             raise ValueError("len(timestamps_end) != len(timestamps)")
     else:
         timestamps_end = [t - 1 for t in timestamps[1:]] + [_max_timestamp]
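
A small worked example of the end-stamp handling above, with made-up sample offsets:

    _max_timestamp = 1e120

    timestamps = [0, 1000, 2000]

    # caller passed one end stamp per gap only: pad the last region
    timestamps_end = [999, 1999]
    if len(timestamps_end) == len(timestamps) - 1:
        timestamps_end = timestamps_end + [_max_timestamp]
    print(timestamps_end)                                        # [999, 1999, 1e+120]

    # no end stamps at all: derive them from the next start
    print([t - 1 for t in timestamps[1:]] + [_max_timestamp])    # [999, 1999, 1e+120]
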
@@ -48,7 +51,7 @@
         # get hopsize new samples from source
         vec, read = _source.do_multi()
         # if the total number of frames read will exceed the next region start
-        if len(regions) and total_frames + read >= regions[0][0]:
+        while len(regions) and total_frames + read >= regions[0][0]:
             #print "getting", regions[0], "at", total_frames
             # get next region
             start_stamp, end_stamp = regions.pop(0)
@@ -75,7 +78,7 @@
                 if remaining > start:
                     # write remaining samples from current region
                     _sink.do_multi(vec[:, start:remaining], remaining - start)
-                    #print "closing region", "remaining", remaining
+                    #print("closing region", "remaining", remaining)
                     # close this file
                     _sink.close()
             elif read > start:
@@ -82,5 +85,8 @@
                 # write all the samples
                 _sink.do_multi(vec[:, start:read], read - start)
         total_frames += read
+        # remove old slices
+        slices = list(filter(lambda s: s['end_stamp'] > total_frames,
+            slices))
         if read < hopsize:
             break
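
To illustrate why the `if` above became a `while`, here is a stripped-down model of the read loop using plain lists and no audio I/O (the regions and hop size are made up): one hop of samples can cross several region starts at once, and an `if` would open only the first of them.

    # toy regions as (start, end) sample offsets; hop of 256 samples
    regions = [(0, 99), (100, 199), (200, 299), (300, 10**9)]
    total_frames, read = 0, 256
    opened = []

    # same condition as in slicing.py, but this block reaches the start
    # of three regions, so the loop must pop more than one of them
    while len(regions) and total_frames + read >= regions[0][0]:
        opened.append(regions.pop(0))

    print(opened)   # [(0, 99), (100, 199), (200, 299)]
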
--- a/python/tests/test_slicing.py
+++ b/python/tests/test_slicing.py
@@ -23,12 +23,14 @@
 
     def test_slice_start_only_no_zero(self):
         regions_start = [i*1000 for i in range(1, n_slices)]
-        slice_source_at_stamps(self.source_file, regions_start, output_dir = self.output_dir)
+        slice_source_at_stamps(self.source_file, regions_start,
+                output_dir = self.output_dir, create_first=True)
 
     def test_slice_start_beyond_end(self):
         regions_start = [i*1000 for i in range(1, n_slices)]
         regions_start += [count_samples_in_file(self.source_file) + 1000]
-        slice_source_at_stamps(self.source_file, regions_start, output_dir = self.output_dir)
+        slice_source_at_stamps(self.source_file, regions_start,
+                output_dir = self.output_dir, create_first=True)
 
     def test_slice_start_every_blocksize(self):
         hopsize = 200
@@ -36,6 +38,12 @@
         slice_source_at_stamps(self.source_file, regions_start, output_dir = self.output_dir,
                 hopsize = 200)
 
+    def test_slice_start_every_half_blocksize(self):
+        hopsize = 200
+        regions_start = [i*hopsize//2 for i in range(1, n_slices)]
+        slice_source_at_stamps(self.source_file, regions_start,
+                output_dir = self.output_dir, hopsize = 200)
+
     def tearDown(self):
         original_samples = count_samples_in_file(self.source_file)
         written_samples = count_samples_in_directory(self.output_dir)
@@ -91,6 +99,19 @@
         assert_equal(written_samples, expected_samples,
             "number of samples written different from number of original samples")
 
+    def test_slice_start_and_ends_with_missing_end(self):
+        regions_start = [i*1000 for i in range(n_slices)]
+        regions_ends = [r-1 for r in regions_start[1:]]
+        slice_source_at_stamps(self.source_file, regions_start, regions_ends,
+                output_dir = self.output_dir)
+        written_samples = count_samples_in_directory(self.output_dir)
+        original_samples = count_samples_in_file(self.source_file)
+        total_files = count_files_in_directory(self.output_dir)
+        assert_equal(n_slices, total_files,
+            "number of slices created different from expected")
+        assert_equal(written_samples, original_samples,
+            "number of samples written different from number of original samples")
+
     def tearDown(self):
         shutil.rmtree(self.output_dir)
 
@@ -133,7 +154,7 @@
         regions_start = [i*1000 for i in range(1, n_slices)]
         regions_end = None
         slice_source_at_stamps (self.source_file, regions_start, regions_end,
-                output_dir = self.output_dir)
+                output_dir = self.output_dir, create_first=True)
         total_files = count_files_in_directory(self.output_dir)
         assert_equal(n_slices, total_files,
             "number of slices created different from expected")