-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathgenerator_player.py
507 lines (419 loc) · 18 KB
/
generator_player.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# NOTE: THIS IS THE CLASS THAT WILL BE REPLACING scarlett_player.py eventually.
# It is cleaner, more object oriented, and will allows us to run proper tests.
# Also threading.RLock() and threading.Semaphore() works correctly.
#
# There are a LOT of threads going on here, all of them managed by Gstreamer.
# If pyglet ever needs to run under a Python that doesn't have a GIL, some
# locks will need to be introduced to prevent concurrency catastrophes.
#
# At the moment, no locks are used because we assume only one thread is
# executing Python code at a time. Some semaphores are used to block and wake
# up the main thread when needed, these are all instances of
# threading.Semaphore. Note that these don't represent any kind of
# thread-safety.
from __future__ import with_statement
from __future__ import division
import sys
import os
# Tell GStreamer where to write pipeline-graph .dot dumps; this must be set
# before the dumps are requested via Gst.debug_bin_to_dot_file below.
os.environ[
    "GST_DEBUG_DUMP_DOT_DIR"] = "/home/pi/dev/bossjones-github/scarlett-dbus-poc/_debug"
# NOTE(review): 'GST_DEBUG_DUMP_DIR_DIR' looks like a typo for
# 'GST_DEBUG_DUMP_DOT_DIR' — confirm. The os.environ assignment above is the
# variable GStreamer actually reads, so this putenv is likely redundant.
os.putenv('GST_DEBUG_DUMP_DIR_DIR',
          '/home/pi/dev/bossjones-github/scarlett-dbus-poc/_debug')
import gi
# Pin the GStreamer 1.0 API before anything is imported from gi.repository.
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst, GLib, Gio  # NOQA
import threading
GObject.threads_init()
# Gst.init must run before any pipeline/element is created.
Gst.init(None)
# Turn on GStreamer's own debug output at default threshold 3.
Gst.debug_set_active(True)
Gst.debug_set_default_threshold(3)
# Lowercase alias for the Gst module.
gst = Gst
import argparse
import pprint
pp = pprint.PrettyPrinter(indent=4)
# Python 2/3 compatibility shims for the queue and URL-quoting modules.
try:
    import queue
except ImportError:
    import Queue as queue
try:
    from urllib.parse import quote
except ImportError:
    from urllib import quote
# Bound for the decoded-sample queue consumed by the iterator.
QUEUE_SIZE = 10
# Bound for the appsink's internal buffer queue ('max-buffers').
BUFFER_SIZE = 10
# Marker pushed onto the sample queue to signal end-of-stream.
SENTINEL = '__GSTDEC_SENTINEL__'
import signal
# Drop into an IPython debugger with a verbose traceback on any uncaught
# exception (development aid).
from IPython.core.debugger import Tracer
from IPython.core import ultratb
sys.excepthook = ultratb.FormattedTB(mode='Verbose',
                                     color_scheme='Linux',
                                     call_pdb=True,
                                     ostream=sys.__stdout__)
import logging
logger = logging.getLogger('scarlettlogger')
import generator_utils
from generator_utils import trace, abort_on_exception, _IdleObject
# Managing the Gobject main loop thread.
_shared_loop_thread = None
_loop_thread_lock = threading.RLock()
def get_loop_thread():
    """Return the process-wide GObject main-loop thread, creating it lazily."""
    global _shared_loop_thread
    with _loop_thread_lock:
        if not _shared_loop_thread:
            # First caller: spin up the singleton loop thread.
            thread = MainLoopThread()
            thread.start()
            _shared_loop_thread = thread
        return _shared_loop_thread
class MainLoopThread(threading.Thread):
    """Daemon thread whose sole job is to run a GObject main loop."""

    def __init__(self):
        super(MainLoopThread, self).__init__()
        # Daemonize so a running loop never blocks interpreter shutdown.
        self.daemon = True
        self.loop = GObject.MainLoop()

    def run(self):
        # Blocks until loop.quit() is invoked from another thread.
        self.loop.run()
# The decoder.
class ScarlettPlayer(_IdleObject):
    """GStreamer-based audio file decoder and player.

    Decodes the file at *path* with ``uridecodebin`` and splits the
    converted audio through a ``tee``:

    * branch A: ``queue`` -> ``appsink`` — decoded S16LE buffers are pushed
      onto an internal queue and consumed through the iterator protocol;
    * branch B: ``queue`` -> ``pulsesink`` — live playback.

    After the stream is ready, ``channels``, ``samplerate`` and (when
    queryable) ``duration`` are available. Use as a context manager or call
    :meth:`close` explicitly.
    """

    # Set to True by _decode_src_created once a raw-audio pad was linked.
    _got_a_pad = False

    def __init__(self, path, handle_error=None):
        """Build the pipeline for *path* and start it.

        path         -- filesystem path of the audio file to decode.
        handle_error -- value returned from __exit__; None is coerced to
                        False (exceptions propagate out of a with-block).
                        Defaults to None so ``ScarlettPlayer(path)`` works.

        Blocks until the stream is ready; raises the recorded exception if
        the stream could not be opened.
        """
        self.running = False
        self.finished = False
        self.handle_error = False if handle_error is None else handle_error

        # Set up the Gstreamer pipeline.
        self.pipeline = Gst.Pipeline('main-pipeline')
        # Released by the GStreamer callbacks once the stream is ready (or
        # has failed); __init__ blocks on it at the end.
        self.ready_sem = threading.Semaphore(0)

        # Register for bus signals.
        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect("message::eos", self._message)
        bus.connect("message::error", self._message)
        bus.connect("message::state-changed", self._on_state_changed)

        # 1. Create the pipeline's elements.
        self.source = Gst.ElementFactory.make("uridecodebin", 'input_stream')
        self.audioconvert = Gst.ElementFactory.make('audioconvert', None)
        self.splitter = Gst.ElementFactory.make("tee", 'splitter')
        if (not self.source or not self.audioconvert or not self.splitter):
            logger.error("ERROR: Not all elements could be created.")
            raise generator_utils.IncompleteGStreamerError()

        # 2. Set properties.
        uri = f'file://{quote(os.path.abspath(path))}'
        self.source.set_property('uri', uri)

        # 3. Add them to the pipeline and link what can be linked now.
        self.pipeline.add(self.source)
        self.pipeline.add(self.audioconvert)
        self.pipeline.add(self.splitter)
        self.audioconvert.link(self.splitter)

        self.source.connect('source-setup', self._source_setup_cb)
        # 4.a. uridecodebin has a "sometimes" pad (created after prerolling),
        # so the decoder is linked to audioconvert in _decode_src_created.
        self.source.connect('pad-added', self._decode_src_created)
        self.source.connect('no-more-pads', self._no_more_pads)
        self.source.connect("unknown-type", self._unknown_type)

        #######################################################################
        # QUEUE A: tee -> queue -> appsink (raw samples for the iterator)
        #######################################################################
        self.queueA = Gst.ElementFactory.make('queue', None)
        self.appsink = Gst.ElementFactory.make('appsink', None)
        self.appsink.set_property(
            'caps',
            Gst.Caps.from_string('audio/x-raw, format=(string)S16LE'),
        )
        # TODO set endianness?
        # Set up the characteristics of the output. We don't want to
        # drop any data (nothing is real-time here); we should bound
        # the memory usage of the internal queue; and, most
        # importantly, setting "sync" to False disables the default
        # behavior in which you consume buffers in real time. This way,
        # we get data as soon as it's decoded.
        self.appsink.set_property('drop', False)
        self.appsink.set_property('max-buffers', BUFFER_SIZE)
        self.appsink.set_property('sync', False)

        # The callback to receive decoded data.
        self.appsink.set_property('emit-signals', True)
        self.appsink.connect("new-sample", self._new_sample)

        # Keep the handler id so close() can disconnect it later.
        self.caps_handler = self.appsink.get_static_pad("sink").connect(
            "notify::caps", self._notify_caps
        )

        self.pipeline.add(self.queueA)
        self.pipeline.add(self.appsink)
        self.queueA.link(self.appsink)

        # link tee to queueA
        self._link_tee_branch(self.queueA, 'queueA')

        #######################################################################
        # QUEUE B: tee -> queue -> pulsesink (live playback)
        #######################################################################
        self.queueB = Gst.ElementFactory.make('queue', None)
        self.pulsesink = Gst.ElementFactory.make('pulsesink', None)
        self.pipeline.add(self.queueB)
        self.pipeline.add(self.pulsesink)
        self.queueB.link(self.pulsesink)
        self.queueB_sink_pad = self.queueB.get_static_pad('sink')

        # link tee to queueB
        self._link_tee_branch(self.queueB, 'queueB')

        # recursively print elements
        self._listElements(self.pipeline)

        #######################################################################
        # Set up the queue for data and run the main thread.
        self.queue = queue.Queue(QUEUE_SIZE)
        self.thread = get_loop_thread()

        # This will get filled with an exception if opening fails.
        self.read_exc = None
        self.dot_exc = None

        # Return as soon as the stream is ready!
        self.running = True
        self.got_caps = False
        self.pipeline.set_state(Gst.State.PLAYING)
        self.on_debug_activate()
        self.ready_sem.acquire()
        if self.read_exc:
            # An error occurred before the stream became ready.
            self.close(True)
            raise self.read_exc

    def _link_tee_branch(self, element, label):
        """Request a new src pad on the tee and link it to *element*'s sink pad."""
        tee_src_pad = self.splitter.get_request_pad('src_%u')
        logger.debug(
            f"Obtained request pad Name({self.splitter.name}) Type({self.splitter}) for audio branch."
        )
        branch_sink_pad = element.get_static_pad('sink')
        logger.debug(
            f"Obtained sink pad for element ({branch_sink_pad}) for tee -> {label}."
        )
        tee_src_pad.link(branch_sink_pad)

    def _source_setup_cb(self, discoverer, source):
        """Callback for uridecodebin's "source-setup" signal (logging only)."""
        logger.debug(f"Discoverer object: ({discoverer})")
        logger.debug(f"Source object: ({source})")

    def _on_state_changed(self, bus, msg):
        """Bus callback: dump a pipeline graph once the pipeline reaches PLAYING.

        parse_state_changed() returns (old, new, pending) states.
        """
        states = msg.parse_state_changed()
        # BUG FIX: the original compared msg.src.get_name() == "pipeline",
        # which never matched the pipeline's actual name ('main-pipeline'),
        # so this branch was dead. Compare the object itself, and use the
        # Gst.State enum instead of the bare value 4.
        if msg.src == self.pipeline and states[1] == Gst.State.PLAYING:
            # Delegates to on_debug_activate, which contains the same
            # dot-file/PNG generation this method previously duplicated.
            self.on_debug_activate()
            print("pipeline dot file created in " +
                  os.getenv("GST_DEBUG_DUMP_DOT_DIR"))

    def _listElements(self, bin, level=0):
        """Recursively log every element (and its pads) inside *bin*."""
        try:
            iterator = bin.iterate_elements()
            while True:
                # iterator.next() yields (GstIteratorResult, element);
                # the element slot is None once iteration is exhausted.
                elem = iterator.next()
                if elem[1] is None:
                    break
                logger.debug(level * '** ' + str(elem[1]))
                self._iteratePads(elem[1])
                # Recurse into child bins.
                self._listElements(elem[1], level + 1)
        except AttributeError:
            # Plain elements have no iterate_elements(); stop recursing here.
            pass

    def _iteratePads(self, element):
        """Log every pad of *element*."""
        try:
            iterator = element.iterate_pads()
            while True:
                pad = iterator.next()
                if pad[1] is None:
                    break
                logger.debug(f'pad: {str(pad[1])}')
        except AttributeError:
            pass

    # NOTE: This function generates the dot file, checks that graphviz in installed and
    # then finally generates a png file, which it then displays
    def on_debug_activate(self):
        """Dump the pipeline to a Graphviz .dot file and render it as a PNG.

        Relies on GST_DEBUG_DUMP_DOT_DIR (set at module import) and on
        graphviz being installed at /usr/bin/dot.
        """
        dotfile = "/home/pi/dev/bossjones-github/scarlett-dbus-poc/_debug/generator-player.dot"
        pngfile = "/home/pi/dev/bossjones-github/scarlett-dbus-poc/_debug/generator-player-pipeline.png"  # NOQA
        # Remove stale artifacts so the dump is always fresh.
        if os.access(dotfile, os.F_OK):
            os.remove(dotfile)
        if os.access(pngfile, os.F_OK):
            os.remove(pngfile)
        Gst.debug_bin_to_dot_file(self.pipeline,
                                  Gst.DebugGraphDetails.ALL, "generator-player")
        os.system('/usr/bin/dot' + " -Tpng -o " + pngfile + " " + dotfile)

    # Gstreamer callbacks.
    def _notify_caps(self, pad, args):
        """The callback for the sinkpad's "notify::caps" signal.

        The sink has started to receive data, so the stream is ready. This
        is our opportunity to read the stream attributes, after which the
        constructor is unblocked via ready_sem.
        """
        logger.debug(f"pad: {pad}")
        logger.debug(f"pad name: {pad.name} parent: {pad.get_parent()}")
        logger.debug(f"args: {args}")
        self.got_caps = True
        info = pad.get_current_caps().get_structure(0)

        # Stream attributes; get_int() returns a (success, value) pair.
        self.channels = info.get_int('channels')[1]
        self.samplerate = info.get_int('rate')[1]

        # Query duration (GStreamer reports nanoseconds; convert to seconds).
        success, length = pad.get_peer().query_duration(Gst.Format.TIME)
        if success:
            self.duration = length / 1000000000
            logger.debug(f"FILE DURATION: {self.duration}")
        else:
            self.read_exc = generator_utils.MetadataMissingError('duration not available')

        # Allow constructor to complete.
        self.ready_sem.release()

    def _decode_src_created(self, element, pad):
        """The callback for GstElement's "pad-added" signal.

        Decoded data is ready: link the first raw-audio pad to audioconvert.
        """
        name = pad.query_caps(None).to_string()
        logger.debug(f"pad: {pad}")
        logger.debug(f"pad name: {pad.name} parent: {pad.get_parent()}")
        if name.startswith('audio/x-raw'):
            nextpad = self.audioconvert.get_static_pad('sink')
            # Only link the first audio stream; further pads are ignored.
            if not nextpad.is_linked():
                self._got_a_pad = True
                pad.link(nextpad)

    def _no_more_pads(self, element):
        """The callback for GstElement's "no-more-pads" signal.

        Sent when the pads are done adding (i.e., there are no more streams
        in the file). If we haven't gotten at least one decodable stream,
        record the error and unblock the constructor.
        """
        if not self._got_a_pad:
            logger.error(
                "If we haven't gotten at least one decodable stream, raise an exception.")
            self.read_exc = generator_utils.NoStreamError()
            self.ready_sem.release()  # No effect if we've already started.

    def _new_sample(self, sink):
        """The callback for appsink's "new-sample" signal."""
        if self.running:
            # New data is available from the pipeline! Dump it into our
            # queue (or possibly block if we're full).
            buf = sink.emit('pull-sample').get_buffer()
            self.queue.put(buf.extract_dup(0, buf.get_size()))
        return Gst.FlowReturn.OK

    def _unknown_type(self, uridecodebin, decodebin, caps):
        """The callback for decodebin's "unknown-type" signal.

        This is called *before* the stream becomes ready when the file
        can't be read; non-audio (e.g. video) streams are ignored.
        """
        streaminfo = caps.to_string()
        if not streaminfo.startswith('audio/'):
            # Ignore non-audio (e.g., video) decode errors.
            return
        logger.error("Ignore non-audio (e.g., video) decode errors.")
        logger.error(f"streaminfo: {streaminfo}")
        self.read_exc = generator_utils.UnknownTypeError(streaminfo)
        self.ready_sem.release()

    def _message(self, bus, message):
        """The callback for GstBus's "message" signal (EOS and ERROR)."""
        if self.finished:
            return
        if message.type == Gst.MessageType.EOS:
            # The file is done. Tell the consumer thread.
            self.queue.put(SENTINEL)
            if not self.got_caps:
                logger.error(
                    "If the stream ends before _notify_caps was called, this is an invalid file.")
                # If the stream ends before _notify_caps was called, this
                # is an invalid file.
                self.read_exc = generator_utils.NoStreamError()
                self.ready_sem.release()
        elif message.type == Gst.MessageType.ERROR:
            gerror, debug = message.parse_error()
            # Map common GStreamer error strings onto the module's
            # exception hierarchy before unblocking the constructor.
            if 'not-linked' in debug:
                logger.error('not-linked')
                self.read_exc = generator_utils.NoStreamError()
            elif 'No such file' in debug:
                self.read_exc = IOError('resource not found')
            else:
                self.read_exc = generator_utils.FileReadError(debug)
            self.ready_sem.release()

    # Iteration.
    def next(self):
        """Return the next chunk of decoded bytes, blocking until available.

        Raises StopIteration at end of stream (SENTINEL on the queue).
        """
        # Wait for data from the Gstreamer callbacks.
        val = self.queue.get()
        if val == SENTINEL:
            # End of stream.
            raise StopIteration
        return val

    # For Python 3 compatibility.
    __next__ = next

    def __iter__(self):
        return self

    # Cleanup.
    def close(self, force=False):
        """Close the file and clean up associated resources.

        Calling `close()` a second time has no effect.
        """
        if not self.running and not force:
            return
        self.running = False
        self.finished = True

        # Unregister for signals, which we registered for above with
        # `add_signal_watch`. (Without this, GStreamer leaks file
        # descriptors.)
        # BUG FIX: a missing/already-deleted attribute raises
        # AttributeError, not NameError; the original `except NameError`
        # could never fire.
        try:
            self.pipeline
        except AttributeError:
            logger.info("well, self.pipeline WASN'T defined after all!")
        else:
            logger.info("OK, self.pipeline IS defined.")
            self.pipeline.get_bus().remove_signal_watch()

            # Stop reading the file.
            self.source.set_property("uri", None)
            # Block spurious signals.
            self.appsink.get_static_pad("sink").disconnect(self.caps_handler)

            # Make space in the output queue to let the decoder thread
            # finish. (Otherwise, the thread blocks on its enqueue and
            # the interpreter hangs.)
            try:
                self.queue.get_nowait()
            except queue.Empty:
                pass

            # Halt the pipeline (closing file).
            self.pipeline.set_state(Gst.State.NULL)
            logger.info(f"closing generator_player: {self}")

            # Delete the pipeline object. This seems to be necessary on Python
            # 2, but not Python 3 for some reason: on 3.5, at least, the
            # pipeline gets dereferenced automatically.
            del self.pipeline

    def __del__(self):
        logger.info("delete time")
        self.close()

    # Context manager.
    def __enter__(self):
        logger.info("enter time")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        logger.info("exit time")
        self.close()
        # A truthy handle_error suppresses the in-flight exception.
        return self.handle_error
# Smoke test.
if __name__ == '__main__':
    wavefile = [
        '/home/pi/dev/bossjones-github/scarlett-dbus-poc/static/sounds/pi-listening.wav']
    # ORIG # for path in sys.argv[1:]:
    for path in wavefile:
        path = os.path.abspath(os.path.expanduser(path))
        # BUG FIX: ScarlettPlayer.__init__ takes (path, handle_error); the
        # original call passed only `path` and raised TypeError before any
        # audio was played. Passing None keeps the default
        # "don't swallow exceptions in __exit__" behavior.
        with ScarlettPlayer(path, None) as f:
            print(f.channels)
            print(f.samplerate)
            print(f.duration)
        # READ IN BLOCKS # print(len(s), ord(s[0]))