grim/pidgin3-gstreamer-poc
Clone
Summary
Browse
Changes
Graph
Initial import
draft
2022-04-12, Gary Kramlich
3fe81f4ddd0c
Parents
Children
d4b019cb0c17
Initial import
2 files changed, 211 insertions(+), 0 deletions(-)
+65
-0
streams.dot
+146
-0
test.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/streams.dot Tue Apr 12 01:33:29 2022 -0500
@@ -0,0 +1,65 @@
// Media-flow diagram for the Pidgin 3 GStreamer proof of concept.
// Three clusters model the layers involved in a call -- the UI (Pidgin),
// libpurple, and a protocol plugin -- and the edges show how audio/video
// buffers travel between them.
digraph {
    rankdir="LR"

    // All nodes are rounded, filled rectangles; each cluster overrides the fill.
    node[shape="rect" style="filled,rounded"]

    subgraph cluster_ui {
        label="Pidgin [GstBin]"

        node[fillcolor="powderblue"]
        webcam[label="UI Video Source"]
        microphone[label="UI Audio Source"]
        ui_video_source[label="UI Video Source Pad"]
        // NOTE(review): ui_video_monitor has no edges below -- confirm whether
        // the video monitor path is future work or a leftover.
        ui_video_monitor[label="UI Video Monitor Pad"]
        ui_audio_source[label="UI Audio Source Pad"]
        ui_audio_monitor[label="UI Audio Monitor Pad"]

        ui_video_compositor[label="UI Video Compositor"]
        ui_audio_mixer[label="UI Audio Mixer"]

        ui_video_sink[label="UI Video Sink"]
        // FIX: removed the stray trailing space from this label so it matches
        // the formatting of every other node label.
        ui_audio_sink[label="UI Audio Sink"]
    }

    subgraph cluster_purple {
        label="Purple"

        node[fillcolor="lightpink1"]
        purple_ui_video_sink[label="Purple UI Video Sink"]
        purple_ui_audio_sink[label="Purple UI Audio Sink"]

        purple_video_tee[label="Purple UI Video Tee"]
        purple_audio_tee[label="Purple UI Audio Tee"]
    }

    subgraph cluster_protocol {
        label="Protocol [GstBin]"

        node[fillcolor="palegreen"]

        protocol_user_video_sink[label="Protocol User Video Sink Pad"]
        protocol_user_audio_sink[label="Protocol User Audio Sink Pad"]

        protocol_user_n_video_source[label="Protocol User N Video Source Pad"]
        protocol_user_n_audio_source[label="Protocol User N Audio Source Pad"]
    }

    // Any node first referenced below this point would render red; currently
    // every node is declared inside a cluster above.
    node[fillcolor="red"]

    // Local capture devices feed the UI's source pads.
    webcam -> ui_video_source
    microphone -> ui_audio_source
    ui_video_compositor -> ui_video_sink
    ui_audio_mixer -> ui_audio_sink

    // The UI hands its local streams to purple.
    ui_audio_source -> purple_ui_audio_sink
    ui_video_source -> purple_ui_video_sink

    // Purple tees each local stream out to both the UI (self view) and the
    // protocol plugin (what gets sent to the remote side).
    purple_ui_video_sink -> purple_video_tee
    purple_video_tee -> {ui_video_compositor, protocol_user_video_sink}

    purple_ui_audio_sink -> purple_audio_tee
    // NOTE(review): the video loop-back targets the compositor while the audio
    // loop-back targets the monitor pad -- confirm the asymmetry is intended.
    purple_audio_tee -> {ui_audio_monitor, protocol_user_audio_sink}

    // Remote participants' streams are composited/mixed into the UI output.
    protocol_user_n_video_source -> ui_video_compositor
    protocol_user_n_audio_source -> ui_audio_mixer
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test.py Tue Apr 12 01:33:29 2022 -0500
@@ -0,0 +1,146 @@
+#!/usr/bin/env python3
+
+import gi
+
+gi.require_version('Gst', '1.0')
+from gi.repository import Gst
+
+gi.require_version('GLib', '2.0')
+from gi.repository import GLib
+
+gi.require_version('GObject', '2.0')
+from gi.repository import GObject
+
# Well-known ghost-pad names, shared by the UI, core, and protocol elements
# so the pieces can be linked by pad name without passing pad references.
UI_VIDEO_SOURCE_PAD = 'ui-video-source-pad'
UI_VIDEO_SINK_PAD = 'ui-video-sink-pad'
UI_AUDIO_SOURCE_PAD = 'ui-audio-source-pad'

PROTOCOL_VIDEO_SINK_PAD = 'protocol-video-sink-pad'

# GStreamer must be initialized before any element is created below.
Gst.init(None)
+
class Call(object):
    """A single call: one UI element and one protocol element in a pipeline.

    The call owns the Gst.Pipeline; start() wires the UI's video source
    through a tee so the same stream reaches both the UI's own sink (self
    view) and the protocol element.
    """

    def __init__(self, ui_element, protocol_element):
        self.ui_element = ui_element
        self.protocol_element = protocol_element
        self.pipeline = None

    def start(self):
        """Build the pipeline, link the elements, and set it playing."""
        pipeline = Gst.Pipeline.new('call1')
        pipeline.add(self.ui_element)
        pipeline.add(self.protocol_element)

        # Duplicate the UI's outgoing video so it can be consumed twice.
        tee = Gst.ElementFactory.make('tee')
        pipeline.add(tee)
        self.ui_element.link_pads(UI_VIDEO_SOURCE_PAD, tee, 'sink')

        # output the video to the user
        tee.link_pads('src_0', self.ui_element, UI_VIDEO_SINK_PAD)

        # output the video to the protocol plugin
        tee.link_pads('src_1', self.protocol_element, PROTOCOL_VIDEO_SINK_PAD)

        # now that everything is wired up, start it
        bus = pipeline.get_bus()
        bus.add_signal_watch()
        bus.enable_sync_message_emission()
        bus.connect("message", self.message_handler)

        self.pipeline = pipeline
        self.pipeline.set_state(Gst.State.PLAYING)

    def message_handler(self, bus, message):
        """Bus watch: react to EOS/errors/warnings, dump dot graphs on state changes."""
        kind = message.type

        if kind == Gst.MessageType.EOS:
            self.pipeline.set_state(Gst.State.NULL)
            print('Got EOS')
        elif kind == Gst.MessageType.STATE_CHANGED:
            # Requires GST_DEBUG_DUMP_DOT_DIR to be set for files to appear.
            Gst.debug_bin_to_dot_file_with_ts(
                self.pipeline, Gst.DebugGraphDetails.ALL, 'foo')
        elif kind == Gst.MessageType.ERROR:
            err, debug = message.parse_error()
            print(f'Error: {err}, {debug}')
            self.pipeline.set_state(Gst.State.NULL)
        elif kind == Gst.MessageType.WARNING:
            err, debug = message.parse_warning()
            print(f'Warning: {err}, {debug}')

        # Keep the signal watch attached.
        return True
+
+
class UIElement(Gst.Bin):
    """Minimal GstBin subclass used as the 'purpleuielement' factory type.

    It adds no behavior of its own; it exists so the UI's bin can be created
    through the regular Gst.ElementFactory machinery after registration.
    """

    def __init__(self):
        super().__init__()
+
class UI(object):
    """Factory for the UI (Pidgin) side of a call."""

    def get_element(self):
        """Build and return the UI bin.

        The bin exposes two ghost pads: a video source pad backed by a test
        source, and a video sink pad that terminates in a gtksink window.
        """
        ui_bin = Gst.ElementFactory.make('purpleuielement', 'pidgin')

        # Stand-in for a real webcam.
        video = Gst.ElementFactory.make('videotestsrc', 'video-source')
        ui_bin.add(video)

        ui_bin.add_pad(
            Gst.GhostPad.new(UI_VIDEO_SOURCE_PAD, video.get_static_pad('src')))

        # NOTE(review): this compositor is added but never linked, and it is
        # (confusingly) named after the sink-pad constant -- confirm whether
        # it is a leftover or planned work before removing.
        compositor = Gst.ElementFactory.make('compositor', UI_VIDEO_SINK_PAD)
        ui_bin.add(compositor)

        # Window that shows the video back to the local user.
        video_sink = Gst.ElementFactory.make('gtksink', 'video-sink')
        ui_bin.add(video_sink)

        ui_bin.add_pad(
            Gst.GhostPad.new(UI_VIDEO_SINK_PAD, video_sink.get_static_pad('sink')))

        return ui_bin
+
+
class Core(object):
    """Stand-in for the purple core: pairs the UI with protocol plugins."""

    def __init__(self, ui):
        self.ui = ui

    def new_call(self, protocol):
        """Create a Call joining our UI's element with *protocol*'s element."""
        ui_element = self.ui.get_element()
        protocol_element = protocol.get_element()
        return Call(ui_element, protocol_element)
+
+
class Protocol(object):
    """Stand-in protocol plugin: consumes the user's outgoing video."""

    def get_element(self):
        """Build and return the protocol bin with a single ghost video-sink pad."""
        proto_bin = Gst.Bin.new('protocol')

        # Discard incoming buffers; sync=True keeps consumption paced by the
        # clock instead of draining as fast as possible.
        user_video = Gst.ElementFactory.make('fakesink', 'user-video-sink')
        user_video.set_property("sync", True)
        proto_bin.add(user_video)

        proto_bin.add_pad(
            Gst.GhostPad.new(PROTOCOL_VIDEO_SINK_PAD,
                             user_video.get_static_pad('sink')))

        # NOTE(review): forcing the child to PLAYING here looks unnecessary --
        # the containing pipeline normally manages child state; confirm before
        # removing.
        user_video.set_state(Gst.State.PLAYING)

        return proto_bin
+
+
def main():
    """Register the custom element, start one call, and run the main loop."""
    # Make UIElement creatable via Gst.ElementFactory.make('purpleuielement').
    Gst.Element.register(None, 'purpleuielement', Gst.Rank.NONE, UIElement)

    core = Core(UI())

    # User tells the UI that they want to start a call on protocol
    protocol = Protocol()
    call = core.new_call(protocol)
    call.start()

    # Block here; bus messages are dispatched from this loop.
    loop = GLib.MainLoop()
    loop.run()


if __name__ == '__main__':
    main()
+