More Python examples and more descriptions of inter-pipeline connections
Commit 5065eecd11 (parent dbd54c4033)
7 changed files with 191 additions and 6 deletions
@@ -12,9 +12,10 @@ A few Python examples are also [included](python_examples/) for when you need GS
* [Mixing video & audio](mixing.md)
* [Images](images.md)
* [Queues](queues.md)
* [Writing to files](writing_to_files.md)
* [Capturing images](capturing_images.md)
* [Sending to multiple destinations (tee)](tee.md)
* [Sharing and receiving pipelines (including sending/receiving video from shared memory)](sharing_and_splitting_pipelines.md)
* [Network transfer](network_transfer.md) (including how to send so that VLC can preview)

## Sources and references

10 basics.md

@@ -2,7 +2,7 @@

## Playing content

These examples assume that the bash variable `SRC` is set to a video file (e.g. an mp4 file). You can set it with, e.g.

```
export SRC=/home/me/videos/test.mp4
```

@@ -130,7 +130,13 @@ gst-launch-1.0 -v uridecodebin uri="file://$AUDIO_SRC" ! autoaudiosink
gst-launch-1.0 -v filesrc location=$AUDIO_SRC ! mpegaudioparse ! decodebin ! autoaudiosink
```

### Play files back to back

See [https://coaxion.net/blog/2014/08/concatenate-multiple-streams-gaplessly-with-gstreamer/](https://coaxion.net/blog/2014/08/concatenate-multiple-streams-gaplessly-with-gstreamer/)
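For a rough idea of what that looks like in practice, here is a minimal Python sketch (mine, not from the blog post) using the `concat` element, with two test sources standing in for real files:

```
#!/usr/bin/env python
# Minimal sketch of gapless back-to-back playback using the 'concat' element.
# Two videotestsrc inputs stand in for real files; concat switches to the
# second input as soon as the first one reaches end-of-stream.
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst

Gst.init(None)

pipe = Gst.parse_launch(
    "concat name=c ! videoconvert ! autovideosink "
    "videotestsrc num-buffers=100 ! c. "
    "videotestsrc pattern=ball num-buffers=100 ! c. ")
pipe.set_state(Gst.State.PLAYING)

GObject.MainLoop().run()
```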

### Jumping to a certain point in a video/audio (seek/rewind/restart)

As far as I know, this isn't possible on the command-line. But it is possible in code. Here is a simple Python example:

[/python_examples/seeking.py](/python_examples/seeking.py)

19 python_examples/gstinter_01_audiotestsrc.py Normal file

@@ -0,0 +1,19 @@
#!/usr/bin/env python
# Shows how two pipelines can be connected, using interaudiosink/interaudiosrc.
# (Search and replace 'audio' with 'video' to get a video example.)
# It will output a test audio sound.
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
import os

Gst.init(None)
mainloop = GObject.MainLoop()

# The interaudiosink and interaudiosrc find each other because the 'channel'
# property (not set here) has the same default value on both.
pipe1 = Gst.parse_launch("audiotestsrc is-live=1 ! interaudiosink name=psink")
pipe2 = Gst.parse_launch("interaudiosrc name=psrc ! autoaudiosink")

pipe1.set_state(Gst.State.PLAYING)
pipe2.set_state(Gst.State.PLAYING)

mainloop.run()
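As the comment in the file suggests, the video equivalent is the same pattern with the element names swapped. A sketch (not a file in the repo):

```
#!/usr/bin/env python
# Sketch: the video version of the example above, using
# intervideosink/intervideosrc and a test video pattern.
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst

Gst.init(None)
mainloop = GObject.MainLoop()

pipe1 = Gst.parse_launch("videotestsrc is-live=1 ! intervideosink name=psink")
pipe2 = Gst.parse_launch("intervideosrc name=psrc ! autovideosink")

pipe1.set_state(Gst.State.PLAYING)
pipe2.set_state(Gst.State.PLAYING)

mainloop.run()
```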

87 python_examples/gstinter_02_separate_seeking.py Normal file

@@ -0,0 +1,87 @@
#!/usr/bin/env python
'''
To run this, set the environment variables $SRC and $SRC2 to full paths to two mp4 files.

This has three pipelines:

- Pipeline 1 plays the file $SRC
- Pipeline 2 plays the file $SRC2
- Pipeline 3 displays them mixed

Pipeline 1 --\
              ---> Pipeline 3
Pipeline 2 --/

This demo shows how, by splitting into pipelines, each source can be seeked independently.
And if one fails (e.g. file not found), the other continues.
'''

import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
import os
from time import sleep
from threading import Thread

Gst.init(None)
mainloop = GObject.MainLoop()

# We make the two pipelines
pipe1 = Gst.parse_launch("playbin uri=\"file://" + os.environ['SRC'] + "\"")
pipe2 = Gst.parse_launch("playbin uri=\"file://" + os.environ['SRC2'] + "\"")

# The third pipeline is more complex as it has to accept the other two, and mix.
pipe3 = Gst.parse_launch(
    "intervideosrc name=video_src_1 ! videomix. " +
    "interaudiosrc name=audio_src_1 ! autoaudiosink " +
    "intervideosrc name=video_src_2 ! videomix. " +
    "interaudiosrc name=audio_src_2 ! autoaudiosink " +
    "compositor name=videomix sink_1::xpos=800 sink_1::ypos=800 ! autovideosink "
)

# Because 'playbin' is a bin rather than an element, the bit we want within it is 'playsink':
pipe1_playsink = pipe1.get_by_name('playsink')
pipe2_playsink = pipe2.get_by_name('playsink')

audio_src_1 = pipe3.get_by_name('audio_src_1')
video_src_1 = pipe3.get_by_name('video_src_1')
audio_src_2 = pipe3.get_by_name('audio_src_2')
video_src_2 = pipe3.get_by_name('video_src_2')

# Make the sinks for the first two pipelines:
video_sink_1 = Gst.ElementFactory.make("intervideosink", "video_sink_1")
audio_sink_1 = Gst.ElementFactory.make("interaudiosink", "audio_sink_1")
video_sink_2 = Gst.ElementFactory.make("intervideosink", "video_sink_2")
audio_sink_2 = Gst.ElementFactory.make("interaudiosink", "audio_sink_2")
pipe1_playsink.set_property('video-sink', video_sink_1)
pipe1_playsink.set_property('audio-sink', audio_sink_1)
pipe2_playsink.set_property('video-sink', video_sink_2)
pipe2_playsink.set_property('audio-sink', audio_sink_2)

# We use 'channel' to name the two different connections between the pipelines
video_sink_1.set_property('channel', 'video-channel-1')
audio_sink_1.set_property('channel', 'audio-channel-1')
video_src_1.set_property('channel', 'video-channel-1')
audio_src_1.set_property('channel', 'audio-channel-1')
video_sink_2.set_property('channel', 'video-channel-2')
audio_sink_2.set_property('channel', 'audio-channel-2')
video_src_2.set_property('channel', 'video-channel-2')
audio_src_2.set_property('channel', 'audio-channel-2')

# Off we go!
pipe1.set_state(Gst.State.PLAYING)
pipe2.set_state(Gst.State.PLAYING)
pipe3.set_state(Gst.State.PLAYING)

# This bit allows the user to specify different offsets for each video
def separate_thread():
    while True:
        seconds = input("Enter the number of seconds to jump the FIRST video to (0=start): ")
        pipe1.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, Gst.SECOND * int(seconds))
        seconds = input("Enter the number of seconds to jump the SECOND video to (0=start): ")
        pipe2.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, Gst.SECOND * int(seconds))

myThread = Thread(target=separate_thread, args=())
myThread.start()

mainloop.run()

25 python_examples/seeking.py Normal file

@@ -0,0 +1,25 @@
#!/usr/bin/env python
# Shows seeking in action.

import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
import os
from time import sleep
from threading import Thread

Gst.init(None)
mainloop = GObject.MainLoop()
pipe = Gst.parse_launch("playbin uri=\"file://" + os.environ['SRC'] + "\"")
pipe.set_state(Gst.State.PLAYING)

def separate_thread():
    while True:
        seconds = input("Enter the number of seconds to jump to (0=start): ")
        seek_success = pipe.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, Gst.SECOND * int(seconds))
        print('seek_success=' + str(seek_success))

myThread = Thread(target=separate_thread, args=())
myThread.start()

mainloop.run()

sharing_and_splitting_pipelines.md

@@ -26,7 +26,7 @@ _As with the rest of this site, this is a rough guide, and is probably not compl
| *appsrc/appsink* | Allows video data to leave/enter the pipeline from your own application | n/a | https://thiblahute.github.io/GStreamer-doc/app-1.0/index.html?gi-language=c |
| *fdsrc/fdsink* | Allows communication via a file descriptor | n/a | https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer-plugins/html/gstreamer-plugins-fdsrc.html |
| *interpipe* | Allows simple communication between two or more independent pipelines. Very powerful. | * Not part of GStreamer (though it is open-source... I'm not sure why it's not been included) | * Well-documented at https://developer.ridgerun.com/wiki/index.php?title=GstInterpipe<br>* https://gstreamer.freedesktop.org/data/events/gstreamer-conference/2015/Melissa%20Montero%20-%20GST%20Daemon%20and%20Interpipes:%20A%20simpler%20way%20to%20get%20your%20applications%20done%20.pdf |
| *inter* (intervideosink, etc) | Send/receive AV between two pipelines in the same process | Only support raw audio or video, and drop events and queries at the boundary (source: [Nirbheek's blog](http://blog.nirbheek.in/2018/02/decoupling-gstreamer-pipelines.html)) | See below |
| *ipcpipeline* | Allows communication between pipelines *in different processes*. | Arrived with GStreamer 1.14 (Spring 2018) | https://www.collabora.com/news-and-blog/blog/2017/11/17/ipcpipeline-splitting-a-gstreamer-pipeline-into-multiple-processes/ |
| *gstproxy (proxysink and proxysrc)* | Send/receive AV between two pipelines in the same process. | Arrived with GStreamer 1.14 (Spring 2018) | See below |
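As a concrete illustration of the file-descriptor route (which isn't expanded on elsewhere in this page), here is a rough, untested Python sketch: two pipelines in one process joined by an `os.pipe()`, with the video muxed to MPEG-TS so it can travel over the descriptor:

```
#!/usr/bin/env python
# Rough, untested sketch (not from the cheat sheet): connect two pipelines
# through an OS pipe using fdsink (writer) and fdsrc (reader).
import os
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst

Gst.init(None)
read_fd, write_fd = os.pipe()

sender = Gst.parse_launch(
    "videotestsrc is-live=true ! x264enc tune=zerolatency ! mpegtsmux ! "
    "fdsink fd=%d" % write_fd)
receiver = Gst.parse_launch(
    "fdsrc fd=%d ! tsdemux ! decodebin ! videoconvert ! autovideosink" % read_fd)

receiver.set_state(Gst.State.PLAYING)
sender.set_state(Gst.State.PLAYING)

GObject.MainLoop().run()
```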

@@ -72,8 +72,9 @@ gst-launch-1.0 shmsrc socket-path=/tmp/tmpsock ! \

## gstproxy (proxysink and proxysrc)

I've used `proxysink` and `proxysrc` to split larger pipelines into smaller ones. That way, if a part fails, the rest can continue.

Unlike _inter_ below, _proxy_ will keep timing in sync. This is great if it's what you want... but if you want pipelines to have their own timing, it might not be right for your needs.

### gstproxy documentation

@@ -81,7 +82,6 @@ I've used proxysink and proxysrc to split large pipelines into smaller ones.

* Example code on proxysrc here: https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-bad-plugins/html/gst-plugins-bad-plugins-proxysrc.html
* Equivalent proxysink: https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-bad-plugins/html/gst-plugins-bad-plugins-proxysink.html

### gstproxy examples

It's not possible to use them via the command-line, because you connect them by having the receiver (`proxysrc`) reference the sender (`proxysink`).
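To make that concrete, here is a minimal sketch (mine, not one of the repo's example files) of wiring the two elements together in Python: the receiver's `proxysink` property is set to the sender element.

```
#!/usr/bin/env python
# Minimal sketch: connect proxysink (in the sending pipeline) to proxysrc
# (in the receiving pipeline) by setting the receiver's 'proxysink' property.
# Requires GStreamer >= 1.14.
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst

Gst.init(None)
mainloop = GObject.MainLoop()

sender = Gst.parse_launch("videotestsrc is-live=true ! proxysink name=psink")
receiver = Gst.parse_launch("proxysrc name=psrc ! videoconvert ! autovideosink")

# This property link is the step that can't be expressed on the command-line:
psink = sender.get_by_name('psink')
receiver.get_by_name('psrc').set_property('proxysink', psink)

receiver.set_state(Gst.State.PLAYING)
sender.set_state(Gst.State.PLAYING)

mainloop.run()
```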

@@ -104,3 +104,21 @@ A slightly more interesting example can be found at

* that `proxysink` can work with [`playbin`](https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-plugins/html/gst-plugins-base-plugins-playbin.html)
* separate proxies for audio and video
* that when the video ends, the other pipelines continue.

## inter (intervideosink/intervideosrc and their audio & subtitle counterparts)

The 'inter' versions of 'proxy' are dumber. They don't attempt to sync timings. But this can be useful if you want pipelines to be more independent. (Pros and cons on this are discussed [here](http://gstreamer-devel.966125.n4.nabble.com/How-to-connect-intervideosink-and-intervideosrc-for-IPC-pipelines-td4684567.html).)

* `interaudiosink` and `intervideosink` allow a pipeline to send audio/video to another pipeline.
* `interaudiosrc` and `intervideosrc` are the corresponding elements for receiving the audio/video.
* Subtitle versions are available too.

They are documented here: https://thiblahute.github.io/GStreamer-doc/inter-1.0/index.html?gi-language=c

An example can't be done via the command-line, but here is a simple example using Python:

[/python_examples/gstinter_01_audiotestsrc.py](/python_examples/gstinter_01_audiotestsrc.py)

Here's a more complex example, showing how two files can have separate seeking by being in different pipelines:

[/python_examples/gstinter_02_separate_seeking.py](/python_examples/gstinter_02_separate_seeking.py)

writing_to_files.md

@@ -1,7 +1,36 @@

# Writing to files (GStreamer command-line cheat sheet)

The [`filesink`](https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer-plugins/html/gstreamer-plugins-filesink.html) element allows writing to a file.

Note that, when using the command-line, the `-e` parameter ensures the output file is correctly completed on exit (it forces an end-of-stream, so the muxer can finish writing the file).

### Write to an mp4 file

This example creates a test video (an animated ball, with a clock), and writes it as an MP4 file. Also added is an audio test source: a short beep every second.

Leave it running for a few seconds, and then press CTRL-C to stop it.

```
gst-launch-1.0 -e videotestsrc pattern=ball ! \
    video/x-raw,width=1280,height=720 ! \
    timeoverlay font-desc="Sans, 48" ! \
    x264enc ! mux. \
    audiotestsrc is-live=true wave=ticks ! audioconvert ! audioresample ! faac bitrate=32000 ! mux. \
    mp4mux name=mux ! filesink location=file.mp4
```

### Other examples

```
gst-launch-1.0 -e videotestsrc ! video/x-raw, framerate=25/1, width=640, height=360 ! x264enc ! \
    mpegtsmux ! filesink location=test.ts
```

```
gst-launch-1.0 -e videotestsrc !\
    x264enc !\
    mpegtsmux !\
    filesink location=test.ts
```