Port examples/tutorials to the explicit Option parameter changes

This commit is contained in:
Sebastian Dröge 2019-04-15 18:17:42 +03:00
parent 8cd9b6c9fc
commit 8618085d46
10 changed files with 49 additions and 47 deletions

View file

@@ -70,7 +70,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// provide the format we request.
// This can be set after linking the two objects, because format negotiation between
// both elements will happen during pre-rolling of the pipeline.
appsink.set_caps(&gst::Caps::new_simple(
appsink.set_caps(Some(&gst::Caps::new_simple(
"audio/x-raw",
&[
("format", &gst_audio::AUDIO_FORMAT_S16.to_string()),
@@ -78,7 +78,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
("channels", &(1i32)),
("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
],
));
)));
// Getting data out of the appsink is done by setting callbacks on it.
// The appsink will then call those handlers, as soon as data is available.

View file

@@ -73,7 +73,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
.build()
.expect("Failed to create video info");
appsrc.set_caps(&video_info.to_caps().unwrap());
appsrc.set_caps(Some(&video_info.to_caps().unwrap()));
appsrc.set_property_format(gst::Format::Time);
// Our frame counter, that is stored in the mutable environment

View file

@@ -558,7 +558,7 @@ impl App {
.field("format", &gst_video::VideoFormat::Rgba.to_string())
.field("texture-target", &"2D")
.build();
appsink.set_caps(&caps);
appsink.set_caps(Some(&caps));
// get the glupload element to extract later the used context in it
let mut iter = sink.dynamic_cast::<gst::Bin>().unwrap().iterate_elements();

View file

@@ -106,11 +106,11 @@ fn main_loop() -> Result<(), Error> {
);
}
auth.set_tls_certificate(&cert);
auth.set_tls_certificate(Some(&cert));
auth.add_basic(basic.as_str(), &token);
// Here, we tell the RTSP server about the authentication method we
// configured above.
server.set_auth(&auth);
server.set_auth(Some(&auth));
factory.set_launch(args[1].as_str());
// Tell the RTSP server that we want to work in RECORD mode (clients send)

View file

@@ -9,13 +9,13 @@ fn tutorial_main() {
gst::init().unwrap();
// Create the elements
let source = gst::ElementFactory::make("videotestsrc", "source")
let source = gst::ElementFactory::make("videotestsrc", Some("source"))
.expect("Could not create source element.");
let sink =
gst::ElementFactory::make("autovideosink", "sink").expect("Could not create sink element");
let sink = gst::ElementFactory::make("autovideosink", Some("sink"))
.expect("Could not create sink element");
// Create the empty pipeline
let pipeline = gst::Pipeline::new("test-pipeline");
let pipeline = gst::Pipeline::new(Some("test-pipeline"));
// Build the pipeline
pipeline.add_many(&[&source, &sink]).unwrap();

View file

@@ -9,15 +9,15 @@ fn tutorial_main() {
gst::init().unwrap();
// Create the elements
let source = gst::ElementFactory::make("uridecodebin", "source")
let source = gst::ElementFactory::make("uridecodebin", Some("source"))
.expect("Could not create uridecodebin element.");
let convert = gst::ElementFactory::make("audioconvert", "convert")
let convert = gst::ElementFactory::make("audioconvert", Some("convert"))
.expect("Could not create convert element.");
let sink =
gst::ElementFactory::make("autoaudiosink", "sink").expect("Could not create sink element.");
let sink = gst::ElementFactory::make("autoaudiosink", Some("sink"))
.expect("Could not create sink element.");
// Create the empty pipeline
let pipeline = gst::Pipeline::new("test-pipeline");
let pipeline = gst::Pipeline::new(Some("test-pipeline"));
// Build the pipeline Note that we are NOT linking the source at this
// point. We will do it later.

View file

@@ -21,8 +21,8 @@ fn tutorial_main() {
gst::init().unwrap();
// Creat the playbin element
let playbin =
gst::ElementFactory::make("playbin", "playbin").expect("Failed to create playbin element");
let playbin = gst::ElementFactory::make("playbin", Some("playbin"))
.expect("Failed to create playbin element");
// Set the URI to play
let uri =

View file

@@ -94,14 +94,14 @@ fn tutorial_main() {
// Ask the factories to instantiate actual elements
let source = source_factory
.create("source")
.create(Some("source"))
.expect("Failed to create source element");
let sink = sink_factory
.create("sink")
.create(Some("sink"))
.expect("Failed to create sink element");
// Create the empty pipeline
let pipeline = gst::Pipeline::new("test-pipeline");
let pipeline = gst::Pipeline::new(Some("test-pipeline"));
pipeline.add_many(&[&source, &sink]).unwrap();
source.link(&sink).expect("Elements could not be linked.");

View file

@@ -11,18 +11,19 @@ fn tutorial_main() {
return;
}
let audio_source = gst::ElementFactory::make("audiotestsrc", "audio_source").unwrap();
let tee = gst::ElementFactory::make("tee", "tee").unwrap();
let audio_queue = gst::ElementFactory::make("queue", "audio_queue").unwrap();
let audio_convert = gst::ElementFactory::make("audioconvert", "audio_convert").unwrap();
let audio_resample = gst::ElementFactory::make("audioresample", "audio_resample").unwrap();
let audio_sink = gst::ElementFactory::make("autoaudiosink", "audio_sink").unwrap();
let video_queue = gst::ElementFactory::make("queue", "video_queue").unwrap();
let visual = gst::ElementFactory::make("wavescope", "visual").unwrap();
let video_convert = gst::ElementFactory::make("videoconvert", "video_convert").unwrap();
let video_sink = gst::ElementFactory::make("autovideosink", "video_sink").unwrap();
let audio_source = gst::ElementFactory::make("audiotestsrc", Some("audio_source")).unwrap();
let tee = gst::ElementFactory::make("tee", Some("tee")).unwrap();
let audio_queue = gst::ElementFactory::make("queue", Some("audio_queue")).unwrap();
let audio_convert = gst::ElementFactory::make("audioconvert", Some("audio_convert")).unwrap();
let audio_resample =
gst::ElementFactory::make("audioresample", Some("audio_resample")).unwrap();
let audio_sink = gst::ElementFactory::make("autoaudiosink", Some("audio_sink")).unwrap();
let video_queue = gst::ElementFactory::make("queue", Some("video_queue")).unwrap();
let visual = gst::ElementFactory::make("wavescope", Some("visual")).unwrap();
let video_convert = gst::ElementFactory::make("videoconvert", Some("video_convert")).unwrap();
let video_sink = gst::ElementFactory::make("autovideosink", Some("video_sink")).unwrap();
let pipeline = gst::Pipeline::new("test-pipeline");
let pipeline = gst::Pipeline::new(Some("test-pipeline"));
audio_source.set_property("freq", &215.0).unwrap();
visual.set_property_from_str("shader", "none");

View file

@@ -52,21 +52,22 @@ fn main() {
return;
}
let appsrc = gst::ElementFactory::make("appsrc", "audio_source").unwrap();
let tee = gst::ElementFactory::make("tee", "tee").unwrap();
let audio_queue = gst::ElementFactory::make("queue", "audio_queue").unwrap();
let audio_convert1 = gst::ElementFactory::make("audioconvert", "audio_convert1").unwrap();
let audio_resample = gst::ElementFactory::make("audioresample", "audio_resample").unwrap();
let audio_sink = gst::ElementFactory::make("autoaudiosink", "audio_sink").unwrap();
let video_queue = gst::ElementFactory::make("queue", "video_queue").unwrap();
let audio_convert2 = gst::ElementFactory::make("audioconvert", "audio_convert2").unwrap();
let visual = gst::ElementFactory::make("wavescope", "visual").unwrap();
let video_convert = gst::ElementFactory::make("videoconvert", "video_convert").unwrap();
let video_sink = gst::ElementFactory::make("autovideosink", "video_sink").unwrap();
let app_queue = gst::ElementFactory::make("queue", "app_queue").unwrap();
let appsink = gst::ElementFactory::make("appsink", "app_sink").unwrap();
let appsrc = gst::ElementFactory::make("appsrc", Some("audio_source")).unwrap();
let tee = gst::ElementFactory::make("tee", Some("tee")).unwrap();
let audio_queue = gst::ElementFactory::make("queue", Some("audio_queue")).unwrap();
let audio_convert1 = gst::ElementFactory::make("audioconvert", Some("audio_convert1")).unwrap();
let audio_resample =
gst::ElementFactory::make("audioresample", Some("audio_resample")).unwrap();
let audio_sink = gst::ElementFactory::make("autoaudiosink", Some("audio_sink")).unwrap();
let video_queue = gst::ElementFactory::make("queue", Some("video_queue")).unwrap();
let audio_convert2 = gst::ElementFactory::make("audioconvert", Some("audio_convert2")).unwrap();
let visual = gst::ElementFactory::make("wavescope", Some("visual")).unwrap();
let video_convert = gst::ElementFactory::make("videoconvert", Some("video_convert")).unwrap();
let video_sink = gst::ElementFactory::make("autovideosink", Some("video_sink")).unwrap();
let app_queue = gst::ElementFactory::make("queue", Some("app_queue")).unwrap();
let appsink = gst::ElementFactory::make("appsink", Some("app_sink")).unwrap();
let pipeline = gst::Pipeline::new("test-pipeline");
let pipeline = gst::Pipeline::new(Some("test-pipeline"));
visual.set_property_from_str("shader", "none");
visual.set_property_from_str("style", "lines");
@@ -130,7 +131,7 @@ fn main() {
let appsrc = appsrc
.dynamic_cast::<AppSrc>()
.expect("Source element is expected to be an appsrc!");
appsrc.set_caps(&audio_caps);
appsrc.set_caps(Some(&audio_caps));
appsrc.set_property_format(gst::Format::Time);
let appsink = appsink
@@ -216,7 +217,7 @@ fn main() {
// configure appsink
appsink.set_emit_signals(true);
appsink.set_caps(&audio_caps);
appsink.set_caps(Some(&audio_caps));
let data_weak = Arc::downgrade(&data);
appsink.connect_new_sample(move |_| {