threadshare/*src: rework pause/flush_start/flush_stop

This commit fixes several issues with the `Ts*Src` elements.

The pause functions used `cancel_task`, which breaks the `Task` loop at `await`
points. For some elements, this means extra care is needed to make sure no item
is lost. Moreover, cancelling the `Task` also cancels downstream processing,
which makes it difficult to ensure that elements can handle all cases.

This commit reimplements `Task::pause`, which allows the running loop iteration
to complete before the loop is paused.

See https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/277#note_439529
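
Below is a minimal sketch of that mechanism, kept separate from the actual
`runtime::Task` code: `pause` only switches to a transient Pausing state, the
loop checks the state at the top of each iteration and, once Paused, parks on a
oneshot channel until `start` resumes it. `TaskModel`, `State` and `run_loop`
are illustrative names, not the real API.

```rust
// Simplified model of the pause/resume behaviour (assumption: not the actual
// runtime::Task API, only the state-machine idea behind it).
use futures::channel::oneshot;
use futures::executor::block_on;
use std::sync::{Arc, Mutex};

#[derive(Debug, PartialEq)]
enum State {
    Started,
    Pausing,
    Paused,
}

struct Inner {
    state: State,
    resume_sender: Option<oneshot::Sender<()>>,
}

#[derive(Clone)]
struct TaskModel(Arc<Mutex<Inner>>);

impl TaskModel {
    fn new() -> Self {
        TaskModel(Arc::new(Mutex::new(Inner {
            state: State::Started,
            resume_sender: None,
        })))
    }

    // Request a pause: the iteration in progress is allowed to complete.
    fn pause(&self) {
        let mut inner = self.0.lock().unwrap();
        if inner.state == State::Started {
            inner.state = State::Pausing;
        }
    }

    // Resume a Paused loop, or withdraw a pending pause request.
    fn start(&self) {
        let mut inner = self.0.lock().unwrap();
        match inner.state {
            State::Pausing => inner.state = State::Started,
            State::Paused => {
                let _ = inner.resume_sender.take().unwrap().send(());
            }
            State::Started => (),
        }
    }

    // The loop only checks the state at the top of each iteration.
    async fn run_loop<F: FnMut() -> bool>(&self, mut iteration: F) {
        loop {
            let resume_receiver = {
                let mut inner = self.0.lock().unwrap();
                match inner.state {
                    State::Started => None,
                    State::Pausing => {
                        let (sender, receiver) = oneshot::channel();
                        inner.resume_sender = Some(sender);
                        inner.state = State::Paused;
                        Some(receiver)
                    }
                    State::Paused => unreachable!("Paused is only set by the loop"),
                }
            };

            if let Some(receiver) = resume_receiver {
                // Park here until `start()` sends the resume signal.
                let _ = receiver.await;
                self.0.lock().unwrap().state = State::Started;
            }

            if !iteration() {
                break;
            }
        }
    }
}

fn main() {
    let task = TaskModel::new();

    // Pause requested before the loop runs: it parks before the first iteration.
    task.pause();

    let resumer = task.clone();
    std::thread::spawn(move || {
        std::thread::sleep(std::time::Duration::from_millis(50));
        resumer.start();
    });

    let mut iterations = 0;
    block_on(task.run_loop(|| {
        iterations += 1;
        iterations < 2 // run two iterations after the resume, then exit
    }));
    assert_eq!(iterations, 2);
}
```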

In the Paused state, incoming items were rejected by TsAppSrc and DataQueue.

See https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/277#note_438455

- FlushStart must engage item rejection and cancel the `Task`.
- FlushStop must purge the internal stream and accept items again (see the
  sketch below).
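
A rough model of what FlushStart / FlushStop mean for the queueing side, using
a plain `futures::channel::mpsc` channel in place of the element internals
(`FlushModel`, `SrcState` and the item type are illustrative names; the real
elements also cancel and restart the pad task):

```rust
// Hedged sketch of the FlushStart / FlushStop split, modelled on the AppSrc
// changes in this commit but reduced to a bare channel and a state flag.
use futures::channel::mpsc;
use std::sync::Mutex;

#[derive(Debug, PartialEq)]
enum SrcState {
    Started,
    RejectBuffers,
}

struct FlushModel {
    state: Mutex<SrcState>,
    receiver: Mutex<mpsc::Receiver<u32>>,
}

impl FlushModel {
    // FlushStart: reject incoming items (the real element also cancels the Task).
    fn flush_start(&self) {
        *self.state.lock().unwrap() = SrcState::RejectBuffers;
    }

    // FlushStop: purge whatever is still queued, then accept items again.
    fn flush_stop(&self) {
        let mut receiver = self.receiver.lock().unwrap();
        while let Ok(Some(_item)) = receiver.try_next() {
            // Drop pending items left over from before the flush.
        }
        *self.state.lock().unwrap() = SrcState::Started;
    }
}

fn main() {
    let (mut sender, receiver) = mpsc::channel(8);
    let model = FlushModel {
        state: Mutex::new(SrcState::Started),
        receiver: Mutex::new(receiver),
    };

    sender.try_send(1).unwrap();

    model.flush_start();
    assert_eq!(*model.state.lock().unwrap(), SrcState::RejectBuffers);

    model.flush_stop();
    // The pending item was purged and items are accepted again.
    assert!(model.receiver.lock().unwrap().try_next().is_err());
    assert_eq!(*model.state.lock().unwrap(), SrcState::Started);
}
```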

If the task was cancelled, `push_prelude` could set `need_initial_events`
to `false` even though the events hadn't actually been pushed yet.

`TsAppSrc` used to renew its internal channel, which could cause buffer loss
when transitioning Playing -> Paused -> Playing.

See https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/issues/98
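
A minimal illustration of why keeping the same channel matters: items queued
while Paused are still there once the loop pulls again (the names and payload
below are made up for the example):

```rust
// Assumption: a bare futures channel standing in for the element's internal
// StreamItem channel, which is no longer re-created on Paused -> Playing.
use futures::channel::mpsc;

fn main() {
    let (mut sender, mut receiver) = mpsc::channel::<&str>(8);

    // Playing -> Paused: the task loop stops pulling, but the channel is kept.
    sender.try_send("queued while paused").unwrap();

    // Paused -> Playing: the same receiver is polled again, nothing was lost.
    assert_eq!(receiver.try_next().unwrap(), Some("queued while paused"));
}
```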
François Laignel 2020-03-19 19:34:51 +01:00 committed by Sebastian Dröge
parent 3c82f7d82e
commit 116cf9bd3c
13 changed files with 1412 additions and 343 deletions


@ -35,8 +35,8 @@ use lazy_static::lazy_static;
use rand; use rand;
use std::convert::TryInto; use std::convert::TryInto;
use std::sync::Arc;
use std::sync::Mutex as StdMutex; use std::sync::Mutex as StdMutex;
use std::sync::{self, Arc};
use std::u32; use std::u32;
use crate::runtime::prelude::*; use crate::runtime::prelude::*;
@ -138,48 +138,112 @@ enum StreamItem {
#[derive(Debug)] #[derive(Debug)]
struct AppSrcPadHandlerState { struct AppSrcPadHandlerState {
need_initial_events: bool, need_initial_events: bool,
need_segment: bool,
caps: Option<gst::Caps>, caps: Option<gst::Caps>,
configured_caps: Option<gst::Caps>,
} }
impl Default for AppSrcPadHandlerState { impl Default for AppSrcPadHandlerState {
fn default() -> Self { fn default() -> Self {
AppSrcPadHandlerState { AppSrcPadHandlerState {
need_initial_events: true, need_initial_events: true,
need_segment: true,
caps: None, caps: None,
configured_caps: None,
} }
} }
} }
#[derive(Debug, Default)] #[derive(Debug)]
struct AppSrcPadHandlerInner { struct AppSrcPadHandlerInner {
state: sync::RwLock<AppSrcPadHandlerState>, state: FutMutex<AppSrcPadHandlerState>,
configured_caps: StdMutex<Option<gst::Caps>>,
receiver: FutMutex<mpsc::Receiver<StreamItem>>,
} }
#[derive(Clone, Debug, Default)] impl AppSrcPadHandlerInner {
fn new(receiver: mpsc::Receiver<StreamItem>, caps: Option<gst::Caps>) -> Self {
AppSrcPadHandlerInner {
state: FutMutex::new(AppSrcPadHandlerState {
caps,
..Default::default()
}),
configured_caps: StdMutex::new(None),
receiver: FutMutex::new(receiver),
}
}
}
#[derive(Clone, Debug)]
struct AppSrcPadHandler(Arc<AppSrcPadHandlerInner>); struct AppSrcPadHandler(Arc<AppSrcPadHandlerInner>);
impl AppSrcPadHandler { impl AppSrcPadHandler {
fn start_task( fn new(receiver: mpsc::Receiver<StreamItem>, caps: Option<gst::Caps>) -> AppSrcPadHandler {
&self, AppSrcPadHandler(Arc::new(AppSrcPadHandlerInner::new(receiver, caps)))
pad: PadSrcRef<'_>, }
element: &gst::Element,
receiver: mpsc::Receiver<StreamItem>, fn reset(&self, pad: &PadSrcRef<'_>) {
) { // Precondition: task must be stopped
// TODO: assert the task state when Task & PadSrc are separated
gst_debug!(CAT, obj: pad.gst_pad(), "Resetting handler");
*self.0.state.try_lock().expect("State locked elsewhere") = Default::default();
*self.0.configured_caps.lock().unwrap() = None;
self.flush(pad);
gst_debug!(CAT, obj: pad.gst_pad(), "Handler reset");
}
fn flush(&self, pad: &PadSrcRef<'_>) {
// Precondition: task must be stopped
// TODO: assert the task state when Task & PadSrc are separated
gst_debug!(CAT, obj: pad.gst_pad(), "Flushing");
// Purge the channel
let mut receiver = self
.0
.receiver
.try_lock()
.expect("Channel receiver is locked elsewhere");
loop {
match receiver.try_next() {
Ok(Some(_item)) => {
gst_log!(CAT, obj: pad.gst_pad(), "Dropping pending item");
}
Err(_) => {
gst_log!(CAT, obj: pad.gst_pad(), "No more pending item");
break;
}
Ok(None) => {
panic!("Channel sender dropped");
}
}
}
self.0
.state
.try_lock()
.expect("state is locked elsewhere")
.need_segment = true;
gst_debug!(CAT, obj: pad.gst_pad(), "Flushed");
}
fn start_task(&self, pad: PadSrcRef<'_>, element: &gst::Element) {
let this = self.clone(); let this = self.clone();
let pad_weak = pad.downgrade(); let pad_weak = pad.downgrade();
let element = element.clone(); let element = element.clone();
let receiver = Arc::new(FutMutex::new(receiver));
pad.start_task(move || { pad.start_task(move || {
let this = this.clone(); let this = this.clone();
let pad_weak = pad_weak.clone(); let pad_weak = pad_weak.clone();
let element = element.clone(); let element = element.clone();
let receiver = Arc::clone(&receiver);
async move { async move {
let item = receiver.lock().await.next().await; let item = this.0.receiver.lock().await.next().await;
let pad = pad_weak.upgrade().expect("PadSrc no longer exists"); let pad = pad_weak.upgrade().expect("PadSrc no longer exists");
let item = match item { let item = match item {
Some(item) => item, Some(item) => item,
None => { None => {
@ -219,37 +283,31 @@ impl AppSrcPadHandler {
} }
async fn push_prelude(&self, pad: &PadSrcRef<'_>, _element: &gst::Element) { async fn push_prelude(&self, pad: &PadSrcRef<'_>, _element: &gst::Element) {
let mut events = Vec::new(); let mut state = self.0.state.lock().await;
if state.need_initial_events {
// Only `read` the state in the hot path
if self.0.state.read().unwrap().need_initial_events {
// We will need to `write` and we also want to prevent
// any changes on the state while we are handling initial events
let mut state = self.0.state.write().unwrap();
assert!(state.need_initial_events);
gst_debug!(CAT, obj: pad.gst_pad(), "Pushing initial events"); gst_debug!(CAT, obj: pad.gst_pad(), "Pushing initial events");
let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>()); let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
events.push( let stream_start_evt = gst::Event::new_stream_start(&stream_id)
gst::Event::new_stream_start(&stream_id) .group_id(gst::GroupId::next())
.group_id(gst::GroupId::next()) .build();
.build(), pad.push_event(stream_start_evt).await;
);
if let Some(ref caps) = state.caps { if let Some(ref caps) = state.caps {
events.push(gst::Event::new_caps(&caps).build()); let caps_evt = gst::Event::new_caps(&caps).build();
state.configured_caps = Some(caps.clone()); pad.push_event(caps_evt).await;
*self.0.configured_caps.lock().unwrap() = Some(caps.clone());
} }
events.push(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
);
state.need_initial_events = false; state.need_initial_events = false;
} }
for event in events { if state.need_segment {
pad.push_event(event).await; let segment_evt =
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build();
pad.push_event(segment_evt).await;
state.need_segment = false;
} }
} }
@ -259,6 +317,8 @@ impl AppSrcPadHandler {
element: &gst::Element, element: &gst::Element,
item: StreamItem, item: StreamItem,
) -> Result<gst::FlowSuccess, gst::FlowError> { ) -> Result<gst::FlowSuccess, gst::FlowError> {
gst_log!(CAT, obj: pad.gst_pad(), "Handling {:?}", item);
self.push_prelude(pad, element).await; self.push_prelude(pad, element).await;
match item { match item {
@ -291,13 +351,11 @@ impl PadSrcHandler for AppSrcPadHandler {
let ret = match event.view() { let ret = match event.view() {
EventView::FlushStart(..) => { EventView::FlushStart(..) => {
let _ = appsrc.pause(element); appsrc.flush_start(element);
true true
} }
EventView::FlushStop(..) => { EventView::FlushStop(..) => {
appsrc.flush_stop(element); appsrc.flush_stop(element);
true true
} }
EventView::Reconfigure(..) => true, EventView::Reconfigure(..) => true,
@ -335,8 +393,7 @@ impl PadSrcHandler for AppSrcPadHandler {
true true
} }
QueryView::Caps(ref mut q) => { QueryView::Caps(ref mut q) => {
let state = self.0.state.read().unwrap(); let caps = if let Some(caps) = self.0.configured_caps.lock().unwrap().as_ref() {
let caps = if let Some(ref caps) = state.configured_caps {
q.get_filter() q.get_filter()
.map(|f| f.intersect_with_mode(caps, gst::CapsIntersectMode::First)) .map(|f| f.intersect_with_mode(caps, gst::CapsIntersectMode::First))
.unwrap_or_else(|| caps.clone()) .unwrap_or_else(|| caps.clone())
@ -362,20 +419,29 @@ impl PadSrcHandler for AppSrcPadHandler {
} }
} }
#[derive(Debug, Eq, PartialEq)]
enum AppSrcState {
Paused,
RejectBuffers,
Started,
}
#[derive(Debug)]
struct AppSrc { struct AppSrc {
src_pad: PadSrc, src_pad: PadSrc,
src_pad_handler: AppSrcPadHandler, src_pad_handler: StdMutex<Option<AppSrcPadHandler>>,
state: StdMutex<AppSrcState>,
sender: StdMutex<Option<mpsc::Sender<StreamItem>>>, sender: StdMutex<Option<mpsc::Sender<StreamItem>>>,
settings: StdMutex<Settings>, settings: StdMutex<Settings>,
} }
impl AppSrc { impl AppSrc {
fn push_buffer(&self, element: &gst::Element, mut buffer: gst::Buffer) -> bool { fn push_buffer(&self, element: &gst::Element, mut buffer: gst::Buffer) -> bool {
let mut sender = self.sender.lock().unwrap(); let state = self.state.lock().unwrap();
let sender = match sender.as_mut() { if *state == AppSrcState::RejectBuffers {
Some(sender) => sender, gst_debug!(CAT, obj: element, "Rejecting buffer due to element state");
None => return false, return false;
}; }
let do_timestamp = self.settings.lock().unwrap().do_timestamp; let do_timestamp = self.settings.lock().unwrap().do_timestamp;
if do_timestamp { if do_timestamp {
@ -392,7 +458,14 @@ impl AppSrc {
} }
} }
match sender.try_send(StreamItem::Buffer(buffer)) { match self
.sender
.lock()
.unwrap()
.as_mut()
.unwrap()
.try_send(StreamItem::Buffer(buffer))
{
Ok(_) => true, Ok(_) => true,
Err(err) => { Err(err) => {
gst_error!(CAT, obj: element, "Failed to queue buffer: {}", err); gst_error!(CAT, obj: element, "Failed to queue buffer: {}", err);
@ -407,6 +480,7 @@ impl AppSrc {
Some(sender) => sender, Some(sender) => sender,
None => return false, None => return false,
}; };
let eos = StreamItem::Event(gst::Event::new_eos().build()); let eos = StreamItem::Event(gst::Event::new_eos().build());
match sender.try_send(eos) { match sender.try_send(eos) {
Ok(_) => true, Ok(_) => true,
@ -421,8 +495,6 @@ impl AppSrc {
let settings = self.settings.lock().unwrap(); let settings = self.settings.lock().unwrap();
gst_debug!(CAT, obj: element, "Preparing"); gst_debug!(CAT, obj: element, "Preparing");
self.src_pad_handler.0.state.write().unwrap().caps = settings.caps.clone();
let context = let context =
Context::acquire(&settings.context, settings.context_wait).map_err(|err| { Context::acquire(&settings.context, settings.context_wait).map_err(|err| {
gst_error_msg!( gst_error_msg!(
@ -431,8 +503,14 @@ impl AppSrc {
) )
})?; })?;
let max_buffers = settings.max_buffers.try_into().unwrap();
let (sender, receiver) = mpsc::channel(max_buffers);
*self.sender.lock().unwrap() = Some(sender);
let src_pad_handler = AppSrcPadHandler::new(receiver, settings.caps.clone());
self.src_pad self.src_pad
.prepare(context, &self.src_pad_handler) .prepare(context, &src_pad_handler)
.map_err(|err| { .map_err(|err| {
gst_error_msg!( gst_error_msg!(
gst::ResourceError::OpenRead, gst::ResourceError::OpenRead,
@ -440,6 +518,8 @@ impl AppSrc {
) )
})?; })?;
*self.src_pad_handler.lock().unwrap() = Some(src_pad_handler);
gst_debug!(CAT, obj: element, "Prepared"); gst_debug!(CAT, obj: element, "Prepared");
Ok(()) Ok(())
@ -449,8 +529,9 @@ impl AppSrc {
gst_debug!(CAT, obj: element, "Unpreparing"); gst_debug!(CAT, obj: element, "Unpreparing");
let _ = self.src_pad.unprepare(); let _ = self.src_pad.unprepare();
*self.src_pad_handler.lock().unwrap() = None;
*self.src_pad_handler.0.state.write().unwrap() = Default::default(); *self.sender.lock().unwrap() = None;
gst_debug!(CAT, obj: element, "Unprepared"); gst_debug!(CAT, obj: element, "Unprepared");
@ -460,16 +541,18 @@ impl AppSrc {
fn stop(&self, element: &gst::Element) -> Result<(), ()> { fn stop(&self, element: &gst::Element) -> Result<(), ()> {
gst_debug!(CAT, obj: element, "Stopping"); gst_debug!(CAT, obj: element, "Stopping");
*self.state.lock().unwrap() = AppSrcState::RejectBuffers;
// Now stop the task if it was still running, blocking // Now stop the task if it was still running, blocking
// until this has actually happened // until this has actually happened
self.src_pad.stop_task(); self.src_pad.stop_task();
self.src_pad_handler self.src_pad_handler
.0 .lock()
.state
.write()
.unwrap() .unwrap()
.need_initial_events = true; .as_ref()
.unwrap()
.reset(&self.src_pad.as_ref());
gst_debug!(CAT, obj: element, "Stopped"); gst_debug!(CAT, obj: element, "Stopped");
@ -477,15 +560,15 @@ impl AppSrc {
} }
fn start(&self, element: &gst::Element) -> Result<(), ()> { fn start(&self, element: &gst::Element) -> Result<(), ()> {
let mut sender = self.sender.lock().unwrap(); let mut state = self.state.lock().unwrap();
if sender.is_some() { if *state == AppSrcState::Started {
gst_debug!(CAT, obj: element, "Already started"); gst_debug!(CAT, obj: element, "Already started");
return Ok(()); return Ok(());
} }
gst_debug!(CAT, obj: element, "Starting"); gst_debug!(CAT, obj: element, "Starting");
self.start_unchecked(element, &mut sender); self.start_unchecked(element, &mut state);
gst_debug!(CAT, obj: element, "Started"); gst_debug!(CAT, obj: element, "Started");
@ -493,12 +576,10 @@ impl AppSrc {
} }
fn flush_stop(&self, element: &gst::Element) { fn flush_stop(&self, element: &gst::Element) {
// Keep the lock on the `sender` until `flush_stop` is complete // Keep the lock on the `state` until `flush_stop` is complete
// so as to prevent race conditions due to concurrent state transitions. // so as to prevent race conditions due to concurrent state transitions.
// Note that this won't deadlock as `sender` is not used let mut state = self.state.lock().unwrap();
// within the `src_pad`'s `Task`. if *state == AppSrcState::Started {
let mut sender = self.sender.lock().unwrap();
if sender.is_some() {
gst_debug!(CAT, obj: element, "Already started"); gst_debug!(CAT, obj: element, "Already started");
return; return;
} }
@ -506,38 +587,52 @@ impl AppSrc {
gst_debug!(CAT, obj: element, "Stopping Flush"); gst_debug!(CAT, obj: element, "Stopping Flush");
self.src_pad.stop_task(); self.src_pad.stop_task();
self.start_unchecked(element, &mut sender);
gst_debug!(CAT, obj: element, "Stopped Flush");
}
fn start_unchecked(
&self,
element: &gst::Element,
sender: &mut Option<mpsc::Sender<StreamItem>>,
) {
let max_buffers = self
.settings
.lock()
.unwrap()
.max_buffers
.try_into()
.unwrap();
let (new_sender, receiver) = mpsc::channel(max_buffers);
*sender = Some(new_sender);
self.src_pad_handler self.src_pad_handler
.start_task(self.src_pad.as_ref(), element, receiver); .lock()
.unwrap()
.as_ref()
.unwrap()
.flush(&self.src_pad.as_ref());
self.start_unchecked(element, &mut state);
gst_debug!(CAT, obj: element, "Flush Stopped");
}
fn start_unchecked(&self, element: &gst::Element, state: &mut AppSrcState) {
self.src_pad_handler
.lock()
.unwrap()
.as_ref()
.unwrap()
.start_task(self.src_pad.as_ref(), element);
*state = AppSrcState::Started;
}
fn flush_start(&self, element: &gst::Element) {
// Keep the lock on the `state` until `flush_start` is complete
// so as to prevent race conditions due to concurrent state transitions.
let mut state = self.state.lock().unwrap();
gst_debug!(CAT, obj: element, "Starting Flush");
*state = AppSrcState::RejectBuffers;
self.src_pad.cancel_task();
gst_debug!(CAT, obj: element, "Flush Started");
} }
fn pause(&self, element: &gst::Element) -> Result<(), ()> { fn pause(&self, element: &gst::Element) -> Result<(), ()> {
let mut sender = self.sender.lock().unwrap(); // Lock the state to prevent race condition due to concurrent FlushStop
let mut state = self.state.lock().unwrap();
gst_debug!(CAT, obj: element, "Pausing"); gst_debug!(CAT, obj: element, "Pausing");
self.src_pad.cancel_task(); self.src_pad.pause_task();
// Prevent subsequent items from being enqueued *state = AppSrcState::Paused;
*sender = None;
gst_debug!(CAT, obj: element, "Paused"); gst_debug!(CAT, obj: element, "Paused");
@ -616,7 +711,8 @@ impl ObjectSubclass for AppSrc {
Self { Self {
src_pad, src_pad,
src_pad_handler: AppSrcPadHandler::default(), src_pad_handler: StdMutex::new(None),
state: StdMutex::new(AppSrcState::RejectBuffers),
sender: StdMutex::new(None), sender: StdMutex::new(None),
settings: StdMutex::new(Settings::default()), settings: StdMutex::new(Settings::default()),
} }


@@ -185,7 +185,7 @@ impl DataQueue {
     pub fn push(&self, item: DataQueueItem) -> Result<(), DataQueueItem> {
         let mut inner = self.0.lock().unwrap();
 
-        if inner.state != DataQueueState::Started {
+        if inner.state == DataQueueState::Stopped {
             gst_debug!(
                 DATA_QUEUE_CAT,
                 obj: &inner.element,


@ -838,7 +838,7 @@ impl ProxySrcPadHandler {
let item = match item { let item = match item {
Some(item) => item, Some(item) => item,
None => { None => {
gst_log!(SRC_CAT, obj: pad.gst_pad(), "DataQueue Stopped"); gst_log!(SRC_CAT, obj: pad.gst_pad(), "DataQueue Stopped or Paused");
return glib::Continue(false); return glib::Continue(false);
} }
}; };
@ -1109,6 +1109,7 @@ impl ProxySrc {
self.src_pad.stop_task(); self.src_pad.stop_task();
let dataqueue = dataqueue.as_ref().unwrap(); let dataqueue = dataqueue.as_ref().unwrap();
dataqueue.clear();
dataqueue.stop(); dataqueue.stop();
gst_debug!(SRC_CAT, obj: element, "Stopped"); gst_debug!(SRC_CAT, obj: element, "Stopped");
@ -1171,11 +1172,9 @@ impl ProxySrc {
let dataqueue = self.dataqueue.lock().unwrap(); let dataqueue = self.dataqueue.lock().unwrap();
gst_debug!(SRC_CAT, obj: element, "Pausing"); gst_debug!(SRC_CAT, obj: element, "Pausing");
self.src_pad.cancel_task(); dataqueue.as_ref().unwrap().pause();
let dataqueue = dataqueue.as_ref().unwrap(); self.src_pad.pause_task();
dataqueue.pause();
dataqueue.clear();
gst_debug!(SRC_CAT, obj: element, "Paused"); gst_debug!(SRC_CAT, obj: element, "Paused");


@@ -157,6 +157,12 @@ pub fn block_on<Fut: Future>(future: Fut) -> Fut::Output {
     })
 }
 
+/// Yields execution back to the runtime
+#[inline]
+pub async fn yield_now() {
+    tokio::task::yield_now().await;
+}
+
 struct ContextThread {
     name: String,
 }


@@ -343,6 +343,11 @@ impl<'a> PadSrcRef<'a> {
         self.strong.start_task(func);
     }
 
+    /// Pauses the `Started` `Pad` `Task`.
+    pub fn pause_task(&self) {
+        self.strong.pause_task();
+    }
+
     /// Cancels the `Started` `Pad` `Task`.
     pub fn cancel_task(&self) {
         self.strong.cancel_task();
@@ -473,6 +478,11 @@ impl PadSrcStrong {
         self.0.task.start(func);
     }
 
+    #[inline]
+    fn pause_task(&self) {
+        self.0.task.pause();
+    }
+
     #[inline]
     fn cancel_task(&self) {
         self.0.task.cancel();
@@ -722,6 +732,10 @@ impl PadSrc {
         self.0.start_task(func);
     }
 
+    pub fn pause_task(&self) {
+        self.0.pause_task();
+    }
+
     pub fn cancel_task(&self) {
         self.0.cancel_task();
     }


@@ -1,4 +1,4 @@
-// Copyright (C) 2019 François Laignel <fengalin@free.fr>
+// Copyright (C) 2019-2020 François Laignel <fengalin@free.fr>
 // Copyright (C) 2020 Sebastian Dröge <sebastian@centricular.com>
 //
 // This library is free software; you can redistribute it and/or
@ -18,18 +18,29 @@
//! An execution loop to run asynchronous processing. //! An execution loop to run asynchronous processing.
use futures::channel::oneshot;
use futures::future::{abortable, AbortHandle, Aborted}; use futures::future::{abortable, AbortHandle, Aborted};
use futures::prelude::*; use futures::prelude::*;
use gst::TaskState; use gst::{gst_debug, gst_error, gst_log, gst_trace, gst_warning};
use gst::{gst_debug, gst_log, gst_trace, gst_warning};
use std::fmt; use std::fmt;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use super::executor::block_on; use super::executor::{block_on, yield_now};
use super::{Context, JoinHandle, RUNTIME_CAT}; use super::{Context, JoinHandle, RUNTIME_CAT};
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy)]
pub enum TaskState {
Cancelled,
Started,
Stopped,
Paused,
Pausing,
Preparing,
Unprepared,
}
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
pub enum TaskError { pub enum TaskError {
ActiveTask, ActiveTask,
@ -53,27 +64,27 @@ struct TaskInner {
prepare_abort_handle: Option<AbortHandle>, prepare_abort_handle: Option<AbortHandle>,
abort_handle: Option<AbortHandle>, abort_handle: Option<AbortHandle>,
loop_handle: Option<JoinHandle<Result<(), Aborted>>>, loop_handle: Option<JoinHandle<Result<(), Aborted>>>,
resume_sender: Option<oneshot::Sender<()>>,
} }
impl Default for TaskInner { impl Default for TaskInner {
fn default() -> Self { fn default() -> Self {
TaskInner { TaskInner {
context: None, context: None,
state: TaskState::Stopped, state: TaskState::Unprepared,
prepare_handle: None, prepare_handle: None,
prepare_abort_handle: None, prepare_abort_handle: None,
abort_handle: None, abort_handle: None,
loop_handle: None, loop_handle: None,
resume_sender: None,
} }
} }
} }
impl Drop for TaskInner { impl Drop for TaskInner {
fn drop(&mut self) { fn drop(&mut self) {
// Check invariant which can't be held automatically in `Task` if self.state != TaskState::Unprepared {
// because `drop` can't be `async` panic!("Missing call to `Task::unprepared`");
if self.state != TaskState::Stopped {
panic!("Missing call to `Task::stop`");
} }
} }
} }
@ -103,7 +114,7 @@ impl Task {
gst_debug!(RUNTIME_CAT, "Preparing task"); gst_debug!(RUNTIME_CAT, "Preparing task");
let mut inner = self.0.lock().unwrap(); let mut inner = self.0.lock().unwrap();
if inner.state != TaskState::Stopped { if inner.state != TaskState::Unprepared {
return Err(TaskError::ActiveTask); return Err(TaskError::ActiveTask);
} }
@ -138,6 +149,7 @@ impl Task {
inner.context = Some(context); inner.context = Some(context);
inner.state = TaskState::Preparing;
gst_debug!(RUNTIME_CAT, "Task prepared"); gst_debug!(RUNTIME_CAT, "Task prepared");
Ok(()) Ok(())
@ -147,7 +159,7 @@ impl Task {
gst_debug!(RUNTIME_CAT, "Preparing task"); gst_debug!(RUNTIME_CAT, "Preparing task");
let mut inner = self.0.lock().unwrap(); let mut inner = self.0.lock().unwrap();
if inner.state != TaskState::Stopped { if inner.state != TaskState::Unprepared {
return Err(TaskError::ActiveTask); return Err(TaskError::ActiveTask);
} }
@ -156,19 +168,25 @@ impl Task {
inner.context = Some(context); inner.context = Some(context);
inner.state = TaskState::Stopped;
gst_debug!(RUNTIME_CAT, "Task prepared"); gst_debug!(RUNTIME_CAT, "Task prepared");
Ok(()) Ok(())
} }
pub fn unprepare(&self) -> Result<(), TaskError> { pub fn unprepare(&self) -> Result<(), TaskError> {
gst_debug!(RUNTIME_CAT, "Unpreparing task");
let mut inner = self.0.lock().unwrap(); let mut inner = self.0.lock().unwrap();
if inner.state != TaskState::Stopped { if inner.state != TaskState::Stopped {
gst_error!(
RUNTIME_CAT,
"Attempt to Unprepare a task in state {:?}",
inner.state
);
return Err(TaskError::ActiveTask); return Err(TaskError::ActiveTask);
} }
gst_debug!(RUNTIME_CAT, "Unpreparing task");
// Abort any pending preparation // Abort any pending preparation
if let Some(abort_handle) = inner.prepare_abort_handle.take() { if let Some(abort_handle) = inner.prepare_abort_handle.take() {
abort_handle.abort(); abort_handle.abort();
@ -176,6 +194,9 @@ impl Task {
let prepare_handle = inner.prepare_handle.take(); let prepare_handle = inner.prepare_handle.take();
let context = inner.context.take().unwrap(); let context = inner.context.take().unwrap();
inner.state = TaskState::Unprepared;
drop(inner); drop(inner);
if let Some(prepare_handle) = prepare_handle { if let Some(prepare_handle) = prepare_handle {
@ -251,8 +272,27 @@ impl Task {
gst_log!(RUNTIME_CAT, "Task already Started"); gst_log!(RUNTIME_CAT, "Task already Started");
return; return;
} }
TaskState::Paused | TaskState::Stopped => (), TaskState::Pausing => {
other => unreachable!("Unexpected Task state {:?}", other), gst_debug!(RUNTIME_CAT, "Re-starting a Pausing task");
assert!(inner.resume_sender.is_none());
inner.state = TaskState::Started;
return;
}
TaskState::Paused => {
inner
.resume_sender
.take()
.expect("Task Paused but the resume_sender is already taken")
.send(())
.expect("Task Paused but the resume_receiver was dropped");
gst_log!(RUNTIME_CAT, "Resume requested");
return;
}
TaskState::Stopped | TaskState::Cancelled | TaskState::Preparing => (),
TaskState::Unprepared => panic!("Attempt to start an unprepared Task"),
} }
gst_debug!(RUNTIME_CAT, "Starting Task"); gst_debug!(RUNTIME_CAT, "Starting Task");
@ -280,21 +320,50 @@ impl Task {
let res = prepare_handle.await; let res = prepare_handle.await;
if res.is_err() { if res.is_err() {
gst_warning!(RUNTIME_CAT, "Preparing failed"); gst_warning!(RUNTIME_CAT, "Preparing failed");
inner_clone.lock().unwrap().state = TaskState::Unprepared;
return; return;
} }
inner_clone.lock().unwrap().state = TaskState::Stopped;
} }
gst_trace!(RUNTIME_CAT, "Starting task loop"); gst_trace!(RUNTIME_CAT, "Starting task loop");
// Then loop as long as we're actually running // Then loop as long as we're actually running
loop { loop {
match inner_clone.lock().unwrap().state { let mut resume_receiver = {
TaskState::Started => (), let mut inner = inner_clone.lock().unwrap();
TaskState::Paused | TaskState::Stopped => { match inner.state {
gst_trace!(RUNTIME_CAT, "Stopping task loop"); TaskState::Started => None,
break; TaskState::Pausing => {
let (sender, receiver) = oneshot::channel();
inner.resume_sender = Some(sender);
inner.state = TaskState::Paused;
Some(receiver)
}
TaskState::Stopped | TaskState::Cancelled => {
gst_trace!(RUNTIME_CAT, "Stopping task loop");
break;
}
TaskState::Paused => {
unreachable!("The Paused state is controlled by the loop");
}
other => {
unreachable!("Task loop iteration in state {:?}", other);
}
} }
other => unreachable!("Unexpected Task state {:?}", other), };
if let Some(resume_receiver) = resume_receiver.take() {
gst_trace!(RUNTIME_CAT, "Task loop paused");
let _ = resume_receiver.await;
gst_trace!(RUNTIME_CAT, "Resuming task loop");
inner_clone.lock().unwrap().state = TaskState::Started;
} }
if func().await == glib::Continue(false) { if func().await == glib::Continue(false) {
@ -309,12 +378,15 @@ impl Task {
.map(|h| h.task_id() == task_id) .map(|h| h.task_id() == task_id)
.unwrap_or(false) .unwrap_or(false)
{ {
gst_trace!(RUNTIME_CAT, "Pausing task loop"); gst_trace!(RUNTIME_CAT, "Exiting task loop");
inner.state = TaskState::Paused; inner.state = TaskState::Cancelled;
} }
break; break;
} }
// Make sure the loop can be aborted even if `func` never goes `Pending`.
yield_now().await;
} }
// Once the loop function is finished we can forget the corresponding // Once the loop function is finished we can forget the corresponding
@ -332,6 +404,7 @@ impl Task {
{ {
inner.abort_handle = None; inner.abort_handle = None;
inner.loop_handle = None; inner.loop_handle = None;
inner.state = TaskState::Stopped;
} }
} }
@ -351,11 +424,29 @@ impl Task {
gst_debug!(RUNTIME_CAT, "Task Started"); gst_debug!(RUNTIME_CAT, "Task Started");
} }
/// Requests the `Task` loop to pause.
///
/// If an iteration is in progress, it will run to completion,
/// then no more iteration will be executed before `start` is called again.
pub fn pause(&self) {
let mut inner = self.0.lock().unwrap();
if inner.state != TaskState::Started {
gst_log!(RUNTIME_CAT, "Task not started");
return;
}
inner.state = TaskState::Pausing;
gst_debug!(RUNTIME_CAT, "Pause requested");
}
/// Cancels the `Task` so that it stops running as soon as possible. /// Cancels the `Task` so that it stops running as soon as possible.
pub fn cancel(&self) { pub fn cancel(&self) {
let mut inner = self.0.lock().unwrap(); let mut inner = self.0.lock().unwrap();
if inner.state != TaskState::Started { if inner.state != TaskState::Started
gst_log!(RUNTIME_CAT, "Task already paused or stopped"); && inner.state != TaskState::Paused
&& inner.state != TaskState::Pausing
{
gst_log!(RUNTIME_CAT, "Task not Started nor Paused");
return; return;
} }
@ -366,14 +457,16 @@ impl Task {
abort_handle.abort(); abort_handle.abort();
} }
inner.state = TaskState::Paused; inner.resume_sender = None;
inner.state = TaskState::Cancelled;
} }
/// Stops the `Started` `Task` and wait for it to finish. /// Stops the `Started` `Task` and wait for it to finish.
pub fn stop(&self) { pub fn stop(&self) {
let mut inner = self.0.lock().unwrap(); let mut inner = self.0.lock().unwrap();
if inner.state == TaskState::Stopped { if inner.state == TaskState::Stopped || inner.state == TaskState::Preparing {
gst_log!(RUNTIME_CAT, "Task already stopped"); gst_log!(RUNTIME_CAT, "Task loop already stopped");
return; return;
} }
@ -388,6 +481,9 @@ impl Task {
// And now wait for it to actually stop // And now wait for it to actually stop
let loop_handle = inner.loop_handle.take(); let loop_handle = inner.loop_handle.take();
inner.resume_sender = None;
let context = inner.context.as_ref().unwrap().clone(); let context = inner.context.as_ref().unwrap().clone();
drop(inner); drop(inner);
@ -550,4 +646,74 @@ mod tests {
task.unprepare().unwrap(); task.unprepare().unwrap();
gst_debug!(RUNTIME_CAT, "task test: unprepared"); gst_debug!(RUNTIME_CAT, "task test: unprepared");
} }
#[tokio::test]
async fn pause_start() {
use gst::gst_error;
gst::init().unwrap();
let context = Context::acquire("task_pause_start", 2).unwrap();
let task = Task::default();
task.prepare(context).unwrap();
let (iter_sender, mut iter_receiver) = mpsc::channel(0);
let iter_sender = Arc::new(Mutex::new(iter_sender));
let (mut complete_sender, complete_receiver) = mpsc::channel(0);
let complete_receiver = Arc::new(Mutex::new(complete_receiver));
gst_debug!(RUNTIME_CAT, "task_pause_start: starting");
task.start(move || {
let iter_sender = Arc::clone(&iter_sender);
let complete_receiver = Arc::clone(&complete_receiver);
async move {
gst_debug!(RUNTIME_CAT, "task_pause_start: entering iteration");
iter_sender.lock().await.send(()).await.unwrap();
gst_debug!(
RUNTIME_CAT,
"task_pause_start: iteration awaiting completion"
);
complete_receiver.lock().await.next().await.unwrap();
gst_debug!(RUNTIME_CAT, "task_pause_start: iteration complete");
glib::Continue(true)
}
});
gst_debug!(RUNTIME_CAT, "task_pause_start: awaiting 1st iteration");
iter_receiver.next().await.unwrap();
task.pause();
gst_debug!(
RUNTIME_CAT,
"task_pause_start: sending 1st iteration completion"
);
complete_sender.send(()).await.unwrap();
// Loop held on
iter_receiver.try_next().unwrap_err();
task.start(|| {
gst_error!(
RUNTIME_CAT,
"task_pause_start: reached start to resume closure"
);
future::pending()
});
gst_debug!(RUNTIME_CAT, "task_pause_start: awaiting 2d iteration");
iter_receiver.next().await.unwrap();
gst_debug!(
RUNTIME_CAT,
"task_pause_start: sending 2d iteration completion"
);
complete_sender.send(()).await.unwrap();
task.stop();
task.unprepare().unwrap();
}
} }


@@ -246,7 +246,7 @@ impl<T: SocketRead + 'static> SocketStream<T> {
         let (read_fut, clock, base_time) = {
             let mut inner = self.socket.0.lock().unwrap();
             if inner.state != SocketState::Started {
-                gst_debug!(SOCKET_CAT, obj: &inner.element, "DataQueue is not Started");
+                gst_debug!(SOCKET_CAT, obj: &inner.element, "Socket is not Started");
                 return None;
             }
 


@ -37,8 +37,8 @@ use rand;
use std::io; use std::io;
use std::net::{IpAddr, SocketAddr}; use std::net::{IpAddr, SocketAddr};
use std::sync::Arc;
use std::sync::Mutex as StdMutex; use std::sync::Mutex as StdMutex;
use std::sync::{self, Arc};
use std::u16; use std::u16;
use tokio::io::AsyncReadExt; use tokio::io::AsyncReadExt;
@ -177,29 +177,73 @@ impl SocketRead for TcpClientReader {
#[derive(Debug)] #[derive(Debug)]
struct TcpClientSrcPadHandlerState { struct TcpClientSrcPadHandlerState {
need_initial_events: bool, need_initial_events: bool,
need_segment: bool,
caps: Option<gst::Caps>, caps: Option<gst::Caps>,
configured_caps: Option<gst::Caps>,
} }
impl Default for TcpClientSrcPadHandlerState { impl Default for TcpClientSrcPadHandlerState {
fn default() -> Self { fn default() -> Self {
TcpClientSrcPadHandlerState { TcpClientSrcPadHandlerState {
need_initial_events: true, need_initial_events: true,
need_segment: true,
caps: None, caps: None,
configured_caps: None,
} }
} }
} }
#[derive(Debug, Default)] #[derive(Debug)]
struct TcpClientSrcPadHandlerInner { struct TcpClientSrcPadHandlerInner {
state: sync::RwLock<TcpClientSrcPadHandlerState>, state: FutMutex<TcpClientSrcPadHandlerState>,
configured_caps: StdMutex<Option<gst::Caps>>,
} }
#[derive(Clone, Debug, Default)] impl TcpClientSrcPadHandlerInner {
fn new(caps: Option<gst::Caps>) -> Self {
TcpClientSrcPadHandlerInner {
state: FutMutex::new(TcpClientSrcPadHandlerState {
caps,
..Default::default()
}),
configured_caps: StdMutex::new(None),
}
}
}
#[derive(Clone, Debug)]
struct TcpClientSrcPadHandler(Arc<TcpClientSrcPadHandlerInner>); struct TcpClientSrcPadHandler(Arc<TcpClientSrcPadHandlerInner>);
impl TcpClientSrcPadHandler { impl TcpClientSrcPadHandler {
fn new(caps: Option<gst::Caps>) -> Self {
TcpClientSrcPadHandler(Arc::new(TcpClientSrcPadHandlerInner::new(caps)))
}
fn reset(&self, pad: &PadSrcRef<'_>) {
// Precondition: task must be stopped
// TODO: assert the task state when Task & PadSrc are separated
gst_debug!(CAT, obj: pad.gst_pad(), "Resetting handler");
*self.0.state.try_lock().expect("State locked elsewhere") = Default::default();
*self.0.configured_caps.lock().unwrap() = None;
gst_debug!(CAT, obj: pad.gst_pad(), "Handler reset");
}
fn flush(&self, pad: &PadSrcRef<'_>) {
// Precondition: task must be stopped
// TODO: assert the task state when Task & PadSrc are separated
gst_debug!(CAT, obj: pad.gst_pad(), "Flushing");
self.0
.state
.try_lock()
.expect("state is locked elsewhere")
.need_segment = true;
gst_debug!(CAT, obj: pad.gst_pad(), "Flushed");
}
fn start_task( fn start_task(
&self, &self,
pad: PadSrcRef<'_>, pad: PadSrcRef<'_>,
@ -284,37 +328,31 @@ impl TcpClientSrcPadHandler {
} }
async fn push_prelude(&self, pad: &PadSrcRef<'_>, _element: &gst::Element) { async fn push_prelude(&self, pad: &PadSrcRef<'_>, _element: &gst::Element) {
let mut events = Vec::new(); let mut state = self.0.state.lock().await;
if state.need_initial_events {
// Only `read` the state in the hot path
if self.0.state.read().unwrap().need_initial_events {
// We will need to `write` and we also want to prevent
// any changes on the state while we are handling initial events
let mut state = self.0.state.write().unwrap();
assert!(state.need_initial_events);
gst_debug!(CAT, obj: pad.gst_pad(), "Pushing initial events"); gst_debug!(CAT, obj: pad.gst_pad(), "Pushing initial events");
let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>()); let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
events.push( let stream_start_evt = gst::Event::new_stream_start(&stream_id)
gst::Event::new_stream_start(&stream_id) .group_id(gst::GroupId::next())
.group_id(gst::GroupId::next()) .build();
.build(), pad.push_event(stream_start_evt).await;
);
if let Some(ref caps) = state.caps { if let Some(ref caps) = state.caps {
events.push(gst::Event::new_caps(&caps).build()); let caps_evt = gst::Event::new_caps(&caps).build();
state.configured_caps = Some(caps.clone()); pad.push_event(caps_evt).await;
*self.0.configured_caps.lock().unwrap() = Some(caps.clone());
} }
events.push(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
);
state.need_initial_events = false; state.need_initial_events = false;
} }
for event in events { if state.need_segment {
pad.push_event(event).await; let segment_evt =
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build();
pad.push_event(segment_evt).await;
state.need_segment = false;
} }
} }
@ -324,6 +362,8 @@ impl TcpClientSrcPadHandler {
element: &gst::Element, element: &gst::Element,
buffer: gst::Buffer, buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> { ) -> Result<gst::FlowSuccess, gst::FlowError> {
gst_log!(CAT, obj: pad.gst_pad(), "Handling {:?}", buffer);
self.push_prelude(pad, element).await; self.push_prelude(pad, element).await;
if buffer.get_size() == 0 { if buffer.get_size() == 0 {
@ -352,7 +392,7 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
let ret = match event.view() { let ret = match event.view() {
EventView::FlushStart(..) => { EventView::FlushStart(..) => {
tcpclientsrc.pause(element).unwrap(); tcpclientsrc.flush_start(element);
true true
} }
@ -396,8 +436,7 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
true true
} }
QueryView::Caps(ref mut q) => { QueryView::Caps(ref mut q) => {
let state = self.0.state.read().unwrap(); let caps = if let Some(caps) = self.0.configured_caps.lock().unwrap().as_ref() {
let caps = if let Some(ref caps) = state.configured_caps {
q.get_filter() q.get_filter()
.map(|f| f.intersect_with_mode(caps, gst::CapsIntersectMode::First)) .map(|f| f.intersect_with_mode(caps, gst::CapsIntersectMode::First))
.unwrap_or_else(|| caps.clone()) .unwrap_or_else(|| caps.clone())
@ -426,7 +465,7 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
struct TcpClientSrc { struct TcpClientSrc {
src_pad: PadSrc, src_pad: PadSrc,
src_pad_handler: TcpClientSrcPadHandler, src_pad_handler: StdMutex<Option<TcpClientSrcPadHandler>>,
socket: StdMutex<Option<Socket<TcpClientReader>>>, socket: StdMutex<Option<Socket<TcpClientReader>>>,
settings: StdMutex<Settings>, settings: StdMutex<Settings>,
} }
@ -499,16 +538,13 @@ impl TcpClientSrc {
) )
})?; })?;
{
let mut src_pad_handler_state = self.src_pad_handler.0.state.write().unwrap();
src_pad_handler_state.caps = settings.caps;
}
*socket_storage = Some(socket); *socket_storage = Some(socket);
drop(socket_storage); drop(socket_storage);
let src_pad_handler = TcpClientSrcPadHandler::new(settings.caps);
self.src_pad self.src_pad
.prepare(context, &self.src_pad_handler) .prepare(context, &src_pad_handler)
.map_err(|err| { .map_err(|err| {
gst_error_msg!( gst_error_msg!(
gst::ResourceError::OpenRead, gst::ResourceError::OpenRead,
@ -516,6 +552,8 @@ impl TcpClientSrc {
) )
})?; })?;
*self.src_pad_handler.lock().unwrap() = Some(src_pad_handler);
gst_debug!(CAT, obj: element, "Prepared"); gst_debug!(CAT, obj: element, "Prepared");
Ok(()) Ok(())
@ -529,8 +567,7 @@ impl TcpClientSrc {
} }
let _ = self.src_pad.unprepare(); let _ = self.src_pad.unprepare();
*self.src_pad_handler.lock().unwrap() = None;
*self.src_pad_handler.0.state.write().unwrap() = Default::default();
gst_debug!(CAT, obj: element, "Unprepared"); gst_debug!(CAT, obj: element, "Unprepared");
@ -545,11 +582,11 @@ impl TcpClientSrc {
self.src_pad.stop_task(); self.src_pad.stop_task();
self.src_pad_handler self.src_pad_handler
.0 .lock()
.state
.write()
.unwrap() .unwrap()
.need_initial_events = true; .as_ref()
.unwrap()
.reset(&self.src_pad.as_ref());
gst_debug!(CAT, obj: element, "Stopped"); gst_debug!(CAT, obj: element, "Stopped");
@ -591,6 +628,14 @@ impl TcpClientSrc {
gst_debug!(CAT, obj: element, "Stopping Flush"); gst_debug!(CAT, obj: element, "Stopping Flush");
self.src_pad.stop_task(); self.src_pad.stop_task();
self.src_pad_handler
.lock()
.unwrap()
.as_ref()
.unwrap()
.flush(&self.src_pad.as_ref());
self.start_unchecked(element, socket); self.start_unchecked(element, socket);
gst_debug!(CAT, obj: element, "Stopped Flush"); gst_debug!(CAT, obj: element, "Stopped Flush");
@ -602,9 +647,26 @@ impl TcpClientSrc {
.unwrap(); .unwrap();
self.src_pad_handler self.src_pad_handler
.lock()
.unwrap()
.as_ref()
.unwrap()
.start_task(self.src_pad.as_ref(), element, socket_stream); .start_task(self.src_pad.as_ref(), element, socket_stream);
} }
fn flush_start(&self, element: &gst::Element) {
let socket = self.socket.lock().unwrap();
gst_debug!(CAT, obj: element, "Starting Flush");
if let Some(socket) = socket.as_ref() {
socket.pause();
}
self.src_pad.cancel_task();
gst_debug!(CAT, obj: element, "Flush Started");
}
fn pause(&self, element: &gst::Element) -> Result<(), ()> { fn pause(&self, element: &gst::Element) -> Result<(), ()> {
let socket = self.socket.lock().unwrap(); let socket = self.socket.lock().unwrap();
gst_debug!(CAT, obj: element, "Pausing"); gst_debug!(CAT, obj: element, "Pausing");
@ -613,7 +675,7 @@ impl TcpClientSrc {
socket.pause(); socket.pause();
} }
self.src_pad.cancel_task(); self.src_pad.pause_task();
gst_debug!(CAT, obj: element, "Paused"); gst_debug!(CAT, obj: element, "Paused");
@ -656,7 +718,7 @@ impl ObjectSubclass for TcpClientSrc {
Self { Self {
src_pad, src_pad,
src_pad_handler: TcpClientSrcPadHandler::default(), src_pad_handler: StdMutex::new(None),
socket: StdMutex::new(None), socket: StdMutex::new(None),
settings: StdMutex::new(Settings::default()), settings: StdMutex::new(Settings::default()),
} }


@@ -649,6 +649,12 @@ impl UdpSinkPadHandler {
         }
     }
 
+    fn unprepare(&self) {
+        if let Some(task) = &self.0.read().unwrap().task {
+            task.unprepare().unwrap();
+        }
+    }
+
     fn stop_task(&self) {
         if let Some(task) = &self.0.read().unwrap().task {
             task.stop();
@@ -959,6 +965,7 @@ impl UdpSink {
     fn unprepare(&self, element: &gst::Element) -> Result<(), ()> {
         gst_debug!(CAT, obj: element, "Unpreparing");
 
+        self.sink_pad_handler.unprepare();
         self.sink_pad.unprepare();
 
         gst_debug!(CAT, obj: element, "Unprepared");


@ -39,8 +39,8 @@ use rand;
use std::io; use std::io;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr}; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
use std::sync::Arc;
use std::sync::Mutex as StdMutex; use std::sync::Mutex as StdMutex;
use std::sync::{self, Arc};
use std::u16; use std::u16;
use crate::runtime::prelude::*; use crate::runtime::prelude::*;
@ -230,8 +230,8 @@ impl SocketRead for UdpReader {
struct UdpSrcPadHandlerState { struct UdpSrcPadHandlerState {
retrieve_sender_address: bool, retrieve_sender_address: bool,
need_initial_events: bool, need_initial_events: bool,
need_segment: bool,
caps: Option<gst::Caps>, caps: Option<gst::Caps>,
configured_caps: Option<gst::Caps>,
} }
impl Default for UdpSrcPadHandlerState { impl Default for UdpSrcPadHandlerState {
@ -239,21 +239,69 @@ impl Default for UdpSrcPadHandlerState {
UdpSrcPadHandlerState { UdpSrcPadHandlerState {
retrieve_sender_address: true, retrieve_sender_address: true,
need_initial_events: true, need_initial_events: true,
need_segment: true,
caps: None, caps: None,
configured_caps: None,
} }
} }
} }
#[derive(Debug, Default)] #[derive(Debug)]
struct UdpSrcPadHandlerInner { struct UdpSrcPadHandlerInner {
state: sync::RwLock<UdpSrcPadHandlerState>, state: FutMutex<UdpSrcPadHandlerState>,
configured_caps: StdMutex<Option<gst::Caps>>,
} }
#[derive(Clone, Debug, Default)] impl UdpSrcPadHandlerInner {
fn new(caps: Option<gst::Caps>, retrieve_sender_address: bool) -> Self {
UdpSrcPadHandlerInner {
state: FutMutex::new(UdpSrcPadHandlerState {
retrieve_sender_address,
caps,
..Default::default()
}),
configured_caps: StdMutex::new(None),
}
}
}
#[derive(Clone, Debug)]
struct UdpSrcPadHandler(Arc<UdpSrcPadHandlerInner>); struct UdpSrcPadHandler(Arc<UdpSrcPadHandlerInner>);
impl UdpSrcPadHandler { impl UdpSrcPadHandler {
fn new(caps: Option<gst::Caps>, retrieve_sender_address: bool) -> UdpSrcPadHandler {
UdpSrcPadHandler(Arc::new(UdpSrcPadHandlerInner::new(
caps,
retrieve_sender_address,
)))
}
fn reset(&self, pad: &PadSrcRef<'_>) {
// Precondition: task must be stopped
// TODO: assert the task state when Task & PadSrc are separated
gst_debug!(CAT, obj: pad.gst_pad(), "Resetting handler");
*self.0.state.try_lock().expect("State locked elsewhere") = Default::default();
*self.0.configured_caps.lock().unwrap() = None;
gst_debug!(CAT, obj: pad.gst_pad(), "Handler reset");
}
fn flush(&self, pad: &PadSrcRef<'_>) {
// Precondition: task must be stopped
// TODO: assert the task state when Task & PadSrc are separated
gst_debug!(CAT, obj: pad.gst_pad(), "Flushing");
self.0
.state
.try_lock()
.expect("state is locked elsewhere")
.need_segment = true;
gst_debug!(CAT, obj: pad.gst_pad(), "Flushed");
}
fn start_task( fn start_task(
&self, &self,
pad: PadSrcRef<'_>, pad: PadSrcRef<'_>,
@ -306,7 +354,7 @@ impl UdpSrcPadHandler {
}; };
if let Some(saddr) = saddr { if let Some(saddr) = saddr {
if this.0.state.read().unwrap().retrieve_sender_address { if this.0.state.lock().await.retrieve_sender_address {
let inet_addr = match saddr.ip() { let inet_addr = match saddr.ip() {
IpAddr::V4(ip) => gio::InetAddress::new_from_bytes( IpAddr::V4(ip) => gio::InetAddress::new_from_bytes(
gio::InetAddressBytes::V4(&ip.octets()), gio::InetAddressBytes::V4(&ip.octets()),
@ -354,37 +402,31 @@ impl UdpSrcPadHandler {
} }
async fn push_prelude(&self, pad: &PadSrcRef<'_>, _element: &gst::Element) { async fn push_prelude(&self, pad: &PadSrcRef<'_>, _element: &gst::Element) {
let mut events = Vec::new(); let mut state = self.0.state.lock().await;
if state.need_initial_events {
// Only `read` the state in the hot path
if self.0.state.read().unwrap().need_initial_events {
// We will need to `write` and we also want to prevent
// any changes on the state while we are handling initial events
let mut state = self.0.state.write().unwrap();
assert!(state.need_initial_events);
gst_debug!(CAT, obj: pad.gst_pad(), "Pushing initial events"); gst_debug!(CAT, obj: pad.gst_pad(), "Pushing initial events");
let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>()); let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
events.push( let stream_start_evt = gst::Event::new_stream_start(&stream_id)
gst::Event::new_stream_start(&stream_id) .group_id(gst::GroupId::next())
.group_id(gst::GroupId::next()) .build();
.build(), pad.push_event(stream_start_evt).await;
);
if let Some(ref caps) = state.caps { if let Some(ref caps) = state.caps {
events.push(gst::Event::new_caps(&caps).build()); let caps_evt = gst::Event::new_caps(&caps).build();
state.configured_caps = Some(caps.clone()); pad.push_event(caps_evt).await;
*self.0.configured_caps.lock().unwrap() = Some(caps.clone());
} }
events.push(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
);
state.need_initial_events = false; state.need_initial_events = false;
} }
for event in events { if state.need_segment {
pad.push_event(event).await; let segment_evt =
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build();
pad.push_event(segment_evt).await;
state.need_segment = false;
} }
} }
@ -394,6 +436,8 @@ impl UdpSrcPadHandler {
element: &gst::Element, element: &gst::Element,
buffer: gst::Buffer, buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> { ) -> Result<gst::FlowSuccess, gst::FlowError> {
gst_log!(CAT, obj: pad.gst_pad(), "Handling {:?}", buffer);
self.push_prelude(pad, element).await; self.push_prelude(pad, element).await;
pad.push(buffer).await pad.push(buffer).await
@ -416,7 +460,7 @@ impl PadSrcHandler for UdpSrcPadHandler {
let ret = match event.view() { let ret = match event.view() {
EventView::FlushStart(..) => { EventView::FlushStart(..) => {
udpsrc.pause(element).unwrap(); udpsrc.flush_start(element);
true true
} }
@ -461,8 +505,7 @@ impl PadSrcHandler for UdpSrcPadHandler {
true true
} }
QueryView::Caps(ref mut q) => { QueryView::Caps(ref mut q) => {
let state = self.0.state.read().unwrap(); let caps = if let Some(caps) = self.0.configured_caps.lock().unwrap().as_ref() {
let caps = if let Some(ref caps) = state.configured_caps {
q.get_filter() q.get_filter()
.map(|f| f.intersect_with_mode(caps, gst::CapsIntersectMode::First)) .map(|f| f.intersect_with_mode(caps, gst::CapsIntersectMode::First))
.unwrap_or_else(|| caps.clone()) .unwrap_or_else(|| caps.clone())
@ -491,7 +534,7 @@ impl PadSrcHandler for UdpSrcPadHandler {
struct UdpSrc { struct UdpSrc {
src_pad: PadSrc, src_pad: PadSrc,
src_pad_handler: UdpSrcPadHandler, src_pad_handler: StdMutex<Option<UdpSrcPadHandler>>,
socket: StdMutex<Option<Socket<UdpReader>>>, socket: StdMutex<Option<Socket<UdpReader>>>,
settings: StdMutex<Settings>, settings: StdMutex<Settings>,
} }
@ -686,19 +729,16 @@ impl UdpSrc {
) )
})?; })?;
{
let mut src_pad_handler_state = self.src_pad_handler.0.state.write().unwrap();
src_pad_handler_state.retrieve_sender_address = settings.retrieve_sender_address;
src_pad_handler_state.caps = settings.caps;
}
*socket_storage = Some(socket); *socket_storage = Some(socket);
drop(socket_storage); drop(socket_storage);
element.notify("used-socket"); element.notify("used-socket");
let src_pad_handler =
UdpSrcPadHandler::new(settings.caps, settings.retrieve_sender_address);
self.src_pad self.src_pad
.prepare(context, &self.src_pad_handler) .prepare(context, &src_pad_handler)
.map_err(|err| { .map_err(|err| {
gst_error_msg!( gst_error_msg!(
gst::ResourceError::OpenRead, gst::ResourceError::OpenRead,
@ -706,6 +746,8 @@ impl UdpSrc {
) )
})?; })?;
*self.src_pad_handler.lock().unwrap() = Some(src_pad_handler);
gst_debug!(CAT, obj: element, "Prepared"); gst_debug!(CAT, obj: element, "Prepared");
Ok(()) Ok(())
@ -722,8 +764,7 @@ impl UdpSrc {
} }
let _ = self.src_pad.unprepare(); let _ = self.src_pad.unprepare();
*self.src_pad_handler.lock().unwrap() = None;
*self.src_pad_handler.0.state.write().unwrap() = Default::default();
gst_debug!(CAT, obj: element, "Unprepared"); gst_debug!(CAT, obj: element, "Unprepared");
@ -738,11 +779,11 @@ impl UdpSrc {
self.src_pad.stop_task(); self.src_pad.stop_task();
self.src_pad_handler self.src_pad_handler
.0 .lock()
.state
.write()
.unwrap() .unwrap()
.need_initial_events = true; .as_ref()
.unwrap()
.reset(&self.src_pad.as_ref());
gst_debug!(CAT, obj: element, "Stopped"); gst_debug!(CAT, obj: element, "Stopped");
@ -784,6 +825,14 @@ impl UdpSrc {
gst_debug!(CAT, obj: element, "Stopping Flush"); gst_debug!(CAT, obj: element, "Stopping Flush");
self.src_pad.stop_task(); self.src_pad.stop_task();
self.src_pad_handler
.lock()
.unwrap()
.as_ref()
.unwrap()
.flush(&self.src_pad.as_ref());
self.start_unchecked(element, socket); self.start_unchecked(element, socket);
gst_debug!(CAT, obj: element, "Stopped Flush"); gst_debug!(CAT, obj: element, "Stopped Flush");
@ -795,9 +844,26 @@ impl UdpSrc {
.unwrap(); .unwrap();
self.src_pad_handler self.src_pad_handler
.lock()
.unwrap()
.as_ref()
.unwrap()
.start_task(self.src_pad.as_ref(), element, socket_stream); .start_task(self.src_pad.as_ref(), element, socket_stream);
} }
fn flush_start(&self, element: &gst::Element) {
let socket = self.socket.lock().unwrap();
gst_debug!(CAT, obj: element, "Starting Flush");
if let Some(socket) = socket.as_ref() {
socket.pause();
}
self.src_pad.cancel_task();
gst_debug!(CAT, obj: element, "Flush Started");
}
fn pause(&self, element: &gst::Element) -> Result<(), ()> { fn pause(&self, element: &gst::Element) -> Result<(), ()> {
let socket = self.socket.lock().unwrap(); let socket = self.socket.lock().unwrap();
gst_debug!(CAT, obj: element, "Pausing"); gst_debug!(CAT, obj: element, "Pausing");
@ -806,7 +872,7 @@ impl UdpSrc {
socket.pause(); socket.pause();
} }
self.src_pad.cancel_task(); self.src_pad.pause_task();
gst_debug!(CAT, obj: element, "Paused"); gst_debug!(CAT, obj: element, "Paused");
@ -865,7 +931,7 @@ impl ObjectSubclass for UdpSrc {
Self { Self {
src_pad, src_pad,
src_pad_handler: UdpSrcPadHandler::default(), src_pad_handler: StdMutex::new(None),
socket: StdMutex::new(None), socket: StdMutex::new(None),
settings: StdMutex::new(Settings::default()), settings: StdMutex::new(Settings::default()),
} }


@ -18,6 +18,8 @@
use glib::prelude::*; use glib::prelude::*;
use gst; use gst;
use gst::prelude::*;
use gst_check; use gst_check;
use gstthreadshare; use gstthreadshare;
@ -33,7 +35,7 @@ fn init() {
} }
#[test] #[test]
fn test_push() { fn push() {
init(); init();
let mut h = gst_check::Harness::new("ts-appsrc"); let mut h = gst_check::Harness::new("ts-appsrc");
@ -43,7 +45,7 @@ fn test_push() {
let appsrc = h.get_element().unwrap(); let appsrc = h.get_element().unwrap();
appsrc.set_property("caps", &caps).unwrap(); appsrc.set_property("caps", &caps).unwrap();
appsrc.set_property("do-timestamp", &true).unwrap(); appsrc.set_property("do-timestamp", &true).unwrap();
appsrc.set_property("context", &"test-push").unwrap(); appsrc.set_property("context", &"appsrc-push").unwrap();
} }
h.play(); h.play();
@ -97,3 +99,205 @@ fn test_push() {
} }
assert!(n_events >= 2); assert!(n_events >= 2);
} }
#[test]
fn pause() {
init();
let mut h = gst_check::Harness::new("ts-appsrc");
let caps = gst::Caps::new_simple("foo/bar", &[]);
{
let appsrc = h.get_element().unwrap();
appsrc.set_property("caps", &caps).unwrap();
appsrc.set_property("do-timestamp", &true).unwrap();
appsrc.set_property("context", &"appsrc-pause").unwrap();
}
h.play();
let appsrc = h.get_element().unwrap();
// Initial buffer
assert!(appsrc
.emit("push-buffer", &[&gst::Buffer::from_slice(vec![1, 2, 3, 4])])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
let _ = h.pull().unwrap();
appsrc
.change_state(gst::StateChange::PlayingToPaused)
.unwrap();
// Pre-pause buffer
assert!(appsrc
.emit("push-buffer", &[&gst::Buffer::from_slice(vec![5, 6, 7])])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
appsrc
.change_state(gst::StateChange::PlayingToPaused)
.unwrap();
// Buffer is queued during Paused
assert!(appsrc
.emit("push-buffer", &[&gst::Buffer::from_slice(vec![8, 9])])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
appsrc
.change_state(gst::StateChange::PausedToPlaying)
.unwrap();
// Pull Pre-pause buffer
let _ = h.pull().unwrap();
// Pull buffer queued while Paused
let _ = h.pull().unwrap();
// Can push again
assert!(appsrc
.emit("push-buffer", &[&gst::Buffer::new()])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
let _ = h.pull().unwrap();
assert!(h.try_pull().is_none());
}
#[test]
fn flush() {
init();
let mut h = gst_check::Harness::new("ts-appsrc");
let caps = gst::Caps::new_simple("foo/bar", &[]);
{
let appsrc = h.get_element().unwrap();
appsrc.set_property("caps", &caps).unwrap();
appsrc.set_property("do-timestamp", &true).unwrap();
appsrc.set_property("context", &"appsrc-flush").unwrap();
}
h.play();
let appsrc = h.get_element().unwrap();
// Initial buffer
assert!(appsrc
.emit("push-buffer", &[&gst::Buffer::from_slice(vec![1, 2, 3, 4])])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
let _ = h.pull().unwrap();
// FlushStart
assert!(h.push_upstream_event(gst::Event::new_flush_start().build()));
// Can't push buffer while flushing
assert!(!appsrc
.emit("push-buffer", &[&gst::Buffer::new()])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
assert!(h.try_pull().is_none());
// FlushStop
assert!(h.push_upstream_event(gst::Event::new_flush_stop(true).build()));
// No buffer available due to flush
assert!(h.try_pull().is_none());
// Can push again
assert!(appsrc
.emit("push-buffer", &[&gst::Buffer::new()])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
let _ = h.pull().unwrap();
assert!(h.try_pull().is_none());
}
#[test]
fn pause_flush() {
init();
let mut h = gst_check::Harness::new("ts-appsrc");
let caps = gst::Caps::new_simple("foo/bar", &[]);
{
let appsrc = h.get_element().unwrap();
appsrc.set_property("caps", &caps).unwrap();
appsrc.set_property("do-timestamp", &true).unwrap();
appsrc
.set_property("context", &"appsrc-pause_flush")
.unwrap();
}
h.play();
let appsrc = h.get_element().unwrap();
// Initial buffer
assert!(appsrc
.emit("push-buffer", &[&gst::Buffer::from_slice(vec![1, 2, 3, 4])])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
let _ = h.pull().unwrap();
appsrc
.change_state(gst::StateChange::PlayingToPaused)
.unwrap();
// FlushStart
assert!(h.push_upstream_event(gst::Event::new_flush_start().build()));
// Can't push buffer while flushing
assert!(!appsrc
.emit("push-buffer", &[&gst::Buffer::new()])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
assert!(h.try_pull().is_none());
// FlushStop
assert!(h.push_upstream_event(gst::Event::new_flush_stop(true).build()));
appsrc
.change_state(gst::StateChange::PausedToPlaying)
.unwrap();
// No buffer available due to flush
assert!(h.try_pull().is_none());
// Can push again
assert!(appsrc
.emit("push-buffer", &[&gst::Buffer::new()])
.unwrap()
.unwrap()
.get_some::<bool>()
.unwrap());
let _ = h.pull().unwrap();
assert!(h.try_pull().is_none());
}

View file

@ -29,13 +29,14 @@ use gst;
use gst::prelude::*; use gst::prelude::*;
use gst::subclass::prelude::*; use gst::subclass::prelude::*;
use gst::EventView; use gst::EventView;
use gst::{gst_debug, gst_error_msg, gst_log}; use gst::{gst_debug, gst_error_msg, gst_info, gst_log};
use lazy_static::lazy_static; use lazy_static::lazy_static;
use std::boxed::Box; use std::boxed::Box;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex}; use std::sync::Arc;
use std::sync::Mutex as StdMutex;
use gstthreadshare::runtime::prelude::*; use gstthreadshare::runtime::prelude::*;
use gstthreadshare::runtime::{Context, PadSink, PadSinkRef, PadSrc, PadSrcRef}; use gstthreadshare::runtime::{Context, PadSink, PadSinkRef, PadSrc, PadSrcRef};
@ -79,36 +80,93 @@ lazy_static! {
); );
} }
#[derive(Clone, Debug, Default)] #[derive(Debug)]
struct PadSrcHandlerTest; struct PadSrcHandlerTestInner {
receiver: FutMutex<mpsc::Receiver<Item>>,
}
impl PadSrcHandlerTestInner {
fn new(receiver: mpsc::Receiver<Item>) -> PadSrcHandlerTestInner {
PadSrcHandlerTestInner {
receiver: FutMutex::new(receiver),
}
}
}
#[derive(Clone, Debug)]
struct PadSrcHandlerTest(Arc<PadSrcHandlerTestInner>);
impl PadSrcHandlerTest { impl PadSrcHandlerTest {
fn start_task(&self, pad: PadSrcRef<'_>, receiver: mpsc::Receiver<Item>) { fn new(receiver: mpsc::Receiver<Item>) -> PadSrcHandlerTest {
PadSrcHandlerTest(Arc::new(PadSrcHandlerTestInner::new(receiver)))
}
fn stop(&self, pad: &PadSrcRef<'_>) {
gst_debug!(SRC_CAT, obj: pad.gst_pad(), "Stopping handler");
pad.stop_task();
// From here on, the task is stopped so it can't hold resources anymore
self.flush(pad);
gst_debug!(SRC_CAT, obj: pad.gst_pad(), "Handler stopped");
}
fn flush(&self, pad: &PadSrcRef<'_>) {
// Precondition: task must be stopped
// TODO: assert the task state when Task & PadSrc are separated
gst_debug!(SRC_CAT, obj: pad.gst_pad(), "Flushing");
// Purge the channel
let mut receiver = self
.0
.receiver
.try_lock()
.expect("Channel receiver is locked elsewhere");
loop {
match receiver.try_next() {
Ok(Some(_item)) => {
gst_log!(SRC_CAT, obj: pad.gst_pad(), "Dropping pending item");
}
Err(_) => {
gst_log!(SRC_CAT, obj: pad.gst_pad(), "No more pending item");
break;
}
Ok(None) => {
panic!("Channel sender dropped");
}
}
}
gst_debug!(SRC_CAT, obj: pad.gst_pad(), "Flushed");
}
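// The purge loop above in generic form, as a sketch (not part of the patch):
// drain whatever is already queued in a futures mpsc receiver without
// awaiting new items. Assumes the `futures::channel::mpsc` import already
// used by this file; `drain_pending` is a hypothetical name.
fn drain_pending<T>(receiver: &mut mpsc::Receiver<T>) -> usize {
    let mut dropped = 0;
    loop {
        match receiver.try_next() {
            // An item was pending: drop it and keep draining
            Ok(Some(_item)) => dropped += 1,
            // Nothing queued right now: done
            Err(_) => break,
            // Sender side dropped: channel closed, nothing more to drain
            Ok(None) => break,
        }
    }
    dropped
}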
fn start_task(&self, pad: PadSrcRef<'_>) {
gst_debug!(SRC_CAT, obj: pad.gst_pad(), "SrcPad task starting"); gst_debug!(SRC_CAT, obj: pad.gst_pad(), "SrcPad task starting");
let this = self.clone();
let pad_weak = pad.downgrade(); let pad_weak = pad.downgrade();
let receiver = Arc::new(FutMutex::new(receiver));
pad.start_task(move || { pad.start_task(move || {
let pad_weak = pad_weak.clone(); let pad_weak = pad_weak.clone();
let receiver = Arc::clone(&receiver); let this = this.clone();
async move { async move {
let item = this.0.receiver.lock().await.next().await;
let pad = pad_weak.upgrade().expect("PadSrc no longer exists"); let pad = pad_weak.upgrade().expect("PadSrc no longer exists");
let item = { let item = match item {
let mut receiver = receiver.lock().await; Some(item) => item,
None => {
match receiver.next().await { gst_log!(SRC_CAT, obj: pad.gst_pad(), "SrcPad channel aborted");
Some(item) => item, return glib::Continue(false);
None => {
gst_debug!(SRC_CAT, obj: pad.gst_pad(), "SrcPad channel aborted");
return glib::Continue(false);
}
} }
}; };
// We could also check here first if we're flushing but as we're not doing anything let pad = pad_weak.upgrade().expect("PadSrc no longer exists");
// complicated below we can just defer that to the pushing function match this.push_item(pad, item).await {
match Self::push_item(pad, item).await {
Ok(_) => glib::Continue(true), Ok(_) => glib::Continue(true),
Err(gst::FlowError::Flushing) => glib::Continue(false), Err(gst::FlowError::Flushing) => glib::Continue(false),
Err(err) => panic!("Got error {:?}", err), Err(err) => panic!("Got error {:?}", err),
@ -117,7 +175,13 @@ impl PadSrcHandlerTest {
}); });
} }
async fn push_item(pad: PadSrcRef<'_>, item: Item) -> Result<gst::FlowSuccess, gst::FlowError> { async fn push_item(
self,
pad: PadSrcRef<'_>,
item: Item,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst_debug!(SRC_CAT, obj: pad.gst_pad(), "Handling {:?}", item);
match item { match item {
Item::Event(event) => { Item::Event(event) => {
pad.push_event(event).await; pad.push_event(event).await;
@ -144,9 +208,7 @@ impl PadSrcHandler for PadSrcHandlerTest {
let ret = match event.view() { let ret = match event.view() {
EventView::FlushStart(..) => { EventView::FlushStart(..) => {
// Cancel the task so that it finishes ASAP elem_src_test.flush_start(element);
// and clear the sender
elem_src_test.pause(element).unwrap();
true true
} }
EventView::Qos(..) | EventView::Reconfigure(..) | EventView::Latency(..) => true, EventView::Qos(..) | EventView::Reconfigure(..) | EventView::Latency(..) => true,
@ -167,16 +229,34 @@ impl PadSrcHandler for PadSrcHandlerTest {
} }
} }
#[derive(Debug, Eq, PartialEq)]
enum ElementSrcTestState {
Paused,
RejectItems,
Started,
}
#[derive(Debug)] #[derive(Debug)]
struct ElementSrcTest { struct ElementSrcTest {
src_pad: PadSrc, src_pad: PadSrc,
src_pad_handler: PadSrcHandlerTest, src_pad_handler: StdMutex<Option<PadSrcHandlerTest>>,
sender: Mutex<Option<mpsc::Sender<Item>>>, state: StdMutex<ElementSrcTestState>,
settings: Mutex<Settings>, sender: StdMutex<Option<mpsc::Sender<Item>>>,
settings: StdMutex<Settings>,
} }
impl ElementSrcTest { impl ElementSrcTest {
fn try_push(&self, item: Item) -> Result<(), Item> { fn try_push(&self, item: Item) -> Result<(), Item> {
let state = self.state.lock().unwrap();
if *state == ElementSrcTestState::RejectItems {
gst_debug!(
SRC_CAT,
"ElementSrcTest rejecting item due to element state"
);
return Err(item);
}
match self.sender.lock().unwrap().as_mut() { match self.sender.lock().unwrap().as_mut() {
Some(sender) => sender Some(sender) => sender
.try_send(item) .try_send(item)
@ -196,8 +276,13 @@ impl ElementSrcTest {
) )
})?; })?;
let (sender, receiver) = mpsc::channel(1);
*self.sender.lock().unwrap() = Some(sender);
let src_pad_handler = PadSrcHandlerTest::new(receiver);
self.src_pad self.src_pad
.prepare(context, &self.src_pad_handler) .prepare(context, &src_pad_handler)
.map_err(|err| { .map_err(|err| {
gst_error_msg!( gst_error_msg!(
gst::ResourceError::OpenRead, gst::ResourceError::OpenRead,
@ -205,6 +290,8 @@ impl ElementSrcTest {
) )
})?; })?;
*self.src_pad_handler.lock().unwrap() = Some(src_pad_handler);
gst_debug!(SRC_CAT, obj: element, "Prepared"); gst_debug!(SRC_CAT, obj: element, "Prepared");
Ok(()) Ok(())
@ -214,6 +301,7 @@ impl ElementSrcTest {
gst_debug!(SRC_CAT, obj: element, "Unpreparing"); gst_debug!(SRC_CAT, obj: element, "Unpreparing");
self.src_pad.unprepare().unwrap(); self.src_pad.unprepare().unwrap();
*self.src_pad_handler.lock().unwrap() = None;
gst_debug!(SRC_CAT, obj: element, "Unprepared"); gst_debug!(SRC_CAT, obj: element, "Unprepared");
@ -221,15 +309,15 @@ impl ElementSrcTest {
} }
fn start(&self, element: &gst::Element) -> Result<(), ()> { fn start(&self, element: &gst::Element) -> Result<(), ()> {
let mut sender = self.sender.lock().unwrap(); let mut state = self.state.lock().unwrap();
if sender.is_some() { if *state == ElementSrcTestState::Started {
gst_debug!(SRC_CAT, obj: element, "Already started"); gst_debug!(SRC_CAT, obj: element, "Already started");
return Err(()); return Err(());
} }
gst_debug!(SRC_CAT, obj: element, "Starting"); gst_debug!(SRC_CAT, obj: element, "Starting");
self.start_unchecked(&mut sender); self.start_unchecked(&mut state);
gst_debug!(SRC_CAT, obj: element, "Started"); gst_debug!(SRC_CAT, obj: element, "Started");
@ -237,12 +325,8 @@ impl ElementSrcTest {
} }
fn flush_stop(&self, element: &gst::Element) { fn flush_stop(&self, element: &gst::Element) {
// Keep the lock on the `sender` until `flush_stop` is complete let mut state = self.state.lock().unwrap();
// so as to prevent race conditions due to concurrent state transitions. if *state == ElementSrcTestState::Started {
// Note that this won't deadlock as `sender` is not used
// within the `src_pad`'s `Task`.
let mut sender = self.sender.lock().unwrap();
if sender.is_some() {
gst_debug!(SRC_CAT, obj: element, "Already started"); gst_debug!(SRC_CAT, obj: element, "Already started");
return; return;
} }
@ -252,30 +336,52 @@ impl ElementSrcTest {
// Stop it so we wait for it to actually finish // Stop it so we wait for it to actually finish
self.src_pad.stop_task(); self.src_pad.stop_task();
self.src_pad_handler
.lock()
.unwrap()
.as_ref()
.unwrap()
.flush(&self.src_pad.as_ref());
// And then start it again // And then start it again
self.start_unchecked(&mut sender); self.start_unchecked(&mut state);
gst_debug!(SRC_CAT, obj: element, "Stopped Flush"); gst_debug!(SRC_CAT, obj: element, "Stopped Flush");
} }
fn start_unchecked(&self, sender: &mut Option<mpsc::Sender<Item>>) { fn start_unchecked(&self, state: &mut ElementSrcTestState) {
// Start the task and set up the sender. We only accept
// data in Playing
let (sender_new, receiver) = mpsc::channel(1);
*sender = Some(sender_new);
self.src_pad_handler self.src_pad_handler
.start_task(self.src_pad.as_ref(), receiver); .lock()
.unwrap()
.as_ref()
.unwrap()
.start_task(self.src_pad.as_ref());
*state = ElementSrcTestState::Started;
}
fn flush_start(&self, element: &gst::Element) {
// Keep the lock on the `state` until `flush_start` is complete
// so as to prevent race conditions due to concurrent state transitions.
let mut state = self.state.lock().unwrap();
gst_debug!(SRC_CAT, obj: element, "Starting Flush");
*state = ElementSrcTestState::RejectItems;
self.src_pad.cancel_task();
gst_debug!(SRC_CAT, obj: element, "Flush Started");
} }
fn pause(&self, element: &gst::Element) -> Result<(), ()> { fn pause(&self, element: &gst::Element) -> Result<(), ()> {
let mut sender = self.sender.lock().unwrap(); // Lock the state to prevent race condition due to concurrent FlushStop
let mut state = self.state.lock().unwrap();
gst_debug!(SRC_CAT, obj: element, "Pausing"); gst_debug!(SRC_CAT, obj: element, "Pausing");
// Cancel task, we only accept data in Playing self.src_pad.pause_task();
self.src_pad.cancel_task();
// Prevent subsequent items from being enqueued *state = ElementSrcTestState::Paused;
*sender = None;
gst_debug!(SRC_CAT, obj: element, "Paused"); gst_debug!(SRC_CAT, obj: element, "Paused");
@ -285,9 +391,14 @@ impl ElementSrcTest {
fn stop(&self, element: &gst::Element) -> Result<(), ()> { fn stop(&self, element: &gst::Element) -> Result<(), ()> {
gst_debug!(SRC_CAT, obj: element, "Stopping"); gst_debug!(SRC_CAT, obj: element, "Stopping");
// Now stop the task if it was still running, blocking *self.state.lock().unwrap() = ElementSrcTestState::RejectItems;
// until this has actually happened
self.src_pad.stop_task(); self.src_pad_handler
.lock()
.unwrap()
.as_ref()
.unwrap()
.stop(&self.src_pad.as_ref());
gst_debug!(SRC_CAT, obj: element, "Stopped"); gst_debug!(SRC_CAT, obj: element, "Stopped");
@ -334,9 +445,10 @@ impl ObjectSubclass for ElementSrcTest {
ElementSrcTest { ElementSrcTest {
src_pad, src_pad,
src_pad_handler: PadSrcHandlerTest::default(), src_pad_handler: StdMutex::new(None),
sender: Mutex::new(None), state: StdMutex::new(ElementSrcTestState::RejectItems),
settings: Mutex::new(settings), sender: StdMutex::new(None),
settings: StdMutex::new(settings),
} }
} }
} }
@ -595,6 +707,24 @@ impl ElementSinkTest {
} }
} }
impl ElementSinkTest {
fn push_flush_start(&self, element: &gst::Element) {
gst_debug!(SINK_CAT, obj: element, "Pushing FlushStart");
self.sink_pad
.gst_pad()
.push_event(gst::Event::new_flush_start().build());
gst_debug!(SINK_CAT, obj: element, "FlushStart pushed");
}
fn push_flush_stop(&self, element: &gst::Element) {
gst_debug!(SINK_CAT, obj: element, "Pushing FlushStop");
self.sink_pad
.gst_pad()
.push_event(gst::Event::new_flush_stop(true).build());
gst_debug!(SINK_CAT, obj: element, "FlushStop pushed");
}
}
lazy_static! { lazy_static! {
static ref SINK_CAT: gst::DebugCategory = gst::DebugCategory::new( static ref SINK_CAT: gst::DebugCategory = gst::DebugCategory::new(
"ts-element-sink-test", "ts-element-sink-test",
@ -823,10 +953,10 @@ fn nominal_scenario(
// Pause the Pad task // Pause the Pad task
pipeline.set_state(gst::State::Paused).unwrap(); pipeline.set_state(gst::State::Paused).unwrap();
// Items not longer accepted // Item accepted, but not processed before switching to Playing again
elem_src_test elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![1, 2, 3, 4]))) .try_push(Item::Buffer(gst::Buffer::from_slice(vec![5, 6, 7])))
.unwrap_err(); .unwrap();
// Nothing forwarded // Nothing forwarded
receiver.try_next().unwrap_err(); receiver.try_next().unwrap_err();
@ -834,8 +964,13 @@ fn nominal_scenario(
// Switch back the Pad task to Started // Switch back the Pad task to Started
pipeline.set_state(gst::State::Playing).unwrap(); pipeline.set_state(gst::State::Playing).unwrap();
// Still nothing forwarded match futures::executor::block_on(receiver.next()).unwrap() {
receiver.try_next().unwrap_err(); Item::Buffer(buffer) => {
let data = buffer.map_readable().unwrap();
assert_eq!(data.as_slice(), vec![5, 6, 7].as_slice());
}
other => panic!("Unexpected item {:?}", other),
}
// Flush // Flush
src_element.send_event(gst::Event::new_flush_start().build()); src_element.send_event(gst::Event::new_flush_start().build());
@ -849,6 +984,33 @@ fn nominal_scenario(
other => panic!("Unexpected item {:?}", other), other => panic!("Unexpected item {:?}", other),
} }
elem_src_test
.try_push(Item::Event(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::Segment(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
// Buffer
elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![8, 9])))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Buffer(buffer) => {
let data = buffer.map_readable().unwrap();
assert_eq!(data.as_slice(), vec![8, 9].as_slice());
}
other => panic!("Unexpected item {:?}", other),
}
// EOS // EOS
elem_src_test elem_src_test
.try_push(Item::Event(gst::Event::new_eos().build())) .try_push(Item::Event(gst::Event::new_eos().build()))
@ -885,24 +1047,24 @@ fn src_sink_nominal() {
nominal_scenario(&name, pipeline, src_element, receiver); nominal_scenario(&name, pipeline, src_element, receiver);
} }
// #[test] #[test]
// fn src_tsqueue_sink_nominal() { fn src_tsqueue_sink_nominal() {
// init(); init();
//
// let name = "src_tsqueue_sink"; let name = "src_tsqueue_sink";
//
// let ts_queue = gst::ElementFactory::make("ts-queue", Some("ts-queue")).unwrap(); let ts_queue = gst::ElementFactory::make("ts-queue", Some("ts-queue")).unwrap();
// ts_queue ts_queue
// .set_property("context", &format!("{}_queue", name)) .set_property("context", &format!("{}_queue", name))
// .unwrap(); .unwrap();
// ts_queue ts_queue
// .set_property("context-wait", &THROTTLING_DURATION) .set_property("context-wait", &THROTTLING_DURATION)
// .unwrap(); .unwrap();
//
// let (pipeline, src_element, _sink_element, receiver) = setup(name, Some(ts_queue), None); let (pipeline, src_element, _sink_element, receiver) = setup(name, Some(ts_queue), None);
//
// nominal_scenario(&name, pipeline, src_element, receiver); nominal_scenario(&name, pipeline, src_element, receiver);
// } }
#[test] #[test]
fn src_queue_sink_nominal() { fn src_queue_sink_nominal() {
@ -916,30 +1078,309 @@ fn src_queue_sink_nominal() {
nominal_scenario(&name, pipeline, src_element, receiver); nominal_scenario(&name, pipeline, src_element, receiver);
} }
// #[test] #[test]
// fn src_tsproxy_sink_nominal() { fn src_tsproxy_sink_nominal() {
// init(); init();
//
// let name = "src_tsproxy_sink"; let name = "src_tsproxy_sink";
//
// let ts_proxy_sink = gst::ElementFactory::make("ts-proxysink", Some("ts-proxysink")).unwrap(); let ts_proxy_sink = gst::ElementFactory::make("ts-proxysink", Some("ts-proxysink")).unwrap();
// ts_proxy_sink ts_proxy_sink
// .set_property("proxy-context", &format!("{}_proxy_context", name)) .set_property("proxy-context", &format!("{}_proxy_context", name))
// .unwrap(); .unwrap();
//
// let ts_proxy_src = gst::ElementFactory::make("ts-proxysrc", Some("ts-proxysrc")).unwrap(); let ts_proxy_src = gst::ElementFactory::make("ts-proxysrc", Some("ts-proxysrc")).unwrap();
// ts_proxy_src ts_proxy_src
// .set_property("proxy-context", &format!("{}_proxy_context", name)) .set_property("proxy-context", &format!("{}_proxy_context", name))
// .unwrap(); .unwrap();
// ts_proxy_src ts_proxy_src
// .set_property("context", &format!("{}_context", name)) .set_property("context", &format!("{}_context", name))
// .unwrap(); .unwrap();
// ts_proxy_src ts_proxy_src
// .set_property("context-wait", &THROTTLING_DURATION) .set_property("context-wait", &THROTTLING_DURATION)
// .unwrap(); .unwrap();
//
// let (pipeline, src_element, _sink_element, receiver) = let (pipeline, src_element, _sink_element, receiver) =
// setup(name, Some(ts_proxy_sink), Some(ts_proxy_src)); setup(name, Some(ts_proxy_sink), Some(ts_proxy_src));
//
// nominal_scenario(&name, pipeline, src_element, receiver); nominal_scenario(&name, pipeline, src_element, receiver);
// } }
#[test]
fn start_pause_start() {
init();
let scenario_name = "start_pause_start";
let (pipeline, src_element, _sink_element, mut receiver) = setup(&scenario_name, None, None);
let elem_src_test = ElementSrcTest::from_instance(&src_element);
pipeline.set_state(gst::State::Playing).unwrap();
// Initial events
elem_src_test
.try_push(Item::Event(
gst::Event::new_stream_start(scenario_name)
.group_id(gst::GroupId::next())
.build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::StreamStart(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
elem_src_test
.try_push(Item::Event(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::Segment(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
// Buffer
elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![1, 2, 3, 4])))
.unwrap();
pipeline.set_state(gst::State::Paused).unwrap();
pipeline.set_state(gst::State::Playing).unwrap();
elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![5, 6, 7])))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Buffer(buffer) => {
let data = buffer.map_readable().unwrap();
assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
}
other => panic!("Unexpected item {:?}", other),
}
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Buffer(buffer) => {
let data = buffer.map_readable().unwrap();
assert_eq!(data.as_slice(), vec![5, 6, 7].as_slice());
}
other => panic!("Unexpected item {:?}", other),
}
// Nothing else forwarded
receiver.try_next().unwrap_err();
pipeline.set_state(gst::State::Null).unwrap();
}
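// A sketch (not from the patch) of a helper that could condense the repeated
// receiver checks in these scenarios: block on the channel fed by
// ElementSinkTest and assert the next item is a Buffer with the expected
// payload. Assumes the same `Item` type and `futures::StreamExt` import used
// by this file; the name `expect_buffer` is hypothetical.
fn expect_buffer(receiver: &mut mpsc::Receiver<Item>, expected: &[u8]) {
    match futures::executor::block_on(receiver.next()).unwrap() {
        Item::Buffer(buffer) => {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), expected);
        }
        other => panic!("Unexpected item {:?}", other),
    }
}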
#[test]
fn start_stop_start() {
init();
let scenario_name = "start_stop_start";
let (pipeline, src_element, _sink_element, mut receiver) = setup(&scenario_name, None, None);
let elem_src_test = ElementSrcTest::from_instance(&src_element);
pipeline.set_state(gst::State::Playing).unwrap();
// Initial events
elem_src_test
.try_push(Item::Event(
gst::Event::new_stream_start(&format!("{}-after_stop", scenario_name))
.group_id(gst::GroupId::next())
.build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::StreamStart(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
elem_src_test
.try_push(Item::Event(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::Segment(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
// Buffer
elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![1, 2, 3, 4])))
.unwrap();
pipeline.set_state(gst::State::Ready).unwrap();
pipeline.set_state(gst::State::Playing).unwrap();
// Initial events again
elem_src_test
.try_push(Item::Event(
gst::Event::new_stream_start(scenario_name)
.group_id(gst::GroupId::next())
.build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Buffer(_buffer) => {
gst_info!(
SRC_CAT,
"{}: initial buffer went through, don't expect any pending item to be dropped",
scenario_name
);
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::StreamStart(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
}
Item::Event(event) => match event.view() {
EventView::StreamStart(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
elem_src_test
.try_push(Item::Event(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::Segment(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![5, 6, 7])))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Buffer(buffer) => {
let data = buffer.map_readable().unwrap();
assert_eq!(data.as_slice(), vec![5, 6, 7].as_slice());
}
other => panic!("Unexpected item {:?}", other),
}
pipeline.set_state(gst::State::Null).unwrap();
}
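// A companion sketch for events (hypothetical helper, mirroring the match
// blocks above): assert that the next item forwarded to the sink is a
// StreamStart event.
fn expect_stream_start(receiver: &mut mpsc::Receiver<Item>) {
    match futures::executor::block_on(receiver.next()).unwrap() {
        Item::Event(event) => match event.view() {
            EventView::StreamStart(_) => (),
            other => panic!("Unexpected event {:?}", other),
        },
        other => panic!("Unexpected item {:?}", other),
    }
}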
#[test]
fn start_flush() {
init();
let scenario_name = "start_flush";
let (pipeline, src_element, sink_element, mut receiver) = setup(&scenario_name, None, None);
let elem_src_test = ElementSrcTest::from_instance(&src_element);
pipeline.set_state(gst::State::Playing).unwrap();
// Initial events
elem_src_test
.try_push(Item::Event(
gst::Event::new_stream_start(&format!("{}-after_stop", scenario_name))
.group_id(gst::GroupId::next())
.build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::StreamStart(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
elem_src_test
.try_push(Item::Event(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::Segment(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
// Buffer
elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![1, 2, 3, 4])))
.unwrap();
let elem_sink_test = ElementSinkTest::from_instance(&sink_element);
elem_sink_test.push_flush_start(&sink_element);
elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![5, 6, 7])))
.unwrap_err();
elem_sink_test.push_flush_stop(&sink_element);
elem_src_test
.try_push(Item::Event(
gst::Event::new_segment(&gst::FormattedSegment::<gst::format::Time>::new()).build(),
))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Event(event) => match event.view() {
EventView::Segment(_) => (),
other => panic!("Unexpected event {:?}", other),
},
other => panic!("Unexpected item {:?}", other),
}
// Post flush buffer
elem_src_test
.try_push(Item::Buffer(gst::Buffer::from_slice(vec![8, 9])))
.unwrap();
match futures::executor::block_on(receiver.next()).unwrap() {
Item::Buffer(buffer) => {
let data = buffer.map_readable().unwrap();
assert_eq!(data.as_slice(), vec![8, 9].as_slice());
}
other => panic!("Unexpected item {:?}", other),
}
pipeline.set_state(gst::State::Null).unwrap();
}

View file

@ -504,7 +504,7 @@ fn premature_shutdown() {
appsink.set_property("sync", &false).unwrap(); appsink.set_property("sync", &false).unwrap();
appsink.set_property("async", &false).unwrap(); appsink.set_property("async", &false).unwrap();
let (sender, receiver) = mpsc::channel(); let (appsink_sender, appsink_receiver) = mpsc::channel();
let appsink = appsink.dynamic_cast::<gst_app::AppSink>().unwrap(); let appsink = appsink.dynamic_cast::<gst_app::AppSink>().unwrap();
appsink.connect_new_sample(move |appsink| { appsink.connect_new_sample(move |appsink| {
@ -517,13 +517,18 @@ fn premature_shutdown() {
.unwrap() .unwrap()
.unwrap(); .unwrap();
sender.send(()).unwrap(); appsink_sender.send(()).unwrap();
Ok(gst::FlowSuccess::Ok) Ok(gst::FlowSuccess::Ok)
}); });
fn push_buffer(src: &gst::Element) -> bool { fn push_buffer(src: &gst::Element, intent: &str) -> bool {
gst_debug!(CAT, obj: src, "premature_shutdown: pushing buffer"); gst_debug!(
CAT,
obj: src,
"premature_shutdown: pushing buffer {}",
intent
);
src.emit("push-buffer", &[&gst::Buffer::from_slice(vec![0; 1024])]) src.emit("push-buffer", &[&gst::Buffer::from_slice(vec![0; 1024])])
.unwrap() .unwrap()
.unwrap() .unwrap()
@ -536,38 +541,41 @@ fn premature_shutdown() {
let mut scenario = Some(move || { let mut scenario = Some(move || {
gst_debug!(CAT, "premature_shutdown: STEP 1: Playing"); gst_debug!(CAT, "premature_shutdown: STEP 1: Playing");
// Initialize the dataflow // Initialize the dataflow
assert!(push_buffer(&src)); assert!(push_buffer(&src, "(initial)"));
// Wait for the buffer to reach AppSink // Wait for the buffer to reach AppSink
receiver.recv().unwrap(); appsink_receiver.recv().unwrap();
assert_eq!(receiver.try_recv().unwrap_err(), mpsc::TryRecvError::Empty); assert_eq!(
appsink_receiver.try_recv().unwrap_err(),
mpsc::TryRecvError::Empty
);
assert!(push_buffer(&src)); assert!(push_buffer(&src, "before Playing -> Paused"));
gst_debug!(CAT, "premature_shutdown: STEP 2: Playing -> Paused");
pipeline_clone.set_state(gst::State::Paused).unwrap(); pipeline_clone.set_state(gst::State::Paused).unwrap();
// Paused -> can't push_buffer gst_debug!(CAT, "premature_shutdown: STEP 3: Paused -> Playing");
assert!(!push_buffer(&src));
gst_debug!(CAT, "premature_shutdown: STEP 2: Paused -> Playing");
pipeline_clone.set_state(gst::State::Playing).unwrap(); pipeline_clone.set_state(gst::State::Playing).unwrap();
gst_debug!(CAT, "premature_shutdown: STEP 3: Playing"); gst_debug!(CAT, "premature_shutdown: Playing again");
receiver.recv().unwrap(); gst_debug!(CAT, "Waiting for buffer sent before Playing -> Paused");
appsink_receiver.recv().unwrap();
assert!(push_buffer(&src)); assert!(push_buffer(&src, "after Paused -> Playing"));
receiver.recv().unwrap(); gst_debug!(CAT, "Waiting for buffer sent after Paused -> Playing");
appsink_receiver.recv().unwrap();
// Fill up the (dataqueue) and abruptly shutdown // Fill up the (dataqueue) and abruptly shutdown
assert!(push_buffer(&src)); assert!(push_buffer(&src, "filling 1"));
assert!(push_buffer(&src)); assert!(push_buffer(&src, "filling 2"));
gst_debug!(CAT, "premature_shutdown: STEP 4: Shutdown"); gst_debug!(CAT, "premature_shutdown: STEP 4: Playing -> Null");
pipeline_clone.set_state(gst::State::Null).unwrap(); pipeline_clone.set_state(gst::State::Null).unwrap();
assert!(!push_buffer(&src)); assert!(!push_buffer(&src, "after Null"));
l_clone.quit(); l_clone.quit();
}); });