2021-10-18 06:42:42 +00:00
|
|
|
// Copyright (C) 2021 Sebastian Dröge <sebastian@centricular.com>
|
|
|
|
//
|
|
|
|
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
|
|
|
|
// If a copy of the MPL was not distributed with this file, You can obtain one at
|
|
|
|
// <https://mozilla.org/MPL/2.0/>.
|
|
|
|
//
|
|
|
|
// SPDX-License-Identifier: MPL-2.0
|
|
|
|
|
|
|
|
use gst::glib;
|
|
|
|
use gst::prelude::*;
|
|
|
|
use gst::subclass::prelude::*;
|
2022-05-05 12:09:19 +00:00
|
|
|
use gst_base::prelude::*;
|
|
|
|
use gst_base::subclass::prelude::*;
|
2021-10-18 06:42:42 +00:00
|
|
|
|
|
|
|
use std::collections::VecDeque;
|
|
|
|
use std::sync::Mutex;
|
|
|
|
|
|
|
|
use once_cell::sync::Lazy;
|
|
|
|
|
|
|
|
use super::boxes;
|
|
|
|
use super::Buffer;
|
|
|
|
|
2022-05-12 10:44:20 +00:00
|
|
|
/// Offset for the segment in non-single-stream variants.
///
/// Added to running times before using them as buffer PTS/DTS positions so that
/// negative DTS can still be represented as unsigned positions (see `queue_gops`).
const SEGMENT_OFFSET: gst::ClockTime = gst::ClockTime::from_seconds(60 * 60 * 1000);

/// Offset between UNIX epoch and Jan 1 1601 epoch in seconds.
/// 1601 = UNIX + UNIX_1601_OFFSET.
// NOTE(review): not referenced in this chunk — presumably used by the ONVIF
// parts of the file outside this view; confirm before removing.
const UNIX_1601_OFFSET: u64 = 11_644_473_600;

/// Offset between NTP and UNIX epoch in seconds.
/// NTP = UNIX + NTP_UNIX_OFFSET.
const NTP_UNIX_OFFSET: u64 = 2_208_988_800;

/// Reference timestamp meta caps for NTP timestamps.
static NTP_CAPS: Lazy<gst::Caps> = Lazy::new(|| gst::Caps::builder("timestamp/x-ntp").build());

/// Reference timestamp meta caps for UNIX timestamps.
static UNIX_CAPS: Lazy<gst::Caps> = Lazy::new(|| gst::Caps::builder("timestamp/x-unix").build());
|
|
|
|
|
|
|
|
/// Returns the UTC time of the buffer in the UNIX epoch.
|
|
|
|
fn get_utc_time_from_buffer(buffer: &gst::BufferRef) -> Option<gst::ClockTime> {
|
|
|
|
buffer
|
|
|
|
.iter_meta::<gst::ReferenceTimestampMeta>()
|
|
|
|
.find_map(|meta| {
|
|
|
|
if meta.reference().can_intersect(&UNIX_CAPS) {
|
|
|
|
Some(meta.timestamp())
|
|
|
|
} else if meta.reference().can_intersect(&NTP_CAPS) {
|
2022-10-17 17:48:43 +00:00
|
|
|
meta.timestamp().checked_sub(NTP_UNIX_OFFSET.seconds())
|
2022-05-27 10:27:10 +00:00
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-10-18 06:42:42 +00:00
|
|
|
// Debug category used by all logging in this element.
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
    gst::DebugCategory::new(
        "fmp4mux",
        gst::DebugColorFlags::empty(),
        Some("FMP4Mux Element"),
    )
});

// Default property values; mirrored by `Settings::default()` below.
const DEFAULT_FRAGMENT_DURATION: gst::ClockTime = gst::ClockTime::from_seconds(10);
const DEFAULT_HEADER_UPDATE_MODE: super::HeaderUpdateMode = super::HeaderUpdateMode::None;
const DEFAULT_WRITE_MFRA: bool = false;
const DEFAULT_WRITE_MEHD: bool = false;
const DEFAULT_INTERLEAVE_BYTES: Option<u64> = None;
const DEFAULT_INTERLEAVE_TIME: Option<gst::ClockTime> = Some(gst::ClockTime::from_mseconds(250));
|
2021-10-18 06:42:42 +00:00
|
|
|
|
|
|
|
/// Element property values, guarded by `FMP4Mux::settings`.
#[derive(Debug, Clone)]
struct Settings {
    // Target duration of each fragment.
    fragment_duration: gst::ClockTime,
    // Whether/how headers are re-sent on caps changes.
    header_update_mode: super::HeaderUpdateMode,
    // Write an `mfra` box at the end of the stream.
    write_mfra: bool,
    // Write an `mehd` box in the headers.
    write_mehd: bool,
    // Interleaving limits for multi-stream output; `None` means unlimited.
    interleave_bytes: Option<u64>,
    interleave_time: Option<gst::ClockTime>,
}
|
|
|
|
|
|
|
|
impl Default for Settings {
|
|
|
|
fn default() -> Self {
|
|
|
|
Settings {
|
|
|
|
fragment_duration: DEFAULT_FRAGMENT_DURATION,
|
|
|
|
header_update_mode: DEFAULT_HEADER_UPDATE_MODE,
|
|
|
|
write_mfra: DEFAULT_WRITE_MFRA,
|
|
|
|
write_mehd: DEFAULT_WRITE_MEHD,
|
2022-05-12 10:44:20 +00:00
|
|
|
interleave_bytes: DEFAULT_INTERLEAVE_BYTES,
|
|
|
|
interleave_time: DEFAULT_INTERLEAVE_TIME,
|
2021-10-18 06:42:42 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-01 17:52:18 +00:00
|
|
|
/// A single buffer queued inside a [`Gop`], together with its running times.
#[derive(Debug)]
struct GopBuffer {
    buffer: gst::Buffer,
    // PTS as running time.
    pts: gst::ClockTime,
    // DTS as running time; `None` for intra-only streams.
    dts: Option<gst::ClockTime>,
}
|
|
|
|
|
2022-10-01 17:52:18 +00:00
|
|
|
/// One queued group of pictures: a keyframe plus the delta units that follow it.
/// Timestamps are tracked both as running times and as buffer positions.
#[derive(Debug)]
struct Gop {
    // Running times
    start_pts: gst::ClockTime,
    start_dts: Option<gst::ClockTime>,
    earliest_pts: gst::ClockTime,
    // Once this is known to be the final earliest PTS/DTS
    final_earliest_pts: bool,
    // PTS plus duration of last buffer, or start of next GOP
    end_pts: gst::ClockTime,
    // Once this is known to be the final end PTS/DTS
    final_end_pts: bool,
    // DTS plus duration of last buffer, or start of next GOP
    end_dts: Option<gst::ClockTime>,

    // Buffer positions
    earliest_pts_position: gst::ClockTime,
    start_dts_position: Option<gst::ClockTime>,

    // Buffer, PTS running time, DTS running time
    buffers: Vec<GopBuffer>,
}
|
|
|
|
|
2022-05-12 10:44:20 +00:00
|
|
|
/// Per-sinkpad muxing state.
struct Stream {
    sinkpad: gst_base::AggregatorPad,

    // Latest negotiated caps for this stream.
    caps: gst::Caps,
    // Whether the stream has no delta units (e.g. audio); such streams
    // carry no DTS and every buffer starts a "GOP" of its own.
    intra_only: bool,

    // Newest GOP at the front, oldest at the back (see `queue_gops`).
    queued_gops: VecDeque<Gop>,
    // Set when enough is queued for the current fragment; cleared on drain.
    fragment_filled: bool,

    // Difference between the first DTS and 0 in case of negative DTS
    dts_offset: Option<gst::ClockTime>,

    // Current position (DTS, or PTS for intra-only) to prevent
    // timestamps from going backwards when queueing new buffers
    current_position: gst::ClockTime,

    // Current UTC time in ONVIF mode to prevent timestamps from
    // going backwards when draining a fragment.
    // UNIX epoch.
    current_utc_time: gst::ClockTime,
}
|
|
|
|
|
|
|
|
/// Mutable element state, guarded by `FMP4Mux::state`.
#[derive(Default)]
struct State {
    streams: Vec<Stream>,

    // Created once we received caps and kept up to date with the caps,
    // sent as part of the buffer list for the first fragment.
    stream_header: Option<gst::Buffer>,

    // `moof` sequence number, incremented per fragment.
    sequence_number: u32,

    // Fragment tracking for mfra
    current_offset: u64,
    fragment_offsets: Vec<super::FragmentOffset>,

    // Start / end PTS of the whole stream
    earliest_pts: Option<gst::ClockTime>,
    end_pts: Option<gst::ClockTime>,

    // Start PTS of the current fragment
    fragment_start_pts: Option<gst::ClockTime>,
    // Additional timeout delay in case GOPs are bigger than the fragment duration
    timeout_delay: gst::ClockTime,

    // In ONVIF mode the UTC time corresponding to the beginning of the stream
    // UNIX epoch.
    start_utc_time: Option<gst::ClockTime>,
    end_utc_time: Option<gst::ClockTime>,

    // Whether the init headers were pushed downstream already.
    sent_headers: bool,
}
|
|
|
|
|
2022-05-12 10:44:20 +00:00
|
|
|
/// Instance struct of the fmp4mux element.
///
/// `state` holds everything that changes while running; `settings` holds the
/// property values. Both use their own mutex so property access never blocks
/// on streaming state.
#[derive(Default)]
pub(crate) struct FMP4Mux {
    state: Mutex<State>,
    settings: Mutex<Settings>,
}
|
|
|
|
|
|
|
|
impl FMP4Mux {
|
2022-08-17 10:19:08 +00:00
|
|
|
/// Finds the stream whose next queued buffer has the earliest running time.
///
/// Returns `Ok(None)` when not on timeout and some stream neither has a buffer
/// nor is EOS (we must wait), or when no stream has data at all. A stream whose
/// buffer has no valid running time is treated as running time zero, i.e. it
/// sorts before everything else. Streams whose current fragment is already
/// filled are skipped.
///
/// Errors with `gst::FlowError::Error` if a buffer arrives before a time
/// segment on its pad.
fn find_earliest_stream<'a>(
    &self,
    state: &'a mut State,
    timeout: bool,
) -> Result<Option<(usize, &'a mut Stream)>, gst::FlowError> {
    // (index, stream, running time) of the best candidate so far.
    let mut earliest_stream = None;
    let mut all_have_data_or_eos = true;

    for (idx, stream) in state.streams.iter_mut().enumerate() {
        let buffer = match stream.sinkpad.peek_buffer() {
            Some(buffer) => buffer,
            None => {
                if stream.sinkpad.is_eos() {
                    gst::trace!(CAT, obj: stream.sinkpad, "Stream is EOS");
                } else {
                    all_have_data_or_eos = false;
                    gst::trace!(CAT, obj: stream.sinkpad, "Stream has no buffer");
                }
                continue;
            }
        };

        if stream.fragment_filled {
            gst::trace!(CAT, obj: stream.sinkpad, "Stream has current fragment filled");
            continue;
        }

        // A buffer before any time segment is a caller error upstream.
        let segment = match stream
            .sinkpad
            .segment()
            .clone()
            .downcast::<gst::ClockTime>()
            .ok()
        {
            Some(segment) => segment,
            None => {
                gst::error!(CAT, obj: stream.sinkpad, "Got buffer before segment");
                return Err(gst::FlowError::Error);
            }
        };

        // If the stream has no valid running time, assume it's before everything else.
        let running_time = match segment.to_running_time(buffer.dts_or_pts()) {
            None => {
                gst::trace!(CAT, obj: stream.sinkpad, "Stream has no valid running time");
                if earliest_stream
                    .as_ref()
                    .map_or(true, |(_, _, earliest_running_time)| {
                        *earliest_running_time > gst::ClockTime::ZERO
                    })
                {
                    earliest_stream = Some((idx, stream, gst::ClockTime::ZERO));
                }
                continue;
            }
            Some(running_time) => running_time,
        };

        gst::trace!(CAT, obj: stream.sinkpad, "Stream has running time {} queued", running_time);

        // Strictly-greater comparison: on a tie the first stream wins.
        if earliest_stream
            .as_ref()
            .map_or(true, |(_idx, _stream, earliest_running_time)| {
                *earliest_running_time > running_time
            })
        {
            earliest_stream = Some((idx, stream, running_time));
        }
    }

    if !timeout && !all_have_data_or_eos {
        gst::trace!(
            CAT,
            imp: self,
            "No timeout and not all streams have a buffer or are EOS"
        );
        Ok(None)
    } else if let Some((idx, stream, earliest_running_time)) = earliest_stream {
        gst::trace!(
            CAT,
            imp: self,
            "Stream {} is earliest stream with running time {}",
            stream.sinkpad.name(),
            earliest_running_time
        );
        Ok(Some((idx, stream)))
    } else {
        gst::trace!(CAT, imp: self, "No streams have data queued currently");
        Ok(None)
    }
}
|
|
|
|
|
2022-06-30 13:29:09 +00:00
|
|
|
/// Queue incoming buffers as individual GOPs.
///
/// Converts the buffer's PTS/DTS into running times (negative DTS are shifted
/// via `stream.dts_offset`), enforces monotonicity against
/// `stream.current_position`, and then either starts a new GOP (keyframe) or
/// appends to the newest queued GOP (delta unit). New GOPs are pushed to the
/// *front* of `stream.queued_gops`; the oldest GOP sits at the back.
///
/// Errors with `gst::FlowError::Error` on: missing DTS for non-intra-only
/// streams, delta units on intra-only streams, missing PTS, or running-time
/// conversion failures / negative PTS.
fn queue_gops(
    &self,
    _idx: usize,
    stream: &mut Stream,
    segment: &gst::FormattedSegment<gst::ClockTime>,
    mut buffer: gst::Buffer,
) -> Result<(), gst::FlowError> {
    use gst::Signed::*;

    // Caller must only queue while the current fragment is not filled yet.
    assert!(!stream.fragment_filled);

    gst::trace!(CAT, obj: stream.sinkpad, "Handling buffer {:?}", buffer);

    let intra_only = stream.intra_only;

    if !intra_only && buffer.dts().is_none() {
        gst::error!(CAT, obj: stream.sinkpad, "Require DTS for video streams");
        return Err(gst::FlowError::Error);
    }

    if intra_only && buffer.flags().contains(gst::BufferFlags::DELTA_UNIT) {
        gst::error!(CAT, obj: stream.sinkpad, "Intra-only stream with delta units");
        return Err(gst::FlowError::Error);
    }

    let pts_position = buffer.pts().ok_or_else(|| {
        gst::error!(CAT, obj: stream.sinkpad, "Require timestamped buffers");
        gst::FlowError::Error
    })?;
    let duration = buffer.duration();
    // Without a duration the buffer is treated as zero-length.
    let end_pts_position = duration.opt_add(pts_position).unwrap_or(pts_position);

    let mut pts = segment
        .to_running_time_full(pts_position)
        .ok_or_else(|| {
            gst::error!(CAT, obj: stream.sinkpad, "Couldn't convert PTS to running time");
            gst::FlowError::Error
        })?
        .positive_or_else(|_| {
            gst::error!(CAT, obj: stream.sinkpad, "Negative PTSs are not supported");
            gst::FlowError::Error
        })?;

    let mut end_pts = segment
        .to_running_time_full(end_pts_position)
        .ok_or_else(|| {
            gst::error!(CAT, obj: stream.sinkpad, "Couldn't convert end PTS to running time");
            gst::FlowError::Error
        })?
        .positive_or_else(|_| {
            gst::error!(CAT, obj: stream.sinkpad, "Negative PTSs are not supported");
            gst::FlowError::Error
        })?;

    // Enforce monotonically increasing PTS for intra-only streams
    if intra_only {
        if pts < stream.current_position {
            gst::warning!(
                CAT,
                obj: stream.sinkpad,
                "Decreasing PTS {} < {} for intra-only stream",
                pts,
                stream.current_position,
            );
            pts = stream.current_position;
        } else {
            stream.current_position = pts;
        }
        end_pts = std::cmp::max(end_pts, pts);
    }

    let (dts_position, dts, end_dts) = if intra_only {
        // Intra-only streams carry no DTS at all.
        (None, None, None)
    } else {
        // Negative DTS are handled via the dts_offset and by having negative composition time
        // offsets in the `trun` box. The smallest DTS here is shifted to zero.
        let dts_position = buffer.dts().expect("not DTS");
        let end_dts_position = duration.opt_add(dts_position).unwrap_or(dts_position);

        let signed_dts = segment.to_running_time_full(dts_position).ok_or_else(|| {
            gst::error!(CAT, obj: stream.sinkpad, "Couldn't convert DTS to running time");
            gst::FlowError::Error
        })?;
        let mut dts = match signed_dts {
            Positive(dts) => {
                // Positive DTS are simply shifted by the (magnitude of the)
                // negative offset observed at the start, if any.
                if let Some(dts_offset) = stream.dts_offset {
                    dts + dts_offset
                } else {
                    dts
                }
            }
            Negative(dts) => {
                // First negative DTS defines the shift that maps it to zero.
                if stream.dts_offset.is_none() {
                    stream.dts_offset = Some(dts);
                }

                let dts_offset = stream.dts_offset.unwrap();
                // `dts` here is the magnitude of the negative running time, so a
                // bigger value means an *earlier* timestamp than the first one.
                if dts > dts_offset {
                    gst::warning!(CAT, obj: stream.sinkpad, "DTS before first DTS");
                    gst::ClockTime::ZERO
                } else {
                    dts_offset - dts
                }
            }
        };

        let signed_end_dts =
            segment
                .to_running_time_full(end_dts_position)
                .ok_or_else(|| {
                    gst::error!(
                        CAT,
                        obj: stream.sinkpad,
                        "Couldn't convert end DTS to running time"
                    );
                    gst::FlowError::Error
                })?;
        let mut end_dts = match signed_end_dts {
            Positive(dts) => {
                if let Some(dts_offset) = stream.dts_offset {
                    dts + dts_offset
                } else {
                    dts
                }
            }
            Negative(dts) => {
                if stream.dts_offset.is_none() {
                    stream.dts_offset = Some(dts);
                }

                let dts_offset = stream.dts_offset.unwrap();
                if dts > dts_offset {
                    gst::warning!(CAT, obj: stream.sinkpad, "End DTS before first DTS");
                    gst::ClockTime::ZERO
                } else {
                    dts_offset - dts
                }
            }
        };

        // Enforce monotonically increasing DTS (this is the non-intra-only
        // branch, i.e. streams that actually carry DTS).
        // NOTE: PTS stays the same so this will cause a bigger PTS/DTS difference
        // FIXME: Is this correct?
        if dts < stream.current_position {
            gst::warning!(
                CAT,
                obj: stream.sinkpad,
                "Decreasing DTS {} < {}",
                dts,
                stream.current_position,
            );
            dts = stream.current_position;
        } else {
            stream.current_position = dts;
        }
        end_dts = std::cmp::max(end_dts, dts);

        (Some(dts_position), Some(dts), Some(end_dts))
    };

    // If this is a multi-stream element then we need to update the PTS/DTS positions according
    // to the output segment, specifically to re-timestamp them with the running time and
    // adjust for the segment shift to compensate for negative DTS.
    let aggregator = self.obj();
    let class = aggregator.class();
    let (pts_position, dts_position) = if class.as_ref().variant.is_single_stream() {
        (pts_position, dts_position)
    } else {
        let pts_position = pts + SEGMENT_OFFSET;
        let dts_position = dts.map(|dts| {
            dts + SEGMENT_OFFSET - stream.dts_offset.unwrap_or(gst::ClockTime::ZERO)
        });

        // Rewrite the buffer's timestamps in place (copy-on-write).
        let buffer = buffer.make_mut();
        buffer.set_pts(pts_position);
        buffer.set_dts(dts_position);

        (pts_position, dts_position)
    };

    if !buffer.flags().contains(gst::BufferFlags::DELTA_UNIT) {
        // Keyframe: starts a new GOP at the front of the queue.
        gst::debug!(
            CAT,
            obj: stream.sinkpad,
            "Starting new GOP at PTS {} DTS {} (DTS offset {})",
            pts,
            dts.display(),
            stream.dts_offset.display(),
        );

        let gop = Gop {
            start_pts: pts,
            start_dts: dts,
            start_dts_position: if intra_only { None } else { dts_position },
            earliest_pts: pts,
            earliest_pts_position: pts_position,
            // For intra-only streams each GOP is a single buffer, so its
            // earliest PTS is final immediately.
            final_earliest_pts: intra_only,
            end_pts,
            end_dts,
            final_end_pts: false,
            buffers: vec![GopBuffer { buffer, pts, dts }],
        };
        stream.queued_gops.push_front(gop);

        // The start of this GOP finalizes the end of the previous one.
        if let Some(prev_gop) = stream.queued_gops.get_mut(1) {
            gst::debug!(
                CAT,
                obj: stream.sinkpad,
                "Updating previous GOP starting at PTS {} to end PTS {} DTS {}",
                prev_gop.earliest_pts,
                pts,
                dts.display(),
            );

            prev_gop.end_pts = std::cmp::max(prev_gop.end_pts, pts);
            prev_gop.end_dts = std::cmp::max(prev_gop.end_dts, dts);

            if intra_only {
                prev_gop.final_end_pts = true;
            }

            if !prev_gop.final_earliest_pts {
                // Don't bother logging this for intra-only streams as it would be for every
                // single buffer.
                if !intra_only {
                    gst::debug!(
                        CAT,
                        obj: stream.sinkpad,
                        "Previous GOP has final earliest PTS at {}",
                        prev_gop.earliest_pts
                    );
                }

                prev_gop.final_earliest_pts = true;
                if let Some(prev_prev_gop) = stream.queued_gops.get_mut(2) {
                    prev_prev_gop.final_end_pts = true;
                }
            }
        }
    } else if let Some(gop) = stream.queued_gops.front_mut() {
        // Delta unit: extend the newest GOP.
        assert!(!intra_only);

        // We require DTS for non-intra-only streams
        let dts = dts.unwrap();
        let end_dts = end_dts.unwrap();

        gop.end_pts = std::cmp::max(gop.end_pts, end_pts);
        gop.end_dts = Some(std::cmp::max(gop.end_dts.expect("no end DTS"), end_dts));
        gop.buffers.push(GopBuffer {
            buffer,
            pts,
            dts: Some(dts),
        });

        // B-frame style reordering can move the GOP's earliest PTS backwards.
        if gop.earliest_pts > pts && !gop.final_earliest_pts {
            gst::debug!(
                CAT,
                obj: stream.sinkpad,
                "Updating current GOP earliest PTS from {} to {}",
                gop.earliest_pts,
                pts
            );
            gop.earliest_pts = pts;
            gop.earliest_pts_position = pts_position;

            if let Some(prev_gop) = stream.queued_gops.get_mut(1) {
                if prev_gop.end_pts < pts {
                    gst::debug!(
                        CAT,
                        obj: stream.sinkpad,
                        "Updating previous GOP starting PTS {} end time from {} to {}",
                        pts,
                        prev_gop.end_pts,
                        pts
                    );
                    prev_gop.end_pts = pts;
                }
            }
        }

        let gop = stream.queued_gops.front_mut().unwrap();

        // The earliest PTS is known when the current DTS is bigger or equal to the first
        // PTS that was observed in this GOP. If there was another frame later that had a
        // lower PTS then it wouldn't be possible to display it in time anymore, i.e. the
        // stream would be invalid.
        if gop.start_pts <= dts && !gop.final_earliest_pts {
            gst::debug!(
                CAT,
                obj: stream.sinkpad,
                "GOP has final earliest PTS at {}",
                gop.earliest_pts
            );
            gop.final_earliest_pts = true;

            if let Some(prev_gop) = stream.queued_gops.get_mut(1) {
                prev_gop.final_end_pts = true;
            }
        }
    } else {
        // Delta unit before the first keyframe: nothing to attach it to, drop.
        gst::warning!(
            CAT,
            obj: stream.sinkpad,
            "Waiting for keyframe at the beginning of the stream"
        );
    }

    // Log the total duration of fully-finalized GOPs (front-most finalized GOP
    // down to the oldest queued GOP).
    if let Some((prev_gop, first_gop)) = Option::zip(
        stream.queued_gops.iter().find(|gop| gop.final_end_pts),
        stream.queued_gops.back(),
    ) {
        gst::debug!(
            CAT,
            obj: stream.sinkpad,
            "Queued full GOPs duration updated to {}",
            prev_gop.end_pts.saturating_sub(first_gop.earliest_pts),
        );
    }

    gst::debug!(
        CAT,
        obj: stream.sinkpad,
        "Queued duration updated to {}",
        Option::zip(stream.queued_gops.front(), stream.queued_gops.back())
            .map(|(end, start)| end.end_pts.saturating_sub(start.start_pts))
            .unwrap_or(gst::ClockTime::ZERO)
    );

    Ok(())
}
|
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
#[allow(clippy::type_complexity)]
|
|
|
|
fn drain_buffers(
|
2021-10-18 06:42:42 +00:00
|
|
|
&self,
|
|
|
|
state: &mut State,
|
|
|
|
settings: &Settings,
|
2022-05-17 12:05:19 +00:00
|
|
|
timeout: bool,
|
2021-10-18 06:42:42 +00:00
|
|
|
at_eos: bool,
|
2022-09-28 16:35:47 +00:00
|
|
|
) -> Result<
|
|
|
|
(
|
2022-10-01 17:52:18 +00:00
|
|
|
// Drained streams
|
2022-09-28 16:35:47 +00:00
|
|
|
Vec<(
|
|
|
|
gst::Caps,
|
|
|
|
Option<super::FragmentTimingInfo>,
|
|
|
|
VecDeque<Buffer>,
|
|
|
|
)>,
|
2022-10-01 17:52:18 +00:00
|
|
|
// Minimum earliest PTS position of all streams
|
2022-09-28 16:35:47 +00:00
|
|
|
Option<gst::ClockTime>,
|
2022-10-01 17:52:18 +00:00
|
|
|
// Minimum earliest PTS of all streams
|
2022-09-28 16:35:47 +00:00
|
|
|
Option<gst::ClockTime>,
|
2022-10-01 17:52:18 +00:00
|
|
|
// Minimum start DTS position of all streams (if any stream has DTS)
|
2022-09-28 16:35:47 +00:00
|
|
|
Option<gst::ClockTime>,
|
2022-10-01 17:52:18 +00:00
|
|
|
// End PTS of this drained fragment, i.e. start PTS of the next fragment
|
2022-09-28 16:35:47 +00:00
|
|
|
Option<gst::ClockTime>,
|
|
|
|
),
|
|
|
|
gst::FlowError,
|
|
|
|
> {
|
|
|
|
let mut drained_streams = Vec::with_capacity(state.streams.len());
|
2021-10-18 06:42:42 +00:00
|
|
|
|
2022-05-12 10:44:20 +00:00
|
|
|
let mut min_earliest_pts_position = None;
|
|
|
|
let mut min_earliest_pts = None;
|
|
|
|
let mut min_start_dts_position = None;
|
2022-10-01 17:52:18 +00:00
|
|
|
let mut fragment_end_pts = None;
|
|
|
|
|
|
|
|
// The first stream decides how much can be dequeued, if anything at all.
|
|
|
|
//
|
|
|
|
// All complete GOPs (or at EOS everything) up to the fragment duration will be dequeued
|
|
|
|
// but on timeout in live pipelines it might happen that the first stream does not have a
|
|
|
|
// complete GOP queued. In that case nothing is dequeued for any of the streams and the
|
|
|
|
// timeout is advanced by 1s until at least one complete GOP can be dequeued.
|
|
|
|
//
|
|
|
|
// If the first stream is already EOS then the next stream that is not EOS yet will be
|
|
|
|
// taken in its place.
|
|
|
|
let fragment_start_pts = state.fragment_start_pts.unwrap();
|
|
|
|
gst::info!(
|
|
|
|
CAT,
|
2022-10-09 13:06:59 +00:00
|
|
|
imp: self,
|
2022-10-01 17:52:18 +00:00
|
|
|
"Starting to drain at {}",
|
|
|
|
fragment_start_pts
|
|
|
|
);
|
2022-05-12 10:44:20 +00:00
|
|
|
|
2022-06-30 13:29:09 +00:00
|
|
|
for (idx, stream) in state.streams.iter_mut().enumerate() {
|
2022-05-12 10:44:20 +00:00
|
|
|
assert!(
|
2022-05-17 12:05:19 +00:00
|
|
|
timeout
|
|
|
|
|| at_eos
|
2022-05-12 10:44:20 +00:00
|
|
|
|| stream.sinkpad.is_eos()
|
|
|
|
|| stream.queued_gops.get(1).map(|gop| gop.final_earliest_pts) == Some(true)
|
|
|
|
);
|
|
|
|
|
2022-10-01 17:52:18 +00:00
|
|
|
// Drain all complete GOPs until at most one fragment duration was dequeued for the
|
|
|
|
// first stream, or until the dequeued duration of the first stream.
|
|
|
|
let mut gops = Vec::with_capacity(stream.queued_gops.len());
|
|
|
|
let dequeue_end_pts =
|
|
|
|
fragment_end_pts.unwrap_or(fragment_start_pts + settings.fragment_duration);
|
|
|
|
gst::trace!(
|
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: stream.sinkpad,
|
2022-10-01 17:52:18 +00:00
|
|
|
"Draining up to end PTS {} / duration {}",
|
|
|
|
dequeue_end_pts,
|
|
|
|
dequeue_end_pts - fragment_start_pts
|
|
|
|
);
|
2022-05-12 10:44:20 +00:00
|
|
|
|
2022-10-01 17:52:18 +00:00
|
|
|
while let Some(gop) = stream.queued_gops.back() {
|
|
|
|
// If this GOP is not complete then we can't pop it yet.
|
|
|
|
//
|
|
|
|
// If there was no complete GOP at all yet then it might be bigger than the
|
|
|
|
// fragment duration. In this case we might not be able to handle the latency
|
|
|
|
// requirements in a live pipeline.
|
|
|
|
if !gop.final_end_pts && !at_eos && !stream.sinkpad.is_eos() {
|
|
|
|
break;
|
|
|
|
}
|
2021-10-18 06:42:42 +00:00
|
|
|
|
2022-10-01 17:52:18 +00:00
|
|
|
// If this GOP starts after the fragment end then don't dequeue it yet unless this is
|
|
|
|
// the first stream and no GOPs were dequeued at all yet. This would mean that the
|
|
|
|
// GOP is bigger than the fragment duration.
|
|
|
|
if gop.end_pts > dequeue_end_pts && (fragment_end_pts.is_some() || !gops.is_empty())
|
|
|
|
{
|
|
|
|
break;
|
2022-05-12 10:44:20 +00:00
|
|
|
}
|
|
|
|
|
2022-10-01 17:52:18 +00:00
|
|
|
gops.push(stream.queued_gops.pop_back().unwrap());
|
|
|
|
}
|
2022-05-12 10:44:20 +00:00
|
|
|
stream.fragment_filled = false;
|
|
|
|
|
2022-10-01 17:52:18 +00:00
|
|
|
// If we don't have a next fragment start PTS then this is the first stream as above.
|
|
|
|
if fragment_end_pts.is_none() {
|
|
|
|
if let Some(last_gop) = gops.last() {
|
|
|
|
// Dequeued something so let's take the end PTS of the last GOP
|
|
|
|
fragment_end_pts = Some(last_gop.end_pts);
|
|
|
|
gst::info!(
|
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: stream.sinkpad,
|
2022-10-01 17:52:18 +00:00
|
|
|
"Draining up to PTS {} for this fragment",
|
|
|
|
last_gop.end_pts,
|
|
|
|
);
|
|
|
|
} else {
|
|
|
|
// If nothing was dequeued for the first stream then this is OK if we're at
|
|
|
|
// EOS: we just consider the next stream as first stream then.
|
|
|
|
if at_eos || stream.sinkpad.is_eos() {
|
|
|
|
// This is handled below generally if nothing was dequeued
|
|
|
|
} else {
|
|
|
|
// Otherwise this can only really happen on timeout in live pipelines.
|
|
|
|
assert!(timeout);
|
|
|
|
|
|
|
|
gst::warning!(
|
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: stream.sinkpad,
|
2022-10-01 17:52:18 +00:00
|
|
|
"Don't have a complete GOP for the first stream on timeout in a live pipeline",
|
|
|
|
);
|
|
|
|
|
|
|
|
// In this case we advance the timeout by 1s and hope that things are
|
|
|
|
// better then.
|
|
|
|
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-05-12 10:44:20 +00:00
|
|
|
if gops.is_empty() {
|
2022-05-31 14:25:13 +00:00
|
|
|
gst::info!(
|
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: stream.sinkpad,
|
2022-05-31 14:25:13 +00:00
|
|
|
"Draining no buffers",
|
|
|
|
);
|
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
drained_streams.push((stream.caps.clone(), None, VecDeque::new()));
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2022-10-01 17:52:18 +00:00
|
|
|
assert!(fragment_end_pts.is_some());
|
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
let first_gop = gops.first().unwrap();
|
|
|
|
let last_gop = gops.last().unwrap();
|
|
|
|
let earliest_pts = first_gop.earliest_pts;
|
|
|
|
let earliest_pts_position = first_gop.earliest_pts_position;
|
|
|
|
let start_dts = first_gop.start_dts;
|
|
|
|
let start_dts_position = first_gop.start_dts_position;
|
|
|
|
let end_pts = last_gop.end_pts;
|
|
|
|
let dts_offset = stream.dts_offset;
|
|
|
|
|
|
|
|
if min_earliest_pts.opt_gt(earliest_pts).unwrap_or(true) {
|
|
|
|
min_earliest_pts = Some(earliest_pts);
|
|
|
|
}
|
|
|
|
if min_earliest_pts_position
|
|
|
|
.opt_gt(earliest_pts_position)
|
|
|
|
.unwrap_or(true)
|
|
|
|
{
|
|
|
|
min_earliest_pts_position = Some(earliest_pts_position);
|
|
|
|
}
|
|
|
|
if let Some(start_dts_position) = start_dts_position {
|
|
|
|
if min_start_dts_position
|
|
|
|
.opt_gt(start_dts_position)
|
2022-07-15 21:29:45 +00:00
|
|
|
.unwrap_or(true)
|
|
|
|
{
|
2022-09-28 16:35:47 +00:00
|
|
|
min_start_dts_position = Some(start_dts_position);
|
2022-05-12 10:44:20 +00:00
|
|
|
}
|
2022-09-28 16:35:47 +00:00
|
|
|
}
|
2022-05-12 10:44:20 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
gst::info!(
|
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: stream.sinkpad,
|
2022-09-28 16:35:47 +00:00
|
|
|
"Draining {} worth of buffers starting at PTS {} DTS {}, DTS offset {}",
|
|
|
|
end_pts.saturating_sub(earliest_pts),
|
|
|
|
earliest_pts,
|
|
|
|
start_dts.display(),
|
|
|
|
dts_offset.display(),
|
|
|
|
);
|
2022-05-12 10:44:20 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
if let Some((prev_gop, first_gop)) = Option::zip(
|
|
|
|
stream.queued_gops.iter().find(|gop| gop.final_end_pts),
|
|
|
|
stream.queued_gops.back(),
|
|
|
|
) {
|
2022-05-12 10:44:20 +00:00
|
|
|
gst::debug!(
|
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: stream.sinkpad,
|
2022-09-28 16:35:47 +00:00
|
|
|
"Queued full GOPs duration updated to {}",
|
|
|
|
prev_gop.end_pts.saturating_sub(first_gop.earliest_pts),
|
2022-05-12 10:44:20 +00:00
|
|
|
);
|
2022-09-28 16:35:47 +00:00
|
|
|
}
|
2022-05-12 10:44:20 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
gst::debug!(
|
|
|
|
CAT,
|
2022-10-23 18:46:18 +00:00
|
|
|
obj: stream.sinkpad,
|
2022-09-28 16:35:47 +00:00
|
|
|
"Queued duration updated to {}",
|
|
|
|
Option::zip(stream.queued_gops.front(), stream.queued_gops.back())
|
|
|
|
.map(|(end, start)| end.end_pts.saturating_sub(start.start_pts))
|
|
|
|
.unwrap_or(gst::ClockTime::ZERO)
|
|
|
|
);
|
2021-10-18 06:42:42 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
let start_time = if stream.intra_only {
|
|
|
|
earliest_pts
|
|
|
|
} else {
|
|
|
|
start_dts.unwrap()
|
|
|
|
};
|
|
|
|
|
|
|
|
let mut buffers = VecDeque::with_capacity(gops.iter().map(|g| g.buffers.len()).sum());
|
2022-06-30 13:29:09 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
for gop in gops {
|
|
|
|
let mut gop_buffers = gop.buffers.into_iter().peekable();
|
|
|
|
while let Some(buffer) = gop_buffers.next() {
|
|
|
|
let timestamp = if stream.intra_only {
|
|
|
|
buffer.pts
|
|
|
|
} else {
|
|
|
|
buffer.dts.unwrap()
|
|
|
|
};
|
|
|
|
|
|
|
|
let end_timestamp = match gop_buffers.peek() {
|
|
|
|
Some(buffer) => {
|
|
|
|
if stream.intra_only {
|
|
|
|
buffer.pts
|
|
|
|
} else {
|
|
|
|
buffer.dts.unwrap()
|
2022-06-30 13:29:09 +00:00
|
|
|
}
|
2022-09-28 16:35:47 +00:00
|
|
|
}
|
|
|
|
None => {
|
|
|
|
if stream.intra_only {
|
|
|
|
gop.end_pts
|
|
|
|
} else {
|
|
|
|
gop.end_dts.unwrap()
|
2022-06-30 13:29:09 +00:00
|
|
|
}
|
2022-09-28 16:35:47 +00:00
|
|
|
}
|
|
|
|
};
|
2022-06-30 13:29:09 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
// Timestamps are enforced to monotonically increase when queueing buffers
|
|
|
|
let duration = end_timestamp
|
|
|
|
.checked_sub(timestamp)
|
|
|
|
.expect("Timestamps going backwards");
|
2022-06-30 13:29:09 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
let composition_time_offset = if stream.intra_only {
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
let pts = buffer.pts;
|
|
|
|
let dts = buffer.dts.unwrap();
|
2022-06-30 13:29:09 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
if pts > dts {
|
2022-10-23 18:46:18 +00:00
|
|
|
Some(i64::try_from((pts - dts).nseconds()).map_err(|_| {
|
|
|
|
gst::error!(CAT, obj: stream.sinkpad, "Too big PTS/DTS difference");
|
|
|
|
gst::FlowError::Error
|
|
|
|
})?)
|
2022-09-28 16:35:47 +00:00
|
|
|
} else {
|
2022-10-23 18:46:18 +00:00
|
|
|
let diff = i64::try_from((dts - pts).nseconds()).map_err(|_| {
|
|
|
|
gst::error!(CAT, obj: stream.sinkpad, "Too big PTS/DTS difference");
|
|
|
|
gst::FlowError::Error
|
|
|
|
})?;
|
2022-09-28 16:35:47 +00:00
|
|
|
Some(-diff)
|
|
|
|
}
|
|
|
|
};
|
2022-06-30 13:29:09 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
buffers.push_back(Buffer {
|
|
|
|
idx,
|
|
|
|
buffer: buffer.buffer,
|
|
|
|
timestamp,
|
|
|
|
duration,
|
|
|
|
composition_time_offset,
|
|
|
|
});
|
2022-05-12 10:44:20 +00:00
|
|
|
}
|
|
|
|
}
|
2022-09-28 16:35:47 +00:00
|
|
|
|
|
|
|
drained_streams.push((
|
|
|
|
stream.caps.clone(),
|
|
|
|
Some(super::FragmentTimingInfo {
|
|
|
|
start_time,
|
|
|
|
intra_only: stream.intra_only,
|
|
|
|
}),
|
|
|
|
buffers,
|
|
|
|
));
|
2022-05-12 10:44:20 +00:00
|
|
|
}
|
2021-10-18 06:42:42 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
Ok((
|
|
|
|
drained_streams,
|
|
|
|
min_earliest_pts_position,
|
|
|
|
min_earliest_pts,
|
|
|
|
min_start_dts_position,
|
2022-10-01 17:52:18 +00:00
|
|
|
fragment_end_pts,
|
2022-09-28 16:35:47 +00:00
|
|
|
))
|
|
|
|
}
|
|
|
|
|
|
|
|
    /// For the ONVIF variant, rewrites all drained buffer timestamps/durations so they are
    /// relative to the stream's start UTC time (taken from `ReferenceTimestampMeta`).
    ///
    /// Returns the maximum end UTC time over all drained buffers, or `None` for
    /// non-ONVIF variants (in which case the input is left untouched).
    ///
    /// Errors with `gst::FlowError::Error` if any buffer that needs one is missing a
    /// UNIX reference timestamp meta.
    fn preprocess_drained_streams_onvif(
        &self,
        state: &mut State,
        drained_streams: &mut [(
            gst::Caps,
            Option<super::FragmentTimingInfo>,
            VecDeque<Buffer>,
        )],
    ) -> Result<Option<gst::ClockTime>, gst::FlowError> {
        let aggregator = self.obj();
        // Only the ONVIF variant uses UTC-based timestamps; all others are untouched.
        if aggregator.class().as_ref().variant != super::Variant::ONVIF {
            return Ok(None);
        }

        let mut max_end_utc_time = None;

        // Converts a buffer's (DTS-like) timestamp back to its PTS by applying the
        // stored composition time offset.
        let calculate_pts = |buffer: &Buffer| -> gst::ClockTime {
            let composition_time_offset = buffer.composition_time_offset.unwrap_or(0);
            if composition_time_offset > 0 {
                buffer.timestamp + (composition_time_offset as u64).nseconds()
            } else {
                buffer
                    .timestamp
                    .checked_sub(((-composition_time_offset) as u64).nseconds())
                    .unwrap()
            }
        };

        // If this is the first fragment then allow the first buffers to not have a reference
        // timestamp meta and backdate them
        if state.stream_header.is_none() {
            for (idx, (_, _, drain_buffers)) in drained_streams.iter_mut().enumerate() {
                // Find the first buffer of this stream that carries a UNIX reference
                // timestamp meta; everything before it gets a backdated meta below.
                let (buffer_idx, utc_time, buffer) =
                    match drain_buffers.iter().enumerate().find_map(|(idx, buffer)| {
                        get_utc_time_from_buffer(&buffer.buffer)
                            .map(|timestamp| (idx, timestamp, buffer))
                    }) {
                        None => {
                            gst::error!(
                                CAT,
                                obj: state.streams[idx].sinkpad,
                                "No reference timestamp set on any buffers in the first fragment",
                            );
                            return Err(gst::FlowError::Error);
                        }
                        Some(res) => res,
                    };

                // Now do the backdating
                if buffer_idx > 0 {
                    let utc_time_pts = calculate_pts(buffer);

                    for buffer in drain_buffers.iter_mut().take(buffer_idx) {
                        // Extrapolate each earlier buffer's UTC time from the PTS
                        // distance to the first buffer that had a UTC time.
                        let buffer_pts = calculate_pts(buffer);
                        let buffer_pts_diff = if utc_time_pts >= buffer_pts {
                            (utc_time_pts - buffer_pts).nseconds() as i64
                        } else {
                            -((buffer_pts - utc_time_pts).nseconds() as i64)
                        };
                        let buffer_utc_time = if buffer_pts_diff >= 0 {
                            utc_time
                                .checked_sub((buffer_pts_diff as u64).nseconds())
                                .unwrap()
                        } else {
                            utc_time
                                .checked_add(((-buffer_pts_diff) as u64).nseconds())
                                .unwrap()
                        };

                        // Attach the computed UTC time as a UNIX reference timestamp meta.
                        let buffer = buffer.buffer.make_mut();
                        gst::ReferenceTimestampMeta::add(
                            buffer,
                            &UNIX_CAPS,
                            buffer_utc_time,
                            gst::ClockTime::NONE,
                        );
                    }
                }
            }
        }

        // Calculate the minimum across all streams and remember that
        if state.start_utc_time.is_none() {
            let mut start_utc_time = None;

            for (idx, (_, _, drain_buffers)) in drained_streams.iter().enumerate() {
                for buffer in drain_buffers {
                    let utc_time = match get_utc_time_from_buffer(&buffer.buffer) {
                        None => {
                            gst::error!(
                                CAT,
                                obj: state.streams[idx].sinkpad,
                                "No reference timestamp set on all buffers"
                            );
                            return Err(gst::FlowError::Error);
                        }
                        Some(utc_time) => utc_time,
                    };

                    if start_utc_time.is_none() || start_utc_time > Some(utc_time) {
                        start_utc_time = Some(utc_time);
                    }
                }
            }

            gst::debug!(
                CAT,
                imp: self,
                "Configuring start UTC time {}",
                start_utc_time.unwrap()
            );
            state.start_utc_time = start_utc_time;
        }

        // Update all buffer timestamps based on the UTC time and offset to the start UTC time
        let start_utc_time = state.start_utc_time.unwrap();
        for (idx, (_, timing_info, drain_buffers)) in drained_streams.iter_mut().enumerate() {
            let mut start_time = None;

            for buffer in drain_buffers.iter_mut() {
                let utc_time = match get_utc_time_from_buffer(&buffer.buffer) {
                    None => {
                        gst::error!(
                            CAT,
                            obj: state.streams[idx].sinkpad,
                            "No reference timestamp set on all buffers"
                        );
                        return Err(gst::FlowError::Error);
                    }
                    Some(utc_time) => utc_time,
                };

                // Convert PTS UTC time to DTS
                let mut utc_time_dts =
                    if let Some(composition_time_offset) = buffer.composition_time_offset {
                        if composition_time_offset >= 0 {
                            utc_time
                                .checked_sub((composition_time_offset as u64).nseconds())
                                .unwrap()
                        } else {
                            utc_time
                                .checked_add(((-composition_time_offset) as u64).nseconds())
                                .unwrap()
                        }
                    } else {
                        utc_time
                    };

                // Enforce monotonically increasing timestamps
                if utc_time_dts < state.streams[idx].current_utc_time {
                    gst::warning!(
                        CAT,
                        obj: state.streams[idx].sinkpad,
                        "Decreasing UTC DTS timestamp for buffer {} < {}",
                        utc_time_dts,
                        state.streams[idx].current_utc_time,
                    );
                    // Clamp to the last seen UTC DTS instead of going backwards.
                    utc_time_dts = state.streams[idx].current_utc_time;
                } else {
                    state.streams[idx].current_utc_time = utc_time_dts;
                }

                // New buffer timestamp is the UTC DTS relative to the stream start UTC time.
                let timestamp = utc_time_dts.checked_sub(start_utc_time).unwrap();

                gst::trace!(
                    CAT,
                    obj: state.streams[idx].sinkpad,
                    "Updating buffer timestamp from {} to relative UTC DTS time {} / absolute DTS time {}, UTC PTS time {}",
                    buffer.timestamp,
                    timestamp,
                    utc_time_dts,
                    utc_time,
                );

                buffer.timestamp = timestamp;
                if start_time.is_none() || start_time > Some(buffer.timestamp) {
                    start_time = Some(buffer.timestamp);
                }
            }

            // Update durations for all buffers except for the last in the fragment unless all
            // have the same duration anyway
            //
            // `common_duration` is Ok(None) before the first pair, Ok(Some(d)) while all
            // observed durations agree, and Err(()) once they differ.
            let mut common_duration = Ok(None);
            let mut drain_buffers_iter = drain_buffers.iter_mut().peekable();
            while let Some(buffer) = drain_buffers_iter.next() {
                let next_timestamp = drain_buffers_iter.peek().map(|b| b.timestamp);

                if let Some(next_timestamp) = next_timestamp {
                    // Duration is the gap to the following buffer's (updated) timestamp.
                    let duration = next_timestamp.saturating_sub(buffer.timestamp);
                    if common_duration == Ok(None) {
                        common_duration = Ok(Some(duration));
                    } else if common_duration != Ok(Some(duration)) {
                        common_duration = Err(());
                    }

                    gst::trace!(
                        CAT,
                        obj: state.streams[idx].sinkpad,
                        "Updating buffer with timestamp {} duration from {} to relative UTC duration {}",
                        buffer.timestamp,
                        buffer.duration,
                        duration,
                    );

                    buffer.duration = duration;
                } else if let Ok(Some(common_duration)) = common_duration {
                    // Last buffer: reuse the common duration if all earlier ones agreed.
                    gst::trace!(
                        CAT,
                        obj: state.streams[idx].sinkpad,
                        "Updating last buffer with timestamp {} duration from {} to common relative UTC duration {}",
                        buffer.timestamp,
                        buffer.duration,
                        common_duration,
                    );

                    buffer.duration = common_duration;
                } else {
                    // Last buffer with mixed durations (or a single buffer): keep as-is.
                    gst::trace!(
                        CAT,
                        obj: state.streams[idx].sinkpad,
                        "Keeping last buffer with timestamp {} duration at {}",
                        buffer.timestamp,
                        buffer.duration,
                    );
                }

                // Track the furthest absolute UTC end time across all streams.
                let end_utc_time = start_utc_time + buffer.timestamp + buffer.duration;
                if max_end_utc_time.is_none() || max_end_utc_time < Some(end_utc_time) {
                    max_end_utc_time = Some(end_utc_time);
                }
            }

            if let Some(start_time) = start_time {
                gst::debug!(CAT, obj: state.streams[idx].sinkpad, "Fragment starting at UTC time {}", start_time);
                timing_info.as_mut().unwrap().start_time = start_time;
            } else {
                // No buffers drained for this stream, so there must be no timing info either.
                assert!(timing_info.is_none());
            }
        }

        Ok(max_end_utc_time)
    }
|
|
|
|
|
|
|
|
#[allow(clippy::type_complexity)]
|
|
|
|
fn interleave_buffers(
|
|
|
|
&self,
|
|
|
|
settings: &Settings,
|
|
|
|
mut drained_streams: Vec<(
|
|
|
|
gst::Caps,
|
|
|
|
Option<super::FragmentTimingInfo>,
|
|
|
|
VecDeque<Buffer>,
|
|
|
|
)>,
|
|
|
|
) -> Result<
|
|
|
|
(
|
|
|
|
Vec<Buffer>,
|
|
|
|
Vec<(gst::Caps, Option<super::FragmentTimingInfo>)>,
|
|
|
|
),
|
|
|
|
gst::FlowError,
|
|
|
|
> {
|
2022-05-12 10:44:20 +00:00
|
|
|
let mut interleaved_buffers =
|
2022-09-28 16:35:47 +00:00
|
|
|
Vec::with_capacity(drained_streams.iter().map(|(_, _, bufs)| bufs.len()).sum());
|
|
|
|
while let Some((_idx, (_, _, bufs))) = drained_streams.iter_mut().enumerate().min_by(
|
|
|
|
|(a_idx, (_, _, a)), (b_idx, (_, _, b))| {
|
|
|
|
let (a, b) = match (a.front(), b.front()) {
|
|
|
|
(None, None) => return std::cmp::Ordering::Equal,
|
|
|
|
(None, _) => return std::cmp::Ordering::Greater,
|
|
|
|
(_, None) => return std::cmp::Ordering::Less,
|
|
|
|
(Some(a), Some(b)) => (a, b),
|
|
|
|
};
|
2022-05-17 12:25:28 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
match a.timestamp.cmp(&b.timestamp) {
|
|
|
|
std::cmp::Ordering::Equal => a_idx.cmp(b_idx),
|
|
|
|
cmp => cmp,
|
|
|
|
}
|
|
|
|
},
|
|
|
|
) {
|
|
|
|
let start_time = match bufs.front() {
|
2022-05-17 12:25:28 +00:00
|
|
|
None => {
|
|
|
|
// No more buffers now
|
|
|
|
break;
|
|
|
|
}
|
2022-06-30 13:29:09 +00:00
|
|
|
Some(buf) => buf.timestamp,
|
2022-05-17 12:25:28 +00:00
|
|
|
};
|
|
|
|
let mut current_end_time = start_time;
|
|
|
|
let mut dequeued_bytes = 0;
|
|
|
|
|
|
|
|
while settings
|
|
|
|
.interleave_bytes
|
2022-07-15 21:29:45 +00:00
|
|
|
.opt_ge(dequeued_bytes)
|
|
|
|
.unwrap_or(true)
|
|
|
|
&& settings
|
|
|
|
.interleave_time
|
|
|
|
.opt_ge(current_end_time.saturating_sub(start_time))
|
|
|
|
.unwrap_or(true)
|
2022-05-17 12:25:28 +00:00
|
|
|
{
|
2022-09-28 16:35:47 +00:00
|
|
|
if let Some(buffer) = bufs.pop_front() {
|
2022-06-30 13:29:09 +00:00
|
|
|
current_end_time = buffer.timestamp + buffer.duration;
|
2022-05-17 12:25:28 +00:00
|
|
|
dequeued_bytes += buffer.buffer.size() as u64;
|
|
|
|
interleaved_buffers.push(buffer);
|
|
|
|
} else {
|
|
|
|
// No buffers left in this stream, go to next stream
|
|
|
|
break;
|
2022-05-12 10:44:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-10-18 06:42:42 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
// All buffers should be consumed now
|
|
|
|
assert!(drained_streams.iter().all(|(_, _, bufs)| bufs.is_empty()));
|
2022-05-17 12:25:28 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
let streams = drained_streams
|
|
|
|
.into_iter()
|
|
|
|
.map(|(caps, timing_info, _)| (caps, timing_info))
|
|
|
|
.collect::<Vec<_>>();
|
|
|
|
|
|
|
|
Ok((interleaved_buffers, streams))
|
|
|
|
}
|
2022-05-12 10:44:20 +00:00
|
|
|
|
2022-09-28 16:35:47 +00:00
|
|
|
    /// Drains all queued buffers into a finished fragment.
    ///
    /// Returns the (optional) new output caps — present only when the stream header
    /// was created during this call — and the buffer list to push downstream, or
    /// `(None, None)` if there is nothing to drain yet.
    ///
    /// `timeout` / `at_eos` force draining even when not all streams have a filled
    /// fragment. Force-keyunit events for the next fragment are appended to
    /// `upstream_events` rather than sent directly.
    fn drain(
        &self,
        state: &mut State,
        settings: &Settings,
        timeout: bool,
        at_eos: bool,
        upstream_events: &mut Vec<(gst_base::AggregatorPad, gst::Event)>,
    ) -> Result<(Option<gst::Caps>, Option<gst::BufferList>), gst::FlowError> {
        if at_eos {
            gst::info!(CAT, imp: self, "Draining at EOS");
        } else if timeout {
            gst::info!(CAT, imp: self, "Draining at timeout");
        } else {
            // Normal operation: only drain once every non-EOS stream has a full fragment.
            for stream in &state.streams {
                if !stream.fragment_filled && !stream.sinkpad.is_eos() {
                    return Ok((None, None));
                }
            }
        }

        // Collect all buffers and their timing information that are to be drained right now.
        let (
            mut drained_streams,
            min_earliest_pts_position,
            min_earliest_pts,
            min_start_dts_position,
            fragment_end_pts,
        ) = self.drain_buffers(state, settings, timeout, at_eos)?;

        // Remove all GAP buffers before processing them further
        // (only buffers that are GAP *and* DROPPABLE *and* empty are dropped).
        for (_, _, buffers) in &mut drained_streams {
            buffers.retain(|buf| {
                !buf.buffer.flags().contains(gst::BufferFlags::GAP)
                    || !buf.buffer.flags().contains(gst::BufferFlags::DROPPABLE)
                    || buf.buffer.size() != 0
            });
        }

        // For ONVIF, replace all timestamps with timestamps based on UTC times.
        let max_end_utc_time =
            self.preprocess_drained_streams_onvif(state, &mut drained_streams)?;

        // Create header now if it was not created before and return the caps
        let mut caps = None;
        if state.stream_header.is_none() {
            let (_, new_caps) = self.update_header(state, settings, false)?.unwrap();
            caps = Some(new_caps);
        }

        // Interleave buffers according to the settings into a single vec
        let (mut interleaved_buffers, streams) =
            self.interleave_buffers(settings, drained_streams)?;

        let mut buffer_list = None;
        if interleaved_buffers.is_empty() {
            // Draining nothing is only valid at EOS (see drain_buffers).
            assert!(at_eos);
        } else {
            // If there are actual buffers to output then create headers as needed and create a
            // bufferlist for all buffers that have to be output.
            let min_earliest_pts_position = min_earliest_pts_position.unwrap();
            let min_earliest_pts = min_earliest_pts.unwrap();
            let fragment_end_pts = fragment_end_pts.unwrap();

            // Emit the init header (moov) once, ahead of the first fragment.
            let mut fmp4_header = None;
            if !state.sent_headers {
                let mut buffer = state.stream_header.as_ref().unwrap().copy();
                {
                    let buffer = buffer.get_mut().unwrap();

                    buffer.set_pts(min_earliest_pts_position);
                    buffer.set_dts(min_start_dts_position);

                    // Header is DISCONT|HEADER
                    buffer.set_flags(gst::BufferFlags::DISCONT | gst::BufferFlags::HEADER);
                }

                fmp4_header = Some(buffer);

                state.sent_headers = true;
            }

            // TODO: Write prft boxes before moof
            // TODO: Write sidx boxes before moof and rewrite once offsets are known

            // Fragment sequence numbers are 1-based.
            if state.sequence_number == 0 {
                state.sequence_number = 1;
            }
            let sequence_number = state.sequence_number;
            state.sequence_number += 1;
            let (mut fmp4_fragment_header, moof_offset) =
                boxes::create_fmp4_fragment_header(super::FragmentHeaderConfiguration {
                    variant: self.obj().class().as_ref().variant,
                    sequence_number,
                    streams: streams.as_slice(),
                    buffers: interleaved_buffers.as_slice(),
                })
                .map_err(|err| {
                    gst::error!(
                        CAT,
                        imp: self,
                        "Failed to create FMP4 fragment header: {}",
                        err
                    );
                    gst::FlowError::Error
                })?;

            {
                let buffer = fmp4_fragment_header.get_mut().unwrap();
                buffer.set_pts(min_earliest_pts_position);
                buffer.set_dts(min_start_dts_position);
                buffer.set_duration(fragment_end_pts.checked_sub(min_earliest_pts));

                // Fragment header is HEADER
                buffer.set_flags(gst::BufferFlags::HEADER);

                // Copy metas from the first actual buffer to the fragment header. This allows
                // getting things like the reference timestamp meta or the timecode meta to identify
                // the fragment.
                let _ = interleaved_buffers[0].buffer.copy_into(
                    buffer,
                    gst::BufferCopyFlags::META,
                    0,
                    None,
                );
            }

            // Absolute byte offset of the moof box in the output stream, used for mfra.
            let moof_offset = state.current_offset
                + fmp4_header.as_ref().map(|h| h.size()).unwrap_or(0) as u64
                + moof_offset;

            let buffers_len = interleaved_buffers.len();
            for (idx, buffer) in interleaved_buffers.iter_mut().enumerate() {
                // Fix up buffer flags, all other buffers are DELTA_UNIT
                let buffer_ref = buffer.buffer.make_mut();
                buffer_ref.unset_flags(gst::BufferFlags::all());
                buffer_ref.set_flags(gst::BufferFlags::DELTA_UNIT);

                // Set the marker flag for the last buffer of the segment
                if idx == buffers_len - 1 {
                    buffer_ref.set_flags(gst::BufferFlags::MARKER);
                }
            }

            // Assemble: [moov]? + moof + media buffers, tracking the running byte offset.
            buffer_list = Some(
                fmp4_header
                    .into_iter()
                    .chain(Some(fmp4_fragment_header))
                    .chain(interleaved_buffers.into_iter().map(|buffer| buffer.buffer))
                    .inspect(|b| {
                        state.current_offset += b.size() as u64;
                    })
                    .collect::<gst::BufferList>(),
            );

            // Write mfra only for the main stream, and if there are no buffers for the main stream
            // in this segment then don't write anything.
            if let Some((_caps, Some(ref timing_info))) = streams.get(0) {
                state.fragment_offsets.push(super::FragmentOffset {
                    time: timing_info.start_time,
                    offset: moof_offset,
                });
            }

            state.end_pts = Some(fragment_end_pts);
            state.end_utc_time = max_end_utc_time;

            // Update for the start PTS of the next fragment
            gst::info!(
                CAT,
                imp: self,
                "Starting new fragment at {}",
                fragment_end_pts,
            );
            state.fragment_start_pts = Some(fragment_end_pts);

            gst::debug!(
                CAT,
                imp: self,
                "Sending force-keyunit events for running time {}",
                fragment_end_pts + settings.fragment_duration,
            );

            // Ask upstream encoders for a keyframe at the next fragment boundary.
            let fku = gst_video::UpstreamForceKeyUnitEvent::builder()
                .running_time(fragment_end_pts + settings.fragment_duration)
                .all_headers(true)
                .build();

            for stream in &state.streams {
                upstream_events.push((stream.sinkpad.clone(), fku.clone()));
            }

            // Reset timeout delay now that we've output an actual fragment
            state.timeout_delay = gst::ClockTime::ZERO;
        }

        // At EOS, optionally append the mfra (fragment random access) box.
        if settings.write_mfra && at_eos {
            match boxes::create_mfra(&streams[0].0, &state.fragment_offsets) {
                Ok(mut mfra) => {
                    {
                        let mfra = mfra.get_mut().unwrap();
                        // mfra is HEADER|DELTA_UNIT like other boxes
                        mfra.set_flags(gst::BufferFlags::HEADER | gst::BufferFlags::DELTA_UNIT);
                    }

                    if buffer_list.is_none() {
                        buffer_list = Some(gst::BufferList::new_sized(1));
                    }
                    buffer_list.as_mut().unwrap().get_mut().unwrap().add(mfra);
                }
                Err(err) => {
                    // Best-effort: a failed mfra does not fail the whole drain.
                    gst::error!(CAT, imp: self, "Failed to create mfra box: {}", err);
                }
            }
        }

        // TODO: Write edit list at EOS
        // TODO: Rewrite bitrates at EOS

        Ok((caps, buffer_list))
    }
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
fn create_streams(&self, state: &mut State) -> Result<(), gst::FlowError> {
|
|
|
|
for pad in self
|
2022-10-23 20:03:22 +00:00
|
|
|
.obj()
|
2022-05-12 10:44:20 +00:00
|
|
|
.sink_pads()
|
|
|
|
.into_iter()
|
|
|
|
.map(|pad| pad.downcast::<gst_base::AggregatorPad>().unwrap())
|
|
|
|
{
|
|
|
|
let caps = match pad.current_caps() {
|
|
|
|
Some(caps) => caps,
|
|
|
|
None => {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::warning!(CAT, obj: pad, "Skipping pad without caps");
|
2022-05-12 10:44:20 +00:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::info!(CAT, obj: pad, "Configuring caps {:?}", caps);
|
2022-05-12 10:44:20 +00:00
|
|
|
|
|
|
|
let s = caps.structure(0).unwrap();
|
|
|
|
|
|
|
|
let mut intra_only = false;
|
|
|
|
match s.name() {
|
|
|
|
"video/x-h264" | "video/x-h265" => {
|
|
|
|
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::error!(CAT, obj: pad, "Received caps without codec_data");
|
2022-05-12 10:44:20 +00:00
|
|
|
return Err(gst::FlowError::NotNegotiated);
|
|
|
|
}
|
|
|
|
}
|
2022-10-25 06:55:50 +00:00
|
|
|
"video/x-vp9" => (),
|
2022-05-13 08:45:01 +00:00
|
|
|
"image/jpeg" => {
|
|
|
|
intra_only = true;
|
|
|
|
}
|
2022-05-12 10:44:20 +00:00
|
|
|
"audio/mpeg" => {
|
|
|
|
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
|
2022-10-23 18:46:18 +00:00
|
|
|
gst::error!(CAT, obj: pad, "Received caps without codec_data");
|
2022-05-12 10:44:20 +00:00
|
|
|
return Err(gst::FlowError::NotNegotiated);
|
|
|
|
}
|
|
|
|
intra_only = true;
|
|
|
|
}
|
2022-05-13 08:45:01 +00:00
|
|
|
"audio/x-alaw" | "audio/x-mulaw" => {
|
|
|
|
intra_only = true;
|
|
|
|
}
|
|
|
|
"audio/x-adpcm" => {
|
|
|
|
intra_only = true;
|
|
|
|
}
|
2022-05-13 09:13:54 +00:00
|
|
|
"application/x-onvif-metadata" => {
|
|
|
|
intra_only = true;
|
|
|
|
}
|
2022-05-12 10:44:20 +00:00
|
|
|
_ => unreachable!(),
|
|
|
|
}
|
|
|
|
|
|
|
|
state.streams.push(Stream {
|
|
|
|
sinkpad: pad,
|
|
|
|
caps,
|
|
|
|
intra_only,
|
|
|
|
queued_gops: VecDeque::new(),
|
|
|
|
fragment_filled: false,
|
|
|
|
dts_offset: None,
|
2022-06-01 17:04:58 +00:00
|
|
|
current_position: gst::ClockTime::ZERO,
|
2022-05-27 10:27:10 +00:00
|
|
|
current_utc_time: gst::ClockTime::ZERO,
|
2022-05-12 10:44:20 +00:00
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
if state.streams.is_empty() {
|
2022-10-09 13:06:59 +00:00
|
|
|
gst::error!(CAT, imp: self, "No streams available");
|
2022-05-12 10:44:20 +00:00
|
|
|
return Err(gst::FlowError::Error);
|
|
|
|
}
|
|
|
|
|
2022-05-13 09:13:54 +00:00
|
|
|
// Sort video streams first and then audio streams and then metadata streams, and each group by pad name.
|
2022-05-12 10:44:20 +00:00
|
|
|
state.streams.sort_by(|a, b| {
|
2022-05-13 09:13:54 +00:00
|
|
|
let order_of_caps = |caps: &gst::CapsRef| {
|
2022-05-12 10:44:20 +00:00
|
|
|
let s = caps.structure(0).unwrap();
|
|
|
|
|
|
|
|
if s.name().starts_with("video/") {
|
2022-05-13 09:13:54 +00:00
|
|
|
0
|
2022-05-12 10:44:20 +00:00
|
|
|
} else if s.name().starts_with("audio/") {
|
2022-05-13 09:13:54 +00:00
|
|
|
1
|
|
|
|
} else if s.name().starts_with("application/x-onvif-metadata") {
|
|
|
|
2
|
2022-05-12 10:44:20 +00:00
|
|
|
} else {
|
|
|
|
unimplemented!();
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2022-05-13 09:13:54 +00:00
|
|
|
let st_a = order_of_caps(&a.caps);
|
|
|
|
let st_b = order_of_caps(&b.caps);
|
2022-05-12 10:44:20 +00:00
|
|
|
|
|
|
|
if st_a == st_b {
|
|
|
|
return a.sinkpad.name().cmp(&b.sinkpad.name());
|
|
|
|
}
|
|
|
|
|
2022-05-13 09:13:54 +00:00
|
|
|
st_a.cmp(&st_b)
|
2022-05-12 10:44:20 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2021-10-18 06:42:42 +00:00
|
|
|
    /// Creates (or re-creates at EOS) the fMP4 initialization header (`moov` etc.)
    /// and returns it as a buffer list together with the source pad caps.
    ///
    /// Returns `Ok(None)` if `at_eos` is set but the configured header-update-mode
    /// is `None`, i.e. no updated header should be written at the end of the stream.
    fn update_header(
        &self,
        state: &mut State,
        settings: &Settings,
        at_eos: bool,
    ) -> Result<Option<(gst::BufferList, gst::Caps)>, gst::FlowError> {
        let aggregator = self.obj();
        let class = aggregator.class();
        // Variant (ISO/CMAF/DASH/ONVIF) is stored in the class struct by class_init.
        let variant = class.as_ref().variant;

        if settings.header_update_mode == super::HeaderUpdateMode::None && at_eos {
            return Ok(None);
        }

        // At EOS everything must have been drained already; a non-empty GOP queue
        // here would be a bug in the draining logic.
        assert!(!at_eos || state.streams.iter().all(|s| s.queued_gops.is_empty()));

        // ONVIF tracks duration in UTC time, all other variants in PTS.
        let duration = if variant == super::Variant::ONVIF {
            state
                .end_utc_time
                .opt_checked_sub(state.start_utc_time)
                .ok()
                .flatten()
        } else {
            state
                .end_pts
                .opt_checked_sub(state.earliest_pts)
                .ok()
                .flatten()
        };

        let streams = state
            .streams
            .iter()
            .map(|s| s.caps.clone())
            .collect::<Vec<_>>();

        let mut buffer = boxes::create_fmp4_header(super::HeaderConfiguration {
            variant,
            update: at_eos,
            streams: streams.as_slice(),
            write_mehd: settings.write_mehd,
            // The duration is only known (and only written) at EOS.
            duration: if at_eos { duration } else { None },
            // Convert UNIX-epoch nanoseconds to 100ns units since Jan 1 1601,
            // as used by the ONVIF UTC timing box.
            start_utc_time: state
                .start_utc_time
                .map(|unix| unix.nseconds() / 100 + UNIX_1601_OFFSET * 10_000_000),
        })
        .map_err(|err| {
            gst::error!(CAT, imp: self, "Failed to create FMP4 header: {}", err);
            gst::FlowError::Error
        })?;

        {
            let buffer = buffer.get_mut().unwrap();

            // No timestamps

            // Header is DISCONT|HEADER
            buffer.set_flags(gst::BufferFlags::DISCONT | gst::BufferFlags::HEADER);
        }

        // Remember stream header for later
        state.stream_header = Some(buffer.clone());

        let variant = match variant {
            super::Variant::ISO | super::Variant::DASH | super::Variant::ONVIF => "iso-fragmented",
            super::Variant::CMAF => "cmaf",
        };
        let caps = gst::Caps::builder("video/quicktime")
            .field("variant", variant)
            .field("streamheader", gst::Array::new(&[&buffer]))
            .build();

        let mut list = gst::BufferList::new_sized(1);
        {
            let list = list.get_mut().unwrap();
            list.add(buffer);
        }

        Ok(Some((list, caps)))
    }
|
|
|
|
}
|
|
|
|
|
|
|
|
// GObject subclass registration for the abstract base muxer type.
// The custom `Class` struct carries the per-variant configuration.
#[glib::object_subclass]
impl ObjectSubclass for FMP4Mux {
    const NAME: &'static str = "GstFMP4Mux";
    type Type = super::FMP4Mux;
    type ParentType = gst_base::Aggregator;
    type Class = Class;
}
|
|
|
|
|
|
|
|
impl ObjectImpl for FMP4Mux {
|
|
|
|
fn properties() -> &'static [glib::ParamSpec] {
|
|
|
|
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
|
|
|
|
vec![
|
|
|
|
// TODO: Add chunk-duration property separate from fragment-size
|
2022-08-18 12:04:15 +00:00
|
|
|
glib::ParamSpecUInt64::builder("fragment-duration")
|
|
|
|
.nick("Fragment Duration")
|
|
|
|
.blurb("Duration for each FMP4 fragment")
|
|
|
|
.default_value(DEFAULT_FRAGMENT_DURATION.nseconds())
|
|
|
|
.mutable_ready()
|
|
|
|
.build(),
|
2022-09-05 08:45:47 +00:00
|
|
|
glib::ParamSpecEnum::builder::<super::HeaderUpdateMode>("header-update-mode", DEFAULT_HEADER_UPDATE_MODE)
|
2022-08-18 12:04:15 +00:00
|
|
|
.nick("Header update mode")
|
|
|
|
.blurb("Mode for updating the header at the end of the stream")
|
|
|
|
.mutable_ready()
|
|
|
|
.build(),
|
|
|
|
glib::ParamSpecBoolean::builder("write-mfra")
|
|
|
|
.nick("Write mfra box")
|
|
|
|
.blurb("Write fragment random access box at the end of the stream")
|
|
|
|
.default_value(DEFAULT_WRITE_MFRA)
|
|
|
|
.mutable_ready()
|
|
|
|
.build(),
|
|
|
|
glib::ParamSpecBoolean::builder("write-mehd")
|
|
|
|
.nick("Write mehd box")
|
|
|
|
.blurb("Write movie extends header box with the duration at the end of the stream (needs a header-update-mode enabled)")
|
|
|
|
.default_value(DEFAULT_WRITE_MFRA)
|
|
|
|
.mutable_ready()
|
|
|
|
.build(),
|
|
|
|
glib::ParamSpecUInt64::builder("interleave-bytes")
|
|
|
|
.nick("Interleave Bytes")
|
|
|
|
.blurb("Interleave between streams in bytes")
|
|
|
|
.default_value(DEFAULT_INTERLEAVE_BYTES.unwrap_or(0))
|
|
|
|
.mutable_ready()
|
|
|
|
.build(),
|
|
|
|
glib::ParamSpecUInt64::builder("interleave-time")
|
|
|
|
.nick("Interleave Time")
|
|
|
|
.blurb("Interleave between streams in nanoseconds")
|
|
|
|
.default_value(DEFAULT_INTERLEAVE_TIME.map(gst::ClockTime::nseconds).unwrap_or(u64::MAX))
|
|
|
|
.mutable_ready()
|
|
|
|
.build(),
|
2021-10-18 06:42:42 +00:00
|
|
|
]
|
|
|
|
});
|
|
|
|
|
|
|
|
&*PROPERTIES
|
|
|
|
}
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
|
2021-10-18 06:42:42 +00:00
|
|
|
match pspec.name() {
|
|
|
|
"fragment-duration" => {
|
|
|
|
let mut settings = self.settings.lock().unwrap();
|
2022-05-05 12:09:19 +00:00
|
|
|
let fragment_duration = value.get().expect("type checked upstream");
|
|
|
|
if settings.fragment_duration != fragment_duration {
|
|
|
|
settings.fragment_duration = fragment_duration;
|
|
|
|
drop(settings);
|
2022-10-23 20:03:22 +00:00
|
|
|
self.obj().set_latency(fragment_duration, None);
|
2022-05-05 12:09:19 +00:00
|
|
|
}
|
2021-10-18 06:42:42 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
"header-update-mode" => {
|
|
|
|
let mut settings = self.settings.lock().unwrap();
|
|
|
|
settings.header_update_mode = value.get().expect("type checked upstream");
|
|
|
|
}
|
|
|
|
|
|
|
|
"write-mfra" => {
|
|
|
|
let mut settings = self.settings.lock().unwrap();
|
|
|
|
settings.write_mfra = value.get().expect("type checked upstream");
|
|
|
|
}
|
|
|
|
|
|
|
|
"write-mehd" => {
|
|
|
|
let mut settings = self.settings.lock().unwrap();
|
|
|
|
settings.write_mehd = value.get().expect("type checked upstream");
|
|
|
|
}
|
|
|
|
|
2022-05-12 10:44:20 +00:00
|
|
|
"interleave-bytes" => {
|
|
|
|
let mut settings = self.settings.lock().unwrap();
|
|
|
|
settings.interleave_bytes = match value.get().expect("type checked upstream") {
|
|
|
|
0 => None,
|
|
|
|
v => Some(v),
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
"interleave-time" => {
|
|
|
|
let mut settings = self.settings.lock().unwrap();
|
|
|
|
settings.interleave_time = match value.get().expect("type checked upstream") {
|
|
|
|
Some(gst::ClockTime::ZERO) | None => None,
|
|
|
|
v => v,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2021-10-18 06:42:42 +00:00
|
|
|
_ => unimplemented!(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
|
2021-10-18 06:42:42 +00:00
|
|
|
match pspec.name() {
|
|
|
|
"fragment-duration" => {
|
|
|
|
let settings = self.settings.lock().unwrap();
|
|
|
|
settings.fragment_duration.to_value()
|
|
|
|
}
|
|
|
|
|
|
|
|
"header-update-mode" => {
|
|
|
|
let settings = self.settings.lock().unwrap();
|
|
|
|
settings.header_update_mode.to_value()
|
|
|
|
}
|
|
|
|
|
|
|
|
"write-mfra" => {
|
|
|
|
let settings = self.settings.lock().unwrap();
|
|
|
|
settings.write_mfra.to_value()
|
|
|
|
}
|
|
|
|
|
|
|
|
"write-mehd" => {
|
|
|
|
let settings = self.settings.lock().unwrap();
|
|
|
|
settings.write_mehd.to_value()
|
|
|
|
}
|
|
|
|
|
2022-05-12 10:44:20 +00:00
|
|
|
"interleave-bytes" => {
|
|
|
|
let settings = self.settings.lock().unwrap();
|
|
|
|
settings.interleave_bytes.unwrap_or(0).to_value()
|
|
|
|
}
|
|
|
|
|
|
|
|
"interleave-time" => {
|
|
|
|
let settings = self.settings.lock().unwrap();
|
|
|
|
settings.interleave_time.to_value()
|
|
|
|
}
|
|
|
|
|
2021-10-18 06:42:42 +00:00
|
|
|
_ => unimplemented!(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
    /// Object construction hook: creates the always-present sink pads (for the
    /// single-stream variants whose sink template is Always rather than Request)
    /// and publishes the initial latency.
    fn constructed(&self) {
        self.parent_constructed();

        let obj = self.obj();
        let class = obj.class();
        // Instantiate one pad per Always sink template of the concrete subclass.
        for templ in class.pad_template_list().filter(|templ| {
            templ.presence() == gst::PadPresence::Always
                && templ.direction() == gst::PadDirection::Sink
        }) {
            let sinkpad =
                gst::PadBuilder::<gst_base::AggregatorPad>::from_template(&templ, Some("sink"))
                    // ACCEPT_INTERSECT: accept caps that merely intersect the pad
                    // caps instead of requiring a subset.
                    .flags(gst::PadFlags::ACCEPT_INTERSECT)
                    .build();

            obj.add_pad(&sinkpad).unwrap();
        }

        // Latency corresponds to one fragment of buffering (default settings).
        obj.set_latency(Settings::default().fragment_duration, None);
    }
|
|
|
|
}
|
|
|
|
|
|
|
|
impl GstObjectImpl for FMP4Mux {}

impl ElementImpl for FMP4Mux {
    /// Refuses new request pads once the header has been generated, because the
    /// set of streams is baked into the already-written `moov` box.
    fn request_new_pad(
        &self,
        templ: &gst::PadTemplate,
        name: Option<&str>,
        caps: Option<&gst::Caps>,
    ) -> Option<gst::Pad> {
        let state = self.state.lock().unwrap();
        if state.stream_header.is_some() {
            gst::error!(
                CAT,
                imp: self,
                "Can't request new pads after header was generated"
            );
            return None;
        }

        self.parent_request_new_pad(templ, name, caps)
    }
}
|
|
|
|
|
|
|
|
impl AggregatorImpl for FMP4Mux {
|
2022-10-09 13:06:59 +00:00
|
|
|
fn next_time(&self) -> Option<gst::ClockTime> {
|
2022-05-17 12:05:19 +00:00
|
|
|
let state = self.state.lock().unwrap();
|
2022-10-01 17:52:18 +00:00
|
|
|
state.fragment_start_pts.opt_add(state.timeout_delay)
|
2022-05-17 12:05:19 +00:00
|
|
|
}
|
|
|
|
|
2022-05-05 12:09:19 +00:00
|
|
|
    /// Handles sink pad queries; caps queries are answered from the pad's
    /// current caps (once set) so that caps cannot change mid-stream.
    fn sink_query(
        &self,
        aggregator_pad: &gst_base::AggregatorPad,
        query: &mut gst::QueryRef,
    ) -> bool {
        use gst::QueryViewMut;

        gst::trace!(CAT, obj: aggregator_pad, "Handling query {:?}", query);

        match query.view_mut() {
            QueryViewMut::Caps(q) => {
                // Before negotiation: template caps. After: only the negotiated caps.
                let allowed_caps = aggregator_pad
                    .current_caps()
                    .unwrap_or_else(|| aggregator_pad.pad_template_caps());

                if let Some(filter_caps) = q.filter() {
                    // CapsIntersectMode::First keeps the filter's order of preference.
                    let res = filter_caps
                        .intersect_with_mode(&allowed_caps, gst::CapsIntersectMode::First);
                    q.set_result(&res);
                } else {
                    q.set_result(&allowed_caps);
                }

                true
            }
            _ => self.parent_sink_query(aggregator_pad, query),
        }
    }
|
|
|
|
|
|
|
|
    /// Pre-queue event handling: rewrites any non-TIME segment into a default
    /// TIME segment (keeping the original seqnum) before it enters the queue, so
    /// that downstream handling can rely on TIME segments everywhere.
    fn sink_event_pre_queue(
        &self,
        aggregator_pad: &gst_base::AggregatorPad,
        mut event: gst::Event,
    ) -> Result<gst::FlowSuccess, gst::FlowError> {
        use gst::EventView;

        gst::trace!(CAT, obj: aggregator_pad, "Handling event {:?}", event);

        match event.view() {
            EventView::Segment(ev) => {
                if ev.segment().format() != gst::Format::Time {
                    gst::warning!(
                        CAT,
                        obj: aggregator_pad,
                        "Received non-TIME segment, replacing with default TIME segment"
                    );
                    let segment = gst::FormattedSegment::<gst::ClockTime>::new();
                    // Keep the seqnum so the replacement is tied to the original event.
                    event = gst::event::Segment::builder(&segment)
                        .seqnum(event.seqnum())
                        .build();
                }
                self.parent_sink_event_pre_queue(aggregator_pad, event)
            }
            _ => self.parent_sink_event_pre_queue(aggregator_pad, event),
        }
    }
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
    /// Post-queue event handling; forwards segments to the source pad only for
    /// single-stream variants (others re-timestamp to running time instead).
    fn sink_event(&self, aggregator_pad: &gst_base::AggregatorPad, event: gst::Event) -> bool {
        use gst::EventView;

        gst::trace!(CAT, obj: aggregator_pad, "Handling event {:?}", event);

        match event.view() {
            EventView::Segment(ev) => {
                // Already fixed-up above to always be a TIME segment
                let segment = ev
                    .segment()
                    .clone()
                    .downcast::<gst::ClockTime>()
                    .expect("non-TIME segment");
                gst::info!(CAT, obj: aggregator_pad, "Received segment {:?}", segment);

                // Only forward the segment event verbatim if this is a single stream variant.
                // Otherwise we have to produce a default segment and re-timestamp all buffers
                // with their running time.
                let aggregator = self.obj();
                let class = aggregator.class();
                if class.as_ref().variant.is_single_stream() {
                    aggregator.update_segment(&segment);
                }

                self.parent_sink_event(aggregator_pad, event)
            }
            EventView::Tag(_ev) => {
                // TODO: Maybe store for putting into the headers of the next fragment?

                self.parent_sink_event(aggregator_pad, event)
            }
            _ => self.parent_sink_event(aggregator_pad, event),
        }
    }
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
fn src_query(&self, query: &mut gst::QueryRef) -> bool {
|
2022-05-05 12:09:19 +00:00
|
|
|
use gst::QueryViewMut;
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
gst::trace!(CAT, imp: self, "Handling query {:?}", query);
|
2022-05-05 12:09:19 +00:00
|
|
|
|
|
|
|
match query.view_mut() {
|
|
|
|
QueryViewMut::Seeking(q) => {
|
|
|
|
// We can't really handle seeking, it would break everything
|
2022-07-04 16:04:11 +00:00
|
|
|
q.set(false, gst::ClockTime::ZERO, gst::ClockTime::NONE);
|
2022-05-05 12:09:19 +00:00
|
|
|
true
|
|
|
|
}
|
2022-10-09 13:06:59 +00:00
|
|
|
_ => self.parent_src_query(query),
|
2022-05-05 12:09:19 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
fn src_event(&self, event: gst::Event) -> bool {
|
2022-05-05 12:09:19 +00:00
|
|
|
use gst::EventView;
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
gst::trace!(CAT, imp: self, "Handling event {:?}", event);
|
2022-05-05 12:09:19 +00:00
|
|
|
|
|
|
|
match event.view() {
|
|
|
|
EventView::Seek(_ev) => false,
|
2022-10-09 13:06:59 +00:00
|
|
|
_ => self.parent_src_event(event),
|
2022-05-05 12:09:19 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
    /// Flush handling: drops all queued GOPs and per-stream tracking state, and
    /// resets the output byte offset / fragment offset table.
    fn flush(&self) -> Result<gst::FlowSuccess, gst::FlowError> {
        self.parent_flush()?;

        let mut state = self.state.lock().unwrap();

        for stream in &mut state.streams {
            stream.queued_gops.clear();
            stream.dts_offset = None;
            stream.current_position = gst::ClockTime::ZERO;
            stream.current_utc_time = gst::ClockTime::ZERO;
            stream.fragment_filled = false;
        }

        // NOTE: stream_header / earliest_pts are intentionally kept — only the
        // queued data and offsets are discarded here, not the negotiated setup.
        state.current_offset = 0;
        state.fragment_offsets.clear();

        Ok(gst::FlowSuccess::Ok)
    }
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
fn stop(&self) -> Result<(), gst::ErrorMessage> {
|
|
|
|
gst::trace!(CAT, imp: self, "Stopping");
|
2022-05-05 12:09:19 +00:00
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
let _ = self.parent_stop();
|
2022-05-12 10:44:20 +00:00
|
|
|
|
2022-05-05 12:09:19 +00:00
|
|
|
*self.state.lock().unwrap() = State::default();
|
2022-05-12 10:44:20 +00:00
|
|
|
|
2022-05-05 12:09:19 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
    /// Startup hook: installs the offset TIME segment for multi-stream variants
    /// and resets the muxing state.
    fn start(&self) -> Result<(), gst::ErrorMessage> {
        gst::trace!(CAT, imp: self, "Starting");

        self.parent_start()?;

        // For non-single-stream variants configure a default segment that allows for negative
        // DTS so that we can correctly re-timestamp buffers with their running times.
        let aggregator = self.obj();
        let class = aggregator.class();
        if !class.as_ref().variant.is_single_stream() {
            // SEGMENT_OFFSET (see top of file) shifts everything far into the
            // future so DTS < PTS never underflows.
            let mut segment = gst::FormattedSegment::<gst::ClockTime>::new();
            segment.set_start(SEGMENT_OFFSET);
            segment.set_position(SEGMENT_OFFSET);
            aggregator.update_segment(&segment);
        }

        *self.state.lock().unwrap() = State::default();

        Ok(())
    }
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
    /// Skips the Aggregator's default caps negotiation: the source pad caps are
    /// set manually (via `set_src_caps`) once the header has been generated.
    fn negotiate(&self) -> bool {
        true
    }
|
|
|
|
|
2022-10-09 13:06:59 +00:00
|
|
|
    /// Main aggregation entry point, called by the Aggregator base class.
    ///
    /// One call: queues pending input into per-stream GOP queues, establishes
    /// the global earliest PTS / first fragment start once possible, drains a
    /// complete fragment if one is ready, pushes the result downstream and —
    /// when all sink pads are EOS — performs final header-update handling.
    fn aggregate(&self, timeout: bool) -> Result<gst::FlowSuccess, gst::FlowError> {
        let settings = self.settings.lock().unwrap().clone();

        // Events (e.g. force-keyunit) collected while the state lock is held;
        // they are pushed upstream only after the lock is released.
        let mut upstream_events = vec![];

        let all_eos;
        let (caps, buffers) = {
            let mut state = self.state.lock().unwrap();

            // Create streams
            if state.streams.is_empty() {
                self.create_streams(&mut state)?;
            }

            // Queue buffers from all streams that are not filled for the current fragment yet
            //
            // Always take a buffer from the stream with the earliest queued buffer to keep the
            // fill-level at all sinkpads in sync.
            let fragment_start_pts = state.fragment_start_pts;

            while let Some((idx, stream)) = self.find_earliest_stream(&mut state, timeout)? {
                // Can only happen if the stream was flushed in the meantime
                let buffer = match stream.sinkpad.pop_buffer() {
                    Some(buffer) => buffer,
                    None => continue,
                };

                // Can only happen if the stream was flushed in the meantime
                let segment = match stream
                    .sinkpad
                    .segment()
                    .clone()
                    .downcast::<gst::ClockTime>()
                    .ok()
                {
                    Some(segment) => segment,
                    None => {
                        gst::error!(CAT, obj: stream.sinkpad, "Got buffer before segment");
                        return Err(gst::FlowError::Error);
                    }
                };

                // Queue up the buffer and update GOP tracking state
                self.queue_gops(idx, stream, &segment, buffer)?;

                // Check if this stream is filled enough now.
                // A GOP with `final_end_pts` has a known, finalized end timestamp.
                if let Some((queued_end_pts, fragment_start_pts)) = Option::zip(
                    stream
                        .queued_gops
                        .iter()
                        .find(|gop| gop.final_end_pts)
                        .map(|gop| gop.end_pts),
                    fragment_start_pts,
                ) {
                    if queued_end_pts.saturating_sub(fragment_start_pts)
                        >= settings.fragment_duration
                    {
                        gst::debug!(CAT, obj: stream.sinkpad, "Stream queued enough data for this fragment");
                        stream.fragment_filled = true;
                    }
                }
            }

            // Calculate the earliest PTS after queueing input if we can now.
            // It is only final once every stream has contributed a candidate.
            if state.earliest_pts.is_none() {
                let mut earliest_pts = None;

                for stream in &state.streams {
                    let stream_earliest_pts = match stream.queued_gops.back() {
                        None => {
                            // A stream with no data yet blocks the decision
                            // (unless we are timing out, handled below).
                            earliest_pts = None;
                            break;
                        }
                        Some(oldest_gop) => {
                            if !timeout && !oldest_gop.final_earliest_pts {
                                earliest_pts = None;
                                break;
                            }

                            oldest_gop.earliest_pts
                        }
                    };

                    // Keep the minimum over all streams.
                    if earliest_pts.opt_gt(stream_earliest_pts).unwrap_or(true) {
                        earliest_pts = Some(stream_earliest_pts);
                    }
                }

                if let Some(earliest_pts) = earliest_pts {
                    gst::info!(CAT, imp: self, "Got earliest PTS {}", earliest_pts);
                    state.earliest_pts = Some(earliest_pts);
                    state.fragment_start_pts = Some(earliest_pts);

                    gst::debug!(
                        CAT,
                        imp: self,
                        "Sending first force-keyunit event for running time {}",
                        earliest_pts + settings.fragment_duration,
                    );

                    // Ask upstream encoders for a keyframe at the next fragment
                    // boundary so fragments can start on a keyframe.
                    let fku = gst_video::UpstreamForceKeyUnitEvent::builder()
                        .running_time(earliest_pts + settings.fragment_duration)
                        .all_headers(true)
                        .build();

                    for stream in &mut state.streams {
                        upstream_events.push((stream.sinkpad.clone(), fku.clone()));

                        // Check if this stream is filled enough now.
                        if let Some(queued_end_pts) = stream
                            .queued_gops
                            .iter()
                            .find(|gop| gop.final_end_pts)
                            .map(|gop| gop.end_pts)
                        {
                            if queued_end_pts.saturating_sub(earliest_pts)
                                >= settings.fragment_duration
                            {
                                gst::debug!(CAT, obj: stream.sinkpad, "Stream queued enough data for this fragment");
                                stream.fragment_filled = true;
                            }
                        }
                    }
                }
            }

            all_eos = state.streams.iter().all(|stream| stream.sinkpad.is_eos());
            if all_eos {
                gst::debug!(CAT, imp: self, "All streams are EOS now");
            }

            // If enough GOPs were queued, drain and create the output fragment
            match self.drain(
                &mut state,
                &settings,
                timeout,
                all_eos,
                &mut upstream_events,
            ) {
                Ok(res) => res,
                Err(gst_base::AGGREGATOR_FLOW_NEED_DATA) => {
                    gst::element_imp_warning!(
                        self,
                        gst::StreamError::Format,
                        ["Longer GOPs than fragment duration"]
                    );
                    // Postpone the next live timeout by a second to give the
                    // over-long GOP a chance to complete.
                    state.timeout_delay += 1.seconds();

                    drop(state);
                    for (sinkpad, event) in upstream_events {
                        sinkpad.push_event(event);
                    }
                    return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
                }
                Err(err) => return Err(err),
            }
        };

        // State lock released: now actually send the collected upstream events.
        for (sinkpad, event) in upstream_events {
            sinkpad.push_event(event);
        }

        if let Some(caps) = caps {
            gst::debug!(CAT, imp: self, "Setting caps on source pad: {:?}", caps);
            self.obj().set_src_caps(&caps);
        }

        if let Some(buffers) = buffers {
            gst::trace!(CAT, imp: self, "Pushing buffer list {:?}", buffers);
            self.obj().finish_buffer_list(buffers)?;
        }

        if all_eos {
            gst::debug!(CAT, imp: self, "Doing EOS handling");

            if settings.header_update_mode != super::HeaderUpdateMode::None {
                let updated_header =
                    self.update_header(&mut self.state.lock().unwrap(), &settings, true);
                match updated_header {
                    Ok(Some((buffer_list, caps))) => {
                        match settings.header_update_mode {
                            super::HeaderUpdateMode::None => unreachable!(),
                            // Rewrite: seek back to the start and overwrite the
                            // initial header. Requires a seekable downstream.
                            super::HeaderUpdateMode::Rewrite => {
                                let mut q = gst::query::Seeking::new(gst::Format::Bytes);
                                if self.obj().src_pad().peer_query(&mut q) && q.result().0 {
                                    let aggregator = self.obj();

                                    aggregator.set_src_caps(&caps);

                                    // Seek to the beginning with a default bytes segment
                                    aggregator
                                        .update_segment(
                                            &gst::FormattedSegment::<gst::format::Bytes>::new(),
                                        );

                                    if let Err(err) = aggregator.finish_buffer_list(buffer_list) {
                                        gst::error!(
                                            CAT,
                                            imp: self,
                                            "Failed pushing updated header buffer downstream: {:?}",
                                            err,
                                        );
                                    }
                                } else {
                                    gst::error!(
                                        CAT,
                                        imp: self,
                                        "Can't rewrite header because downstream is not seekable"
                                    );
                                }
                            }
                            // Update: append the updated header at the end.
                            super::HeaderUpdateMode::Update => {
                                let aggregator = self.obj();

                                aggregator.set_src_caps(&caps);
                                if let Err(err) = aggregator.finish_buffer_list(buffer_list) {
                                    gst::error!(
                                        CAT,
                                        imp: self,
                                        "Failed pushing updated header buffer downstream: {:?}",
                                        err,
                                    );
                                }
                            }
                        }
                    }
                    Ok(None) => {}
                    Err(err) => {
                        gst::error!(
                            CAT,
                            imp: self,
                            "Failed to generate updated header: {:?}",
                            err
                        );
                    }
                }
            }

            // Need to output new headers if started again after EOS
            self.state.lock().unwrap().sent_headers = false;

            Err(gst::FlowError::Eos)
        } else {
            Ok(gst::FlowSuccess::Ok)
        }
    }
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Class struct for [`FMP4Mux`]: the parent Aggregator class struct plus the
/// per-subclass `variant`, filled in by `class_init` below.
#[repr(C)]
pub(crate) struct Class {
    parent: gst_base::ffi::GstAggregatorClass,
    variant: super::Variant,
}

unsafe impl ClassStruct for Class {
    type Type = FMP4Mux;
}

impl std::ops::Deref for Class {
    type Target = glib::Class<gst_base::Aggregator>;

    fn deref(&self) -> &Self::Target {
        // SAFETY: `Class` is #[repr(C)] with the parent class struct as its
        // first field, so a pointer to `parent` is a valid pointer to the
        // parent's class type.
        unsafe { &*(&self.parent as *const _ as *const _) }
    }
}

unsafe impl<T: FMP4MuxImpl> IsSubclassable<T> for super::FMP4Mux {
    fn class_init(class: &mut glib::Class<Self>) {
        Self::parent_class_init::<T>(class);

        // Store the subclass's variant in the class struct so instance code can
        // look it up via `class.as_ref().variant`.
        let class = class.as_mut();
        class.variant = T::VARIANT;
    }
}

/// Trait implemented by each concrete muxer subclass to declare its variant.
pub(crate) trait FMP4MuxImpl: AggregatorImpl {
    const VARIANT: super::Variant;
}
|
|
|
|
|
|
|
|
/// Concrete subclass producing plain ISO fragmented MP4.
#[derive(Default)]
pub(crate) struct ISOFMP4Mux;

#[glib::object_subclass]
impl ObjectSubclass for ISOFMP4Mux {
    const NAME: &'static str = "GstISOFMP4Mux";
    type Type = super::ISOFMP4Mux;
    type ParentType = super::FMP4Mux;
}

impl ObjectImpl for ISOFMP4Mux {}

impl GstObjectImpl for ISOFMP4Mux {}
|
|
|
|
|
|
|
|
impl ElementImpl for ISOFMP4Mux {
    /// Element metadata shown by gst-inspect.
    fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
        static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
            gst::subclass::ElementMetadata::new(
                "ISOFMP4Mux",
                "Codec/Muxer",
                "ISO fragmented MP4 muxer",
                "Sebastian Dröge <sebastian@centricular.com>",
            )
        });

        Some(&*ELEMENT_METADATA)
    }

    /// Pad templates: one Always src pad (iso-fragmented quicktime) and
    /// request sink pads (`sink_%u`) accepting H.264/H.265/VP9 video and AAC audio.
    fn pad_templates() -> &'static [gst::PadTemplate] {
        static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
            let src_pad_template = gst::PadTemplate::new(
                "src",
                gst::PadDirection::Src,
                gst::PadPresence::Always,
                &gst::Caps::builder("video/quicktime")
                    .field("variant", "iso-fragmented")
                    .build(),
            )
            .unwrap();

            let sink_pad_template = gst::PadTemplate::new(
                "sink_%u",
                gst::PadDirection::Sink,
                gst::PadPresence::Request,
                &[
                    gst::Structure::builder("video/x-h264")
                        .field("stream-format", gst::List::new(["avc", "avc3"]))
                        .field("alignment", "au")
                        .field("width", gst::IntRange::new(1, u16::MAX as i32))
                        .field("height", gst::IntRange::new(1, u16::MAX as i32))
                        .build(),
                    gst::Structure::builder("video/x-h265")
                        .field("stream-format", gst::List::new(["hvc1", "hev1"]))
                        .field("alignment", "au")
                        .field("width", gst::IntRange::new(1, u16::MAX as i32))
                        .field("height", gst::IntRange::new(1, u16::MAX as i32))
                        .build(),
                    gst::Structure::builder("video/x-vp9")
                        .field("profile", gst::List::new(["0", "1", "2", "3"]))
                        .field("chroma-format", gst::List::new(["4:2:0", "4:2:2", "4:4:4"]))
                        .field("bit-depth-luma", gst::List::new([8u32, 10u32, 12u32]))
                        .field("bit-depth-chroma", gst::List::new([8u32, 10u32, 12u32]))
                        .field("width", gst::IntRange::new(1, u16::MAX as i32))
                        .field("height", gst::IntRange::new(1, u16::MAX as i32))
                        .build(),
                    gst::Structure::builder("audio/mpeg")
                        .field("mpegversion", 4i32)
                        .field("stream-format", "raw")
                        .field("channels", gst::IntRange::new(1, u16::MAX as i32))
                        .field("rate", gst::IntRange::new(1, i32::MAX))
                        .build(),
                ]
                .into_iter()
                .collect::<gst::Caps>(),
            )
            .unwrap();

            vec![src_pad_template, sink_pad_template]
        });

        PAD_TEMPLATES.as_ref()
    }
}
|
|
|
|
|
2022-05-05 12:09:19 +00:00
|
|
|
// No Aggregator vfuncs overridden here; the aggregation logic is provided by
// the `FMP4Mux` parent class.
impl AggregatorImpl for ISOFMP4Mux {}
|
|
|
|
|
2021-10-18 06:42:42 +00:00
|
|
|
impl FMP4MuxImpl for ISOFMP4Mux {
    // Selects plain ISO BMFF fragmented-MP4 behavior in the base class.
    const VARIANT: super::Variant = super::Variant::ISO;
}
|
|
|
|
|
|
|
|
/// CMAF fragmented MP4 muxer subclass.
///
/// A stateless unit struct: all muxing state lives in the `FMP4Mux` parent
/// class; this subclass only selects the variant and its pad templates.
#[derive(Default)]
pub(crate) struct CMAFMux;
|
|
|
|
|
|
|
|
// GObject type registration boilerplate for the CMAF muxer element.
#[glib::object_subclass]
impl ObjectSubclass for CMAFMux {
    // GType name under which the element is registered.
    const NAME: &'static str = "GstCMAFMux";
    // Public wrapper type from the parent module.
    type Type = super::CMAFMux;
    // Inherits all muxing behavior from the shared fMP4 base class.
    type ParentType = super::FMP4Mux;
}
|
|
|
|
|
|
|
|
// No properties or signals beyond those of the parent class.
impl ObjectImpl for CMAFMux {}
|
|
|
|
|
|
|
|
// Default GstObject behavior.
impl GstObjectImpl for CMAFMux {}
|
|
|
|
|
|
|
|
impl ElementImpl for CMAFMux {
|
|
|
|
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
|
|
|
|
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
|
|
|
|
gst::subclass::ElementMetadata::new(
|
|
|
|
"CMAFMux",
|
|
|
|
"Codec/Muxer",
|
|
|
|
"CMAF fragmented MP4 muxer",
|
|
|
|
"Sebastian Dröge <sebastian@centricular.com>",
|
|
|
|
)
|
|
|
|
});
|
|
|
|
|
|
|
|
Some(&*ELEMENT_METADATA)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn pad_templates() -> &'static [gst::PadTemplate] {
|
|
|
|
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
|
|
|
|
let src_pad_template = gst::PadTemplate::new(
|
|
|
|
"src",
|
|
|
|
gst::PadDirection::Src,
|
|
|
|
gst::PadPresence::Always,
|
|
|
|
&gst::Caps::builder("video/quicktime")
|
|
|
|
.field("variant", "cmaf")
|
|
|
|
.build(),
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
let sink_pad_template = gst::PadTemplate::new(
|
|
|
|
"sink",
|
|
|
|
gst::PadDirection::Sink,
|
|
|
|
gst::PadPresence::Always,
|
|
|
|
&[
|
|
|
|
gst::Structure::builder("video/x-h264")
|
|
|
|
.field("stream-format", gst::List::new(["avc", "avc3"]))
|
|
|
|
.field("alignment", "au")
|
|
|
|
.field("width", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
|
|
|
gst::Structure::builder("video/x-h265")
|
|
|
|
.field("stream-format", gst::List::new(["hvc1", "hev1"]))
|
|
|
|
.field("alignment", "au")
|
|
|
|
.field("width", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
2022-10-25 06:55:50 +00:00
|
|
|
gst::Structure::builder("video/x-vp9")
|
|
|
|
.field("profile", gst::List::new(["0", "1", "2", "3"]))
|
|
|
|
.field("chroma-format", gst::List::new(["4:2:0", "4:2:2", "4:4:4"]))
|
|
|
|
.field("bit-depth-luma", gst::List::new([8u32, 10u32, 12u32]))
|
|
|
|
.field("bit-depth-chroma", gst::List::new([8u32, 10u32, 12u32]))
|
|
|
|
.field("width", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
2021-10-18 06:42:42 +00:00
|
|
|
gst::Structure::builder("audio/mpeg")
|
|
|
|
.field("mpegversion", 4i32)
|
|
|
|
.field("stream-format", "raw")
|
|
|
|
.field("channels", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.field("rate", gst::IntRange::new(1, i32::MAX))
|
|
|
|
.build(),
|
|
|
|
]
|
|
|
|
.into_iter()
|
|
|
|
.collect::<gst::Caps>(),
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
vec![src_pad_template, sink_pad_template]
|
|
|
|
});
|
|
|
|
|
|
|
|
PAD_TEMPLATES.as_ref()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-05-05 12:09:19 +00:00
|
|
|
// No Aggregator vfuncs overridden here; the aggregation logic is provided by
// the `FMP4Mux` parent class.
impl AggregatorImpl for CMAFMux {}
|
|
|
|
|
2021-10-18 06:42:42 +00:00
|
|
|
impl FMP4MuxImpl for CMAFMux {
    // Selects CMAF-specific behavior in the base class.
    const VARIANT: super::Variant = super::Variant::CMAF;
}
|
|
|
|
|
|
|
|
/// DASH fragmented MP4 muxer subclass.
///
/// A stateless unit struct: all muxing state lives in the `FMP4Mux` parent
/// class; this subclass only selects the variant and its pad templates.
#[derive(Default)]
pub(crate) struct DASHMP4Mux;
|
|
|
|
|
|
|
|
// GObject type registration boilerplate for the DASH muxer element.
#[glib::object_subclass]
impl ObjectSubclass for DASHMP4Mux {
    // GType name under which the element is registered.
    const NAME: &'static str = "GstDASHMP4Mux";
    // Public wrapper type from the parent module.
    type Type = super::DASHMP4Mux;
    // Inherits all muxing behavior from the shared fMP4 base class.
    type ParentType = super::FMP4Mux;
}
|
|
|
|
|
|
|
|
// No properties or signals beyond those of the parent class.
impl ObjectImpl for DASHMP4Mux {}
|
|
|
|
|
|
|
|
// Default GstObject behavior.
impl GstObjectImpl for DASHMP4Mux {}
|
|
|
|
|
|
|
|
impl ElementImpl for DASHMP4Mux {
|
|
|
|
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
|
|
|
|
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
|
|
|
|
gst::subclass::ElementMetadata::new(
|
|
|
|
"DASHMP4Mux",
|
|
|
|
"Codec/Muxer",
|
|
|
|
"DASH fragmented MP4 muxer",
|
|
|
|
"Sebastian Dröge <sebastian@centricular.com>",
|
|
|
|
)
|
|
|
|
});
|
|
|
|
|
|
|
|
Some(&*ELEMENT_METADATA)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn pad_templates() -> &'static [gst::PadTemplate] {
|
|
|
|
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
|
|
|
|
let src_pad_template = gst::PadTemplate::new(
|
|
|
|
"src",
|
|
|
|
gst::PadDirection::Src,
|
|
|
|
gst::PadPresence::Always,
|
|
|
|
&gst::Caps::builder("video/quicktime")
|
|
|
|
.field("variant", "iso-fragmented")
|
|
|
|
.build(),
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
let sink_pad_template = gst::PadTemplate::new(
|
|
|
|
"sink",
|
|
|
|
gst::PadDirection::Sink,
|
|
|
|
gst::PadPresence::Always,
|
2022-01-12 17:51:08 +00:00
|
|
|
&[
|
2021-10-18 06:42:42 +00:00
|
|
|
gst::Structure::builder("video/x-h264")
|
|
|
|
.field("stream-format", gst::List::new(&[&"avc", &"avc3"]))
|
|
|
|
.field("alignment", "au")
|
|
|
|
.field("width", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
|
|
|
gst::Structure::builder("video/x-h265")
|
|
|
|
.field("stream-format", gst::List::new(&[&"hvc1", &"hev1"]))
|
|
|
|
.field("alignment", "au")
|
|
|
|
.field("width", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
2022-10-25 06:55:50 +00:00
|
|
|
gst::Structure::builder("video/x-vp9")
|
|
|
|
.field("profile", gst::List::new(["0", "1", "2", "3"]))
|
|
|
|
.field("chroma-format", gst::List::new(["4:2:0", "4:2:2", "4:4:4"]))
|
|
|
|
.field("bit-depth-luma", gst::List::new([8u32, 10u32, 12u32]))
|
|
|
|
.field("bit-depth-chroma", gst::List::new([8u32, 10u32, 12u32]))
|
|
|
|
.field("width", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
2021-10-18 06:42:42 +00:00
|
|
|
gst::Structure::builder("audio/mpeg")
|
|
|
|
.field("mpegversion", 4i32)
|
|
|
|
.field("stream-format", "raw")
|
|
|
|
.field("channels", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.field("rate", gst::IntRange::<i32>::new(1, i32::MAX))
|
|
|
|
.build(),
|
2022-01-12 17:51:08 +00:00
|
|
|
]
|
|
|
|
.into_iter()
|
2021-10-18 06:42:42 +00:00
|
|
|
.collect::<gst::Caps>(),
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
vec![src_pad_template, sink_pad_template]
|
|
|
|
});
|
|
|
|
|
|
|
|
PAD_TEMPLATES.as_ref()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-05-05 12:09:19 +00:00
|
|
|
// No Aggregator vfuncs overridden here; the aggregation logic is provided by
// the `FMP4Mux` parent class.
impl AggregatorImpl for DASHMP4Mux {}
|
|
|
|
|
2021-10-18 06:42:42 +00:00
|
|
|
impl FMP4MuxImpl for DASHMP4Mux {
    // Selects DASH-specific behavior in the base class.
    const VARIANT: super::Variant = super::Variant::DASH;
}
|
2022-05-13 08:45:01 +00:00
|
|
|
|
|
|
|
/// ONVIF fragmented MP4 muxer subclass.
///
/// A stateless unit struct: all muxing state lives in the `FMP4Mux` parent
/// class; this subclass only selects the variant and its pad templates.
#[derive(Default)]
pub(crate) struct ONVIFFMP4Mux;
|
|
|
|
|
|
|
|
// GObject type registration boilerplate for the ONVIF muxer element.
#[glib::object_subclass]
impl ObjectSubclass for ONVIFFMP4Mux {
    // GType name under which the element is registered.
    const NAME: &'static str = "GstONVIFFMP4Mux";
    // Public wrapper type from the parent module.
    type Type = super::ONVIFFMP4Mux;
    // Inherits all muxing behavior from the shared fMP4 base class.
    type ParentType = super::FMP4Mux;
}
|
|
|
|
|
|
|
|
// No properties or signals beyond those of the parent class.
impl ObjectImpl for ONVIFFMP4Mux {}
|
|
|
|
|
|
|
|
// Default GstObject behavior.
impl GstObjectImpl for ONVIFFMP4Mux {}
|
|
|
|
|
|
|
|
impl ElementImpl for ONVIFFMP4Mux {
|
|
|
|
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
|
|
|
|
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
|
|
|
|
gst::subclass::ElementMetadata::new(
|
|
|
|
"ONVIFFMP4Mux",
|
|
|
|
"Codec/Muxer",
|
|
|
|
"ONVIF fragmented MP4 muxer",
|
|
|
|
"Sebastian Dröge <sebastian@centricular.com>",
|
|
|
|
)
|
|
|
|
});
|
|
|
|
|
|
|
|
Some(&*ELEMENT_METADATA)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn pad_templates() -> &'static [gst::PadTemplate] {
|
|
|
|
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
|
|
|
|
let src_pad_template = gst::PadTemplate::new(
|
|
|
|
"src",
|
|
|
|
gst::PadDirection::Src,
|
|
|
|
gst::PadPresence::Always,
|
|
|
|
&gst::Caps::builder("video/quicktime")
|
|
|
|
.field("variant", "iso-fragmented")
|
|
|
|
.build(),
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
let sink_pad_template = gst::PadTemplate::new(
|
|
|
|
"sink_%u",
|
|
|
|
gst::PadDirection::Sink,
|
|
|
|
gst::PadPresence::Request,
|
|
|
|
&[
|
|
|
|
gst::Structure::builder("video/x-h264")
|
|
|
|
.field("stream-format", gst::List::new(&[&"avc", &"avc3"]))
|
|
|
|
.field("alignment", "au")
|
|
|
|
.field("width", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
|
|
|
gst::Structure::builder("video/x-h265")
|
|
|
|
.field("stream-format", gst::List::new(&[&"hvc1", &"hev1"]))
|
|
|
|
.field("alignment", "au")
|
|
|
|
.field("width", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
|
|
|
gst::Structure::builder("image/jpeg")
|
|
|
|
.field("width", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.field("height", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.build(),
|
|
|
|
gst::Structure::builder("audio/mpeg")
|
|
|
|
.field("mpegversion", 4i32)
|
|
|
|
.field("stream-format", "raw")
|
|
|
|
.field("channels", gst::IntRange::<i32>::new(1, u16::MAX as i32))
|
|
|
|
.field("rate", gst::IntRange::<i32>::new(1, i32::MAX))
|
|
|
|
.build(),
|
|
|
|
gst::Structure::builder("audio/x-alaw")
|
|
|
|
.field("channels", gst::IntRange::<i32>::new(1, 2))
|
|
|
|
.field("rate", gst::IntRange::<i32>::new(1, i32::MAX))
|
|
|
|
.build(),
|
|
|
|
gst::Structure::builder("audio/x-mulaw")
|
|
|
|
.field("channels", gst::IntRange::<i32>::new(1, 2))
|
|
|
|
.field("rate", gst::IntRange::<i32>::new(1, i32::MAX))
|
|
|
|
.build(),
|
|
|
|
gst::Structure::builder("audio/x-adpcm")
|
|
|
|
.field("layout", "g726")
|
|
|
|
.field("channels", 1i32)
|
|
|
|
.field("rate", 8000i32)
|
|
|
|
.field("bitrate", gst::List::new([16000i32, 24000, 32000, 40000]))
|
|
|
|
.build(),
|
2022-05-13 09:13:54 +00:00
|
|
|
gst::Structure::builder("application/x-onvif-metadata")
|
2022-08-11 17:47:36 +00:00
|
|
|
.field("parsed", true)
|
2022-05-13 09:13:54 +00:00
|
|
|
.build(),
|
2022-05-13 08:45:01 +00:00
|
|
|
]
|
|
|
|
.into_iter()
|
|
|
|
.collect::<gst::Caps>(),
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
vec![src_pad_template, sink_pad_template]
|
|
|
|
});
|
|
|
|
|
|
|
|
PAD_TEMPLATES.as_ref()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// No Aggregator vfuncs overridden here; the aggregation logic is provided by
// the `FMP4Mux` parent class.
impl AggregatorImpl for ONVIFFMP4Mux {}
|
|
|
|
|
|
|
|
impl FMP4MuxImpl for ONVIFFMP4Mux {
    // Selects ONVIF-specific behavior in the base class.
    const VARIANT: super::Variant = super::Variant::ONVIF;
}
|