rtp: port remaining to 0.11

Author: Mark Nauwelaerts  2011-07-10 21:50:19 +02:00
parent 9e276e4f81
commit eb82a50bd1
31 changed files with 1115 additions and 1110 deletions
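Two mechanical changes repeat in every file below: GST_BOILERPLATE and the separate *_base_init functions give way to G_DEFINE_TYPE, with pad templates and element details now registered from class_init, and RTP header/payload accessors no longer take the GstBuffer directly but a GstRTPBuffer that has been mapped first. As a reading aid, here is a minimal sketch of that second pattern, assuming the 0.11 API exactly as it appears in these hunks; the function name and its error handling are illustrative only and are not code from the commit.

#include <gst/rtp/gstrtpbuffer.h>

/* Sketch (not part of the commit): 0.11-style packet access.
 * In 0.10 the accessors took the GstBuffer directly, e.g.
 *   payload = gst_rtp_buffer_get_payload (buf);
 * In 0.11 the packet is mapped into a GstRTPBuffer first and must be
 * unmapped again on every exit path, including the error labels. */
static GstBuffer *
extract_payload (GstBuffer * buf)
{
  GstRTPBuffer rtp;
  GstBuffer *outbuf = NULL;

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
  if (gst_rtp_buffer_get_payload_len (&rtp) > 0)
    outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
  gst_rtp_buffer_unmap (&rtp);

  return outbuf;
}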

View file

@ -83,6 +83,7 @@ endif
libgstrtp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstrtp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
-lgstaudio-@GST_MAJORMINOR@ \
-lgstvideo-@GST_MAJORMINOR@ \
-lgsttag-@GST_MAJORMINOR@ \
-lgstrtp-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) $(GST_LIBS) \

View file

@ -44,8 +44,8 @@ typedef struct _GstAsteriskH263Header
guint16 length; /* Length */
} GstAsteriskH263Header;
#define GST_ASTERISKH263_HEADER_TIMESTAMP(buf) (((GstAsteriskH263Header *)(GST_BUFFER_DATA (buf)))->timestamp)
#define GST_ASTERISKH263_HEADER_LENGTH(buf) (((GstAsteriskH263Header *)(GST_BUFFER_DATA (buf)))->length)
#define GST_ASTERISKH263_HEADER_TIMESTAMP(data) (((GstAsteriskH263Header *)(data))->timestamp)
#define GST_ASTERISKH263_HEADER_LENGTH(data) (((GstAsteriskH263Header *)(data))->length)
static GstStaticPadTemplate gst_asteriskh263_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
@ -71,24 +71,8 @@ static GstFlowReturn gst_asteriskh263_chain (GstPad * pad, GstBuffer * buffer);
static GstStateChangeReturn gst_asteriskh263_change_state (GstElement *
element, GstStateChange transition);
GST_BOILERPLATE (GstAsteriskh263, gst_asteriskh263, GstElement,
GST_TYPE_ELEMENT);
static void
gst_asteriskh263_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_asteriskh263_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_asteriskh263_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP Asterisk H263 depayloader", "Codec/Depayloader/Network/RTP",
"Extracts H263 video from RTP and encodes in Asterisk H263 format",
"Neil Stratford <neils@vipadia.com>");
}
#define gst_asteriskh263_parent_class parent_class
G_DEFINE_TYPE (GstAsteriskh263, gst_asteriskh263, GST_TYPE_ELEMENT);
static void
gst_asteriskh263_class_init (GstAsteriskh263Class * klass)
@ -102,11 +86,20 @@ gst_asteriskh263_class_init (GstAsteriskh263Class * klass)
gobject_class->finalize = gst_asteriskh263_finalize;
gstelement_class->change_state = gst_asteriskh263_change_state;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_asteriskh263_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_asteriskh263_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP Asterisk H263 depayloader", "Codec/Depayloader/Network/RTP",
"Extracts H263 video from RTP and encodes in Asterisk H263 format",
"Neil Stratford <neils@vipadia.com>");
}
static void
gst_asteriskh263_init (GstAsteriskh263 * asteriskh263,
GstAsteriskh263Class * klass)
gst_asteriskh263_init (GstAsteriskh263 * asteriskh263)
{
asteriskh263->srcpad =
gst_pad_new_from_static_template (&gst_asteriskh263_src_template, "src");
@ -153,12 +146,18 @@ gst_asteriskh263_chain (GstPad * pad, GstBuffer * buf)
guint32 timestamp;
guint32 samples;
guint16 asterisk_len;
GstRTPBuffer rtp;
guint8 *data;
payload_len = gst_rtp_buffer_get_payload_len (buf);
payload = gst_rtp_buffer_get_payload (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
M = gst_rtp_buffer_get_marker (buf);
timestamp = gst_rtp_buffer_get_timestamp (buf);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
payload = gst_rtp_buffer_get_payload (&rtp);
M = gst_rtp_buffer_get_marker (&rtp);
timestamp = gst_rtp_buffer_get_timestamp (&rtp);
gst_rtp_buffer_unmap (&rtp);
outbuf = gst_buffer_new_and_alloc (payload_len +
GST_ASTERISKH263_HEADER_LEN);
@ -172,16 +171,24 @@ gst_asteriskh263_chain (GstPad * pad, GstBuffer * buf)
samples = timestamp - asteriskh263->lastts;
asteriskh263->lastts = timestamp;
GST_ASTERISKH263_HEADER_TIMESTAMP (outbuf) = g_htonl (samples);
GST_ASTERISKH263_HEADER_LENGTH (outbuf) = g_htons (asterisk_len);
data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
GST_ASTERISKH263_HEADER_TIMESTAMP (data) = g_htonl (samples);
GST_ASTERISKH263_HEADER_LENGTH (data) = g_htons (asterisk_len);
/* copy the data into place */
memcpy (GST_BUFFER_DATA (outbuf) + GST_ASTERISKH263_HEADER_LEN, payload,
payload_len);
memcpy (data + GST_ASTERISKH263_HEADER_LEN, payload, payload_len);
gst_buffer_unmap (outbuf, data, -1);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
gst_buffer_set_caps (outbuf,
(GstCaps *) gst_pad_get_pad_template_caps (asteriskh263->srcpad));
if (!gst_pad_has_current_caps (asteriskh263->srcpad)) {
GstCaps *caps;
caps = gst_caps_copy
(gst_pad_get_pad_template_caps (asteriskh263->srcpad));
gst_pad_set_caps (asteriskh263->srcpad, caps);
gst_caps_unref (caps);
}
ret = gst_pad_push (asteriskh263->srcpad, outbuf);

View file

@ -67,47 +67,42 @@ static GstStaticPadTemplate gst_rtp_L16_depay_sink_template =
)
);
GST_BOILERPLATE (GstRtpL16Depay, gst_rtp_L16_depay, GstBaseRTPDepayload,
GST_TYPE_BASE_RTP_DEPAYLOAD);
#define gst_rtp_L16_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpL16Depay, gst_rtp_L16_depay, GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_L16_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
static GstBuffer *gst_rtp_L16_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
static void
gst_rtp_L16_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_L16_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_L16_depay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP audio depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts raw audio from RTP packets",
"Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_L16_depay_class_init (GstRtpL16DepayClass * klass)
{
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->set_caps = gst_rtp_L16_depay_setcaps;
gstbasertpdepayload_class->process = gst_rtp_L16_depay_process;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_L16_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_L16_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP audio depayloader", "Codec/Depayloader/Network/RTP",
"Extracts raw audio from RTP packets",
"Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpL16depay_debug, "rtpL16depay", 0,
"Raw Audio RTP Depayloader");
}
static void
gst_rtp_L16_depay_init (GstRtpL16Depay * rtpL16depay,
GstRtpL16DepayClass * klass)
gst_rtp_L16_depay_init (GstRtpL16Depay * rtpL16depay)
{
/* needed because of GST_BOILERPLATE */
}
@ -228,24 +223,28 @@ gst_rtp_L16_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
GstBuffer *outbuf;
gint payload_len;
gboolean marker;
GstRTPBuffer rtp;
rtpL16depay = GST_RTP_L16_DEPAY (depayload);
payload_len = gst_rtp_buffer_get_payload_len (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len <= 0)
goto empty_packet;
GST_DEBUG_OBJECT (rtpL16depay, "got payload of %d bytes", payload_len);
outbuf = gst_rtp_buffer_get_payload_buffer (buf);
marker = gst_rtp_buffer_get_marker (buf);
outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
marker = gst_rtp_buffer_get_marker (&rtp);
if (marker) {
/* mark talk spurt with DISCONT */
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
}
gst_rtp_buffer_unmap (&rtp);
return outbuf;
/* ERRORS */
@ -253,6 +252,7 @@ empty_packet:
{
GST_ELEMENT_WARNING (rtpL16depay, STREAM, DECODE,
("Empty Payload."), (NULL));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -70,43 +70,39 @@ static GstStaticPadTemplate gst_rtp_L16_pay_src_template =
static gboolean gst_rtp_L16_pay_setcaps (GstBaseRTPPayload * basepayload,
GstCaps * caps);
static GstCaps *gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload,
GstPad * pad);
GstPad * pad, GstCaps * filter);
GST_BOILERPLATE (GstRtpL16Pay, gst_rtp_L16_pay, GstBaseRTPAudioPayload,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
gst_rtp_L16_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_L16_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_L16_pay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP audio payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Raw audio into RTP packets (RFC 3551)",
"Wim Taymans <wim.taymans@gmail.com>");
}
#define gst_rtp_L16_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpL16Pay, gst_rtp_L16_pay, GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
gst_rtp_L16_pay_class_init (GstRtpL16PayClass * klass)
{
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_L16_pay_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_L16_pay_getcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_L16_pay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_L16_pay_sink_template));
gst_element_class_set_details_simple (gstelement_class, "RTP audio payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Raw audio into RTP packets (RFC 3551)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpL16pay_debug, "rtpL16pay", 0,
"L16 RTP Payloader");
}
static void
gst_rtp_L16_pay_init (GstRtpL16Pay * rtpL16pay, GstRtpL16PayClass * klass)
gst_rtp_L16_pay_init (GstRtpL16Pay * rtpL16pay)
{
GstBaseRTPAudioPayload *basertpaudiopayload;
@ -191,7 +187,8 @@ no_channels:
}
static GstCaps *
gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad,
GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
@ -227,6 +224,14 @@ gst_rtp_L16_pay_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
}
gst_caps_unref (otherpadcaps);
}
if (filter) {
GstCaps *tcaps = caps;
caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (tcaps);
}
return caps;
}

View file

@ -887,8 +887,9 @@ gst_rtp_h263_pay_move_window_right (GstRtpH263PayContext * context, guint n,
} else {
if (n > rest_bits) {
context->window =
(context->window << rest_bits) | (*context->
win_end & (((guint) pow (2.0, (double) rest_bits)) - 1));
(context->
window << rest_bits) | (*context->win_end & (((guint) pow (2.0,
(double) rest_bits)) - 1));
n -= rest_bits;
rest_bits = 0;
} else {
@ -1655,8 +1656,8 @@ gst_rtp_h263_pay_flush (GstRtpH263Pay * rtph263pay)
gst_rtp_h263_pay_boundry_init (&bound, NULL, rtph263pay->data - 1, 0, 0);
context->gobs =
(GstRtpH263PayGob **) g_malloc0 (format_props[context->piclayer->
ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
(GstRtpH263PayGob **) g_malloc0 (format_props[context->
piclayer->ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
for (i = 0; i < format_props[context->piclayer->ptype_srcformat][0]; i++) {

View file

@ -488,6 +488,7 @@ gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
guint type, width, height;
guint16 dri, precision, length;
guint8 *qtable;
GstRTPBuffer rtp;
rtpjpegdepay = GST_RTP_JPEG_DEPAY (depayload);
@ -496,12 +497,13 @@ gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
rtpjpegdepay->discont = TRUE;
}
payload_len = gst_rtp_buffer_get_payload_len (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 8)
goto empty_packet;
payload = gst_rtp_buffer_get_payload (buf);
payload = gst_rtp_buffer_get_payload (&rtp);
header_len = 0;
/* 0 1 2 3
@ -601,6 +603,7 @@ gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
if (frag_offset == 0) {
guint size;
guint8 *data;
if (rtpjpegdepay->width != width || rtpjpegdepay->height != height) {
GstCaps *outcaps;
@ -642,23 +645,23 @@ gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
}
/* max header length, should be big enough */
outbuf = gst_buffer_new_and_alloc (1000);
size = MakeHeaders (GST_BUFFER_DATA (outbuf), type,
width, height, qtable, precision, dri);
data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
size = MakeHeaders (data, type, width, height, qtable, precision, dri);
gst_buffer_unmap (outbuf, data, size);
GST_DEBUG_OBJECT (rtpjpegdepay, "pushing %u bytes of header", size);
GST_BUFFER_SIZE (outbuf) = size;
GST_DEBUG_OBJECT (rtpjpegdepay,
"pushing %" G_GSIZE_FORMAT " bytes of header", size);
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
}
/* take JPEG data, push in the adapter */
GST_DEBUG_OBJECT (rtpjpegdepay, "pushing data at offset %d", header_len);
outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, header_len, -1);
outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, header_len, -1);
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
outbuf = NULL;
if (gst_rtp_buffer_get_marker (buf)) {
if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
guint8 end[2];
guint8 *data;
@ -676,9 +679,10 @@ gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* no EOI marker, add one */
outbuf = gst_buffer_new_and_alloc (2);
data = GST_BUFFER_DATA (outbuf);
data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
data[0] = 0xff;
data[1] = 0xd9;
gst_buffer_unmap (outbuf, data, -1);
gst_adapter_push (rtpjpegdepay->adapter, outbuf);
avail += 2;
@ -693,6 +697,8 @@ gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
GST_DEBUG_OBJECT (rtpjpegdepay, "returning %u bytes", avail);
}
gst_rtp_buffer_unmap (&rtp);
return outbuf;
/* ERRORS */
@ -700,17 +706,20 @@ empty_packet:
{
GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, DECODE,
("Empty Payload."), (NULL));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_dimension:
{
GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, FORMAT,
("Invalid Dimension %dx%d.", width, height), (NULL));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
no_qtable:
{
GST_WARNING_OBJECT (rtpjpegdepay, "no qtable");
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -234,37 +234,33 @@ static gboolean gst_rtp_jpeg_pay_setcaps (GstBaseRTPPayload * basepayload,
static GstFlowReturn gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * pad,
GstBuffer * buffer);
GST_BOILERPLATE (GstRtpJPEGPay, gst_rtp_jpeg_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_jpeg_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_jpeg_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_jpeg_pay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP JPEG payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes JPEG pictures into RTP packets (RFC 2435)",
"Axis Communications <dev-gstreamer@axis.com>");
}
#define gst_rtp_jpeg_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpJPEGPay, gst_rtp_jpeg_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_jpeg_pay_class_init (GstRtpJPEGPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_rtp_jpeg_pay_set_property;
gobject_class->get_property = gst_rtp_jpeg_pay_get_property;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_jpeg_pay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_jpeg_pay_sink_template));
gst_element_class_set_details_simple (gstelement_class, "RTP JPEG payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes JPEG pictures into RTP packets (RFC 2435)",
"Axis Communications <dev-gstreamer@axis.com>");
gstbasertppayload_class->set_caps = gst_rtp_jpeg_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_jpeg_pay_handle_buffer;
@ -288,7 +284,7 @@ gst_rtp_jpeg_pay_class_init (GstRtpJPEGPayClass * klass)
}
static void
gst_rtp_jpeg_pay_init (GstRtpJPEGPay * pay, GstRtpJPEGPayClass * klass)
gst_rtp_jpeg_pay_init (GstRtpJPEGPay * pay)
{
pay->quality = DEFAULT_JPEG_QUALITY;
pay->quant = DEFAULT_JPEG_QUANT;
@ -609,8 +605,8 @@ gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * basepayload,
RtpQuantTable tables[15] = { {0, NULL}, };
CompInfo info[3] = { {0,}, };
guint quant_data_size;
guint8 *data;
guint size;
guint8 *data, *bdata;
gsize size;
guint mtu;
guint bytes_left;
guint jpeg_header_size = 0;
@ -619,18 +615,16 @@ gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * basepayload,
gboolean sos_found, sof_found, dqt_found, dri_found;
gint i;
GstBufferList *list = NULL;
GstBufferListIterator *it = NULL;
pay = GST_RTP_JPEG_PAY (basepayload);
mtu = GST_BASE_RTP_PAYLOAD_MTU (pay);
size = GST_BUFFER_SIZE (buffer);
data = GST_BUFFER_DATA (buffer);
data = bdata = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
offset = 0;
GST_LOG_OBJECT (pay, "got buffer size %u, timestamp %" GST_TIME_FORMAT, size,
GST_TIME_ARGS (timestamp));
GST_LOG_OBJECT (pay, "got buffer size %" G_GSIZE_FORMAT
" , timestamp %" GST_TIME_FORMAT, size, GST_TIME_ARGS (timestamp));
/* parse the jpeg header for 'start of scan' and read quant tables if needed */
sos_found = FALSE;
@ -734,7 +728,6 @@ gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * basepayload,
if (pay->buffer_list) {
list = gst_buffer_list_new ();
it = gst_buffer_list_iterate (list);
}
bytes_left = sizeof (jpeg_header) + quant_data_size + size;
@ -747,27 +740,25 @@ gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * basepayload,
GstBuffer *outbuf;
guint8 *payload;
guint payload_size = (bytes_left < mtu ? bytes_left : mtu);
guint header_size;
GstBuffer *paybuf;
GstRTPBuffer rtp;
if (pay->buffer_list) {
guint header_size;
header_size = sizeof (jpeg_header) + quant_data_size;
if (dri_found)
header_size += sizeof (restart_marker_header);
header_size = sizeof (jpeg_header) + quant_data_size;
if (dri_found)
header_size += sizeof (restart_marker_header);
outbuf = gst_rtp_buffer_new_allocate (header_size, 0, 0);
outbuf = gst_rtp_buffer_new_allocate (header_size, 0, 0);
} else {
outbuf = gst_rtp_buffer_new_allocate (payload_size, 0, 0);
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
if (payload_size == bytes_left) {
GST_LOG_OBJECT (pay, "last packet of frame");
frame_done = TRUE;
gst_rtp_buffer_set_marker (outbuf, 1);
gst_rtp_buffer_set_marker (&rtp, 1);
}
payload = gst_rtp_buffer_get_payload (outbuf);
payload = gst_rtp_buffer_get_payload (&rtp);
/* update offset */
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
@ -810,20 +801,21 @@ gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * basepayload,
quant_data_size = 0;
}
GST_LOG_OBJECT (pay, "sending payload size %d", payload_size);
gst_rtp_buffer_unmap (&rtp);
/* create a new buf to hold the payload */
paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
jpeg_header_size + offset, payload_size);
/* join memory parts */
outbuf = gst_buffer_join (outbuf, paybuf);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
if (pay->buffer_list) {
GstBuffer *paybuf;
/* create a new buf to hold the payload */
paybuf = gst_buffer_create_sub (buffer, jpeg_header_size + offset,
payload_size);
/* create a new group to hold the rtp header and the payload */
gst_buffer_list_iterator_add_group (it);
gst_buffer_list_iterator_add (it, outbuf);
gst_buffer_list_iterator_add (it, paybuf);
/* and add to list */
gst_buffer_list_insert (list, -1, outbuf);
} else {
memcpy (payload, data, payload_size);
ret = gst_basertppayload_push (basepayload, outbuf);
if (ret != GST_FLOW_OK)
break;
@ -836,11 +828,11 @@ gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * basepayload,
while (!frame_done);
if (pay->buffer_list) {
gst_buffer_list_iterator_free (it);
/* push the whole buffer list at once */
ret = gst_basertppayload_push_list (basepayload, list);
}
gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return ret;
@ -849,24 +841,28 @@ gst_rtp_jpeg_pay_handle_buffer (GstBaseRTPPayload * basepayload,
unsupported_jpeg:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("Unsupported JPEG"), (NULL));
gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_SUPPORTED;
}
no_dimension:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("No size given"), (NULL));
gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_NEGOTIATED;
}
invalid_format:
{
/* error was posted */
gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
invalid_quant:
{
GST_ELEMENT_ERROR (pay, STREAM, FORMAT, ("Invalid quant tables"), (NULL));
gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}

View file

@ -62,7 +62,7 @@ static GstStaticPadTemplate gst_rtp_mp1s_depay_sink_template =
"clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP1S\"")
);
GST_BOILERPLATE (GstRtpMP1SDepay, gst_rtp_mp1s_depay, GstBaseRTPDepayload,
G_DEFINE_TYPE (GstRtpMP1SDepay, gst_rtp_mp1s_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_mp1s_depay_setcaps (GstBaseRTPDepayload * depayload,
@ -71,35 +71,30 @@ static GstBuffer *gst_rtp_mp1s_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
static void
gst_rtp_mp1s_depay_base_init (gpointer klass)
gst_rtp_mp1s_depay_class_init (GstRtpMP1SDepayClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gst_element_class_add_pad_template (element_class,
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_mp1s_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_mp1s_depay_setcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp1s_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp1s_depay_sink_template));
gst_element_class_set_details_simple (element_class,
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG1 System Stream depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG1 System Streams from RTP packets (RFC 3555)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_mp1s_depay_class_init (GstRtpMP1SDepayClass * klass)
{
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_mp1s_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_mp1s_depay_setcaps;
}
static void
gst_rtp_mp1s_depay_init (GstRtpMP1SDepay * rtpmp1sdepay,
GstRtpMP1SDepayClass * klass)
gst_rtp_mp1s_depay_init (GstRtpMP1SDepay * rtpmp1sdepay)
{
}
@ -128,12 +123,15 @@ static GstBuffer *
gst_rtp_mp1s_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
GstRTPBuffer rtp;
outbuf = gst_rtp_buffer_get_payload_buffer (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
gst_rtp_buffer_unmap (&rtp);
if (outbuf)
GST_DEBUG ("gst_rtp_mp1s_depay_chain: pushing buffer of size %d",
GST_BUFFER_SIZE (outbuf));
gst_buffer_get_size (outbuf));
return outbuf;
}

View file

@ -68,7 +68,7 @@ static GstStaticPadTemplate gst_rtp_mp2t_depay_sink_template =
"clock-rate = (int) [1, MAX ]")
);
GST_BOILERPLATE (GstRtpMP2TDepay, gst_rtp_mp2t_depay, GstBaseRTPDepayload,
G_DEFINE_TYPE (GstRtpMP2TDepay, gst_rtp_mp2t_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_mp2t_depay_setcaps (GstBaseRTPDepayload * depayload,
@ -81,31 +81,15 @@ static void gst_rtp_mp2t_depay_set_property (GObject * object, guint prop_id,
static void gst_rtp_mp2t_depay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void
gst_rtp_mp2t_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp2t_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp2t_depay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP MPEG Transport Stream depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG2 TS from RTP packets (RFC 2250)",
"Wim Taymans <wim.taymans@gmail.com>, "
"Thijs Vermeir <thijs.vermeir@barco.com>");
}
static void
gst_rtp_mp2t_depay_class_init (GstRtpMP2TDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_mp2t_depay_process;
@ -114,6 +98,17 @@ gst_rtp_mp2t_depay_class_init (GstRtpMP2TDepayClass * klass)
gobject_class->set_property = gst_rtp_mp2t_depay_set_property;
gobject_class->get_property = gst_rtp_mp2t_depay_get_property;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp2t_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp2t_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG Transport Stream depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG2 TS from RTP packets (RFC 2250)",
"Wim Taymans <wim.taymans@gmail.com>, "
"Thijs Vermeir <thijs.vermeir@barco.com>");
g_object_class_install_property (gobject_class, PROP_SKIP_FIRST_BYTES,
g_param_spec_uint ("skip-first-bytes",
"Skip first bytes",
@ -123,8 +118,7 @@ gst_rtp_mp2t_depay_class_init (GstRtpMP2TDepayClass * klass)
}
static void
gst_rtp_mp2t_depay_init (GstRtpMP2TDepay * rtpmp2tdepay,
GstRtpMP2TDepayClass * klass)
gst_rtp_mp2t_depay_init (GstRtpMP2TDepay * rtpmp2tdepay)
{
rtpmp2tdepay->skip_first_bytes = DEFAULT_SKIP_FIRST_BYTES;
}
@ -157,21 +151,24 @@ gst_rtp_mp2t_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
GstRtpMP2TDepay *rtpmp2tdepay;
GstBuffer *outbuf;
gint payload_len;
GstRTPBuffer rtp;
rtpmp2tdepay = GST_RTP_MP2T_DEPAY (depayload);
payload_len = gst_rtp_buffer_get_payload_len (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (G_UNLIKELY (payload_len <= rtpmp2tdepay->skip_first_bytes))
goto empty_packet;
outbuf =
gst_rtp_buffer_get_payload_subbuffer (buf, rtpmp2tdepay->skip_first_bytes,
-1);
outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp,
rtpmp2tdepay->skip_first_bytes, -1);
gst_rtp_buffer_unmap (&rtp);
if (outbuf)
GST_DEBUG ("gst_rtp_mp2t_depay_chain: pushing buffer of size %d",
GST_BUFFER_SIZE (outbuf));
gst_buffer_get_size (outbuf));
return outbuf;
@ -180,6 +177,7 @@ empty_packet:
{
GST_ELEMENT_WARNING (rtpmp2tdepay, STREAM, DECODE,
(NULL), ("Packet was empty"));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -52,41 +52,37 @@ static GstFlowReturn gst_rtp_mp2t_pay_handle_buffer (GstBaseRTPPayload *
static GstFlowReturn gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay);
static void gst_rtp_mp2t_pay_finalize (GObject * object);
GST_BOILERPLATE (GstRTPMP2TPay, gst_rtp_mp2t_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_mp2t_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp2t_pay_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp2t_pay_src_template));
gst_element_class_set_details_simple (element_class,
"RTP MPEG2 Transport Stream payloader", "Codec/Payloader/Network/RTP",
"Payload-encodes MPEG2 TS into RTP packets (RFC 2250)",
"Wim Taymans <wim.taymans@gmail.com>");
}
#define gst_rtp_mp2t_pay_parent_class parent_class
G_DEFINE_TYPE (GstRTPMP2TPay, gst_rtp_mp2t_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_mp2t_pay_class_init (GstRTPMP2TPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp2t_pay_finalize;
gstbasertppayload_class->set_caps = gst_rtp_mp2t_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_mp2t_pay_handle_buffer;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp2t_pay_sink_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp2t_pay_src_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG2 Transport Stream payloader", "Codec/Payloader/Network/RTP",
"Payload-encodes MPEG2 TS into RTP packets (RFC 2250)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay, GstRTPMP2TPayClass * klass)
gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay)
{
GST_BASE_RTP_PAYLOAD (rtpmp2tpay)->clock_rate = 90000;
GST_BASE_RTP_PAYLOAD_PT (rtpmp2tpay) = GST_RTP_PAYLOAD_MP2T;
@ -125,21 +121,24 @@ gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay)
guint8 *payload;
GstFlowReturn ret;
GstBuffer *outbuf;
GstRTPBuffer rtp;
avail = gst_adapter_available (rtpmp2tpay->adapter);
outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);
/* get payload */
payload = gst_rtp_buffer_get_payload (outbuf);
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
payload = gst_rtp_buffer_get_payload (&rtp);
/* copy stuff from adapter to payload */
gst_adapter_copy (rtpmp2tpay->adapter, payload, 0, avail);
gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp2tpay->first_ts;
GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;
GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %d",
GST_BUFFER_SIZE (outbuf));
gst_buffer_get_size (outbuf));
ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp2tpay), outbuf);
@ -160,7 +159,7 @@ gst_rtp_mp2t_pay_handle_buffer (GstBaseRTPPayload * basepayload,
rtpmp2tpay = GST_RTP_MP2T_PAY (basepayload);
size = GST_BUFFER_SIZE (buffer);
size = gst_buffer_get_size (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);

View file

@ -56,7 +56,8 @@ GST_STATIC_PAD_TEMPLATE ("sink",
)
);
GST_BOILERPLATE (GstRtpMP4ADepay, gst_rtp_mp4a_depay, GstBaseRTPDepayload,
#define gst_rtp_mp4a_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMP4ADepay, gst_rtp_mp4a_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_mp4a_depay_finalize (GObject * object);
@ -70,23 +71,6 @@ static GstStateChangeReturn gst_rtp_mp4a_depay_change_state (GstElement *
element, GstStateChange transition);
static void
gst_rtp_mp4a_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4a_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4a_depay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP MPEG4 audio depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG4 audio from RTP packets (RFC 3016)",
"Nokia Corporation (contact <stefan.kost@nokia.com>), "
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_mp4a_depay_class_init (GstRtpMP4ADepayClass * klass)
{
@ -105,13 +89,23 @@ gst_rtp_mp4a_depay_class_init (GstRtpMP4ADepayClass * klass)
gstbasertpdepayload_class->process = gst_rtp_mp4a_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_mp4a_depay_setcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4a_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4a_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG4 audio depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG4 audio from RTP packets (RFC 3016)",
"Nokia Corporation (contact <stefan.kost@nokia.com>), "
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4adepay_debug, "rtpmp4adepay", 0,
"MPEG4 audio RTP Depayloader");
}
static void
gst_rtp_mp4a_depay_init (GstRtpMP4ADepay * rtpmp4adepay,
GstRtpMP4ADepayClass * klass)
gst_rtp_mp4a_depay_init (GstRtpMP4ADepay * rtpmp4adepay)
{
rtpmp4adepay->adapter = gst_adapter_new ();
}
@ -168,7 +162,7 @@ gst_rtp_mp4a_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
if (gst_value_deserialize (&v, str)) {
GstBuffer *buffer;
guint8 *data;
guint size;
gsize size;
gint i;
guint32 rate = 0;
guint8 obj_type = 0, sr_idx = 0, channels = 0;
@ -178,11 +172,11 @@ gst_rtp_mp4a_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
gst_buffer_ref (buffer);
g_value_unset (&v);
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size < 2) {
GST_WARNING_OBJECT (depayload, "config too short (%d < 2)", size);
GST_WARNING_OBJECT (depayload, "config too short (%d < 2)",
(gint) size);
goto bad_config;
}
@ -214,8 +208,6 @@ gst_rtp_mp4a_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
for (i = 0; i < size; i++) {
data[i] = ((data[i + 1] & 1) << 7) | ((data[i + 2] & 0xfe) >> 1);
}
/* ignore remaining bit, we're only interested in full bytes */
GST_BUFFER_SIZE (buffer) = size;
gst_bit_reader_init (&br, data, size);
@ -273,16 +265,22 @@ gst_rtp_mp4a_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
break;
}
/* ignore remaining bit, we're only interested in full bytes */
gst_buffer_unmap (buffer, data, size);
data = NULL;
gst_caps_set_simple (srccaps,
"channels", G_TYPE_INT, (gint) channels,
"rate", G_TYPE_INT, (gint) rate,
"codec_data", GST_TYPE_BUFFER, buffer, NULL);
bad_config:
if (data)
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
} else {
g_warning ("cannot convert config to buffer");
}
}
bad_config:
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
@ -294,6 +292,8 @@ gst_rtp_mp4a_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpMP4ADepay *rtpmp4adepay;
GstBuffer *outbuf;
GstRTPBuffer rtp;
guint8 *bdata;
rtpmp4adepay = GST_RTP_MP4A_DEPAY (depayload);
@ -302,14 +302,16 @@ gst_rtp_mp4a_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
gst_adapter_clear (rtpmp4adepay->adapter);
}
outbuf = gst_rtp_buffer_get_payload_buffer (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_TIMESTAMPS);
outbuf = gst_buffer_make_writable (outbuf);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
gst_adapter_push (rtpmp4adepay->adapter, outbuf);
/* RTP marker bit indicates the last packet of the AudioMuxElement => create
* and push a buffer */
if (gst_rtp_buffer_get_marker (buf)) {
if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
guint i;
guint8 *data;
@ -322,7 +324,7 @@ gst_rtp_mp4a_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
GST_LOG_OBJECT (rtpmp4adepay, "have marker and %u available", avail);
outbuf = gst_adapter_take_buffer (rtpmp4adepay->adapter, avail);
data = GST_BUFFER_DATA (outbuf);
data = bdata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_READ);
/* position in data we are at */
pos = 0;
@ -353,7 +355,8 @@ gst_rtp_mp4a_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* take data out, skip the header */
pos += skip;
tmp = gst_buffer_create_sub (outbuf, pos, data_len);
tmp = gst_buffer_copy_region (outbuf, GST_BUFFER_COPY_MEMORY, pos,
data_len);
/* skip data too */
skip += data_len;
@ -382,8 +385,10 @@ gst_rtp_mp4a_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
"possible wrongly encoded packet."));
}
gst_buffer_unmap (outbuf, bdata, -1);
gst_buffer_unref (outbuf);
}
gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
@ -391,7 +396,9 @@ wrong_size:
{
GST_ELEMENT_WARNING (rtpmp4adepay, STREAM, DECODE,
("Packet did not validate"), ("wrong packet size"));
gst_buffer_unmap (outbuf, bdata, -1);
gst_buffer_unref (outbuf);
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -64,31 +64,17 @@ static gboolean gst_rtp_mp4a_pay_setcaps (GstBaseRTPPayload * payload,
static GstFlowReturn gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
GST_BOILERPLATE (GstRtpMP4APay, gst_rtp_mp4a_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD)
#define gst_rtp_mp4a_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMP4APay, gst_rtp_mp4a_pay, GST_TYPE_BASE_RTP_PAYLOAD)
static void gst_rtp_mp4a_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4a_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4a_pay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP MPEG4 audio payloader", "Codec/Payloader/Network/RTP",
"Payload MPEG4 audio as RTP packets (RFC 3016)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass)
static void gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->finalize = gst_rtp_mp4a_pay_finalize;
@ -96,12 +82,22 @@ gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass)
gstbasertppayload_class->set_caps = gst_rtp_mp4a_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_mp4a_pay_handle_buffer;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4a_pay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4a_pay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG4 audio payloader", "Codec/Payloader/Network/RTP",
"Payload MPEG4 audio as RTP packets (RFC 3016)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4apay_debug, "rtpmp4apay", 0,
"MP4A-LATM RTP Payloader");
}
static void
gst_rtp_mp4a_pay_init (GstRtpMP4APay * rtpmp4apay, GstRtpMP4APayClass * klass)
gst_rtp_mp4a_pay_init (GstRtpMP4APay * rtpmp4apay)
{
rtpmp4apay->rate = 90000;
rtpmp4apay->profile = g_strdup ("1");
@ -137,13 +133,12 @@ gst_rtp_mp4a_pay_parse_audio_config (GstRtpMP4APay * rtpmp4apay,
GstBuffer * buffer)
{
guint8 *data;
guint size;
gsize size;
guint8 objectType;
guint8 samplingIdx;
guint8 channelCfg;
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size < 2)
goto too_short;
@ -187,6 +182,8 @@ gst_rtp_mp4a_pay_parse_audio_config (GstRtpMP4APay * rtpmp4apay,
"objectType: %d, samplingIdx: %d (%d), channelCfg: %d", objectType,
samplingIdx, rtpmp4apay->rate, channelCfg);
gst_buffer_unmap (buffer, data, -1);
return TRUE;
/* ERROR */
@ -194,24 +191,28 @@ too_short:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, FORMAT,
(NULL), ("config string too short, expected 2 bytes, got %d", size));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
invalid_object:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, FORMAT,
(NULL), ("invalid object type 0"));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
wrong_freq:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported frequency index %d", samplingIdx));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
wrong_channels:
{
GST_ELEMENT_ERROR (rtpmp4apay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported number of channels %d, must < 8", channelCfg));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
}
@ -271,7 +272,8 @@ gst_rtp_mp4a_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
GstBuffer *buffer, *cbuffer;
guint8 *config;
guint8 *data;
guint size, i;
guint i;
gsize size;
buffer = gst_value_get_buffer (codec_data);
GST_LOG_OBJECT (rtpmp4apay, "configuring codec_data");
@ -282,11 +284,11 @@ gst_rtp_mp4a_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
if (!res)
goto config_failed;
size = GST_BUFFER_SIZE (buffer);
data = GST_BUFFER_DATA (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
/* make the StreamMuxConfig, we need 15 bits for the header */
config = g_malloc0 (size + 2);
cbuffer = gst_buffer_new_and_alloc (size + 2);
config = gst_buffer_map (cbuffer, NULL, NULL, GST_MAP_WRITE);
/* Create StreamMuxConfig according to ISO/IEC 14496-3:
*
@ -305,10 +307,8 @@ gst_rtp_mp4a_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
config[i + 2] |= ((data[i] & 0x7f) << 1);
}
cbuffer = gst_buffer_new ();
GST_BUFFER_DATA (cbuffer) = config;
GST_BUFFER_MALLOCDATA (cbuffer) = config;
GST_BUFFER_SIZE (cbuffer) = size + 2;
gst_buffer_unmap (cbuffer, config, -1);
gst_buffer_unmap (buffer, data, -1);
/* now we can configure the buffer */
if (rtpmp4apay->config)
@ -345,8 +345,9 @@ gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload * basepayload,
GstRtpMP4APay *rtpmp4apay;
GstFlowReturn ret;
GstBuffer *outbuf;
guint count, mtu, size;
guint8 *data;
guint count, mtu;
gsize size;
guint8 *data, *bdata;
gboolean fragmented;
GstClockTime timestamp;
@ -354,8 +355,7 @@ gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload * basepayload,
rtpmp4apay = GST_RTP_MP4A_PAY (basepayload);
size = GST_BUFFER_SIZE (buffer);
data = GST_BUFFER_DATA (buffer);
data = bdata = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
fragmented = FALSE;
@ -366,6 +366,7 @@ gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload * basepayload,
guint8 *payload;
guint payload_len;
guint packet_len;
GstRTPBuffer rtp;
/* this will be the total lenght of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (size, 0, 0);
@ -394,7 +395,8 @@ gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload * basepayload,
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
/* copy payload */
payload = gst_rtp_buffer_get_payload (outbuf);
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
payload = gst_rtp_buffer_get_payload (&rtp);
if (!fragmented) {
/* first packet write the header */
@ -414,7 +416,9 @@ gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload * basepayload,
size -= payload_len;
/* marker only if the packet is complete */
gst_rtp_buffer_set_marker (outbuf, size == 0);
gst_rtp_buffer_set_marker (&rtp, size == 0);
gst_rtp_buffer_unmap (&rtp);
/* copy incomming timestamp (if any) to outgoing buffers */
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
@ -424,6 +428,7 @@ gst_rtp_mp4a_pay_handle_buffer (GstBaseRTPPayload * basepayload,
fragmented = TRUE;
}
gst_buffer_unmap (buffer, bdata, -1);
gst_buffer_unref (buffer);
return ret;

View file

@ -126,7 +126,8 @@ gst_bs_parse_read (GstBsParse * bs, guint n)
}
GST_BOILERPLATE (GstRtpMP4GDepay, gst_rtp_mp4g_depay, GstBaseRTPDepayload,
#define gst_rtp_mp4g_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMP4GDepay, gst_rtp_mp4g_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_mp4g_depay_finalize (GObject * object);
@ -142,22 +143,6 @@ static GstStateChangeReturn gst_rtp_mp4g_depay_change_state (GstElement *
element, GstStateChange transition);
static void
gst_rtp_mp4g_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4g_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4g_depay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP MPEG4 ES depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG4 elementary streams from RTP packets (RFC 3640)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_mp4g_depay_class_init (GstRtpMP4GDepayClass * klass)
{
@ -177,13 +162,22 @@ gst_rtp_mp4g_depay_class_init (GstRtpMP4GDepayClass * klass)
gstbasertpdepayload_class->set_caps = gst_rtp_mp4g_depay_setcaps;
gstbasertpdepayload_class->handle_event = gst_rtp_mp4g_depay_handle_event;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4g_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4g_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG4 ES depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG4 elementary streams from RTP packets (RFC 3640)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4gdepay_debug, "rtpmp4gdepay", 0,
"MP4-generic RTP Depayloader");
}
static void
gst_rtp_mp4g_depay_init (GstRtpMP4GDepay * rtpmp4gdepay,
GstRtpMP4GDepayClass * klass)
gst_rtp_mp4g_depay_init (GstRtpMP4GDepay * rtpmp4gdepay)
{
rtpmp4gdepay->adapter = gst_adapter_new ();
rtpmp4gdepay->packets = g_queue_new ();
@ -428,8 +422,9 @@ static GstBuffer *
gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpMP4GDepay *rtpmp4gdepay;
GstBuffer *outbuf;
GstBuffer *outbuf = NULL;
GstClockTime timestamp;
GstRTPBuffer rtp;
rtpmp4gdepay = GST_RTP_MP4G_DEPAY (depayload);
@ -449,13 +444,16 @@ gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
guint AU_size, AU_index, AU_index_delta, payload_AU_size;
gboolean M;
payload_len = gst_rtp_buffer_get_payload_len (buf);
payload = gst_rtp_buffer_get_payload (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
payload = gst_rtp_buffer_get_payload (&rtp);
GST_DEBUG_OBJECT (rtpmp4gdepay, "received payload of %d", payload_len);
rtptime = gst_rtp_buffer_get_timestamp (buf);
M = gst_rtp_buffer_get_marker (buf);
rtptime = gst_rtp_buffer_get_timestamp (&rtp);
M = gst_rtp_buffer_get_marker (&rtp);
gst_rtp_buffer_unmap (&rtp);
if (rtpmp4gdepay->sizelength > 0) {
gint num_AU_headers, AU_headers_bytes, i;
@ -653,7 +651,7 @@ gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* collect stuff in the adapter, strip header from payload and push in
* the adapter */
outbuf =
gst_rtp_buffer_get_payload_subbuffer (buf, payload_AU, AU_size);
gst_rtp_buffer_get_payload_subbuffer (&rtp, payload_AU, AU_size);
gst_adapter_push (rtpmp4gdepay->adapter, outbuf);
if (M) {
@ -663,7 +661,6 @@ gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
avail = gst_adapter_available (rtpmp4gdepay->adapter);
outbuf = gst_adapter_take_buffer (rtpmp4gdepay->adapter, avail);
gst_buffer_set_caps (outbuf, GST_PAD_CAPS (depayload->srcpad));
/* copy some of the fields we calculated above on the buffer. We also
* copy the AU_index so that we can sort the packets in our queue. */
@ -675,7 +672,7 @@ gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
timestamp = -1;
GST_DEBUG_OBJECT (depayload, "pushing buffer of size %d",
GST_BUFFER_SIZE (outbuf));
gst_buffer_get_size (outbuf));
gst_rtp_mp4g_depay_queue (rtpmp4gdepay, outbuf);
@ -685,7 +682,7 @@ gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
}
} else {
/* push complete buffer in adapter */
outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 0, payload_len);
outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 0, payload_len);
gst_adapter_push (rtpmp4gdepay->adapter, outbuf);
/* if this was the last packet of the VOP, create and push a buffer */
@ -697,12 +694,15 @@ gst_rtp_mp4g_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
outbuf = gst_adapter_take_buffer (rtpmp4gdepay->adapter, avail);
GST_DEBUG ("gst_rtp_mp4g_depay_chain: pushing buffer of size %d",
GST_BUFFER_SIZE (outbuf));
gst_buffer_get_size (outbuf));
gst_rtp_buffer_unmap (&rtp);
return outbuf;
}
}
}
gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
@ -710,6 +710,7 @@ short_payload:
{
GST_ELEMENT_WARNING (rtpmp4gdepay, STREAM, DECODE,
("Packet payload was too short."), (NULL));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -82,28 +82,13 @@ static gboolean gst_rtp_mp4g_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
static GstFlowReturn gst_rtp_mp4g_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
static gboolean gst_rtp_mp4g_pay_handle_event (GstPad * pad, GstEvent * event);
static gboolean gst_rtp_mp4g_pay_handle_event (GstBaseRTPPayload * payload,
GstEvent * event);
GST_BOILERPLATE (GstRtpMP4GPay, gst_rtp_mp4g_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD)
#define gst_rtp_mp4g_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMP4GPay, gst_rtp_mp4g_pay, GST_TYPE_BASE_RTP_PAYLOAD)
static void gst_rtp_mp4g_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4g_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4g_pay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP MPEG4 ES payloader",
"Codec/Payloader/Network/RTP",
"Payload MPEG4 elementary streams as RTP packets (RFC 3640)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass)
static void gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
@ -121,12 +106,23 @@ gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass)
gstbasertppayload_class->handle_buffer = gst_rtp_mp4g_pay_handle_buffer;
gstbasertppayload_class->handle_event = gst_rtp_mp4g_pay_handle_event;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4g_pay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4g_pay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG4 ES payloader",
"Codec/Payloader/Network/RTP",
"Payload MPEG4 elementary streams as RTP packets (RFC 3640)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4gpay_debug, "rtpmp4gpay", 0,
"MP4-generic RTP Payloader");
}
static void
gst_rtp_mp4g_pay_init (GstRtpMP4GPay * rtpmp4gpay, GstRtpMP4GPayClass * klass)
gst_rtp_mp4g_pay_init (GstRtpMP4GPay * rtpmp4gpay)
{
rtpmp4gpay->adapter = gst_adapter_new ();
}
@ -186,14 +182,13 @@ gst_rtp_mp4g_pay_parse_audio_config (GstRtpMP4GPay * rtpmp4gpay,
GstBuffer * buffer)
{
guint8 *data;
guint size;
gsize size;
guint8 objectType = 0;
guint8 samplingIdx = 0;
guint8 channelCfg = 0;
GstBitReader br;
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
gst_bit_reader_init (&br, data, size);
@ -266,6 +261,7 @@ gst_rtp_mp4g_pay_parse_audio_config (GstRtpMP4GPay * rtpmp4gpay,
objectType, samplingIdx, rtpmp4gpay->rate, channelCfg,
rtpmp4gpay->frame_len);
gst_buffer_unmap (buffer, data, -1);
return TRUE;
/* ERROR */
@ -273,24 +269,28 @@ too_short:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("config string too short"));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
invalid_object:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("invalid object type"));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
wrong_freq:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported frequency index %d", samplingIdx));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
wrong_channels:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
(NULL), ("unsupported number of channels %d, must < 8", channelCfg));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
}
@ -302,11 +302,10 @@ gst_rtp_mp4g_pay_parse_video_config (GstRtpMP4GPay * rtpmp4gpay,
GstBuffer * buffer)
{
guint8 *data;
guint size;
gsize size;
guint32 code;
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
if (size < 5)
goto too_short;
@ -334,6 +333,8 @@ gst_rtp_mp4g_pay_parse_video_config (GstRtpMP4GPay * rtpmp4gpay,
GST_LOG_OBJECT (rtpmp4gpay, "profile %s", rtpmp4gpay->profile);
gst_buffer_unmap (buffer, data, -1);
return TRUE;
/* ERROR */
@ -341,6 +342,7 @@ too_short:
{
GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
(NULL), ("config string too short"));
gst_buffer_unmap (buffer, data, -1);
return FALSE;
}
}
@ -468,6 +470,7 @@ gst_rtp_mp4g_pay_flush (GstRtpMP4GPay * rtpmp4gpay)
guint8 *payload;
guint payload_len;
guint packet_len;
GstRTPBuffer rtp;
/* this will be the total lenght of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
@ -486,8 +489,10 @@ gst_rtp_mp4g_pay_flush (GstRtpMP4GPay * rtpmp4gpay)
/* create buffer to hold the payload, also make room for the 4 header bytes. */
outbuf = gst_rtp_buffer_new_allocate (payload_len + 4, 0, 0);
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* copy payload */
payload = gst_rtp_buffer_get_payload (outbuf);
payload = gst_rtp_buffer_get_payload (&rtp);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
* |AU-headers-length|AU-header|AU-header| |AU-header|padding|
@ -528,7 +533,9 @@ gst_rtp_mp4g_pay_flush (GstRtpMP4GPay * rtpmp4gpay)
gst_adapter_flush (rtpmp4gpay->adapter, payload_len);
/* marker only if the packet is complete */
gst_rtp_buffer_set_marker (outbuf, avail <= payload_len);
gst_rtp_buffer_set_marker (&rtp, avail <= payload_len);
gst_rtp_buffer_unmap (&rtp);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4gpay->first_timestamp;
GST_BUFFER_DURATION (outbuf) = rtpmp4gpay->first_duration;
@ -566,16 +573,16 @@ gst_rtp_mp4g_pay_handle_buffer (GstBaseRTPPayload * basepayload,
}
static gboolean
gst_rtp_mp4g_pay_handle_event (GstPad * pad, GstEvent * event)
gst_rtp_mp4g_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
GstRtpMP4GPay *rtpmp4gpay;
rtpmp4gpay = GST_RTP_MP4G_PAY (gst_pad_get_parent (pad));
rtpmp4gpay = GST_RTP_MP4G_PAY (payload);
GST_DEBUG ("Got event: %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NEWSEGMENT:
case GST_EVENT_SEGMENT:
case GST_EVENT_EOS:
/* This flush call makes sure that the last buffer is always pushed
* to the base payloader */
@ -588,8 +595,6 @@ gst_rtp_mp4g_pay_handle_event (GstPad * pad, GstEvent * event)
break;
}
g_object_unref (rtpmp4gpay);
/* let parent handle event too */
return FALSE;
}

View file

@ -53,7 +53,8 @@ GST_STATIC_PAD_TEMPLATE ("sink",
)
);
GST_BOILERPLATE (GstRtpMP4VDepay, gst_rtp_mp4v_depay, GstBaseRTPDepayload,
#define gst_rtp_mp4v_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMP4VDepay, gst_rtp_mp4v_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_mp4v_depay_finalize (GObject * object);
@ -66,23 +67,6 @@ static GstBuffer *gst_rtp_mp4v_depay_process (GstBaseRTPDepayload * depayload,
static GstStateChangeReturn gst_rtp_mp4v_depay_change_state (GstElement *
element, GstStateChange transition);
static void
gst_rtp_mp4v_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4v_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4v_depay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP MPEG4 video depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG4 video from RTP packets (RFC 3016)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_mp4v_depay_class_init (GstRtpMP4VDepayClass * klass)
{
@ -101,13 +85,22 @@ gst_rtp_mp4v_depay_class_init (GstRtpMP4VDepayClass * klass)
gstbasertpdepayload_class->process = gst_rtp_mp4v_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_mp4v_depay_setcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4v_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4v_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG4 video depayloader", "Codec/Depayloader/Network/RTP",
"Extracts MPEG4 video from RTP packets (RFC 3016)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpmp4vdepay_debug, "rtpmp4vdepay", 0,
"MPEG4 video RTP Depayloader");
}
static void
gst_rtp_mp4v_depay_init (GstRtpMP4VDepay * rtpmp4vdepay,
GstRtpMP4VDepayClass * klass)
gst_rtp_mp4v_depay_init (GstRtpMP4VDepay * rtpmp4vdepay)
{
rtpmp4vdepay->adapter = gst_adapter_new ();
}
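The boilerplate change repeated in every element of this commit: GST_BOILERPLATE is replaced by G_DEFINE_TYPE plus a parent_class define, base_init disappears (its pad templates and details move into class_init), and the instance init function loses its klass argument. Schematically, with a hypothetical GstRtpFooDepay:

/* 0.10 */
GST_BOILERPLATE (GstRtpFooDepay, gst_rtp_foo_depay, GstBaseRTPDepayload,
    GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_foo_depay_base_init (gpointer klass);
static void gst_rtp_foo_depay_init (GstRtpFooDepay * depay,
    GstRtpFooDepayClass * klass);

/* 0.11 */
#define gst_rtp_foo_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpFooDepay, gst_rtp_foo_depay,
    GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_foo_depay_init (GstRtpFooDepay * depay);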
@ -170,7 +163,8 @@ static GstBuffer *
gst_rtp_mp4v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpMP4VDepay *rtpmp4vdepay;
GstBuffer *outbuf;
GstBuffer *outbuf = NULL;
GstRTPBuffer rtp;
rtpmp4vdepay = GST_RTP_MP4V_DEPAY (depayload);
@ -178,11 +172,12 @@ gst_rtp_mp4v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
if (GST_BUFFER_IS_DISCONT (buf))
gst_adapter_clear (rtpmp4vdepay->adapter);
outbuf = gst_rtp_buffer_get_payload_buffer (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
gst_adapter_push (rtpmp4vdepay->adapter, outbuf);
/* if this was the last packet of the VOP, create and push a buffer */
if (gst_rtp_buffer_get_marker (buf)) {
if (gst_rtp_buffer_get_marker (&rtp)) {
guint avail;
avail = gst_adapter_available (rtpmp4vdepay->adapter);
@ -190,11 +185,12 @@ gst_rtp_mp4v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
outbuf = gst_adapter_take_buffer (rtpmp4vdepay->adapter, avail);
GST_DEBUG ("gst_rtp_mp4v_depay_chain: pushing buffer of size %d",
GST_BUFFER_SIZE (outbuf));
return outbuf;
gst_buffer_get_size (outbuf));
}
return NULL;
gst_rtp_buffer_unmap (&rtp);
return outbuf;
}
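Depayloaders now map the incoming RTP buffer once, read everything through the GstRTPBuffer handle and unmap at a single exit point; note how the rewrite above keeps outbuf initialised to NULL so the same return serves both the marker and the non-marker case. A reduced sketch:

static GstBuffer *
depay_process_sketch (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
  GstBuffer *outbuf = NULL;
  GstRTPBuffer rtp;

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
  /* payload goes into an adapter until the last packet of the frame */
  if (gst_rtp_buffer_get_marker (&rtp)) {
    /* ... take the assembled frame from the adapter into outbuf ... */
  }
  gst_rtp_buffer_unmap (&rtp);

  return outbuf;
}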
static GstStateChangeReturn

View file

@ -78,38 +78,35 @@ static gboolean gst_rtp_mp4v_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
static GstFlowReturn gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
static gboolean gst_rtp_mp4v_pay_handle_event (GstPad * pad, GstEvent * event);
static gboolean gst_rtp_mp4v_pay_handle_event (GstBaseRTPPayload * pay,
GstEvent * event);
GST_BOILERPLATE (GstRtpMP4VPay, gst_rtp_mp4v_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD)
#define gst_rtp_mp4v_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMP4VPay, gst_rtp_mp4v_pay, GST_TYPE_BASE_RTP_PAYLOAD)
static void gst_rtp_mp4v_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4v_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_mp4v_pay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP MPEG4 Video payloader", "Codec/Payloader/Network/RTP",
"Payload MPEG-4 video as RTP packets (RFC 3016)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass)
static void gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gobject_class->set_property = gst_rtp_mp4v_pay_set_property;
gobject_class->get_property = gst_rtp_mp4v_pay_get_property;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4v_pay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_mp4v_pay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP MPEG4 Video payloader", "Codec/Payloader/Network/RTP",
"Payload MPEG-4 video as RTP packets (RFC 3016)",
"Wim Taymans <wim.taymans@gmail.com>");
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SEND_CONFIG,
g_param_spec_boolean ("send-config", "Send Config",
"Send the config parameters in RTP packets as well(deprecated "
@ -140,7 +137,7 @@ gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass)
}
static void
gst_rtp_mp4v_pay_init (GstRtpMP4VPay * rtpmp4vpay, GstRtpMP4VPayClass * klass)
gst_rtp_mp4v_pay_init (GstRtpMP4VPay * rtpmp4vpay)
{
rtpmp4vpay->adapter = gst_adapter_new ();
rtpmp4vpay->rate = 90000;
@ -216,20 +213,15 @@ gst_rtp_mp4v_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
GST_LOG_OBJECT (rtpmp4vpay, "got codec_data");
if (G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
GstBuffer *buffer;
guint8 *data;
guint size;
buffer = gst_value_get_buffer (codec_data);
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
if (size < 5)
if (gst_buffer_get_size (buffer) < 5)
goto done;
rtpmp4vpay->profile = data[4];
gst_buffer_extract (buffer, 4, &rtpmp4vpay->profile, 1);
GST_LOG_OBJECT (rtpmp4vpay, "configuring codec_data, profile %d",
data[4]);
rtpmp4vpay->profile);
if (rtpmp4vpay->config)
gst_buffer_unref (rtpmp4vpay->config);
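Peeking at buffer contents no longer goes through GST_BUFFER_DATA/GST_BUFFER_SIZE; small reads use gst_buffer_get_size() and gst_buffer_extract(), which copies a byte range out without mapping. The profile byte above is read roughly like this (buffer being the codec_data buffer):

guint8 profile;

if (gst_buffer_get_size (buffer) < 5)
  return;                       /* too short to contain the profile byte */
/* copy one byte at offset 4 out of the buffer */
gst_buffer_extract (buffer, 4, &profile, 1);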
@ -256,7 +248,6 @@ gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay)
GstBuffer *outbuf_data = NULL;
GstFlowReturn ret;
GstBufferList *list = NULL;
GstBufferListIterator *it = NULL;
/* the data available in the adapter is either smaller
* than the MTU or bigger. In the case it is smaller, the complete
@ -281,14 +272,13 @@ gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay)
* of buffers and the whole list will be pushed downstream
* at once */
list = gst_buffer_list_new ();
it = gst_buffer_list_iterate (list);
}
while (avail > 0) {
guint towrite;
guint8 *payload;
guint payload_len;
guint packet_len;
GstRTPBuffer rtp;
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
@ -299,43 +289,32 @@ gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay)
/* this is the payload length */
payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
if (rtpmp4vpay->buffer_list) {
/* create buffer without payload. The payload will be put
* in next buffer instead. Both buffers will be then added
* to the list */
outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
/* create buffer without payload. The payload will be put
* in next buffer instead. Both buffers will be merged */
outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
/* Take buffer with the payload from the adapter */
outbuf_data = gst_adapter_take_buffer (rtpmp4vpay->adapter, payload_len);
} else {
/* create buffer to hold the payload */
outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
/* copy payload */
payload = gst_rtp_buffer_get_payload (outbuf);
gst_adapter_copy (rtpmp4vpay->adapter, payload, 0, payload_len);
gst_adapter_flush (rtpmp4vpay->adapter, payload_len);
}
/* Take buffer with the payload from the adapter */
outbuf_data = gst_adapter_take_buffer (rtpmp4vpay->adapter, payload_len);
avail -= payload_len;
gst_rtp_buffer_set_marker (outbuf, avail == 0);
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
gst_rtp_buffer_set_marker (&rtp, avail == 0);
gst_rtp_buffer_unmap (&rtp);
outbuf = gst_buffer_join (outbuf, outbuf_data);
GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4vpay->first_timestamp;
if (rtpmp4vpay->buffer_list) {
/* create a new group to hold the rtp header and the payload */
gst_buffer_list_iterator_add_group (it);
gst_buffer_list_iterator_add (it, outbuf);
gst_buffer_list_iterator_add (it, outbuf_data);
/* add to list */
gst_buffer_list_insert (list, -1, outbuf);
} else {
ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), outbuf);
}
}
if (rtpmp4vpay->buffer_list) {
gst_buffer_list_iterator_free (it);
/* push the whole buffer list at once */
ret =
gst_basertppayload_push_list (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), list);
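The buffer-list path also loses the 0.10 iterator API: instead of adding a group containing a header buffer and a payload buffer, the two are merged with gst_buffer_join() and appended with gst_buffer_list_insert(), and the whole list is pushed in one go. A sketch of one loop iteration (adapter, list and pay stand in for the element's fields):

GstBuffer *header, *data, *outbuf;

header = gst_rtp_buffer_new_allocate (0, 0, 0);
data = gst_adapter_take_buffer (adapter, payload_len);
/* join consumes both buffers and yields the complete RTP packet */
outbuf = gst_buffer_join (header, data);
gst_buffer_list_insert (list, -1, outbuf);
/* ... after the loop ... */
ret = gst_basertppayload_push_list (GST_BASE_RTP_PAYLOAD (pay), list);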
@ -400,16 +379,20 @@ gst_rtp_mp4v_pay_depay_data (GstRtpMP4VPay * enc, guint8 * data, guint size,
/* see if config changed */
equal = FALSE;
if (enc->config) {
if (GST_BUFFER_SIZE (enc->config) == i) {
equal = memcmp (GST_BUFFER_DATA (enc->config), data, i) == 0;
if (gst_buffer_get_size (enc->config) == i) {
equal = gst_buffer_memcmp (enc->config, 0, data, i) == 0;
}
}
/* if config string changed or new profile, make new caps */
if (!equal || newprofile) {
guint8 *bdata;
if (enc->config)
gst_buffer_unref (enc->config);
enc->config = gst_buffer_new_and_alloc (i);
memcpy (GST_BUFFER_DATA (enc->config), data, i);
bdata = gst_buffer_map (enc->config, NULL, NULL, GST_MAP_WRITE);
memcpy (bdata, data, i);
gst_buffer_unmap (enc->config, bdata, -1);
gst_rtp_mp4v_pay_new_caps (enc);
}
*strip = i;
@ -458,7 +441,8 @@ gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload * basepayload,
{
GstRtpMP4VPay *rtpmp4vpay;
GstFlowReturn ret;
guint size, avail;
guint avail;
gsize size;
guint packet_len;
guint8 *data;
gboolean flush;
@ -472,8 +456,7 @@ gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload * basepayload,
rtpmp4vpay = GST_RTP_MP4V_PAY (basepayload);
size = GST_BUFFER_SIZE (buffer);
data = GST_BUFFER_DATA (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
duration = GST_BUFFER_DURATION (buffer);
avail = gst_adapter_available (rtpmp4vpay->adapter);
@ -490,21 +473,25 @@ gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload * basepayload,
/* depay incoming data and see if we need to start a new RTP
* packet */
flush = gst_rtp_mp4v_pay_depay_data (rtpmp4vpay, data, size, &strip, &vopi);
gst_buffer_unmap (buffer, data, -1);
data = NULL;
if (strip) {
/* strip off config if requested */
if (!(rtpmp4vpay->config_interval > 0)) {
GstBuffer *subbuf;
GST_LOG_OBJECT (rtpmp4vpay, "stripping config at %d, size %d", strip,
size - strip);
(gint) size - strip);
/* strip off header */
subbuf = gst_buffer_create_sub (buffer, strip, size - strip);
subbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY, strip,
size - strip);
GST_BUFFER_TIMESTAMP (subbuf) = timestamp;
gst_buffer_unref (buffer);
buffer = subbuf;
size = GST_BUFFER_SIZE (buffer);
size = gst_buffer_get_size (buffer);
} else {
GST_LOG_OBJECT (rtpmp4vpay, "found config in stream");
rtpmp4vpay->last_config = timestamp;
@ -555,7 +542,7 @@ gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload * basepayload,
gst_buffer_unref (buffer);
buffer = superbuf;
size = GST_BUFFER_SIZE (buffer);
size = gst_buffer_get_size (buffer);
if (timestamp != -1) {
rtpmp4vpay->last_config = timestamp;
@ -590,16 +577,16 @@ gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload * basepayload,
}
static gboolean
gst_rtp_mp4v_pay_handle_event (GstPad * pad, GstEvent * event)
gst_rtp_mp4v_pay_handle_event (GstBaseRTPPayload * pay, GstEvent * event)
{
GstRtpMP4VPay *rtpmp4vpay;
rtpmp4vpay = GST_RTP_MP4V_PAY (gst_pad_get_parent (pad));
rtpmp4vpay = GST_RTP_MP4V_PAY (pay);
GST_DEBUG ("Got event: %s", GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NEWSEGMENT:
case GST_EVENT_SEGMENT:
case GST_EVENT_EOS:
/* This flush call makes sure that the last buffer is always pushed
* to the base payloader */
@ -612,8 +599,6 @@ gst_rtp_mp4v_pay_handle_event (GstPad * pad, GstEvent * event)
break;
}
g_object_unref (rtpmp4vpay);
/* let parent handle event too */
return FALSE;
}

View file

@ -77,32 +77,19 @@ static gboolean gst_rtp_qcelp_depay_setcaps (GstBaseRTPDepayload * depayload,
static GstBuffer *gst_rtp_qcelp_depay_process (GstBaseRTPDepayload * depayload,
GstBuffer * buf);
GST_BOILERPLATE (GstRtpQCELPDepay, gst_rtp_qcelp_depay, GstBaseRTPDepayload,
#define gst_rtp_qcelp_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpQCELPDepay, gst_rtp_qcelp_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
gst_rtp_qcelp_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_qcelp_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_qcelp_depay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP QCELP depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts QCELP (PureVoice) audio from RTP packets (RFC 2658)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_qcelp_depay_class_init (GstRtpQCELPDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_qcelp_depay_finalize;
@ -110,13 +97,22 @@ gst_rtp_qcelp_depay_class_init (GstRtpQCELPDepayClass * klass)
gstbasertpdepayload_class->process = gst_rtp_qcelp_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_qcelp_depay_setcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_qcelp_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_qcelp_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP QCELP depayloader", "Codec/Depayloader/Network/RTP",
"Extracts QCELP (PureVoice) audio from RTP packets (RFC 2658)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpqcelpdepay_debug, "rtpqcelpdepay", 0,
"QCELP RTP Depayloader");
}
static void
gst_rtp_qcelp_depay_init (GstRtpQCELPDepay * rtpqcelpdepay,
GstRtpQCELPDepayClass * klass)
gst_rtp_qcelp_depay_init (GstRtpQCELPDepay * rtpqcelpdepay)
{
GstBaseRTPDepayload G_GNUC_UNUSED *depayload;
@ -245,9 +241,12 @@ static GstBuffer *
create_erasure_buffer (GstRtpQCELPDepay * depay)
{
GstBuffer *outbuf;
guint8 *data;
outbuf = gst_buffer_new_and_alloc (1);
GST_BUFFER_DATA (outbuf)[0] = 14;
data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
data[0] = 14;
gst_buffer_unmap (outbuf, data, -1);
return outbuf;
}
@ -261,17 +260,20 @@ gst_rtp_qcelp_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
guint payload_len, offset, index;
guint8 *payload;
guint LLL, NNN;
GstRTPBuffer rtp;
depay = GST_RTP_QCELP_DEPAY (depayload);
payload_len = gst_rtp_buffer_get_payload_len (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 2)
goto too_small;
timestamp = GST_BUFFER_TIMESTAMP (buf);
payload = gst_rtp_buffer_get_payload (buf);
payload = gst_rtp_buffer_get_payload (&rtp);
/* 0 1 2 3 4 5 6 7
* +-+-+-+-+-+-+-+-+
@ -353,7 +355,7 @@ gst_rtp_qcelp_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
outbuf = create_erasure_buffer (depay);
} else {
/* each frame goes into its buffer */
outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, offset, frame_len);
outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, offset, frame_len);
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
@ -395,6 +397,7 @@ gst_rtp_qcelp_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
flush_packets (depay);
}
gst_rtp_buffer_unmap (&rtp);
return NULL;
/* ERRORS */
@ -402,24 +405,28 @@ too_small:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP payload too small (%d)", payload_len));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_lll:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid LLL received (%d)", LLL));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_nnn:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid NNN received (%d)", NNN));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_frame:
{
GST_ELEMENT_WARNING (depay, STREAM, DECODE,
(NULL), ("QCELP RTP invalid frame received"));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -46,7 +46,8 @@ GST_STATIC_PAD_TEMPLATE ("sink",
"encoding-name = (string)\"X-QDM\"")
);
GST_BOILERPLATE (GstRtpQDM2Depay, gst_rtp_qdm2_depay, GstBaseRTPDepayload,
#define gst_rtp_qdm2_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpQDM2Depay, gst_rtp_qdm2_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static const guint8 headheader[20] = {
@ -65,23 +66,6 @@ static GstBuffer *gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload,
gboolean gst_rtp_qdm2_depay_setcaps (GstBaseRTPDepayload * filter,
GstCaps * caps);
static void
gst_rtp_qdm2_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_qdm2_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_qdm2_depay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP QDM2 depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts QDM2 audio from RTP packets (no RFC)",
"Edward Hervey <bilboed@bilboed.com>");
}
static void
gst_rtp_qdm2_depay_class_init (GstRtpQDM2DepayClass * klass)
{
@ -99,11 +83,21 @@ gst_rtp_qdm2_depay_class_init (GstRtpQDM2DepayClass * klass)
gobject_class->finalize = gst_rtp_qdm2_depay_finalize;
gstelement_class->change_state = gst_rtp_qdm2_depay_change_state;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_qdm2_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_qdm2_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP QDM2 depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts QDM2 audio from RTP packets (no RFC)",
"Edward Hervey <bilboed@bilboed.com>");
}
static void
gst_rtp_qdm2_depay_init (GstRtpQDM2Depay * rtpqdm2depay,
GstRtpQDM2DepayClass * klass)
gst_rtp_qdm2_depay_init (GstRtpQDM2Depay * rtpqdm2depay)
{
rtpqdm2depay->adapter = gst_adapter_new ();
}
@ -187,9 +181,9 @@ flush_data (GstRtpQDM2Depay * depay)
GST_MEMDUMP ("Extracted packet", data, depay->packetsize);
buf = gst_buffer_new ();
GST_BUFFER_DATA (buf) = data;
GST_BUFFER_MALLOCDATA (buf) = data;
GST_BUFFER_SIZE (buf) = depay->packetsize;
gst_buffer_take_memory (buf, -1,
gst_memory_new_wrapped (0, data, g_free, depay->packetsize, 0,
depay->packetsize));
gst_adapter_push (depay->adapter, buf);
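Wrapping already-allocated memory into a buffer replaces the GST_BUFFER_DATA/GST_BUFFER_MALLOCDATA/GST_BUFFER_SIZE assignments with a wrapped GstMemory; the argument order below is the one used throughout this port (flags, data, free function, maxsize, offset, size):

GstBuffer *buf;

buf = gst_buffer_new ();
gst_buffer_take_memory (buf, -1,
    gst_memory_new_wrapped (0, data, g_free, size, 0, size));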
@ -236,8 +230,9 @@ static GstBuffer *
gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpQDM2Depay *rtpqdm2depay;
GstBuffer *outbuf;
GstBuffer *outbuf = NULL;
guint16 seq;
GstRTPBuffer rtp;
rtpqdm2depay = GST_RTP_QDM2_DEPAY (depayload);
@ -247,12 +242,13 @@ gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
guint avail;
guint pos = 0;
payload_len = gst_rtp_buffer_get_payload_len (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto bad_packet;
payload = gst_rtp_buffer_get_payload (buf);
seq = gst_rtp_buffer_get_seq (buf);
payload = gst_rtp_buffer_get_payload (&rtp);
seq = gst_rtp_buffer_get_seq (&rtp);
if (G_UNLIKELY (seq != rtpqdm2depay->nextseq)) {
GST_DEBUG ("GAP in sequence number, Resetting data !");
/* Flush previous data */
@ -287,6 +283,7 @@ gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
if (G_UNLIKELY (!rtpqdm2depay->configured)) {
guint8 *ourdata;
GstBuffer *codecdata;
guint8 *cdata;
GstCaps *caps;
/* First bytes are unknown */
@ -309,8 +306,10 @@ gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* Caps */
codecdata = gst_buffer_new_and_alloc (48);
memcpy (GST_BUFFER_DATA (codecdata), headheader, 20);
memcpy (GST_BUFFER_DATA (codecdata) + 20, ourdata, 28);
cdata = gst_buffer_map (codecdata, NULL, NULL, GST_MAP_WRITE);
memcpy (cdata, headheader, 20);
memcpy (cdata + 20, ourdata, 28);
gst_buffer_unmap (codecdata, cdata, -1);
caps = gst_caps_new_simple ("audio/x-qdm2",
"samplesize", G_TYPE_INT, 16,
@ -364,16 +363,18 @@ gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
GST_BUFFER_TIMESTAMP (outbuf) = rtpqdm2depay->ptimestamp;
GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (rtpqdm2depay->ptimestamp));
return outbuf;
}
}
return NULL;
gst_rtp_buffer_unmap (&rtp);
return outbuf;
/* ERRORS */
bad_packet:
{
GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE,
(NULL), ("Packet was too short"));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -51,38 +51,33 @@ static GstBuffer *gst_rtp_siren_depay_process (GstBaseRTPDepayload * depayload,
static gboolean gst_rtp_siren_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GST_BOILERPLATE (GstRTPSirenDepay, gst_rtp_siren_depay, GstBaseRTPDepayload,
G_DEFINE_TYPE (GstRTPSirenDepay, gst_rtp_siren_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
gst_rtp_siren_depay_base_init (gpointer klass)
gst_rtp_siren_depay_class_init (GstRTPSirenDepayClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gst_element_class_add_pad_template (element_class,
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_siren_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_siren_depay_setcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_siren_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_siren_depay_sink_template));
gst_element_class_set_details_simple (element_class,
gst_element_class_set_details_simple (gstelement_class,
"RTP Siren packet depayloader", "Codec/Depayloader/Network/RTP",
"Extracts Siren audio from RTP packets",
"Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
}
static void
gst_rtp_siren_depay_class_init (GstRTPSirenDepayClass * klass)
{
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_siren_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_siren_depay_setcaps;
}
static void
gst_rtp_siren_depay_init (GstRTPSirenDepay * rtpsirendepay,
GstRTPSirenDepayClass * klass)
gst_rtp_siren_depay_init (GstRTPSirenDepay * rtpsirendepay)
{
}
@ -110,8 +105,11 @@ static GstBuffer *
gst_rtp_siren_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
GstRTPBuffer rtp;
outbuf = gst_rtp_buffer_get_payload_buffer (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
gst_rtp_buffer_unmap (&rtp);
return outbuf;
}

View file

@ -51,40 +51,35 @@ GST_STATIC_PAD_TEMPLATE ("src",
static gboolean gst_rtp_siren_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
GST_BOILERPLATE (GstRTPSirenPay, gst_rtp_siren_pay, GstBaseRTPAudioPayload,
G_DEFINE_TYPE (GstRTPSirenPay, gst_rtp_siren_pay,
GST_TYPE_BASE_RTP_AUDIO_PAYLOAD);
static void
gst_rtp_siren_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_siren_pay_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_siren_pay_src_template));
gst_element_class_set_details_simple (element_class,
"RTP Payloader for Siren Audio", "Codec/Payloader/Network/RTP",
"Packetize Siren audio streams into RTP packets",
"Youness Alaoui <kakaroto@kakaroto.homelinux.net>");
}
static void
gst_rtp_siren_pay_class_init (GstRTPSirenPayClass * klass)
{
GstElementClass *gstelement_class;
GstBaseRTPPayloadClass *gstbasertppayload_class;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_siren_pay_setcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_siren_pay_sink_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_siren_pay_src_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP Payloader for Siren Audio", "Codec/Payloader/Network/RTP",
"Packetize Siren audio streams into RTP packets",
"Youness Alaoui <kakaroto@kakaroto.homelinux.net>");
GST_DEBUG_CATEGORY_INIT (rtpsirenpay_debug, "rtpsirenpay", 0,
"siren audio RTP payloader");
}
static void
gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay,
GstRTPSirenPayClass * klass)
gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay)
{
GstBaseRTPPayload *basertppayload;
GstBaseRTPAudioPayload *basertpaudiopayload;

View file

@ -63,38 +63,33 @@ static GstBuffer *gst_rtp_speex_depay_process (GstBaseRTPDepayload * depayload,
static gboolean gst_rtp_speex_depay_setcaps (GstBaseRTPDepayload * depayload,
GstCaps * caps);
GST_BOILERPLATE (GstRtpSPEEXDepay, gst_rtp_speex_depay, GstBaseRTPDepayload,
G_DEFINE_TYPE (GstRtpSPEEXDepay, gst_rtp_speex_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void
gst_rtp_speex_depay_base_init (gpointer klass)
gst_rtp_speex_depay_class_init (GstRtpSPEEXDepayClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gst_element_class_add_pad_template (element_class,
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_speex_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_speex_depay_setcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_speex_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_speex_depay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP Speex depayloader",
"Codec/Depayloader/Network/RTP",
gst_element_class_set_details_simple (gstelement_class,
"RTP Speex depayloader", "Codec/Depayloader/Network/RTP",
"Extracts Speex audio from RTP packets",
"Edgard Lima <edgard.lima@indt.org.br>");
}
static void
gst_rtp_speex_depay_class_init (GstRtpSPEEXDepayClass * klass)
{
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gstbasertpdepayload_class->process = gst_rtp_speex_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_speex_depay_setcaps;
}
static void
gst_rtp_speex_depay_init (GstRtpSPEEXDepay * rtpspeexdepay,
GstRtpSPEEXDepayClass * klass)
gst_rtp_speex_depay_init (GstRtpSPEEXDepay * rtpspeexdepay)
{
}
@ -123,7 +118,7 @@ gst_rtp_speex_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
GstRtpSPEEXDepay *rtpspeexdepay;
gint clock_rate, nb_channels;
GstBuffer *buf;
guint8 *data;
guint8 *data, *bdata;
const gchar *params;
GstCaps *srccaps;
gboolean res;
@ -144,7 +139,7 @@ gst_rtp_speex_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
/* construct minimal header and comment packet for the decoder */
buf = gst_buffer_new_and_alloc (80);
data = GST_BUFFER_DATA (buf);
data = bdata = gst_buffer_map (buf, NULL, NULL, GST_MAP_WRITE);
memcpy (data, "Speex ", 8);
data += 8;
memcpy (data, "1.1.12", 7);
@ -174,19 +169,19 @@ gst_rtp_speex_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
GST_WRITE_UINT32_LE (data, 0); /* reserved1 */
data += 4;
GST_WRITE_UINT32_LE (data, 0); /* reserved2 */
gst_buffer_unmap (buf, bdata, -1);
srccaps = gst_caps_new_simple ("audio/x-speex", NULL);
res = gst_pad_set_caps (depayload->srcpad, srccaps);
gst_caps_unref (srccaps);
gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpspeexdepay), buf);
buf = gst_buffer_new_and_alloc (sizeof (gst_rtp_speex_comment));
memcpy (GST_BUFFER_DATA (buf), gst_rtp_speex_comment,
sizeof (gst_rtp_speex_comment));
bdata = gst_buffer_map (buf, NULL, NULL, GST_MAP_WRITE);
memcpy (bdata, gst_rtp_speex_comment, sizeof (gst_rtp_speex_comment));
gst_buffer_unmap (buf, bdata, -1);
gst_buffer_set_caps (buf, GST_PAD_CAPS (depayload->srcpad));
gst_base_rtp_depayload_push (GST_BASE_RTP_DEPAYLOAD (rtpspeexdepay), buf);
return res;
@ -203,14 +198,18 @@ static GstBuffer *
gst_rtp_speex_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf = NULL;
GstRTPBuffer rtp;
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
GST_DEBUG ("process : got %d bytes, mark %d ts %u seqn %d",
GST_BUFFER_SIZE (buf),
gst_rtp_buffer_get_marker (buf),
gst_rtp_buffer_get_timestamp (buf), gst_rtp_buffer_get_seq (buf));
gst_buffer_get_size (buf),
gst_rtp_buffer_get_marker (&rtp),
gst_rtp_buffer_get_timestamp (&rtp), gst_rtp_buffer_get_seq (&rtp));
/* nothing special to be done */
outbuf = gst_rtp_buffer_get_payload_buffer (buf);
outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
gst_rtp_buffer_unmap (&rtp);
if (outbuf)
GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;

View file

@ -56,30 +56,12 @@ static GstStateChangeReturn gst_rtp_speex_pay_change_state (GstElement *
static gboolean gst_rtp_speex_pay_setcaps (GstBaseRTPPayload * payload,
GstCaps * caps);
static GstCaps *gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload,
GstPad * pad);
GstPad * pad, GstCaps * filter);
static GstFlowReturn gst_rtp_speex_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
GST_BOILERPLATE (GstRtpSPEEXPay, gst_rtp_speex_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_speex_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_speex_pay_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_speex_pay_src_template));
gst_element_class_set_details_simple (element_class, "RTP Speex payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes Speex audio into a RTP packet",
"Edgard Lima <edgard.lima@indt.org.br>");
GST_DEBUG_CATEGORY_INIT (rtpspeexpay_debug, "rtpspeexpay", 0,
"Speex RTP Payloader");
}
#define gst_rtp_speex_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpSPEEXPay, gst_rtp_speex_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static void
gst_rtp_speex_pay_class_init (GstRtpSPEEXPayClass * klass)
@ -95,11 +77,22 @@ gst_rtp_speex_pay_class_init (GstRtpSPEEXPayClass * klass)
gstbasertppayload_class->set_caps = gst_rtp_speex_pay_setcaps;
gstbasertppayload_class->get_caps = gst_rtp_speex_pay_getcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_speex_pay_handle_buffer;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_speex_pay_sink_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_speex_pay_src_template));
gst_element_class_set_details_simple (gstelement_class, "RTP Speex payloader",
"Codec/Payloader/Network/RTP",
"Payload-encodes Speex audio into a RTP packet",
"Edgard Lima <edgard.lima@indt.org.br>");
GST_DEBUG_CATEGORY_INIT (rtpspeexpay_debug, "rtpspeexpay", 0,
"Speex RTP Payloader");
}
static void
gst_rtp_speex_pay_init (GstRtpSPEEXPay * rtpspeexpay,
GstRtpSPEEXPayClass * klass)
gst_rtp_speex_pay_init (GstRtpSPEEXPay * rtpspeexpay)
{
GST_BASE_RTP_PAYLOAD (rtpspeexpay)->clock_rate = 8000;
GST_BASE_RTP_PAYLOAD_PT (rtpspeexpay) = 110; /* Create String */
@ -114,7 +107,8 @@ gst_rtp_speex_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
static GstCaps *
gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad,
GstCaps * filter)
{
GstCaps *otherpadcaps;
GstCaps *caps;
@ -135,6 +129,13 @@ gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
gst_caps_unref (otherpadcaps);
}
if (filter) {
GstCaps *tcaps = caps;
caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (tcaps);
}
return caps;
}
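The get_caps vmethod grows a filter argument in 0.11; when a filter is supplied, the result is intersected with it before being returned, as the hunk above does. In outline:

static GstCaps *
pay_getcaps_sketch (GstBaseRTPPayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *caps;

  /* placeholder: the real implementation derives caps from the peer */
  caps = gst_caps_new_any ();

  if (filter) {
    GstCaps *tcaps = caps;

    caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (tcaps);
  }
  return caps;
}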
@ -231,16 +232,17 @@ gst_rtp_speex_pay_handle_buffer (GstBaseRTPPayload * basepayload,
GstBuffer * buffer)
{
GstRtpSPEEXPay *rtpspeexpay;
guint size, payload_len;
guint payload_len;
gsize size;
GstBuffer *outbuf;
guint8 *payload, *data;
GstClockTime timestamp, duration;
GstFlowReturn ret;
GstRTPBuffer rtp;
rtpspeexpay = GST_RTP_SPEEX_PAY (basepayload);
size = GST_BUFFER_SIZE (buffer);
data = GST_BUFFER_DATA (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
switch (rtpspeexpay->packet) {
case 0:
@ -279,15 +281,19 @@ gst_rtp_speex_pay_handle_buffer (GstBaseRTPPayload * basepayload,
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_DURATION (outbuf) = duration;
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
/* get payload */
payload = gst_rtp_buffer_get_payload (outbuf);
payload = gst_rtp_buffer_get_payload (&rtp);
/* copy data in payload */
memcpy (&payload[0], data, size);
gst_rtp_buffer_unmap (&rtp);
ret = gst_basertppayload_push (basepayload, outbuf);
done:
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
rtpspeexpay->packet++;
@ -299,6 +305,7 @@ parse_error:
{
GST_ELEMENT_ERROR (rtpspeexpay, STREAM, DECODE, (NULL),
("Error parsing first identification packet."));
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}

View file

@ -47,7 +47,8 @@ GST_STATIC_PAD_TEMPLATE ("sink",
"encoding-name = (string) { \"X-SV3V-ES\", \"X-SORENSON-VIDEO\" , \"X-SORENSONVIDEO\" , \"X-SorensonVideo\" }")
);
GST_BOILERPLATE (GstRtpSV3VDepay, gst_rtp_sv3v_depay, GstBaseRTPDepayload,
#define gst_rtp_sv3v_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpSV3VDepay, gst_rtp_sv3v_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static void gst_rtp_sv3v_depay_finalize (GObject * object);
@ -60,23 +61,6 @@ static GstBuffer *gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload,
gboolean gst_rtp_sv3v_depay_setcaps (GstBaseRTPDepayload * filter,
GstCaps * caps);
static void
gst_rtp_sv3v_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_sv3v_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_sv3v_depay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP SVQ3 depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts SVQ3 video from RTP packets (no RFC)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_sv3v_depay_class_init (GstRtpSV3VDepayClass * klass)
{
@ -94,11 +78,20 @@ gst_rtp_sv3v_depay_class_init (GstRtpSV3VDepayClass * klass)
gobject_class->finalize = gst_rtp_sv3v_depay_finalize;
gstelement_class->change_state = gst_rtp_sv3v_depay_change_state;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_sv3v_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_sv3v_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP SVQ3 depayloader", "Codec/Depayloader/Network/RTP",
"Extracts SVQ3 video from RTP packets (no RFC)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_sv3v_depay_init (GstRtpSV3VDepay * rtpsv3vdepay,
GstRtpSV3VDepayClass * klass)
gst_rtp_sv3v_depay_init (GstRtpSV3VDepay * rtpsv3vdepay)
{
rtpsv3vdepay->adapter = gst_adapter_new ();
}
@ -155,11 +148,14 @@ gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
gboolean C, S, E;
GstBuffer *outbuf = NULL;
guint16 seq;
GstRTPBuffer rtp;
rtpsv3vdepay = GST_RTP_SV3V_DEPAY (depayload);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
/* flush on sequence number gaps */
seq = gst_rtp_buffer_get_seq (buf);
seq = gst_rtp_buffer_get_seq (&rtp);
GST_DEBUG ("timestamp %" GST_TIME_FORMAT ", sequence number:%d",
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), seq);
@ -170,13 +166,13 @@ gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
}
rtpsv3vdepay->nextseq = seq + 1;
payload_len = gst_rtp_buffer_get_payload_len (buf);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto bad_packet;
payload = gst_rtp_buffer_get_payload (buf);
payload = gst_rtp_buffer_get_payload (&rtp);
M = gst_rtp_buffer_get_marker (buf);
M = gst_rtp_buffer_get_marker (&rtp);
/* This is all a guess:
* 1 1 1 1 1 1
@ -205,12 +201,14 @@ gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
GstCaps *caps;
GstBuffer *codec_data;
guint8 res;
guint8 *cdata;
GST_DEBUG ("Configuration packet");
/* if we already have caps, we don't need to do anything. FIXME, check if
* something changed. */
if (G_UNLIKELY (GST_PAD_CAPS (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload)))) {
if (G_UNLIKELY (gst_pad_has_current_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD
(depayload)))) {
GST_DEBUG ("Already configured, skipping config parsing");
goto beach;
}
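Checking whether the source pad already carries negotiated caps changes too: GST_PAD_CAPS() is gone and gst_pad_has_current_caps() is used instead, as above. In short:

/* 0.10 */
if (GST_PAD_CAPS (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload))) {
  /* already configured, skip config parsing */
}
/* 0.11 */
if (gst_pad_has_current_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload))) {
  /* already configured, skip config parsing */
}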
@ -231,12 +229,14 @@ gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* CodecData needs to be 'SEQH' + len (32bit) + data according to
* ffmpeg's libavcodec/svq3.c:svq3_decode_init */
codec_data = gst_buffer_new_and_alloc (payload_len + 6);
memcpy (GST_BUFFER_DATA (codec_data), "SEQH", 4);
GST_WRITE_UINT32_LE (GST_BUFFER_DATA (codec_data) + 4, payload_len - 2);
memcpy (GST_BUFFER_DATA (codec_data) + 8, payload + 2, payload_len - 2);
cdata = gst_buffer_map (codec_data, NULL, NULL, GST_MAP_WRITE);
memcpy (cdata, "SEQH", 4);
GST_WRITE_UINT32_LE (cdata + 4, payload_len - 2);
memcpy (cdata + 8, payload + 2, payload_len - 2);
GST_MEMDUMP ("codec_data", GST_BUFFER_DATA (codec_data),
GST_BUFFER_SIZE (codec_data));
GST_MEMDUMP ("codec_data", cdata, gst_buffer_get_size (codec_data));
gst_buffer_unmap (codec_data, cdata, -1);
caps = gst_caps_new_simple ("video/x-svq",
"svqversion", G_TYPE_INT, 3,
@ -258,7 +258,7 @@ gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
GST_DEBUG ("Storing incoming payload");
/* store data in adapter, strip off the 2 byte header */
tmpbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 2, -1);
tmpbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 2, -1);
gst_adapter_push (rtpsv3vdepay->adapter, tmpbuf);
if (G_UNLIKELY (M)) {
@ -272,6 +272,7 @@ gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
}
beach:
gst_rtp_buffer_unmap (&rtp);
return outbuf;
/* ERRORS */
@ -279,6 +280,7 @@ bad_packet:
{
GST_ELEMENT_WARNING (rtpsv3vdepay, STREAM, DECODE,
(NULL), ("Packet was too short"));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -60,7 +60,8 @@ GST_STATIC_PAD_TEMPLATE ("src",
GST_STATIC_CAPS ("video/x-theora")
);
GST_BOILERPLATE (GstRtpTheoraDepay, gst_rtp_theora_depay, GstBaseRTPDepayload,
#define gst_rtp_theora_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpTheoraDepay, gst_rtp_theora_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_theora_depay_setcaps (GstBaseRTPDepayload * depayload,
@ -72,30 +73,15 @@ static gboolean gst_rtp_theora_depay_packet_lost (GstBaseRTPDepayload *
static void gst_rtp_theora_depay_finalize (GObject * object);
static void
gst_rtp_theora_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_theora_depay_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_theora_depay_src_template));
gst_element_class_set_details_simple (element_class, "RTP Theora depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts Theora video from RTP packets (draft-01 of RFC XXXX)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_theora_depay_class_init (GstRtpTheoraDepayClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
gobject_class->finalize = gst_rtp_theora_depay_finalize;
@ -104,13 +90,22 @@ gst_rtp_theora_depay_class_init (GstRtpTheoraDepayClass * klass)
gstbasertpdepayload_class->set_caps = gst_rtp_theora_depay_setcaps;
gstbasertpdepayload_class->packet_lost = gst_rtp_theora_depay_packet_lost;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_theora_depay_sink_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_theora_depay_src_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP Theora depayloader", "Codec/Depayloader/Network/RTP",
"Extracts Theora video from RTP packets (draft-01 of RFC XXXX)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtptheoradepay_debug, "rtptheoradepay", 0,
"Theora RTP Depayloader");
}
static void
gst_rtp_theora_depay_init (GstRtpTheoraDepay * rtptheoradepay,
GstRtpTheoraDepayClass * klass)
gst_rtp_theora_depay_init (GstRtpTheoraDepay * rtptheoradepay)
{
rtptheoradepay->adapter = gst_adapter_new ();
}
@ -131,14 +126,13 @@ gst_rtp_theora_depay_parse_configuration (GstRtpTheoraDepay * rtptheoradepay,
{
GstBuffer *buf;
guint32 num_headers;
guint8 *data;
guint size;
guint8 *data, *bdata;
gsize size;
gint i, j;
data = GST_BUFFER_DATA (confbuf);
size = GST_BUFFER_SIZE (confbuf);
data = bdata = gst_buffer_map (confbuf, &size, NULL, GST_MAP_READ);
GST_DEBUG_OBJECT (rtptheoradepay, "config size %u", size);
GST_DEBUG_OBJECT (rtptheoradepay, "config size %" G_GSIZE_FORMAT, size);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Number of packed headers |
@ -241,6 +235,7 @@ gst_rtp_theora_depay_parse_configuration (GstRtpTheoraDepay * rtptheoradepay,
for (j = 0; j <= n_headers; j++) {
guint h_size;
guint8 *odata;
h_size = h_sizes[j];
if (size < h_size) {
@ -257,19 +252,24 @@ gst_rtp_theora_depay_parse_configuration (GstRtpTheoraDepay * rtptheoradepay,
h_size);
buf = gst_buffer_new_and_alloc (h_size);
memcpy (GST_BUFFER_DATA (buf), data, h_size);
odata = gst_buffer_map (buf, NULL, NULL, GST_MAP_WRITE);
memcpy (odata, data, h_size);
gst_buffer_unmap (buf, odata, -1);
conf->headers = g_list_append (conf->headers, buf);
data += h_size;
size -= h_size;
}
rtptheoradepay->configs = g_list_append (rtptheoradepay->configs, conf);
}
gst_buffer_unmap (confbuf, bdata, -1);
return TRUE;
/* ERRORS */
too_small:
{
GST_DEBUG_OBJECT (rtptheoradepay, "configuration too small");
gst_buffer_unmap (confbuf, bdata, -1);
return FALSE;
}
}
@ -287,7 +287,7 @@ gst_rtp_theora_depay_parse_inband_configuration (GstRtpTheoraDepay *
/* transform inline to out-of-band and parse that one */
confbuf = gst_buffer_new_and_alloc (size + 9);
conf = GST_BUFFER_DATA (confbuf);
conf = gst_buffer_map (confbuf, NULL, NULL, GST_MAP_WRITE);
/* 1 header */
GST_WRITE_UINT32_BE (conf, 1);
/* write Ident */
@ -296,6 +296,7 @@ gst_rtp_theora_depay_parse_inband_configuration (GstRtpTheoraDepay *
GST_WRITE_UINT16_BE (conf + 7, length);
/* copy remainder */
memcpy (conf + 9, configuration, size);
gst_buffer_unmap (confbuf, conf, -1);
return gst_rtp_theora_depay_parse_configuration (rtptheoradepay, confbuf);
}
@ -330,9 +331,8 @@ gst_rtp_theora_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
data = g_base64_decode (configuration, &size);
confbuf = gst_buffer_new ();
GST_BUFFER_DATA (confbuf) = data;
GST_BUFFER_MALLOCDATA (confbuf) = data;
GST_BUFFER_SIZE (confbuf) = size;
gst_buffer_take_memory (confbuf, -1,
gst_memory_new_wrapped (0, data, g_free, size, 0, size));
if (!gst_rtp_theora_depay_parse_configuration (rtptheoradepay, confbuf))
goto invalid_configuration;
@ -407,10 +407,13 @@ gst_rtp_theora_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
guint32 timestamp;
guint32 header, ident;
guint8 F, TDT, packets;
GstRTPBuffer rtp;
rtptheoradepay = GST_RTP_THEORA_DEPAY (depayload);
payload_len = gst_rtp_buffer_get_payload_len (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
GST_DEBUG_OBJECT (depayload, "got RTP packet of size %d", payload_len);
@ -418,7 +421,7 @@ gst_rtp_theora_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
if (G_UNLIKELY (payload_len < 4))
goto packet_short;
payload = gst_rtp_buffer_get_payload (buf);
payload = gst_rtp_buffer_get_payload (&rtp);
header = GST_READ_UINT32_BE (payload);
/*
@ -483,7 +486,7 @@ gst_rtp_theora_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* first assembled packet, reuse 2 bytes to store the length */
headerskip = (F == 1 ? 4 : 6);
/* skip header and length. */
vdata = gst_rtp_buffer_get_payload_subbuffer (buf, headerskip, -1);
vdata = gst_rtp_buffer_get_payload_subbuffer (&rtp, headerskip, -1);
GST_DEBUG_OBJECT (depayload, "assemble theora packet");
gst_adapter_push (rtptheoradepay->adapter, vdata);
@ -522,7 +525,7 @@ gst_rtp_theora_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
* .. theora data |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+*
*/
timestamp = gst_rtp_buffer_get_timestamp (buf);
timestamp = gst_rtp_buffer_get_timestamp (&rtp);
while (payload_len >= 2) {
guint16 length;
@ -550,13 +553,17 @@ gst_rtp_theora_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* create buffer for packet */
if (G_UNLIKELY (to_free)) {
outbuf = gst_buffer_new ();
GST_BUFFER_DATA (outbuf) = payload;
GST_BUFFER_MALLOCDATA (outbuf) = to_free;
GST_BUFFER_SIZE (outbuf) = length;
gst_buffer_take_memory (buf, -1,
gst_memory_new_wrapped (0, to_free, g_free,
(payload - to_free) + length, payload - to_free, length));
to_free = NULL;
} else {
guint8 *odata;
outbuf = gst_buffer_new_and_alloc (length);
memcpy (GST_BUFFER_DATA (outbuf), payload, length);
odata = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
memcpy (odata, payload, length);
gst_buffer_unmap (outbuf, odata, -1);
}
if (payload_len > 0 && (payload[0] & 0xC0) == 0x0)
@ -584,10 +591,12 @@ gst_rtp_theora_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
if (rtptheoradepay->needs_keyframe)
goto request_keyframe;
gst_rtp_buffer_unmap (&rtp);
return NULL;
no_output:
{
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
/* ERRORS */
@ -606,6 +615,7 @@ packet_short:
ignore_reserved:
{
GST_WARNING_OBJECT (rtptheoradepay, "reserved TDT ignored");
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
length_short:
@ -627,6 +637,7 @@ request_config:
gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
gst_structure_new ("GstForceKeyUnit",
"all-headers", G_TYPE_BOOLEAN, TRUE, NULL)));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
request_keyframe:
@ -635,6 +646,7 @@ request_keyframe:
gst_pad_push_event (GST_BASE_RTP_DEPAYLOAD_SINKPAD (depayload),
gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
gst_structure_new ("GstForceKeyUnit", NULL)));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
@ -653,7 +665,7 @@ gst_rtp_theora_depay_packet_lost (GstBaseRTPDepayload * depayload,
GstRtpTheoraDepay *rtptheoradepay = GST_RTP_THEORA_DEPAY (depayload);
guint seqnum = 0;
gst_structure_get_uint (event->structure, "seqnum", &seqnum);
gst_structure_get_uint (gst_event_get_structure (event), "seqnum", &seqnum);
GST_LOG_OBJECT (depayload, "Requested keyframe because frame with seqnum %u"
" is missing", seqnum);
rtptheoradepay->needs_keyframe = TRUE;

View file

@ -75,8 +75,8 @@ enum
PROP_CONFIG_INTERVAL
};
GST_BOILERPLATE (GstRtpTheoraPay, gst_rtp_theora_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD);
#define gst_rtp_theora_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpTheoraPay, gst_rtp_theora_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static gboolean gst_rtp_theora_pay_setcaps (GstBaseRTPPayload * basepayload,
GstCaps * caps);
@ -84,7 +84,7 @@ static GstStateChangeReturn gst_rtp_theora_pay_change_state (GstElement *
element, GstStateChange transition);
static GstFlowReturn gst_rtp_theora_pay_handle_buffer (GstBaseRTPPayload * pad,
GstBuffer * buffer);
static gboolean gst_rtp_theora_pay_handle_event (GstPad * pad,
static gboolean gst_rtp_theora_pay_handle_event (GstBaseRTPPayload * payload,
GstEvent * event);
@ -93,22 +93,6 @@ static void gst_rtp_theora_pay_set_property (GObject * object, guint prop_id,
static void gst_rtp_theora_pay_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void
gst_rtp_theora_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_theora_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_theora_pay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP Theora payloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Theora video into RTP packets (draft-01 RFC XXXX)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_theora_pay_class_init (GstRtpTheoraPayClass * klass)
{
@ -129,6 +113,16 @@ gst_rtp_theora_pay_class_init (GstRtpTheoraPayClass * klass)
gobject_class->set_property = gst_rtp_theora_pay_set_property;
gobject_class->get_property = gst_rtp_theora_pay_get_property;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_theora_pay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_theora_pay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP Theora payloader", "Codec/Payloader/Network/RTP",
"Payload-encode Theora video into RTP packets (draft-01 RFC XXXX)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtptheorapay_debug, "rtptheorapay", 0,
"Theora RTP Payloader");
@ -142,8 +136,7 @@ gst_rtp_theora_pay_class_init (GstRtpTheoraPayClass * klass)
}
static void
gst_rtp_theora_pay_init (GstRtpTheoraPay * rtptheorapay,
GstRtpTheoraPayClass * klass)
gst_rtp_theora_pay_init (GstRtpTheoraPay * rtptheorapay)
{
rtptheorapay->last_config = GST_CLOCK_TIME_NONE;
}
@ -187,11 +180,14 @@ static void
gst_rtp_theora_pay_reset_packet (GstRtpTheoraPay * rtptheorapay, guint8 TDT)
{
guint payload_len;
GstRTPBuffer rtp;
GST_DEBUG_OBJECT (rtptheorapay, "reset packet");
rtptheorapay->payload_pos = 4;
payload_len = gst_rtp_buffer_get_payload_len (rtptheorapay->packet);
gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
gst_rtp_buffer_unmap (&rtp);
rtptheorapay->payload_left = payload_len - 4;
rtptheorapay->payload_duration = 0;
rtptheorapay->payload_F = 0;
@ -223,6 +219,7 @@ gst_rtp_theora_pay_flush_packet (GstRtpTheoraPay * rtptheorapay)
GstFlowReturn ret;
guint8 *payload;
guint hlen;
GstRTPBuffer rtp;
/* check for empty packet */
if (!rtptheorapay->packet || rtptheorapay->payload_pos <= 4)
@ -230,8 +227,10 @@ gst_rtp_theora_pay_flush_packet (GstRtpTheoraPay * rtptheorapay)
GST_DEBUG_OBJECT (rtptheorapay, "flushing packet");
gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
/* fix header */
payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
payload = gst_rtp_buffer_get_payload (&rtp);
/*
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
@ -250,9 +249,11 @@ gst_rtp_theora_pay_flush_packet (GstRtpTheoraPay * rtptheorapay)
(rtptheorapay->payload_TDT & 0x3) << 4 |
(rtptheorapay->payload_pkts & 0xf);
gst_rtp_buffer_unmap (&rtp);
/* shrink the buffer size to the last written byte */
hlen = gst_rtp_buffer_calc_header_len (0);
GST_BUFFER_SIZE (rtptheorapay->packet) = hlen + rtptheorapay->payload_pos;
gst_buffer_resize (rtptheorapay->packet, 0, hlen + rtptheorapay->payload_pos);
GST_BUFFER_DURATION (rtptheorapay->packet) = rtptheorapay->payload_duration;
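Shrinking an over-allocated packet down to the bytes actually written can no longer be done by poking GST_BUFFER_SIZE; gst_buffer_resize() takes over, keeping the RTP header plus the used part of the payload (hlen, packet and payload_pos as in the function above):

hlen = gst_rtp_buffer_calc_header_len (0);
/* 0.10: GST_BUFFER_SIZE (packet) = hlen + payload_pos; */
gst_buffer_resize (packet, 0, hlen + payload_pos);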
@ -334,10 +335,11 @@ gst_rtp_theora_pay_finish_headers (GstBaseRTPPayload * basepayload)
extralen = 1;
for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
guint bsize;
guint8 *data;
gsize size;
bsize = GST_BUFFER_SIZE (buf);
bsize = gst_buffer_get_size (buf);
length += bsize;
n_headers++;
@ -351,8 +353,9 @@ gst_rtp_theora_pay_finish_headers (GstBaseRTPPayload * basepayload)
} while (bsize);
}
/* update hash */
ident = fnv1_hash_32_update (ident, GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
ident = fnv1_hash_32_update (ident, data, size);
gst_buffer_unmap (buf, data, -1);
}
/* packet length is header size + packet length */
@ -393,7 +396,7 @@ gst_rtp_theora_pay_finish_headers (GstBaseRTPPayload * basepayload)
if (!g_list_next (walk))
break;
bsize = GST_BUFFER_SIZE (buf);
bsize = gst_buffer_get_size (buf);
/* calc size */
size = 0;
@ -403,7 +406,7 @@ gst_rtp_theora_pay_finish_headers (GstBaseRTPPayload * basepayload)
} while (bsize);
temp = size;
bsize = GST_BUFFER_SIZE (buf);
bsize = gst_buffer_get_size (buf);
/* write the size backwards */
flag = 0;
while (size) {
@ -418,9 +421,13 @@ gst_rtp_theora_pay_finish_headers (GstBaseRTPPayload * basepayload)
/* copy header data */
for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
guint8 *bdata;
gsize bsize;
memcpy (data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
data += GST_BUFFER_SIZE (buf);
bdata = gst_buffer_map (buf, &bsize, NULL, GST_MAP_READ);
memcpy (data, bdata, bsize);
gst_buffer_unmap (buf, bdata, -1);
data += bsize;
}
/* serialize to base64 */
@ -532,6 +539,7 @@ gst_rtp_theora_pay_payload_buffer (GstRtpTheoraPay * rtptheorapay, guint8 TDT,
guint plen;
guint8 *ppos, *payload;
gboolean fragmented;
GstRTPBuffer rtp;
/* size increases with packet length and 2 byte size header. */
newduration = rtptheorapay->payload_duration;
@ -557,7 +565,8 @@ gst_rtp_theora_pay_payload_buffer (GstRtpTheoraPay * rtptheorapay, guint8 TDT,
gst_rtp_theora_pay_init_packet (rtptheorapay, TDT, timestamp);
}
payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtptheorapay->payload_pos;
fragmented = FALSE;
@ -602,13 +611,15 @@ gst_rtp_theora_pay_payload_buffer (GstRtpTheoraPay * rtptheorapay, guint8 TDT,
if (fragmented) {
/* fragmented packets are always flushed and have ptks of 0 */
rtptheorapay->payload_pkts = 0;
gst_rtp_buffer_unmap (&rtp);
ret = gst_rtp_theora_pay_flush_packet (rtptheorapay);
if (size > 0) {
/* start new packet and get pointers. TDT stays the same. */
gst_rtp_theora_pay_init_packet (rtptheorapay,
rtptheorapay->payload_TDT, timestamp);
payload = gst_rtp_buffer_get_payload (rtptheorapay->packet);
gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtptheorapay->payload_pos;
}
} else {
@ -620,6 +631,9 @@ gst_rtp_theora_pay_payload_buffer (GstRtpTheoraPay * rtptheorapay, guint8 TDT,
}
} while (size);
if (rtp.buffer)
gst_rtp_buffer_unmap (&rtp);
return ret;
}
@ -629,7 +643,7 @@ gst_rtp_theora_pay_handle_buffer (GstBaseRTPPayload * basepayload,
{
GstRtpTheoraPay *rtptheorapay;
GstFlowReturn ret;
guint size;
gsize size;
guint8 *data;
GstClockTime duration, timestamp;
guint8 TDT;
@ -637,13 +651,12 @@ gst_rtp_theora_pay_handle_buffer (GstBaseRTPPayload * basepayload,
rtptheorapay = GST_RTP_THEORA_PAY (basepayload);
size = GST_BUFFER_SIZE (buffer);
data = GST_BUFFER_DATA (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
GST_DEBUG_OBJECT (rtptheorapay, "size %u, duration %" GST_TIME_FORMAT,
size, GST_TIME_ARGS (duration));
GST_DEBUG_OBJECT (rtptheorapay, "size %" G_GSIZE_FORMAT
", duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (duration));
if (G_UNLIKELY (size > 0xffff))
goto wrong_size;
@ -679,6 +692,7 @@ gst_rtp_theora_pay_handle_buffer (GstBaseRTPPayload * basepayload,
if (TDT != 0) {
GST_DEBUG_OBJECT (rtptheorapay, "collecting header, buffer %p", buffer);
/* append header to the list of headers */
gst_buffer_unmap (buffer, data, -1);
rtptheorapay->headers = g_list_append (rtptheorapay->headers, buffer);
ret = GST_FLOW_OK;
goto done;
@ -738,6 +752,8 @@ gst_rtp_theora_pay_handle_buffer (GstBaseRTPPayload * basepayload,
ret = gst_rtp_theora_pay_payload_buffer (rtptheorapay, TDT, data, size,
timestamp, duration, 0);
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
done:
@ -748,11 +764,13 @@ wrong_size:
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
("Invalid packet size (%d <= 0xffff)", size), (NULL));
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
parse_id_failed:
{
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
@ -760,6 +778,7 @@ unknown_header:
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
(NULL), ("Ignoring unknown header received"));
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
@ -767,15 +786,16 @@ header_error:
{
GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
(NULL), ("Error initializing header config"));
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
}
static gboolean
gst_rtp_theora_pay_handle_event (GstPad * pad, GstEvent * event)
gst_rtp_theora_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (GST_PAD_PARENT (pad));
GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:

View file

@ -57,7 +57,8 @@ GST_STATIC_PAD_TEMPLATE ("src",
GST_STATIC_CAPS ("audio/x-vorbis")
);
GST_BOILERPLATE (GstRtpVorbisDepay, gst_rtp_vorbis_depay, GstBaseRTPDepayload,
#define gst_rtp_vorbis_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpVorbisDepay, gst_rtp_vorbis_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_vorbis_depay_setcaps (GstBaseRTPDepayload * depayload,
@ -70,23 +71,6 @@ static void gst_rtp_vorbis_depay_finalize (GObject * object);
static GstStateChangeReturn gst_rtp_vorbis_depay_change_state (GstElement *
element, GstStateChange transition);
static void
gst_rtp_vorbis_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_vorbis_depay_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_vorbis_depay_src_template));
gst_element_class_set_details_simple (element_class, "RTP Vorbis depayloader",
"Codec/Depayloader/Network/RTP",
"Extracts Vorbis Audio from RTP packets (RFC 5215)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_vorbis_depay_class_init (GstRtpVorbisDepayClass * klass)
{
@ -105,13 +89,22 @@ gst_rtp_vorbis_depay_class_init (GstRtpVorbisDepayClass * klass)
gstbasertpdepayload_class->process = gst_rtp_vorbis_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_vorbis_depay_setcaps;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vorbis_depay_sink_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vorbis_depay_src_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP Vorbis depayloader", "Codec/Depayloader/Network/RTP",
"Extracts Vorbis Audio from RTP packets (RFC 5215)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpvorbisdepay_debug, "rtpvorbisdepay", 0,
"Vorbis RTP Depayloader");
}
static void
gst_rtp_vorbis_depay_init (GstRtpVorbisDepay * rtpvorbisdepay,
GstRtpVorbisDepayClass * klass)
gst_rtp_vorbis_depay_init (GstRtpVorbisDepay * rtpvorbisdepay)
{
rtpvorbisdepay->adapter = gst_adapter_new ();
}
@ -159,15 +152,14 @@ gst_rtp_vorbis_depay_parse_configuration (GstRtpVorbisDepay * rtpvorbisdepay,
{
GstBuffer *buf;
guint32 num_headers;
guint8 *data;
guint size;
guint8 *data, *bdata;
gsize size;
guint offset;
gint i, j;
data = GST_BUFFER_DATA (confbuf);
size = GST_BUFFER_SIZE (confbuf);
bdata = data = gst_buffer_map (confbuf, &size, NULL, GST_MAP_READ);
GST_DEBUG_OBJECT (rtpvorbisdepay, "config size %u", size);
GST_DEBUG_OBJECT (rtpvorbisdepay, "config size %" G_GSIZE_FORMAT, size);
/* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Number of packed headers |
@ -293,13 +285,16 @@ gst_rtp_vorbis_depay_parse_configuration (GstRtpVorbisDepay * rtpvorbisdepay,
GST_DEBUG_OBJECT (rtpvorbisdepay, "reading header %d, size %u", j,
h_size);
buf = gst_buffer_create_sub (confbuf, offset, h_size);
buf = gst_buffer_copy_region (confbuf, GST_BUFFER_COPY_MEMORY, offset,
h_size);
conf->headers = g_list_append (conf->headers, buf);
offset += h_size;
size -= h_size;
}
rtpvorbisdepay->configs = g_list_append (rtpvorbisdepay->configs, conf);
}
gst_buffer_unmap (confbuf, bdata, -1);
gst_buffer_unref (confbuf);
return TRUE;
@ -308,6 +303,7 @@ gst_rtp_vorbis_depay_parse_configuration (GstRtpVorbisDepay * rtpvorbisdepay,
too_small:
{
GST_DEBUG_OBJECT (rtpvorbisdepay, "configuration too small");
gst_buffer_unmap (confbuf, bdata, -1);
gst_buffer_unref (confbuf);
return FALSE;
}
@ -326,7 +322,7 @@ gst_rtp_vorbis_depay_parse_inband_configuration (GstRtpVorbisDepay *
/* transform inline to out-of-band and parse that one */
confbuf = gst_buffer_new_and_alloc (size + 9);
conf = GST_BUFFER_DATA (confbuf);
conf = gst_buffer_map (confbuf, NULL, NULL, GST_MAP_WRITE);
/* 1 header */
GST_WRITE_UINT32_BE (conf, 1);
/* write Ident */
@ -335,6 +331,7 @@ gst_rtp_vorbis_depay_parse_inband_configuration (GstRtpVorbisDepay *
GST_WRITE_UINT16_BE (conf + 7, length);
/* copy remainder */
memcpy (conf + 9, configuration, size);
gst_buffer_unmap (confbuf, conf, -1);
return gst_rtp_vorbis_depay_parse_configuration (rtpvorbisdepay, confbuf);
}
@ -368,9 +365,8 @@ gst_rtp_vorbis_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
data = g_base64_decode (configuration, &size);
confbuf = gst_buffer_new ();
GST_BUFFER_DATA (confbuf) = data;
GST_BUFFER_MALLOCDATA (confbuf) = data;
GST_BUFFER_SIZE (confbuf) = size;
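/* wrap the base64-decoded configuration in a GstMemory; the buffer takes
* ownership and g_free releases the data when the buffer is destroyed */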
gst_buffer_take_memory (confbuf, -1,
gst_memory_new_wrapped (0, data, g_free, size, 0, size));
if (!gst_rtp_vorbis_depay_parse_configuration (rtpvorbisdepay, confbuf))
goto invalid_configuration;
} else {
@ -447,10 +443,13 @@ gst_rtp_vorbis_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
guint32 timestamp;
guint32 header, ident;
guint8 F, VDT, packets;
GstRTPBuffer rtp;
rtpvorbisdepay = GST_RTP_VORBIS_DEPAY (depayload);
payload_len = gst_rtp_buffer_get_payload_len (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
GST_DEBUG_OBJECT (depayload, "got RTP packet of size %d", payload_len);
@ -458,7 +457,7 @@ gst_rtp_vorbis_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
if (G_UNLIKELY (payload_len < 4))
goto packet_short;
payload = gst_rtp_buffer_get_payload (buf);
payload = gst_rtp_buffer_get_payload (&rtp);
header = GST_READ_UINT32_BE (payload);
/*
@ -526,7 +525,7 @@ gst_rtp_vorbis_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* first assembled packet, reuse 2 bytes to store the length */
headerskip = (F == 1 ? 4 : 6);
/* skip header and length. */
vdata = gst_rtp_buffer_get_payload_subbuffer (buf, headerskip, -1);
vdata = gst_rtp_buffer_get_payload_subbuffer (&rtp, headerskip, -1);
GST_DEBUG_OBJECT (depayload, "assemble vorbis packet");
gst_adapter_push (rtpvorbisdepay->adapter, vdata);
@ -565,7 +564,7 @@ gst_rtp_vorbis_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
* .. vorbis data |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+*
*/
timestamp = gst_rtp_buffer_get_timestamp (buf);
timestamp = gst_rtp_buffer_get_timestamp (&rtp);
while (payload_len > 2) {
guint16 length;
@ -593,13 +592,17 @@ gst_rtp_vorbis_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
/* create buffer for packet */
if (G_UNLIKELY (to_free)) {
outbuf = gst_buffer_new ();
GST_BUFFER_DATA (outbuf) = payload;
GST_BUFFER_MALLOCDATA (outbuf) = to_free;
GST_BUFFER_SIZE (outbuf) = length;
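/* wrap the data taken from the adapter; payload points into the to_free
* allocation, so pass its offset and the packet length */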
gst_buffer_take_memory (outbuf, -1,
gst_memory_new_wrapped (0, to_free, g_free,
(payload - to_free) + length, payload - to_free, length));
to_free = NULL;
} else {
guint8 *data;
outbuf = gst_buffer_new_and_alloc (length);
memcpy (GST_BUFFER_DATA (outbuf), payload, length);
data = gst_buffer_map (outbuf, NULL, NULL, GST_MAP_WRITE);
memcpy (data, payload, length);
gst_buffer_unmap (outbuf, data, -1);
}
payload += length;
@ -621,10 +624,13 @@ gst_rtp_vorbis_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
g_free (to_free);
gst_rtp_buffer_unmap (&rtp);
return NULL;
no_output:
{
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
/* ERRORS */
@ -632,23 +638,27 @@ switch_failed:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Could not switch codebooks"));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
packet_short:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet was too short (%d < 4)", payload_len));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
ignore_reserved:
{
GST_WARNING_OBJECT (rtpvorbisdepay, "reserved VDT ignored");
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
length_short:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet contains invalid data"));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_configuration:
@ -656,6 +666,7 @@ invalid_configuration:
/* fatal, as we otherwise risk carrying on without output */
GST_ELEMENT_ERROR (rtpvorbisdepay, STREAM, DECODE,
(NULL), ("Packet contains invalid configuration"));
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -58,8 +58,8 @@ GST_STATIC_PAD_TEMPLATE ("sink",
GST_STATIC_CAPS ("audio/x-vorbis")
);
GST_BOILERPLATE (GstRtpVorbisPay, gst_rtp_vorbis_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD);
#define gst_rtp_vorbis_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpVorbisPay, gst_rtp_vorbis_pay, GST_TYPE_BASE_RTP_PAYLOAD);
static gboolean gst_rtp_vorbis_pay_setcaps (GstBaseRTPPayload * basepayload,
GstCaps * caps);
@ -67,25 +67,9 @@ static GstStateChangeReturn gst_rtp_vorbis_pay_change_state (GstElement *
element, GstStateChange transition);
static GstFlowReturn gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * pad,
GstBuffer * buffer);
static gboolean gst_rtp_vorbis_pay_handle_event (GstPad * pad,
static gboolean gst_rtp_vorbis_pay_handle_event (GstBaseRTPPayload * payload,
GstEvent * event);
static void
gst_rtp_vorbis_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_vorbis_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_vorbis_pay_sink_template));
gst_element_class_set_details_simple (element_class, "RTP Vorbis depayloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Vorbis audio into RTP packets (RFC 5215)",
"Wim Taymans <wimi.taymans@gmail.com>");
}
static void
gst_rtp_vorbis_pay_class_init (GstRtpVorbisPayClass * klass)
{
@ -101,13 +85,23 @@ gst_rtp_vorbis_pay_class_init (GstRtpVorbisPayClass * klass)
gstbasertppayload_class->handle_buffer = gst_rtp_vorbis_pay_handle_buffer;
gstbasertppayload_class->handle_event = gst_rtp_vorbis_pay_handle_event;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vorbis_pay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vorbis_pay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP Vorbis depayloader",
"Codec/Payloader/Network/RTP",
"Payload-encode Vorbis audio into RTP packets (RFC 5215)",
"Wim Taymans <wimi.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpvorbispay_debug, "rtpvorbispay", 0,
"Vorbis RTP Payloader");
}
static void
gst_rtp_vorbis_pay_init (GstRtpVorbisPay * rtpvorbispay,
GstRtpVorbisPayClass * klass)
gst_rtp_vorbis_pay_init (GstRtpVorbisPay * rtpvorbispay)
{
/* needed because of G_DEFINE_TYPE */
}
@ -146,11 +140,14 @@ static void
gst_rtp_vorbis_pay_reset_packet (GstRtpVorbisPay * rtpvorbispay, guint8 VDT)
{
guint payload_len;
GstRTPBuffer rtp;
GST_LOG_OBJECT (rtpvorbispay, "reset packet");
rtpvorbispay->payload_pos = 4;
payload_len = gst_rtp_buffer_get_payload_len (rtpvorbispay->packet);
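/* map briefly, just to read the total payload length of the packet */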
gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_READ, &rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
gst_rtp_buffer_unmap (&rtp);
rtpvorbispay->payload_left = payload_len - 4;
rtpvorbispay->payload_duration = 0;
rtpvorbispay->payload_F = 0;
@ -181,6 +178,7 @@ gst_rtp_vorbis_pay_flush_packet (GstRtpVorbisPay * rtpvorbispay)
GstFlowReturn ret;
guint8 *payload;
guint hlen;
GstRTPBuffer rtp;
/* check for empty packet */
if (!rtpvorbispay->packet || rtpvorbispay->payload_pos <= 4)
@ -188,8 +186,10 @@ gst_rtp_vorbis_pay_flush_packet (GstRtpVorbisPay * rtpvorbispay)
GST_LOG_OBJECT (rtpvorbispay, "flushing packet");
gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
/* fix header */
payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
payload = gst_rtp_buffer_get_payload (&rtp);
/*
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
@ -208,9 +208,11 @@ gst_rtp_vorbis_pay_flush_packet (GstRtpVorbisPay * rtpvorbispay)
(rtpvorbispay->payload_VDT & 0x3) << 4 |
(rtpvorbispay->payload_pkts & 0xf);
gst_rtp_buffer_unmap (&rtp);
/* shrink the buffer size to the last written byte */
hlen = gst_rtp_buffer_calc_header_len (0);
GST_BUFFER_SIZE (rtpvorbispay->packet) = hlen + rtpvorbispay->payload_pos;
gst_buffer_resize (rtpvorbispay->packet, 0, hlen + rtpvorbispay->payload_pos);
GST_BUFFER_DURATION (rtpvorbispay->packet) = rtpvorbispay->payload_duration;
@ -290,8 +292,10 @@ gst_rtp_vorbis_pay_finish_headers (GstBaseRTPPayload * basepayload)
for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
guint bsize;
guint8 *data;
gsize size;
bsize = GST_BUFFER_SIZE (buf);
bsize = gst_buffer_get_size (buf);
length += bsize;
n_headers++;
@ -304,8 +308,9 @@ gst_rtp_vorbis_pay_finish_headers (GstBaseRTPPayload * basepayload)
} while (bsize);
}
/* update hash */
ident = fnv1_hash_32_update (ident, GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
ident = fnv1_hash_32_update (ident, data, size);
gst_buffer_unmap (buf, data, -1);
}
/* packet length is header size + packet length */
@ -345,7 +350,7 @@ gst_rtp_vorbis_pay_finish_headers (GstBaseRTPPayload * basepayload)
if (!g_list_next (walk))
break;
bsize = GST_BUFFER_SIZE (buf);
bsize = gst_buffer_get_size (buf);
/* calc size */
size = 0;
@ -355,7 +360,7 @@ gst_rtp_vorbis_pay_finish_headers (GstBaseRTPPayload * basepayload)
} while (bsize);
temp = size;
bsize = GST_BUFFER_SIZE (buf);
bsize = gst_buffer_get_size (buf);
/* write the size backwards */
flag = 0;
while (size) {
@ -371,8 +376,8 @@ gst_rtp_vorbis_pay_finish_headers (GstBaseRTPPayload * basepayload)
for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
memcpy (data, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
data += GST_BUFFER_SIZE (buf);
gst_buffer_extract (buf, 0, data, gst_buffer_get_size (buf));
data += gst_buffer_get_size (buf);
}
/* serialize to base64 */
@ -470,7 +475,8 @@ gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * basepayload,
{
GstRtpVorbisPay *rtpvorbispay;
GstFlowReturn ret;
guint size, newsize;
guint newsize;
gsize size;
guint8 *data;
guint packet_len;
GstClockTime duration, newduration, timestamp;
@ -479,16 +485,16 @@ gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * basepayload,
guint plen;
guint8 *ppos, *payload;
gboolean fragmented;
GstRTPBuffer rtp;
rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
size = GST_BUFFER_SIZE (buffer);
data = GST_BUFFER_DATA (buffer);
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
duration = GST_BUFFER_DURATION (buffer);
timestamp = GST_BUFFER_TIMESTAMP (buffer);
GST_LOG_OBJECT (rtpvorbispay, "size %u, duration %" GST_TIME_FORMAT,
size, GST_TIME_ARGS (duration));
GST_LOG_OBJECT (rtpvorbispay, "size %" G_GSIZE_FORMAT
", duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (duration));
if (G_UNLIKELY (size < 1 || size > 0xffff))
goto wrong_size;
@ -518,6 +524,7 @@ gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * basepayload,
if (VDT != 0) {
GST_DEBUG_OBJECT (rtpvorbispay, "collecting header");
/* append header to the list of headers */
gst_buffer_unmap (buffer, data, -1);
rtpvorbispay->headers = g_list_append (rtpvorbispay->headers, buffer);
ret = GST_FLOW_OK;
goto done;
@ -551,7 +558,8 @@ gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * basepayload,
gst_rtp_vorbis_pay_init_packet (rtpvorbispay, VDT, timestamp);
}
payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtpvorbispay->payload_pos;
fragmented = FALSE;
@ -591,6 +599,7 @@ gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * basepayload,
}
}
if (fragmented) {
gst_rtp_buffer_unmap (&rtp);
/* fragmented packets are always flushed and have pkts of 0 */
rtpvorbispay->payload_pkts = 0;
ret = gst_rtp_vorbis_pay_flush_packet (rtpvorbispay);
@ -599,7 +608,8 @@ gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * basepayload,
/* start new packet and get pointers. VDT stays the same. */
gst_rtp_vorbis_pay_init_packet (rtpvorbispay,
rtpvorbispay->payload_VDT, timestamp);
payload = gst_rtp_buffer_get_payload (rtpvorbispay->packet);
gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
payload = gst_rtp_buffer_get_payload (&rtp);
ppos = payload + rtpvorbispay->payload_pos;
}
} else {
@ -610,6 +620,11 @@ gst_rtp_vorbis_pay_handle_buffer (GstBaseRTPPayload * basepayload,
rtpvorbispay->payload_duration += duration;
}
}
if (rtp.buffer)
gst_rtp_buffer_unmap (&rtp);
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
done:
@ -620,11 +635,13 @@ wrong_size:
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
("Invalid packet size (1 < %d <= 0xffff)", size), (NULL));
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
parse_id_failed:
{
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_ERROR;
}
@ -632,6 +649,7 @@ unknown_header:
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
(NULL), ("Ignoring unknown header received"));
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
@ -639,15 +657,16 @@ header_error:
{
GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
(NULL), ("Error initializing header config"));
gst_buffer_unmap (buffer, data, -1);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
}
static gboolean
gst_rtp_vorbis_pay_handle_event (GstPad * pad, GstEvent * event)
gst_rtp_vorbis_pay_handle_event (GstBaseRTPPayload * payload, GstEvent * event)
{
GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (GST_PAD_PARENT (pad));
GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (payload);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:

View file

@ -47,7 +47,8 @@ GST_STATIC_PAD_TEMPLATE ("sink",
"clock-rate = (int) 90000, " "encoding-name = (string) \"RAW\"")
);
GST_BOILERPLATE (GstRtpVRawDepay, gst_rtp_vraw_depay, GstBaseRTPDepayload,
#define gst_rtp_vraw_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
GST_TYPE_BASE_RTP_DEPAYLOAD);
static gboolean gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload,
@ -61,22 +62,6 @@ static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
static gboolean gst_rtp_vraw_depay_handle_event (GstBaseRTPDepayload * filter,
GstEvent * event);
static void
gst_rtp_vraw_depay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
"Extracts raw video from RTP packets (RFC 4175)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
{
@ -92,13 +77,22 @@ gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
gstbasertpdepayload_class->process = gst_rtp_vraw_depay_process;
gstbasertpdepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
"Extracts raw video from RTP packets (RFC 4175)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
"raw video RTP Depayloader");
}
static void
gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay,
GstRtpVRawDepayClass * klass)
gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
{
/* needed because of G_DEFINE_TYPE */
}
@ -111,6 +105,61 @@ gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay)
rtpvrawdepay->outbuf = NULL;
}
rtpvrawdepay->timestamp = -1;
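/* deactivate and release the negotiated bufferpool, if any */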
if (rtpvrawdepay->pool) {
gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
gst_object_unref (rtpvrawdepay->pool);
rtpvrawdepay->pool = NULL;
}
}
static GstFlowReturn
gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
GstVideoInfo * info)
{
GstQuery *query;
GstBufferPool *pool = NULL;
guint size, min, max, prefix, alignment;
GstStructure *config;
/* find a pool for the negotiated caps now */
query = gst_query_new_allocation (caps, TRUE);
if (gst_pad_peer_query (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depay), query)) {
GST_DEBUG_OBJECT (depay, "got downstream ALLOCATION hints");
/* we got configuration from our peer, parse them */
gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
&alignment, &pool);
} else {
GST_DEBUG_OBJECT (depay, "didn't get downstream ALLOCATION hints");
size = info->size;
min = max = 0;
prefix = 0;
alignment = 0;
}
if (pool == NULL) {
/* we did not get a pool, make one ourselves then */
pool = gst_buffer_pool_new ();
}
if (depay->pool)
gst_object_unref (depay->pool);
depay->pool = pool;
config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
/* just set the metadata, if the pool can support it we will transparently use
* it through the video info API. We could also see if the pool supports this
* metadata and only activate it then. */
gst_buffer_pool_config_add_meta (config, GST_META_API_VIDEO);
gst_buffer_pool_set_config (pool, config);
/* and activate */
gst_buffer_pool_set_active (pool, TRUE);
gst_query_unref (query);
return GST_FLOW_OK;
}
static gboolean
@ -119,18 +168,16 @@ gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
GstStructure *structure;
GstRtpVRawDepay *rtpvrawdepay;
gint clock_rate;
const gchar *str, *type;
const gchar *str;
gint format, width, height, pgroup, xinc, yinc;
guint ystride, uvstride, yp, up, vp, outsize;
GstCaps *srccaps;
guint32 fourcc = 0;
gboolean res;
GstFlowReturn ret;
rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
structure = gst_caps_get_structure (caps, 0);
yp = up = vp = uvstride = 0;
xinc = yinc = 1;
if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
@ -156,96 +203,55 @@ gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
if (!strcmp (str, "RGB")) {
format = GST_VIDEO_FORMAT_RGB;
pgroup = 3;
ystride = GST_ROUND_UP_4 (width * 3);
outsize = ystride * height;
type = "video/x-raw-rgb";
} else if (!strcmp (str, "RGBA")) {
format = GST_VIDEO_FORMAT_RGBA;
pgroup = 4;
ystride = width * 4;
outsize = ystride * height;
type = "video/x-raw-rgb";
} else if (!strcmp (str, "BGR")) {
format = GST_VIDEO_FORMAT_BGR;
pgroup = 3;
ystride = GST_ROUND_UP_4 (width * 3);
outsize = ystride * height;
type = "video/x-raw-rgb";
} else if (!strcmp (str, "BGRA")) {
format = GST_VIDEO_FORMAT_BGRA;
pgroup = 4;
ystride = width * 4;
outsize = ystride * height;
type = "video/x-raw-rgb";
} else if (!strcmp (str, "YCbCr-4:4:4")) {
format = GST_VIDEO_FORMAT_AYUV;
pgroup = 3;
ystride = width * 4;
outsize = ystride * height;
type = "video/x-raw-yuv";
fourcc = GST_MAKE_FOURCC ('A', 'Y', 'U', 'V');
} else if (!strcmp (str, "YCbCr-4:2:2")) {
format = GST_VIDEO_FORMAT_UYVY;
pgroup = 4;
ystride = GST_ROUND_UP_2 (width) * 2;
outsize = ystride * height;
type = "video/x-raw-yuv";
fourcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
xinc = 2;
} else if (!strcmp (str, "YCbCr-4:2:0")) {
format = GST_VIDEO_FORMAT_I420;
pgroup = 6;
ystride = GST_ROUND_UP_4 (width);
uvstride = GST_ROUND_UP_8 (width) / 2;
up = ystride * GST_ROUND_UP_2 (height);
vp = up + uvstride * GST_ROUND_UP_2 (height) / 2;
outsize = vp + uvstride * GST_ROUND_UP_2 (height) / 2;
type = "video/x-raw-yuv";
fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
xinc = yinc = 2;
} else if (!strcmp (str, "YCbCr-4:1:1")) {
format = GST_VIDEO_FORMAT_Y41B;
pgroup = 6;
ystride = GST_ROUND_UP_4 (width);
uvstride = GST_ROUND_UP_8 (width) / 4;
up = ystride * height;
vp = up + uvstride * height;
outsize = vp + uvstride * height;
type = "video/x-raw-yuv";
fourcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
xinc = 4;
} else
goto unknown_format;
rtpvrawdepay->width = width;
rtpvrawdepay->height = height;
rtpvrawdepay->format = format;
rtpvrawdepay->yp = yp;
rtpvrawdepay->up = up;
rtpvrawdepay->vp = vp;
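/* keep the negotiated format in a GstVideoInfo; strides and plane offsets
* are now derived from it instead of being tracked by hand */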
gst_video_info_init (&rtpvrawdepay->vinfo);
gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
rtpvrawdepay->pgroup = pgroup;
rtpvrawdepay->xinc = xinc;
rtpvrawdepay->yinc = yinc;
rtpvrawdepay->ystride = ystride;
rtpvrawdepay->uvstride = uvstride;
rtpvrawdepay->outsize = outsize;
srccaps = gst_caps_new_simple (type,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"format", GST_TYPE_FOURCC, fourcc,
"framerate", GST_TYPE_FRACTION, 0, 1, NULL);
srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
format);
GST_DEBUG_OBJECT (depayload, "yp %d, up %d, vp %d", yp, up, vp);
GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d", xinc, yinc);
GST_DEBUG_OBJECT (depayload, "pgroup %d, ystride %d, uvstride %d", pgroup,
ystride, uvstride);
GST_DEBUG_OBJECT (depayload, "outsize %u", outsize);
GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
xinc, yinc, pgroup);
/* negotiate a bufferpool */
if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
&rtpvrawdepay->vinfo)) != GST_FLOW_OK)
goto no_bufferpool;
return res;
@ -275,20 +281,29 @@ unknown_format:
GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
return FALSE;
}
no_bufferpool:
{
GST_DEBUG_OBJECT (depayload, "no bufferpool");
return FALSE;
}
}
static GstBuffer *
gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
GstRtpVRawDepay *rtpvrawdepay;
guint8 *payload, *data, *yp, *up, *vp, *headers;
guint8 *payload, *yp, *up, *vp, *headers;
guint32 timestamp;
guint cont, ystride, uvstride, pgroup, payload_len;
gint width, height, xinc, yinc;
GstRTPBuffer rtp;
GstVideoFrame frame;
gboolean marker;
rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
timestamp = gst_rtp_buffer_get_timestamp (buf);
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
timestamp = gst_rtp_buffer_get_timestamp (&rtp);
if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
GstBuffer *outbuf;
@ -302,9 +317,18 @@ gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
rtpvrawdepay->outbuf = NULL;
}
ret = gst_pad_alloc_buffer (depayload->srcpad, -1, rtpvrawdepay->outsize,
GST_PAD_CAPS (depayload->srcpad), &outbuf);
if (ret != GST_FLOW_OK)
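/* renegotiate the bufferpool if downstream requested reconfiguration */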
if (gst_pad_check_reconfigure (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload))) {
GstCaps *caps;
caps =
gst_pad_get_current_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
&rtpvrawdepay->vinfo);
gst_caps_unref (caps);
}
ret = gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &outbuf, NULL);
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto alloc_failed;
/* clear timestamp from alloc... */
@ -314,23 +338,26 @@ gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
rtpvrawdepay->timestamp = timestamp;
}
data = GST_BUFFER_DATA (rtpvrawdepay->outbuf);
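/* map the output buffer as a video frame to get per-plane pointers and strides */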
if (!gst_video_frame_map (&frame, &rtpvrawdepay->vinfo, rtpvrawdepay->outbuf,
GST_MAP_WRITE))
goto invalid_frame;
/* get pointer and strides of the planes */
yp = data + rtpvrawdepay->yp;
up = data + rtpvrawdepay->up;
vp = data + rtpvrawdepay->vp;
yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
ystride = rtpvrawdepay->ystride;
uvstride = rtpvrawdepay->uvstride;
pgroup = rtpvrawdepay->pgroup;
width = rtpvrawdepay->width;
height = rtpvrawdepay->height;
width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
xinc = rtpvrawdepay->xinc;
yinc = rtpvrawdepay->yinc;
payload = gst_rtp_buffer_get_payload (buf);
payload_len = gst_rtp_buffer_get_payload_len (buf);
payload = gst_rtp_buffer_get_payload (&rtp);
payload_len = gst_rtp_buffer_get_payload_len (&rtp);
if (payload_len < 3)
goto short_packet;
@ -398,7 +425,7 @@ gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
"writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
line, offs, payload_len);
switch (rtpvrawdepay->format) {
switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
case GST_VIDEO_FORMAT_RGB:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGR:
@ -493,7 +520,10 @@ gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
payload_len -= length;
}
if (gst_rtp_buffer_get_marker (buf)) {
gst_video_frame_unmap (&frame);
gst_rtp_buffer_unmap (&rtp);
if (gst_rtp_buffer_get_marker (&rtp)) {
GST_LOG_OBJECT (depayload, "marker, flushing frame");
if (rtpvrawdepay->outbuf) {
gst_base_rtp_depayload_push_ts (depayload, timestamp,
@ -509,21 +539,34 @@ unknown_sampling:
{
GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
(NULL), ("unimplemented sampling"));
gst_video_frame_unmap (&frame);
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
alloc_failed:
{
GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_frame:
{
GST_ERROR_OBJECT (depayload, "could not map video frame");
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
wrong_length:
{
GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
gst_video_frame_unmap (&frame);
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
short_packet:
{
GST_WARNING_OBJECT (depayload, "short packet");
gst_video_frame_unmap (&frame);
gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}

View file

@ -22,6 +22,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstmetavideo.h>
#include <gst/rtp/gstbasertpdepayload.h>
G_BEGIN_DECLS
@ -44,8 +45,8 @@ struct _GstRtpVRawDepay
{
GstBaseRTPDepayload payload;
gint width, height;
GstVideoFormat format;
GstBufferPool *pool;
GstVideoInfo vinfo;
GstBuffer *outbuf;
guint32 timestamp;
@ -53,9 +54,6 @@ struct _GstRtpVRawDepay
gint pgroup;
gint xinc, yinc;
guint yp, up, vp;
gint ystride;
gint uvstride;
};
struct _GstRtpVRawDepayClass

View file

@ -34,46 +34,8 @@ static GstStaticPadTemplate gst_rtp_vraw_pay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw-rgb, "
"bpp = (int) 24, "
"depth = (int) 24, "
"endianness = (int) BIG_ENDIAN, "
"red_mask = (int) 0xFF000000, "
"green_mask = (int) 0x00FF0000, "
"blue_mask = (int) 0x0000FF00, "
"width = (int) [ 1, 32767 ], "
"height = (int) [ 1, 32767 ]; "
"video/x-raw-rgb, "
"bpp = (int) 32, "
"depth = (int) 32, "
"endianness = (int) BIG_ENDIAN, "
"red_mask = (int) 0xFF000000, "
"green_mask = (int) 0x00FF0000, "
"blue_mask = (int) 0x0000FF00, "
"alpha_mask = (int) 0x000000FF, "
"width = (int) [ 1, 32767 ], "
"height = (int) [ 1, 32767 ]; "
"video/x-raw-rgb, "
"bpp = (int) 24, "
"depth = (int) 24, "
"endianness = (int) BIG_ENDIAN, "
"red_mask = (int) 0x0000FF00, "
"green_mask = (int) 0x00FF0000, "
"blue_mask = (int) 0xFF000000, "
"width = (int) [ 1, 32767 ], "
"height = (int) [ 1, 32767 ]; "
"video/x-raw-rgb, "
"bpp = (int) 32, "
"depth = (int) 32, "
"endianness = (int) BIG_ENDIAN, "
"red_mask = (int) 0x0000FF00, "
"green_mask = (int) 0x00FF0000, "
"blue_mask = (int) 0xFF000000, "
"alpha_mask = (int) 0x000000FF, "
"width = (int) [ 1, 32767 ], "
"height = (int) [ 1, 32767 ]; "
"video/x-raw-yuv, "
"format = (fourcc) { AYUV, UYVY, I420, Y41B, UYVP }, "
GST_STATIC_CAPS ("video/x-raw, "
"format = (string) { RGB, RGBA, BGR, BGRA, AYUYV, UYVY, I420, Y41B, UYVP, I420, Y42B, Y444 }, "
"width = (int) [ 1, 32767 ], " "height = (int) [ 1, 32767 ]; ")
);
@ -109,40 +71,35 @@ static gboolean gst_rtp_vraw_pay_setcaps (GstBaseRTPPayload * payload,
static GstFlowReturn gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload *
payload, GstBuffer * buffer);
GST_BOILERPLATE (GstRtpVRawPay, gst_rtp_vraw_pay, GstBaseRTPPayload,
GST_TYPE_BASE_RTP_PAYLOAD)
G_DEFINE_TYPE (GstRtpVRawPay, gst_rtp_vraw_pay, GST_TYPE_BASE_RTP_PAYLOAD)
static void gst_rtp_vraw_pay_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_vraw_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_vraw_pay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP Raw Video payloader", "Codec/Payloader/Network/RTP",
"Payload raw video as RTP packets (RFC 4175)",
"Wim Taymans <wim.taymans@gmail.com>");
}
static void
gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass)
static void gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass)
{
GstBaseRTPPayloadClass *gstbasertppayload_class;
GstElementClass *gstelement_class;
gstelement_class = (GstElementClass *) klass;
gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
gstbasertppayload_class->set_caps = gst_rtp_vraw_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_vraw_pay_handle_buffer;
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vraw_pay_src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&gst_rtp_vraw_pay_sink_template));
gst_element_class_set_details_simple (gstelement_class,
"RTP Raw Video payloader", "Codec/Payloader/Network/RTP",
"Payload raw video as RTP packets (RFC 4175)",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_CATEGORY_INIT (rtpvrawpay_debug, "rtpvrawpay", 0,
"Raw video RTP Payloader");
}
static void
gst_rtp_vraw_pay_init (GstRtpVRawPay * rtpvrawpay, GstRtpVRawPayClass * klass)
gst_rtp_vraw_pay_init (GstRtpVRawPay * rtpvrawpay)
{
}
@ -150,177 +107,101 @@ static gboolean
gst_rtp_vraw_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
{
GstRtpVRawPay *rtpvrawpay;
GstStructure *s;
gboolean res;
const gchar *name;
gint width, height;
gint yp, up, vp;
gint pgroup, ystride, uvstride = 0, xinc, yinc;
GstVideoFormat sampling;
gint pgroup, xinc, yinc;
const gchar *depthstr, *samplingstr, *colorimetrystr;
gchar *wstr, *hstr;
gboolean interlaced;
const gchar *color_matrix;
gint depth;
GstVideoInfo info;
rtpvrawpay = GST_RTP_VRAW_PAY (payload);
s = gst_caps_get_structure (caps, 0);
if (!gst_video_info_from_caps (&info, caps))
goto invalid_caps;
/* start parsing the format */
name = gst_structure_get_name (s);
rtpvrawpay->vinfo = info;
/* these values are the only thing we can do */
depthstr = "8";
/* parse common width/height */
res = gst_structure_get_int (s, "width", &width);
res &= gst_structure_get_int (s, "height", &height);
if (!res)
goto missing_dimension;
if (!gst_structure_get_boolean (s, "interlaced", &interlaced))
interlaced = FALSE;
color_matrix = gst_structure_get_string (s, "color-matrix");
colorimetrystr = "SMPTE240M";
if (color_matrix) {
if (g_str_equal (color_matrix, "sdtv")) {
if (info.color_matrix) {
if (g_str_equal (info.color_matrix, "sdtv")) {
/* BT.601 implies a bit more than just color-matrix */
colorimetrystr = "BT601-5";
} else if (g_str_equal (color_matrix, "hdtv")) {
} else if (g_str_equal (info.color_matrix, "hdtv")) {
colorimetrystr = "BT709-2";
}
}
yp = up = vp = 0;
xinc = yinc = 1;
if (!strcmp (name, "video/x-raw-rgb")) {
gint amask, rmask;
gboolean has_alpha;
/* these values are the only thing we can do */
depthstr = "8";
depth = 8;
has_alpha = gst_structure_get_int (s, "alpha_mask", &amask);
depth = 8;
if (!gst_structure_get_int (s, "red_mask", &rmask))
goto unknown_mask;
if (has_alpha) {
switch (GST_VIDEO_INFO_FORMAT (&info)) {
case GST_VIDEO_FORMAT_RGBA:
samplingstr = "RGBA";
pgroup = 4;
ystride = width * 4;
if (rmask == 0xFF000000) {
sampling = GST_VIDEO_FORMAT_RGBA;
samplingstr = "RGBA";
} else {
sampling = GST_VIDEO_FORMAT_BGRA;
samplingstr = "BGRA";
}
} else {
break;
case GST_VIDEO_FORMAT_BGRA:
samplingstr = "BGRA";
pgroup = 4;
break;
case GST_VIDEO_FORMAT_RGB:
samplingstr = "RGB";
pgroup = 3;
ystride = GST_ROUND_UP_4 (width * 3);
if (rmask == 0xFF000000) {
sampling = GST_VIDEO_FORMAT_RGB;
samplingstr = "RGB";
} else {
sampling = GST_VIDEO_FORMAT_BGR;
samplingstr = "BGR";
}
}
} else if (!strcmp (name, "video/x-raw-yuv")) {
guint32 fourcc;
case GST_VIDEO_FORMAT_BGR:
samplingstr = "BGR";
pgroup = 3;
break;
case GST_VIDEO_FORMAT_AYUV:
samplingstr = "YCbCr-4:4:4";
pgroup = 3;
break;
case GST_VIDEO_FORMAT_UYVY:
samplingstr = "YCbCr-4:2:2";
pgroup = 4;
xinc = 2;
break;
case GST_VIDEO_FORMAT_Y41B:
samplingstr = "YCbCr-4:1:1";
pgroup = 6;
xinc = 4;
break;
case GST_VIDEO_FORMAT_I420:
samplingstr = "YCbCr-4:2:0";
pgroup = 6;
xinc = yinc = 2;
break;
case GST_VIDEO_FORMAT_UYVP:
samplingstr = "YCbCr-4:2:2";
pgroup = 4;
xinc = 2;
depth = 10;
depthstr = "10";
break;
default:
goto unknown_format;
break;
}
if (!gst_structure_get_fourcc (s, "format", &fourcc))
goto unknown_fourcc;
GST_LOG_OBJECT (payload, "have fourcc %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (fourcc));
switch (fourcc) {
case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
sampling = GST_VIDEO_FORMAT_AYUV;
samplingstr = "YCbCr-4:4:4";
pgroup = 3;
ystride = width * 4;
depth = 8;
break;
case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
sampling = GST_VIDEO_FORMAT_UYVY;
samplingstr = "YCbCr-4:2:2";
pgroup = 4;
xinc = 2;
ystride = GST_ROUND_UP_2 (width) * 2;
depth = 8;
break;
case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
sampling = GST_VIDEO_FORMAT_Y41B;
samplingstr = "YCbCr-4:1:1";
pgroup = 6;
xinc = 4;
ystride = GST_ROUND_UP_4 (width);
uvstride = GST_ROUND_UP_8 (width) / 4;
up = ystride * height;
vp = up + uvstride * height;
depth = 8;
break;
case GST_MAKE_FOURCC ('I', '4', '2', '0'):
sampling = GST_VIDEO_FORMAT_I420;
samplingstr = "YCbCr-4:2:0";
pgroup = 6;
xinc = yinc = 2;
ystride = GST_ROUND_UP_4 (width);
uvstride = GST_ROUND_UP_8 (width) / 2;
up = ystride * GST_ROUND_UP_2 (height);
vp = up + uvstride * GST_ROUND_UP_2 (height) / 2;
depth = 8;
break;
case GST_MAKE_FOURCC ('U', 'Y', 'V', 'P'):
#define GST_VIDEO_FORMAT_UYVP GST_VIDEO_FORMAT_UYVY /* FIXME */
sampling = GST_VIDEO_FORMAT_UYVP;
samplingstr = "YCbCr-4:2:2";
pgroup = 4;
xinc = 2;
ystride = GST_ROUND_UP_2 (width) * 2;
depth = 10;
break;
default:
goto unknown_fourcc;
}
} else
goto unknown_format;
if (interlaced) {
if (info.flags & GST_VIDEO_FLAG_INTERLACED) {
yinc *= 2;
}
if (depth == 10) {
depthstr = "10";
}
rtpvrawpay->width = width;
rtpvrawpay->height = height;
rtpvrawpay->sampling = sampling;
rtpvrawpay->pgroup = pgroup;
rtpvrawpay->xinc = xinc;
rtpvrawpay->yinc = yinc;
rtpvrawpay->yp = yp;
rtpvrawpay->up = up;
rtpvrawpay->vp = vp;
rtpvrawpay->ystride = ystride;
rtpvrawpay->uvstride = uvstride;
rtpvrawpay->interlaced = interlaced;
rtpvrawpay->depth = depth;
GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %d", width, height,
sampling);
GST_DEBUG_OBJECT (payload, "yp %d, up %d, vp %d", yp, up, vp);
GST_DEBUG_OBJECT (payload, "pgroup %d, ystride %d, uvstride %d", pgroup,
ystride, uvstride);
GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %s",
GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), samplingstr);
GST_DEBUG_OBJECT (payload, "xinc %d, yinc %d, pgroup %d", xinc, yinc, pgroup);
wstr = g_strdup_printf ("%d", rtpvrawpay->width);
hstr = g_strdup_printf ("%d", rtpvrawpay->height);
wstr = g_strdup_printf ("%d", GST_VIDEO_INFO_WIDTH (&info));
hstr = g_strdup_printf ("%d", GST_VIDEO_INFO_HEIGHT (&info));
gst_basertppayload_set_options (payload, "video", TRUE, "RAW", 90000);
if (interlaced) {
if (info.flags & GST_VIDEO_FLAG_INTERLACED) {
res = gst_basertppayload_set_outcaps (payload, "sampling", G_TYPE_STRING,
samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
@ -337,9 +218,9 @@ gst_rtp_vraw_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
return res;
/* ERRORS */
unknown_mask:
invalid_caps:
{
GST_ERROR_OBJECT (payload, "unknown red mask specified");
GST_ERROR_OBJECT (payload, "could not parse caps");
return FALSE;
}
unknown_format:
@ -347,16 +228,6 @@ unknown_format:
GST_ERROR_OBJECT (payload, "unknown caps format");
return FALSE;
}
unknown_fourcc:
{
GST_ERROR_OBJECT (payload, "invalid or missing fourcc");
return FALSE;
}
missing_dimension:
{
GST_ERROR_OBJECT (payload, "missing width or height property");
return FALSE;
}
}
static GstFlowReturn
@ -365,38 +236,42 @@ gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
GstRtpVRawPay *rtpvrawpay;
GstFlowReturn ret = GST_FLOW_OK;
guint line, offset;
guint8 *data, *yp, *up, *vp;
guint8 *yp, *up, *vp;
guint ystride, uvstride;
guint size, pgroup;
guint pgroup;
guint mtu;
guint width, height;
gint field;
GstVideoFrame frame;
gint interlaced;
GstRTPBuffer rtp;
rtpvrawpay = GST_RTP_VRAW_PAY (payload);
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
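/* map the input buffer as a video frame for per-plane access */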
gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ);
GST_LOG_OBJECT (rtpvrawpay, "new frame of %u bytes", size);
GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
gst_buffer_get_size (buffer));
/* get pointer and strides of the planes */
yp = data + rtpvrawpay->yp;
up = data + rtpvrawpay->up;
vp = data + rtpvrawpay->vp;
yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
ystride = rtpvrawpay->ystride;
uvstride = rtpvrawpay->uvstride;
ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
mtu = GST_BASE_RTP_PAYLOAD_MTU (payload);
/* amount of bytes for one pixel */
pgroup = rtpvrawpay->pgroup;
width = rtpvrawpay->width;
height = rtpvrawpay->height;
width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);
interlaced = !!(rtpvrawpay->vinfo.flags & GST_VIDEO_FLAG_INTERLACED);
/* start with line 0, offset 0 */
for (field = 0; field < 1 + rtpvrawpay->interlaced; field++) {
for (field = 0; field < 1 + interlaced; field++) {
line = field;
offset = 0;
@ -419,7 +294,8 @@ gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
GST_BUFFER_DURATION (buffer) / 2;
}
outdata = gst_rtp_buffer_get_payload (out);
gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
outdata = gst_rtp_buffer_get_payload (&rtp);
GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
mtu);
@ -524,7 +400,7 @@ gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
"writing length %u, line %u, offset %u, cont %d", length, lin, offs,
cont);
switch (rtpvrawpay->sampling) {
switch (GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo)) {
case GST_VIDEO_FORMAT_RGB:
case GST_VIDEO_FORMAT_RGBA:
case GST_VIDEO_FORMAT_BGR:
@ -595,6 +471,7 @@ gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
break;
}
default:
gst_rtp_buffer_unmap (&rtp);
gst_buffer_unref (out);
goto unknown_sampling;
}
@ -605,11 +482,12 @@ gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
if (line >= height) {
GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
gst_rtp_buffer_set_marker (out, TRUE);
gst_rtp_buffer_set_marker (&rtp, TRUE);
}
gst_rtp_buffer_unmap (&rtp);
if (left > 0) {
GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
GST_BUFFER_SIZE (out) -= left;
gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
}
/* push buffer */
@ -617,6 +495,8 @@ gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
}
}
gst_video_frame_unmap (&frame);
gst_buffer_unref (buffer);
return ret;
@ -626,6 +506,7 @@ unknown_sampling:
{
GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
(NULL), ("unimplemented sampling"));
gst_video_frame_unmap (&frame);
gst_buffer_unref (buffer);
return GST_FLOW_NOT_SUPPORTED;
}

View file

@ -44,15 +44,14 @@ struct _GstRtpVRawPay
{
GstBaseRTPPayload payload;
gint width, height;
GstVideoFormat sampling;
GstVideoInfo vinfo;
gint pgroup;
gint xinc, yinc;
guint yp, up, vp;
gint ystride;
gint uvstride;
gboolean interlaced;
// guint yp, up, vp;
// gint ystride;
// gint uvstride;
// gboolean interlaced;
gint depth;
};