jpeg: add support for multiscan images.

Add support for images with multiple scans per frame. Huffman tables
can be updated before each SOS, which may require multiple uploads of
Huffman tables to the VA driver. The driver must therefore be able to
cope with multiple VA buffers of type 'huffman-table', submitted in
the correct sequential order.
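
Below is a minimal, self-contained sketch of the per-scan update logic this
change introduces: a 'huffman-table' buffer is only queued for a scan when a
DHT segment has (re)defined tables since the previous scan, and the pending
flags are cleared once the tables have been queued. The types and the
printf-based decode_scan() helper are simplified stand-ins for illustration,
not the actual gstreamer-vaapi or VA-API structures.

    /*
     * Minimal sketch (not the actual gstreamer-vaapi code): models how a
     * decoder can queue one Huffman-table buffer per scan when DHT segments
     * appear between SOS markers.
     */
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    #define NUM_TABLES 4

    typedef struct {
        bool valid;   /* set when a DHT segment (re)defined this table */
    } HuffTable;

    typedef struct {
        HuffTable dc_tables[NUM_TABLES];
        HuffTable ac_tables[NUM_TABLES];
    } HuffTables;

    /* TRUE if any table was (re)defined since the last upload. */
    static bool
    huffman_tables_updated(const HuffTables *t)
    {
        size_t i;

        for (i = 0; i < NUM_TABLES; i++)
            if (t->dc_tables[i].valid || t->ac_tables[i].valid)
                return true;
        return false;
    }

    /* Clear the pending-update flags once the tables have been queued. */
    static void
    huffman_tables_reset(HuffTables *t)
    {
        size_t i;

        for (i = 0; i < NUM_TABLES; i++) {
            t->dc_tables[i].valid = false;
            t->ac_tables[i].valid = false;
        }
    }

    /* Called once per SOS: queue a Huffman-table buffer only when needed,
     * so the driver receives the tables in the same order as the scans
     * that use them. */
    static void
    decode_scan(HuffTables *t, int scan_index)
    {
        if (huffman_tables_updated(t)) {
            printf("scan %d: queue a new 'huffman-table' buffer before the slice\n",
                scan_index);
            huffman_tables_reset(t);
        } else {
            printf("scan %d: reuse previously uploaded tables\n", scan_index);
        }
        /* ... queue slice parameter and slice data buffers here ... */
    }

    int
    main(void)
    {
        HuffTables t = { 0 };

        t.dc_tables[0].valid = true;   /* DHT seen before the first SOS */
        decode_scan(&t, 0);

        decode_scan(&t, 1);            /* no DHT in between: nothing to queue */

        t.ac_tables[1].valid = true;   /* DHT between scans: new upload needed */
        decode_scan(&t, 2);
        return 0;
    }
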
commit 5444ab44d2
parent fd51db279a
Author: Gwenole Beauchesne
Date:   2013-09-23 16:49:41 +02:00
3 changed files with 65 additions and 20 deletions


@@ -313,23 +313,37 @@ fill_quantization_table(GstVaapiDecoderJpeg *decoder, GstVaapiPicture *picture)
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
fill_huffman_table(GstVaapiDecoderJpeg *decoder, GstVaapiPicture *picture)
static gboolean
huffman_tables_updated(const GstJpegHuffmanTables *huf_tables)
{
GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
GstJpegHuffmanTables * const huf_tables = &priv->huf_tables;
VAHuffmanTableBufferJPEGBaseline *huffman_table;
guint i, num_tables;
guint i;
if (!VALID_STATE(decoder, GOT_HUF_TABLE))
gst_jpeg_get_default_huffman_tables(&priv->huf_tables);
picture->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW(JPEGBaseline, decoder);
if (!picture->huf_table) {
GST_ERROR("failed to allocate Huffman tables");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
huffman_table = picture->huf_table->param;
for (i = 0; i < G_N_ELEMENTS(huf_tables->dc_tables); i++)
if (huf_tables->dc_tables[i].valid)
return TRUE;
for (i = 0; i < G_N_ELEMENTS(huf_tables->ac_tables); i++)
if (huf_tables->ac_tables[i].valid)
return TRUE;
return FALSE;
}
static void
huffman_tables_reset(GstJpegHuffmanTables *huf_tables)
{
guint i;
for (i = 0; i < G_N_ELEMENTS(huf_tables->dc_tables); i++)
huf_tables->dc_tables[i].valid = FALSE;
for (i = 0; i < G_N_ELEMENTS(huf_tables->ac_tables); i++)
huf_tables->ac_tables[i].valid = FALSE;
}
static void
fill_huffman_table(GstVaapiHuffmanTable *huf_table,
const GstJpegHuffmanTables *huf_tables)
{
VAHuffmanTableBufferJPEGBaseline * const huffman_table = huf_table->param;
guint i, num_tables;
num_tables = MIN(G_N_ELEMENTS(huffman_table->huffman_table),
GST_JPEG_MAX_SCAN_COMPONENTS);
@@ -356,7 +370,6 @@ fill_huffman_table(GstVaapiDecoderJpeg *decoder, GstVaapiPicture *picture)
0,
sizeof(huffman_table->huffman_table[i].pad));
}
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static guint
@@ -507,6 +520,21 @@ decode_scan(GstVaapiDecoderJpeg *decoder, GstJpegMarkerSegment *seg,
}
gst_vaapi_picture_add_slice(picture, slice);
if (!VALID_STATE(decoder, GOT_HUF_TABLE))
gst_jpeg_get_default_huffman_tables(&priv->huf_tables);
// Update VA Huffman table if it changed for this scan
if (huffman_tables_updated(&priv->huf_tables)) {
slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW(JPEGBaseline, decoder);
if (!slice->huf_table) {
GST_ERROR("failed to allocate Huffman tables");
huffman_tables_reset(&priv->huf_tables);
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
fill_huffman_table(slice->huf_table, &priv->huf_tables);
huffman_tables_reset(&priv->huf_tables);
}
slice_param = slice->param;
slice_param->num_components = scan_hdr.num_components;
for (i = 0; i < scan_hdr.num_components; i++) {
@@ -700,6 +728,16 @@ gst_vaapi_decoder_jpeg_parse(GstVaapiDecoder *base_decoder,
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
break;
case GST_JPEG_MARKER_DAC:
case GST_JPEG_MARKER_DHT:
case GST_JPEG_MARKER_DQT:
if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOF)
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
break;
case GST_JPEG_MARKER_DRI:
if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOS)
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
break;
case GST_JPEG_MARKER_DNL:
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
break;
@@ -792,10 +830,6 @@ gst_vaapi_decoder_jpeg_start_frame(GstVaapiDecoder *base_decoder,
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
status = fill_huffman_table(decoder, picture);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
/* Update presentation time */
picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
return GST_VAAPI_DECODER_STATUS_SUCCESS;


@@ -291,6 +291,11 @@ gst_vaapi_picture_decode(GstVaapiPicture *picture)
GstVaapiSlice * const slice = g_ptr_array_index(picture->slices, i);
VABufferID va_buffers[2];
huf_table = slice->huf_table;
if (huf_table && !do_decode(va_display, va_context,
&huf_table->param_id, (void **)&huf_table->param))
return FALSE;
vaapi_unmap_buffer(va_display, slice->param_id, NULL);
va_buffers[0] = slice->param_id;
va_buffers[1] = slice->data_id;
@@ -396,6 +401,9 @@ gst_vaapi_slice_destroy(GstVaapiSlice *slice)
{
VADisplay const va_display = GET_VA_DISPLAY(slice);
gst_vaapi_mini_object_replace((GstVaapiMiniObject **)&slice->huf_table,
NULL);
vaapi_destroy_buffer(va_display, &slice->data_id);
vaapi_destroy_buffer(va_display, &slice->param_id);
slice->param = NULL;


@@ -216,6 +216,9 @@ struct _GstVaapiSlice {
VABufferID param_id;
VABufferID data_id;
gpointer param;
/* Per-slice overrides */
GstVaapiHuffmanTable *huf_table;
};
G_GNUC_INTERNAL