Copy cog plugin and video library from cog project

This commit is contained in:
David Schleef 2009-08-31 10:36:46 -07:00
parent 2d28da1994
commit 21aa38d13b
18 changed files with 9931 additions and 0 deletions

View file

@@ -672,6 +672,17 @@ AG_GST_CHECK_FEATURE(CELT, [celt], celt, [
AC_SUBST(CELT_LIBS)
])
dnl *** Cog ***
dnl NOTE(review): the pkg-config check below fills ORC_CFLAGS/ORC_LIBS,
dnl but ext/cog/Makefile.am consumes COG_CFLAGS/COG_LIBS, which nothing
dnl visible substitutes -- confirm the variable naming is intentional.
translit(dnm, m, l) AM_CONDITIONAL(USE_COG, true)
AG_GST_CHECK_FEATURE(COG, [Cog plugin], cog, [
  PKG_CHECK_MODULES(ORC, orc-0.4 >= 0.4.2.1, HAVE_COG="yes", [
    HAVE_COG="no"
    AC_MSG_RESULT(no)
  ])
  AC_SUBST(ORC_CFLAGS)
  AC_SUBST(ORC_LIBS)
])
dnl *** dc1394 ***
translit(dnm, m, l) AM_CONDITIONAL(USE_DC1394, true)
AG_GST_CHECK_FEATURE(DC1394, [libdc1394], dc1394, [
@@ -1798,6 +1809,7 @@ ext/apexsink/Makefile
ext/bz2/Makefile
ext/cdaudio/Makefile
ext/celt/Makefile
ext/cog/Makefile
ext/dc1394/Makefile
ext/dirac/Makefile
ext/directfb/Makefile

32
ext/cog/Makefile.am Normal file
View file

@@ -0,0 +1,32 @@
# Build libgstcog.la, the GStreamer plugin wrapping the cog video library.
plugin_LTLIBRARIES = libgstcog.la

# NOTE(review): configure.ac's cog check AC_SUBSTs ORC_CFLAGS/ORC_LIBS,
# while COG_CFLAGS/COG_LIBS used below are not substituted anywhere
# visible -- verify the intended variable names.
libgstcog_la_CFLAGS = \
	$(GST_PLUGINS_BASE_CFLAGS) \
	$(GST_CFLAGS) \
	$(COG_CFLAGS)
libgstcog_la_LIBADD = \
	$(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
	$(GST_BASE_LIBS) \
	$(GST_LIBS) \
	$(COG_LIBS)
libgstcog_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBM)
libgstcog_la_LIBTOOLFLAGS = --tag=disable-static

# NOTE(review): gstcog.c includes gstjpegdec.h and registers elements
# (cogjpegdec, deblock, motiondetect, cogdecimate) with no matching
# source file listed here -- verify the plugin links.
libgstcog_la_SOURCES = \
	cogframe.c \
	cogframe.h \
	cogorc.c \
	cogorc.h \
	cogvirtframe.c \
	cogvirtframe.h \
	gstcog.c \
	gstcogcolorspace.c \
	gstcogdownsample.c \
	gstcogfilter.c \
	gstcogmse.c \
	gstcogscale.c \
	gstcogutils.c \
	gstcogutils.h \
	gstcolorconvert.c \
	gstlogoinsert.c

1002
ext/cog/cogframe.c Normal file

File diff suppressed because it is too large Load diff

195
ext/cog/cogframe.h Normal file
View file

@@ -0,0 +1,195 @@
#ifndef __COG_FRAME_H__
#define __COG_FRAME_H__
#include <cog/cogutils.h>
COG_BEGIN_DECLS
typedef struct _CogFrame CogFrame;
typedef struct _CogFrameData CogFrameData;
typedef struct _CogUpsampledFrame CogUpsampledFrame;
typedef void (*CogFrameFreeFunc)(CogFrame *frame, void *priv);
typedef void (*CogFrameRenderFunc)(CogFrame *frame, void *dest, int component, int i);
/* bit pattern:
* 0x100 - 0: normal, 1: indirect (packed)
* 0x001 - horizontal chroma subsampling: 0: 1, 1: 2
* 0x002 - vertical chroma subsampling: 0: 1, 1: 2
* 0x00c - depth: 0: u8, 1: s16, 2: s32
* */
/* Pixel format identifiers.  Values below 0x100 are planar and encode
 * their layout in the bit pattern documented above (subsampling in bits
 * 0-1, sample depth in bits 2-3); values with bit 8 set are packed
 * ("indirect") formats that must be unpacked before per-plane work. */
typedef enum _CogFrameFormat {
  /* planar, 8-bit unsigned samples */
  COG_FRAME_FORMAT_U8_444 = 0x00,
  COG_FRAME_FORMAT_U8_422 = 0x01,
  COG_FRAME_FORMAT_U8_420 = 0x03,
  /* planar, 16-bit signed samples */
  COG_FRAME_FORMAT_S16_444 = 0x04,
  COG_FRAME_FORMAT_S16_422 = 0x05,
  COG_FRAME_FORMAT_S16_420 = 0x07,
  /* planar, 32-bit signed samples */
  COG_FRAME_FORMAT_S32_444 = 0x08,
  COG_FRAME_FORMAT_S32_422 = 0x09,
  COG_FRAME_FORMAT_S32_420 = 0x0b,

  /* indirectly supported */
  COG_FRAME_FORMAT_YUYV = 0x100,  /* YUYV order */
  COG_FRAME_FORMAT_UYVY = 0x101,  /* UYVY order */
  COG_FRAME_FORMAT_AYUV = 0x102,
  COG_FRAME_FORMAT_RGB = 0x104,
  COG_FRAME_FORMAT_v216 = 0x105,
  COG_FRAME_FORMAT_v210 = 0x106,
  COG_FRAME_FORMAT_RGBx = 0x110,
  COG_FRAME_FORMAT_xRGB = 0x111,
  COG_FRAME_FORMAT_BGRx = 0x112,
  COG_FRAME_FORMAT_xBGR = 0x113,
  COG_FRAME_FORMAT_RGBA = 0x114,
  COG_FRAME_FORMAT_ARGB = 0x115,
  COG_FRAME_FORMAT_BGRA = 0x116,
  COG_FRAME_FORMAT_ABGR = 0x117,
} CogFrameFormat;

/* Extract the depth bits (0x00 = u8, 0x04 = s16, 0x08 = s32). */
#define COG_FRAME_FORMAT_DEPTH(format) ((format) & 0xc)
#define COG_FRAME_FORMAT_DEPTH_U8 0x00
#define COG_FRAME_FORMAT_DEPTH_S16 0x04
#define COG_FRAME_FORMAT_DEPTH_S32 0x08
/* Chroma subsampling shifts: 1 means chroma planes are half-size in
 * that direction (planar formats only). */
#define COG_FRAME_FORMAT_H_SHIFT(format) ((format) & 0x1)
#define COG_FRAME_FORMAT_V_SHIFT(format) (((format)>>1) & 0x1)
/* Non-zero for packed (indirect) formats. */
#define COG_FRAME_IS_PACKED(format) (((format)>>8) & 0x1)

/* Per-component slots in CogFrame.cached_lines. */
#define COG_FRAME_CACHE_SIZE 8
/* Description of a single plane (component) of a frame. */
struct _CogFrameData {
  CogFrameFormat format;        /* format of the owning frame */
  void *data;                   /* first byte of the plane */
  int stride;                   /* bytes between successive lines */
  int width;
  int height;
  int length;                   /* total size of the plane in bytes */
  int h_shift;                  /* horizontal subsampling shift vs. luma */
  int v_shift;                  /* vertical subsampling shift vs. luma */
};

/* Reference-counted video frame.  A frame is either backed by real
 * memory (components[] point into regions[]) or "virtual": lines are
 * produced on demand through render_line from virt_frame1/virt_frame2
 * (see cogvirtframe.h). */
struct _CogFrame {
  int refcount;
  CogFrameFreeFunc free;        /* invoked when refcount reaches zero */
  CogMemoryDomain *domain;
  void *regions[3];
  void *priv;                   /* passed back to the free callback */

  CogFrameFormat format;
  int width;
  int height;

  CogFrameData components[3];

  /* virtual frame state */
  int is_virtual;               /* non-zero when lines come from render_line */
  int cached_lines[3][COG_FRAME_CACHE_SIZE];
  CogFrame *virt_frame1;        /* upstream frame(s) the virtual frame reads */
  CogFrame *virt_frame2;
  void (*render_line) (CogFrame *frame, void *dest, int component, int i);
  void *virt_priv;
  void *virt_priv2;
  int param1;                   /* operation-specific parameters */
  int param2;

  /* border size used by the *_extended allocators -- units presumably
   * pixels; TODO confirm against cogframe.c */
  int extension;
};

/* A frame plus upsampled variants of it, used by the subpel accessors
 * below.  frames[0] is presumably the original; see
 * cog_upsampled_frame_upsample() -- TODO confirm layout. */
struct _CogUpsampledFrame {
  CogFrame *frames[4];
  void *components[3];
};
/* Address of line i of a plane. */
#define COG_FRAME_DATA_GET_LINE(fd,i) (COG_OFFSET((fd)->data,(fd)->stride*(i)))
/* Address of pixel (i,j) of an 8-bit / 16-bit plane respectively. */
#define COG_FRAME_DATA_GET_PIXEL_U8(fd,i,j) ((uint8_t *)COG_OFFSET((fd)->data,(fd)->stride*(j)+(i)))
#define COG_FRAME_DATA_GET_PIXEL_S16(fd,i,j) ((int16_t *)COG_OFFSET((fd)->data,(fd)->stride*(j)+(i)*sizeof(int16_t)))
CogFrame * cog_frame_new (void);
CogFrame * cog_frame_new_and_alloc (CogMemoryDomain *domain,
CogFrameFormat format, int width, int height);
CogFrame * cog_frame_new_from_data_I420 (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_YV12 (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_YUY2 (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_UYVY (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_UYVY_full (void *data, int width, int height, int stride);
CogFrame * cog_frame_new_from_data_AYUV (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_v216 (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_v210 (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_Y42B (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_Y444 (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_RGB (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_RGBx (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_xRGB (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_BGRx (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_xBGR (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_RGBA (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_ARGB (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_BGRA (void *data, int width, int height);
CogFrame * cog_frame_new_from_data_ABGR (void *data, int width, int height);
void cog_frame_set_free_callback (CogFrame *frame,
CogFrameFreeFunc free_func, void *priv);
void cog_frame_unref (CogFrame *frame);
CogFrame *cog_frame_ref (CogFrame *frame);
CogFrame *cog_frame_dup (CogFrame *frame);
CogFrame *cog_frame_clone (CogMemoryDomain *domain, CogFrame *frame);
void cog_frame_convert (CogFrame *dest, CogFrame *src);
void cog_frame_add (CogFrame *dest, CogFrame *src);
void cog_frame_subtract (CogFrame *dest, CogFrame *src);
void cog_frame_shift_left (CogFrame *frame, int shift);
void cog_frame_shift_right (CogFrame *frame, int shift);
//void cog_frame_downsample (CogFrame *dest, CogFrame *src);
void cog_frame_upsample_horiz (CogFrame *dest, CogFrame *src);
void cog_frame_upsample_vert (CogFrame *dest, CogFrame *src);
double cog_frame_calculate_average_luma (CogFrame *frame);
CogFrame * cog_frame_convert_to_444 (CogFrame *frame);
void cog_frame_md5 (CogFrame *frame, uint32_t *state);
CogFrame * cog_frame_new_and_alloc_extended (CogMemoryDomain *domain,
CogFrameFormat format, int width, int height, int extension);
CogFrame *cog_frame_dup_extended (CogFrame *frame, int extension);
void cog_frame_edge_extend (CogFrame *frame, int width, int height);
void cog_frame_zero_extend (CogFrame *frame, int width, int height);
void cog_frame_mark (CogFrame *frame, int value);
void cog_frame_mc_edgeextend (CogFrame *frame);
void cog_frame_data_get_codeblock (CogFrameData *dest, CogFrameData *src,
int x, int y, int horiz_codeblocks, int vert_codeblocks);
CogUpsampledFrame * cog_upsampled_frame_new (CogFrame *frame);
void cog_upsampled_frame_free (CogUpsampledFrame *df);
void cog_upsampled_frame_upsample (CogUpsampledFrame *df);
#ifdef ENABLE_MOTION_REF
int cog_upsampled_frame_get_pixel_prec0 (CogUpsampledFrame *upframe, int k,
int x, int y);
int cog_upsampled_frame_get_pixel_prec1 (CogUpsampledFrame *upframe, int k,
int x, int y);
int cog_upsampled_frame_get_pixel_prec3 (CogUpsampledFrame *upframe, int k,
int x, int y);
int cog_upsampled_frame_get_pixel_precN (CogUpsampledFrame *upframe, int k,
int x, int y, int mv_precision);
#endif
void cog_upsampled_frame_get_block_precN (CogUpsampledFrame *upframe, int k,
int x, int y, int prec, CogFrameData *dest);
void cog_upsampled_frame_get_block_fast_precN (CogUpsampledFrame *upframe, int k,
int x, int y, int prec, CogFrameData *dest, CogFrameData *fd);
void cog_upsampled_frame_get_subdata_prec0 (CogUpsampledFrame *upframe,
int k, int x, int y, CogFrameData *fd);
void cog_upsampled_frame_get_subdata_prec1 (CogUpsampledFrame *upframe,
int k, int x, int y, CogFrameData *fd);
void cog_frame_get_subdata (CogFrame *frame, CogFrameData *fd,
int comp, int x, int y);
void cog_frame_split_fields (CogFrame *dest1, CogFrame *dest2, CogFrame *src);
COG_END_DECLS
#endif

2818
ext/cog/cogorc.c Normal file

File diff suppressed because it is too large Load diff

36
ext/cog/cogorc.h Normal file
View file

@@ -0,0 +1,36 @@
/* autogenerated from cog.orc */
#ifndef _ORC_OUT_H_
#define _ORC_OUT_H_
void cogorc_downsample_horiz_cosite_3tap (uint8_t * d1, uint16_t * s1, uint16_t * s2, int n);
void cogorc_downsample_vert_halfsite_2tap (uint8_t * d1, uint8_t * s1, uint8_t * s2, int n);
void cogorc_downsample_vert_halfsite_3tap (uint8_t * d1, uint8_t * s1, uint8_t * s2, uint8_t * s3, int n);
void cogorc_downsample_vert_halfsite_4tap (uint8_t * d1, uint8_t * s1, uint8_t * s2, uint8_t * s3, uint8_t * s4, int n);
void cogorc_upsample_horiz_cosite (uint8_t * d1, uint8_t * s1, uint8_t * s2, int n);
void cogorc_upsample_vert_avgub (uint8_t * d1, uint8_t * s1, uint8_t * s2, int n);
void orc_unpack_yuyv_y (uint8_t * d1, uint16_t * s1, int n);
void orc_unpack_yuyv_u (uint8_t * d1, uint32_t * s1, int n);
void orc_unpack_yuyv_v (uint8_t * d1, uint32_t * s1, int n);
void orc_pack_yuyv (uint32_t * d1, uint8_t * s1, uint8_t * s2, uint8_t * s3, int n);
void orc_unpack_uyvy_y (uint8_t * d1, uint16_t * s1, int n);
void orc_unpack_uyvy_u (uint8_t * d1, uint32_t * s1, int n);
void orc_unpack_uyvy_v (uint8_t * d1, uint32_t * s1, int n);
void orc_pack_uyvy (uint32_t * d1, uint8_t * s1, uint8_t * s2, uint8_t * s3, int n);
void orc_memcpy (void * d1, void * s1, int n);
void orc_addc_convert_u8_s16 (uint8_t * d1, int16_t * s1, int n);
void orc_subc_convert_s16_u8 (int16_t * d1, uint8_t * s1, int n);
void orc_splat_u8_ns (uint8_t * d1, int p1, int n);
void orc_splat_s16_ns (int16_t * d1, int p1, int n);
void orc_matrix2_u8 (uint8_t * d1, uint8_t * s1, uint8_t * s2, int p1, int p2, int p3, int n);
void orc_matrix3_u8 (uint8_t * d1, uint8_t * s1, uint8_t * s2, uint8_t * s3, int p1, int p2, int p3, int p4, int n);
void orc_pack_123x (uint32_t * d1, uint8_t * s1, uint8_t * s2, uint8_t * s3, int p1, int n);
void orc_pack_x123 (uint32_t * d1, uint8_t * s1, uint8_t * s2, uint8_t * s3, int p1, int n);
void cogorc_combine4_u8 (uint8_t * d1, uint8_t * s1, uint8_t * s2, uint8_t * s3, uint8_t * s4, int p1, int p2, int p3, int p4, int n);
void cogorc_unpack_ayuv_y (uint8_t * d1, uint32_t * s1, int n);
void cogorc_unpack_ayuv_u (uint8_t * d1, uint32_t * s1, int n);
void cogorc_unpack_ayuv_v (uint8_t * d1, uint32_t * s1, int n);
#endif

1734
ext/cog/cogvirtframe.c Normal file

File diff suppressed because it is too large Load diff

50
ext/cog/cogvirtframe.h Normal file
View file

@@ -0,0 +1,50 @@
#ifndef __COG_VIRT_FRAME_H__
#define __COG_VIRT_FRAME_H__
#include <cog/cogutils.h>
#include <cog-video/cogframe.h>
COG_BEGIN_DECLS
CogFrame *cog_frame_new_virtual (CogMemoryDomain *domain,
CogFrameFormat format, int width, int height);
void *cog_virt_frame_get_line (CogFrame *frame, int component, int i);
void cog_virt_frame_render_line (CogFrame *frame, void *dest,
int component, int i);
void cog_virt_frame_render (CogFrame *frame, CogFrame *dest);
CogFrame *cog_virt_frame_new_horiz_downsample (CogFrame *vf, int n_taps);
CogFrame *cog_virt_frame_new_vert_downsample (CogFrame *vf, int n_taps);
CogFrame *cog_virt_frame_new_vert_resample (CogFrame *vf, int height);
CogFrame *cog_virt_frame_new_horiz_resample (CogFrame *vf, int width);
CogFrame *cog_virt_frame_new_unpack (CogFrame *vf);
CogFrame *cog_virt_frame_new_pack_YUY2 (CogFrame *vf);
CogFrame *cog_virt_frame_new_pack_UYVY (CogFrame *vf);
CogFrame *cog_virt_frame_new_pack_AYUV (CogFrame *vf);
CogFrame *cog_virt_frame_new_pack_v216 (CogFrame *vf);
CogFrame *cog_virt_frame_new_pack_v210 (CogFrame *vf);
CogFrame *cog_virt_frame_new_pack_RGB (CogFrame *vf);
CogFrame *cog_virt_frame_new_color_matrix (CogFrame *vf);
CogFrame *cog_virt_frame_new_subsample (CogFrame *vf, CogFrameFormat format);
CogFrame * cog_virt_frame_new_convert_u8 (CogFrame *vf);
CogFrame * cog_virt_frame_new_convert_s16 (CogFrame *vf);
CogFrame * cog_virt_frame_new_crop (CogFrame *vf, int width, int height);
CogFrame * cog_virt_frame_new_edgeextend (CogFrame *vf, int width, int height);
CogFrame * cog_virt_frame_new_pack_RGBx (CogFrame *vf);
CogFrame * cog_virt_frame_new_pack_xRGB (CogFrame *vf);
CogFrame * cog_virt_frame_new_pack_BGRx (CogFrame *vf);
CogFrame * cog_virt_frame_new_pack_xBGR (CogFrame *vf);
CogFrame * cog_virt_frame_new_pack_RGBA (CogFrame *vf);
CogFrame * cog_virt_frame_new_pack_ARGB (CogFrame *vf);
CogFrame * cog_virt_frame_new_pack_BGRA (CogFrame *vf);
CogFrame * cog_virt_frame_new_pack_ABGR (CogFrame *vf);
COG_END_DECLS
#endif

75
ext/cog/gstcog.c Normal file
View file

@@ -0,0 +1,75 @@
/* GStreamer
* Copyright (C) 2007 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <string.h>
#include <gst/gst.h>
#include <cog/cog.h>
#include "gstjpegdec.h"
//GType gst_dither_get_type (void);
GType gst_deblock_get_type (void);
GType gst_cogdownsample_get_type (void);
GType gst_motion_detect_get_type (void);
GType gst_cogcolorspace_get_type (void);
GType gst_cog_scale_get_type (void);
GType gst_colorconvert_get_type (void);
GType gst_logoinsert_get_type (void);
GType gst_mse_get_type (void);
GType gst_decimate_get_type (void);
/* Plugin entry point: initialize libcog, then register every element
 * type this plugin provides.  Return values of gst_element_register()
 * are ignored, so individual registration failures are silent. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  cog_init ();

  /* NOTE(review): GST_TYPE_JPEG_DEC comes from gstjpegdec.h, and
   * deblock/motiondetect/cogdecimate have no source file in
   * Makefile.am's libgstcog_la_SOURCES -- verify these symbols link. */
  gst_element_register (plugin, "cogjpegdec", GST_RANK_PRIMARY,
      GST_TYPE_JPEG_DEC);
  //gst_element_register (plugin, "dither", GST_RANK_NONE,
  //    gst_dither_get_type());
  gst_element_register (plugin, "deblock", GST_RANK_NONE,
      gst_deblock_get_type ());
  gst_element_register (plugin, "cogdownsample", GST_RANK_NONE,
      gst_cogdownsample_get_type ());
  gst_element_register (plugin, "motiondetect", GST_RANK_NONE,
      gst_motion_detect_get_type ());
  gst_element_register (plugin, "cogcolorspace", GST_RANK_NONE,
      gst_cogcolorspace_get_type ());
  gst_element_register (plugin, "cogscale", GST_RANK_NONE,
      gst_cog_scale_get_type ());
  gst_element_register (plugin, "colorconvert", GST_RANK_NONE,
      gst_colorconvert_get_type ());
  gst_element_register (plugin, "coglogoinsert", GST_RANK_NONE,
      gst_logoinsert_get_type ());
  gst_element_register (plugin, "cogmse", GST_RANK_NONE, gst_mse_get_type ());
  gst_element_register (plugin, "cogdecimate", GST_RANK_NONE,
      gst_decimate_get_type ());

  return TRUE;
}

/* Standard GStreamer plugin descriptor. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "cog",
    "Cog plugin",
    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

485
ext/cog/gstcogcolorspace.c Normal file
View file

@@ -0,0 +1,485 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2003> David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* This file was (probably) generated from
* gstvideotemplate.c,v 1.18 2005/11/14 02:13:34 thomasvs Exp
* and
* $Id: make_filter,v 1.8 2004/04/19 22:51:57 ds Exp $
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/video/video.h>
#include <string.h>
#include <cog/cog.h>
#include <math.h>
#include <cog-video/cogvirtframe.h>
#include "gstcogutils.h"
/* Standard GObject cast/check boilerplate for GstCogcolorspace. */
#define GST_TYPE_COGCOLORSPACE \
  (gst_cogcolorspace_get_type())
#define GST_COGCOLORSPACE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_COGCOLORSPACE,GstCogcolorspace))
#define GST_COGCOLORSPACE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_COGCOLORSPACE,GstCogcolorspaceClass))
#define GST_IS_COGCOLORSPACE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_COGCOLORSPACE))
/* FIX: the parameter was named "obj" while the expansion used "klass",
 * so any use of this macro would fail to compile (or silently bind a
 * caller-scope variable named klass). */
#define GST_IS_COGCOLORSPACE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_COGCOLORSPACE))
typedef struct _GstCogcolorspace GstCogcolorspace;
typedef struct _GstCogcolorspaceClass GstCogcolorspaceClass;

/* Colorspace conversion element; carries no per-instance state beyond
 * the GstBaseTransform it derives from. */
struct _GstCogcolorspace
{
  GstBaseTransform base_transform;
};

struct _GstCogcolorspaceClass
{
  GstBaseTransformClass parent_class;
};

GType gst_cogcolorspace_get_type (void);
/* GstCogcolorspace signals and args */
/* No signals are defined; LAST_SIGNAL is a placeholder. */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

/* Property ids.  ARG_WAVELET_TYPE/ARG_LEVEL are currently unused -- the
 * corresponding installs are inside an #if 0 block in class_init. */
enum
{
  ARG_0,
  ARG_WAVELET_TYPE,
  ARG_LEVEL
  /* FILL ME */
};
static void gst_cogcolorspace_base_init (gpointer g_class);
static void gst_cogcolorspace_class_init (gpointer g_class,
gpointer class_data);
static void gst_cogcolorspace_init (GTypeInstance * instance, gpointer g_class);
static void gst_cogcolorspace_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_cogcolorspace_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstCaps *gst_cogcolorspace_transform_caps (GstBaseTransform *
base_transform, GstPadDirection direction, GstCaps * caps);
static GstFlowReturn gst_cogcolorspace_transform (GstBaseTransform *
base_transform, GstBuffer * inbuf, GstBuffer * outbuf);
static gboolean gst_cogcolorspace_get_unit_size (GstBaseTransform *
base_transform, GstCaps * caps, guint * size);
/* Both pads advertise the same formats: the YUV fourccs cog can
 * (un)pack plus the 8-bit-per-channel RGB layouts. */
static GstStaticPadTemplate gst_cogcolorspace_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV
        ("{ I420, YV12, YUY2, UYVY, AYUV, Y42B, Y444, v216, v210 }")
        ";" GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xRGB
        ";" GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_BGRA
        ";" GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_ABGR)
    );

static GstStaticPadTemplate gst_cogcolorspace_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV
        ("{ I420, YV12, YUY2, UYVY, AYUV, Y42B, Y444, v216, v210 }")
        ";" GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xRGB
        ";" GST_VIDEO_CAPS_xBGR ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_BGRA
        ";" GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_ABGR)
    );
/* Register (once) and return the GstCogcolorspace GType. */
GType
gst_cogcolorspace_get_type (void)
{
  static GType type = 0;

  if (type == 0) {
    static const GTypeInfo info = {
      sizeof (GstCogcolorspaceClass),
      gst_cogcolorspace_base_init,
      NULL,
      gst_cogcolorspace_class_init,
      NULL,
      NULL,
      sizeof (GstCogcolorspace),
      0,
      gst_cogcolorspace_init,
    };

    type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
        "GstCogcolorspace", &info, 0);
  }
  return type;
}
/* Class base-init: install pad templates and element metadata. */
static void
gst_cogcolorspace_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  static GstElementDetails details =
      GST_ELEMENT_DETAILS ("YCbCr format conversion",
      "Filter/Effect/Video",
      "YCbCr format conversion",
      "David Schleef <ds@schleef.org>");

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_cogcolorspace_src_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_cogcolorspace_sink_template));
  gst_element_class_set_details (element_class, &details);
}
/* Class init: hook up property accessors and the GstBaseTransform
 * virtual methods.  The wavelet-type/level properties remain disabled
 * in the #if 0 block below. */
static void
gst_cogcolorspace_class_init (gpointer g_class, gpointer class_data)
{
  GObjectClass *gobject_class;
  GstBaseTransformClass *base_transform_class;

  /* FIX: removed the unused "colorspace_class" local (set-but-unused
   * warning); the class cast served no purpose. */
  gobject_class = G_OBJECT_CLASS (g_class);
  base_transform_class = GST_BASE_TRANSFORM_CLASS (g_class);

  gobject_class->set_property = gst_cogcolorspace_set_property;
  gobject_class->get_property = gst_cogcolorspace_get_property;

#if 0
  g_object_class_install_property (gobject_class, ARG_WAVELET_TYPE,
      g_param_spec_int ("wavelet-type", "wavelet type", "wavelet type",
          0, 4, 0, G_PARAM_READWRITE));
  g_object_class_install_property (gobject_class, ARG_LEVEL,
      g_param_spec_int ("level", "level", "level",
          0, 100, 0, G_PARAM_READWRITE));
#endif

  base_transform_class->transform = gst_cogcolorspace_transform;
  base_transform_class->transform_caps = gst_cogcolorspace_transform_caps;
  base_transform_class->get_unit_size = gst_cogcolorspace_get_unit_size;
}
/* Instance init: nothing to set up -- all work happens per-buffer in
 * the transform method. */
static void
gst_cogcolorspace_init (GTypeInstance * instance, gpointer g_class)
{
  //GstCogcolorspace *compress = GST_COGCOLORSPACE (instance);
  //GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);

  GST_DEBUG ("gst_cogcolorspace_init");
}
/* GObject::set_property -- no writable properties are installed, so
 * any id reaching here is invalid. */
static void
gst_cogcolorspace_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  g_return_if_fail (GST_IS_COGCOLORSPACE (object));

  GST_DEBUG ("gst_cogcolorspace_set_property");

  switch (prop_id) {
    default:
      /* FIX: warn on unknown property ids, matching get_property; the
       * unused "src" local was also dropped. */
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject::get_property -- no readable properties are installed, so
 * every id that reaches here is invalid. */
static void
gst_cogcolorspace_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstCogcolorspace *self;

  g_return_if_fail (GST_IS_COGCOLORSPACE (object));
  self = GST_COGCOLORSPACE (object);

  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
#if 0
/* Disabled: builds a GST_TYPE_LIST of every fourcc this element could
 * produce; superseded by gst_cogcolorspace_caps_remove_format_info(). */
static void
transform_value (GValue * dest)
{
  GValue fourcc = { 0 };

  g_value_init (dest, GST_TYPE_LIST);
  g_value_init (&fourcc, GST_TYPE_FOURCC);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('I', '4', '2', '0'));
  gst_value_list_append_value (dest, &fourcc);
  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('Y', 'V', '1', '2'));
  gst_value_list_append_value (dest, &fourcc);
  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'));
  gst_value_list_append_value (dest, &fourcc);
  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'));
  gst_value_list_append_value (dest, &fourcc);
  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'));
  gst_value_list_append_value (dest, &fourcc);
  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('Y', '4', '2', 'B'));
  gst_value_list_append_value (dest, &fourcc);
  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('Y', '4', '4', '4'));
  gst_value_list_append_value (dest, &fourcc);
  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('v', '2', '1', '0'));
  gst_value_list_append_value (dest, &fourcc);
  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('v', '2', '1', '6'));
  gst_value_list_append_value (dest, &fourcc);

  g_value_unset (&fourcc);
}
#endif
/* Produce the caps reachable by conversion: strip all format-specific
 * fields from a copy of @caps, then offer both the yuv and rgb media
 * types of the result.  Returns a new caps; @caps is not consumed. */
static GstCaps *
gst_cogcolorspace_caps_remove_format_info (GstCaps * caps)
{
  guint i;                      /* FIX: gst_caps_get_size() returns guint;
                                 * was int (signed/unsigned comparison) */
  GstStructure *structure;
  GstCaps *rgbcaps;

  caps = gst_caps_copy (caps);

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    structure = gst_caps_get_structure (caps, i);

    gst_structure_set_name (structure, "video/x-raw-yuv");
    gst_structure_remove_field (structure, "format");
    gst_structure_remove_field (structure, "endianness");
    gst_structure_remove_field (structure, "depth");
    gst_structure_remove_field (structure, "bpp");
    gst_structure_remove_field (structure, "red_mask");
    gst_structure_remove_field (structure, "green_mask");
    gst_structure_remove_field (structure, "blue_mask");
    gst_structure_remove_field (structure, "alpha_mask");
    gst_structure_remove_field (structure, "palette_data");
  }

  gst_caps_do_simplify (caps);

  /* Duplicate the stripped structures under the rgb media type too. */
  rgbcaps = gst_caps_copy (caps);
  for (i = 0; i < gst_caps_get_size (rgbcaps); i++) {
    structure = gst_caps_get_structure (rgbcaps, i);
    gst_structure_set_name (structure, "video/x-raw-rgb");
  }
  gst_caps_append (caps, rgbcaps);

  return caps;
}
/* GstBaseTransform::transform_caps -- conversion may change only the
 * pixel format, so the result is @caps with all format fields removed.
 * Two earlier strategies are kept, disabled, in #if 0 blocks. */
static GstCaps *
gst_cogcolorspace_transform_caps (GstBaseTransform * base_transform,
    GstPadDirection direction, GstCaps * caps)
{
#if 0
  int i;
  GstStructure *structure;
  GValue new_value = { 0 };
  const GValue *value;

  caps = gst_caps_copy (caps);

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    structure = gst_caps_get_structure (caps, i);

    value = gst_structure_get_value (structure, "format");
    transform_value (&new_value);
    gst_structure_set_value (structure, "format", &new_value);
    g_value_unset (&new_value);
  }

  return caps;
#endif
#if 0
  GstCaps *template;
  GstCaps *result;

  template = gst_ffmpegcsp_codectype_to_caps (CODEC_TYPE_VIDEO, NULL);
  result = gst_caps_intersect (caps, template);
  gst_caps_unref (template);

  gst_caps_append (result, gst_ffmpegcsp_caps_remove_format_info (caps));

  return result;
#endif

  return gst_cogcolorspace_caps_remove_format_info (caps);
}
/* GstBaseTransform::get_unit_size -- one unit is one video frame; its
 * size follows from the parsed format and dimensions. */
static gboolean
gst_cogcolorspace_get_unit_size (GstBaseTransform * base_transform,
    GstCaps * caps, guint * size)
{
  GstVideoFormat fmt;
  int w, h;

  if (!gst_video_format_parse_caps (caps, &fmt, &w, &h))
    return FALSE;

  *size = gst_video_format_get_size (fmt, w, h);
  return TRUE;
}
/* GstBaseTransform::transform -- convert the video in @inbuf to the
 * format of @outbuf by chaining cog virtual-frame operations: unpack,
 * adjust chroma subsampling, optional color-matrix for YUV->RGB, then
 * pack to the destination layout.
 *
 * Returns GST_FLOW_ERROR when either buffer's caps cannot be parsed,
 * GST_FLOW_OK otherwise.  RGB->YUV output is not implemented and only
 * logs an error. */
static GstFlowReturn
gst_cogcolorspace_transform (GstBaseTransform * base_transform,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  CogFrame *out_frame;
  CogFrame *frame;
  int width, height;
  /* FIX: these were uint32_t, passed to gst_video_format_parse_caps()
   * which takes GstVideoFormat * -- incompatible pointer types. */
  GstVideoFormat in_format;
  GstVideoFormat out_format;
  CogFrameFormat new_subsample;
  gboolean ret;

  g_return_val_if_fail (GST_IS_COGCOLORSPACE (base_transform), GST_FLOW_ERROR);

  ret = gst_video_format_parse_caps (inbuf->caps, &in_format, &width, &height);
  /* FIX: was "ret |= ...", which let a failed parse of one caps be
   * masked by a successful parse of the other; both must succeed. */
  ret &=
      gst_video_format_parse_caps (outbuf->caps, &out_format, &width, &height);
  if (!ret) {
    return GST_FLOW_ERROR;
  }

  /* Extra refs are taken because the wrapped frames own the buffers and
   * release them when unreffed below (presumably via the frame free
   * callback -- confirm in gstcogutils.c). */
  frame = gst_cog_buffer_wrap (gst_buffer_ref (inbuf),
      in_format, width, height);
  out_frame = gst_cog_buffer_wrap (gst_buffer_ref (outbuf),
      out_format, width, height);

  /* Choose the intermediate planar subsampling matching the output. */
  switch (out_format) {
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_v210:
    case GST_VIDEO_FORMAT_v216:
      new_subsample = COG_FRAME_FORMAT_U8_422;
      break;
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      new_subsample = COG_FRAME_FORMAT_U8_420;
      break;
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ABGR:
    default:
      new_subsample = COG_FRAME_FORMAT_U8_444;
      break;
  }

  frame = cog_virt_frame_new_unpack (frame);
  frame = cog_virt_frame_new_subsample (frame, new_subsample);

  if (gst_video_format_is_rgb (out_format) &&
      gst_video_format_is_yuv (in_format)) {
    frame = cog_virt_frame_new_color_matrix (frame);
  }
  if (gst_video_format_is_yuv (out_format) &&
      gst_video_format_is_rgb (in_format)) {
    GST_ERROR ("not supported!");
  }

  /* Pack to the destination layout; planar outputs need no pack step. */
  switch (out_format) {
    case GST_VIDEO_FORMAT_YUY2:
      frame = cog_virt_frame_new_pack_YUY2 (frame);
      break;
    case GST_VIDEO_FORMAT_UYVY:
      frame = cog_virt_frame_new_pack_UYVY (frame);
      break;
    case GST_VIDEO_FORMAT_AYUV:
      frame = cog_virt_frame_new_pack_AYUV (frame);
      break;
    case GST_VIDEO_FORMAT_v216:
      frame = cog_virt_frame_new_pack_v216 (frame);
      break;
    case GST_VIDEO_FORMAT_v210:
      frame = cog_virt_frame_new_pack_v210 (frame);
      break;
    case GST_VIDEO_FORMAT_RGBx:
      frame = cog_virt_frame_new_pack_RGBx (frame);
      break;
    case GST_VIDEO_FORMAT_xRGB:
      frame = cog_virt_frame_new_pack_xRGB (frame);
      break;
    case GST_VIDEO_FORMAT_BGRx:
      frame = cog_virt_frame_new_pack_BGRx (frame);
      break;
    case GST_VIDEO_FORMAT_xBGR:
      frame = cog_virt_frame_new_pack_xBGR (frame);
      break;
    case GST_VIDEO_FORMAT_RGBA:
      frame = cog_virt_frame_new_pack_RGBA (frame);
      break;
    case GST_VIDEO_FORMAT_ARGB:
      frame = cog_virt_frame_new_pack_ARGB (frame);
      break;
    case GST_VIDEO_FORMAT_BGRA:
      frame = cog_virt_frame_new_pack_BGRA (frame);
      break;
    case GST_VIDEO_FORMAT_ABGR:
      frame = cog_virt_frame_new_pack_ABGR (frame);
      break;
    default:
      break;
  }

  cog_virt_frame_render (frame, out_frame);
  cog_frame_unref (frame);
  cog_frame_unref (out_frame);

  return GST_FLOW_OK;
}

412
ext/cog/gstcogdownsample.c Normal file
View file

@@ -0,0 +1,412 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2003> David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* This file was (probably) generated from
* gstvideotemplate.c,v 1.18 2005/11/14 02:13:34 thomasvs Exp
* and
* $Id: make_filter,v 1.8 2004/04/19 22:51:57 ds Exp $
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/video/video.h>
#include <string.h>
#include <cog/cog.h>
#include <math.h>
#include <cog-video/cogvirtframe.h>
/* Standard GObject cast/check boilerplate for GstCogdownsample. */
#define GST_TYPE_COGDOWNSAMPLE \
  (gst_cogdownsample_get_type())
#define GST_COGDOWNSAMPLE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_COGDOWNSAMPLE,GstCogdownsample))
#define GST_COGDOWNSAMPLE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_COGDOWNSAMPLE,GstCogdownsampleClass))
#define GST_IS_COGDOWNSAMPLE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_COGDOWNSAMPLE))
/* FIX: the parameter was named "obj" while the expansion used "klass",
 * so any use of this macro would fail to compile (or silently bind a
 * caller-scope variable named klass). */
#define GST_IS_COGDOWNSAMPLE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_COGDOWNSAMPLE))
typedef struct _GstCogdownsample GstCogdownsample;
typedef struct _GstCogdownsampleClass GstCogdownsampleClass;

/* Factor-of-2 video downsampler element; no per-instance state beyond
 * the GstBaseTransform it derives from. */
struct _GstCogdownsample
{
  GstBaseTransform base_transform;

  //CogVideoFormat format;
};

struct _GstCogdownsampleClass
{
  GstBaseTransformClass parent_class;
};

GType gst_cogdownsample_get_type (void);
/* GstCogdownsample signals and args */
/* No signals are defined; LAST_SIGNAL is a placeholder. */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

/* Property ids; none are currently installed on the class. */
enum
{
  ARG_0,
  ARG_WAVELET_TYPE,
  ARG_LEVEL
  /* FILL ME */
};
static void gst_cogdownsample_base_init (gpointer g_class);
static void gst_cogdownsample_class_init (gpointer g_class,
gpointer class_data);
static void gst_cogdownsample_init (GTypeInstance * instance, gpointer g_class);
static void gst_cogdownsample_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_cogdownsample_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstCaps *gst_cogdownsample_transform_caps (GstBaseTransform *
base_transform, GstPadDirection direction, GstCaps * caps);
static GstFlowReturn gst_cogdownsample_transform (GstBaseTransform *
base_transform, GstBuffer * inbuf, GstBuffer * outbuf);
static gboolean gst_cogdownsample_get_unit_size (GstBaseTransform *
base_transform, GstCaps * caps, guint * size);
static GstStaticPadTemplate gst_cogdownsample_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, AYUV }"))
);
static GstStaticPadTemplate gst_cogdownsample_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, AYUV }"))
);
/* Register (once, lazily) and return the GType for the element. */
GType
gst_cogdownsample_get_type (void)
{
  static GType downsample_type = 0;

  if (downsample_type == 0) {
    static const GTypeInfo downsample_info = {
      sizeof (GstCogdownsampleClass),
      gst_cogdownsample_base_init,
      NULL,
      gst_cogdownsample_class_init,
      NULL,
      NULL,
      sizeof (GstCogdownsample),
      0,
      gst_cogdownsample_init,
    };

    downsample_type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
        "GstCogdownsample", &downsample_info, 0);
  }
  return downsample_type;
}
/* Install pad templates and element metadata on the element class. */
static void
gst_cogdownsample_base_init (gpointer g_class)
{
  static GstElementDetails details = GST_ELEMENT_DETAILS ("Downsample video",
      "Filter/Effect/Video",
      "Decreases size of video by a factor of 2",
      "David Schleef <ds@schleef.org>");
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_cogdownsample_src_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_cogdownsample_sink_template));
  gst_element_class_set_details (element_class, &details);
}
/* Wire up GObject property handling and the GstBaseTransform vfuncs.
 * FIX: removed the 'downsample_class' local, which was assigned but
 * never read (unused-variable warning). */
static void
gst_cogdownsample_class_init (gpointer g_class, gpointer class_data)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
  GstBaseTransformClass *base_transform_class =
      GST_BASE_TRANSFORM_CLASS (g_class);

  gobject_class->set_property = gst_cogdownsample_set_property;
  gobject_class->get_property = gst_cogdownsample_get_property;

#if 0
  /* Properties reserved for a future wavelet-based implementation. */
  g_object_class_install_property (gobject_class, ARG_WAVELET_TYPE,
      g_param_spec_int ("wavelet-type", "wavelet type", "wavelet type",
          0, 4, 0, G_PARAM_READWRITE));
  g_object_class_install_property (gobject_class, ARG_LEVEL,
      g_param_spec_int ("level", "level", "level",
          0, 100, 0, G_PARAM_READWRITE));
#endif

  base_transform_class->transform = gst_cogdownsample_transform;
  base_transform_class->transform_caps = gst_cogdownsample_transform_caps;
  base_transform_class->get_unit_size = gst_cogdownsample_get_unit_size;
}
/* Instance init: the element has no per-instance state to set up. */
static void
gst_cogdownsample_init (GTypeInstance * instance, gpointer g_class)
{
  GST_DEBUG ("gst_cogdownsample_init");
}
/* GObject set_property vfunc. No writable properties are installed.
 * FIX: removed the unused 'src' local and, for consistency with
 * get_property, warn on unknown property IDs instead of ignoring them. */
static void
gst_cogdownsample_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  g_return_if_fail (GST_IS_COGDOWNSAMPLE (object));

  GST_DEBUG ("gst_cogdownsample_set_property");
  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property vfunc. No readable properties are installed.
 * FIX: removed the unused 'src' local (assigned but never read). */
static void
gst_cogdownsample_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  g_return_if_fail (GST_IS_COGDOWNSAMPLE (object));

  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Map a "width"/"height" caps value across the element: halve it going
 * downstream (sink direction), double it going upstream.  Handles plain
 * ints and int ranges; anything else falls into a FIXME path. */
static void
transform_value (GValue * dest, const GValue * src, GstPadDirection dir)
{
  g_value_init (dest, G_VALUE_TYPE (src));

  if (G_VALUE_HOLDS_INT (src)) {
    int x = g_value_get_int (src);

    g_value_set_int (dest, (dir == GST_PAD_SINK) ? x / 2 : x * 2);
  } else if (GST_VALUE_HOLDS_INT_RANGE (src)) {
    int lo = gst_value_get_int_range_min (src);
    int hi = gst_value_get_int_range_max (src);

    if (dir == GST_PAD_SINK) {
      /* Halve, rounding up; keep G_MAXINT meaning "unbounded". */
      lo = (lo + 1) / 2;
      hi = (hi == G_MAXINT) ? G_MAXINT / 2 : (hi + 1) / 2;
    } else {
      /* Double, clamping to G_MAXINT to avoid signed overflow. */
      hi = (hi > G_MAXINT / 2) ? G_MAXINT : hi * 2;
      lo = (lo > G_MAXINT / 2) ? G_MAXINT : lo * 2;
    }
    gst_value_set_int_range (dest, lo, hi);
  } else {
    /* FIXME */
    g_warning ("case not handled");
    g_value_set_int (dest, 100);
  }
}
/* transform_caps vfunc: rewrite width/height of every structure in the
 * caps through transform_value(), leaving all other fields intact. */
static GstCaps *
gst_cogdownsample_transform_caps (GstBaseTransform * base_transform,
    GstPadDirection direction, GstCaps * caps)
{
  GstCaps *result;
  int idx;

  result = gst_caps_copy (caps);
  for (idx = 0; idx < gst_caps_get_size (result); idx++) {
    GstStructure *s = gst_caps_get_structure (result, idx);
    GValue scaled = { 0 };

    transform_value (&scaled, gst_structure_get_value (s, "width"), direction);
    gst_structure_set_value (s, "width", &scaled);
    g_value_unset (&scaled);

    transform_value (&scaled, gst_structure_get_value (s, "height"), direction);
    gst_structure_set_value (s, "height", &scaled);
    g_value_unset (&scaled);
  }
  return result;
}
/* get_unit_size vfunc: compute the byte size of one video frame for the
 * given caps.
 * FIX: the original ignored the return values of the caps accessors, so
 * malformed caps led to reading uninitialized width/height/format; now we
 * fail cleanly with FALSE. */
static gboolean
gst_cogdownsample_get_unit_size (GstBaseTransform * base_transform,
    GstCaps * caps, guint * size)
{
  GstStructure *structure;
  int width = 0, height = 0;
  uint32_t format = 0;

  structure = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_fourcc (structure, "format", &format) ||
      !gst_structure_get_int (structure, "width", &width) ||
      !gst_structure_get_int (structure, "height", &height)) {
    return FALSE;
  }

  switch (format) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
      /* Planar 4:2:0: 1 byte luma + 2 quarter-size chroma planes. */
      *size = width * height * 3 / 2;
      break;
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
      /* Packed 4:2:2: 2 bytes per pixel. */
      *size = width * height * 2;
      break;
    case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
      /* Packed 4:4:4 with alpha: 4 bytes per pixel. */
      *size = width * height * 4;
      break;
    default:
      /* The pad templates restrict formats to the cases above. */
      g_assert_not_reached ();
  }
  return TRUE;
}
/* transform vfunc: wrap input and output buffers as CogFrames, build a
 * virtual-frame pipeline that downsamples by 2 in each dimension, and
 * render the result into the output buffer.
 * FIX: removed the unused 'compress' local; added an error return after
 * the unreachable default so frame/outframe cannot be used
 * uninitialized if assertions are compiled out. */
static GstFlowReturn
gst_cogdownsample_transform (GstBaseTransform * base_transform,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  CogFrame *frame;
  CogFrame *outframe;
  int width, height;
  uint32_t format;

  g_return_val_if_fail (GST_IS_COGDOWNSAMPLE (base_transform), GST_FLOW_ERROR);

  gst_structure_get_fourcc (gst_caps_get_structure (inbuf->caps, 0),
      "format", &format);
  gst_structure_get_int (gst_caps_get_structure (inbuf->caps, 0),
      "width", &width);
  gst_structure_get_int (gst_caps_get_structure (inbuf->caps, 0),
      "height", &height);

  switch (format) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      frame = cog_frame_new_from_data_I420 (GST_BUFFER_DATA (inbuf),
          width, height);
      outframe = cog_frame_new_from_data_I420 (GST_BUFFER_DATA (outbuf),
          width / 2, height / 2);
      break;
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
      frame = cog_frame_new_from_data_YV12 (GST_BUFFER_DATA (inbuf),
          width, height);
      outframe = cog_frame_new_from_data_YV12 (GST_BUFFER_DATA (outbuf),
          width / 2, height / 2);
      break;
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
      frame = cog_frame_new_from_data_YUY2 (GST_BUFFER_DATA (inbuf),
          width, height);
      outframe = cog_frame_new_from_data_YUY2 (GST_BUFFER_DATA (outbuf),
          width / 2, height / 2);
      break;
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
      frame = cog_frame_new_from_data_UYVY (GST_BUFFER_DATA (inbuf),
          width, height);
      outframe = cog_frame_new_from_data_UYVY (GST_BUFFER_DATA (outbuf),
          width / 2, height / 2);
      break;
    case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
      frame = cog_frame_new_from_data_AYUV (GST_BUFFER_DATA (inbuf),
          width, height);
      outframe = cog_frame_new_from_data_AYUV (GST_BUFFER_DATA (outbuf),
          width / 2, height / 2);
      break;
    default:
      g_assert_not_reached ();
      return GST_FLOW_ERROR;
  }

  frame = cog_virt_frame_new_unpack (frame);
  /* NOTE(review): the second argument differs between horiz (3) and
   * vert (2) downsample — presumably a filter-taps/quality selector
   * rather than the scale factor; confirm against cogvirtframe.h. */
  frame = cog_virt_frame_new_horiz_downsample (frame, 3);
  frame = cog_virt_frame_new_vert_downsample (frame, 2);

  /* Repack to the negotiated packed format; planar formats render as-is. */
  switch (format) {
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
      frame = cog_virt_frame_new_pack_YUY2 (frame);
      break;
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
      frame = cog_virt_frame_new_pack_UYVY (frame);
      break;
    case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
      frame = cog_virt_frame_new_pack_AYUV (frame);
      break;
    default:
      break;
  }

  cog_virt_frame_render (frame, outframe);
  cog_frame_unref (frame);
  cog_frame_unref (outframe);

  return GST_FLOW_OK;
}

265
ext/cog/gstcogfilter.c Normal file
View file

@ -0,0 +1,265 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2003> David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* This file was (probably) generated from
* gstvideotemplate.c,v 1.18 2005/11/14 02:13:34 thomasvs Exp
* and
* $Id: make_filter,v 1.8 2004/04/19 22:51:57 ds Exp $
*/
#define SCHRO_ENABLE_UNSTABLE_API
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/video/video.h>
#include <string.h>
#include <schroedinger/schro.h>
#include <schroedinger/schrotables.h>
#include <liboil/liboil.h>
#include <math.h>
/* Standard GObject cast/type-check boilerplate for GstSchrofilter. */
#define GST_TYPE_SCHROFILTER \
  (gst_schrofilter_get_type())
#define GST_SCHROFILTER(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SCHROFILTER,GstSchrofilter))
#define GST_SCHROFILTER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SCHROFILTER,GstSchrofilterClass))
#define GST_IS_SCHROFILTER(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SCHROFILTER))
/* FIX: the parameter was named 'obj' but the body used 'klass', so any
 * use of this macro would fail to compile. */
#define GST_IS_SCHROFILTER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SCHROFILTER))
typedef struct _GstSchrofilter GstSchrofilter;
typedef struct _GstSchrofilterClass GstSchrofilterClass;

struct _GstSchrofilter
{
  GstBaseTransform base_transform;
  /* "wavelet-type" and "level" properties. */
  int wavelet_type;
  int level;
  SchroVideoFormat format;
  SchroFrame *tmp_frame;
  int16_t *tmpbuf;
  int frame_number;
};

struct _GstSchrofilterClass
{
  GstBaseTransformClass parent_class;
};

/* GstSchrofilter signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};
enum
{
  ARG_0,
  ARG_WAVELET_TYPE,
  ARG_LEVEL
      /* FILL ME */
};
/* Public accessor for the element's GType; implementation below. */
GType gst_schrofilter_get_type (void);
/* GObject / GstBaseTransform vfunc implementations, defined below. */
static void gst_schrofilter_base_init (gpointer g_class);
static void gst_schrofilter_class_init (gpointer g_class, gpointer class_data);
static void gst_schrofilter_init (GTypeInstance * instance, gpointer g_class);
static void gst_schrofilter_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_schrofilter_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static GstFlowReturn gst_schrofilter_transform_ip (GstBaseTransform *
    base_transform, GstBuffer * buf);
/* The filter works in place, on planar I420 only. */
static GstStaticPadTemplate gst_schrofilter_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
    );
static GstStaticPadTemplate gst_schrofilter_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
    );
/* Register (once, lazily) and return the GType for the element. */
GType
gst_schrofilter_get_type (void)
{
  static GType filter_type = 0;

  if (filter_type == 0) {
    static const GTypeInfo filter_info = {
      sizeof (GstSchrofilterClass),
      gst_schrofilter_base_init,
      NULL,
      gst_schrofilter_class_init,
      NULL,
      NULL,
      sizeof (GstSchrofilter),
      0,
      gst_schrofilter_init,
    };

    filter_type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
        "GstSchrofilter", &filter_info, 0);
  }
  return filter_type;
}
/* Install pad templates and element metadata on the element class. */
static void
gst_schrofilter_base_init (gpointer g_class)
{
  static GstElementDetails details =
      GST_ELEMENT_DETAILS ("Schroedinger Video Filters",
      "Filter/Effect/Video",
      "Applies a Schroedinger compression pre-filter to video",
      "David Schleef <ds@schleef.org>");
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_schrofilter_src_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_schrofilter_sink_template));
  gst_element_class_set_details (element_class, &details);
}
/* Wire up GObject property handling and the in-place transform vfunc.
 * FIX: removed the 'filter_class' local, which was assigned but never
 * read (unused-variable warning). */
static void
gst_schrofilter_class_init (gpointer g_class, gpointer class_data)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
  GstBaseTransformClass *base_transform_class =
      GST_BASE_TRANSFORM_CLASS (g_class);

  gobject_class->set_property = gst_schrofilter_set_property;
  gobject_class->get_property = gst_schrofilter_get_property;

  g_object_class_install_property (gobject_class, ARG_WAVELET_TYPE,
      g_param_spec_int ("wavelet-type", "wavelet type", "wavelet type",
          0, 4, 0, G_PARAM_READWRITE));
  g_object_class_install_property (gobject_class, ARG_LEVEL,
      g_param_spec_int ("level", "level", "level",
          0, 100, 0, G_PARAM_READWRITE));

  base_transform_class->transform_ip = gst_schrofilter_transform_ip;
}
/* Instance init: nothing to set up beyond zero-initialized defaults. */
static void
gst_schrofilter_init (GTypeInstance * instance, gpointer g_class)
{
  GST_DEBUG ("gst_schrofilter_init");
}
/* GObject set_property vfunc for "wavelet-type" and "level".
 * FIX: warn on unknown property IDs in the default case, consistent
 * with get_property, instead of ignoring them silently. */
static void
gst_schrofilter_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstSchrofilter *src;

  g_return_if_fail (GST_IS_SCHROFILTER (object));
  src = GST_SCHROFILTER (object);

  GST_DEBUG ("gst_schrofilter_set_property");
  switch (prop_id) {
    case ARG_WAVELET_TYPE:
      src->wavelet_type = g_value_get_int (value);
      break;
    case ARG_LEVEL:
      src->level = g_value_get_int (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property vfunc for "wavelet-type" and "level". */
static void
gst_schrofilter_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstSchrofilter *filter;

  g_return_if_fail (GST_IS_SCHROFILTER (object));
  filter = GST_SCHROFILTER (object);

  switch (prop_id) {
    case ARG_WAVELET_TYPE:
      g_value_set_int (value, filter->wavelet_type);
      break;
    case ARG_LEVEL:
      g_value_set_int (value, filter->level);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* In-place transform: wrap the buffer as a SchroFrame and run the
 * low-pass pre-filter over it.
 * FIX: the SchroFrame wrapper was leaked on every buffer — release it
 * with schro_frame_unref.  Also removed the unused 'compress' local. */
static GstFlowReturn
gst_schrofilter_transform_ip (GstBaseTransform * base_transform,
    GstBuffer * buf)
{
  SchroFrame *frame;
  int width, height;

  g_return_val_if_fail (GST_IS_SCHROFILTER (base_transform), GST_FLOW_ERROR);

  gst_structure_get_int (gst_caps_get_structure (buf->caps, 0),
      "width", &width);
  gst_structure_get_int (gst_caps_get_structure (buf->caps, 0),
      "height", &height);

  frame = schro_frame_new_from_data_I420 (GST_BUFFER_DATA (buf), width, height);
  schro_frame_filter_lowpass2 (frame, 5.0);
  //schro_frame_filter_wavelet (frame);
  schro_frame_unref (frame);

  return GST_FLOW_OK;
}

542
ext/cog/gstcogmse.c Normal file
View file

@ -0,0 +1,542 @@
/*
* GStreamer
* Copyright (C) 2007,2009 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/video/video.h>
#include <string.h>
#include <cog-video/cogframe.h>
#include <orc/orc.h>
#include <math.h>
#include "gstcogutils.h"
#define GST_CAT_DEFAULT gst_mse_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
/* Standard GObject cast/type-check boilerplate for GstMSE. */
#define GST_TYPE_MSE (gst_mse_get_type())
#define GST_MSE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MSE,GstMSE))
#define GST_IS_MSE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MSE))
#define GST_MSE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_MSE,GstMSEClass))
#define GST_IS_MSE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_MSE))
#define GST_MSE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_MSE,GstMSEClass))
typedef struct _GstMSE GstMSE;
typedef struct _GstMSEClass GstMSEClass;
typedef void (*GstMSEProcessFunc) (GstMSE *, guint8 *, guint);
/* Element that compares a "test" video stream against a "ref" stream,
 * accumulating per-frame MSE, and passes the test stream through. */
struct _GstMSE
{
  GstElement element;
  /* < private > */
  GstPad *srcpad;
  GstPad *sinkpad_ref;
  GstPad *sinkpad_test;
  /* Pending reference frame, handed from chain_ref to chain_test;
   * protected by 'lock' and signalled via 'cond'. */
  GstBuffer *buffer_ref;
  GMutex *lock;
  GCond *cond;
  gboolean cancel;
  /* Negotiated video format (set in gst_mse_set_caps). */
  GstVideoFormat format;
  int width;
  int height;
  /* Running MSE sums and frame count, read back as PSNR properties. */
  double luma_mse_sum;
  double chroma_mse_sum;
  int n_frames;
};
struct _GstMSEClass
{
  GstElementClass parent;
};
static const GstElementDetails element_details = GST_ELEMENT_DETAILS ("FIXME",
    "Filter/Effect",
    "FIXME example filter",
    "FIXME <fixme@fixme.com>");
/* Read-only PSNR properties. */
enum
{
  PROP_0,
  LUMA_PSNR,
  CHROMA_PSNR
};
#define DEBUG_INIT(bla) \
  GST_DEBUG_CATEGORY_INIT (gst_mse_debug, "mse", 0, "cogmse element");
GST_BOILERPLATE_FULL (GstMSE, gst_mse, GstElement,
    GST_TYPE_ELEMENT, DEBUG_INIT);
static void gst_mse_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_mse_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static GstFlowReturn gst_mse_chain_test (GstPad * pad, GstBuffer * buffer);
static GstFlowReturn gst_mse_chain_ref (GstPad * pad, GstBuffer * buffer);
static gboolean gst_mse_sink_event (GstPad * pad, GstEvent * event);
static void gst_mse_reset (GstMSE * filter);
//static GstPadLinkReturn gst_mse_link_src (GstPad *pad, GstPad *peer);
static GstCaps *gst_mse_getcaps (GstPad * pad);
static gboolean gst_mse_set_caps (GstPad * pad, GstCaps * outcaps);
static void gst_mse_finalize (GObject * object);
static void cog_frame_mse (CogFrame * a, CogFrame * b, double *mse);
static double mse_to_db (double mse, gboolean is_chroma);
/* All three pads share the same set of supported YUV formats. */
static GstStaticPadTemplate gst_framestore_sink_ref_template =
GST_STATIC_PAD_TEMPLATE ("sink_ref",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{I420,YUY2,AYUV}"))
    );
static GstStaticPadTemplate gst_framestore_sink_test_template =
GST_STATIC_PAD_TEMPLATE ("sink_test",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{I420,YUY2,AYUV}"))
    );
static GstStaticPadTemplate gst_framestore_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{I420,YUY2,AYUV}"))
    );
/* Install the three pad templates (two inputs, one output) and the
 * element metadata. */
static void
gst_mse_base_init (gpointer klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_framestore_sink_ref_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_framestore_sink_test_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_framestore_src_template));
  gst_element_class_set_details (element_class, &element_details);
}
/* Wire up GObject property handling and install the two read-only
 * PSNR properties (reported in dB). */
static void
gst_mse_class_init (GstMSEClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;

  gobject_class->set_property = gst_mse_set_property;
  gobject_class->get_property = gst_mse_get_property;
  gobject_class->finalize = gst_mse_finalize;

  g_object_class_install_property (gobject_class, LUMA_PSNR,
      g_param_spec_double ("luma-psnr", "luma-psnr", "luma-psnr",
          0, 70, 40, G_PARAM_READABLE));
  g_object_class_install_property (gobject_class, CHROMA_PSNR,
      g_param_spec_double ("chroma-psnr", "chroma-psnr", "chroma-psnr",
          0, 70, 40, G_PARAM_READABLE));
}
/* Instance init: create the pads from the templates installed in
 * base_init and hook up chain/event/caps handlers, then set up the
 * lock/cond pair used to hand reference buffers between the two
 * streaming threads. */
static void
gst_mse_init (GstMSE * filter, GstMSEClass * klass)
{
  gst_element_create_all_pads (GST_ELEMENT (filter));
  /* Output pad. */
  filter->srcpad = gst_element_get_pad (GST_ELEMENT (filter), "src");
  //gst_pad_set_link_function (filter->srcpad, gst_mse_link_src);
  gst_pad_set_getcaps_function (filter->srcpad, gst_mse_getcaps);
  /* Reference stream input. */
  filter->sinkpad_ref = gst_element_get_pad (GST_ELEMENT (filter), "sink_ref");
  gst_pad_set_chain_function (filter->sinkpad_ref, gst_mse_chain_ref);
  gst_pad_set_event_function (filter->sinkpad_ref, gst_mse_sink_event);
  gst_pad_set_getcaps_function (filter->sinkpad_ref, gst_mse_getcaps);
  /* Test stream input; its caps drive format/width/height. */
  filter->sinkpad_test =
      gst_element_get_pad (GST_ELEMENT (filter), "sink_test");
  gst_pad_set_chain_function (filter->sinkpad_test, gst_mse_chain_test);
  gst_pad_set_event_function (filter->sinkpad_test, gst_mse_sink_event);
  gst_pad_set_getcaps_function (filter->sinkpad_test, gst_mse_getcaps);
  gst_pad_set_setcaps_function (filter->sinkpad_test, gst_mse_set_caps);
  gst_mse_reset (filter);
  filter->cond = g_cond_new ();
  filter->lock = g_mutex_new ();
}
/* GObject finalize: release synchronization primitives and any pending
 * reference buffer.
 * FIX: chain up to the parent class finalize (was missing — GObject
 * requires finalize implementations to chain up), and drop a
 * still-queued buffer_ref so it is not leaked at destruction. */
static void
gst_mse_finalize (GObject * object)
{
  GstMSE *fs = GST_MSE (object);

  if (fs->buffer_ref) {
    gst_buffer_unref (fs->buffer_ref);
    fs->buffer_ref = NULL;
  }
  g_mutex_free (fs->lock);
  g_cond_free (fs->cond);

  /* parent_class is provided by GST_BOILERPLATE_FULL. */
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* getcaps: a pad's caps are its template caps intersected with what the
 * peers of the two *other* pads accept, so all three pads agree.
 * FIX: the sinkpad_test branch queried sinkpad_ref's peer a second time
 * instead of sinkpad_test's, so the test pad's peer was never consulted. */
static GstCaps *
gst_mse_getcaps (GstPad * pad)
{
  GstMSE *fs;
  GstCaps *caps;
  GstCaps *icaps;
  GstCaps *peercaps;

  fs = GST_MSE (gst_pad_get_parent (pad));

  caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));

  if (pad != fs->srcpad) {
    peercaps = gst_pad_peer_get_caps (fs->srcpad);
    if (peercaps) {
      icaps = gst_caps_intersect (caps, peercaps);
      gst_caps_unref (caps);
      gst_caps_unref (peercaps);
      caps = icaps;
    }
  }
  if (pad != fs->sinkpad_ref) {
    peercaps = gst_pad_peer_get_caps (fs->sinkpad_ref);
    if (peercaps) {
      icaps = gst_caps_intersect (caps, peercaps);
      gst_caps_unref (caps);
      gst_caps_unref (peercaps);
      caps = icaps;
    }
  }
  if (pad != fs->sinkpad_test) {
    peercaps = gst_pad_peer_get_caps (fs->sinkpad_test);
    if (peercaps) {
      icaps = gst_caps_intersect (caps, peercaps);
      gst_caps_unref (caps);
      gst_caps_unref (peercaps);
      caps = icaps;
    }
  }

  gst_object_unref (fs);

  return caps;
}
/* setcaps on sink_test: record the negotiated format and dimensions.
 * FIX: propagate the result of gst_video_format_parse_caps instead of
 * returning TRUE even when the caps could not be parsed. */
static gboolean
gst_mse_set_caps (GstPad * pad, GstCaps * caps)
{
  GstMSE *fs;
  gboolean ret;

  fs = GST_MSE (gst_pad_get_parent (pad));
  ret = gst_video_format_parse_caps (caps, &fs->format, &fs->width,
      &fs->height);
  gst_object_unref (fs);

  return ret;
}
/* GObject set_property vfunc: both PSNR properties are read-only, so
 * every ID is invalid here. */
static void
gst_mse_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property vfunc: report accumulated PSNR in dB.
 * FIX: guard against n_frames == 0, where the original divided by zero
 * and reported NaN (outside the property's declared 0..70 range). */
static void
gst_mse_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstMSE *fs = GST_MSE (object);

  switch (prop_id) {
    case LUMA_PSNR:
      g_value_set_double (value, fs->n_frames > 0 ?
          mse_to_db (fs->luma_mse_sum / fs->n_frames, FALSE) : 0.0);
      break;
    case CHROMA_PSNR:
      g_value_set_double (value, fs->n_frames > 0 ?
          mse_to_db (fs->chroma_mse_sum / fs->n_frames, TRUE) : 0.0);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Clear the accumulated statistics and drop any queued reference
 * buffer. */
static void
gst_mse_reset (GstMSE * fs)
{
  fs->luma_mse_sum = 0;
  fs->chroma_mse_sum = 0;
  fs->n_frames = 0;

  if (fs->buffer_ref != NULL) {
    gst_buffer_unref (fs->buffer_ref);
    fs->buffer_ref = NULL;
  }
}
/* Chain function for the reference pad: queue the buffer for chain_test,
 * blocking while a previous reference buffer is still pending.
 * FIX: on the cancellation path the incoming buffer and the element
 * reference taken by gst_pad_get_parent were both leaked. */
static GstFlowReturn
gst_mse_chain_ref (GstPad * pad, GstBuffer * buffer)
{
  GstMSE *fs;

  fs = GST_MSE (gst_pad_get_parent (pad));
  GST_DEBUG ("chain ref");

  g_mutex_lock (fs->lock);
  while (fs->buffer_ref) {
    GST_DEBUG ("waiting for ref buffer clear");
    g_cond_wait (fs->cond, fs->lock);
    if (fs->cancel) {
      g_mutex_unlock (fs->lock);
      gst_buffer_unref (buffer);
      gst_object_unref (fs);
      return GST_FLOW_WRONG_STATE;
    }
  }
  /* Hand ownership of the buffer to chain_test and wake it up. */
  fs->buffer_ref = buffer;
  g_cond_signal (fs->cond);
  g_mutex_unlock (fs->lock);

  gst_object_unref (fs);

  return GST_FLOW_OK;
}
/* Chain function for the test pad: wait for the matching reference
 * buffer, compute per-component MSE, accumulate, and push the test
 * buffer downstream.
 * FIX: on the cancellation path the incoming buffer and the element
 * reference taken by gst_pad_get_parent were both leaked. */
static GstFlowReturn
gst_mse_chain_test (GstPad * pad, GstBuffer * buffer)
{
  GstMSE *fs;
  GstFlowReturn ret;
  GstBuffer *buffer_ref;

  fs = GST_MSE (gst_pad_get_parent (pad));
  GST_DEBUG_OBJECT (fs, "chain test");

  g_mutex_lock (fs->lock);
  while (fs->buffer_ref == NULL) {
    GST_DEBUG_OBJECT (fs, "waiting for ref buffer");
    g_cond_wait (fs->cond, fs->lock);
    if (fs->cancel) {
      g_mutex_unlock (fs->lock);
      gst_buffer_unref (buffer);
      gst_object_unref (fs);
      return GST_FLOW_WRONG_STATE;
    }
  }
  /* Take ownership of the queued reference buffer; wake up chain_ref. */
  buffer_ref = fs->buffer_ref;
  fs->buffer_ref = NULL;
  g_cond_signal (fs->cond);
  g_mutex_unlock (fs->lock);

  if (1) {
    CogFrame *frame_ref;
    CogFrame *frame_test;
    double mse[3];

    frame_ref = gst_cog_buffer_wrap (gst_buffer_ref (buffer_ref), fs->format,
        fs->width, fs->height);
    frame_test = gst_cog_buffer_wrap (gst_buffer_ref (buffer), fs->format,
        fs->width, fs->height);

    cog_frame_mse (frame_ref, frame_test, mse);

    GST_INFO ("mse %g %g %g", mse_to_db (mse[0], FALSE),
        mse_to_db (mse[1], TRUE), mse_to_db (mse[2], TRUE));

    fs->luma_mse_sum += mse[0];
    fs->chroma_mse_sum += 0.5 * (mse[1] + mse[2]);
    fs->n_frames++;

    cog_frame_unref (frame_ref);
    cog_frame_unref (frame_test);
  }

  ret = gst_pad_push (fs->srcpad, buffer);
  gst_buffer_unref (buffer_ref);

  gst_object_unref (fs);

  return ret;
}
/* Sink event handler: log interesting events, then forward everything
 * to the source pad.
 * FIX: the element reference taken by gst_pad_get_parent was never
 * released (leaked on every event), and gint64 values were printed with
 * %lld, which is wrong where long long is not 64 bits — use
 * G_GINT64_FORMAT.  Also forward the push-event result instead of an
 * unconditional TRUE. */
static gboolean
gst_mse_sink_event (GstPad * pad, GstEvent * event)
{
  GstMSE *fs;
  gboolean ret;

  fs = GST_MSE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      double rate;
      double applied_rate;
      GstFormat format;
      gint64 start, stop, position;

      gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
          &format, &start, &stop, &position);
      GST_DEBUG ("new_segment %d %g %g %d %" G_GINT64_FORMAT
          " %" G_GINT64_FORMAT " %" G_GINT64_FORMAT,
          update, rate, applied_rate, format, start, stop, position);
    }
      break;
    case GST_EVENT_FLUSH_START:
      GST_DEBUG ("flush start");
      break;
    case GST_EVENT_FLUSH_STOP:
      GST_DEBUG ("flush stop");
      break;
    default:
      break;
  }

  ret = gst_pad_push_event (fs->srcpad, event);
  gst_object_unref (fs);

  return ret;
}
/* Sum of squared differences between two 8-bit rows of length n,
 * computed with a lazily-built Orc program.
 *
 * NOTE(review): the one-time construction/compilation of the static
 * OrcProgram is not guarded by a lock; concurrent first calls from two
 * streaming threads could race — confirm whether callers serialize this.
 */
static int
sum_square_diff_u8 (uint8_t * s1, uint8_t * s2, int n)
{
#if 0
  /* Reference C implementation of the Orc program below. */
  int sum = 0;
  int i;
  int x;

  for (i = 0; i < n; i++) {
    x = s1[i] - s2[i];
    sum += x * x;
  }
  d_1[0] = sum;
#endif
  static OrcProgram *p = NULL;
  OrcExecutor *ex;
  int val;

  if (p == NULL) {
    OrcCompileResult ret;

    /* Accumulator program: widen both byte sources to words, subtract,
     * square, widen to long, and accumulate into a1. */
    p = orc_program_new_ass (4, 1, 1);
    orc_program_add_temporary (p, 2, "t1");
    orc_program_add_temporary (p, 2, "t2");
    orc_program_add_temporary (p, 4, "t3");

    orc_program_append_ds_str (p, "convubw", "t1", "s1");
    orc_program_append_ds_str (p, "convubw", "t2", "s2");
    orc_program_append_str (p, "subw", "t1", "t1", "t2");
    orc_program_append_str (p, "mullw", "t1", "t1", "t1");
    orc_program_append_ds_str (p, "convuwl", "t3", "t1");
    orc_program_append_ds_str (p, "accl", "a1", "t3");

    ret = orc_program_compile (p);
    if (!ORC_COMPILE_RESULT_IS_SUCCESSFUL (ret)) {
      /* Returning 0 silently skews the MSE; at least log the failure. */
      GST_ERROR ("Orc compiler failure");
      return 0;
    }
  }

  ex = orc_executor_new (p);
  orc_executor_set_n (ex, n);
  orc_executor_set_array_str (ex, "s1", s1);
  orc_executor_set_array_str (ex, "s2", s2);

  orc_executor_run (ex);
  val = orc_executor_get_accumulator (ex, 0);
  orc_executor_free (ex);

  return val;
}
/* Total squared error between two equally-sized frame components,
 * accumulated row by row. */
static double
cog_frame_component_squared_error (CogFrameData * a, CogFrameData * b)
{
  double total = 0;
  int row;

  COG_ASSERT (a->width == b->width);
  COG_ASSERT (a->height == b->height);

  for (row = 0; row < a->height; row++) {
    total += sum_square_diff_u8 (COG_FRAME_DATA_GET_LINE (a, row),
        COG_FRAME_DATA_GET_LINE (b, row), a->width);
  }
  return total;
}
/* Per-component mean squared error between two frames; mse[0..2] holds
 * Y, U, V respectively. */
static void
cog_frame_mse (CogFrame * a, CogFrame * b, double *mse)
{
  int k;

  for (k = 0; k < 3; k++) {
    double sum = cog_frame_component_squared_error (&a->components[k],
        &b->components[k]);
    double n = a->components[k].width * a->components[k].height;

    mse[k] = sum / n;
  }
}
/* Convert an MSE value to decibels relative to the nominal peak of
 * 8-bit BT.601 video: 219 steps for luma, 224 for chroma.
 * FIX (idiom): use log10() directly instead of log(x)/log(10). */
static double
mse_to_db (double mse, gboolean is_chroma)
{
  const double peak = is_chroma ? 224.0 : 219.0;

  return 10.0 * log10 (mse / (peak * peak));
}

732
ext/cog/gstcogscale.c Normal file
View file

@ -0,0 +1,732 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) 2005 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-videoscale
* @see_also: videorate, ffmpegcolorspace
*
* <refsect2>
* <para>
* This element resizes video frames. By default the element will try to
* negotiate to the same size on the source and sinkpad so that no scaling
* is needed. It is therefore safe to insert this element in a pipeline to
* get more robust behaviour without any cost if no scaling is needed.
* </para>
* <para>
* This element supports a wide range of color spaces including various YUV and
* RGB formats and is therefore generally able to operate anywhere in a
* pipeline.
* </para>
* <title>Example pipelines</title>
* <para>
* <programlisting>
* gst-launch -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! ffmpegcolorspace ! videoscale ! ximagesink
* </programlisting>
* Decode an Ogg/Theora and display the video using ximagesink. Since
* ximagesink cannot perform scaling, the video scaling will be performed by
* videoscale when you resize the video window.
* To create the test Ogg/Theora file refer to the documentation of theoraenc.
* </para>
* <para>
* <programlisting>
* gst-launch -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoscale ! video/x-raw-yuv, width=50 ! xvimagesink
* </programlisting>
* Decode an Ogg/Theora and display the video using xvimagesink with a width of
* 50.
* </para>
* </refsect2>
*
* Last reviewed on 2006-03-02 (0.10.4)
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/base/gstbasetransform.h>
#include <cog/cog.h>
#include <cog-video/cogvirtframe.h>
#include "gstcogutils.h"
GST_DEBUG_CATEGORY_STATIC (cog_scale_debug);
#define GST_CAT_DEFAULT cog_scale_debug
/* Standard GObject cast/type-check boilerplate for GstCogScale. */
#define GST_TYPE_COG_SCALE \
  (gst_cog_scale_get_type())
#define GST_COG_SCALE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_COG_SCALE,GstCogScale))
#define GST_COG_SCALE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_COG_SCALE,GstCogScaleClass))
#define GST_IS_COG_SCALE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_COG_SCALE))
#define GST_IS_COG_SCALE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_COG_SCALE))
/**
 * GstCogScaleMethod:
 * @GST_COG_SCALE_NEAREST: use nearest neighbour scaling (fast and ugly)
 * @GST_COG_SCALE_BILINEAR: use bilinear scaling (slower but prettier).
 * @GST_COG_SCALE_4TAP: use a 4-tap filter for scaling (slow).
 *
 * The videoscale method to use.
 */
typedef enum
{
  GST_COG_SCALE_NEAREST,
  GST_COG_SCALE_BILINEAR,
  GST_COG_SCALE_4TAP
} GstCogScaleMethod;
typedef struct _GstCogScale GstCogScale;
typedef struct _GstCogScaleClass GstCogScaleClass;
/**
 * GstCogScale:
 *
 * Opaque data structure
 */
struct _GstCogScale
{
  GstBaseTransform element;
  /* "method" property value. */
  GstCogScaleMethod method;
  /* negotiated stuff */
  GstVideoFormat format;
  guint src_size;
  guint dest_size;
  gint to_width;
  gint to_height;
  gint from_width;
  gint from_height;
  /*< private > */
};
struct _GstCogScaleClass
{
  GstBaseTransformClass parent_class;
};
GType gst_cog_scale_get_type (void);
/* elementfactory information */
static const GstElementDetails cog_scale_details =
GST_ELEMENT_DETAILS ("Video scaler",
"Filter/Effect/Video",
"Resizes video",
"Wim Taymans <wim.taymans@chello.be>");
#define DEFAULT_PROP_METHOD GST_COG_SCALE_NEAREST
enum
{
PROP_0,
PROP_METHOD
/* FILL ME */
};
/* can't handle width/height of 1 yet, since we divide a lot by (n-1) */
#undef GST_VIDEO_SIZE_RANGE
#define GST_VIDEO_SIZE_RANGE "(int) [ 2, MAX ]"
#define TEMPLATE_CAPS \
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, AYUV, Y42B }") ";" \
GST_VIDEO_CAPS_ARGB)
#if 0
/* not supported */
GST_VIDEO_CAPS_RGBx
GST_VIDEO_CAPS_BGRx
GST_VIDEO_CAPS_xRGB
GST_VIDEO_CAPS_xBGR
GST_VIDEO_CAPS_RGBA
GST_VIDEO_CAPS_BGRA
GST_VIDEO_CAPS_ABGR GST_VIDEO_CAPS_RGB GST_VIDEO_CAPS_BGR
GST_VIDEO_CAPS_YUV ("{ Y41B, YVYU }")
#endif
#define GST_TYPE_COG_SCALE_METHOD (gst_cog_scale_method_get_type())
/* Register (once) and return the GEnum GType for #GstCogScaleMethod. */
static GType
gst_cog_scale_method_get_type (void)
{
  static GType method_type = 0;
  static const GEnumValue methods[] = {
    {GST_COG_SCALE_NEAREST, "Nearest Neighbour", "nearest-neighbour"},
    {GST_COG_SCALE_BILINEAR, "Bilinear", "bilinear"},
    {GST_COG_SCALE_4TAP, "4-tap", "4-tap"},
    {0, NULL, NULL},
  };

  if (method_type == 0)
    method_type = g_enum_register_static ("GstCogScaleMethod", methods);

  return method_type;
}
static GstStaticPadTemplate gst_cog_scale_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
TEMPLATE_CAPS);
static GstStaticPadTemplate gst_cog_scale_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
TEMPLATE_CAPS);
static void gst_cog_scale_base_init (gpointer g_class);
static void gst_cog_scale_class_init (GstCogScaleClass * klass);
static void gst_cog_scale_init (GstCogScale * videoscale);
static void gst_cog_scale_finalize (GstCogScale * videoscale);
static gboolean gst_cog_scale_src_event (GstBaseTransform * trans,
GstEvent * event);
/* base transform vmethods */
static GstCaps *gst_cog_scale_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps);
static gboolean gst_cog_scale_set_caps (GstBaseTransform * trans,
GstCaps * in, GstCaps * out);
static gboolean gst_cog_scale_get_unit_size (GstBaseTransform * trans,
GstCaps * caps, guint * size);
static GstFlowReturn gst_cog_scale_transform (GstBaseTransform * trans,
GstBuffer * in, GstBuffer * out);
static void gst_cog_scale_fixate_caps (GstBaseTransform * base,
GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
static void gst_cog_scale_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_cog_scale_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstElementClass *parent_class = NULL;
GType
gst_cog_scale_get_type (void)
{
static GType cog_scale_type = 0;
if (!cog_scale_type) {
static const GTypeInfo cog_scale_info = {
sizeof (GstCogScaleClass),
gst_cog_scale_base_init,
NULL,
(GClassInitFunc) gst_cog_scale_class_init,
NULL,
NULL,
sizeof (GstCogScale),
0,
(GInstanceInitFunc) gst_cog_scale_init,
};
cog_scale_type =
g_type_register_static (GST_TYPE_BASE_TRANSFORM, "GstCogScale",
&cog_scale_info, 0);
GST_DEBUG_CATEGORY_INIT (cog_scale_debug, "cogscale", 0, "Cog scale");
}
return cog_scale_type;
}
/* GObject base-init: attach the element details and both always-pad
 * templates to the element class. */
static void
gst_cog_scale_base_init (gpointer g_class)
{
  GstElementClass *eclass = GST_ELEMENT_CLASS (g_class);

  gst_element_class_set_details (eclass, &cog_scale_details);

  gst_element_class_add_pad_template (eclass,
      gst_static_pad_template_get (&gst_cog_scale_sink_template));
  gst_element_class_add_pad_template (eclass,
      gst_static_pad_template_get (&gst_cog_scale_src_template));
}
/* GObject class-init: install the "method" property and hook up the
 * GstBaseTransform virtual methods. */
static void
gst_cog_scale_class_init (GstCogScaleClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;

  gobject_class->finalize = (GObjectFinalizeFunc) gst_cog_scale_finalize;
  gobject_class->set_property = gst_cog_scale_set_property;
  gobject_class->get_property = gst_cog_scale_get_property;

  g_object_class_install_property (gobject_class, PROP_METHOD,
      g_param_spec_enum ("method", "method", "method",
          GST_TYPE_COG_SCALE_METHOD, DEFAULT_PROP_METHOD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_cog_scale_transform_caps);
  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_cog_scale_set_caps);
  trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_cog_scale_get_unit_size);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_cog_scale_transform);
  trans_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_cog_scale_fixate_caps);
  trans_class->src_event = GST_DEBUG_FUNCPTR (gst_cog_scale_src_event);
  /* identical in/out caps means no scaling is needed at all */
  trans_class->passthrough_on_same_caps = TRUE;

  parent_class = g_type_class_peek_parent (klass);
}
/* Instance init: start with the default method and let basetransform do
 * QoS frame dropping for us. */
static void
gst_cog_scale_init (GstCogScale * scale)
{
  scale->method = DEFAULT_PROP_METHOD;
  gst_base_transform_set_qos_enabled (GST_BASE_TRANSFORM (scale), TRUE);
}
/* GObject finalize: no per-instance resources to release, just chain up. */
static void
gst_cog_scale_finalize (GstCogScale * scale)
{
  G_OBJECT_CLASS (parent_class)->finalize (G_OBJECT (scale));
}
/* GObject set_property: "method" is written under the object lock because
 * the streaming thread reads it in transform_caps/transform. */
static void
gst_cog_scale_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCogScale *scale = GST_COG_SCALE (object);

  switch (prop_id) {
    case PROP_METHOD:
      GST_OBJECT_LOCK (scale);
      scale->method = g_value_get_enum (value);
      GST_OBJECT_UNLOCK (scale);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property: read "method" under the object lock (see
 * set_property for the locking rationale). */
static void
gst_cog_scale_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstCogScale *scale = GST_COG_SCALE (object);

  switch (prop_id) {
    case PROP_METHOD:
      GST_OBJECT_LOCK (scale);
      g_value_set_enum (value, scale->method);
      GST_OBJECT_UNLOCK (scale);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GstBaseTransform::transform_caps
 * Given fixed-format caps on one pad, report what the other pad can
 * handle: same format, any width/height, and the input PAR first with a
 * full fraction range appended as a second choice.  The 4-tap method is
 * only refused here for non-I420/YV12 formats (empty caps returned).
 * NOTE(review): the width/height range opens at 1 although the
 * GST_VIDEO_SIZE_RANGE override above says [2, MAX] — confirm intended. */
static GstCaps *
gst_cog_scale_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstCogScale *videoscale;
  GstCaps *ret;
  GstStructure *structure;
  const GValue *par;
  gint method;

  /* this function is always called with a simple caps */
  g_return_val_if_fail (GST_CAPS_IS_SIMPLE (caps), NULL);

  videoscale = GST_COG_SCALE (trans);

  /* snapshot the method property under the object lock */
  GST_OBJECT_LOCK (videoscale);
  method = videoscale->method;
  GST_OBJECT_UNLOCK (videoscale);

  structure = gst_caps_get_structure (caps, 0);

  /* check compatibility of format and method before we copy the input caps */
  if (method == GST_COG_SCALE_4TAP) {
    guint32 fourcc;

    if (!gst_structure_has_name (structure, "video/x-raw-yuv"))
      goto method_not_implemented_for_format;
    if (!gst_structure_get_fourcc (structure, "format", &fourcc))
      goto method_not_implemented_for_format;
    if (fourcc != GST_MAKE_FOURCC ('I', '4', '2', '0') &&
        fourcc != GST_MAKE_FOURCC ('Y', 'V', '1', '2'))
      goto method_not_implemented_for_format;
  }

  ret = gst_caps_copy (caps);
  structure = gst_caps_get_structure (ret, 0);
  gst_structure_set (structure,
      "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
      "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

  /* if pixel aspect ratio, make a range of it */
  if ((par = gst_structure_get_value (structure, "pixel-aspect-ratio"))) {
    GstCaps *copy;
    GstStructure *cstruct;

    /* copy input PAR first, this is the prefered PAR */
    gst_structure_set_value (structure, "pixel-aspect-ratio", par);

    /* then make a copy with a fraction range as a second choice */
    copy = gst_caps_copy (ret);
    cstruct = gst_caps_get_structure (copy, 0);
    gst_structure_set (cstruct,
        "pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);

    /* and append */
    gst_caps_append (ret, copy);
  }

  GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);

  return ret;

method_not_implemented_for_format:
  {
    GST_DEBUG_OBJECT (trans, "method %d not implemented for format %"
        GST_PTR_FORMAT ", returning empty caps", method, caps);
    return gst_caps_new_empty ();
  }
}
/* GstBaseTransform::set_caps
 * Cache negotiated format plus input/output dimensions and per-frame
 * buffer sizes.  Only the input format is stored — in and out caps are
 * expected to differ only in dimensions (and PAR). */
static gboolean
gst_cog_scale_set_caps (GstBaseTransform * trans, GstCaps * in, GstCaps * out)
{
  GstCogScale *videoscale;
  gboolean ret;

  videoscale = GST_COG_SCALE (trans);

  ret = gst_video_format_parse_caps (in, &videoscale->format,
      &videoscale->from_width, &videoscale->from_height);
  ret &= gst_video_format_parse_caps (out, NULL,
      &videoscale->to_width, &videoscale->to_height);
  if (!ret)
    goto done;

  videoscale->src_size = gst_video_format_get_size (videoscale->format,
      videoscale->from_width, videoscale->from_height);
  videoscale->dest_size = gst_video_format_get_size (videoscale->format,
      videoscale->to_width, videoscale->to_height);

  /* FIXME: par */
  GST_DEBUG_OBJECT (videoscale, "from=%dx%d, size %d -> to=%dx%d, size %d",
      videoscale->from_width, videoscale->from_height, videoscale->src_size,
      videoscale->to_width, videoscale->to_height, videoscale->dest_size);

done:
  return ret;
}
/* GstBaseTransform::get_unit_size
 * One unit is a whole video frame; its byte size follows from the format
 * and dimensions in @caps.  Returns FALSE for unparseable caps.
 * (Dropped the unused GstCogScale local — it was a dead store.) */
static gboolean
gst_cog_scale_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
    guint * size)
{
  GstVideoFormat format;
  gint width, height;

  g_assert (size);

  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    return FALSE;

  *size = gst_video_format_get_size (format, width, height);

  return TRUE;
}
/* GstBaseTransform::fixate_caps
 * Fixate width/height (and PAR) in @othercaps given fully-fixed @caps,
 * preserving the display aspect ratio where both sides carry a PAR and
 * otherwise just copying the input dimensions. */
static void
gst_cog_scale_fixate_caps (GstBaseTransform * base, GstPadDirection direction,
    GstCaps * caps, GstCaps * othercaps)
{
  GstStructure *ins, *outs;
  const GValue *from_par, *to_par;

  g_return_if_fail (gst_caps_is_fixed (caps));

  GST_DEBUG_OBJECT (base, "trying to fixate othercaps %" GST_PTR_FORMAT
      " based on caps %" GST_PTR_FORMAT, othercaps, caps);

  ins = gst_caps_get_structure (caps, 0);
  outs = gst_caps_get_structure (othercaps, 0);

  from_par = gst_structure_get_value (ins, "pixel-aspect-ratio");
  to_par = gst_structure_get_value (outs, "pixel-aspect-ratio");

  /* we have both PAR but they might not be fixated */
  if (from_par && to_par) {
    gint from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d;
    gint count = 0, w = 0, h = 0;
    guint num, den;

    /* from_par should be fixed */
    g_return_if_fail (gst_value_is_fixed (from_par));

    from_par_n = gst_value_get_fraction_numerator (from_par);
    from_par_d = gst_value_get_fraction_denominator (from_par);

    /* fixate the out PAR */
    if (!gst_value_is_fixed (to_par)) {
      /* prefer the input PAR for the output */
      GST_DEBUG_OBJECT (base, "fixating to_par to %dx%d", from_par_n,
          from_par_d);
      gst_structure_fixate_field_nearest_fraction (outs, "pixel-aspect-ratio",
          from_par_n, from_par_d);
    }

    to_par_n = gst_value_get_fraction_numerator (to_par);
    to_par_d = gst_value_get_fraction_denominator (to_par);

    /* if both width and height are already fixed, we can't do anything
     * about it anymore */
    if (gst_structure_get_int (outs, "width", &w))
      ++count;
    if (gst_structure_get_int (outs, "height", &h))
      ++count;
    if (count == 2) {
      GST_DEBUG_OBJECT (base, "dimensions already set to %dx%d, not fixating",
          w, h);
      return;
    }

    gst_structure_get_int (ins, "width", &from_w);
    gst_structure_get_int (ins, "height", &from_h);

    /* num/den is the display aspect ratio the output must respect */
    if (!gst_video_calculate_display_ratio (&num, &den, from_w, from_h,
            from_par_n, from_par_d, to_par_n, to_par_d)) {
      GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
          ("Error calculating the output scaled size - integer overflow"));
      return;
    }

    GST_DEBUG_OBJECT (base,
        "scaling input with %dx%d and PAR %d/%d to output PAR %d/%d",
        from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d);
    GST_DEBUG_OBJECT (base, "resulting output should respect ratio of %d/%d",
        num, den);

    /* now find a width x height that respects this display ratio.
     * prefer those that have one of w/h the same as the incoming video
     * using wd / hd = num / den */

    /* if one of the output width or height is fixed, we work from there */
    if (h) {
      GST_DEBUG_OBJECT (base, "height is fixed,scaling width");
      w = (guint) gst_util_uint64_scale_int (h, num, den);
    } else if (w) {
      GST_DEBUG_OBJECT (base, "width is fixed, scaling height");
      h = (guint) gst_util_uint64_scale_int (w, den, num);
    } else {
      /* none of width or height is fixed, figure out both of them based only on
       * the input width and height */
      /* check hd / den is an integer scale factor, and scale wd with the PAR */
      if (from_h % den == 0) {
        GST_DEBUG_OBJECT (base, "keeping video height");
        h = from_h;
        w = (guint) gst_util_uint64_scale_int (h, num, den);
      } else if (from_w % num == 0) {
        GST_DEBUG_OBJECT (base, "keeping video width");
        w = from_w;
        h = (guint) gst_util_uint64_scale_int (w, den, num);
      } else {
        GST_DEBUG_OBJECT (base, "approximating but keeping video height");
        h = from_h;
        w = (guint) gst_util_uint64_scale_int (h, num, den);
      }
    }
    GST_DEBUG_OBJECT (base, "scaling to %dx%d", w, h);

    /* now fixate */
    gst_structure_fixate_field_nearest_int (outs, "width", w);
    gst_structure_fixate_field_nearest_int (outs, "height", h);
  } else {
    gint width, height;

    /* no PAR information on one side: just mirror the input dimensions */
    if (gst_structure_get_int (ins, "width", &width)) {
      if (gst_structure_has_field (outs, "width")) {
        gst_structure_fixate_field_nearest_int (outs, "width", width);
      }
    }
    if (gst_structure_get_int (ins, "height", &height)) {
      if (gst_structure_has_field (outs, "height")) {
        gst_structure_fixate_field_nearest_int (outs, "height", height);
      }
    }
  }

  GST_DEBUG_OBJECT (base, "fixated othercaps to %" GST_PTR_FORMAT, othercaps);
}
/* Dead code inherited from videoscale: VSImage-based plane setup.  Cog
 * frames carry their own plane layout, so this is unused here. */
#if 0
static gboolean
gst_cog_scale_prepare_image (gint format, GstBuffer * buf,
    VSImage * img, VSImage * img_u, VSImage * img_v)
{
  gboolean res = TRUE;

  img->pixels = GST_BUFFER_DATA (buf);

  switch (format) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      img_u->pixels = img->pixels + GST_ROUND_UP_2 (img->height) * img->stride;
      img_u->height = GST_ROUND_UP_2 (img->height) / 2;
      img_u->width = GST_ROUND_UP_2 (img->width) / 2;
      img_u->stride = GST_ROUND_UP_4 (img_u->width);
      memcpy (img_v, img_u, sizeof (*img_v));
      img_v->pixels = img_u->pixels + img_u->height * img_u->stride;
      break;
    default:
      break;
  }
  return res;
}
#endif
/* GstBaseTransform::transform
 * Scale one frame from @in into @out by building a cog virtual-frame
 * pipeline: unpack, repeated 2x downsampling while the source is at
 * least twice the target size, then a final resample to the exact
 * target, then repack for packed formats.
 * NOTE(review): @method is read but never used in the live code path
 * (only in the #if 0 error blocks below) — the scaling method property
 * currently has no effect here; confirm whether that is intended.
 * NOTE(review): horiz_downsample gets second argument 3 while
 * vert_downsample gets 2 — the meaning of this argument is defined in
 * the cog library, not visible here; verify against cogvirtframe.h. */
static GstFlowReturn
gst_cog_scale_transform (GstBaseTransform * trans, GstBuffer * in,
    GstBuffer * out)
{
  GstCogScale *videoscale;
  GstFlowReturn ret = GST_FLOW_OK;
  CogFrame *outframe;
  CogFrame *frame;
  gint method;
  int w, h;

  videoscale = GST_COG_SCALE (trans);

  GST_OBJECT_LOCK (videoscale);
  method = videoscale->method;
  GST_OBJECT_UNLOCK (videoscale);

  /* the cog frames take ownership of the refs we add here */
  frame = gst_cog_buffer_wrap (gst_buffer_ref (in), videoscale->format,
      videoscale->from_width, videoscale->from_height);
  outframe = gst_cog_buffer_wrap (gst_buffer_ref (out), videoscale->format,
      videoscale->to_width, videoscale->to_height);

  frame = cog_virt_frame_new_unpack (frame);

  /* halve each dimension while it is still >= 2x the target size */
  w = videoscale->from_width;
  h = videoscale->from_height;
  while (w >= 2 * videoscale->to_width || h >= 2 * videoscale->to_height) {
    if (w >= 2 * videoscale->to_width) {
      frame = cog_virt_frame_new_horiz_downsample (frame, 3);
      w /= 2;
    }
    if (h >= 2 * videoscale->to_height) {
      frame = cog_virt_frame_new_vert_downsample (frame, 2);
      h /= 2;
    }
  }

  /* final non-power-of-two resample to the exact target size */
  if (w != videoscale->to_width) {
    frame = cog_virt_frame_new_horiz_resample (frame, videoscale->to_width);
  }
  if (h != videoscale->to_height) {
    frame = cog_virt_frame_new_vert_resample (frame, videoscale->to_height);
  }

  /* repack for packed output formats; planar formats render directly */
  switch (videoscale->format) {
    case GST_VIDEO_FORMAT_YUY2:
      frame = cog_virt_frame_new_pack_YUY2 (frame);
      break;
    case GST_VIDEO_FORMAT_UYVY:
      frame = cog_virt_frame_new_pack_UYVY (frame);
      break;
    default:
      break;
  }

  cog_virt_frame_render (frame, outframe);
  cog_frame_unref (frame);
  cog_frame_unref (outframe);

  GST_LOG_OBJECT (videoscale, "pushing buffer of %d bytes",
      GST_BUFFER_SIZE (out));

  return ret;

  /* ERRORS */
#if 0
unsupported:
  {
    GST_ELEMENT_ERROR (videoscale, STREAM, NOT_IMPLEMENTED, (NULL),
        ("Unsupported format %d for scaling method %d",
            videoscale->format, method));
    return GST_FLOW_ERROR;
  }
unknown_mode:
  {
    GST_ELEMENT_ERROR (videoscale, STREAM, NOT_IMPLEMENTED, (NULL),
        ("Unknown scaling method %d", videoscale->method));
    return GST_FLOW_ERROR;
  }
#endif
}
/* GstBaseTransform::src_event
 * Translate navigation pointer coordinates from output (scaled) space
 * back into input space before forwarding the event upstream. */
static gboolean
gst_cog_scale_src_event (GstBaseTransform * trans, GstEvent * event)
{
  GstCogScale *videoscale;
  gboolean ret;
  double a;
  GstStructure *structure;

  videoscale = GST_COG_SCALE (trans);

  GST_DEBUG_OBJECT (videoscale, "handling %s event",
      GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NAVIGATION:
      /* make the event writable so we may patch its structure in place */
      event =
          GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));

      structure = (GstStructure *) gst_event_get_structure (event);
      if (gst_structure_get_double (structure, "pointer_x", &a)) {
        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE,
            a * videoscale->from_width / videoscale->to_width, NULL);
      }
      if (gst_structure_get_double (structure, "pointer_y", &a)) {
        gst_structure_set (structure, "pointer_y", G_TYPE_DOUBLE,
            a * videoscale->from_height / videoscale->to_height, NULL);
      }
      break;
    case GST_EVENT_QOS:
      break;
    default:
      break;
  }

  ret = GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);

  return ret;
}

182
ext/cog/gstcogutils.c Normal file
View file

@ -0,0 +1,182 @@
/* Cog
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/video/video.h>
#include <cog/cog.h>
#include <cog-video/cogvirtframe.h>
#include <math.h>
#include <string.h>
//GST_DEBUG_CATEGORY_EXTERN (cog_debug);
//#define GST_CAT_DEFAULT cog_debug
/* CogFrameFreeFunc: drop the GstBuffer reference backing @frame. */
static void
gst_cog_frame_free (CogFrame * frame, void *data)
{
  gst_buffer_unref (GST_BUFFER (data));
}
/* Wrap @buf (one raw video frame of @format, @width x @height) in a
 * CogFrame without copying.  Takes ownership of the caller's buffer
 * reference; it is released when the frame is freed.
 *
 * Returns the new CogFrame, or NULL for an unsupported format (in which
 * case the buffer reference is released here).
 */
CogFrame *
gst_cog_buffer_wrap (GstBuffer * buf, GstVideoFormat format, int width,
    int height)
{
  /* initialized so an unsupported format (reachable only with assertions
   * disabled) cannot leave it dangling */
  CogFrame *frame = NULL;
  int size;

  size = gst_video_format_get_size (format, width, height);
  if (GST_BUFFER_SIZE (buf) != size) {
    /* not fatal: we still wrap the data, but this points at a caps or
     * negotiation problem upstream */
    GST_ERROR ("size incorrect, expected %d, got %d", size,
        GST_BUFFER_SIZE (buf));
  }

  switch (format) {
    case GST_VIDEO_FORMAT_I420:
      frame =
          cog_frame_new_from_data_I420 (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_YV12:
      frame =
          cog_frame_new_from_data_YV12 (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_YUY2:
      frame =
          cog_frame_new_from_data_YUY2 (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_UYVY:
      frame =
          cog_frame_new_from_data_UYVY (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_AYUV:
      frame =
          cog_frame_new_from_data_AYUV (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_Y42B:
      frame =
          cog_frame_new_from_data_Y42B (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_Y444:
      frame =
          cog_frame_new_from_data_Y444 (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_v210:
      frame =
          cog_frame_new_from_data_v210 (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_v216:
      frame =
          cog_frame_new_from_data_v216 (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_RGBx:
      frame =
          cog_frame_new_from_data_RGBx (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_BGRx:
      frame =
          cog_frame_new_from_data_BGRx (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_xRGB:
      frame =
          cog_frame_new_from_data_xRGB (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_xBGR:
      frame =
          cog_frame_new_from_data_xBGR (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_RGBA:
      frame =
          cog_frame_new_from_data_RGBA (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_BGRA:
      frame =
          cog_frame_new_from_data_BGRA (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_ARGB:
      frame =
          cog_frame_new_from_data_ARGB (GST_BUFFER_DATA (buf), width, height);
      break;
    case GST_VIDEO_FORMAT_ABGR:
      frame =
          cog_frame_new_from_data_ABGR (GST_BUFFER_DATA (buf), width, height);
      break;
    default:
      g_assert_not_reached ();
  }

  if (frame == NULL) {
    /* only reachable when assertions are disabled; previously this was
     * undefined behavior (uninitialized frame) plus a leaked buffer ref */
    gst_buffer_unref (buf);
    return NULL;
  }

  cog_frame_set_free_callback (frame, gst_cog_frame_free, buf);

  return frame;
}
/* Dead code: CogBuffer <-> GstBuffer wrappers, disabled pending use.
 * GST_BUFFER_FREE_FUNC gates the zero-copy path on newer GStreamer. */
#if 0
#ifdef GST_BUFFER_FREE_FUNC
static void
cog_buf_free_func (gpointer priv)
{
  CogBuffer *buffer = (CogBuffer *) priv;

  cog_buffer_unref (buffer);
}
#endif

/* takes the reference */
GstBuffer *
gst_cog_wrap_cog_buffer (CogBuffer * buffer)
{
  GstBuffer *gstbuf;

#ifdef GST_BUFFER_FREE_FUNC
  gstbuf = gst_buffer_new ();
  GST_BUFFER_DATA (gstbuf) = buffer->data;
  GST_BUFFER_SIZE (gstbuf) = buffer->length;
  GST_BUFFER_MALLOCDATA (gstbuf) = (void *) buffer;
  GST_BUFFER_FREE_FUNC (gstbuf) = cog_buf_free_func;
#else
  /* fallback: copy, since we cannot attach a custom free function */
  gstbuf = gst_buffer_new_and_alloc (buffer->length);
  memcpy (GST_BUFFER_DATA (gstbuf), buffer->data, buffer->length);
#endif

  return gstbuf;
}

static void
gst_cog_buffer_free (CogBuffer * buffer, void *priv)
{
  gst_buffer_unref (GST_BUFFER (priv));
}

CogBuffer *
gst_cog_wrap_gst_buffer (GstBuffer * buffer)
{
  CogBuffer *cogbuf;

  cogbuf = cog_buffer_new_with_data (GST_BUFFER_DATA (buffer),
      GST_BUFFER_SIZE (buffer));
  cogbuf->free = gst_cog_buffer_free;
  cogbuf->priv = buffer;

  return cogbuf;
}
#endif

37
ext/cog/gstcogutils.h Normal file
View file

@ -0,0 +1,37 @@
/* Schrodinger
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_COG_UTILS_H_
#define _GST_COG_UTILS_H_
#include <gst/gst.h>
#include <gst/video/video.h>
#include <cog/cog.h>
#include <cog-video/cogframe.h>
/* Wrap a GstBuffer holding one raw video frame in a CogFrame without
 * copying; takes ownership of the passed-in buffer reference. */
CogFrame *
gst_cog_buffer_wrap (GstBuffer *buf, GstVideoFormat format, int width,
    int height);
/* CogBuffer wrappers are currently unused; see gstcogutils.c */
#if 0
GstBuffer * gst_cog_wrap_cog_buffer (CogBuffer *buffer);
CogBuffer * gst_cog_wrap_gst_buffer (GstBuffer *buffer);
#endif
#endif

864
ext/cog/gstcolorconvert.c Normal file
View file

@ -0,0 +1,864 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@entropywave.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/video/video.h>
#include <string.h>
#include <cog/cog.h>
#include <cog-video/cogvirtframe.h>
#include <math.h>
#include "gstcogutils.h"
/* Standard GObject cast/check macros for GstColorconvert. */
#define GST_TYPE_COLORCONVERT \
  (gst_colorconvert_get_type())
#define GST_COLORCONVERT(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_COLORCONVERT,GstColorconvert))
#define GST_COLORCONVERT_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_COLORCONVERT,GstColorconvertClass))
#define GST_IS_COLORCONVERT(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_COLORCONVERT))
/* fixed: parameter was named "obj" while the expansion used "klass",
 * so any expansion of this macro failed to compile */
#define GST_IS_COLORCONVERT_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_COLORCONVERT))
typedef struct _GstColorconvert GstColorconvert;
typedef struct _GstColorconvertClass GstColorconvertClass;
struct _GstColorconvert
{
GstBaseTransform base_transform;
gchar *location;
GstVideoFormat format;
int width;
int height;
};
struct _GstColorconvertClass
{
GstBaseTransformClass parent_class;
};
/* GstColorconvert signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0,
};
GType gst_colorconvert_get_type (void);
static void gst_colorconvert_base_init (gpointer g_class);
static void gst_colorconvert_class_init (gpointer g_class, gpointer class_data);
static void gst_colorconvert_init (GTypeInstance * instance, gpointer g_class);
static void gst_colorconvert_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_colorconvert_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_colorconvert_set_caps (GstBaseTransform * base_transform,
GstCaps * incaps, GstCaps * outcaps);
static GstFlowReturn gst_colorconvert_transform_ip (GstBaseTransform *
base_transform, GstBuffer * buf);
static CogFrame *cog_virt_frame_new_color_transform (CogFrame * frame);
static GstStaticPadTemplate gst_colorconvert_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{I420,YUY2,UYVY,AYUV}"))
);
static GstStaticPadTemplate gst_colorconvert_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{I420,YUY2,UYVY,AYUV}"))
);
GType
gst_colorconvert_get_type (void)
{
static GType compress_type = 0;
if (!compress_type) {
static const GTypeInfo compress_info = {
sizeof (GstColorconvertClass),
gst_colorconvert_base_init,
NULL,
gst_colorconvert_class_init,
NULL,
NULL,
sizeof (GstColorconvert),
0,
gst_colorconvert_init,
};
compress_type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
"GstColorconvert", &compress_info, 0);
}
return compress_type;
}
/* GObject base-init: attach pad templates and element details.
 * NOTE(review): the details strings still read "Video Filter Template" —
 * they are user-visible metadata, left as-is here. */
static void
gst_colorconvert_base_init (gpointer g_class)
{
  static GstElementDetails compress_details =
      GST_ELEMENT_DETAILS ("Video Filter Template",
      "Filter/Effect/Video",
      "Template for a video filter",
      "David Schleef <ds@schleef.org>");
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_colorconvert_sink_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_colorconvert_src_template));

  gst_element_class_set_details (element_class, &compress_details);
}
/* GObject class-init: hook up property accessors and the
 * GstBaseTransform virtual methods.
 * (Dropped the unused GstColorconvertClass local — it was a dead store.) */
static void
gst_colorconvert_class_init (gpointer g_class, gpointer class_data)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
  GstBaseTransformClass *base_transform_class =
      GST_BASE_TRANSFORM_CLASS (g_class);

  gobject_class->set_property = gst_colorconvert_set_property;
  gobject_class->get_property = gst_colorconvert_get_property;

  base_transform_class->set_caps = gst_colorconvert_set_caps;
  base_transform_class->transform_ip = gst_colorconvert_transform_ip;
}
/* GTypeInstance init: no per-instance state to set up. */
static void
gst_colorconvert_init (GTypeInstance * instance, gpointer g_class)
{
  GST_DEBUG ("gst_colorconvert_init");
}
/* GObject set_property: no properties are defined yet.
 * (Dropped the unused local; warn on invalid property ids like
 * get_property does, instead of silently ignoring them.) */
static void
gst_colorconvert_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  g_return_if_fail (GST_IS_COLORCONVERT (object));

  GST_DEBUG ("gst_colorconvert_set_property");

  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property: no properties are defined yet.
 * (Dropped the unused local — it was a dead store.) */
static void
gst_colorconvert_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  g_return_if_fail (GST_IS_COLORCONVERT (object));

  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GstBaseTransform::set_caps
 * Cache the negotiated format and dimensions for transform_ip.
 * Fixes: the precondition check returned GST_FLOW_ERROR (-5, which is
 * non-zero and therefore TRUE) from a gboolean function; and the result
 * of parsing the caps was silently discarded. */
static gboolean
gst_colorconvert_set_caps (GstBaseTransform * base_transform,
    GstCaps * incaps, GstCaps * outcaps)
{
  GstColorconvert *li;

  g_return_val_if_fail (GST_IS_COLORCONVERT (base_transform), FALSE);
  li = GST_COLORCONVERT (base_transform);

  /* fail negotiation if the caps are not parseable raw video */
  return gst_video_format_parse_caps (incaps, &li->format, &li->width,
      &li->height);
}
/* GstBaseTransform::transform_ip
 * Apply the color transform to @buf in place via a cog virtual-frame
 * pipeline: unpack -> upsample to 4:4:4 -> per-pixel table transform ->
 * resubsample/repack to the buffer's original layout. */
static GstFlowReturn
gst_colorconvert_transform_ip (GstBaseTransform * base_transform,
    GstBuffer * buf)
{
  GstColorconvert *li;
  CogFrame *frame;
  CogFrame *vf;

  g_return_val_if_fail (GST_IS_COLORCONVERT (base_transform), GST_FLOW_ERROR);
  li = GST_COLORCONVERT (base_transform);

  /* frame wraps the buffer and is used both as source and as render
   * target below; the extra refs are owned by the cog frames */
  frame = gst_cog_buffer_wrap (gst_buffer_ref (buf),
      li->format, li->width, li->height);

  vf = cog_virt_frame_new_unpack (cog_frame_ref (frame));
  vf = cog_virt_frame_new_subsample (vf, COG_FRAME_FORMAT_U8_444);
  vf = cog_virt_frame_new_color_transform (vf);
  /* convert back to the buffer's negotiated layout */
  if (frame->format == COG_FRAME_FORMAT_YUYV) {
    vf = cog_virt_frame_new_subsample (vf, COG_FRAME_FORMAT_U8_422);
    vf = cog_virt_frame_new_pack_YUY2 (vf);
  } else if (frame->format == COG_FRAME_FORMAT_UYVY) {
    vf = cog_virt_frame_new_subsample (vf, COG_FRAME_FORMAT_U8_422);
    vf = cog_virt_frame_new_pack_UYVY (vf);
  } else if (frame->format == COG_FRAME_FORMAT_AYUV) {
    vf = cog_virt_frame_new_pack_AYUV (vf);
  } else if (frame->format == COG_FRAME_FORMAT_U8_420) {
    vf = cog_virt_frame_new_subsample (vf, COG_FRAME_FORMAT_U8_420);
  } else {
    g_assert_not_reached ();
  }

  cog_virt_frame_render (vf, frame);
  cog_frame_unref (frame);
  cog_frame_unref (vf);

  return GST_FLOW_OK;
}
/* CogFrame render_line callback: write line @j of @component into @dest
 * by looking each pixel up in a precomputed table indexed by the 24-bit
 * (Y << 16 | U << 8 | V) triple.  virt_priv2 holds one 0x1000000-byte
 * table per component, laid out back to back. */
static void
color_transform (CogFrame * frame, void *_dest, int component, int j)
{
  uint8_t *dest = _dest;
  uint8_t *src_y;
  uint8_t *src_u;
  uint8_t *src_v;
  uint8_t *table;
  int i;

  /* select this component's 16 MiB sub-table */
  table = COG_OFFSET (frame->virt_priv2, component * 0x1000000);

  src_y = cog_virt_frame_get_line (frame->virt_frame1, 0, j);
  src_u = cog_virt_frame_get_line (frame->virt_frame1, 1, j);
  src_v = cog_virt_frame_get_line (frame->virt_frame1, 2, j);

  for (i = 0; i < frame->width; i++) {
    dest[i] = table[(src_y[i] << 16) | (src_u[i] << 8) | (src_v[i])];
  }
}
static uint8_t *get_color_transform_table (void);

/* Wrap @frame (must be 4:4:4 U8) in a virtual frame whose lines are
 * produced by the table-lookup color transform above. */
static CogFrame *
cog_virt_frame_new_color_transform (CogFrame * frame)
{
  CogFrame *vf;

  COG_ASSERT (frame->format == COG_FRAME_FORMAT_U8_444);

  vf = cog_frame_new_virtual (NULL, COG_FRAME_FORMAT_U8_444,
      frame->width, frame->height);
  vf->virt_frame1 = frame;
  vf->render_line = color_transform;
  vf->virt_priv2 = get_color_transform_table ();

  return vf;
}
/* our simple CMS */
typedef struct _Color Color;
typedef struct _ColorMatrix ColorMatrix;

/* A color triple; interpretation (xyY, XYZ, R'G'B', Y'CbCr) depends on
 * context. */
struct _Color
{
  double v[3];
};

/* A 4x4 affine color-transform matrix. */
struct _ColorMatrix
{
  double m[4][4];
};

/* Convert @c in place from chromaticity coordinates (x, y, Y) to
 * tristimulus values (X, Y, Z).  Zero luminance denominator y maps to
 * black (0, 0, 0). */
void
color_xyY_to_XYZ (Color * c)
{
  double x = c->v[0];
  double y = c->v[1];
  double Y = c->v[2];

  if (y == 0) {
    c->v[0] = 0;
    c->v[1] = 0;
    c->v[2] = 0;
  } else {
    c->v[0] = x * Y / y;
    c->v[1] = Y;
    c->v[2] = (1.0 - x - y) * Y / y;
  }
}
/* Convert @c in place from tristimulus (X, Y, Z) to chromaticity
 * (x, y, Y).  Pure black (X+Y+Z == 0) gets the white-point chromaticity
 * (approximately D65) so downstream math stays well-defined. */
void
color_XYZ_to_xyY (Color * c)
{
  double sum = c->v[0] + c->v[1] + c->v[2];

  if (sum == 0) {
    /* ~D65 white point chromaticity, zero luminance */
    c->v[0] = 0.3128;
    c->v[1] = 0.3290;
    c->v[2] = 0;
  } else {
    double Y = c->v[1];

    c->v[0] = c->v[0] / sum;
    c->v[1] = Y / sum;
    c->v[2] = Y;
  }
}
/* Fill @c with the three components (x, y, z). */
void
color_set (Color * c, double x, double y, double z)
{
  double *v = c->v;

  v[0] = x;
  v[1] = y;
  v[2] = z;
}
/* Load the 4x4 identity matrix into @m. */
void
color_matrix_set_identity (ColorMatrix * m)
{
  int row, col;

  for (row = 0; row < 4; row++) {
    for (col = 0; col < 4; col++) {
      m->m[row][col] = (row == col) ? 1 : 0;
    }
  }
}
/* Prettyprint a 4x4 matrix @m@ to stdout, one bracketed block with one
 * row per line. */
void
color_matrix_dump (ColorMatrix * m)
{
  int row, col;

  printf ("[\n");
  for (row = 0; row < 4; row++) {
    printf (" ");
    for (col = 0; col < 4; col++) {
      printf (" %8.5g", m->m[row][col]);
    }
    printf ("\n");
  }
  printf ("]\n");
}
/* 4x4 matrix product: @dst = @a * @b.  A temporary holds the result so
 * @dst may alias either operand. */
void
color_matrix_multiply (ColorMatrix * dst, ColorMatrix * a, ColorMatrix * b)
{
  ColorMatrix prod;
  int row, col, k;

  for (row = 0; row < 4; row++) {
    for (col = 0; col < 4; col++) {
      double acc = 0.0;

      for (k = 0; k < 4; k++)
        acc += a->m[row][k] * b->m[k][col];
      prod.m[row][col] = acc;
    }
  }
  memcpy (dst, &prod, sizeof (ColorMatrix));
}
/* Apply the affine transform @m to colour @src, writing @dest.  Only
 * the top three rows are used; column 3 is the additive offset.  A
 * temporary makes @dest == @src safe. */
void
color_matrix_apply (ColorMatrix * m, Color * dest, Color * src)
{
  Color out;
  int row;

  for (row = 0; row < 3; row++) {
    out.v[row] = m->m[row][0] * src->v[0]
        + m->m[row][1] * src->v[1]
        + m->m[row][2] * src->v[2]
        + m->m[row][3];
  }
  memcpy (dest, &out, sizeof (out));
}
/* Prepend a per-component additive offset (a1,a2,a3) to transform @m. */
void
color_matrix_offset_components (ColorMatrix * m, double a1, double a2,
    double a3)
{
  ColorMatrix t;
  double ofs[3] = { a1, a2, a3 };
  int i;

  color_matrix_set_identity (&t);
  for (i = 0; i < 3; i++)
    t.m[i][3] = ofs[i];
  color_matrix_multiply (m, &t, m);
}
/* Prepend a per-component scale (a1,a2,a3) to transform @m. */
void
color_matrix_scale_components (ColorMatrix * m, double a1, double a2, double a3)
{
  ColorMatrix t;
  double scale[3] = { a1, a2, a3 };
  int i;

  color_matrix_set_identity (&t);
  for (i = 0; i < 3; i++)
    t.m[i][i] = scale[i];
  color_matrix_multiply (m, &t, m);
}
/* Prepend to @m the YCbCr -> R'G'B' conversion derived from the luma
 * coefficients Kr/Kb (Kg = 1 - Kr - Kb).  Input columns are (Y,Cb,Cr)
 * with Y in [0,1] and Cb/Cr in [-0.5,0.5]; output rows are (R,G,B):
 *   R = Y + 2(1-Kr)Cr
 *   G = Y - 2Kb(1-Kb)/Kg * Cb - 2Kr(1-Kr)/Kg * Cr
 *   B = Y + 2(1-Kb)Cb
 */
void
color_matrix_YCbCr_to_RGB (ColorMatrix * m, double Kr, double Kb)
{
  double Kg = 1.0 - Kr - Kb;
  ColorMatrix k = {
    {
          {1., 0., 2 * (1 - Kr), 0.},
          {1., -2 * Kb * (1 - Kb) / Kg, -2 * Kr * (1 - Kr) / Kg, 0.},
          {1., 2 * (1 - Kb), 0., 0.},
          {0., 0., 0., 1.},
        }
  };

  color_matrix_multiply (m, &k, m);
}
/* Prepend to @m the R'G'B' -> YCbCr conversion (the inverse of
 * color_matrix_YCbCr_to_RGB for the same Kr/Kb):
 *   Y  = Kr*R + Kg*G + Kb*B
 *   Cb = (B - Y) / (2 * (1 - Kb))
 *   Cr = (R - Y) / (2 * (1 - Kr))
 */
void
color_matrix_RGB_to_YCbCr (ColorMatrix * m, double Kr, double Kb)
{
  double Kg = 1.0 - Kr - Kb;
  ColorMatrix k;
  double x;

  /* luma row */
  k.m[0][0] = Kr;
  k.m[0][1] = Kg;
  k.m[0][2] = Kb;
  k.m[0][3] = 0;

  /* Cb row: (B - Y) scaled into [-0.5,0.5] */
  x = 1 / (2 * (1 - Kb));
  k.m[1][0] = -x * Kr;
  k.m[1][1] = -x * Kg;
  k.m[1][2] = x * (1 - Kb);
  k.m[1][3] = 0;

  /* Cr row: (R - Y) scaled into [-0.5,0.5] */
  x = 1 / (2 * (1 - Kr));
  k.m[2][0] = x * (1 - Kr);
  k.m[2][1] = -x * Kg;
  k.m[2][2] = -x * Kb;
  k.m[2][3] = 0;

  /* homogeneous row */
  k.m[3][0] = 0;
  k.m[3][1] = 0;
  k.m[3][2] = 0;
  k.m[3][3] = 1;

  color_matrix_multiply (m, &k, m);
}
/* Build in @dst the transform from 8-bit studio-range BT.601 YCbCr
 * (Y in [16,235], Cb/Cr in [16,240]) to R'G'B' in [0,1]. */
void
color_matrix_build_yuv_to_rgb_601 (ColorMatrix * dst)
{
  /*
   * At this point, everything is in YCbCr
   * All components are in the range [0,255]
   */
  color_matrix_set_identity (dst);

  /* offset required to get input video black to (0.,0.,0.) */
  color_matrix_offset_components (dst, -16, -128, -128);

  /* scale required to get input video black to (0.,0.,0.) */
  color_matrix_scale_components (dst, (1 / 219.0), (1 / 224.0), (1 / 224.0));

  /* colour matrix, YCbCr -> RGB */
  /* Requires Y in [0,1.0], Cb&Cr in [-0.5,0.5] */
  color_matrix_YCbCr_to_RGB (dst, 0.2990, 0.1140);      // SD
  //color_matrix_YCbCr_to_RGB (dst, 0.2126, 0.0722);    // HD

  /*
   * We are now in RGB space
   */

  /* scale to output range. */
  //color_matrix_scale_components (dst, 255.0, 255.0, 255.0);
}
/* Build in @dst an 8-bit YCbCr -> 8-bit YCbCr transform that re-tags
 * BT.709 (HD) encoded video as BT.601 (SD): decode to R'G'B' with the
 * HD luma coefficients, then re-encode with the SD ones and restore
 * studio range. */
void
color_matrix_build_bt709_to_bt601 (ColorMatrix * dst)
{
  color_matrix_set_identity (dst);

  /* offset required to get input video black to (0.,0.,0.) */
  color_matrix_offset_components (dst, -16, -128, -128);

  /* scale required to get input video black to (0.,0.,0.) */
  color_matrix_scale_components (dst, (1 / 219.0), (1 / 224.0), (1 / 224.0));

  /* colour matrix, YCbCr -> RGB */
  /* Requires Y in [0,1.0], Cb&Cr in [-0.5,0.5] */
  //color_matrix_YCbCr_to_RGB (dst, 0.2990, 0.1140);    // SD
  color_matrix_YCbCr_to_RGB (dst, 0.2126, 0.0722);      // HD

  /* back to studio-range BT.601 YCbCr */
  color_matrix_RGB_to_YCbCr (dst, 0.2990, 0.1140);      // SD
  color_matrix_scale_components (dst, 219.0, 224.0, 224.0);
  color_matrix_offset_components (dst, 16, 128, 128);
}
/* Build in @dst the transform from R'G'B' in [0,1] to 8-bit
 * studio-range BT.601 YCbCr (the inverse of
 * color_matrix_build_yuv_to_rgb_601).
 *
 * Fixed: removed a leftover self-test block that printed colour-bar
 * values to stdout via g_print() on every call. */
void
color_matrix_build_rgb_to_yuv_601 (ColorMatrix * dst)
{
  color_matrix_set_identity (dst);
  //color_matrix_scale_components (dst, (1/255.0), (1/255.0), (1/255.0));
  color_matrix_RGB_to_YCbCr (dst, 0.2990, 0.1140);      // SD
  //color_matrix_RGB_to_YCbCr (dst, 0.2126, 0.0722);    // HD
  //color_matrix_RGB_to_YCbCr (dst, 0.212, 0.087);      // SMPTE 240M
  color_matrix_scale_components (dst, 219.0, 224.0, 224.0);
  color_matrix_offset_components (dst, 16, 128, 128);
}
/* Invert the 3x3 linear part of @m in place using the
 * adjugate-over-determinant (cofactor) method; the fourth row/column
 * are reset to identity, so any translation column is discarded.
 * NOTE(review): no guard against a singular matrix (det == 0) -- the
 * division would produce inf/nan.  Callers only pass primaries
 * matrices, which are invertible. */
void
color_matrix_invert (ColorMatrix * m)
{
  ColorMatrix tmp;
  int i, j;
  double det;

  color_matrix_set_identity (&tmp);
  /* build the transposed cofactor (adjugate) matrix */
  for (j = 0; j < 3; j++) {
    for (i = 0; i < 3; i++) {
      tmp.m[j][i] =
          m->m[(i + 1) % 3][(j + 1) % 3] * m->m[(i + 2) % 3][(j + 2) % 3] -
          m->m[(i + 1) % 3][(j + 2) % 3] * m->m[(i + 2) % 3][(j + 1) % 3];
    }
  }
  /* determinant by cofactor expansion along the first column */
  det =
      tmp.m[0][0] * m->m[0][0] + tmp.m[0][1] * m->m[1][0] +
      tmp.m[0][2] * m->m[2][0];
  for (j = 0; j < 3; j++) {
    for (i = 0; i < 3; i++) {
      tmp.m[i][j] /= det;
    }
  }
  memcpy (m, &tmp, sizeof (tmp));
}
/* Copy the whole 4x4 matrix @src into @dest. */
void
color_matrix_copy (ColorMatrix * dest, ColorMatrix * src)
{
  *dest = *src;
}
/* Transpose the 3x3 linear part of @m in place; the fourth row and
 * column are reset to identity. */
void
color_matrix_transpose (ColorMatrix * m)
{
  ColorMatrix t;
  int r, c;

  color_matrix_set_identity (&t);
  for (r = 0; r < 3; r++)
    for (c = 0; c < 3; c++)
      t.m[r][c] = m->m[c][r];
  *m = t;
}
/* Build in @dst the linear-RGB -> CIE XYZ matrix for a display whose
 * red/green/blue primaries have chromaticities (rx,ry), (gx,gy),
 * (bx,by) and whose white point is (wx,wy).  The matrix columns are
 * the primaries' XYZ coordinates, scaled so RGB (1,1,1) maps to the
 * requested white point (standard primaries-matrix construction).
 * NOTE(review): dumps intermediate matrices and the white point to
 * stdout on every call -- debug leftovers, noisy but harmless. */
void
color_matrix_build_XYZ (ColorMatrix * dst,
    double rx, double ry,
    double gx, double gy, double bx, double by, double wx, double wy)
{
  Color r, g, b, w, scale;
  ColorMatrix m;

  /* primaries and white point as XYZ, normalised to Y = 1 */
  color_set (&r, rx, ry, 1.0);
  color_xyY_to_XYZ (&r);
  color_set (&g, gx, gy, 1.0);
  color_xyY_to_XYZ (&g);
  color_set (&b, bx, by, 1.0);
  color_xyY_to_XYZ (&b);
  color_set (&w, wx, wy, 1.0);
  color_xyY_to_XYZ (&w);

  /* unscaled matrix with primaries as rows (transposed for now) */
  color_matrix_set_identity (dst);
  dst->m[0][0] = r.v[0];
  dst->m[0][1] = r.v[1];
  dst->m[0][2] = r.v[2];
  dst->m[1][0] = g.v[0];
  dst->m[1][1] = g.v[1];
  dst->m[1][2] = g.v[2];
  dst->m[2][0] = b.v[0];
  dst->m[2][1] = b.v[1];
  dst->m[2][2] = b.v[2];
  color_matrix_dump (dst);

  /* solve for the per-primary scales that hit the white point */
  color_matrix_copy (&m, dst);
  color_matrix_invert (&m);
  color_matrix_dump (&m);

  color_matrix_transpose (&m);
  color_matrix_apply (&m, &scale, &w);
  g_print ("%g %g %g\n", scale.v[0], scale.v[1], scale.v[2]);

  /* rebuild with scaled primaries, then transpose into column form */
  dst->m[0][0] = r.v[0] * scale.v[0];
  dst->m[0][1] = r.v[1] * scale.v[0];
  dst->m[0][2] = r.v[2] * scale.v[0];
  dst->m[1][0] = g.v[0] * scale.v[1];
  dst->m[1][1] = g.v[1] * scale.v[1];
  dst->m[1][2] = g.v[2] * scale.v[1];
  dst->m[2][0] = b.v[0] * scale.v[2];
  dst->m[2][1] = b.v[1] * scale.v[2];
  dst->m[2][2] = b.v[2] * scale.v[2];
  color_matrix_transpose (dst);
  color_matrix_dump (dst);

  /* sanity check: RGB (1,1,1) should land on the white point */
  color_set (&scale, 1, 1, 1);
  color_matrix_apply (dst, &scale, &scale);
  color_XYZ_to_xyY (&scale);
  g_print ("white %g %g %g\n", scale.v[0], scale.v[1], scale.v[2]);
}
/* Build the linear-RGB -> CIE XYZ matrix for SD (BT.601) video using
 * the SMPTE "C" phosphor primaries with D65 white. */
void
color_matrix_build_rgb_to_XYZ_601 (ColorMatrix * dst)
{
  /* SMPTE C primaries, SMPTE 170M-2004 */
  color_matrix_build_XYZ (dst,
      0.630, 0.340, 0.310, 0.595, 0.155, 0.070, 0.3127, 0.3290);
#if 0
  /* NTSC 1953 primaries, SMPTE 170M-2004 */
  color_matrix_build_XYZ (dst,
      0.67, 0.33, 0.21, 0.71, 0.14, 0.08, 0.3127, 0.3290);
#endif
}
/* Build a matrix from the Rec. ITU-R BT.709-5 primaries.
 * NOTE(review): unlike color_matrix_build_XYZ_to_rgb_dell below, this
 * never calls color_matrix_invert, so despite the name it produces the
 * RGB -> XYZ direction.  No caller is visible in this file -- confirm
 * intent before changing. */
void
color_matrix_build_XYZ_to_rgb_709 (ColorMatrix * dst)
{
  /* Rec. ITU-R BT.709-5 */
  color_matrix_build_XYZ (dst,
      0.640, 0.330, 0.300, 0.600, 0.150, 0.060, 0.3127, 0.3290);
}
/* Build the CIE XYZ -> linear RGB matrix for a (measured) Dell monitor:
 * construct RGB -> XYZ from the measured primaries, then invert. */
void
color_matrix_build_XYZ_to_rgb_dell (ColorMatrix * dst)
{
  /* Dell monitor */
#if 1
  color_matrix_build_XYZ (dst,
      0.662, 0.329, 0.205, 0.683, 0.146, 0.077, 0.3135, 0.3290);
#endif
#if 0
  /* alternative: SMPTE C primaries */
  color_matrix_build_XYZ (dst,
      0.630, 0.340, 0.310, 0.595, 0.155, 0.070, 0.3127, 0.3290);
#endif
  color_matrix_invert (dst);
}
/* Decode gamma-encoded (Rec.709-style) components to linear light.
 * NOTE(review): the power branch omits the standard 1.099 divisor
 * (BT.709: L = ((V + 0.099) / 1.099) ^ (1/0.45)).  As written it is
 * still the exact inverse of color_transfer_function_unapply below,
 * which omits the matching 1.099 factor -- fix both together or
 * neither, or round-trips through the LUT will drift. */
void
color_transfer_function_apply (Color * dest, Color * src)
{
  int i;

  for (i = 0; i < 3; i++) {
    /* linear segment below the 0.0812 breakpoint, power law above */
    if (src->v[i] < 0.0812) {
      dest->v[i] = src->v[i] / 4.500;
    } else {
      dest->v[i] = pow (src->v[i] + 0.099, 1 / 0.4500);
    }
  }
}
/* Encode linear-light components back to the gamma domain; inverse of
 * color_transfer_function_apply above (including its omitted 1.099
 * factor -- see the NOTE there).  Breakpoint 0.0812/4.5 ~= 0.018. */
void
color_transfer_function_unapply (Color * dest, Color * src)
{
  int i;

  for (i = 0; i < 3; i++) {
    if (src->v[i] < 0.0812 / 4.500) {
      dest->v[i] = src->v[i] * 4.500;
    } else {
      dest->v[i] = pow (src->v[i], 0.4500) - 0.099;
    }
  }
}
/* Clamp each component of @src into the displayable [0,1] range. */
void
color_gamut_clamp (Color * dest, Color * src)
{
  int i;

  for (i = 0; i < 3; i++)
    dest->v[i] = CLAMP (src->v[i], 0.0, 1.0);
}
/* Build (once) and return the CMS lookup table used by
 * color_transform(): three concatenated 2^24-entry planes (Y, U, V),
 * each indexed by the packed (y<<16)|(u<<8)|v input triple.
 *
 * The enabled branch maps 8-bit BT.601 YCbCr onto a measured Dell
 * monitor's gamut: decode to linear RGB, SMPTE-C RGB -> XYZ, XYZ ->
 * monitor RGB, re-gamma, re-encode as BT.601 YCbCr.  The disabled
 * branch is a simpler BT.709 -> BT.601 matrix conversion.
 *
 * NOTE(review): allocates 48 MB that is cached for the process
 * lifetime (never freed, apparently by design), and the lazy init is
 * not thread-safe -- confirm callers share one streaming thread. */
static uint8_t *
get_color_transform_table (void)
{
  static uint8_t *color_transform_table = NULL;

#if 1
  if (!color_transform_table) {
    ColorMatrix bt601_to_rgb;
    ColorMatrix bt601_to_yuv;
    ColorMatrix bt601_rgb_to_XYZ;
    ColorMatrix dell_XYZ_to_rgb;
    uint8_t *table_y;
    uint8_t *table_u;
    uint8_t *table_v;
    int y, u, v;

    color_matrix_build_yuv_to_rgb_601 (&bt601_to_rgb);
    color_matrix_build_rgb_to_yuv_601 (&bt601_to_yuv);
    color_matrix_build_rgb_to_XYZ_601 (&bt601_rgb_to_XYZ);
    color_matrix_build_XYZ_to_rgb_dell (&dell_XYZ_to_rgb);

    color_transform_table = g_malloc (0x1000000 * 3);
    table_y = COG_OFFSET (color_transform_table, 0 * 0x1000000);
    table_u = COG_OFFSET (color_transform_table, 1 * 0x1000000);
    table_v = COG_OFFSET (color_transform_table, 2 * 0x1000000);

    for (y = 0; y < 256; y++) {
      for (u = 0; u < 256; u++) {
        for (v = 0; v < 256; v++) {
          Color c;

          c.v[0] = y;
          c.v[1] = u;
          c.v[2] = v;
          /* YCbCr -> R'G'B' -> linear -> XYZ -> monitor RGB -> YCbCr */
          color_matrix_apply (&bt601_to_rgb, &c, &c);
          color_gamut_clamp (&c, &c);
          color_transfer_function_apply (&c, &c);
          color_matrix_apply (&bt601_rgb_to_XYZ, &c, &c);
          color_matrix_apply (&dell_XYZ_to_rgb, &c, &c);
          color_transfer_function_unapply (&c, &c);
          color_gamut_clamp (&c, &c);
          color_matrix_apply (&bt601_to_yuv, &c, &c);
          table_y[(y << 16) | (u << 8) | (v)] = rint (c.v[0]);
          table_u[(y << 16) | (u << 8) | (v)] = rint (c.v[1]);
          table_v[(y << 16) | (u << 8) | (v)] = rint (c.v[2]);
        }
      }
    }
  }
#endif
#if 0
  /* alternative: plain BT.709 -> BT.601 matrix conversion */
  if (!color_transform_table) {
    ColorMatrix bt709_to_bt601;
    uint8_t *table_y;
    uint8_t *table_u;
    uint8_t *table_v;
    int y, u, v;

    color_matrix_build_bt709_to_bt601 (&bt709_to_bt601);

    color_transform_table = g_malloc (0x1000000 * 3);
    table_y = COG_OFFSET (color_transform_table, 0 * 0x1000000);
    table_u = COG_OFFSET (color_transform_table, 1 * 0x1000000);
    table_v = COG_OFFSET (color_transform_table, 2 * 0x1000000);

    for (y = 0; y < 256; y++) {
      for (u = 0; u < 256; u++) {
        for (v = 0; v < 256; v++) {
          Color c;

          c.v[0] = y;
          c.v[1] = u;
          c.v[2] = v;
          color_matrix_apply (&bt709_to_bt601, &c, &c);
          table_y[(y << 16) | (u << 8) | (v)] = rint (c.v[0]);
          table_u[(y << 16) | (u << 8) | (v)] = rint (c.v[1]);
          table_v[(y << 16) | (u << 8) | (v)] = rint (c.v[2]);
        }
      }
    }
  }
#endif
  return color_transform_table;
}

458
ext/cog/gstlogoinsert.c Normal file
View file

@ -0,0 +1,458 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@entropywave.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/video/video.h>
#include <string.h>
#include <cog/cog.h>
#include <cog-video/cogvirtframe.h>
#include <math.h>
#include <png.h>
#include "gstcogutils.h"
/* Standard GObject cast/check macros for GstLogoinsert. */
#define GST_TYPE_LOGOINSERT \
  (gst_logoinsert_get_type())
#define GST_LOGOINSERT(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_LOGOINSERT,GstLogoinsert))
#define GST_LOGOINSERT_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_LOGOINSERT,GstLogoinsertClass))
#define GST_IS_LOGOINSERT(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_LOGOINSERT))
/* Fixed: the parameter was named `obj` while the body used `klass`,
 * so any expansion of this macro failed to compile. */
#define GST_IS_LOGOINSERT_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_LOGOINSERT))
typedef struct _GstLogoinsert GstLogoinsert;
typedef struct _GstLogoinsertClass GstLogoinsertClass;

/* Instance state for the logo-insert element. */
struct _GstLogoinsert
{
  GstBaseTransform base_transform;

  gchar *location;              /* "location" property: path of the PNG */

  /* negotiated video format, filled in by set_caps() */
  GstVideoFormat format;
  int width;
  int height;

  gchar *data;                  /* raw PNG file contents */
  gsize size;                   /* size of data in bytes */

  CogFrame *overlay_frame;      /* logo colour planes, subsampled to match */
  CogFrame *ayuv_frame;         /* decoded logo, packed 4-bytes-per-pixel */
  CogFrame *alpha_frame;        /* logo alpha plane, subsampled to match */
};

struct _GstLogoinsertClass
{
  GstBaseTransformClass parent_class;
};

/* GstLogoinsert signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

enum
{
  ARG_0,
  ARG_LOCATION,                 /* "location" string property */
};
GType gst_logoinsert_get_type (void);

/* forward declarations for the GType plumbing and vmethods */
static void gst_logoinsert_base_init (gpointer g_class);
static void gst_logoinsert_class_init (gpointer g_class, gpointer class_data);
static void gst_logoinsert_init (GTypeInstance * instance, gpointer g_class);

static void gst_logoinsert_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_logoinsert_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static void gst_logoinsert_set_location (GstLogoinsert * li,
    const gchar * location);

static gboolean gst_logoinsert_set_caps (GstBaseTransform * base_transform,
    GstCaps * incaps, GstCaps * outcaps);
static GstFlowReturn gst_logoinsert_transform_ip (GstBaseTransform *
    base_transform, GstBuffer * buf);
static CogFrame *cog_frame_new_from_png (void *data, int size);
static CogFrame *cog_virt_frame_extract_alpha (CogFrame * frame);
static CogFrame *cog_frame_realize (CogFrame * frame);

/* NOTE(review): the sink template accepts only I420 while the src
 * template lists {I420,YUY2,UYVY,AYUV}; for an in-place transform the
 * two sides would normally advertise the same caps -- confirm. */
static GstStaticPadTemplate gst_logoinsert_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
    );

static GstStaticPadTemplate gst_logoinsert_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{I420,YUY2,UYVY,AYUV}"))
    );
/* Register (once) and return the GstLogoinsert GType.
 * NOTE(review): pre-GOnce lazy registration -- not thread-safe; safe
 * only if first called from the plugin-init thread. */
GType
gst_logoinsert_get_type (void)
{
  static GType compress_type = 0;

  if (!compress_type) {
    static const GTypeInfo compress_info = {
      sizeof (GstLogoinsertClass),
      gst_logoinsert_base_init, /* base_init */
      NULL,                     /* base_finalize */
      gst_logoinsert_class_init,        /* class_init */
      NULL,                     /* class_finalize */
      NULL,                     /* class_data */
      sizeof (GstLogoinsert),
      0,                        /* n_preallocs */
      gst_logoinsert_init,      /* instance_init */
    };

    compress_type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
        "GstLogoinsert", &compress_info, 0);
  }
  return compress_type;
}
/* base_init: install the pad templates and element metadata.
 * Fixed: the element details still carried the "Video Filter Template"
 * boilerplate; describe what the element actually does. */
static void
gst_logoinsert_base_init (gpointer g_class)
{
  static GstElementDetails logoinsert_details =
      GST_ELEMENT_DETAILS ("Logo insertion",
      "Filter/Effect/Video",
      "Overlays a PNG image onto video frames",
      "David Schleef <ds@schleef.org>");
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_logoinsert_src_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_logoinsert_sink_template));

  gst_element_class_set_details (element_class, &logoinsert_details);
}
/* class_init: hook up property handling, the "location" property and
 * the base-transform vmethods.  Fixed: removed an unused
 * GstLogoinsertClass local that was assigned and never read. */
static void
gst_logoinsert_class_init (gpointer g_class, gpointer class_data)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
  GstBaseTransformClass *base_transform_class =
      GST_BASE_TRANSFORM_CLASS (g_class);

  gobject_class->set_property = gst_logoinsert_set_property;
  gobject_class->get_property = gst_logoinsert_get_property;

  g_object_class_install_property (gobject_class, ARG_LOCATION,
      g_param_spec_string ("location", "location",
          "location of PNG file to overlay", "", G_PARAM_READWRITE));

  base_transform_class->set_caps = gst_logoinsert_set_caps;
  base_transform_class->transform_ip = gst_logoinsert_transform_ip;
}
/* instance_init: nothing to set up -- all GstLogoinsert fields start
 * zeroed by GObject instance allocation. */
static void
gst_logoinsert_init (GTypeInstance * instance, gpointer g_class)
{
  //GstLogoinsert *compress = GST_LOGOINSERT (instance);
  //GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);

  GST_DEBUG ("gst_logoinsert_init");
}
/* GObject set_property handler: only "location" is writable. */
static void
gst_logoinsert_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstLogoinsert *src;

  g_return_if_fail (GST_IS_LOGOINSERT (object));
  src = GST_LOGOINSERT (object);

  GST_DEBUG ("gst_logoinsert_set_property");
  switch (prop_id) {
    case ARG_LOCATION:
      gst_logoinsert_set_location (src, g_value_get_string (value));
      break;
    default:
      /* Fixed: warn on unknown property ids, matching
       * gst_logoinsert_get_property (previously silently ignored). */
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property handler: only "location" is readable. */
static void
gst_logoinsert_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstLogoinsert *li;

  g_return_if_fail (GST_IS_LOGOINSERT (object));
  li = GST_LOGOINSERT (object);

  if (prop_id == ARG_LOCATION) {
    g_value_set_string (value, li->location);
  } else {
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
  }
}
/* set_caps vmethod: cache the negotiated format and dimensions.
 * Fixed: (1) the result of gst_video_format_parse_caps() was ignored,
 * silently accepting unparsable caps; (2) the guard returned
 * GST_FLOW_ERROR (-5) from a gboolean function, which is truthy and
 * therefore read as success -- use FALSE. */
static gboolean
gst_logoinsert_set_caps (GstBaseTransform * base_transform,
    GstCaps * incaps, GstCaps * outcaps)
{
  GstLogoinsert *li;

  g_return_val_if_fail (GST_IS_LOGOINSERT (base_transform), FALSE);
  li = GST_LOGOINSERT (base_transform);

  return gst_video_format_parse_caps (incaps, &li->format, &li->width,
      &li->height);
}
/* transform_ip vmethod: alpha-blend the pre-rendered logo into the
 * bottom-right corner of each buffer, per component plane.  The
 * overlay and alpha planes are rendered lazily on the first buffer
 * because they depend on the negotiated subsampling. */
static GstFlowReturn
gst_logoinsert_transform_ip (GstBaseTransform * base_transform, GstBuffer * buf)
{
  GstLogoinsert *li;
  CogFrame *frame;

  g_return_val_if_fail (GST_IS_LOGOINSERT (base_transform), GST_FLOW_ERROR);
  li = GST_LOGOINSERT (base_transform);

  frame = gst_cog_buffer_wrap (buf, li->format, li->width, li->height);
  /* NOTE(review): `frame` is not unreffed on either return path --
   * confirm gst_cog_buffer_wrap ties the wrapper's lifetime to @buf,
   * otherwise this leaks one CogFrame per buffer. */

  /* no logo loaded: pass the buffer through untouched */
  if (li->ayuv_frame == NULL)
    return GST_FLOW_OK;

  if (li->overlay_frame == NULL) {
    CogFrame *f;

    /* alpha plane, subsampled to the video's chroma layout */
    f = cog_virt_frame_extract_alpha (cog_frame_ref (li->ayuv_frame));
    f = cog_virt_frame_new_subsample (f, frame->format);
    li->alpha_frame = cog_frame_realize (f);

    /* colour planes, colour-converted then subsampled likewise */
    f = cog_virt_frame_new_unpack (cog_frame_ref (li->ayuv_frame));
    f = cog_virt_frame_new_color_matrix (f);
    f = cog_virt_frame_new_subsample (f, frame->format);
    li->overlay_frame = cog_frame_realize (f);
  }

  if (1) {
    int i, j;
    int k;
    guint8 *dest;
    guint8 *src;
    guint8 *alpha;
    int offset_x, offset_y;

    /* The offsets anchor the logo at the frame's bottom-right corner
     * in every component plane. */
    for (k = 0; k < 3; k++) {
      offset_x = frame->components[k].width -
          li->alpha_frame->components[k].width;
      offset_y = frame->components[k].height -
          li->alpha_frame->components[k].height;
      for (j = 0; j < li->overlay_frame->components[k].height; j++) {
        dest = COG_FRAME_DATA_GET_LINE (frame->components + k, j + offset_y);
        src = COG_FRAME_DATA_GET_LINE (li->overlay_frame->components + k, j);
        alpha = COG_FRAME_DATA_GET_LINE (li->alpha_frame->components + k, j);

/* fast approximate x/255 with rounding, valid for 16-bit x */
#define oil_divide_255(x) ((((x)+128) + (((x)+128)>>8))>>8)
        for (i = 0; i < li->overlay_frame->components[k].width; i++) {
          /* dest = (src*alpha + dest*(255-alpha)) / 255 */
          dest[i + offset_x] =
              oil_divide_255 (src[i] * alpha[i] + dest[i + offset_x] * (255 -
                  alpha[i]));
        }
      }
    }
  }

  return GST_FLOW_OK;
}
/* "location" property setter: load the PNG at @location and decode it
 * into li->ayuv_frame; the derived overlay/alpha frames are rebuilt
 * lazily on the next buffer.
 *
 * Fixed: (1) the previous ayuv_frame was leaked on every reload;
 * (2) the GError from g_file_get_contents() was leaked on failure;
 * (3) on a failed load the stale alpha/overlay frames were kept, so
 * the old logo kept rendering after its backing state was cleared. */
static void
gst_logoinsert_set_location (GstLogoinsert * li, const gchar * location)
{
  gboolean ret;
  GError *error = NULL;

  /* Drop all state derived from the previous image first, so a failed
   * load leaves no stale overlay behind. */
  g_free (li->location);
  li->location = NULL;
  g_free (li->data);
  li->data = NULL;
  li->size = 0;
  if (li->ayuv_frame) {
    cog_frame_unref (li->ayuv_frame);
    li->ayuv_frame = NULL;
  }
  if (li->alpha_frame) {
    cog_frame_unref (li->alpha_frame);
    li->alpha_frame = NULL;
  }
  if (li->overlay_frame) {
    cog_frame_unref (li->overlay_frame);
    li->overlay_frame = NULL;
  }

  li->location = g_strdup (location);
  ret = g_file_get_contents (li->location, &li->data, &li->size, &error);
  if (!ret) {
    g_clear_error (&error);
    li->data = NULL;
    li->size = 0;
    return;
  }

  li->ayuv_frame = cog_frame_new_from_png (li->data, li->size);
}
/* load PNG into CogFrame */

/* Read cursor over an in-memory PNG byte buffer; used by read_data(). */
struct png_data_struct
{
  unsigned char *data;          /* PNG file contents */
  int size;                     /* total size in bytes */
  int offset;                   /* current read position */
};
/* libpng read callback: feed @length bytes from the in-memory buffer.
 * Fixed: accessed png_ptr->io_ptr directly, which breaks with
 * libpng >= 1.5 where the struct is opaque -- use png_get_io_ptr(). */
static void
read_data (png_structp png_ptr, png_bytep data, png_size_t length)
{
  struct png_data_struct *s = png_get_io_ptr (png_ptr);

  memcpy (data, s->data + s->offset, length);
  s->offset += length;
}
/* Decode an in-memory PNG into a packed 4-byte-per-pixel CogFrame
 * labelled AYUV.  After the libpng transforms the byte order is
 * alpha-first (A,R,G,B); presumably a later colour-matrix stage maps
 * the RGB part to YUV -- hence the AYUV labelling.  Returns a new
 * frame owned by the caller.
 *
 * NOTE(review): no setjmp/png_jmpbuf error handler is installed and
 * the png_create_* results are not NULL-checked, so a corrupt PNG
 * aborts via libpng's default error handler -- inputs must be trusted.
 * Grayscale/palette PNGs are not converted and would decode with the
 * wrong pixel layout.  `rowbytes` is computed but unused. */
static CogFrame *
cog_frame_new_from_png (void *data, int size)
{
  struct png_data_struct s = { 0 };
  png_structp png_ptr;
  png_infop info_ptr;
  png_bytep *rows;
  CogFrame *frame;
  guchar *frame_data;
  int rowbytes;
  int j;
  int width, height;
  int color_type;

  png_ptr = png_create_read_struct (PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
  info_ptr = png_create_info_struct (png_ptr);

  /* read from memory via the read_data() callback */
  s.data = data;
  s.size = size;
  png_set_read_fn (png_ptr, (void *) &s, read_data);

  png_read_info (png_ptr, info_ptr);

  width = png_get_image_width (png_ptr, info_ptr);
  height = png_get_image_height (png_ptr, info_ptr);
  color_type = png_get_color_type (png_ptr, info_ptr);
  GST_DEBUG ("PNG size %dx%d color_type %d", width, height, color_type);

  /* normalise to 8 bits per channel, 4 bytes per pixel, alpha first */
  png_set_strip_16 (png_ptr);
  png_set_packing (png_ptr);
  if (color_type == PNG_COLOR_TYPE_RGB) {
    /* opaque RGB: prepend a constant 0xff alpha byte */
    png_set_filler (png_ptr, 0xff, PNG_FILLER_BEFORE);
  }
  if (color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
    /* RGBA -> ARGB so alpha lands in byte 0 */
    png_set_swap_alpha (png_ptr);
  }

  frame_data = g_malloc (width * height * 4);
  frame = cog_frame_new_from_data_AYUV (frame_data, width, height);

  /* decode directly into the frame's component 0, one row per line */
  rowbytes = png_get_rowbytes (png_ptr, info_ptr);
  rows = (png_bytep *) g_malloc (sizeof (png_bytep) * height);
  for (j = 0; j < height; j++) {
    rows[j] = COG_FRAME_DATA_GET_LINE (frame->components + 0, j);
  }
  png_read_image (png_ptr, rows);
  g_free (rows);

  // PNG_TRANSFORM_STRP_16 | PNG_TRANSFORM_PACKING,
  png_destroy_read_struct (&png_ptr, &info_ptr, png_infopp_NULL);
  return frame;
}
/* Render-line callback: copy the alpha channel (byte 0 of every
 * 4-byte pixel) of the packed source frame into a single plane. */
static void
extract_alpha (CogFrame * frame, void *_dest, int component, int j)
{
  uint8_t *out = _dest;
  uint8_t *packed;
  int x;

  packed = COG_FRAME_DATA_GET_LINE (frame->virt_frame1->components + 0, j);
  for (x = 0; x < frame->width; x++)
    out[x] = packed[x * 4];
}
/* Wrap @frame in a virtual frame whose output is the alpha channel of
 * the packed input, as a full-resolution U8 plane. */
static CogFrame *
cog_virt_frame_extract_alpha (CogFrame * frame)
{
  CogFrame *vf;

  /* FIXME check that frame is a real AYUV frame */
  vf = cog_frame_new_virtual (NULL, COG_FRAME_FORMAT_U8_444,
      frame->width, frame->height);
  vf->virt_frame1 = frame;
  vf->render_line = extract_alpha;
  return vf;
}
/* Render a virtual frame into a concrete copy.  Consumes the caller's
 * reference to @frame and returns a new, real frame. */
static CogFrame *
cog_frame_realize (CogFrame * frame)
{
  CogFrame *out = cog_frame_clone (NULL, frame);

  cog_virt_frame_render (frame, out);
  cog_frame_unref (frame);
  return out;
}