bindings: We no longer use static bindings in 0.11

Edward Hervey 2011-10-11 10:08:47 +02:00
parent c6d71ed335
commit 312942f836
42 changed files with 1 addition and 11085 deletions
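The static codegen-based bindings removed below are superseded by the GObject Introspection bindings; a minimal sketch of driving GES from Python through gi.repository instead (the version strings and exact calls are assumptions, not part of this commit):

    import gi
    gi.require_version('Gst', '1.0')   # assumed versions; 0.11 was the unstable series leading to 1.0
    gi.require_version('GES', '1.0')
    from gi.repository import Gst, GES

    Gst.init(None)
    GES.init()
    timeline = GES.Timeline.new_audio_video()   # same API the static bindings used to wrap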


@@ -1,6 +1,6 @@
 DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc
-SUBDIRS = ges tests tools common m4 pkgconfig docs bindings
+SUBDIRS = ges tests tools common m4 pkgconfig docs
 DIST_SUBDIRS = $(SUBDIRS)


@@ -1,5 +0,0 @@
SUBDIRS =
if WITH_PYTHON
SUBDIRS += python
endif


@@ -1,42 +0,0 @@
SUBDIRS = codegen examples testsuite
pkgpyexecdir = $(pyexecdir)
# we install everything in pyexecdir; otherwise you end up with a mess for
# multilib
pygesdir = $(pkgpyexecdir)
pyges_PYTHON =
pygesexecdir = $(pkgpyexecdir)
pygesexec_LTLIBRARIES = ges.la
DEFS = $(srcdir)/ges-types.defs $(srcdir)/ges.defs
defs_DATA = $(DEFS)
defsdir = $(pkgdatadir)/$(GST_MAJORMINOR)/defs
OVERRIDES = ges.override
INCLUDES = -I$(top_srcdir) -I$(srcdir) $(PYTHON_INCLUDES)
ges_la_CFLAGS = -I$(top_srcdir)\
$(PYGOBJECT_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) -Wno-write-strings
ges_la_LDFLAGS = -export-symbols-regex "^(initges|_PyGObject_API).*" \
-module -avoid-version $(GST_PLUGIN_LDFLAGS)
ges_la_LIBADD = $(top_builddir)/ges/libges-@GST_MAJORMINOR@.la \
$(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) \
-lges-@GST_MAJORMINOR@ -lges-@GST_MAJORMINOR@ \
-lges-@GST_MAJORMINOR@ $(GST_LIBS) $(LIBM)
ges_la_SOURCES = gesmodule.c
nodist_ges_la_SOURCES = ges.c
EXTRA_DIST = $(defs_DATA) $(OVERRIDES) arg-types.py
CLEANFILES = ges.c
.defs.c:
($(PYTHON) $(srcdir)/codegen/codegen.py \
--load-types $(srcdir)/arg-types.py \
--register $(srcdir)/ges-types.defs \
--override $(srcdir)/$*.override \
--prefix pyges $<) > gen-$*.c \
&& cp gen-$*.c $*.c \
&& rm -f gen-$*.c
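The .defs.c rule above is what turned ges.defs into the generated ges.c; roughly the same invocation written out in Python, as a sketch (assumes it is run from the bindings/python directory with codegen/ alongside, as in the Makefile above):

    import shutil
    import subprocess

    cmd = ['python', 'codegen/codegen.py',
           '--load-types', 'arg-types.py',
           '--register', 'ges-types.defs',
           '--override', 'ges.override',
           '--prefix', 'pyges',
           'ges.defs']
    with open('gen-ges.c', 'w') as out:      # the "> gen-$*.c" redirection
        subprocess.check_call(cmd, stdout=out)
    shutil.move('gen-ges.c', 'ges.c')        # the "cp ... && rm -f ..." step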


@@ -1,421 +0,0 @@
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# gst-python
# Copyright (C) 2002 David I. Lehn
# 2004 Johan Dahlin
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David I. Lehn <dlehn@users.sourceforge.net>
from argtypes import UInt64Arg, Int64Arg, PointerArg, ArgMatcher, ArgType, matcher
from reversewrapper import Parameter, ReturnType, GBoxedParam, GBoxedReturn, IntParam, IntReturn
class XmlNodeArg(ArgType):
"""libxml2 node generator"""
names = {"xobj":"xmlNode",
"xptr":"xmlNodePtr",
"xwrap":"libxml_xmlNodePtrWrap"}
parm = (' if(xml == NULL) return NULL;\n'
' xobj = PyObject_GetAttrString(xml, "%(xobj)s");\n'
' if(!PyObject_IsInstance(py%(name)s, xobj)) {\n'
' PyErr_Clear();\n'
' PyErr_SetString(PyExc_RuntimeError,"%(name)s is not a %(xobj)s instance");\n'
' Py_DECREF(xobj);Py_DECREF(xml);\n'
' return NULL;\n'
' }\n'
' o = PyObject_GetAttrString(py%(name)s, "_o");\n'
' %(name)s = PyCObject_AsVoidPtr(o);\n')
parmp = (' Py_DECREF(o); Py_DECREF(xobj);Py_DECREF(xml);\n')
ret = (' if(xml == NULL) return NULL;\n')
retp = (' xargs = PyTuple_New(1);\n'
' xobj = PyObject_GetAttrString(xml, "%(xobj)s");\n'
' o = %(xwrap)s(ret);\n'
' PyTuple_SetItem(xargs, 0, o);\n'
' return PyInstance_New(xobj, xargs, PyDict_New());\n')
def write_param(self, ptype, pname, pdflt, pnull, info):
info.varlist.add('PyObject', '*xml = _gst_get_libxml2_module()')
info.varlist.add('PyObject', '*o')
info.varlist.add('PyObject', '*xobj')
info.varlist.add('PyObject', '*py' + pname)
info.varlist.add(self.names["xptr"], pname)
#if pnull:
info.add_parselist('O', ['&py'+pname], [pname])
info.arglist.append(pname)
self.names["name"] = pname
info.codebefore.append(self.parm % self.names)
info.codeafter.append(self.parmp % self.names);
def write_return(self, ptype, ownsreturn, info):
info.varlist.add('PyObject', '*xml = _gst_get_libxml2_module()')
info.varlist.add('PyObject', '*xargs')
info.varlist.add('PyObject', '*xobj')
info.varlist.add('PyObject', '*o')
info.varlist.add(self.names["xptr"], 'ret')
info.codebefore.append(self.ret % self.names)
info.codeafter.append(self.retp % self.names)
class XmlDocArg(XmlNodeArg):
"""libxml2 doc generator"""
names = {"xobj":"xmlDoc",
"xptr":"xmlDocPtr",
"xwrap":"libxml_xmlDocPtrWrap"}
class GstCapsArg(ArgType):
"""GstCaps node generator"""
before = (' %(name)s = pygst_caps_from_pyobject (py_%(name)s, %(namecopy)s);\n'
' if (PyErr_Occurred())\n'
' return NULL;\n')
beforenull = (' if (py_%(name)s == Py_None || py_%(name)s == NULL)\n'
' %(name)s = NULL;\n'
' else\n'
' ' + before)
after = (' if (%(name)s && %(name)s_is_copy)\n'
' gst_caps_unref (%(name)s);\n')
def write_param(self, ptype, pname, pdflt, pnull, info):
if ptype == 'const-GstCaps*':
self.write_const_param(pname, pdflt, pnull, info)
elif ptype == 'GstCaps*':
self.write_normal_param(pname, pdflt, pnull, info)
else:
raise RuntimeError, "write_param not implemented for %s" % ptype
def write_const_param(self, pname, pdflt, pnull, info):
if pdflt:
assert pdflt == 'NULL'
info.varlist.add('PyObject', '*py_' + pname + ' = NULL')
else:
info.varlist.add('PyObject', '*py_' + pname)
info.varlist.add('GstCaps', '*'+pname)
info.varlist.add('gboolean', pname+'_is_copy')
info.add_parselist('O', ['&py_'+pname], [pname])
info.arglist.append(pname)
if pnull:
info.codebefore.append (self.beforenull % { 'name' : pname, 'namecopy' : '&'+pname+'_is_copy' })
else:
info.codebefore.append (self.before % { 'name' : pname, 'namecopy' : '&'+pname+'_is_copy' })
info.codeafter.append (self.after % { 'name' : pname, 'namecopy' : '&'+pname+'_is_copy' })
def write_normal_param(self, pname, pdflt, pnull, info):
if pdflt:
assert pdflt == 'NULL'
info.varlist.add('PyObject', '*py_' + pname + ' = NULL')
else:
info.varlist.add('PyObject', '*py_' + pname)
info.varlist.add('GstCaps', '*'+pname)
info.add_parselist('O', ['&py_'+pname], [pname])
info.arglist.append(pname)
if pnull:
info.codebefore.append (self.beforenull % { 'name' : pname, 'namecopy' : 'NULL' })
else:
info.codebefore.append (self.before % { 'name' : pname, 'namecopy' : 'NULL' })
def write_return(self, ptype, ownsreturn, info):
if ptype == 'GstCaps*':
info.varlist.add('GstCaps', '*ret')
copyval = 'FALSE'
elif ptype == 'const-GstCaps*':
info.varlist.add('const GstCaps', '*ret')
copyval = 'TRUE'
else:
raise RuntimeError, "write_return not implemented for %s" % ptype
info.codeafter.append(' return pyg_boxed_new (GST_TYPE_CAPS, (GstCaps*) ret, '+copyval+', TRUE);')
class GstIteratorArg(ArgType):
def write_return(self, ptype, ownsreturn, info):
info.varlist.add('GstIterator', '*ret')
info.codeafter.append(' return pygst_iterator_new(ret);')
class GstMiniObjectArg(ArgType):
before = (' %(name)s = %(macro)s(pygstminiobject_get (py_%(name)s));\n'
' if (PyErr_Occurred())\n'
' return NULL;\n')
def write_param(self, ptype, pname, pdflt, pnull, info):
if pdflt:
assert pdflt == 'NULL'
info.varlist.add('PyObject', '*py_' + pname + ' = NULL')
else:
info.varlist.add('PyObject', '*py_' + pname)
#Converts 'GstBuffer*' to 'GstBuffer'
#and const-GstBuffer* to 'const GstBuffer'
info.varlist.add(ptype.replace('-',' ').replace('*',''), '*'+pname)
if ptype in ['GstBuffer*', 'const-GstBuffer*']:
info.codebefore.append(self.before % { 'name' : pname, 'macro' : 'GST_BUFFER' })
elif ptype in ['GstEncodingProfile*', 'const-GstEncodingProfile*']:
info.codebefore.append(self.before % { 'name' : pname, 'macro' : 'GST_ENCODING_PROFILE' })
elif ptype in ['GstMessage*', 'const-GstMessage*']:
info.codebefore.append(self.before % { 'name' : pname, 'macro' : 'GST_MESSAGE' })
elif ptype in ['GstEvent*', 'const-GstEvent*']:
info.codebefore.append(self.before % { 'name' : pname, 'macro' : 'GST_EVENT' })
elif ptype in ['GstQuery*', 'const-GstQuery*']:
info.codebefore.append(self.before % { 'name' : pname, 'macro' : 'GST_QUERY' })
else:
raise RuntimeError, "write_param not implemented for %s" % ptype
info.add_parselist('O', ['&py_'+pname], [pname])
info.arglist.append(pname)
def write_return(self, ptype, ownsreturn, info):
info.varlist.add(ptype, 'ret')
info.codeafter.append(' return pygstminiobject_new((GstMiniObject *) ret);')
class GstMiniObjectParam(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'GstMiniObject *')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s = NULL;" % self.name)
self.wrapper.write_code(code=("if (%s) {\n"
" py_%s = pygstminiobject_new((GstMiniObject *) %s);\n"
" gst_mini_object_unref ((GstMiniObject *) %s);\n"
"} else {\n"
" Py_INCREF(Py_None);\n"
" py_%s = Py_None;\n"
"}"
% (self.name, self.name, self.name, self.name, self.name)),
cleanup=("gst_mini_object_ref ((GstMiniObject *) %s); Py_DECREF(py_%s);" % (self.name, self.name)))
self.wrapper.add_pyargv_item("py_%s" % self.name)
matcher.register_reverse('GstMiniObject*', GstMiniObjectParam)
class GstMiniObjectReturn(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'GstMiniObject *')
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return NULL;")
def write_conversion(self):
self.wrapper.write_code("retval = (%s) pygstminiobject_get(py_retval);"
% self.get_c_type())
self.wrapper.write_code("gst_mini_object_ref((GstMiniObject *) retval);")
matcher.register_reverse_ret('GstMiniObject*', GstMiniObjectReturn)
class GstCapsParam(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'GstCaps *')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s = NULL;" % self.name)
self.wrapper.write_code(code=("if (%s)\n"
" py_%s = pyg_boxed_new (GST_TYPE_CAPS, %s, FALSE, TRUE);\n"
"else {\n"
" Py_INCREF(Py_None);\n"
" py_%s = Py_None;\n"
"}"
% (self.name, self.name, self.name, self.name)),
cleanup=("gst_caps_ref(%s);\nPy_DECREF(py_%s);" % (self.name, self.name)))
self.wrapper.add_pyargv_item("py_%s" % self.name)
matcher.register_reverse('GstCaps*', GstCapsParam)
class GstCapsReturn(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'GstCaps *')
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return NULL;")
def write_conversion(self):
self.wrapper.write_code("retval = (%s) pygst_caps_from_pyobject (py_retval, NULL);"
% self.get_c_type())
## self.wrapper.write_code("gst_mini_object_ref((GstMiniObject *) retval);")
matcher.register_reverse_ret('GstCaps*', GstCapsReturn)
class Int64Param(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'gint64')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("py_%s = PyLong_FromLongLong(%s);" %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
class Int64Return(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'gint64')
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return -G_MAXINT;")
def write_conversion(self):
self.wrapper.write_code(
code=None,
failure_expression="!PyLong_Check(py_retval)",
failure_cleanup='PyErr_SetString(PyExc_TypeError, "retval should be a long");')
self.wrapper.write_code("retval = PyLong_AsLongLong(py_retval);")
class UInt64Param(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'guint64')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("py_%s = PyLong_FromUnsignedLongLong(%s);" %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
class UInt64Return(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'guint64')
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return -G_MAXINT;")
def write_conversion(self):
self.wrapper.write_code(
code=None,
failure_expression="!PyLong_Check(py_retval)",
failure_cleanup='PyErr_SetString(PyExc_TypeError, "retval should be a long");')
self.wrapper.write_code("retval = PyLong_AsUnsignedLongLongMask(py_retval);")
class ULongParam(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'gulong')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("py_%s = PyLong_FromUnsignedLong(%s);" %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
class ULongReturn(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'gulong')
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return -G_MAXINT;")
def write_conversion(self):
self.wrapper.write_code(
code=None,
failure_expression="!PyLong_Check(py_retval)",
failure_cleanup='PyErr_SetString(PyExc_TypeError, "retval should be a long");')
self.wrapper.write_code("retval = PyLong_AsUnsignedLongMask(py_retval);")
class ConstStringReturn(ReturnType):
def get_c_type(self):
return "const gchar *"
def write_decl(self):
self.wrapper.add_declaration("const gchar *retval;")
def write_error_return(self):
self.wrapper.write_code("return NULL;")
def write_conversion(self):
self.wrapper.write_code(
code=None,
failure_expression="!PyString_Check(py_retval)",
failure_cleanup='PyErr_SetString(PyExc_TypeError, "retval should be a string");')
self.wrapper.write_code("retval = g_strdup(PyString_AsString(py_retval));")
class StringArrayArg(ArgType):
"""Arg type for NULL-terminated string pointer arrays (GStrv, aka gchar**)."""
def write_return(self, ptype, ownsreturn, info):
if ownsreturn:
raise NotImplementedError ()
else:
info.varlist.add("gchar", "**ret")
info.codeafter.append(" if (ret) {\n"
" guint size = g_strv_length(ret);\n"
" PyObject *py_ret = PyTuple_New(size);\n"
" gint i;\n"
" for (i = 0; i < size; i++)\n"
" PyTuple_SetItem(py_ret, i,\n"
" PyString_FromString(ret[i]));\n"
" return py_ret;\n"
" }\n"
" return PyTuple_New (0);\n")
matcher.register('GstClockTime', UInt64Arg())
matcher.register('GstElementFactoryListType', UInt64Arg())
matcher.register('GstClockTimeDiff', Int64Arg())
matcher.register('xmlNodePtr', XmlNodeArg())
matcher.register('xmlDocPtr', XmlDocArg())
matcher.register('GstCaps', GstCapsArg()) #FIXME: does this work?
matcher.register('GstCaps*', GstCapsArg()) #FIXME: does this work?
matcher.register('const-GstCaps*', GstCapsArg())
matcher.register('GstIterator*', GstIteratorArg())
arg = PointerArg('gpointer', 'G_TYPE_POINTER')
matcher.register('GstClockID', arg)
for typename in ["GstPlugin", "GstStructure", "GstTagList", "GError", "GstDate", "GstSegment"]:
matcher.register_reverse(typename, GBoxedParam)
matcher.register_reverse_ret(typename, GBoxedReturn)
for typename in ["GstEncodingProfile*", "cons-GstEncodingProfile", "GstBuffer*", "const-GstBuffer*", "GstEvent*", "const-GstEvent*", "GstMessage*", "const-GstMessage*", "GstQuery*", "const-GstQuery*"]:
matcher.register(typename, GstMiniObjectArg())
matcher.register_reverse(typename, GstMiniObjectParam)
matcher.register_reverse_ret(typename, GstMiniObjectReturn)
for typename in ["gint64", "GstClockTimeDiff"]:
matcher.register_reverse(typename, Int64Param)
matcher.register_reverse_ret(typename, Int64Return)
for typename in ["guint64", "GstClockTime", "GstElementFactoryListType"]:
matcher.register_reverse(typename, UInt64Param)
matcher.register_reverse_ret(typename, UInt64Return)
matcher.register_reverse_ret("const-gchar*", ConstStringReturn)
matcher.register_reverse("GType", IntParam)
matcher.register_reverse_ret("GType", IntReturn)
matcher.register_reverse("gulong", ULongParam)
matcher.register_reverse_ret("gulong", ULongReturn)
matcher.register("GStrv", StringArrayArg())
del arg
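Additional argument handlers follow the same registration pattern used throughout this file; for instance (a sketch with a made-up type name, purely to illustrate the pattern):

    from argtypes import matcher, UInt64Arg

    # 'GstMyTimestamp' is hypothetical; register it exactly like GstClockTime above
    matcher.register('GstMyTimestamp', UInt64Arg())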


@@ -1,15 +0,0 @@
EXTRA_DIST = \
argtypes.py \
code-coverage.py \
codegen.py \
definitions.py \
defsparser.py \
docextract.py \
docgen.py \
h2def.py \
__init__.py \
mergedefs.py \
mkskel.py \
override.py \
reversewrapper.py \
scmexpr.py


@@ -1,16 +0,0 @@
# -*- Mode: Python; py-indent-offset: 4 -*-
__all__ = [
'argtypes',
'codegen',
'definitions',
'defsparser',
'docextract',
'docgen',
'h2def',
'defsgen',
'mergedefs',
'mkskel',
'override',
'scmexpr'
]

File diff suppressed because it is too large.


@@ -1,44 +0,0 @@
#! /usr/bin/env python
from __future__ import generators
import sys, os
def read_symbols(file, type=None, dynamic=0):
if dynamic:
cmd = 'nm -D %s' % file
else:
cmd = 'nm %s' % file
for line in os.popen(cmd, 'r'):
if line[0] != ' ': # has an address as first bit of line
while line[0] != ' ':
line = line[1:]
while line[0] == ' ':
line = line[1:]
# we should be up to "type symbolname" now
sym_type = line[0]
symbol = line[1:].strip()
if not type or type == sym_type:
yield symbol
def main():
if len(sys.argv) != 3:
sys.stderr.write('usage: coverage-check library.so wrapper.so\n')
sys.exit(1)
library = sys.argv[1]
wrapper = sys.argv[2]
# first create a dict with all referenced symbols in the wrapper
# should really be a set, but a dict will do ...
wrapper_symbols = {}
for symbol in read_symbols(wrapper, type='U', dynamic=1):
wrapper_symbols[symbol] = 1
# now go through the library looking for matches on the defined symbols:
for symbol in read_symbols(library, type='T', dynamic=1):
if symbol[0] == '_': continue
if symbol not in wrapper_symbols:
print symbol
if __name__ == '__main__':
main()
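In practice the script is run against the shared objects produced by the build, as a rough sketch (both paths are illustrative, not taken from this commit):

    import subprocess

    # prints exported libges symbols that the Python wrapper never references
    subprocess.check_call(['python', 'codegen/code-coverage.py',
                           '.libs/libges-0.10.so', '.libs/ges.so'])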

File diff suppressed because it is too large.


@@ -1,575 +0,0 @@
# -*- Mode: Python; py-indent-offset: 4 -*-
import copy
import sys
def get_valid_scheme_definitions(defs):
return [x for x in defs if isinstance(x, tuple) and len(x) >= 2]
def unescape(s):
s = s.replace('\r\n', '\\r\\n').replace('\t', '\\t')
return s.replace('\r', '\\r').replace('\n', '\\n')
def make_docstring(lines):
return "(char *) " + '\n'.join(['"%s"' % unescape(s) for s in lines])
# New Parameter class, which emulates a tuple for compatibility reasons
class Parameter(object):
def __init__(self, ptype, pname, pdflt, pnull, pdir=None):
self.ptype = ptype
self.pname = pname
self.pdflt = pdflt
self.pnull = pnull
self.pdir = pdir
def __len__(self): return 4
def __getitem__(self, i):
return (self.ptype, self.pname, self.pdflt, self.pnull)[i]
def merge(self, old):
if old.pdflt is not None:
self.pdflt = old.pdflt
if old.pnull is not None:
self.pnull = old.pnull
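A quick illustration of the tuple emulation noted above, as a sketch (the module name "definitions" matches the import used by defsparser.py further down):

    from definitions import Parameter

    p = Parameter('GstCaps*', 'caps', None, 1)
    ptype, pname, pdflt, pnull = p          # unpacks exactly like the old 4-tuples
    assert len(p) == 4 and p[1] == 'caps'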
# We currently subclass 'str' to make impact on the rest of codegen as
# little as possible. Later we can subclass 'object' instead, but
# then we must find and adapt all places which expect return types to
# be strings.
class ReturnType(str):
def __new__(cls, *args, **kwds):
return str.__new__(cls, *args[:1])
def __init__(self, type_name, optional=False):
str.__init__(self)
self.optional = optional
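And the string compatibility described in the comment above, sketched the same way:

    from definitions import ReturnType

    ret = ReturnType('GstCaps*', optional=True)
    assert isinstance(ret, str) and ret == 'GstCaps*'   # string-based codegen paths still work
    assert ret.optional                                 # extra metadata rides along with the string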
# Parameter for property based constructors
class Property(object):
def __init__(self, pname, optional, argname):
self.pname = pname
self.optional = optional
self.argname = argname
def __len__(self): return 4
def __getitem__(self, i):
return ('', self.pname, self.optional, self.argname)[i]
def merge(self, old):
if old.optional is not None:
self.optional = old.optional
if old.argname is not None:
self.argname = old.argname
class Definition(object):
docstring = "NULL"
def py_name(self):
return '%s.%s' % (self.module, self.name)
py_name = property(py_name)
def __init__(self, *args):
"""Create a new defs object of this type. The arguments are the
components of the definition"""
raise RuntimeError("this is an abstract class")
def merge(self, old):
"""Merge in customisations from older version of definition"""
raise RuntimeError("this is an abstract class")
def write_defs(self, fp=sys.stdout):
"""write out this definition in defs file format"""
raise RuntimeError("this is an abstract class")
def guess_return_value_ownership(self):
"return 1 if caller owns return value"
if getattr(self, 'is_constructor_of', False):
self.caller_owns_return = True
elif self.ret in ('char*', 'gchar*', 'string'):
self.caller_owns_return = True
else:
self.caller_owns_return = False
class ObjectDef(Definition):
def __init__(self, name, *args):
self.name = name
self.module = None
self.parent = None
self.c_name = None
self.typecode = None
self.fields = []
self.implements = []
self.class_init_func = None
self.has_new_constructor_api = False
for arg in get_valid_scheme_definitions(args):
if arg[0] == 'in-module':
self.module = arg[1]
elif arg[0] == 'docstring':
self.docstring = make_docstring(arg[1:])
elif arg[0] == 'parent':
self.parent = arg[1]
elif arg[0] == 'c-name':
self.c_name = arg[1]
elif arg[0] == 'gtype-id':
self.typecode = arg[1]
elif arg[0] == 'fields':
for parg in arg[1:]:
self.fields.append((parg[0], parg[1]))
elif arg[0] == 'implements':
self.implements.append(arg[1])
def merge(self, old):
# currently the .h parser doesn't try to work out what fields of
# an object structure should be public, so we just copy the list
# from the old version ...
self.fields = old.fields
self.implements = old.implements
def write_defs(self, fp=sys.stdout):
fp.write('(define-object ' + self.name + '\n')
if self.module:
fp.write(' (in-module "' + self.module + '")\n')
if self.parent != (None, None):
fp.write(' (parent "' + self.parent + '")\n')
for interface in self.implements:
fp.write(' (implements "' + interface + '")\n')
if self.c_name:
fp.write(' (c-name "' + self.c_name + '")\n')
if self.typecode:
fp.write(' (gtype-id "' + self.typecode + '")\n')
if self.fields:
fp.write(' (fields\n')
for (ftype, fname) in self.fields:
fp.write(' \'("' + ftype + '" "' + fname + '")\n')
fp.write(' )\n')
fp.write(')\n\n')
class InterfaceDef(Definition):
def __init__(self, name, *args):
self.name = name
self.module = None
self.c_name = None
self.typecode = None
self.vtable = None
self.fields = []
self.interface_info = None
for arg in get_valid_scheme_definitions(args):
if arg[0] == 'in-module':
self.module = arg[1]
elif arg[0] == 'docstring':
self.docstring = make_docstring(arg[1:])
elif arg[0] == 'c-name':
self.c_name = arg[1]
elif arg[0] == 'gtype-id':
self.typecode = arg[1]
elif arg[0] == 'vtable':
self.vtable = arg[1]
if self.vtable is None:
self.vtable = self.c_name + "Iface"
def write_defs(self, fp=sys.stdout):
fp.write('(define-interface ' + self.name + '\n')
if self.module:
fp.write(' (in-module "' + self.module + '")\n')
if self.c_name:
fp.write(' (c-name "' + self.c_name + '")\n')
if self.typecode:
fp.write(' (gtype-id "' + self.typecode + '")\n')
fp.write(')\n\n')
class EnumDef(Definition):
def __init__(self, name, *args):
self.deftype = 'enum'
self.name = name
self.in_module = None
self.c_name = None
self.typecode = None
self.values = []
for arg in get_valid_scheme_definitions(args):
if arg[0] == 'in-module':
self.in_module = arg[1]
elif arg[0] == 'c-name':
self.c_name = arg[1]
elif arg[0] == 'gtype-id':
self.typecode = arg[1]
elif arg[0] == 'values':
for varg in arg[1:]:
self.values.append((varg[0], varg[1]))
def merge(self, old):
pass
def write_defs(self, fp=sys.stdout):
fp.write('(define-' + self.deftype + ' ' + self.name + '\n')
if self.in_module:
fp.write(' (in-module "' + self.in_module + '")\n')
fp.write(' (c-name "' + self.c_name + '")\n')
fp.write(' (gtype-id "' + self.typecode + '")\n')
if self.values:
fp.write(' (values\n')
for name, val in self.values:
fp.write(' \'("' + name + '" "' + val + '")\n')
fp.write(' )\n')
fp.write(')\n\n')
class FlagsDef(EnumDef):
def __init__(self, *args):
apply(EnumDef.__init__, (self,) + args)
self.deftype = 'flags'
class BoxedDef(Definition):
def __init__(self, name, *args):
self.name = name
self.module = None
self.c_name = None
self.typecode = None
self.copy = None
self.release = None
self.fields = []
for arg in get_valid_scheme_definitions(args):
if arg[0] == 'in-module':
self.module = arg[1]
elif arg[0] == 'c-name':
self.c_name = arg[1]
elif arg[0] == 'gtype-id':
self.typecode = arg[1]
elif arg[0] == 'copy-func':
self.copy = arg[1]
elif arg[0] == 'release-func':
self.release = arg[1]
elif arg[0] == 'fields':
for parg in arg[1:]:
self.fields.append((parg[0], parg[1]))
def merge(self, old):
# currently the .h parser doesn't try to work out what fields of
# an object structure should be public, so we just copy the list
# from the old version ...
self.fields = old.fields
def write_defs(self, fp=sys.stdout):
fp.write('(define-boxed ' + self.name + '\n')
if self.module:
fp.write(' (in-module "' + self.module + '")\n')
if self.c_name:
fp.write(' (c-name "' + self.c_name + '")\n')
if self.typecode:
fp.write(' (gtype-id "' + self.typecode + '")\n')
if self.copy:
fp.write(' (copy-func "' + self.copy + '")\n')
if self.release:
fp.write(' (release-func "' + self.release + '")\n')
if self.fields:
fp.write(' (fields\n')
for (ftype, fname) in self.fields:
fp.write(' \'("' + ftype + '" "' + fname + '")\n')
fp.write(' )\n')
fp.write(')\n\n')
class PointerDef(Definition):
def __init__(self, name, *args):
self.name = name
self.module = None
self.c_name = None
self.typecode = None
self.fields = []
for arg in get_valid_scheme_definitions(args):
if arg[0] == 'in-module':
self.module = arg[1]
elif arg[0] == 'c-name':
self.c_name = arg[1]
elif arg[0] == 'gtype-id':
self.typecode = arg[1]
elif arg[0] == 'fields':
for parg in arg[1:]:
self.fields.append((parg[0], parg[1]))
def merge(self, old):
# currently the .h parser doesn't try to work out what fields of
# an object structure should be public, so we just copy the list
# from the old version ...
self.fields = old.fields
def write_defs(self, fp=sys.stdout):
fp.write('(define-pointer ' + self.name + '\n')
if self.module:
fp.write(' (in-module "' + self.module + '")\n')
if self.c_name:
fp.write(' (c-name "' + self.c_name + '")\n')
if self.typecode:
fp.write(' (gtype-id "' + self.typecode + '")\n')
if self.fields:
fp.write(' (fields\n')
for (ftype, fname) in self.fields:
fp.write(' \'("' + ftype + '" "' + fname + '")\n')
fp.write(' )\n')
fp.write(')\n\n')
class MethodDefBase(Definition):
def __init__(self, name, *args):
dump = 0
self.name = name
self.ret = None
self.caller_owns_return = None
self.unblock_threads = None
self.c_name = None
self.typecode = None
self.of_object = None
self.params = [] # of form (type, name, default, nullok)
self.varargs = 0
self.deprecated = None
for arg in get_valid_scheme_definitions(args):
if arg[0] == 'of-object':
self.of_object = arg[1]
elif arg[0] == 'docstring':
self.docstring = make_docstring(arg[1:])
elif arg[0] == 'c-name':
self.c_name = arg[1]
elif arg[0] == 'gtype-id':
self.typecode = arg[1]
elif arg[0] == 'return-type':
type_name = arg[1]
optional = False
for prop in arg[2:]:
if prop[0] == 'optional':
optional = True
self.ret = ReturnType(type_name, optional)
elif arg[0] == 'caller-owns-return':
self.caller_owns_return = arg[1] in ('t', '#t')
elif arg[0] == 'unblock-threads':
self.unblock_threads = arg[1] in ('t', '#t')
elif arg[0] == 'parameters':
for parg in arg[1:]:
ptype = parg[0]
pname = parg[1]
pdflt = None
pnull = 0
pdir = None
for farg in parg[2:]:
assert isinstance(farg, tuple)
if farg[0] == 'default':
pdflt = farg[1]
elif farg[0] == 'null-ok':
pnull = 1
elif farg[0] == 'direction':
pdir = farg[1]
self.params.append(Parameter(ptype, pname, pdflt, pnull, pdir))
elif arg[0] == 'varargs':
self.varargs = arg[1] in ('t', '#t')
elif arg[0] == 'deprecated':
self.deprecated = arg[1]
else:
sys.stderr.write("Warning: %s argument unsupported.\n"
% (arg[0]))
dump = 1
if dump:
self.write_defs(sys.stderr)
if self.caller_owns_return is None and self.ret is not None:
self.guess_return_value_ownership()
def merge(self, old, parmerge):
self.caller_owns_return = old.caller_owns_return
self.varargs = old.varargs
# here we merge extra parameter flags across to the new object.
if not parmerge:
self.params = copy.deepcopy(old.params)
return
for i in range(len(self.params)):
ptype, pname, pdflt, pnull = self.params[i]
for p2 in old.params:
if p2[1] == pname:
self.params[i] = (ptype, pname, p2[2], p2[3])
break
def _write_defs(self, fp=sys.stdout):
if self.of_object != (None, None):
fp.write(' (of-object "' + self.of_object + '")\n')
if self.c_name:
fp.write(' (c-name "' + self.c_name + '")\n')
if self.typecode:
fp.write(' (gtype-id "' + self.typecode + '")\n')
if self.caller_owns_return:
fp.write(' (caller-owns-return #t)\n')
if self.unblock_threads:
fp.write(' (unblock_threads #t)\n')
if self.ret:
fp.write(' (return-type "' + self.ret + '")\n')
if self.deprecated:
fp.write(' (deprecated "' + self.deprecated + '")\n')
if self.params:
fp.write(' (parameters\n')
for ptype, pname, pdflt, pnull in self.params:
fp.write(' \'("' + ptype + '" "' + pname +'"')
if pdflt: fp.write(' (default "' + pdflt + '")')
if pnull: fp.write(' (null-ok)')
fp.write(')\n')
fp.write(' )\n')
if self.varargs:
fp.write(' (varargs #t)\n')
fp.write(')\n\n')
class MethodDef(MethodDefBase):
def __init__(self, name, *args):
MethodDefBase.__init__(self, name, *args)
for item in ('c_name', 'of_object'):
if self.__dict__[item] == None:
self.write_defs(sys.stderr)
raise RuntimeError("definition missing required %s" % (item,))
def write_defs(self, fp=sys.stdout):
fp.write('(define-method ' + self.name + '\n')
self._write_defs(fp)
class VirtualDef(MethodDefBase):
def write_defs(self, fp=sys.stdout):
fp.write('(define-virtual ' + self.name + '\n')
self._write_defs(fp)
class FunctionDef(Definition):
def __init__(self, name, *args):
dump = 0
self.name = name
self.in_module = None
self.is_constructor_of = None
self.ret = None
self.caller_owns_return = None
self.unblock_threads = None
self.c_name = None
self.typecode = None
self.params = [] # of form (type, name, default, nullok)
self.varargs = 0
self.deprecated = None
for arg in get_valid_scheme_definitions(args):
if arg[0] == 'in-module':
self.in_module = arg[1]
elif arg[0] == 'docstring':
self.docstring = make_docstring(arg[1:])
elif arg[0] == 'is-constructor-of':
self.is_constructor_of = arg[1]
elif arg[0] == 'c-name':
self.c_name = arg[1]
elif arg[0] == 'gtype-id':
self.typecode = arg[1]
elif arg[0] == 'return-type':
self.ret = arg[1]
elif arg[0] == 'caller-owns-return':
self.caller_owns_return = arg[1] in ('t', '#t')
elif arg[0] == 'unblock-threads':
self.unblock_threads = arg[1] in ('t', '#t')
elif arg[0] == 'parameters':
for parg in arg[1:]:
ptype = parg[0]
pname = parg[1]
pdflt = None
pnull = 0
for farg in parg[2:]:
if farg[0] == 'default':
pdflt = farg[1]
elif farg[0] == 'null-ok':
pnull = 1
self.params.append(Parameter(ptype, pname, pdflt, pnull))
elif arg[0] == 'properties':
if self.is_constructor_of is None:
print >> sys.stderr, "Warning: (properties ...) "\
"is only valid for constructors"
for prop in arg[1:]:
pname = prop[0]
optional = False
argname = pname
for farg in prop[1:]:
if farg[0] == 'optional':
optional = True
elif farg[0] == 'argname':
argname = farg[1]
self.params.append(Property(pname, optional, argname))
elif arg[0] == 'varargs':
self.varargs = arg[1] in ('t', '#t')
elif arg[0] == 'deprecated':
self.deprecated = arg[1]
else:
sys.stderr.write("Warning: %s argument unsupported\n"
% (arg[0],))
dump = 1
if dump:
self.write_defs(sys.stderr)
if self.caller_owns_return is None and self.ret is not None:
self.guess_return_value_ownership()
for item in ('c_name',):
if self.__dict__[item] == None:
self.write_defs(sys.stderr)
raise RuntimeError("definition missing required %s" % (item,))
_method_write_defs = MethodDef.__dict__['write_defs']
def merge(self, old, parmerge):
self.caller_owns_return = old.caller_owns_return
self.varargs = old.varargs
if not parmerge:
self.params = copy.deepcopy(old.params)
return
# here we merge extra parameter flags across to the new object.
def merge_param(param):
for old_param in old.params:
if old_param.pname == param.pname:
if isinstance(old_param, Property):
# h2def never scans Property entries, therefore if
# we have one it was manually written, so we
# keep it.
return copy.deepcopy(old_param)
else:
param.merge(old_param)
return param
raise RuntimeError("could not find %s in old_parameters %r" % (
param.pname, [p.pname for p in old.params]))
try:
self.params = map(merge_param, self.params)
except RuntimeError:
# parameter names changed and we can't find a match; it's
# safer to keep the old parameter list untouched.
self.params = copy.deepcopy(old.params)
if not self.is_constructor_of:
try:
self.is_constructor_of = old.is_constructor_of
except AttributeError:
pass
if isinstance(old, MethodDef):
self.name = old.name
# transmogrify from function into method ...
self.write_defs = self._method_write_defs
self.of_object = old.of_object
del self.params[0]
def write_defs(self, fp=sys.stdout):
fp.write('(define-function ' + self.name + '\n')
if self.in_module:
fp.write(' (in-module "' + self.in_module + '")\n')
if self.is_constructor_of:
fp.write(' (is-constructor-of "' + self.is_constructor_of +'")\n')
if self.c_name:
fp.write(' (c-name "' + self.c_name + '")\n')
if self.typecode:
fp.write(' (gtype-id "' + self.typecode + '")\n')
if self.caller_owns_return:
fp.write(' (caller-owns-return #t)\n')
if self.unblock_threads:
fp.write(' (unblock-threads #t)\n')
if self.ret:
fp.write(' (return-type "' + self.ret + '")\n')
if self.deprecated:
fp.write(' (deprecated "' + self.deprecated + '")\n')
if self.params:
if isinstance(self.params[0], Parameter):
fp.write(' (parameters\n')
for ptype, pname, pdflt, pnull in self.params:
fp.write(' \'("' + ptype + '" "' + pname +'"')
if pdflt: fp.write(' (default "' + pdflt + '")')
if pnull: fp.write(' (null-ok)')
fp.write(')\n')
fp.write(' )\n')
elif isinstance(self.params[0], Property):
fp.write(' (properties\n')
for prop in self.params:
fp.write(' \'("' + prop.pname +'"')
if prop.optional: fp.write(' (optional)')
fp.write(')\n')
fp.write(' )\n')
else:
assert False, "strange parameter list %r" % self.params[0]
if self.varargs:
fp.write(' (varargs #t)\n')
fp.write(')\n\n')
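To make the round trip concrete, a small sketch of building one of these definitions by hand and dumping it back out (the GES names are illustrative; the real entries live in the generated ges.defs):

    from definitions import FunctionDef

    fdef = FunctionDef('timeline_new',
                       ('in-module', 'Ges'),
                       ('is-constructor-of', 'GESTimeline'),
                       ('c-name', 'ges_timeline_new'),
                       ('return-type', 'GESTimeline*'))
    fdef.write_defs()   # prints a (define-function timeline_new ...) block to stdout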


@@ -1,153 +0,0 @@
# -*- Mode: Python; py-indent-offset: 4 -*-
import os, sys
import scmexpr
from definitions import BoxedDef, EnumDef, FlagsDef, FunctionDef, \
InterfaceDef, MethodDef, ObjectDef, PointerDef, VirtualDef
include_path = ['.']
class IncludeParser(scmexpr.Parser):
"""A simple parser that follows include statements automatically"""
def include(self, input_filename):
global include_path
if os.path.isabs(input_filename):
filename = input_filename
# set self.filename to the include name, to handle recursive includes
oldfile = self.filename
self.filename = filename
self.startParsing()
self.filename = oldfile
else:
inc_path = [os.path.dirname(self.filename)] + include_path
for filename in [os.path.join(path_entry, input_filename)
for path_entry in inc_path]:
if not os.path.exists(filename):
continue
# set self.filename to the include name, to handle recursive includes
oldfile = self.filename
self.filename = filename
self.startParsing()
self.filename = oldfile
break
else:
raise IOError("%s not found in include path %s" % (input_filename, inc_path))
class DefsParser(IncludeParser):
def __init__(self, arg, defines={}):
IncludeParser.__init__(self, arg)
self.objects = []
self.interfaces = []
self.enums = [] # enums and flags
self.boxes = [] # boxed types
self.pointers = [] # pointer types
self.functions = [] # functions and methods
self.virtuals = [] # virtual methods
self.c_name = {} # hash of c names of functions
self.methods = {} # hash of methods of particular objects
self.defines = defines # -Dfoo=bar options, as dictionary
def define_object(self, *args):
odef = apply(ObjectDef, args)
self.objects.append(odef)
self.c_name[odef.c_name] = odef
def define_interface(self, *args):
idef = apply(InterfaceDef, args)
self.interfaces.append(idef)
self.c_name[idef.c_name] = idef
def define_enum(self, *args):
edef = apply(EnumDef, args)
self.enums.append(edef)
self.c_name[edef.c_name] = edef
def define_flags(self, *args):
fdef = apply(FlagsDef, args)
self.enums.append(fdef)
self.c_name[fdef.c_name] = fdef
def define_boxed(self, *args):
bdef = apply(BoxedDef, args)
self.boxes.append(bdef)
self.c_name[bdef.c_name] = bdef
def define_pointer(self, *args):
pdef = apply(PointerDef, args)
self.pointers.append(pdef)
self.c_name[pdef.c_name] = pdef
def define_function(self, *args):
fdef = apply(FunctionDef, args)
self.functions.append(fdef)
self.c_name[fdef.c_name] = fdef
def define_method(self, *args):
mdef = apply(MethodDef, args)
self.functions.append(mdef)
self.c_name[mdef.c_name] = mdef
def define_virtual(self, *args):
vdef = apply(VirtualDef, args)
self.virtuals.append(vdef)
def merge(self, old, parmerge):
for obj in self.objects:
if old.c_name.has_key(obj.c_name):
obj.merge(old.c_name[obj.c_name])
for f in self.functions:
if old.c_name.has_key(f.c_name):
f.merge(old.c_name[f.c_name], parmerge)
def printMissing(self, old):
for obj in self.objects:
if not old.c_name.has_key(obj.c_name):
obj.write_defs()
for f in self.functions:
if not old.c_name.has_key(f.c_name):
f.write_defs()
def write_defs(self, fp=sys.stdout):
for obj in self.objects:
obj.write_defs(fp)
for enum in self.enums:
enum.write_defs(fp)
for boxed in self.boxes:
boxed.write_defs(fp)
for pointer in self.pointers:
pointer.write_defs(fp)
for func in self.functions:
func.write_defs(fp)
def find_object(self, c_name):
for obj in self.objects:
if obj.c_name == c_name:
return obj
else:
raise ValueError('object %r not found' % c_name)
def find_constructor(self, obj, overrides):
for func in self.functions:
if isinstance(func, FunctionDef) and \
func.is_constructor_of == obj.c_name and \
not overrides.is_ignored(func.c_name):
return func
def find_methods(self, obj):
objname = obj.c_name
return filter(lambda func, on=objname: isinstance(func, MethodDef) and
func.of_object == on, self.functions)
def find_virtuals(self, obj):
objname = obj.c_name
retval = filter(lambda func, on=objname: isinstance(func, VirtualDef) and
func.of_object == on, self.virtuals)
return retval
def find_functions(self):
return filter(lambda func: isinstance(func, FunctionDef) and
not func.is_constructor_of, self.functions)
def ifdef(self, *args):
if args[0] in self.defines:
for arg in args[1:]:
#print >> sys.stderr, "-----> Handling conditional definition (%s): %s" % (args[0], arg)
self.handle(arg)
else:
pass
#print >> sys.stderr, "-----> Conditional %s is not true" % (args[0],)
def ifndef(self, *args):
if args[0] not in self.defines:
for arg in args[1:]:
self.handle(arg)
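A minimal sketch of driving the parser directly (assumes the generated ges.defs is in the current directory; startParsing() comes from scmexpr.Parser, whose diff is suppressed above):

    from defsparser import DefsParser

    parser = DefsParser('ges.defs')
    parser.startParsing()
    for func in parser.find_functions():    # plain functions, constructors excluded
        func.write_defs()                   # dump each back out as a (define-function ...) block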


@@ -1,461 +0,0 @@
# -*- Mode: Python; py-indent-offset: 4 -*-
'''Simple module for extracting GNOME style doc comments from C
sources, so I can use them for other purposes.'''
import sys, os, string, re
# Used to tell if the "Since: ..." portion of the gtkdoc function description
# should be omitted. This is useful for some C++ modules such as gstreamermm
# that wrap C API which is still unstable and including this information would
# not be useful.
# This variable is modified from docextract_to_xml based on the --no-since
# option being specified.
no_since = False
__all__ = ['extract']
class GtkDoc:
def __init__(self):
self.name = None
self.block_type = '' # The block type ('function', 'signal', 'property')
self.params = []
self.annotations = []
self.description = ''
self.ret = ('', []) # (return, annotations)
def set_name(self, name):
self.name = name
def set_type(self, block_type):
self.block_type = block_type
def get_type(self):
return self.block_type
def add_param(self, name, description, annotations=[]):
if name == '...':
name = 'Varargs'
self.params.append((name, description, annotations))
def append_to_last_param(self, extra):
self.params[-1] = (self.params[-1][0], self.params[-1][1] + extra,
self.params[-1][2])
def append_to_named_param(self, name, extra):
for i in range(len(self.params)):
if self.params[i][0] == name:
self.params[i] = (name, self.params[i][1] + extra,
self.params[i][2])
return
# fall through to adding extra parameter ...
self.add_param(name, extra)
def add_annotation(self, annotation):
self.annotations.append(annotation)
def get_annotations(self):
return self.annotations
def append_to_description(self, extra):
self.description = self.description + extra
def get_description(self):
return self.description
def add_return(self, first_line, annotations=[]):
self.ret = (first_line, annotations)
def append_to_return(self, extra):
self.ret = (self.ret[0] + extra, self.ret[1])
comment_start_pattern = re.compile(r'^\s*/\*\*\s')
comment_end_pattern = re.compile(r'^\s*\*+/')
comment_line_lead_pattern = re.compile(r'^\s*\*\s*')
comment_empty_line_pattern = re.compile(r'^\s*\**\s*$')
function_name_pattern = re.compile(r'^([a-z]\w*)\s*:?(\s*\(.*\)\s*){0,2}\s*$')
signal_name_pattern = re.compile(r'^([A-Z]\w+::[a-z0-9-]+)\s*:?(\s*\(.*\)\s*){0,2}\s*$')
property_name_pattern = re.compile(r'^([A-Z]\w+:[a-z0-9-]+)\s*:?(\s*\(.*\)\s*){0,2}\s*$')
return_pattern = re.compile(r'^@?(returns:|return\s+value:)(.*\n?)$', re.IGNORECASE)
deprecated_pattern = re.compile(r'^(deprecated\s*:\s*.*\n?)$', re.IGNORECASE)
rename_to_pattern = re.compile(r'^(rename\s+to)\s*:\s*(.*\n?)$', re.IGNORECASE)
param_pattern = re.compile(r'^@(\S+)\s*:(.*\n?)$')
# Used to extract the annotations in the parameter and return descriptions
# extracted using above [param|return]_pattern patterns.
annotations_pattern = re.compile(r'^(?:(\s*\(.*\)\s*)*:)')
# Used to construct the annotation lists.
annotation_lead_pattern = re.compile(r'^\s*\(\s*(.*?)\s*\)\s*')
# These patterns determine the identifier of the current comment block. They
# are grouped in a list for easy determination of block identifiers (in
# skip_to_identifier). The function_name_pattern should be tested for last
# because it always matches signal and property identifiers.
identifier_patterns = [ signal_name_pattern, property_name_pattern, function_name_pattern ]
# This pattern is to match return sections that forget to have a colon (':')
# after the initial 'Return' phrase. It is not included by default in the list
# of final sections below because a lot of function descriptions begin with
# 'Returns ...' and the process_description() function would stop right at that
# first line, thinking it is a return section.
no_colon_return_pattern = re.compile(r'^@?(returns|return\s+value)\s*(.*\n?)$', re.IGNORECASE)
since_pattern = re.compile(r'^(since\s*:\s*.*\n?)$', re.IGNORECASE)
# These patterns normally will be encountered after the description. Knowing
# the order of their appearance is difficult so this list is used to test when
# one begins and the other ends when processing the rest of the sections after
# the description.
final_section_patterns = [ return_pattern, since_pattern, deprecated_pattern, rename_to_pattern ]
def parse_file(fp, doc_dict):
line = fp.readline()
while line:
cur_doc = GtkDoc()
line = skip_to_comment_block(fp, line)
line = skip_to_identifier(fp, line, cur_doc)
# See if the identifier is found (stored in the current GtkDoc by
# skip_to_identifier). If so, continue reading the rest of the comment
# block.
if cur_doc.name:
line = process_params(fp, line, cur_doc)
line = process_description(fp, line, cur_doc)
line = process_final_sections(fp, line, cur_doc)
# Add the current doc block to the dictionary of doc blocks.
doc_dict[cur_doc.name] = cur_doc
# Given a list of annotations as string of the form
# '(annotation1) (annotation2) ...' return a list of annotations of the form
# [ (name1, value1), (name2, value2) ... ]. Not all annotations have values so
# the values in the list of tuples could be empty ('').
def get_annotation_list(annotations):
annotation_list = []
while annotations:
match = annotation_lead_pattern.match(annotations)
if match:
annotation_contents = match.group(1)
name, split, value = annotation_contents.strip().partition(' ')
annotation_list.append((name, value))
# Remove first occurrence to continue processing.
annotations = annotation_lead_pattern.sub('', annotations)
else:
break
return annotation_list
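For example, the splitting performed here turns a GObject-Introspection style annotation string into (name, value) pairs, with valueless annotations getting an empty string:

    get_annotation_list('(transfer full) (allow-none)')
    # -> [('transfer', 'full'), ('allow-none', '')]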
# Given a currently read line, test that line and continue reading until the
# beginning of a comment block is found or eof is reached. Return the last
# read line.
def skip_to_comment_block(fp, line):
while line:
if comment_start_pattern.match(line):
break
line = fp.readline()
return line
# Given the current line in a comment block, continue skipping lines until a
# non-blank line in the comment block is found or until the end of the block
# (or eof) is reached. Returns the line where reading stopped.
def skip_to_nonblank(fp, line):
while line:
if not comment_empty_line_pattern.match(line):
break
line = fp.readline()
# Stop processing if eof or end of comment block is reached.
if not line or comment_end_pattern.match(line):
break
return line
# Given the first line of a comment block (the '/**'), see if the next
# non-blank line is the identifier of the comment block. Stop processing if
# the end of the block or eof is reached. Store the identifier (if there is
# one) and its type ('function', 'signal' or 'property') in the given GtkDoc.
# Return the line where the identifier is found or the line that stops the
# processing (if eof or the end of the comment block is found first).
def skip_to_identifier(fp, line, cur_doc):
# Skip the initial comment block line ('/**') if not eof.
if line: line = fp.readline()
# Now skip empty lines.
line = skip_to_nonblank(fp, line)
# See if the first non-blank line is the identifier.
if line and not comment_end_pattern.match(line):
# Remove the initial ' * ' in comment block line and see if there is an
# identifier.
line = comment_line_lead_pattern.sub('', line)
for pattern in identifier_patterns:
match = pattern.match(line)
if match:
# Set the GtkDoc name.
cur_doc.set_name(match.group(1))
# Get annotations and add them to the GtkDoc.
annotations = get_annotation_list(match.group(2))
for annotation in annotations:
cur_doc.add_annotation(annotation)
# Set the GtkDoc type.
if pattern == signal_name_pattern:
cur_doc.set_type('signal')
elif pattern == property_name_pattern:
cur_doc.set_type('property')
elif pattern == function_name_pattern:
cur_doc.set_type('function')
return line
return line
# Given a currently read line (presumably the identifier line), read the next
# lines, testing to see if the lines are part of parameter descriptions. If
# so, store the parameter descriptions in the given doc block. Stop on eof and
# return the last line that stops the processing.
def process_params(fp, line, cur_doc):
# Skip the identifier line if not eof. Also skip any blank lines in the
# comment block. Return if eof or the end of the comment block are
# encountered.
if line: line = fp.readline()
line = skip_to_nonblank(fp, line)
if not line or comment_end_pattern.match(line):
return line
# Remove initial ' * ' in first non-empty comment block line.
line = comment_line_lead_pattern.sub('', line)
# Now process possible parameters as long as neither eof nor the end of the
# param section is reached (which could be triggered by anything that
# doesn't match a '@param: ...' line, even the end of the comment block).
match = param_pattern.match(line)
while line and match:
description = match.group(2)
# First extract the annotations from the description and save them.
annotations = []
annotation_match = annotations_pattern.match(description)
if annotation_match:
annotations = get_annotation_list(annotation_match.group(1))
# Remove the annotations from the description
description = annotations_pattern.sub('', description)
# Default to appending lines to current parameter.
append_func = cur_doc.append_to_last_param
# See if the return has been included as part of the parameter
# section and make sure that lines are added to the GtkDoc return if
# so.
if match.group(1).lower() == "returns":
cur_doc.add_return(description, annotations)
append_func = cur_doc.append_to_return
# If not, just add it as a regular parameter.
else:
cur_doc.add_param(match.group(1), description, annotations)
# Now read lines and append them until next parameter, beginning of
# description (an empty line), the end of the comment block or eof.
line = fp.readline()
while line:
# Stop processing if end of comment block or a blank comment line
# is encountered.
if comment_empty_line_pattern.match(line) or \
comment_end_pattern.match(line):
break
# Remove initial ' * ' in comment block line.
line = comment_line_lead_pattern.sub('', line)
# Break from current param processing if a new one is
# encountered.
if param_pattern.match(line): break;
# Otherwise, just append the current line and get the next line.
append_func(line)
line = fp.readline()
# Re-evaluate match for while condition
match = param_pattern.match(line)
# End by returning the current line.
return line
# Having processed parameters, read the following lines into the description of
# the current doc block until the end of the comment block, the end of file or
# a return section is encountered.
def process_description(fp, line, cur_doc):
# First skip empty lines returning on eof or end of comment block.
line = skip_to_nonblank(fp, line)
if not line or comment_end_pattern.match(line):
return line
# Remove initial ' * ' in non-empty comment block line.
line = comment_line_lead_pattern.sub('', line)
# Also remove possible 'Description:' prefix.
if line[:12] == 'Description:': line = line[12:]
# Used to tell if the previous line was blank and a return section
# uncommonly marked with 'Returns ...' instead of 'Returns: ...' has
# started (assume it is non-empty to begin with).
prev_line = 'non-empty'
# Now read lines until a new section (like a return or a since section) is
# encountered.
while line:
# See if the description section has ended (if the line begins with
# 'Returns ...' and the previous line was empty -- this loop replaces
# empty lines with a newline).
if no_colon_return_pattern.match(line) and prev_line == '\n':
return line
# Or if one of the patterns of the final sections match
for pattern in final_section_patterns:
if pattern.match(line):
return line
# If not, append lines to description in the doc comment block.
cur_doc.append_to_description(line)
prev_line = line
line = fp.readline()
# Stop processing on eof or at the end of comment block.
if not line or comment_end_pattern.match(line):
return line
# Remove initial ' * ' in line so that the text can be appended to the
# description of the comment block and make sure that if the line is
# empty it be interpreted as a newline.
line = comment_line_lead_pattern.sub('', line)
if not line: line = '\n'
# Given the line that ended the description (the first line of one of the final
# sections) process the final sections ('Returns:', 'Since:', etc.) until the
# end of the comment block or eof. Return the line that ends the processing.
def process_final_sections(fp, line, cur_doc):
while line and not comment_end_pattern.match(line):
# Remove leading ' * ' from current non-empty comment line.
line = comment_line_lead_pattern.sub('', line)
# Temporarily append the no colon return pattern to the final section
# patterns now that the description has been processed. It will be
# removed after the for loop below executes so that future descriptions
# that begin with 'Returns ...' are not interpreted as a return
# section.
final_section_patterns.append(no_colon_return_pattern)
for pattern in final_section_patterns:
match = pattern.match(line)
if match:
if pattern == return_pattern or \
pattern == no_colon_return_pattern:
# Dealing with a 'Returns:' so first extract the
# annotations from the description and save them.
description = match.group(2)
annotations = []
annotation_match = \
annotations_pattern.match(description)
if annotation_match:
annotations = \
get_annotation_list(annotation_match.group(1))
# Remove the annotations from the description
description = annotations_pattern.sub('', description)
# Now add the return.
cur_doc.add_return(description, annotations)
# In case more lines need to be appended.
append_func = cur_doc.append_to_return
elif pattern == rename_to_pattern:
# Dealing with a 'Rename to:' section (GObjectIntrospection
# annotation) so no further lines will be appended but this
# single one (and only to the annotations).
append_func = None
cur_doc.add_annotation((match.group(1),
match.group(2)))
else:
# For all others ('Since:' and 'Deprecated:') just append
# the line to the description for now.
# But if --no-since is specified, don't append it.
if no_since and pattern == since_pattern:
pass
else:
cur_doc.append_to_description(line)
# In case more lines need to be appended.
append_func = cur_doc.append_to_description
# Stop final section pattern matching for loop since a match
# has already been found.
break
# Remove the no colon return pattern (which was temporarily added in
# the just executed loop) from the list of final section patterns.
final_section_patterns.pop()
line = fp.readline()
# Now continue appending lines to current section until a new one is
# found or an eof or the end of the comment block is encountered.
finished = False
while not finished and line and \
not comment_end_pattern.match(line):
# Remove leading ' * ' from line and make sure that if it is empty,
# it be interpreted as a newline.
line = comment_line_lead_pattern.sub('', line)
if not line: line = '\n'
for pattern in final_section_patterns:
if pattern.match(line):
finished = True
break
# Break out of loop if a new section is found (determined in above
# inner loop).
if finished: break
# Now it's safe to append line.
if append_func: append_func(line)
# Get the next line to continue processing.
line = fp.readline()
return line
def parse_dir(dir, doc_dict):
for file in os.listdir(dir):
if file in ('.', '..'): continue
path = os.path.join(dir, file)
if os.path.isdir(path):
parse_dir(path, doc_dict)
if len(file) > 2 and file[-2:] == '.c':
sys.stderr.write("Processing " + path + '\n')
parse_file(open(path, 'r'), doc_dict)
def extract(dirs, doc_dict=None):
if not doc_dict: doc_dict = {}
for dir in dirs:
parse_dir(dir, doc_dict)
return doc_dict
tmpl_section_pattern = re.compile(r'^<!-- ##### (\w+) (\w+) ##### -->$')
def parse_tmpl(fp, doc_dict):
cur_doc = None
line = fp.readline()
while line:
match = tmpl_section_pattern.match(line)
if match:
cur_doc = None # new input shouldn't affect the old doc dict
sect_type = match.group(1)
sect_name = match.group(2)
if sect_type == 'FUNCTION':
cur_doc = doc_dict.get(sect_name)
if not cur_doc:
cur_doc = GtkDoc()
cur_doc.set_name(sect_name)
doc_dict[sect_name] = cur_doc
elif line == '<!-- # Unused Parameters # -->\n':
cur_doc = None # don't worry about unused params.
elif cur_doc:
if line[:10] == '@Returns: ':
if string.strip(line[10:]):
cur_doc.append_to_return(line[10:])
elif line[0] == '@':
pos = string.find(line, ':')
if pos >= 0:
cur_doc.append_to_named_param(line[1:pos], line[pos+1:])
else:
cur_doc.append_to_description(line)
else:
cur_doc.append_to_description(line)
line = fp.readline()
def extract_tmpl(dirs, doc_dict=None):
if not doc_dict: doc_dict = {}
for dir in dirs:
for file in os.listdir(dir):
if file in ('.', '..'): continue
path = os.path.join(dir, file)
if os.path.isdir(path):
continue
if len(file) > 2 and file[-2:] == '.sgml':
parse_tmpl(open(path, 'r'), doc_dict)
return doc_dict
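A short sketch of what the extractor is used for (the source path is illustrative; docgen.py below wires it up for real):

    import docextract

    docs = docextract.extract(['../../ges'])   # scan the GES C sources for /** ... */ blocks
    doc = docs.get('ges_timeline_new')
    if doc is not None:
        description = doc.get_description()
        ret_text, ret_annotations = doc.ret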


@@ -1,766 +0,0 @@
#!/usr/bin/env python
import getopt
import os
import re
import sys
import definitions
import defsparser
import docextract
import override
class Node:
def __init__(self, name, interfaces=[]):
self.name = name
self.interfaces = interfaces
self.subclasses = []
def add_child(self, node):
self.subclasses.append(node)
def build_object_tree(parser):
# reorder objects so that parent classes come first ...
objects = parser.objects[:]
pos = 0
while pos < len(objects):
parent = objects[pos].parent
for i in range(pos+1, len(objects)):
if objects[i].c_name == parent:
objects.insert(i+1, objects[pos])
del objects[pos]
break
else:
pos = pos + 1
root = Node(None)
nodes = {None: root}
for obj_def in objects:
parent_name = obj_def.parent
if parent_name == 'GObject':
parent_name = None
parent_node = nodes[parent_name]
node = Node(obj_def.c_name, obj_def.implements)
parent_node.add_child(node)
nodes[node.name] = node
if parser.interfaces:
interfaces = Node('gobject.GInterface')
root.add_child(interfaces)
nodes[interfaces.name] = interfaces
for obj_def in parser.interfaces:
node = Node(obj_def.c_name)
interfaces.add_child(node)
nodes[node.name] = node
if parser.boxes:
boxed = Node('gobject.GBoxed')
root.add_child(boxed)
nodes[boxed.name] = boxed
for obj_def in parser.boxes:
node = Node(obj_def.c_name)
boxed.add_child(node)
nodes[node.name] = node
if parser.pointers:
pointers = Node('gobject.GPointer')
root.add_child(pointers)
nodes[pointers.name] = pointers
for obj_def in parser.pointers:
node = Node(obj_def.c_name)
pointers.add_child(node)
nodes[node.name] = node
return root
class DocWriter:
def __init__(self):
self._fp = None
# parse the defs file
self.parser = defsparser.DefsParser(())
self.overrides = override.Overrides()
self.classmap = {}
self.docs = {}
def add_sourcedirs(self, source_dirs):
self.docs = docextract.extract(source_dirs, self.docs)
def add_tmpldirs(self, tmpl_dirs):
self.docs = docextract.extract_tmpl(tmpl_dirs, self.docs)
def add_docs(self, defs_file, overrides_file, module_name):
'''parse information about a given defs file'''
self.parser.filename = defs_file
self.parser.startParsing(defs_file)
if overrides_file:
self.overrides.handle_file(overrides_file)
for obj in (self.parser.objects + self.parser.interfaces +
self.parser.boxes + self.parser.pointers):
if not obj.c_name in self.classmap:
self.classmap[obj.c_name] = '%s.%s' % (
module_name, obj.name)
def pyname(self, name):
return self.classmap.get(name, name)
def _compare(self, obja, objb):
return cmp(self.pyname(obja.c_name), self.pyname(objb.c_name))
def output_docs(self, output_prefix):
files = {}
# class hierarchy
hierarchy = build_object_tree(self.parser)
filename = self.create_filename('hierarchy', output_prefix)
self._fp = open(filename, 'w')
self.write_full_hierarchy(hierarchy)
self._fp.close()
obj_defs = (self.parser.objects + self.parser.interfaces +
self.parser.boxes + self.parser.pointers)
obj_defs.sort(self._compare)
for obj_def in obj_defs:
filename = self.create_filename(obj_def.c_name, output_prefix)
self._fp = open(filename, 'w')
if isinstance(obj_def, definitions.ObjectDef):
self.output_object_docs(obj_def)
elif isinstance(obj_def, definitions.InterfaceDef):
self.output_interface_docs(obj_def)
elif isinstance(obj_def, definitions.BoxedDef):
self.output_boxed_docs(obj_def)
elif isinstance(obj_def, definitions.PointerDef):
self.output_boxed_docs(obj_def)
self._fp.close()
files[os.path.basename(filename)] = obj_def
if not files:
return
output_filename = self.create_toc_filename(output_prefix)
self._fp = open(output_filename, 'w')
self.output_toc(files)
self._fp.close()
def output_object_docs(self, obj_def):
self.write_class_header(obj_def.c_name)
self.write_heading('Synopsis')
self.write_synopsis(obj_def)
self.close_section()
# construct the inheritence hierarchy ...
ancestry = [(obj_def.c_name, obj_def.implements)]
try:
parent = obj_def.parent
while parent != None:
if parent == 'GObject':
ancestry.append(('GObject', []))
parent = None
else:
parent_def = self.parser.find_object(parent)
ancestry.append((parent_def.c_name, parent_def.implements))
parent = parent_def.parent
except ValueError:
pass
ancestry.reverse()
self.write_heading('Ancestry')
self.write_hierarchy(obj_def.c_name, ancestry)
self.close_section()
constructor = self.parser.find_constructor(obj_def, self.overrides)
if constructor:
self.write_heading('Constructor')
self.write_constructor(constructor,
self.docs.get(constructor.c_name, None))
self.close_section()
methods = self.parser.find_methods(obj_def)
methods = filter(lambda meth, self=self:
not self.overrides.is_ignored(meth.c_name), methods)
if methods:
self.write_heading('Methods')
for method in methods:
self.write_method(method, self.docs.get(method.c_name, None))
self.close_section()
self.write_class_footer(obj_def.c_name)
def get_methods_for_object(self, obj_def):
methods = []
for method in self.parser.find_methods(obj_def):
if not self.overrides.is_ignored(method.c_name):
methods.append(method)
return methods
def output_interface_docs(self, int_def):
self.write_class_header(int_def.c_name)
self.write_heading('Synopsis')
self.write_synopsis(int_def)
self.close_section()
methods = self.get_methods_for_object(int_def)
if methods:
self.write_heading('Methods')
for method in methods:
self.write_method(method, self.docs.get(method.c_name, None))
self.close_section()
self.write_class_footer(int_def.c_name)
def output_boxed_docs(self, box_def):
self.write_class_header(box_def.c_name)
self.write_heading('Synopsis')
self.write_synopsis(box_def)
self.close_section()
constructor = self.parser.find_constructor(box_def, self.overrides)
if constructor:
self.write_heading('Constructor')
self.write_constructor(constructor,
self.docs.get(constructor.c_name, None))
self.close_section()
methods = self.get_methods_for_object(box_def)
if methods:
self.write_heading('Methods')
for method in methods:
self.write_method(method, self.docs.get(method.c_name, None))
self.close_section()
self.write_class_footer(box_def.c_name)
def output_toc(self, files):
self._fp.write('TOC\n\n')
for filename in sorted(files):
obj_def = files[filename]
self._fp.write(obj_def.c_name + ' - ' + filename + '\n')
# override the following to create a more complex output format
def create_filename(self, obj_name, output_prefix):
'''Create output filename for this particular object'''
return output_prefix + '-' + obj_name.lower() + '.txt'
def create_toc_filename(self, output_prefix):
return self.create_filename('docs', output_prefix)
def write_full_hierarchy(self, hierarchy):
def handle_node(node, indent=''):
for child in node.subclasses:
self._fp.write(indent + node.name)
if node.interfaces:
self._fp.write(' (implements ')
self._fp.write(', '.join(node.interfaces))
self._fp.write(')\n')
else:
self._fp.write('\n')
handle_node(child, indent + ' ')
handle_node(hierarchy)
def serialize_params(self, func_def):
params = []
for param in func_def.params:
params.append(param[1])
return ', '.join(params)
# these need to handle default args ...
def create_constructor_prototype(self, func_def):
return '%s(%s)' % (func_def.is_constructor_of,
self.serialize_params(func_def))
def create_function_prototype(self, func_def):
return '%s(%s)' % (func_def.name,
self.serialize_params(func_def))
def create_method_prototype(self, meth_def):
return '%s.%s(%s)' % (meth_def.of_object,
meth_def.name,
self.serialize_params(meth_def))
def write_class_header(self, obj_name):
self._fp.write('Class %s\n' % obj_name)
self._fp.write('======%s\n\n' % ('=' * len(obj_name)))
def write_class_footer(self, obj_name):
pass
def write_heading(self, text):
self._fp.write('\n' + text + '\n' + ('-' * len(text)) + '\n')
def close_section(self):
pass
def write_synopsis(self, obj_def):
self._fp.write('class %s' % obj_def.c_name)
if isinstance(obj_def, definitions.ObjectDef):
bases = []
if obj_def.parent:
bases.append(obj_def.parent)
bases = bases + obj_def.implements
if bases:
self._fp.write('(%s)' % ', '.join(bases, ))
self._fp.write(':\n')
constructor = self.parser.find_constructor(obj_def, self.overrides)
if constructor:
prototype = self.create_constructor_prototype(constructor)
self._fp.write(' def %s\n' % prototype)
for method in self.get_methods_for_object(obj_def):
prototype = self.create_method_prototype(method)
self._fp.write(' def %s\n' % prototype)
def write_hierarchy(self, obj_name, ancestry):
indent = ''
for name, interfaces in ancestry:
self._fp.write(indent + '+-- ' + name)
if interfaces:
self._fp.write(' (implements ')
self._fp.write(', '.join(interfaces))
self._fp.write(')\n')
else:
self._fp.write('\n')
indent = indent + ' '
self._fp.write('\n')
def write_constructor(self, func_def, func_doc):
prototype = self.create_constructor_prototype(func_def)
self._fp.write(prototype + '\n\n')
for type, name, dflt, null in func_def.params:
self.write_parameter(type, name, func_doc)
self.write_return_value(func_def, func_doc)
if func_doc and func_doc.description:
self._fp.write(func_doc.description)
self._fp.write('\n\n\n')
def write_method(self, meth_def, func_doc):
prototype = self.create_method_prototype(meth_def)
self._fp.write(prototype + '\n\n')
for type, name, dflt, null in meth_def.params:
self.write_parameter(type, name, func_doc)
self.write_return_value(meth_def, func_doc)
if func_doc and func_doc.description:
self._fp.write('\n')
self._fp.write(func_doc.description)
self._fp.write('\n\n')
def write_parameter(self, param_type, param_name, func_doc):
if func_doc:
descr = func_doc.get_param_description(param_name)
else:
descr = 'a ' + param_type
self._fp.write(' ' + param_name + ': ' + descr + '\n')
def write_return_value(self, meth_def, func_doc):
if meth_def.ret and meth_def.ret != 'none':
if func_doc and func_doc.ret:
descr = func_doc.ret
else:
descr = 'a ' + meth_def.ret
self._fp.write(' Returns: ' + descr + '\n')
CLASS_HEADER_TEMPLATE = """<refentry id="%(entryid)s">
<refmeta>
<refentrytitle>%(name)s</refentrytitle>
<manvolnum>3</manvolnum>
<refmiscinfo>%(miscinfo)s</refmiscinfo>
</refmeta>
<refnamediv>
<refname>%(name)s</refname><refpurpose></refpurpose>
</refnamediv>
"""
VARIABLE_TEMPLATE = """<varlistentry>
<term><parameter>%(parameter)s</parameter>&nbsp;:</term>
<listitem><simpara>%(description)s</simpara></listitem>
</varlistentry>
"""
DOCBOOK_HEADER = """<?xml version="1.0" standalone="no"?>
<!DOCTYPE synopsis PUBLIC "-//OASIS//DTD DocBook XML V4.1.2//EN"
"http://www.oasis-open.org/docbook/xml/4.1.2/docbookx.dtd">
"""
class DocbookDocWriter(DocWriter):
def __init__(self):
DocWriter.__init__(self)
self._function_pat = re.compile(r'(\w+)\s*\(\)')
self._parameter_pat = re.compile(r'\@(\w+)')
self._constant_pat = re.compile(r'\%(-?\w+)')
self._symbol_pat = re.compile(r'#([\w-]+)')
self._transtable = ['-'] * 256
# make string -> reference translation func
for digit in '0123456789':
self._transtable[ord(digit)] = digit
for letter in 'abcdefghijklmnopqrstuvwxyz':
self._transtable[ord(letter)] = letter
self._transtable[ord(letter.upper())] = letter
self._transtable = ''.join(self._transtable)
def create_filename(self, obj_name, output_prefix):
'''Create output filename for this particular object'''
stem = output_prefix + '-' + obj_name.lower()
return stem + '.xml'
def create_toc_filename(self, output_prefix):
return self.create_filename('classes', output_prefix)
def make_class_ref(self, obj_name):
return 'class-' + obj_name.translate(self._transtable)
def make_method_ref(self, meth_def):
return 'method-%s--%s' % (
meth_def.of_object.translate(self._transtable),
meth_def.name.translate(self._transtable))
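# Illustrative results, derived from the translation table built above
# (every character that is not a letter or digit becomes '-'):
#   make_class_ref('GtkWindow')                 -> 'class-gtkwindow'
#   make_method_ref(<GtkWindow.set_title def>)  -> 'method-gtkwindow--set-title'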
def _format_function(self, match):
info = self.parser.c_name.get(match.group(1), None)
if info:
if isinstance(info, defsparser.FunctionDef):
return self._format_funcdef(info)
if isinstance(info, defsparser.MethodDef):
return self._format_method(info)
# fall through
return '<function>%s()</function>' % (match.group(1), )
def _format_funcdef(self, info):
if info.is_constructor_of is not None:
# should have a link here
return '<methodname>%s()</methodname>' % (
self.pyname(info.is_constructor_of), )
else:
return '<function>%s()</function>' % (info.name, )
def _format_param(self, match):
return '<parameter>%s</parameter>' % (match.group(1), )
def _format_const(self, match):
return '<literal>%s</literal>' % (match.group(1), )
def _format_method(self, info):
return ('<link linkend="%s">'
'<methodname>%s.%s</methodname>'
'</link>') % (self.make_method_ref(info),
self.pyname(info.of_object),
info.name)
def _format_object(self, info):
return ('<link linkend="%s">'
'<classname>%s</classname>'
'</link>') % (self.make_class_ref(info.c_name),
self.pyname(info.c_name))
def _format_symbol(self, match):
info = self.parser.c_name.get(match.group(1), None)
if info:
if isinstance(info, defsparser.FunctionDef):
return self._format_funcdef(info)
elif isinstance(info, defsparser.MethodDef):
return self._format_method(info)
elif isinstance(info, (defsparser.ObjectDef,
defsparser.InterfaceDef,
defsparser.BoxedDef,
defsparser.PointerDef)):
return self._format_object(info)
# fall through
return '<literal>%s</literal>' % (match.group(1), )
def reformat_text(self, text, singleline=0):
# replace special strings ...
text = self._function_pat.sub(self._format_function, text)
text = self._parameter_pat.sub(self._format_param, text)
text = self._constant_pat.sub(self._format_const, text)
text = self._symbol_pat.sub(self._format_symbol, text)
# don't bother with <para> expansion for single line text.
if singleline:
return text
lines = text.strip().split('\n')
for index in range(len(lines)):
if lines[index].strip() == '':
lines[index] = '</para>\n<para>'
continue
return '<para>%s</para>' % ('\n'.join(lines), )
# write out hierarchy
def write_full_hierarchy(self, hierarchy):
def handle_node(node, indent=''):
if node.name:
self._fp.write('%s<link linkend="%s">%s</link>' %
(indent, self.make_class_ref(node.name),
self.pyname(node.name)))
if node.interfaces:
self._fp.write(' (implements ')
for i in range(len(node.interfaces)):
self._fp.write('<link linkend="%s">%s</link>' %
(self.make_class_ref(node.interfaces[i]),
self.pyname(node.interfaces[i])))
if i != len(node.interfaces) - 1:
self._fp.write(', ')
self._fp.write(')\n')
else:
self._fp.write('\n')
indent = indent + ' '
node.subclasses.sort(lambda a, b:
cmp(self.pyname(a.name), self.pyname(b.name)))
for child in node.subclasses:
handle_node(child, indent)
self._fp.write(DOCBOOK_HEADER)
self._fp.write('<synopsis>')
handle_node(hierarchy)
self._fp.write('</synopsis>\n')
# these need to handle default args ...
def create_constructor_prototype(self, func_def):
xml = ['<constructorsynopsis language="python">\n']
xml.append(' <methodname>__init__</methodname>\n')
for type, name, dflt, null in func_def.params:
xml.append(' <methodparam><parameter>')
xml.append(name)
xml.append('</parameter>')
if dflt:
xml.append('<initializer>')
xml.append(dflt)
xml.append('</initializer>')
xml.append('</methodparam>\n')
if not func_def.params:
xml.append(' <methodparam></methodparam>')
xml.append(' </constructorsynopsis>')
return ''.join(xml)
def create_function_prototype(self, func_def):
xml = ['<funcsynopsis language="python">\n <funcprototype>\n']
xml.append(' <funcdef><function>')
xml.append(func_def.name)
xml.append('</function></funcdef>\n')
for type, name, dflt, null in func_def.params:
xml.append(' <paramdef><parameter>')
xml.append(name)
xml.append('</parameter>')
if dflt:
xml.append('<initializer>')
xml.append(dflt)
xml.append('</initializer>')
xml.append('</paramdef>\n')
if not func_def.params:
xml.append(' <paramdef></paramdef>')
xml.append(' </funcprototype>\n </funcsynopsis>')
return ''.join(xml)
def create_method_prototype(self, meth_def, addlink=0):
xml = ['<methodsynopsis language="python">\n']
xml.append(' <methodname>')
if addlink:
xml.append('<link linkend="%s">' % self.make_method_ref(meth_def))
xml.append(self.pyname(meth_def.name))
if addlink:
xml.append('</link>')
xml.append('</methodname>\n')
for type, name, dflt, null in meth_def.params:
xml.append(' <methodparam><parameter>')
xml.append(name)
xml.append('</parameter>')
if dflt:
xml.append('<initializer>')
xml.append(dflt)
xml.append('</initializer>')
xml.append('</methodparam>\n')
if not meth_def.params:
xml.append(' <methodparam></methodparam>')
xml.append(' </methodsynopsis>')
return ''.join(xml)
def write_class_header(self, obj_name):
self._fp.write(DOCBOOK_HEADER)
self._fp.write(CLASS_HEADER_TEMPLATE % dict(
entryid=self.make_class_ref(obj_name),
name=self.pyname(obj_name),
miscinfo="PyGTK Docs"))
def write_class_footer(self, obj_name):
self._fp.write('</refentry>\n')
def write_heading(self, text):
self._fp.write(' <refsect1>\n')
self._fp.write(' <title>' + text + '</title>\n\n')
def close_section(self):
self._fp.write(' </refsect1>\n')
def write_synopsis(self, obj_def):
self._fp.write('<classsynopsis language="python">\n')
self._fp.write(' <ooclass><classname>%s</classname></ooclass>\n'
% self.pyname(obj_def.c_name))
if isinstance(obj_def, definitions.ObjectDef):
if obj_def.parent:
self._fp.write(' <ooclass><classname><link linkend="%s">%s'
'</link></classname></ooclass>\n'
% (self.make_class_ref(obj_def.parent),
self.pyname(obj_def.parent)))
for base in obj_def.implements:
self._fp.write(' <ooclass><classname><link linkend="%s">%s'
'</link></classname></ooclass>\n'
% (self.make_class_ref(base), self.pyname(base)))
elif isinstance(obj_def, definitions.InterfaceDef):
self._fp.write(' <ooclass><classname>gobject.GInterface'
'</classname></ooclass>\n')
elif isinstance(obj_def, definitions.BoxedDef):
self._fp.write(' <ooclass><classname>gobject.GBoxed'
'</classname></ooclass>\n')
elif isinstance(obj_def, definitions.PointerDef):
self._fp.write(' <ooclass><classname>gobject.GPointer'
'</classname></ooclass>\n')
constructor = self.parser.find_constructor(obj_def, self.overrides)
if constructor:
self._fp.write(
'%s\n' % self.create_constructor_prototype(constructor))
for method in self.get_methods_for_object(obj_def):
self._fp.write(
'%s\n' % self.create_method_prototype(method, addlink=1))
self._fp.write('</classsynopsis>\n\n')
def write_hierarchy(self, obj_name, ancestry):
self._fp.write('<synopsis>')
indent = ''
for name, interfaces in ancestry:
self._fp.write(
'%s+-- <link linkend="%s">%s</link>' %
(indent, self.make_class_ref(name), self.pyname(name)))
if interfaces:
self._fp.write(' (implements ')
for i in range(len(interfaces)):
self._fp.write('<link linkend="%s">%s</link>' %
(self.make_class_ref(interfaces[i]),
self.pyname(interfaces[i])))
if i != len(interfaces) - 1:
self._fp.write(', ')
self._fp.write(')\n')
else:
self._fp.write('\n')
indent = indent + ' '
self._fp.write('</synopsis>\n\n')
def write_params(self, params, ret, func_doc):
if not params and (not ret or ret == 'none'):
return
self._fp.write(' <variablelist>\n')
for type, name, dflt, null in params:
if func_doc:
descr = func_doc.get_param_description(name).strip()
else:
descr = 'a ' + type
self._fp.write(VARIABLE_TEMPLATE % dict(
parameter=name,
description=self.reformat_text(descr, singleline=1)))
if ret and ret != 'none':
if func_doc and func_doc.ret:
descr = func_doc.ret.strip()
else:
descr = 'a ' + ret
self._fp.write(VARIABLE_TEMPLATE % dict(
parameter='Returns',
description=self.reformat_text(descr, singleline=1)))
self._fp.write(' </variablelist>\n')
def write_constructor(self, func_def, func_doc):
prototype = self.create_constructor_prototype(func_def)
self._fp.write('<programlisting>%s</programlisting>\n' % prototype)
self.write_params(func_def.params, func_def.ret, func_doc)
if func_doc and func_doc.description:
self._fp.write(self.reformat_text(func_doc.description))
self._fp.write('\n\n\n')
def write_method(self, meth_def, func_doc):
self._fp.write(' <refsect2 id="%s">\n' % (
self.make_method_ref(meth_def), ))
self._fp.write(' <title>%s.%s</title>\n\n' % (
self.pyname(meth_def.of_object),
meth_def.name))
prototype = self.create_method_prototype(meth_def)
self._fp.write('<programlisting>%s</programlisting>\n' % prototype)
self.write_params(meth_def.params, meth_def.ret, func_doc)
if func_doc and func_doc.description:
self._fp.write(self.reformat_text(func_doc.description))
self._fp.write(' </refsect2>\n\n\n')
def output_toc(self, files, fp=sys.stdout):
self._fp.write(DOCBOOK_HEADER)
#self._fp.write('<reference id="class-reference">\n')
#self._fp.write(' <title>Class Documentation</title>\n')
#for filename, obj_def in files:
# self._fp.write('&' +
# obj_def.c_name.translate(self._transtable) + ';\n')
#self._fp.write('</reference>\n')
self._fp.write('<reference id="class-reference" '
'xmlns:xi="http://www.w3.org/2001/XInclude">\n')
self._fp.write(' <title>Class Reference</title>\n')
for filename in sorted(files):
self._fp.write(' <xi:include href="%s"/>\n' % filename)
self._fp.write('</reference>\n')
def main(args):
try:
opts, args = getopt.getopt(args[1:], "d:s:o:",
["defs-file=", "override=", "source-dir=",
"output-prefix="])
except getopt.error, e:
sys.stderr.write('docgen.py: %s\n' % e)
sys.stderr.write(
'usage: docgen.py -d file.defs [-s /src/dir] [-o output-prefix]\n')
return 1
defs_file = None
overrides_file = None
source_dirs = []
output_prefix = 'docs'
for opt, arg in opts:
if opt in ('-d', '--defs-file'):
defs_file = arg
if opt in ('--override', ):
overrides_file = arg
elif opt in ('-s', '--source-dir'):
source_dirs.append(arg)
elif opt in ('-o', '--output-prefix'):
output_prefix = arg
if len(args) != 0 or not defs_file:
sys.stderr.write(
'usage: docgen.py -d file.defs [-s /src/dir] [-o output-prefix]\n')
return 1
d = DocbookDocWriter()
d.add_sourcedirs(source_dirs)
d.add_docs(defs_file, overrides_file, 'gio')
d.output_docs(output_prefix)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))

View file

@ -1,637 +0,0 @@
#!/usr/bin/env python
# -*- Mode: Python; py-indent-offset: 4 -*-
# GPL'ed
# Toby D. Reeves <toby@max.rl.plh.af.mil>
#
# Modified by James Henstridge <james@daa.com.au> to output stuff in
# Havoc's new defs format. Info on this format can be seen at:
# http://mail.gnome.org/archives/gtk-devel-list/2000-January/msg00070.html
# Updated to be PEP-8 compatible and refactored to use OOP
#
# Scan the given public .h files of a GTK module (or module using
# GTK object conventions) and generates a set of scheme defs.
#
# h2def searches through a header file looking for function prototypes and
# generates a scheme style definition for each prototype.
# Basically the operation of h2def is:
#
# - read each .h file into a buffer which is scrubbed of extraneous data
# - find all object definitions:
# - find all structures that may represent a GtkObject
# - find all structures that might represent a class
# - find all structures that may represent a GtkObject subclass
# - find all structures that might represent a class/Iface inherited from
# GTypeInterface
# - find all enum definitions
# - write out the defs
#
# The command line options are:
#
# -s --separate Create separate files for objects and function/method defs
# using the given name as the base name (optional). If this
# is not specified the combined object and function defs
# will be output to sys.stdout.
# -f --defsfilter Extract defs from the given file to filter the output defs
# that is, don't output defs that are defined in the
# defsfile. More than one defsfile may be specified.
# -m --modulename The prefix to be stripped from the front of function names
# for the given module
# -n --namespace The module or namespace name to be used, for example
# WebKit where h2def is unable to detect the module name
# automatically. It also sets the gtype-id prefix.
# --onlyenums Only produce defs for enums and flags
# --onlyobjdefs Only produce defs for objects
# -v Verbose output
#
# Examples:
#
# python h2def.py /usr/local/include/pango-1.0/pango/*.h >/tmp/pango.defs
#
# - Outputs all defs for the pango module.
#
# python h2def.py -m gdk -s /tmp/gdk-2.10 \
# -f /usr/tmp/pygtk/gtk/gdk-base.defs \
# /usr/local/include/gtk-2.0/gdk/*.h \
# /usr/local/include/gtk-2.0/gdk-pixbuf/*.h
#
# - Outputs the gdk module defs that are not contained in the defs file
# /usr/tmp/pygtk/gtk/gdk-base.defs. Two output files are created:
# /tmp/gdk-2.10-types.defs and /tmp/gdk-2.10.defs.
#
# python h2def.py -n WebKit /usr/incude/webkit-1.0/webkit/*.h \
# >/tmp/webkit.defs
#
# - Outputs all the defs for webkit module, setting the module name to WebKit
# and the gtype-id prefix to WEBKIT_ which can't be detected automatically.
#
import getopt
import os
import re
import string
import sys
import defsparser
# ------------------ Create typecodes from typenames ---------
_upperstr_pat1 = re.compile(r'([^A-Z])([A-Z])')
_upperstr_pat2 = re.compile(r'([A-Z][A-Z])([A-Z][0-9a-z])')
_upperstr_pat3 = re.compile(r'^([A-Z])([A-Z])')
def to_upper_str(name):
"""Converts a typename to the equivalent upercase and underscores
name. This is used to form the type conversion macros and enum/flag
name variables"""
name = _upperstr_pat1.sub(r'\1_\2', name)
name = _upperstr_pat2.sub(r'\1_\2', name)
name = _upperstr_pat3.sub(r'\1_\2', name, count=1)
return string.upper(name)
def typecode(typename, namespace=None):
"""create a typecode (eg. GTK_TYPE_WIDGET) from a typename"""
if namespace:
return string.replace(string.upper(namespace) + "_" + to_upper_str(typename[len(namespace):]), '_', '_TYPE_', 1)
return string.replace(to_upper_str(typename), '_', '_TYPE_', 1)
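# Worked examples, following directly from the patterns above (the WebKit
# case assumes the namespace is passed explicitly, as described earlier):
#   to_upper_str('GtkWidget')              -> 'GTK_WIDGET'
#   typecode('GtkWidget')                  -> 'GTK_TYPE_WIDGET'
#   typecode('WebKitWebView', 'WebKit')    -> 'WEBKIT_TYPE_WEB_VIEW'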
# ------------------ Find object definitions -----------------
# Strips the comments from buffer
def strip_comments(buf):
parts = []
lastpos = 0
while 1:
pos = string.find(buf, '/*', lastpos)
if pos >= 0:
parts.append(buf[lastpos:pos])
pos = string.find(buf, '*/', pos)
if pos >= 0:
lastpos = pos + 2
else:
break
else:
parts.append(buf[lastpos:])
break
return string.join(parts, '')
# Strips the dll API from buffer, for example WEBKIT_API
def strip_dll_api(buf):
pat = re.compile("[A-Z]*_API ")
buf = pat.sub("", buf)
return buf
obj_name_pat = "[A-Z][a-z]*[A-Z][A-Za-z0-9]*"
split_prefix_pat = re.compile('([A-Z]+[a-z]*)([A-Za-z0-9]+)')
def find_obj_defs(buf, objdefs=[]):
"""
Try to find object definitions in header files.
"""
# filter out comments from buffer.
buf = strip_comments(buf)
# filter out dll api
buf = strip_dll_api(buf)
maybeobjdefs = [] # contains all possible objects from file
# first find all structures that look like they may represent a GtkObject
pat = re.compile("struct\s+_(" + obj_name_pat + ")\s*{\s*" +
"(" + obj_name_pat + ")\s+", re.MULTILINE)
pos = 0
while pos < len(buf):
m = pat.search(buf, pos)
if not m: break
maybeobjdefs.append((m.group(1), m.group(2)))
pos = m.end()
# handle typedef struct { ... } style struct defs.
pat = re.compile("typedef struct\s+[_\w]*\s*{\s*" +
"(" + obj_name_pat + ")\s+[^}]*}\s*" +
"(" + obj_name_pat + ")\s*;", re.MULTILINE)
pos = 0
while pos < len(buf):
m = pat.search(buf, pos)
if not m: break
maybeobjdefs.append((m.group(2), m.group(1)))
pos = m.end()
# now find all structures that look like they might represent a class:
pat = re.compile("struct\s+_(" + obj_name_pat + ")Class\s*{\s*" +
"(" + obj_name_pat + ")Class\s+", re.MULTILINE)
pos = 0
while pos < len(buf):
m = pat.search(buf, pos)
if not m: break
t = (m.group(1), m.group(2))
# if we find an object structure together with a corresponding
# class structure, then we have probably found a GtkObject subclass.
if t in maybeobjdefs:
objdefs.append(t)
pos = m.end()
pat = re.compile("typedef struct\s+[_\w]*\s*{\s*" +
"(" + obj_name_pat + ")Class\s+[^}]*}\s*" +
"(" + obj_name_pat + ")Class\s*;", re.MULTILINE)
pos = 0
while pos < len(buf):
m = pat.search(buf, pos)
if not m: break
t = (m.group(2), m.group(1))
# if we find an object structure together with a corresponding
# class structure, then we have probably found a GtkObject subclass.
if t in maybeobjdefs:
objdefs.append(t)
pos = m.end()
# now find all structures that look like they might represent
# a class inherited from GTypeInterface:
pat = re.compile("struct\s+_(" + obj_name_pat + ")Class\s*{\s*" +
"GTypeInterface\s+", re.MULTILINE)
pos = 0
while pos < len(buf):
m = pat.search(buf, pos)
if not m: break
t = (m.group(1), '')
t2 = (m.group(1)+'Class', 'GTypeInterface')
# if we find an object structure together with a corresponding
# class structure, then we have probably found a GtkObject subclass.
if t2 in maybeobjdefs:
objdefs.append(t)
pos = m.end()
# now find all structures that look like they might represent
# an Iface inherited from GTypeInterface:
pat = re.compile("struct\s+_(" + obj_name_pat + ")Iface\s*{\s*" +
"GTypeInterface\s+", re.MULTILINE)
pos = 0
while pos < len(buf):
m = pat.search(buf, pos)
if not m: break
t = (m.group(1), '')
t2 = (m.group(1)+'Iface', 'GTypeInterface')
# if we find an object structure together with a corresponding
# class structure, then we have probably found a GtkObject subclass.
if t2 in maybeobjdefs:
objdefs.append(t)
pos = m.end()
def sort_obj_defs(objdefs):
objdefs.sort() # not strictly needed, but looks nice
pos = 0
while pos < len(objdefs):
klass,parent = objdefs[pos]
for i in range(pos+1, len(objdefs)):
# parent below subclass ... reorder
if objdefs[i][0] == parent:
objdefs.insert(i+1, objdefs[pos])
del objdefs[pos]
break
else:
pos = pos + 1
return objdefs
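# Illustrative example (hypothetical input): starting from
#   [('GtkButton', 'GtkBin'), ('GtkBin', 'GtkContainer'), ('GtkContainer', 'GtkWidget')]
# the reordering loop keeps moving an entry until its parent precedes it,
# ending with GtkContainer, then GtkBin, then GtkButton.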
# ------------------ Find enum definitions -----------------
def find_enum_defs(buf, enums=[]):
# strip comments
# bulk comments
buf = strip_comments(buf)
# strip dll api macros
buf = strip_dll_api(buf)
# strip # directives
pat = re.compile(r"""^[#].*?$""", re.MULTILINE)
buf = pat.sub('', buf)
buf = re.sub('\n', ' ', buf)
enum_pat = re.compile(r'enum\s*{([^}]*)}\s*([A-Z][A-Za-z]*)(\s|;)')
splitter = re.compile(r'\s*,\s', re.MULTILINE)
pos = 0
while pos < len(buf):
m = enum_pat.search(buf, pos)
if not m: break
name = m.group(2)
vals = m.group(1)
isflags = string.find(vals, '<<') >= 0
entries = []
for val in splitter.split(vals):
if not string.strip(val): continue
entries.append(string.split(val)[0])
if name != 'GdkCursorType':
enums.append((name, isflags, entries))
pos = m.end()
# ------------------ Find function definitions -----------------
def clean_func(buf):
"""
Ideally would make buf have a single prototype on each line.
Actually just cuts out a good deal of junk, but leaves lines
where a regex can figure prototypes out.
"""
# bulk comments
buf = strip_comments(buf)
# dll api
buf = strip_dll_api(buf)
# compact continued lines
pat = re.compile(r"""\\\n""", re.MULTILINE)
buf = pat.sub('', buf)
# Preprocess directives
pat = re.compile(r"""^[#].*?$""", re.MULTILINE)
buf = pat.sub('', buf)
#typedefs, stucts, and enums
pat = re.compile(r"""^(typedef|struct|enum)(\s|.|\n)*?;\s*""",
re.MULTILINE)
buf = pat.sub('', buf)
#strip DECLS macros
pat = re.compile(r"""G_BEGIN_DECLS|BEGIN_LIBGTOP_DECLS""", re.MULTILINE)
buf = pat.sub('', buf)
#extern "C"
pat = re.compile(r"""^\s*(extern)\s+\"C\"\s+{""", re.MULTILINE)
buf = pat.sub('', buf)
#multiple whitespace
pat = re.compile(r"""\s+""", re.MULTILINE)
buf = pat.sub(' ', buf)
#clean up line ends
pat = re.compile(r""";\s*""", re.MULTILINE)
buf = pat.sub('\n', buf)
buf = buf.lstrip()
#associate *, &, and [] with type instead of variable
#pat = re.compile(r'\s+([*|&]+)\s*(\w+)')
pat = re.compile(r' \s* ([*|&]+) \s* (\w+)', re.VERBOSE)
buf = pat.sub(r'\1 \2', buf)
pat = re.compile(r'\s+ (\w+) \[ \s* \]', re.VERBOSE)
buf = pat.sub(r'[] \1', buf)
# make return types that are const work.
buf = re.sub(r'\s*\*\s*G_CONST_RETURN\s*\*\s*', '** ', buf)
buf = string.replace(buf, 'G_CONST_RETURN ', 'const-')
buf = string.replace(buf, 'const ', 'const-')
#strip GSEAL macros from the middle of function declarations:
pat = re.compile(r"""GSEAL""", re.VERBOSE)
buf = pat.sub('', buf)
return buf
proto_pat=re.compile(r"""
(?P<ret>(-|\w|\&|\*)+\s*) # return type
\s+ # skip whitespace
(?P<func>\w+)\s*[(] # match the function name until the opening (
\s*(?P<args>.*?)\s*[)] # group the function arguments
""", re.IGNORECASE|re.VERBOSE)
#"""
arg_split_pat = re.compile("\s*,\s*")
get_type_pat = re.compile(r'(const-)?([A-Za-z0-9]+)\*?\s+')
pointer_pat = re.compile('.*\*$')
func_new_pat = re.compile('(\w+)_new$')
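# Hedged example of a line proto_pat is expected to match after clean_func()
# has normalised a header (the prototype itself is illustrative):
#   'const-gchar* gtk_widget_get_name (GtkWidget* widget)'
# yields ret='const-gchar*', func='gtk_widget_get_name', args='GtkWidget* widget'.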
class DefsWriter:
def __init__(self, fp=None, prefix=None, ns=None, verbose=False,
defsfilter=None):
if not fp:
fp = sys.stdout
self.fp = fp
self.prefix = prefix
self.namespace = ns
self.verbose = verbose
self._enums = {}
self._objects = {}
self._functions = {}
if defsfilter:
filter = defsparser.DefsParser(defsfilter)
filter.startParsing()
for func in filter.functions + filter.methods.values():
self._functions[func.c_name] = func
for obj in filter.objects + filter.boxes + filter.interfaces:
self._objects[obj.c_name] = obj
for obj in filter.enums:
self._enums[obj.c_name] = obj
def write_def(self, deffile):
buf = open(deffile).read()
self.fp.write('\n;; From %s\n\n' % os.path.basename(deffile))
self._define_func(buf)
self.fp.write('\n')
def write_enum_defs(self, enums, fp=None):
if not fp:
fp = self.fp
fp.write(';; Enumerations and flags ...\n\n')
trans = string.maketrans(string.uppercase + '_',
string.lowercase + '-')
filter = self._enums
for cname, isflags, entries in enums:
if filter:
if cname in filter:
continue
name = cname
module = None
if self.namespace:
module = self.namespace
name = cname[len(self.namespace):]
else:
m = split_prefix_pat.match(cname)
if m:
module = m.group(1)
name = m.group(2)
if isflags:
fp.write('(define-flags ' + name + '\n')
else:
fp.write('(define-enum ' + name + '\n')
if module:
fp.write(' (in-module "' + module + '")\n')
fp.write(' (c-name "' + cname + '")\n')
fp.write(' (gtype-id "' + typecode(cname, self.namespace) + '")\n')
prefix = entries[0]
for ent in entries:
# shorten prefix til we get a match ...
# and handle GDK_FONT_FONT, GDK_FONT_FONTSET case
while ((len(prefix) and prefix[-1] != '_') or ent[:len(prefix)] != prefix
or len(prefix) >= len(ent)):
prefix = prefix[:-1]
prefix_len = len(prefix)
fp.write(' (values\n')
for ent in entries:
fp.write(' \'("%s" "%s")\n' %
(string.translate(ent[prefix_len:], trans), ent))
fp.write(' )\n')
fp.write(')\n\n')
def write_obj_defs(self, objdefs, fp=None):
if not fp:
fp = self.fp
fp.write(';; -*- scheme -*-\n')
fp.write('; object definitions ...\n')
filter = self._objects
for klass, parent in objdefs:
if filter:
if klass in filter:
continue
if self.namespace:
cname = klass[len(self.namespace):]
cmodule = self.namespace
else:
m = split_prefix_pat.match(klass)
cname = klass
cmodule = None
if m:
cmodule = m.group(1)
cname = m.group(2)
fp.write('(define-object ' + cname + '\n')
if cmodule:
fp.write(' (in-module "' + cmodule + '")\n')
if parent:
fp.write(' (parent "' + parent + '")\n')
fp.write(' (c-name "' + klass + '")\n')
fp.write(' (gtype-id "' + typecode(klass, self.namespace) + '")\n')
# should do something about accessible fields
fp.write(')\n\n')
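# For orientation, each block written by write_obj_defs above comes out
# roughly like this (values are illustrative):
#   (define-object Widget
#     (in-module "Gtk")
#     (parent "GtkObject")
#     (c-name "GtkWidget")
#     (gtype-id "GTK_TYPE_WIDGET")
#   )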
def _define_func(self, buf):
buf = clean_func(buf)
buf = string.split(buf,'\n')
filter = self._functions
for p in buf:
if not p:
continue
m = proto_pat.match(p)
if m == None:
if self.verbose:
sys.stderr.write('No match:|%s|\n' % p)
continue
func = m.group('func')
if func[0] == '_':
continue
if filter:
if func in filter:
continue
ret = m.group('ret')
args = m.group('args')
args = arg_split_pat.split(args)
for i in range(len(args)):
spaces = string.count(args[i], ' ')
if spaces > 1:
args[i] = string.replace(args[i], ' ', '-', spaces - 1)
self._write_func(func, ret, args)
def _write_func(self, name, ret, args):
if len(args) >= 1:
# methods must have at least one argument
munged_name = name.replace('_', '')
m = get_type_pat.match(args[0])
if m:
obj = m.group(2)
if munged_name[:len(obj)] == obj.lower():
self._write_method(obj, name, ret, args)
return
if self.prefix:
l = len(self.prefix)
if name[:l] == self.prefix and name[l] == '_':
fname = name[l+1:]
else:
fname = name
else:
fname = name
# it is either a constructor or normal function
self.fp.write('(define-function ' + fname + '\n')
self.fp.write(' (c-name "' + name + '")\n')
# Hmmm... Let's assume that a constructor function name
# ends with '_new' and it returns a pointer.
m = func_new_pat.match(name)
if pointer_pat.match(ret) and m:
cname = ''
names = m.group(1).split('_')
if self.namespace:
cname = self.namespace
names = names[1:]
for s in names:
cname += s.title()
if cname != '':
self.fp.write(' (is-constructor-of "' + cname + '")\n')
self._write_return(ret)
self._write_arguments(args)
def _write_method(self, obj, name, ret, args):
regex = string.join(map(lambda x: x+'_?', string.lower(obj)),'')
mname = re.sub(regex, '', name, 1)
if self.prefix:
l = len(self.prefix)
if mname[:l] == self.prefix and mname[l:l+1] == '_':
mname = mname[l+1:]
self.fp.write('(define-method ' + mname + '\n')
self.fp.write(' (of-object "' + obj + '")\n')
self.fp.write(' (c-name "' + name + '")\n')
self._write_return(ret)
self._write_arguments(args[1:])
def _write_return(self, ret):
if ret != 'void':
self.fp.write(' (return-type "' + ret + '")\n')
else:
self.fp.write(' (return-type "none")\n')
def _write_arguments(self, args):
is_varargs = 0
has_args = len(args) > 0
for arg in args:
if arg == '...':
is_varargs = 1
elif arg in ('void', 'void '):
has_args = 0
if has_args:
self.fp.write(' (parameters\n')
for arg in args:
if arg != '...':
tupleArg = tuple(string.split(arg))
if len(tupleArg) == 2:
self.fp.write(' \'("%s" "%s")\n' % tupleArg)
self.fp.write(' )\n')
if is_varargs:
self.fp.write(' (varargs #t)\n')
self.fp.write(')\n\n')
# ------------------ Main function -----------------
def main(args):
verbose = False
onlyenums = False
onlyobjdefs = False
separate = False
modulename = None
namespace = None
defsfilter = None
opts, args = getopt.getopt(args[1:], 'vs:m:n:f:',
['onlyenums', 'onlyobjdefs',
'modulename=', 'namespace=',
'separate=', 'defsfilter='])
for o, v in opts:
if o == '-v':
verbose = True
if o == '--onlyenums':
onlyenums = True
if o == '--onlyobjdefs':
onlyobjdefs = True
if o in ('-s', '--separate'):
separate = v
if o in ('-m', '--modulename'):
modulename = v
if o in ('-n', '--namespace'):
namespace = v
if o in ('-f', '--defsfilter'):
defsfilter = v
if not args[0:1]:
print 'Must specify at least one input file name'
return -1
# read all the object definitions in
objdefs = []
enums = []
for filename in args:
buf = open(filename).read()
find_obj_defs(buf, objdefs)
find_enum_defs(buf, enums)
objdefs = sort_obj_defs(objdefs)
if separate:
methods = file(separate + '.defs', 'w')
types = file(separate + '-types.defs', 'w')
dw = DefsWriter(methods, prefix=modulename, ns=namespace,
verbose=verbose, defsfilter=defsfilter)
dw.write_obj_defs(objdefs, types)
dw.write_enum_defs(enums, types)
print "Wrote %s-types.defs" % separate
for filename in args:
dw.write_def(filename)
print "Wrote %s.defs" % separate
else:
dw = DefsWriter(prefix=modulename, ns=namespace,
verbose=verbose, defsfilter=defsfilter)
if onlyenums:
dw.write_enum_defs(enums)
elif onlyobjdefs:
dw.write_obj_defs(objdefs)
else:
dw.write_obj_defs(objdefs)
dw.write_enum_defs(enums)
for filename in args:
dw.write_def(filename)
if __name__ == '__main__':
sys.exit(main(sys.argv))

View file

@ -1,26 +0,0 @@
#!/usr/bin/env python
# -*- Mode: Python; py-indent-offset: 4 -*-
import optparse
import defsparser
parser = optparse.OptionParser(
usage="usage: %prog [options] generated-defs old-defs")
parser.add_option("-p", "--merge-parameters",
help="Merge changes in function/methods parameter lists",
action="store_true", dest="parmerge", default=False)
(options, args) = parser.parse_args()
if len(args) != 2:
parser.error("wrong number of arguments")
newp = defsparser.DefsParser(args[0])
oldp = defsparser.DefsParser(args[1])
newp.startParsing()
oldp.startParsing()
newp.merge(oldp, options.parmerge)
newp.write_defs()

View file

@ -1,89 +0,0 @@
#!/usr/bin/env python
# -*- Mode: Python; py-indent-offset: 4 -*-
import sys, os, getopt
module_init_template = \
'/* -*- Mode: C; c-basic-offset: 4 -*- */\n' + \
'#ifdef HAVE_CONFIG_H\n' + \
'# include "config.h"\n' + \
'#endif\n' + \
'#include <Python.h>\n' + \
'#include <pygtk.h>\n' + \
'\n' + \
'/* include any extra headers needed here */\n' + \
'\n' + \
'void %(prefix)s_register_classes(PyObject *d);\n' + \
'extern PyMethodDef %(prefix)s_functions[];\n' + \
'\n' + \
'DL_EXPORT(void)\n' + \
'init%(module)s(void)\n' + \
'{\n' + \
' PyObject *m, *d;\n' + \
'\n' + \
' /* perform any initialisation required by the library here */\n' + \
'\n' + \
' m = Py_InitModule("%(module)s", %(prefix)s_functions);\n' + \
' d = PyModule_GetDict(m);\n' + \
'\n' + \
' init_pygtk();\n' + \
'\n' + \
' %(prefix)s_register_classes(d);\n' + \
'\n' + \
' /* add anything else to the module dictionary (such as constants) */\n' +\
'\n' + \
' if (PyErr_Occurred())\n' + \
' Py_FatalError("could not initialise module %(module)s");\n' + \
'}\n'
override_template = \
'/* -*- Mode: C; c-basic-offset: 4 -*- */\n' + \
'%%%%\n' + \
'headers\n' + \
'/* include any required headers here */\n' + \
'%%%%\n' + \
'init\n' + \
' /* include any code here that needs to be executed before the\n' + \
' * extension classes get initialised */\n' + \
'%%%%\n' + \
'\n' + \
'/* you should add appropriate ignore, ignore-glob and\n' + \
' * override sections here */\n'
def open_with_backup(file):
if os.path.exists(file):
try:
os.rename(file, file+'~')
except OSError:
# fail silently if we can't make a backup
pass
return open(file, 'w')
def write_skels(fileprefix, prefix, module):
fp = open_with_backup(fileprefix+'module.c')
fp.write(module_init_template % { 'prefix': prefix, 'module': module })
fp.close()
fp = open_with_backup(fileprefix+'.override')
fp.write(override_template % { 'prefix': prefix, 'module': module })
fp.close()
if __name__ == '__main__':
opts, args = getopt.getopt(sys.argv[1:], 'f:p:m:h',
['file-prefix=', 'prefix=', 'module=', 'help'])
fileprefix = None
prefix = None
module = None
for opt, arg in opts:
if opt in ('-f', '--file-prefix'):
fileprefix = arg
elif opt in ('-p', '--prefix'):
prefix = arg
elif opt in ('-m', '--module'):
module = arg
elif opt in ('-h', '--help'):
print 'usage: mkskel.py -f fileprefix -p prefix -m module'
sys.exit(0)
if not fileprefix or not prefix or not module:
print 'usage: mkskel.py -f fileprefix -p prefix -m module'
sys.exit(1)
write_skels(fileprefix, prefix, module)

View file

@ -1,285 +0,0 @@
# -*- Mode: Python; py-indent-offset: 4 -*-
# this file contains code for loading up an override file. The override file
# provides implementations of functions where the code generator could not
# do its job correctly.
import fnmatch
import os
import re
import string
import sys
def class2cname(klass, method):
c_name = ''
for c in klass:
if c.isupper():
c_name += '_' + c.lower()
else:
c_name += c
return c_name[1:] + '_' + method
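# Illustrative example, derived from the loop above:
#   class2cname('GtkWindow', 'set_title') -> 'gtk_window_set_title'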
# import python_type as c_name [for arg_type]
# Last ('for') clause is optional. If present, the type will be
# imported only if given 'arg_type' is registered.
import_pat = re.compile(r'\s*import\s+(\S+)\.([^\s.]+)\s+as\s+(\S+)(\s+for\s+(\S+))?')
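# A matching override line would look roughly like this (example only, not
# taken from a real override file in this tree):
#   import gobject.GObject as PyGObject_Type for GtkWidget
# i.e. module='gobject', python name='GObject', C name='PyGObject_Type', and
# the optional trailing clause limits the import to when 'GtkWidget' is
# registered.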
class Overrides:
def __init__(self, filename=None):
self.modulename = None
self.ignores = {}
self.glob_ignores = []
self.type_ignores = {}
self.overrides = {}
self.overridden = {}
self.kwargs = {}
self.noargs = {}
self.onearg = {}
self.staticmethod = {}
self.classmethod = {}
self.startlines = {}
self.override_attrs = {}
self.override_slots = {}
self.headers = ''
self.body = ''
self.init = ''
self.imports = []
self.defines = {}
self.functions = {}
self.newstyle_constructors = {}
self.dynamicnamespace = False
if filename:
self.handle_file(filename)
def handle_file(self, filename):
oldpath = os.getcwd()
fp = open(filename, 'r')
dirname = os.path.dirname(os.path.abspath(filename))
if dirname != oldpath:
os.chdir(dirname)
# read all the components of the file ...
bufs = []
startline = 1
lines = []
line = fp.readline()
linenum = 1
while line:
if line == '%%\n' or line == '%%':
if lines:
bufs.append((string.join(lines, ''), startline))
startline = linenum + 1
lines = []
else:
lines.append(line)
line = fp.readline()
linenum = linenum + 1
if lines:
bufs.append((string.join(lines, ''), startline))
if not bufs: return
for buf, startline in bufs:
self.__parse_override(buf, startline, filename)
os.chdir(oldpath)
def __parse_override(self, buffer, startline, filename):
pos = string.find(buffer, '\n')
if pos >= 0:
line = buffer[:pos]
rest = buffer[pos+1:]
else:
line = buffer ; rest = ''
words = string.split(line)
command = words[0]
if (command == 'ignore' or
command == 'ignore-' + sys.platform):
"ignore/ignore-platform [functions..]"
for func in words[1:]:
self.ignores[func] = 1
for func in string.split(rest):
self.ignores[func] = 1
elif (command == 'ignore-glob' or
command == 'ignore-glob-' + sys.platform):
"ignore-glob/ignore-glob-platform [globs..]"
for func in words[1:]:
self.glob_ignores.append(func)
for func in string.split(rest):
self.glob_ignores.append(func)
elif (command == 'ignore-type' or
command == 'ignore-type-' + sys.platform):
"ignore-type/ignore-type-platform [typenames..]"
for typename in words[1:]:
self.type_ignores[typename] = 1
for typename in string.split(rest):
self.type_ignores[typename] = 1
elif command == 'override':
"override function/method [kwargs|noargs|onearg] [staticmethod|classmethod]"
func = words[1]
if 'kwargs' in words[1:]:
self.kwargs[func] = 1
elif 'noargs' in words[1:]:
self.noargs[func] = 1
elif 'onearg' in words[1:]:
self.onearg[func] = True
if 'staticmethod' in words[1:]:
self.staticmethod[func] = True
elif 'classmethod' in words[1:]:
self.classmethod[func] = True
if func in self.overrides:
raise RuntimeError("Function %s is being overridden more than once" % (func,))
self.overrides[func] = rest
self.startlines[func] = (startline + 1, filename)
elif command == 'override-attr':
"override-slot Class.attr"
attr = words[1]
self.override_attrs[attr] = rest
self.startlines[attr] = (startline + 1, filename)
elif command == 'override-slot':
"override-slot Class.slot"
slot = words[1]
self.override_slots[slot] = rest
self.startlines[slot] = (startline + 1, filename)
elif command == 'headers':
"headers"
self.headers = '%s\n#line %d "%s"\n%s' % \
(self.headers, startline + 1, filename, rest)
elif command == 'body':
"body"
self.body = '%s\n#line %d "%s"\n%s' % \
(self.body, startline + 1, filename, rest)
elif command == 'init':
"init"
self.init = '%s\n#line %d "%s"\n%s' % \
(self.init, startline + 1, filename, rest)
elif command == 'modulename':
"modulename name"
self.modulename = words[1]
elif command == 'include':
"include filename"
for filename in words[1:]:
self.handle_file(filename)
for filename in string.split(rest):
self.handle_file(filename)
elif command == 'import':
"import module1 [\n module2, \n module3 ...]"
for line in string.split(buffer, '\n'):
match = import_pat.match(line)
if match:
module, pyname, cname, conditional, importing_for = match.groups()
self.imports.append((module, pyname, cname, importing_for or None))
elif command == 'define':
"define funcname [kwargs|noargs|onearg] [classmethod|staticmethod]"
"define Class.method [kwargs|noargs|onearg] [classmethod|staticmethod]"
func = words[1]
klass = None
if func.find('.') != -1:
klass, func = func.split('.', 1)
if not self.defines.has_key(klass):
self.defines[klass] = {}
self.defines[klass][func] = rest
else:
self.functions[func] = rest
if 'kwargs' in words[1:]:
self.kwargs[func] = 1
elif 'noargs' in words[1:]:
self.noargs[func] = 1
elif 'onearg' in words[1:]:
self.onearg[func] = 1
if 'staticmethod' in words[1:]:
self.staticmethod[func] = True
elif 'classmethod' in words[1:]:
self.classmethod[func] = True
self.startlines[func] = (startline + 1, filename)
elif command == 'new-constructor':
"new-constructor GType"
gtype, = words[1:]
self.newstyle_constructors[gtype] = True
elif command == 'options':
for option in words[1:]:
if option == 'dynamicnamespace':
self.dynamicnamespace = True
def is_ignored(self, name):
if self.ignores.has_key(name):
return 1
for glob in self.glob_ignores:
if fnmatch.fnmatchcase(name, glob):
return 1
return 0
def is_type_ignored(self, name):
return name in self.type_ignores
def is_overriden(self, name):
return self.overrides.has_key(name)
def is_already_included(self, name):
return self.overridden.has_key(name)
def override(self, name):
self.overridden[name] = 1
return self.overrides[name]
def define(self, klass, name):
self.overridden[class2cname(klass, name)] = 1
return self.defines[klass][name]
def function(self, name):
return self.functions[name]
def getstartline(self, name):
return self.startlines[name]
def wants_kwargs(self, name):
return self.kwargs.has_key(name)
def wants_noargs(self, name):
return self.noargs.has_key(name)
def wants_onearg(self, name):
return self.onearg.has_key(name)
def is_staticmethod(self, name):
return self.staticmethod.has_key(name)
def is_classmethod(self, name):
return self.classmethod.has_key(name)
def attr_is_overriden(self, attr):
return self.override_attrs.has_key(attr)
def attr_override(self, attr):
return self.override_attrs[attr]
def slot_is_overriden(self, slot):
return self.override_slots.has_key(slot)
def slot_override(self, slot):
return self.override_slots[slot]
def get_headers(self):
return self.headers
def get_body(self):
return self.body
def get_init(self):
return self.init
def get_imports(self):
return self.imports
def get_defines_for(self, klass):
return self.defines.get(klass, {})
def get_functions(self):
return self.functions

View file

@ -1,912 +0,0 @@
### -*- python -*-
### Code to generate "Reverse Wrappers", i.e. C->Python wrappers
### (C) 2004 Gustavo Carneiro <gjc@gnome.org>
import argtypes
import os
DEBUG_MODE = ('PYGTK_CODEGEN_DEBUG' in os.environ)
def join_ctype_name(ctype, name):
'''Joins a C type and a variable name into a single string'''
if ctype[-1] != '*':
return " ".join((ctype, name))
else:
return "".join((ctype, name))
class CodeSink(object):
def __init__(self):
self.indent_level = 0 # current indent level
self.indent_stack = [] # previous indent levels
def _format_code(self, code):
assert isinstance(code, str)
l = []
for line in code.split('\n'):
l.append(' '*self.indent_level + line)
if l[-1]:
l.append('')
return '\n'.join(l)
def writeln(self, line=''):
raise NotImplementedError
def indent(self, level=4):
'''Add a certain amount of indentation to all lines written
from now on and until unindent() is called'''
self.indent_stack.append(self.indent_level)
self.indent_level += level
def unindent(self):
'''Revert indentation level to the value before last indent() call'''
self.indent_level = self.indent_stack.pop()
class FileCodeSink(CodeSink):
def __init__(self, fp):
CodeSink.__init__(self)
assert isinstance(fp, file)
self.fp = fp
def writeln(self, line=''):
self.fp.write(self._format_code(line))
class MemoryCodeSink(CodeSink):
def __init__(self):
CodeSink.__init__(self)
self.lines = []
def writeln(self, line=''):
self.lines.append(self._format_code(line))
def flush_to(self, sink):
assert isinstance(sink, CodeSink)
for line in self.lines:
sink.writeln(line.rstrip())
self.lines = []
def flush(self):
l = []
for line in self.lines:
l.append(self._format_code(line))
self.lines = []
return "".join(l)
class ReverseWrapper(object):
'''Object that generates a C->Python wrapper'''
def __init__(self, cname, is_static=True):
assert isinstance(cname, str)
self.cname = cname
## function object we will call, or object whose method we will call
self.called_pyobj = None
## name of method of self.called_pyobj we will call
self.method_name = None
self.is_static = is_static
self.parameters = []
self.declarations = MemoryCodeSink()
self.post_return_code = MemoryCodeSink()
self.body = MemoryCodeSink()
self.check_exception_code = MemoryCodeSink()
self.cleanup_actions = []
self.pyargv_items = []
self.pyargv_optional_items = []
self.pyret_parse_items = [] # list of (format_spec, parameter)
self.code_sinks_stack = [self.body]
def set_call_target(self, called_pyobj, method_name=None):
assert called_pyobj is not None
assert self.called_pyobj is None
self.called_pyobj = called_pyobj
self.method_name = method_name
def set_return_type(self, return_type):
assert isinstance(return_type, ReturnType)
self.return_type = return_type
def add_parameter(self, param):
assert isinstance(param, Parameter)
self.parameters.append(param)
def add_declaration(self, decl_code):
self.declarations.writeln(decl_code)
def add_pyargv_item(self, variable, optional=False):
if optional:
self.pyargv_optional_items.append(variable)
else:
self.pyargv_items.append(variable)
def add_pyret_parse_item(self, format_specifier, parameter, prepend=False):
if prepend:
self.pyret_parse_items.insert(0, (format_specifier, parameter))
else:
self.pyret_parse_items.append((format_specifier, parameter))
def push_code_sink(self, code_sink):
self.code_sinks_stack.insert(0, code_sink)
def pop_code_sink(self):
return self.code_sinks_stack.pop(0)
def write_code(self, code,
cleanup=None,
failure_expression=None,
failure_cleanup=None,
failure_exception=None,
code_sink=None):
'''Add a chunk of code with cleanup and error handling
This method is to be used by TypeHandlers when generating code
Keyword arguments:
code -- code to add
cleanup -- code to cleanup any dynamic resources created by @code
(except in case of failure) (default None)
failure_expression -- C boolean expression to indicate
if anything failed (default None)
failure_cleanup -- code to cleanup any dynamic resources
created by @code in case of failure (default None)
failure_exception -- code to raise an exception in case of
failure (which will be immediately
printed and cleared), (default None)
code_sink -- "code sink" to use; by default,
ReverseWrapper.body is used, which writes the
main body of the wrapper, before calling the
python method. Alternatively,
ReverseWrapper.post_return_code can be used to
write code after the PyArg_ParseTuple that
parses the python method return value.
'''
if code_sink is None:
code_sink = self.code_sinks_stack[0]
if code is not None:
code_sink.writeln(code)
if failure_expression is not None:
code_sink.writeln("if (%s) {" % (failure_expression,))
code_sink.indent()
if failure_exception is None:
code_sink.writeln("if (PyErr_Occurred())")
code_sink.indent()
code_sink.writeln("PyErr_Print();")
code_sink.unindent()
else:
code_sink.writeln(failure_exception)
code_sink.writeln("PyErr_Print();")
if failure_cleanup is not None:
code_sink.writeln(failure_cleanup)
for cleanup_action in self.cleanup_actions:
code_sink.writeln(cleanup_action)
self.push_code_sink(code_sink)
try:
self.return_type.write_error_return()
finally:
self.pop_code_sink()
code_sink.unindent()
code_sink.writeln("}")
if cleanup is not None:
self.cleanup_actions.insert(0, cleanup)
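# Hedged usage sketch (the call below is illustrative, not taken from this
# file): a parameter handler might emit
#   wrapper.write_code(code="py_obj = pygobject_new((GObject *) obj);",
#                      cleanup="Py_DECREF(py_obj);",
#                      failure_expression="!py_obj")
# so that the temporary reference is released on both success and error paths.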
def generate(self, sink):
'''Generate the code into a CodeSink object'''
assert isinstance(sink, CodeSink)
if DEBUG_MODE:
self.declarations.writeln("/* begin declarations */")
self.body.writeln("/* begin main body */")
self.post_return_code.writeln("/* begin post-return code */")
self.add_declaration("PyGILState_STATE __py_state;")
self.write_code(code="__py_state = pyg_gil_state_ensure();",
cleanup="pyg_gil_state_release(__py_state);")
for param in self.parameters:
param.convert_c2py()
assert self.called_pyobj is not None,\
"Parameters failed to provide a target function or method."
if self.is_static:
sink.writeln('static %s' % self.return_type.get_c_type())
else:
sink.writeln(self.return_type.get_c_type())
c_proto_params = map(Parameter.format_for_c_proto, self.parameters)
sink.writeln("%s(%s)\n{" % (self.cname, ", ".join(c_proto_params)))
self.return_type.write_decl()
self.add_declaration("PyObject *py_retval;")
## Handle number of arguments
if self.pyargv_items:
self.add_declaration("PyObject *py_args;")
py_args = "py_args"
if self.pyargv_optional_items:
self.add_declaration("int argc = %i;" % len(self.pyargv_items))
argc = "argc"
for arg in self.pyargv_optional_items:
self.body.writeln("if (%s)" % arg)
self.body.indent()
self.body.writeln("++argc;")
self.body.unindent()
else:
argc = str(len(self.pyargv_items))
else:
if self.pyargv_optional_items:
self.add_declaration("PyObject *py_args;")
py_args = "py_args"
self.add_declaration("int argc = 0;")
argc = "argc"
for arg in self.pyargv_optional_items:
self.body.writeln("if (%s)" % arg)
self.body.indent()
self.body.writeln("++argc;")
self.body.unindent()
else:
py_args = "NULL"
argc = None
self.body.writeln()
if py_args != "NULL":
self.write_code("py_args = PyTuple_New(%s);" % argc,
cleanup="Py_DECREF(py_args);")
pos = 0
for arg in self.pyargv_items:
try: # try to remove the Py_DECREF cleanup action, if we can
self.cleanup_actions.remove("Py_DECREF(%s);" % arg)
except ValueError: # otherwise we have to Py_INCREF..
self.body.writeln("Py_INCREF(%s);" % arg)
self.body.writeln("PyTuple_SET_ITEM(%s, %i, %s);" % (py_args, pos, arg))
pos += 1
for arg in self.pyargv_optional_items:
self.body.writeln("if (%s) {" % arg)
self.body.indent()
try: # try to remove the Py_DECREF cleanup action, if we can
self.cleanup_actions.remove("Py_XDECREF(%s);" % arg)
except ValueError: # otherwise we have to Py_INCREF..
self.body.writeln("Py_INCREF(%s);" % arg)
self.body.writeln("PyTuple_SET_ITEM(%s, %i, %s);" % (py_args, pos, arg))
self.body.unindent()
self.body.writeln("}")
pos += 1
self.body.writeln()
## Call the python method
if self.method_name is None:
self.write_code("py_retval = PyObject_Call(%s, %s);"
% (self.called_pyobj, py_args),
cleanup="Py_XDECREF(py_retval);")
self.check_exception_code.flush_to(self.body)
self.write_code(None, failure_expression="!py_retval")
else:
self.add_declaration("PyObject *py_method;")
self.write_code("py_method = PyObject_GetAttrString(%s, \"%s\");"
% (self.called_pyobj, self.method_name),
cleanup="Py_DECREF(py_method);",
failure_expression="!py_method")
self.write_code("py_retval = PyObject_CallObject(py_method, %s);"
% (py_args,),
cleanup="Py_XDECREF(py_retval);")
self.check_exception_code.flush_to(self.body)
self.write_code(None, failure_expression="!py_retval")
## -- Handle the return value --
## we need to check if the return_type object is prepared to cooperate with multiple return values
len_before = len(self.pyret_parse_items)
self.return_type.write_conversion()
len_after = len(self.pyret_parse_items)
assert (self.return_type.get_c_type() == 'void'
or not (len_before == len_after and len_after > 0)),\
("Bug in reverse wrappers: return type handler %s"
" is not prepared to cooperate multiple return values") % (type(self.return_type),)
sink.indent()
if self.pyret_parse_items == [("", "")]:
## special case when there are no return parameters
self.write_code(
code=None,
failure_expression='py_retval != Py_None',
failure_exception=('PyErr_SetString(PyExc_TypeError, '
'"virtual method should return None");'))
else:
if len(self.pyret_parse_items) == 1:
## if retval is one item only, pack it in a tuple so we
## can use PyArg_ParseTuple as usual..
self.write_code('py_retval = Py_BuildValue("(N)", py_retval);')
if len(self.pyret_parse_items) > 0:
## Parse return values using PyArg_ParseTuple
params = ["py_retval",
'"%s"' % "".join([format for format, param in self.pyret_parse_items])]
params.extend([param for format, param in self.pyret_parse_items if param])
self.write_code(code=None, failure_expression=(
'!PyArg_ParseTuple(%s)' % (', '.join(params),)))
if DEBUG_MODE:
self.declarations.writeln("/* end declarations */")
self.declarations.flush_to(sink)
sink.writeln()
if DEBUG_MODE:
self.body.writeln("/* end main body */")
self.body.flush_to(sink)
sink.writeln()
if DEBUG_MODE:
self.post_return_code.writeln("/* end post-return code */")
self.post_return_code.flush_to(sink)
sink.writeln()
for cleanup_action in self.cleanup_actions:
sink.writeln(cleanup_action)
if self.return_type.get_c_type() != 'void':
sink.writeln()
sink.writeln("return retval;")
sink.unindent()
sink.writeln("}")
class TypeHandler(object):
def __init__(self, wrapper, **props):
assert isinstance(wrapper, ReverseWrapper)
self.wrapper = wrapper
self.props = props
class ReturnType(TypeHandler):
supports_optional = False
def get_c_type(self):
raise NotImplementedError
def write_decl(self):
raise NotImplementedError
def write_error_return(self):
'''Write "return <value>" code in case of error'''
raise NotImplementedError
def write_conversion(self):
'''Writes code to convert Python return value in 'py_retval'
into C 'retval'. Returns a string with C boolean expression
that determines if anything went wrong. '''
raise NotImplementedError
class Parameter(TypeHandler):
def __init__(self, wrapper, name, **props):
TypeHandler.__init__(self, wrapper, **props)
self.name = name
def get_c_type(self):
raise NotImplementedError
def convert_c2py(self):
'''Write some code before calling the Python method.'''
pass
def format_for_c_proto(self):
return join_ctype_name(self.get_c_type(), self.name)
###---
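# Each concrete handler below implements get_c_type() and convert_c2py() (or
# write_decl()/write_conversion() for return types) and registers itself with
# argtypes.matcher so the generator can look it up by type name. A minimal
# sketch of a hypothetical parameter handler (the 'guchar' name is purely
# illustrative) follows the same pattern as IntParam further down:
#
#     class GUCharParam(Parameter):
#         def get_c_type(self):
#             return self.props.get('c_type', 'guchar')
#         def convert_c2py(self):
#             self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
#             self.wrapper.write_code(code=("py_%s = PyInt_FromLong(%s);" %
#                                           (self.name, self.name)),
#                                     cleanup=("Py_DECREF(py_%s);" % self.name))
#             self.wrapper.add_pyargv_item("py_%s" % self.name)
#
#     argtypes.matcher.register_reverse('guchar', GUCharParam)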
class StringParam(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'char *').replace('const-', 'const ')
def convert_c2py(self):
if self.props.get('optional', False):
self.wrapper.add_declaration("PyObject *py_%s = NULL;" % self.name)
self.wrapper.write_code(code=("if (%s)\n"
" py_%s = PyString_FromString(%s);\n"
% (self.name, self.name, self.name)),
cleanup=("Py_XDECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name, optional=True)
elif self.props.get('nullok', False):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("if (%s)\n"
" py_%s = PyString_FromString(%s);\n"
"else {\n"
" Py_INCREF(Py_None);\n"
" py_%s = Py_None;\n"
"}\n"
% (self.name, self.name, self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
else:
self.wrapper.add_declaration("PyObject *py_%s = NULL;" % self.name)
self.wrapper.write_code(code=("if (%s)\n"
" py_%s = PyString_FromString(%s);\n" %
(self.name, self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name),
failure_expression=("!py_%s" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
for ctype in ('char*', 'gchar*', 'const-char*', 'char-const*', 'const-gchar*',
'gchar-const*', 'string', 'static_string'):
argtypes.matcher.register_reverse(ctype, StringParam)
del ctype
class StringReturn(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'char *').replace('const-', 'const ')
#return "char *"
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
#self.wrapper.add_declaration("char *retval;")
def write_error_return(self):
self.wrapper.write_code("return NULL;")
def write_conversion(self):
self.wrapper.add_pyret_parse_item("s", "&retval", prepend=True)
self.wrapper.write_code("retval = g_strdup(retval);", code_sink=self.wrapper.post_return_code)
for ctype in ('char*', 'gchar*', 'const-gchar*'):
argtypes.matcher.register_reverse_ret(ctype, StringReturn)
del ctype
class VoidReturn(ReturnType):
def get_c_type(self):
return "void"
def write_decl(self):
pass
def write_error_return(self):
self.wrapper.write_code("return;")
def write_conversion(self):
self.wrapper.add_pyret_parse_item("", "", prepend=True)
argtypes.matcher.register_reverse_ret('void', VoidReturn)
argtypes.matcher.register_reverse_ret('none', VoidReturn)
class GObjectParam(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'GObject *')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s = NULL;" % self.name)
self.wrapper.write_code(code=("if (%s)\n"
" py_%s = pygobject_new((GObject *) %s);\n"
"else {\n"
" Py_INCREF(Py_None);\n"
" py_%s = Py_None;\n"
"}"
% (self.name, self.name, self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
argtypes.matcher.register_reverse('GObject*', GObjectParam)
class GObjectReturn(ReturnType):
supports_optional = True
def get_c_type(self):
return self.props.get('c_type', 'GObject *')
def write_decl(self):
if not self.props.get('optional'):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
else:
self.wrapper.add_declaration("%s retval = NULL;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return NULL;")
def write_conversion(self):
if not self.props.get('optional'):
self.wrapper.write_code(
code=None,
failure_expression="!PyObject_TypeCheck(py_retval, &PyGObject_Type)",
failure_exception='PyErr_SetString(PyExc_TypeError, "retval should be a GObject");')
self.wrapper.write_code("retval = (%s) pygobject_get(py_retval);"
% self.get_c_type())
self.wrapper.write_code("g_object_ref((GObject *) retval);")
else:
self.wrapper.write_code(
code=None,
failure_expression="py_retval != Py_None && !PyObject_TypeCheck(py_retval, &PyGObject_Type)",
failure_exception='PyErr_SetString(PyExc_TypeError, "retval should be None or a GObject");')
self.wrapper.write_code("if (py_retval != Py_None) {\n"
" retval = (%s) pygobject_get(py_retval);\n"
" g_object_ref((GObject *) retval);\n"
"}\n"
% self.get_c_type())
argtypes.matcher.register_reverse_ret('GObject*', GObjectReturn)
class IntParam(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'int')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("py_%s = PyInt_FromLong(%s);" %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
class IntReturn(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'int')
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return -G_MAXINT;")
def write_conversion(self):
self.wrapper.add_pyret_parse_item("i", "&retval", prepend=True)
for argtype in ('int', 'gint', 'guint', 'short', 'gshort', 'gushort', 'long',
'glong', 'gsize', 'gssize', 'guint8', 'gint8', 'guint16',
'gint16', 'gint32', 'GTime'):
argtypes.matcher.register_reverse(argtype, IntParam)
argtypes.matcher.register_reverse_ret(argtype, IntReturn)
del argtype
class IntPtrParam(Parameter):
def __init__(self, wrapper, name, **props):
if "direction" not in props:
raise argtypes.ArgTypeConfigurationError(
"cannot use int* parameter without direction")
if props["direction"] not in ("out", "inout"):
raise argtypes.ArgTypeConfigurationError(
"cannot use int* parameter with direction '%s'"
% (props["direction"],))
Parameter.__init__(self, wrapper, name, **props)
def get_c_type(self):
return self.props.get('c_type', 'int*')
def convert_c2py(self):
if self.props["direction"] == "inout":
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("py_%s = PyInt_FromLong(*%s);" %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
self.wrapper.add_pyret_parse_item("i", self.name)
for argtype in ('int*', 'gint*'):
argtypes.matcher.register_reverse(argtype, IntPtrParam)
del argtype
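# For an int* declared "inout" the current value is forwarded to Python as an
# extra argument; in both the "out" and "inout" cases add_pyret_parse_item("i",
# name) makes the generated wrapper expect the updated value in the Python
# return tuple, where PyArg_ParseTuple writes it back through the pointer.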
class GEnumReturn(IntReturn):
def write_conversion(self):
self.wrapper.write_code(
code=None,
failure_expression=(
"pyg_enum_get_value(%s, py_retval, (gint *)&retval)"
% (self.props['typecode'],)))
argtypes.matcher.register_reverse_ret("GEnum", GEnumReturn)
class GEnumParam(IntParam):
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("py_%s = pyg_enum_from_gtype(%s, %s);" %
(self.name, self.props['typecode'], self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name),
failure_expression=("!py_%s" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
argtypes.matcher.register_reverse("GEnum", GEnumParam)
class GFlagsReturn(IntReturn):
def write_conversion(self):
self.wrapper.write_code(
code=None,
failure_expression=(
"pyg_flags_get_value(%s, py_retval, (gint *)&retval)" %
self.props['typecode']))
argtypes.matcher.register_reverse_ret("GFlags", GFlagsReturn)
class GFlagsParam(IntParam):
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=(
"py_%s = pyg_flags_from_gtype(%s, %s);" %
(self.name, self.props['typecode'], self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name),
failure_expression=("!py_%s" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
argtypes.matcher.register_reverse("GFlags", GFlagsParam)
class GtkTreePathParam(IntParam):
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=(
"py_%s = pygtk_tree_path_to_pyobject(%s);" %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name),
failure_expression=("!py_%s" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
argtypes.matcher.register_reverse("GtkTreePath*", GtkTreePathParam)
class GtkTreePathReturn(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'GtkTreePath *')
def write_decl(self):
self.wrapper.add_declaration("GtkTreePath * retval;")
def write_error_return(self):
self.wrapper.write_code("return NULL;")
def write_conversion(self):
self.wrapper.write_code(
"retval = pygtk_tree_path_from_pyobject(py_retval);\n",
failure_expression=('!retval'),
failure_exception=(
'PyErr_SetString(PyExc_TypeError, "retval should be a GtkTreePath");'))
argtypes.matcher.register_reverse_ret("GtkTreePath*", GtkTreePathReturn)
class BooleanReturn(ReturnType):
def get_c_type(self):
return "gboolean"
def write_decl(self):
self.wrapper.add_declaration("gboolean retval;")
self.wrapper.add_declaration("PyObject *py_main_retval;")
def write_error_return(self):
self.wrapper.write_code("return FALSE;")
def write_conversion(self):
self.wrapper.add_pyret_parse_item("O", "&py_main_retval", prepend=True)
self.wrapper.write_code(
"retval = PyObject_IsTrue(py_main_retval)? TRUE : FALSE;",
code_sink=self.wrapper.post_return_code)
argtypes.matcher.register_reverse_ret("gboolean", BooleanReturn)
class BooleanParam(Parameter):
def get_c_type(self):
return "gboolean"
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code("py_%s = %s? Py_True : Py_False;"
% (self.name, self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
argtypes.matcher.register_reverse("gboolean", BooleanParam)
class DoubleParam(Parameter):
def get_c_type(self):
return self.props.get('c_type', 'gdouble')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("py_%s = PyFloat_FromDouble(%s);" %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
class DoublePtrParam(Parameter):
def __init__(self, wrapper, name, **props):
if "direction" not in props:
raise argtypes.ArgTypeConfigurationError(
"cannot use double* parameter without direction")
if props["direction"] not in ("out", ): # inout not yet implemented
raise argtypes.ArgTypeConfigurationError(
"cannot use double* parameter with direction '%s'"
% (props["direction"],))
Parameter.__init__(self, wrapper, name, **props)
def get_c_type(self):
return self.props.get('c_type', 'double*')
def convert_c2py(self):
self.wrapper.add_pyret_parse_item("d", self.name)
for argtype in ('double*', 'gdouble*'):
argtypes.matcher.register_reverse(argtype, DoublePtrParam)
del argtype
class DoubleReturn(ReturnType):
def get_c_type(self):
return self.props.get('c_type', 'gdouble')
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return -G_MAXFLOAT;")
def write_conversion(self):
self.wrapper.add_pyret_parse_item("d", "&retval", prepend=True)
for argtype in ('float', 'double', 'gfloat', 'gdouble'):
argtypes.matcher.register_reverse(argtype, DoubleParam)
argtypes.matcher.register_reverse_ret(argtype, DoubleReturn)
class GBoxedParam(Parameter):
def get_c_type(self):
return self.props.get('c_type').replace('const-', 'const ')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
ctype = self.get_c_type()
if ctype.startswith('const '):
ctype_no_const = ctype[len('const '):]
self.wrapper.write_code(
code=('py_%s = pyg_boxed_new(%s, (%s) %s, TRUE, TRUE);' %
(self.name, self.props['typecode'],
ctype_no_const, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
else:
self.wrapper.write_code(
code=('py_%s = pyg_boxed_new(%s, %s, FALSE, FALSE);' %
(self.name, self.props['typecode'], self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
argtypes.matcher.register_reverse("GBoxed", GBoxedParam)
class GBoxedReturn(ReturnType):
def get_c_type(self):
return self.props.get('c_type')
def write_decl(self):
self.wrapper.add_declaration("%s retval;" % self.get_c_type())
def write_error_return(self):
self.wrapper.write_code("return retval;")
def write_conversion(self):
self.wrapper.write_code(code = None,
failure_expression=("!pyg_boxed_check(py_retval, %s)" %
(self.props['typecode'],)),
failure_exception=(
'PyErr_SetString(PyExc_TypeError, "retval should be a %s");'
% (self.props['typename'],)))
self.wrapper.write_code('retval = pyg_boxed_get(py_retval, %s);' %
self.props['typename'])
argtypes.matcher.register_reverse_ret("GBoxed", GBoxedReturn)
class GdkRegionPtrReturn(GBoxedReturn):
def write_error_return(self):
self.wrapper.write_code("return gdk_region_new();")
def write_conversion(self):
self.props['typecode'] = 'PYGDK_TYPE_REGION'
self.props['typename'] = 'GdkRegion'
super(GdkRegionPtrReturn, self).write_conversion()
argtypes.matcher.register_reverse_ret("GdkRegion*", GdkRegionPtrReturn)
class PangoFontDescriptionReturn(GBoxedReturn):
def write_error_return(self):
self.wrapper.write_code("return pango_font_description_new();")
def write_conversion(self):
self.props['typecode'] = 'PANGO_TYPE_FONT_DESCRIPTION'
self.props['typename'] = 'PangoFontDescription'
super(PangoFontDescriptionReturn, self).write_conversion()
argtypes.matcher.register_reverse_ret("PangoFontDescription*",
PangoFontDescriptionReturn)
class PangoFontMetricsReturn(GBoxedReturn):
def write_error_return(self):
self.wrapper.write_code("return pango_font_metrics_new();")
def write_conversion(self):
self.props['typecode'] = 'PANGO_TYPE_FONT_METRICS'
self.props['typename'] = 'PangoFontMetrics'
super(PangoFontMetricsReturn, self).write_conversion()
argtypes.matcher.register_reverse_ret("PangoFontMetrics*",
PangoFontMetricsReturn)
class PangoLanguageReturn(GBoxedReturn):
def write_error_return(self):
self.wrapper.write_code("return pango_language_from_string(\"\");")
def write_conversion(self):
self.props['typecode'] = 'PANGO_TYPE_LANGUAGE'
self.props['typename'] = 'PangoLanguage'
super(PangoLanguageReturn, self).write_conversion()
argtypes.matcher.register_reverse_ret("PangoLanguage*", PangoLanguageReturn)
class GdkRectanglePtrParam(Parameter):
def get_c_type(self):
return self.props.get('c_type').replace('const-', 'const ')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(
code=('py_%s = pyg_boxed_new(GDK_TYPE_RECTANGLE, %s, TRUE, TRUE);' %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name))
self.wrapper.add_pyargv_item("py_%s" % self.name)
argtypes.matcher.register_reverse("GdkRectangle*", GdkRectanglePtrParam)
argtypes.matcher.register_reverse('GtkAllocation*', GdkRectanglePtrParam)
class GErrorParam(Parameter):
def get_c_type(self):
return self.props.get('c_type').replace('**', ' **')
def convert_c2py(self):
self.wrapper.write_code(code=None,
failure_expression=("pyg_gerror_exception_check(%s)" % self.name),
code_sink=self.wrapper.check_exception_code)
argtypes.matcher.register_reverse('GError**', GErrorParam)
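# GError** arguments are never passed to the Python side; the generated
# wrapper instead calls pyg_gerror_exception_check() right after the Python
# call returns (emitted into check_exception_code), converting a raised
# Python exception into the GError the C caller supplied.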
class PyGObjectMethodParam(Parameter):
def __init__(self, wrapper, name, method_name, **props):
Parameter.__init__(self, wrapper, name, **props)
self.method_name = method_name
def get_c_type(self):
return self.props.get('c_type', 'GObject *')
def convert_c2py(self):
self.wrapper.add_declaration("PyObject *py_%s;" % self.name)
self.wrapper.write_code(code=("py_%s = pygobject_new((GObject *) %s);" %
(self.name, self.name)),
cleanup=("Py_DECREF(py_%s);" % self.name),
failure_expression=("!py_%s" % self.name))
self.wrapper.set_call_target("py_%s" % self.name, self.method_name)
class CallbackInUserDataParam(Parameter):
def __init__(self, wrapper, name, free_it, **props):
Parameter.__init__(self, wrapper, name, **props)
self.free_it = free_it
def get_c_type(self):
return "gpointer"
def convert_c2py(self):
self.wrapper.add_declaration("PyObject **_user_data;")
cleanup = self.free_it and ("g_free(%s);" % self.name) or None
self.wrapper.write_code(code=("_real_user_data = (PyObject **) %s;"
% self.name),
cleanup=cleanup)
self.wrapper.add_declaration("PyObject *py_func;")
cleanup = self.free_it and "Py_DECREF(py_func);" or None
self.wrapper.write_code(code="py_func = _user_data[0];",
cleanup=cleanup)
self.wrapper.set_call_target("py_func")
self.wrapper.add_declaration("PyObject *py_user_data;")
cleanup = self.free_it and "Py_XDECREF(py_user_data);" or None
self.wrapper.write_code(code="py_user_data = _user_data[1];",
cleanup=cleanup)
self.wrapper.add_pyargv_item("py_user_data", optional=True)
def _test():
import sys
if 1:
wrapper = ReverseWrapper("this_is_the_c_function_name", is_static=True)
wrapper.set_return_type(StringReturn(wrapper))
wrapper.add_parameter(PyGObjectMethodParam(wrapper, "self", method_name="do_xxx"))
wrapper.add_parameter(StringParam(wrapper, "param2", optional=True))
wrapper.add_parameter(GObjectParam(wrapper, "param3"))
#wrapper.add_parameter(InoutIntParam(wrapper, "param4"))
wrapper.generate(FileCodeSink(sys.stderr))
if 0:
wrapper = ReverseWrapper("this_a_callback_wrapper")
wrapper.set_return_type(VoidReturn(wrapper))
wrapper.add_parameter(StringParam(wrapper, "param1", optional=False))
wrapper.add_parameter(GObjectParam(wrapper, "param2"))
wrapper.add_parameter(CallbackInUserDataParam(wrapper, "data", free_it=True))
wrapper.generate(FileCodeSink(sys.stderr))
if __name__ == '__main__':
_test()

View file

@ -1,143 +0,0 @@
#!/usr/bin/env python
# -*- Mode: Python; py-indent-offset: 4 -*-
from __future__ import generators
import string
from cStringIO import StringIO
class error(Exception):
def __init__(self, filename, lineno, msg):
Exception.__init__(self, msg)
self.filename = filename
self.lineno = lineno
self.msg = msg
def __str__(self):
return '%s:%d: error: %s' % (self.filename, self.lineno, self.msg)
trans = [' '] * 256
for i in range(256):
if chr(i) in string.letters + string.digits + '_':
trans[i] = chr(i)
else:
trans[i] = '_'
trans = string.join(trans, '')
def parse(filename):
if isinstance(filename, str):
fp = open(filename, 'r')
else: # if not string, assume it is some kind of iterator
fp = filename
filename = getattr(fp, 'name', '<unknown>')
whitespace = ' \t\n\r\x0b\x0c'
nonsymbol = whitespace + '();\'"'
stack = []
openlines = []
lineno = 0
for line in fp:
pos = 0
lineno += 1
while pos < len(line):
if line[pos] in whitespace: # ignore whitespace
pass
elif line[pos] == ';': # comment
break
elif line[pos:pos+2] == "'(":
pass # the open parenthesis will be handled next iteration
elif line[pos] == '(':
stack.append(())
openlines.append(lineno)
elif line[pos] == ')':
if len(stack) == 0:
raise error(filename, lineno, 'close parenthesis found when none open')
closed = stack[-1]
del stack[-1]
del openlines[-1]
if stack:
stack[-1] += (closed,)
else:
yield closed
elif line[pos] == '"': # quoted string
if not stack:
raise error(filename, lineno,
'string found outside of s-expression')
endpos = pos + 1
chars = []
while endpos < len(line):
if endpos+1 < len(line) and line[endpos] == '\\':
endpos += 1
if line[endpos] == 'n':
chars.append('\n')
elif line[endpos] == 'r':
chars.append('\r')
elif line[endpos] == 't':
chars.append('\t')
else:
chars.append('\\')
chars.append(line[endpos])
elif line[endpos] == '"':
break
else:
chars.append(line[endpos])
endpos += 1
if endpos >= len(line):
raise error(filename, lineno, "unclosed quoted string")
pos = endpos
stack[-1] += (''.join(chars),)
else: # symbol/number
if not stack:
raise error(filename, lineno,
'identifier found outside of s-expression')
endpos = pos
while endpos < len(line) and line[endpos] not in nonsymbol:
endpos += 1
symbol = line[pos:endpos]
pos = max(pos, endpos-1)
try: symbol = int(symbol)
except ValueError:
try: symbol = float(symbol)
except ValueError: pass
stack[-1] += (symbol,)
pos += 1
if len(stack) != 0:
msg = '%d unclosed parentheses found at end of ' \
'file (opened on line(s) %s)' % (len(stack),
', '.join(map(str, openlines)))
raise error(filename, lineno, msg)
class Parser:
def __init__(self, filename):
"""Argument is either a string, a parse tree, or file object"""
self.filename = filename
def startParsing(self, filename=None):
statements = parse(filename or self.filename)
for statement in statements:
self.handle(statement)
def handle(self, tup):
cmd = string.translate(tup[0], trans)
if hasattr(self, cmd):
getattr(self, cmd)(*tup[1:])
else:
self.unknown(tup)
def unknown(self, tup):
pass
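# A user of this module subclasses Parser with one method per s-expression
# head; handle() maps non-identifier characters to '_', so a "define-object"
# form dispatches to a method named define_object. A minimal hypothetical
# subclass (the 'ges.defs' path is illustrative only):
#
#     class ObjectLister(Parser):
#         def define_object(self, name, *args):
#             print 'object:', name
#
#     ObjectLister('ges.defs').startParsing()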
_testString = """; a scheme file
(define-func gdk_font_load ; a comment at end of line
GdkFont
((string name)))
(define-boxed GdkEvent
gdk_event_copy
gdk_event_free
"sizeof(GdkEvent)")
"""
if __name__ == '__main__':
import sys
if sys.argv[1:]:
fp = open(sys.argv[1])
else:
fp = StringIO(_testString)
statements = parse(fp)
for s in statements:
print `s`

View file

@ -1,7 +0,0 @@
examplesdir = $(pkgdatadir)/$(GST_MAJORMINOR)/examples
examples_DATA = \
simple.py \
effect.py
EXTRA_DIST = $(examples_DATA)

View file

@ -1,100 +0,0 @@
#!/usr/bin/env python
#
# effect.py
#
# Copyright (C) 2011 Mathieu Duponchelle <seeed@laposte.net>
# Copyright (C) 2011 Luis de Bethencourt <luis.debethencourt@collabora.co.uk>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
import sys
import optparse
import glib
import gobject
gobject.threads_init()
import gst
import ges
class Effect:
def __init__(self, effects):
ges.init()
self.mainloop = glib.MainLoop()
self.timeline = ges.timeline_new_audio_video()
layer = ges.TimelineLayer()
self.src = ges.TimelineTestSource()
self.src.set_start(long(0))
self.src.set_duration(long(3000000000))
self.src.set_vpattern("smpte75")
layer.add_object(self.src)
self.timeline.add_layer(layer)
self.add_effects(effects)
self.pipeline = ges.TimelinePipeline()
self.pipeline.add_timeline(self.timeline)
bus = self.pipeline.get_bus()
bus.set_sync_handler(self.bus_handler)
def add_effects(self, effects):
for e in effects:
effect = ges.TrackParseLaunchEffect(e)
self.src.add_track_object(effect)
for track in self.timeline.get_tracks():
if track.get_caps().to_string() == \
"video/x-raw-yuv; video/x-raw-rgb":
print "setting effect: " + e
track.add_object(effect)
def bus_handler(self, unused_bus, message):
if message.type == gst.MESSAGE_ERROR:
print "ERROR"
self.mainloop.quit()
elif message.type == gst.MESSAGE_EOS:
print "Done"
self.mainloop.quit()
return gst.BUS_PASS
def run(self):
if (self.pipeline.set_state(gst.STATE_PLAYING) == \
gst.STATE_CHANGE_FAILURE):
print "Couldn't start pipeline"
self.mainloop.run()
def main(args):
usage = "usage: %s effect_name-1 .. effect_name-n\n" % args[0]
if len(args) < 2:
print usage + "using aging tv as a default instead"
args.append("agingtv")
parser = optparse.OptionParser (usage=usage)
(opts, args) = parser.parse_args ()
effect = Effect(args)
effect.run()
if __name__ == "__main__":
main(sys.argv)

View file

@ -1,99 +0,0 @@
#!/usr/bin/env python
#
# simple.py
#
# Copyright (C) 2011 Thibault Saunier <thibault.saunier@collabora.co.uk>
# Copyright (C) 2011 Luis de Bethencourt <luis.debethencourt@collabora.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
import sys
import optparse
import glib
import gobject
gobject.threads_init()
import gst
import ges
class Simple:
def __init__(self, uris):
# init ges to have debug logs
ges.init()
self.mainloop = glib.MainLoop()
timeline = ges.timeline_new_audio_video()
self.layer = ges.SimpleTimelineLayer()
timeline.add_layer(self.layer)
self.pipeline = ges.TimelinePipeline()
self.pipeline.add_timeline(timeline)
bus = self.pipeline.get_bus()
bus.set_sync_handler(self.bus_handler)
# all timeline objects except the last will have a transition at the end
for n in uris[:-1]:
self.add_timeline_object(n, True)
self.add_timeline_object(uris[-1], False)
def add_timeline_object(self, uri, do_transition):
filesource = ges.TimelineFileSource (uri)
filesource.set_duration(long (gst.SECOND * 5))
self.layer.add_object(filesource, -1)
if do_transition:
transition = ges.TimelineStandardTransition("crossfade")
transition.duration = gst.SECOND * 2
self.layer.add_object(transition, -1)
def bus_handler(self, unused_bus, message):
if message.type == gst.MESSAGE_ERROR:
print "ERROR"
self.mainloop.quit()
elif message.type == gst.MESSAGE_EOS:
print "Done"
self.mainloop.quit()
return gst.BUS_PASS
def run(self):
if (self.pipeline.set_state(gst.STATE_PLAYING) == \
gst.STATE_CHANGE_FAILURE):
print "Couldn't start pipeline"
self.mainloop.run()
def main(args):
usage = "usage: %s URI-OF-VIDEO-1 ... URI-OF-VIDEO-n\n" % args[0]
if len(args) < 2:
sys.stderr.write(usage)
sys.exit(1)
parser = optparse.OptionParser (usage=usage)
(options, args) = parser.parse_args ()
simple = Simple(args)
simple.run()
if __name__ == "__main__":
sys.exit(main(sys.argv))

View file

@ -1,391 +0,0 @@
;; -*- scheme -*-
; object definitions ...
(define-object Formatter
(in-module "GES")
(parent "GObject")
(c-name "GESFormatter")
(gtype-id "GES_TYPE_FORMATTER")
)
(define-object KeyfileFormatter
(in-module "GES")
(parent "GESFormatter")
(c-name "GESKeyfileFormatter")
(gtype-id "GES_TYPE_KEYFILE_FORMATTER")
)
(define-object Timeline
(in-module "GES")
(parent "GstBin")
(c-name "GESTimeline")
(gtype-id "GES_TYPE_TIMELINE")
)
(define-object TimelineLayer
(in-module "GES")
(parent "GInitiallyUnowned")
(c-name "GESTimelineLayer")
(gtype-id "GES_TYPE_TIMELINE_LAYER")
)
(define-object SimpleTimelineLayer
(in-module "GES")
(parent "GESTimelineLayer")
(c-name "GESSimpleTimelineLayer")
(gtype-id "GES_TYPE_SIMPLE_TIMELINE_LAYER")
)
(define-object TimelineObject
(in-module "GES")
(parent "GInitiallyUnowned")
(c-name "GESTimelineObject")
(gtype-id "GES_TYPE_TIMELINE_OBJECT")
)
(define-object TimelineOperation
(in-module "GES")
(parent "GESTimelineObject")
(c-name "GESTimelineOperation")
(gtype-id "GES_TYPE_TIMELINE_OPERATION")
)
(define-object TimelineEffect
(in-module "GES")
(parent "GESTimelineOperation")
(c-name "GESTimelineEffect")
(gtype-id "GES_TYPE_TIMELINE_EFFECT")
)
(define-object TimelineOverlay
(in-module "GES")
(parent "GESTimelineOperation")
(c-name "GESTimelineOverlay")
(gtype-id "GES_TYPE_TIMELINE_OVERLAY")
)
(define-object TimelineParseLaunchEffect
(in-module "GES")
(parent "GESTimelineEffect")
(c-name "GESTimelineParseLaunchEffect")
(gtype-id "GES_TYPE_TIMELINE_PARSE_LAUNCH_EFFECT")
)
(define-object TimelinePipeline
(in-module "GES")
(parent "GstPipeline")
(c-name "GESTimelinePipeline")
(gtype-id "GES_TYPE_TIMELINE_PIPELINE")
)
(define-object TimelineSource
(in-module "GES")
(parent "GESTimelineObject")
(c-name "GESTimelineSource")
(gtype-id "GES_TYPE_TIMELINE_SOURCE")
)
(define-object TimelineFileSource
(in-module "GES")
(parent "GESTimelineSource")
(c-name "GESTimelineFileSource")
(gtype-id "GES_TYPE_TIMELINE_FILE_SOURCE")
)
(define-object CustomTimelineSource
(in-module "GES")
(parent "GESTimelineSource")
(c-name "GESCustomTimelineSource")
(gtype-id "GES_TYPE_CUSTOM_TIMELINE_SOURCE")
)
(define-object TimelineTestSource
(in-module "GES")
(parent "GESTimelineSource")
(c-name "GESTimelineTestSource")
(gtype-id "GES_TYPE_TIMELINE_TEST_SOURCE")
)
(define-object TimelineTextOverlay
(in-module "GES")
(parent "GESTimelineOverlay")
(c-name "GESTimelineTextOverlay")
(gtype-id "GES_TYPE_TIMELINE_TEXT_OVERLAY")
)
(define-object TimelineTitleSource
(in-module "GES")
(parent "GESTimelineSource")
(c-name "GESTimelineTitleSource")
(gtype-id "GES_TYPE_TIMELINE_TITLE_SOURCE")
)
(define-object TimelineTransition
(in-module "GES")
(parent "GESTimelineOperation")
(c-name "GESTimelineTransition")
(gtype-id "GES_TYPE_TIMELINE_TRANSITION")
)
(define-object TimelineStandardTransition
(in-module "GES")
(parent "GESTimelineTransition")
(c-name "GESTimelineStandardTransition")
(gtype-id "GES_TYPE_TIMELINE_STANDARD_TRANSITION")
)
(define-object Track
(in-module "GES")
(parent "GstBin")
(c-name "GESTrack")
(gtype-id "GES_TYPE_TRACK")
)
(define-object TrackObject
(in-module "GES")
(parent "GInitiallyUnowned")
(c-name "GESTrackObject")
(gtype-id "GES_TYPE_TRACK_OBJECT")
)
(define-object TrackOperation
(in-module "GES")
(parent "GESTrackObject")
(c-name "GESTrackOperation")
(gtype-id "GES_TYPE_TRACK_OPERATION")
)
(define-object TrackEffect
(in-module "GES")
(parent "GESTrackOperation")
(c-name "GESTrackEffect")
(gtype-id "GES_TYPE_TRACK_EFFECT")
)
(define-object TrackParseLaunchEffect
(in-module "GES")
(parent "GESTrackEffect")
(c-name "GESTrackParseLaunchEffect")
(gtype-id "GES_TYPE_TRACK_PARSE_LAUNCH_EFFECT")
)
(define-object TrackSource
(in-module "GES")
(parent "GESTrackObject")
(c-name "GESTrackSource")
(gtype-id "GES_TYPE_TRACK_SOURCE")
)
(define-object TrackImageSource
(in-module "GES")
(parent "GESTrackSource")
(c-name "GESTrackImageSource")
(gtype-id "GES_TYPE_TRACK_IMAGE_SOURCE")
)
(define-object TrackFileSource
(in-module "GES")
(parent "GESTrackSource")
(c-name "GESTrackFileSource")
(gtype-id "GES_TYPE_TRACK_FILE_SOURCE")
)
(define-object TrackAudioTestSource
(in-module "GES")
(parent "GESTrackSource")
(c-name "GESTrackAudioTestSource")
(gtype-id "GES_TYPE_TRACK_AUDIO_TEST_SOURCE")
)
(define-object TrackTextOverlay
(in-module "GES")
(parent "GESTrackOperation")
(c-name "GESTrackTextOverlay")
(gtype-id "GES_TYPE_TRACK_TEXT_OVERLAY")
)
(define-object TrackTitleSource
(in-module "GES")
(parent "GESTrackSource")
(c-name "GESTrackTitleSource")
(gtype-id "GES_TYPE_TRACK_TITLE_SOURCE")
)
(define-object TrackTransition
(in-module "GES")
(parent "GESTrackOperation")
(c-name "GESTrackTransition")
(gtype-id "GES_TYPE_TRACK_TRANSITION")
)
(define-object TrackAudioTransition
(in-module "GES")
(parent "GESTrackTransition")
(c-name "GESTrackAudioTransition")
(gtype-id "GES_TYPE_TRACK_AUDIO_TRANSITION")
)
(define-object TrackVideoTestSource
(in-module "GES")
(parent "GESTrackSource")
(c-name "GESTrackVideoTestSource")
(gtype-id "GES_TYPE_TRACK_VIDEO_TEST_SOURCE")
)
(define-object TrackVideoTransition
(in-module "GES")
(parent "GESTrackTransition")
(c-name "GESTrackVideoTransition")
(gtype-id "GES_TYPE_TRACK_VIDEO_TRANSITION")
)
;; Enumerations and flags ...
(define-flags TrackType
(in-module "GES")
(c-name "GESTrackType")
(gtype-id "GES_TYPE_TRACK_TYPE")
(values
'("unknown" "GES_TRACK_TYPE_UNKNOWN")
'("audio" "GES_TRACK_TYPE_AUDIO")
'("video" "GES_TRACK_TYPE_VIDEO")
'("text" "GES_TRACK_TYPE_TEXT")
'("custom" "GES_TRACK_TYPE_CUSTOM")
)
)
(define-enum VideoStandardTransitionType
(in-module "GES")
(c-name "GESVideoStandardTransitionType")
(gtype-id "GES_VIDEO_STANDARD_TRANSITION_TYPE_TYPE")
(values
'("none" "GES_VIDEO_STANDARD_TRANSITION_TYPE_NONE")
'("bar-wipe-lr" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BAR_WIPE_LR")
'("bar-wipe-tb" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BAR_WIPE_TB")
'("box-wipe-tl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOX_WIPE_TL")
'("box-wipe-tr" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOX_WIPE_TR")
'("box-wipe-br" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOX_WIPE_BR")
'("box-wipe-bl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOX_WIPE_BL")
'("four-box-wipe-ci" "GES_VIDEO_STANDARD_TRANSITION_TYPE_FOUR_BOX_WIPE_CI")
'("four-box-wipe-co" "GES_VIDEO_STANDARD_TRANSITION_TYPE_FOUR_BOX_WIPE_CO")
'("barndoor-v" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BARNDOOR_V")
'("barndoor-h" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BARNDOOR_H")
'("box-wipe-tc" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOX_WIPE_TC")
'("box-wipe-rc" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOX_WIPE_RC")
'("box-wipe-bc" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOX_WIPE_BC")
'("box-wipe-lc" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOX_WIPE_LC")
'("diagonal-tl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DIAGONAL_TL")
'("diagonal-tr" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DIAGONAL_TR")
'("bowtie-v" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOWTIE_V")
'("bowtie-h" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BOWTIE_H")
'("barndoor-dbl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BARNDOOR_DBL")
'("barndoor-dtl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BARNDOOR_DTL")
'("misc-diagonal-dbd" "GES_VIDEO_STANDARD_TRANSITION_TYPE_MISC_DIAGONAL_DBD")
'("misc-diagonal-dd" "GES_VIDEO_STANDARD_TRANSITION_TYPE_MISC_DIAGONAL_DD")
'("vee-d" "GES_VIDEO_STANDARD_TRANSITION_TYPE_VEE_D")
'("vee-l" "GES_VIDEO_STANDARD_TRANSITION_TYPE_VEE_L")
'("vee-u" "GES_VIDEO_STANDARD_TRANSITION_TYPE_VEE_U")
'("vee-r" "GES_VIDEO_STANDARD_TRANSITION_TYPE_VEE_R")
'("barnvee-d" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BARNVEE_D")
'("barnvee-l" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BARNVEE_L")
'("barnvee-u" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BARNVEE_U")
'("barnvee-r" "GES_VIDEO_STANDARD_TRANSITION_TYPE_BARNVEE_R")
'("iris-rect" "GES_VIDEO_STANDARD_TRANSITION_TYPE_IRIS_RECT")
'("clock-cw12" "GES_VIDEO_STANDARD_TRANSITION_TYPE_CLOCK_CW12")
'("clock-cw3" "GES_VIDEO_STANDARD_TRANSITION_TYPE_CLOCK_CW3")
'("clock-cw6" "GES_VIDEO_STANDARD_TRANSITION_TYPE_CLOCK_CW6")
'("clock-cw9" "GES_VIDEO_STANDARD_TRANSITION_TYPE_CLOCK_CW9")
'("pinwheel-tbv" "GES_VIDEO_STANDARD_TRANSITION_TYPE_PINWHEEL_TBV")
'("pinwheel-tbh" "GES_VIDEO_STANDARD_TRANSITION_TYPE_PINWHEEL_TBH")
'("pinwheel-fb" "GES_VIDEO_STANDARD_TRANSITION_TYPE_PINWHEEL_FB")
'("fan-ct" "GES_VIDEO_STANDARD_TRANSITION_TYPE_FAN_CT")
'("fan-cr" "GES_VIDEO_STANDARD_TRANSITION_TYPE_FAN_CR")
'("doublefan-fov" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLEFAN_FOV")
'("doublefan-foh" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLEFAN_FOH")
'("singlesweep-cwt" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SINGLESWEEP_CWT")
'("singlesweep-cwr" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SINGLESWEEP_CWR")
'("singlesweep-cwb" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SINGLESWEEP_CWB")
'("singlesweep-cwl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SINGLESWEEP_CWL")
'("doublesweep-pv" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLESWEEP_PV")
'("doublesweep-pd" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLESWEEP_PD")
'("doublesweep-ov" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLESWEEP_OV")
'("doublesweep-oh" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLESWEEP_OH")
'("fan-t" "GES_VIDEO_STANDARD_TRANSITION_TYPE_FAN_T")
'("fan-r" "GES_VIDEO_STANDARD_TRANSITION_TYPE_FAN_R")
'("fan-b" "GES_VIDEO_STANDARD_TRANSITION_TYPE_FAN_B")
'("fan-l" "GES_VIDEO_STANDARD_TRANSITION_TYPE_FAN_L")
'("doublefan-fiv" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLEFAN_FIV")
'("doublefan-fih" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLEFAN_FIH")
'("singlesweep-cwtl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SINGLESWEEP_CWTL")
'("singlesweep-cwbl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SINGLESWEEP_CWBL")
'("singlesweep-cwbr" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SINGLESWEEP_CWBR")
'("singlesweep-cwtr" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SINGLESWEEP_CWTR")
'("doublesweep-pdtl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLESWEEP_PDTL")
'("doublesweep-pdbl" "GES_VIDEO_STANDARD_TRANSITION_TYPE_DOUBLESWEEP_PDBL")
'("saloondoor-t" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SALOONDOOR_T")
'("saloondoor-l" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SALOONDOOR_L")
'("saloondoor-b" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SALOONDOOR_B")
'("saloondoor-r" "GES_VIDEO_STANDARD_TRANSITION_TYPE_SALOONDOOR_R")
'("windshield-r" "GES_VIDEO_STANDARD_TRANSITION_TYPE_WINDSHIELD_R")
'("windshield-u" "GES_VIDEO_STANDARD_TRANSITION_TYPE_WINDSHIELD_U")
'("windshield-v" "GES_VIDEO_STANDARD_TRANSITION_TYPE_WINDSHIELD_V")
'("windshield-h" "GES_VIDEO_STANDARD_TRANSITION_TYPE_WINDSHIELD_H")
'("crossfade" "GES_VIDEO_STANDARD_TRANSITION_TYPE_CROSSFADE")
)
)
(define-enum TextVAlign
(in-module "GES")
(c-name "GESTextVAlign")
(gtype-id "GES_TEXT_VALIGN_TYPE")
(values
'("baseline" "GES_TEXT_VALIGN_BASELINE")
'("bottom" "GES_TEXT_VALIGN_BOTTOM")
'("top" "GES_TEXT_VALIGN_TOP")
)
)
(define-enum TextHAlign
(in-module "GES")
(c-name "GESTextHAlign")
(gtype-id "GES_TEXT_HALIGN_TYPE")
(values
'("left" "GES_TEXT_HALIGN_LEFT")
'("center" "GES_TEXT_HALIGN_CENTER")
'("right" "GES_TEXT_HALIGN_RIGHT")
)
)
(define-enum VideoTestPattern
(in-module "GES")
(c-name "GESVideoTestPattern")
(gtype-id "GES_VIDEO_TEST_PATTERN_TYPE")
(values
'("smpte" "GES_VIDEO_TEST_PATTERN_SMPTE")
'("snow" "GES_VIDEO_TEST_PATTERN_SNOW")
'("black" "GES_VIDEO_TEST_PATTERN_BLACK")
'("white" "GES_VIDEO_TEST_PATTERN_WHITE")
'("red" "GES_VIDEO_TEST_PATTERN_RED")
'("green" "GES_VIDEO_TEST_PATTERN_GREEN")
'("blue" "GES_VIDEO_TEST_PATTERN_BLUE")
'("checkers1" "GES_VIDEO_TEST_PATTERN_CHECKERS1")
'("checkers2" "GES_VIDEO_TEST_PATTERN_CHECKERS2")
'("checkers4" "GES_VIDEO_TEST_PATTERN_CHECKERS4")
'("checkers8" "GES_VIDEO_TEST_PATTERN_CHECKERS8")
'("circular" "GES_VIDEO_TEST_PATTERN_CIRCULAR")
'("blink" "GES_VIDEO_TEST_PATTERN_BLINK")
'("smpte75" "GES_VIDEO_TEST_PATTERN_SMPTE75")
)
)
(define-flags PipelineFlags
(in-module "GES")
(c-name "GESPipelineFlags")
(gtype-id "GES_TYPE_PIPELINE_FLAGS")
(values
'("preview-audio" "TIMELINE_MODE_PREVIEW_AUDIO")
'("preview-video" "TIMELINE_MODE_PREVIEW_VIDEO")
'("preview" "TIMELINE_MODE_PREVIEW")
'("render" "TIMELINE_MODE_RENDER")
'("smart-render" "TIMELINE_MODE_SMART_RENDER")
)
)

File diff suppressed because it is too large

View file

@ -1,401 +0,0 @@
/* -*- Mode: C; c-basic-offset: 4 -*- */
%%
headers
#define NO_IMPORT_PYGOBJECT
#include <pygobject.h>
#include <pygst/pygst.h>
#include <gst/gst.h>
#include <ges/ges.h>
#include <pygst/pygst.h>
#include <glib.h>
#include <pyglib.h>
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
/* Boonky define that allows for backwards compatibility with Python 2.4 */
#if PY_VERSION_HEX < 0x02050000
#define Py_ssize_t int
#endif
void pyges_register_classes(PyObject *d);
void pyges_add_constants(PyObject *module, const gchar *strip_prefix);
%%
import gobject.GObject as PyGObject_Type
import gobject.MainContext as PyGMainContext_Type
import gobject.GObject as PyGInitiallyUnowned_Type
import gst.Bin as PyGstBin_Type
import gst.Pipeline as PyGstPipeline_Type
import gst.Element as PyGstElement_Type
import gst.Buffer as PyGstBuffer_Type
import gst.Pad as PyGstPad_Type
import gst.pbutils.EncodingProfile as PyGstEncodingProfile_Type
%%
override ges_track_get_timeline kwargs
static PyObject *
_wrap_ges_track_get_timeline(PyGObject *self)
{
const GESTimeline *ret;
pyg_begin_allow_threads;
ret = ges_track_get_timeline(GES_TRACK(self->obj));
pyg_end_allow_threads;
/* pygobject_new handles NULL checking */
return pygobject_new((GObject *)ret);
}
%%
override ges_track_get_caps noargs
static PyObject *
_wrap_ges_track_get_caps(PyGObject *self, void* closure)
{
const GstCaps *ret;
ret = ges_track_get_caps(GES_TRACK(self->obj));
return pyg_boxed_new (GST_TYPE_CAPS, (GstCaps*) ret, TRUE, TRUE);
}
%%
override ges_track_set_caps kwargs
static PyObject *
_wrap_ges_track_set_caps(PyGObject *self, PyObject *args, PyObject *kwargs)
{
static char *kwlist[] = { "caps", NULL };
PyObject *py_caps;
GstCaps *caps;
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:GstBuffer.set_caps", kwlist, &py_caps))
return NULL;
caps = pyg_boxed_get (py_caps, GstCaps);
if (PyErr_Occurred())
return NULL;
pyg_begin_allow_threads;
ges_track_set_caps (GES_TRACK(self->obj), caps);
pyg_end_allow_threads;
Py_INCREF(Py_None);
return Py_None;
}
%%
new-constructor GES_TYPE_TRACK
%%
override ges_track_new kwargs
static int
_wrap_ges_track_new(PyGObject *self, PyObject *args, PyObject *kwargs)
{
static char *kwlist[] = { "type", "caps", NULL };
PyObject *py_type = NULL;
GESTrackType type;
GstCaps *caps;
PyObject *py_caps;
if (!PyArg_ParseTupleAndKeywords(args, kwargs,"OO:GES.Track.__init__", kwlist, &py_type, &py_caps))
return -1;
if (pyg_flags_get_value(GES_TYPE_TRACK_TYPE, py_type, (gpointer)&type))
return -1;
caps = pyg_boxed_get (py_caps, GstCaps);
self->obj = (GObject *)ges_track_new(type, caps);
if (!self->obj) {
PyErr_SetString(PyExc_RuntimeError, "could not create GESTrack object");
return -1;
}
pygobject_register_wrapper((PyObject *)self);
return 0;
}
%%
override ges_track_object_lookup_child kwargs
static PyObject *
_wrap_ges_track_object_lookup_child(PyGObject *self, PyObject *args, PyObject *kwargs)
{
static char *kwlist[] = {"name", NULL};
char *name = NULL;
GParamSpec *pspec;
GstElement *element;
gboolean ret;
if (!PyArg_ParseTupleAndKeywords(args, kwargs,
"z:GES.TrackObject.lookup_child", kwlist, &name))
return NULL;
pyg_begin_allow_threads;
ret = ges_track_object_lookup_child (GES_TRACK_OBJECT (self->obj), name, &element, &pspec);
pyg_end_allow_threads;
if (!ret)
return PyBool_FromLong(ret);
return pygobject_new((GObject *)element);
}
%%
override ges_timeline_parse_launch_effect_new kwargs
static int
_wrap_ges_timeline_parse_launch_effect_new(PyGObject *self, PyObject *args, PyObject *kwargs)
{
static char *kwlist[] = { "video_bin_description", "audio_bin_description", NULL };
char *video_bin_description, *audio_bin_description = NULL;
if (!PyArg_ParseTupleAndKeywords(args, kwargs,"zz:GES.TimelineParseLaunchEffect.__init__", kwlist, &video_bin_description, &audio_bin_description))
return -1;
self->obj = (GObject *)ges_timeline_parse_launch_effect_new(video_bin_description, audio_bin_description);
if (!self->obj) {
PyErr_SetString(PyExc_RuntimeError, "could not create GESTimelineParseLaunchEffect object");
return -1;
}
pygobject_register_wrapper((PyObject *)self);
return 0;
}
/* I did not override ges_formatter_get_data and set_data, as these functions are deprecated */
%%
override ges_timeline_object_get_top_effects noargs
static PyObject *
_wrap_ges_timeline_object_get_top_effects(PyGObject *self)
{
const GList *l, *list;
PyObject *py_list;
g_return_val_if_fail (GES_IS_TIMELINE_OBJECT (self->obj),PyList_New(0));
pyg_begin_allow_threads;
list = ges_timeline_object_get_top_effects(GES_TIMELINE_OBJECT(self->obj));
pyg_end_allow_threads;
py_list = PyList_New(0);
for (l = list; l; l = l->next) {
GESTrackEffect *track_effect = (GESTrackEffect*)l->data;
PyObject *py_track_effect = pygobject_new(G_OBJECT(track_effect));
PyList_Append(py_list, py_track_effect);
Py_DECREF(py_track_effect);
}
return py_list;
}
%%
override ges_timeline_get_tracks noargs
static PyObject *
_wrap_ges_timeline_get_tracks(PyGObject *self)
{
const GList *l, *list;
PyObject *py_list;
g_return_val_if_fail (GES_IS_TIMELINE (self->obj),PyList_New(0));
pyg_begin_allow_threads;
list = ges_timeline_get_tracks(GES_TIMELINE(self->obj));
pyg_end_allow_threads;
py_list = PyList_New(0);
for (l = list; l; l = l->next) {
GESTrack *track = (GESTrack*)l->data;
PyObject *py_track = pygobject_new(G_OBJECT(track));
PyList_Append(py_list, py_track);
Py_DECREF(py_track);
}
return py_list;
}
%%
override ges_timeline_get_layers noargs
static PyObject *
_wrap_ges_timeline_get_layers(PyGObject *self)
{
const GList *l, *list;
PyObject *py_list;
g_return_val_if_fail (GES_IS_TIMELINE (self->obj), PyList_New(0));
pyg_begin_allow_threads;
list = ges_timeline_get_layers(GES_TIMELINE(self->obj));
pyg_end_allow_threads;
py_list = PyList_New(0);
for (l = list; l; l = l->next) {
GESTimelineLayer *layer = (GESTimelineLayer*)l->data;
PyObject *py_layer = pygobject_new(G_OBJECT(layer));
PyList_Append(py_list, py_layer);
Py_DECREF(py_layer);
}
return py_list;
}
%%
override ges_timeline_layer_get_objects noargs
static PyObject *
_wrap_ges_timeline_layer_get_objects(PyGObject *self)
{
const GList *l, *list;
PyObject *py_list;
g_return_val_if_fail (GES_IS_TIMELINE_LAYER (self->obj), PyList_New(0));
pyg_begin_allow_threads;
list = ges_timeline_layer_get_objects(GES_TIMELINE_LAYER(self->obj));
pyg_end_allow_threads;
py_list = PyList_New(0);
for (l = list; l; l = l->next) {
GESTimelineObject *object = (GESTimelineObject*)l->data;
PyObject *py_object = pygobject_new(G_OBJECT(object));
PyList_Append(py_list, py_object);
Py_DECREF(py_object);
}
return py_list;
}
%%
override ges_timeline_object_get_track_objects noargs
static PyObject *
_wrap_ges_timeline_object_get_track_objects(PyGObject *self)
{
const GList *l, *list;
PyObject *py_list;
g_return_val_if_fail (GES_IS_TIMELINE_OBJECT (self->obj), PyList_New(0));
pyg_begin_allow_threads;
list = ges_timeline_object_get_track_objects(GES_TIMELINE_OBJECT(self->obj));
pyg_end_allow_threads;
py_list = PyList_New(0);
for (l = list; l; l = l->next) {
GESTrackObject *object = (GESTrackObject*)l->data;
PyObject *py_object = pygobject_new(G_OBJECT(object));
PyList_Append(py_list, py_object);
Py_DECREF(py_object);
}
return py_list;
}
%%
override ges_track_object_get_child_property kwargs
static PyObject *
_wrap_ges_track_object_get_child_property (PyGObject *self, PyObject *args, PyObject *kwargs)
{
gchar *property_name;
GESTrackObject *obj = GES_TRACK_OBJECT (self->obj);
GParamSpec *pspec = NULL;
GValue value = { 0, } ;
PyObject *ret;
if (!PyArg_ParseTuple(args, "s:GESTrackObject.get_child_property",
&property_name)) {
return NULL;
}
ges_track_object_lookup_child(obj, property_name, NULL, &pspec);
if (!pspec) {
gchar buf[512];
g_snprintf(buf, sizeof(buf),
"container does not support property `%s'",
property_name);
PyErr_SetString(PyExc_TypeError, buf);
return NULL;
}
g_value_init(&value, G_PARAM_SPEC_VALUE_TYPE(pspec));
ges_track_object_get_child_property_by_pspec(obj,
pspec,
&value);
ret = pyg_value_as_pyobject(&value, TRUE);
g_value_unset(&value);
return ret;
}
%%
override ges_track_object_set_child_property kwargs
static PyObject *
_wrap_ges_track_object_set_child_property (PyGObject *self, PyObject *args, PyObject *kwargs)
{
gchar *property_name;
GESTrackObject *obj = GES_TRACK_OBJECT (self->obj);
GParamSpec *pspec = NULL;
PyGObject *pyvalue;
GValue value = { 0, } ;
if (!PyArg_ParseTuple(args, "sO:GESTrackObject.set_child_property",
&property_name, &pyvalue)) {
return NULL;
}
ges_track_object_lookup_child(obj, property_name, NULL, &pspec);
if (!pspec) {
gchar buf[512];
g_snprintf(buf, sizeof(buf),
"container does not support property `%s'",
property_name);
PyErr_SetString(PyExc_TypeError, buf);
return NULL;
}
g_value_init(&value, G_PARAM_SPEC_VALUE_TYPE(pspec));
pyg_value_from_pyobject(&value, (PyObject*)pyvalue);
ges_track_object_set_child_property_by_pspec(obj,
pspec,
&value);
g_value_unset(&value);
Py_INCREF(Py_None);
return Py_None;
}
%%
override ges_track_object_list_children_properties noargs
static PyObject *
_wrap_ges_track_object_list_children_properties (PyGObject *self)
{
GParamSpec **specs;
PyObject *list;
guint nprops;
guint i;
specs = ges_track_object_list_children_properties(GES_TRACK_OBJECT (self->obj), &nprops);
list = PyTuple_New(nprops);
if (list == NULL) {
g_free(specs);
return NULL;
}
for (i = 0; i < nprops; i++) {
PyTuple_SetItem(list, i, pyg_param_spec_new(specs[i]));
}
g_free(specs);
return list;
}
%%
ignore-glob
*_get_type
ges_formatter_set_data
ges_formatter_get_data
*_valist
*_by_pspec

View file

@ -1,37 +0,0 @@
/* -*- Mode: C; c-basic-offset: 4 -*- */
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <Python.h>
#include <pygobject.h>
#include <pyglib.h>
#include <pygst/pygst.h>
/* include any extra headers needed here */
void pyges_register_classes (PyObject * d);
extern PyMethodDef pyges_functions[];
DL_EXPORT (void)
initges (void);
DL_EXPORT (void)
initges (void)
{
PyObject *m, *d;
/* perform any initialisation required by the library here */
m = Py_InitModule ("ges", pyges_functions);
d = PyModule_GetDict (m);
init_pygobject ();
pygst_init ();
pygst_init ();
pyges_register_classes (d);
/* add anything else to the module dictionary (such as constants) */
if (PyErr_Occurred ())
Py_FatalError ("could not initialise module ges");
}

View file

@ -1,19 +0,0 @@
EXTRA_DIST = \
common.py \
test_timeline_pipeline.py \
test_timeline.py \
runtests.py \
test_global_functions.py \
test_timeline_test_source.py \
test_layer.py \
test_timeline_title_source.py \
test_simple_layer.py \
test_track.py \
test_textoverlay.py \
test_transition.py \
test_timeline_file_source.py \
test_timeline_parse_launch_effect.py
check:
@PYTHONPATH=$(top_builddir):$(top_builddir)/bindings/python/.libs:`pwd`:$(top_srcdir):$(PYTHONPATH) $(PYTHON) $(srcdir)/runtests.py
@rm -fr *.pyc

View file

@ -1,128 +0,0 @@
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# gst-python - Python bindings for GStreamer
# Copyright (C) 2002 David I. Lehn
# Copyright (C) 2004 Johan Dahlin
# Copyright (C) 2005 Edward Hervey
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import sys
import gc
import unittest
import gst
import gobject
try:
gobject.threads_init()
except:
print "WARNING: gobject doesn't have threads_init, no threadsafety"
def disable_stderr():
global _stderr
_stderr = file('/tmp/stderr', 'w+')
sys.stderr = os.fdopen(os.dup(2), 'w')
os.close(2)
os.dup(_stderr.fileno())
def enable_stderr():
global _stderr
os.close(2)
os.dup(sys.stderr.fileno())
_stderr.seek(0, 0)
data = _stderr.read()
_stderr.close()
os.remove('/tmp/stderr')
return data
def run_silent(function, *args, **kwargs):
disable_stderr()
try:
function(*args, **kwargs)
except Exception, exc:
enable_stderr()
raise exc
output = enable_stderr()
return output
class TestCase(unittest.TestCase):
_types = [gst.Object, gst.MiniObject]
def gccollect(self):
# run the garbage collector
ret = 0
gst.debug('garbage collecting')
while True:
c = gc.collect()
ret += c
if c == 0: break
gst.debug('done garbage collecting, %d objects' % ret)
return ret
def gctrack(self):
# store all gst objects in the gc in a tracking dict
# call before doing any allocation in your test, from setUp
gst.debug('tracking gc GstObjects for types %r' % self._types)
self.gccollect()
self._tracked = {}
for c in self._types:
self._tracked[c] = [o for o in gc.get_objects() if isinstance(o, c)]
def gcverify(self):
# verify no new gst objects got added to the gc
# call after doing all cleanup in your test, from tearDown
gst.debug('verifying gc GstObjects for types %r' % self._types)
new = []
for c in self._types:
objs = [o for o in gc.get_objects() if isinstance(o, c)]
new.extend([o for o in objs if o not in self._tracked[c]])
self.failIf(new, new)
#self.failIf(new, ["%r:%d" % (type(o), id(o)) for o in new])
del self._tracked
def setUp(self):
"""
Override me by chaining up to me at the start of your setUp.
"""
# Using private variables is BAD ! this variable changed name in
# python 2.5
try:
methodName = self.__testMethodName
except:
methodName = self._testMethodName
gst.debug('%s.%s' % (self.__class__.__name__, methodName))
self.gctrack()
def tearDown(self):
"""
Override me by chaining up to me at the end of your tearDown.
"""
# Using private variables is BAD ! this variable changed name in
# python 2.5
try:
methodName = self.__testMethodName
except:
methodName = self._testMethodName
gst.debug('%s.%s' % (self.__class__.__name__, methodName))
self.gccollect()
self.gcverify()
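# Test modules derive from this TestCase so that gctrack() runs before and
# gccollect()/gcverify() run after every test, failing it if tracked
# GstObjects leaked. A minimal hypothetical test module (names are
# illustrative only):
#
#     from common import TestCase
#     import ges
#
#     class TimelineLeakTest(TestCase):
#         def testCreateAndDrop(self):
#             timeline = ges.Timeline()
#             del timeline
#             self.gccollect()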

View file

@ -1,37 +0,0 @@
import glob
import os
import sys
import unittest
SKIP_FILES = ['common', 'runtests']
os.environ['LC_MESSAGES'] = 'C'
def gettestnames(which):
if not which:
dir = os.path.split(os.path.abspath(__file__))[0]
which = [os.path.basename(p) for p in glob.glob('%s/test_*.py' % dir)]
names = map(lambda x: x[:-3], which)
for f in SKIP_FILES:
if f in names:
names.remove(f)
return names
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for name in gettestnames(sys.argv[1:]):
suite.addTest(loader.loadTestsFromName(name))
descriptions = 1
verbosity = 1
if os.environ.has_key('VERBOSE'):
descriptions = 2
verbosity = 2
testRunner = unittest.TextTestRunner(descriptions=descriptions,
verbosity=verbosity)
result = testRunner.run(suite)
if result.failures or result.errors:
sys.exit(1)

View file

@ -1,11 +0,0 @@
import glib
import gst
from common import TestCase
import ges
class GlobalFunctions(TestCase):
def testGlobalFunctions(self):
tl = ges.timeline_new_audio_video()
tr = ges.timeline_standard_transition_new_for_nick("crossfade")

View file

@ -1,21 +0,0 @@
import glib
import gst
from common import TestCase
import ges
class Layer(TestCase):
def testLayer(self):
lyr = ges.TimelineLayer()
tl = ges.Timeline()
src = ges.TimelineTestSource()
lyr.set_timeline(tl)
assert (lyr.add_object(src) == True)
self.failIf(len (lyr.get_objects()) != 1)
assert (lyr.remove_object(src) == True)
lyr.set_priority(1)
self.failIf(lyr.get_priority() != 1)

View file

@ -1,22 +0,0 @@
import glib
import gst
from common import TestCase
import ges
class SimpleLayer(TestCase):
def testSimpleLayer(self):
lyr = ges.SimpleTimelineLayer()
tl = ges.Timeline()
src = ges.TimelineTestSource()
src2 = ges.TimelineTestSource()
lyr.set_timeline(tl)
assert (lyr.add_object(src, 0) == True)
assert (lyr.add_object(src2, 1) == True)
assert (lyr.nth(0) == src)
assert (lyr.move_object (src, 1) == True)
self.failIf(lyr.index(src) != 1)
assert (lyr.is_valid() == True)

View file

@@ -1,60 +0,0 @@
import gst
from common import TestCase
import ges
from time import sleep
class TextOverlay(TestCase):
def testTextOverlay(self):
ovrl = ges.TimelineTextOverlay()
lyr = ges.TimelineLayer()
tl = ges.timeline_new_audio_video()
tck = tl.get_tracks()[0]
ovrl.set_text("Foo")
self.failIf (ovrl.get_text() != "Foo")
ovrl.set_font_desc ("Arial")
self.failIf (ovrl.get_font_desc() != "Arial")
ovrl.set_valign("top")
self.failIf (ovrl.get_valignment().value_name != "top")
ovrl.set_halign("left")
self.failIf (ovrl.get_halignment().value_name != "left")
#We will test Timeline Object class functions here
ovrl.set_start(long(100))
ovrl.set_inpoint(long(50))
ovrl.set_duration(long(500))
ovrl.set_priority(2)
ovrl.set_layer(lyr)
tck_obj = ovrl.create_track_object(tck)
self.failIf (ovrl.release_track_object(tck_obj) != True)
self.failIf (ovrl.add_track_object(tck_obj) != True)
self.failIf (len(ovrl.get_track_objects()) != 1)
self.failIf (ovrl.get_layer() != lyr)
ovrl.release_track_object(tck_obj)
#We test TrackTextOverlay and TrackObject here
tck_obj.set_text("Bar")
self.failIf (tck_obj.get_text() != "Bar")
tck_obj.set_font_desc ("Arial")
self.failIf (tck_obj.get_font_desc() != "Arial")
tck_obj.set_valignment("top")
self.failIf (tck_obj.get_valignment().value_name != "top")
tck_obj.set_halignment("left")
self.failIf (tck_obj.get_halignment().value_name != "left")
tck_obj.set_locked(False)
self.failIf (tck_obj.is_locked() != False)
tck_obj.set_start(long(100))
tck_obj.set_inpoint(long(50))
tck_obj.set_duration(long(500))
tck_obj.set_priority(2)
self.failIf (tck_obj.get_start() != 100)
self.failIf (tck_obj.get_inpoint() != 50)
self.failIf (tck_obj.get_duration() != 500)
self.failIf (tck_obj.get_priority() != 2)
tck_obj.set_timeline_object(ovrl)
self.failIf(tck_obj.get_timeline_object() != ovrl)


@@ -1,22 +0,0 @@
import glib
import gst
from common import TestCase
import ges
class Timeline(TestCase):
def testTimeline(self):
tl = ges.timeline_new_audio_video()
lyr = ges.SimpleTimelineLayer()
tck = ges.track_audio_raw_new()
assert (tl.add_track(tck) == True)
#We should have two tracks from the timeline_new_audio_video() function + 1
self.failIf(len(tl.get_tracks()) != 3)
assert (tl.remove_track(tck) == True)
assert (tl.add_layer(lyr) == True)
self.failIf(len(tl.get_layers()) != 1)
assert (tl.remove_layer(lyr) == True)


@@ -1,18 +0,0 @@
import gst
from common import TestCase
import ges
class TimelineFileSource(TestCase):
def testTimelineFileSource(self):
src = ges.TimelineFileSource("file://blahblahblah")
src.set_mute(True)
src.set_max_duration(long(100))
src.set_supported_formats("video")
assert (src.get_supported_formats().value_nicks[0] == "video")
src.set_is_image(True)
assert (src.get_max_duration() == 100)
assert (src.is_image() == True)
assert (src.get_uri() == "file://blahblahblah")


@@ -1,24 +0,0 @@
import gst
from common import TestCase
import ges
from time import sleep
class ParseLaunchEffect(TestCase):
def testParseLaunchEffect(self):
tl = ges.Timeline()
tck = ges.track_video_raw_new()
lyr = ges.TimelineLayer()
efct = ges.TimelineParseLaunchEffect("agingtv", None)
tck_efct = ges.TrackParseLaunchEffect("agingtv")
tl.add_layer(lyr)
efct.add_track_object(tck_efct)
lyr.add_object(efct)
tck.set_timeline(tl)
tck.add_object(tck_efct)
tck_efct.set_child_property("GstAgingTV::scratch-lines", 17)
self.failIf(tck_efct.get_child_property("GstAgingTV::scratch-lines") != 17)
self.failIf(len(tck_efct.list_children_properties()) != 6)
self.failIf (tck_efct.lookup_child ("scratch-lines") == None)
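
The child-property calls exercised above can also be used to discover what an effect exposes at runtime. The sketch below is hypothetical; it repeats the setup from the test so the effect is attached to a track before its properties are enumerated, and it assumes list_children_properties() returns GParamSpec objects with a name attribute, as pygobject normally wraps them.

import gst
import ges

tl = ges.Timeline()
tck = ges.track_video_raw_new()
lyr = ges.TimelineLayer()
efct = ges.TimelineParseLaunchEffect("agingtv", None)
tck_efct = ges.TrackParseLaunchEffect("agingtv")
tl.add_layer(lyr)
efct.add_track_object(tck_efct)
lyr.add_object(efct)
tck.set_timeline(tl)
tck.add_object(tck_efct)
# once the effect is attached to a track its element's properties show up
# as child properties; list them by name
for pspec in tck_efct.list_children_properties():
    print pspec.name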


@@ -1,14 +0,0 @@
import glib
import gst
from common import TestCase
import ges
class TimelinePipeline(TestCase):
def testTimelinePipeline(self):
stgs = gst.pbutils.EncodingAudioProfile(gst.Caps("video/x-dirac"), "test", gst.caps_new_any(), 0)
ppln = ges.TimelinePipeline()
tl = ges.Timeline()
assert (ppln.add_timeline (tl) == True)
assert (ppln.set_mode("TIMELINE_MODE_PREVIEW_AUDIO") == True)


@@ -1,19 +0,0 @@
import glib
import gst
from common import TestCase
import ges
class TimelineTestSource(TestCase):
def testTimelineTestSource(self):
src = ges.TimelineTestSource()
tck_src = ges.TrackAudioTestSource()
src.set_mute(True)
src.set_vpattern("snow")
src.set_frequency(880)
src.set_volume (1)
assert (src.get_vpattern() != None)
assert (src.is_muted() == True)
assert (src.get_frequency() == 880)
assert (src.get_volume() == 1)


@@ -1,23 +0,0 @@
import gst
from common import TestCase
import ges
class TimelineTitleSource(TestCase):
def testTimelineTitleSource(self):
src = ges.TimelineTitleSource()
lyr = ges.TimelineLayer()
tck = ges.track_video_raw_new()
src.set_text("Foo")
self.failIf (src.get_text() != "Foo")
src.set_font_desc ("Arial")
self.failIf (src.get_font_desc() != "Arial")
src.set_valignment("top")
assert (src.get_valignment().value_name == "top")
src.set_halignment("left")
assert (src.get_halignment().value_name == "left")
src.set_mute(True)
assert (src.is_muted() == True)


@@ -1,24 +0,0 @@
import gst
from common import TestCase
import ges
class Track(TestCase):
def testTrack(self):
tck = ges.track_video_raw_new()
tl = ges.Timeline()
lyr = ges.TimelineLayer()
src = ges.TimelineTestSource()
caps = gst.caps_from_string("image/jpeg")
obj = ges.TrackParseLaunchEffect ("agingtv")
tl.add_layer(lyr)
src.add_track_object(obj)
lyr.add_object(src)
tck.set_timeline(tl)
assert (tck.add_object(obj) == True)
assert (tck.get_timeline() == tl)
tck.set_caps(caps)
assert (tck.get_caps().to_string() == "image/jpeg")


@@ -1,50 +0,0 @@
import glib
import gst
from common import TestCase
import ges
class Timeline(TestCase):
def testTimeline(self):
tl = ges.timeline_new_audio_video()
lyr = ges.SimpleTimelineLayer()
src = ges.TimelineTestSource()
src2 = ges.TimelineTestSource()
tr = ges.TimelineStandardTransition("crossfade")
pip = ges.TimelinePipeline()
bus = pip.get_bus()
self.mainloop = glib.MainLoop()
# Let's add the layer to the timeline, and the sources
# and transition to the layer.
tl.add_layer(lyr)
src.set_duration(long(gst.SECOND * 10))
src2.set_duration(long(gst.SECOND * 10))
src.set_vpattern("Random (television snow)")
tr.set_duration(long(gst.SECOND * 10))
lyr.add_object(src, -1)
lyr.add_object(tr, -1)
assert (lyr.add_object(src2, -1) == True)
pip.add_timeline(tl)
bus.set_sync_handler(self.bus_handler)
self.pipeline = pip
self.layer = lyr
#Mainloop is finished, tear down.
self.pipeline = None
def bus_handler(self, unused_bus, message):
if message.type == gst.MESSAGE_ERROR:
print "ERROR"
self.mainloop.quit()
elif message.type == gst.MESSAGE_EOS:
print "Done"
self.mainloop.quit()
return gst.BUS_PASS
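
As reproduced here, the hunk above wires up the pipeline, the bus handler and the main loop but stops before playback is actually started. The sketch below shows that remaining preview step; it is hypothetical, mostly reuses calls that appear in these tests, and the set_state() calls assume a ges.TimelinePipeline can be driven like an ordinary gst.Pipeline, as its C counterpart can.

import glib
import gst
import ges

tl = ges.timeline_new_audio_video()
lyr = ges.SimpleTimelineLayer()
src = ges.TimelineTestSource()
src.set_duration(long(gst.SECOND * 2))
tl.add_layer(lyr)
lyr.add_object(src, -1)
pip = ges.TimelinePipeline()
pip.add_timeline(tl)
pip.set_mode("TIMELINE_MODE_PREVIEW_AUDIO")
mainloop = glib.MainLoop()
def bus_handler(unused_bus, message):
    # quit the loop on error or end-of-stream, like the handler above
    if message.type in (gst.MESSAGE_ERROR, gst.MESSAGE_EOS):
        mainloop.quit()
    return gst.BUS_PASS
pip.get_bus().set_sync_handler(bus_handler)
pip.set_state(gst.STATE_PLAYING)   # plain gst.Pipeline call (assumed inherited)
mainloop.run()
pip.set_state(gst.STATE_NULL)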


@@ -115,57 +115,8 @@ GOBJECT_INTROSPECTION_CHECK([0.9.6])
dnl check for documentation tools
AG_GST_DOCBOOK_CHECK
GTK_DOC_CHECK([1.3])
AS_PATH_PYTHON([2.1])
AG_GST_PLUGIN_DOCS([1.3],[2.1])
dnl check for python
AM_PATH_PYTHON
AC_MSG_CHECKING(for python >= 2.3)
prog="
import sys, string
minver = (2,3,0,'final',0)
if sys.version_info < minver:
sys.exit(1)
sys.exit(0)"
if $PYTHON -c "$prog" 1>&AC_FD_CC 2>&AC_FD_CC
then
HAVE_PYTHON=yes
AC_MSG_RESULT(okay)
else
HAVE_PYTHON=no
AC_MSG_RESULT(no python)
fi
AM_CHECK_PYTHON_HEADERS([HAVE_PYTHON_HEADERS=yes],[HAVE_PYTHON_HEADERS=no])
dnl check for pygobject (optional, used in the bindings)
PYGOBJECT_REQ=2.11.2
PKG_CHECK_MODULES(PYGOBJECT, pygobject-2.0 >= $PYGOBJECT_REQ,
[HAVE_PYGOBJECT="yes"], [HAVE_PYGOBJECT="no"])
AC_SUBST(PYGOBJECT_CFLAGS)
dnl check for gst-python
PKG_CHECK_MODULES(PYGST, gst-python-0.10,
[HAVE_PYGST="yes"], [HAVE_PYGST="no"])
if test "x$HAVE_PYGST" = "xyes"; then
PYGST_DEFSDIR=`pkg-config gst-python-0.10 --variable=defsdir`
fi
AC_SUBST(PYGST_DEFSDIR, $PYGST_DEFSDIR)
if test \
"x$HAVE_PYTHON" = "xyes" -a \
"x$HAVE_PYTHON_HEADERS" = "xyes" -a \
"x$HAVE_PYGOBJECT" = "xyes" -a \
"x$HAVE_PYGST" = "xyes"; then
HAVE_PYTHON_BINDINGS="yes"
else
HAVE_PYTHON_BINDINGS="no"
fi
AM_CONDITIONAL(WITH_PYTHON, [test "x$HAVE_PYTHON_BINDINGS" = "xyes"])
dnl *** checks for libraries ***
dnl check for libm, for sin() etc.
@@ -350,11 +301,6 @@ common/m4/Makefile
gst-editing-services.spec
m4/Makefile
ges/Makefile
bindings/Makefile
bindings/python/Makefile
bindings/python/codegen/Makefile
bindings/python/examples/Makefile
bindings/python/testsuite/Makefile
tests/Makefile
tests/check/Makefile
tests/examples/Makefile