Pass python files through autopep8

Thibault Saunier 2019-03-16 12:21:34 -03:00 committed by Thibault Saunier
parent 091ce6bcfe
commit 6f9e5d4494
18 changed files with 167 additions and 159 deletions
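
The exact autopep8 invocation is not recorded in the commit; the sketch below is an assumption showing how a comparable in-place cleanup could be reproduced over the tree. The ignore list is hypothetical, chosen to mirror the pycodestyle options used by the pre-commit hook further down.

# Hedged sketch, not taken from the commit: one plausible way to apply
# autopep8 in place across the repository's Python files, skipping the
# same checks the pre-commit hook ignores (assumption).
import pathlib
import subprocess

IGNORES = 'E402,E501,E128,W605,W503'  # assumption, mirrors the hook below

for path in pathlib.Path('.').rglob('*.py'):
    # Rewrite each file in place; autopep8 changes style only, not behaviour.
    subprocess.run(['autopep8', '--in-place', '--ignore', IGNORES, str(path)],
                   check=True)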

View file

@ -58,7 +58,7 @@ def handle_exception(default_return):
def wrapped_func(*args, **kargs):
try:
return func(*args, **kargs)
except:
except BaseException:
# Use excepthook directly to avoid any printing to the screen
# if someone installed an except hook.
sys.excepthook(*sys.exc_info())

View file

@ -137,7 +137,7 @@ class App (object):
try:
Common.Main.MainLoopWrapper(Gtk.main, Gtk.main_quit).run()
except:
except BaseException:
raise
else:
self.detach()

View file

@ -409,7 +409,7 @@ class SubRange (object):
raise ValueError(
"need start <= stop (got %r, %r)" % (start, stop,))
if type(size) == type(self):
if isinstance(size, type(self)):
# Another SubRange, don't stack:
start += size.start
stop += size.start

View file

@ -20,13 +20,14 @@
"""GStreamer Debug Viewer program invocation."""
def main ():
def main():
import sys
import os.path
def substituted (s):
if s.startswith ("@") and s.endswith ("@"):
def substituted(s):
if s.startswith("@") and s.endswith("@"):
return None
else:
return s
@ -34,35 +35,36 @@ def main ():
# These "$"-enclosed strings are substituted at install time by a custom
# distutils extension (see setup.py). If you don't see any dollar signs at
# all, you are looking at an installed version of this file.
data_dir = substituted ("@DATADIR@")
lib_dir = substituted ("@LIBDIR@")
data_dir = substituted("@DATADIR@")
lib_dir = substituted("@LIBDIR@")
if data_dir:
installed = True
else:
# Substitution has not been run, we are running uninstalled:
lib_dir = os.path.dirname (os.path.realpath (sys.argv[0]))
lib_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
installed = False
if lib_dir:
if not os.path.normpath (lib_dir) in [os.path.normpath (p)
for p in sys.path]:
sys.path.insert (0, lib_dir)
if not os.path.normpath(lib_dir) in [os.path.normpath(p)
for p in sys.path]:
sys.path.insert(0, lib_dir)
try:
import GstDebugViewer
except ImportError as exc:
print(str (exc), file=sys.stderr)
sys.exit (1)
print(str(exc), file=sys.stderr)
sys.exit(1)
else:
if installed:
GstDebugViewer.Paths.setup_installed (data_dir)
GstDebugViewer.Paths.setup_installed(data_dir)
else:
# Assume that we reside inside the source dist.
source_dir = os.path.dirname (os.path.realpath (sys.argv[0]))
GstDebugViewer.Paths.setup_uninstalled (source_dir)
source_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
GstDebugViewer.Paths.setup_uninstalled(source_dir)
GstDebugViewer.run()
GstDebugViewer.run ()
if __name__ == "__main__":
main ()
main()

View file

@ -28,7 +28,7 @@ def system(*args, **kwargs):
kwargs.setdefault('stdout', subprocess.PIPE)
proc = subprocess.Popen(args, **kwargs)
out, err = proc.communicate()
if type(out) == bytes:
if isinstance(out, bytes):
out = out.decode()
return out
@ -56,7 +56,7 @@ def main():
try:
if not modified_file.endswith(".py"):
continue
pycodestyle_errors = system('pycodestyle', '--repeat', '--ignore', 'E501,E128,W605,W503', modified_file)
pycodestyle_errors = system('pycodestyle', '--repeat', '--ignore', 'E402,E501,E128,W605,W503', modified_file)
if pycodestyle_errors:
if output_message is None:
output_message = NOT_PYCODESTYLE_COMPLIANT_MESSAGE_PRE

View file

@ -50,29 +50,28 @@ class Stats(Analyzer):
def handle_tracer_class(self, event):
s = Structure(event[Parser.F_MESSAGE])
# TODO only for debugging
#print("tracer class:", repr(s))
# print("tracer class:", repr(s))
name = s.name[:-len('.class')]
record = {
'class': s,
'scope' : {},
'value' : {},
'scope': {},
'value': {},
}
self.records[name] = record
for k,v in s.values.items():
for k, v in s.values.items():
if v.name == 'scope':
# TODO only for debugging
#print("scope: [%s]=%s" % (k, v))
# print("scope: [%s]=%s" % (k, v))
record['scope'][k] = v
elif v.name == 'value':
# skip non numeric and those without min/max
if (v.values['type'] in _NUMERIC_TYPES and
'min' in v.values and 'max' in v.values):
if v.values['type'] in _NUMERIC_TYPES and 'min' in v.values and 'max' in v.values:
# TODO only for debugging
#print("value: [%s]=%s" % (k, v))
# print("value: [%s]=%s" % (k, v))
record['value'][k] = v
#else:
# else:
# TODO only for debugging
#print("skipping value: [%s]=%s" % (k, v))
# print("skipping value: [%s]=%s" % (k, v))
def handle_tracer_entry(self, event):
# use first field in message (structure-id) if none
@ -100,17 +99,16 @@ class Stats(Analyzer):
return
# aggregate event based on class
for sk,sv in record['scope'].items():
for sk, sv in record['scope'].items():
# look up bin by scope (or create new)
key = (_SCOPE_RELATED_TO[sv.values['related-to']] +
":" + str(s.values[sk]))
key = (_SCOPE_RELATED_TO[sv.values['related-to']] + ":" + str(s.values[sk]))
scope = self.data.get(key)
if not scope:
scope = {}
self.data[key] = scope
for vk,vv in record['value'].items():
for vk, vv in record['value'].items():
# skip optional fields
if not vk in s.values:
if vk not in s.values:
continue
if not s.values.get('have-' + vk, True):
continue
@ -118,8 +116,8 @@ class Stats(Analyzer):
key = entry_name + "/" + vk
data = scope.get(key)
if not data:
data = { 'num': 0 }
if not '_FLAGS_AGGREGATED' in vv.values.get('flags', ''):
data = {'num': 0}
if '_FLAGS_AGGREGATED' not in vv.values.get('flags', ''):
data['sum'] = 0
if 'max' in vv.values and 'min' in vv.values:
data['min'] = int(vv.values['max'])
@ -144,15 +142,15 @@ class Stats(Analyzer):
def report(self):
# headline
print("%-45s: %30s: %16s/%16s/%16s" % (
'scope', 'value', 'min','avg','max'))
'scope', 'value', 'min', 'avg', 'max'))
# iterate scopes
for sk,sv in self.data.items():
for sk, sv in self.data.items():
# iterate tracers
for tk,tv in sv.items():
for tk, tv in sv.items():
mi = tv.get('min', '-')
ma = tv.get('max', '-')
if 'sum' in tv:
avg = tv['sum']/tv['num']
avg = tv['sum'] / tv['num']
else:
avg = '-'
if mi == ma:
@ -190,8 +188,8 @@ def format_ts(ts):
def is_time_field(f):
# TODO: need proper units
return (f.endswith('/time') or f.endswith('-dts') or f.endswith('-pts') or
f.endswith('-duration'))
return (f.endswith('/time') or f.endswith('-dts') or f.endswith('-pts')
or f.endswith('-duration'))
if __name__ == '__main__':

View file

@ -37,7 +37,7 @@ logger = logging.getLogger('gsttr-tsplot')
_HANDLED_CLASSES = ('buffer', 'event', 'new-pad', 'new-element')
_GST_BUFFER_FLAG_DISCONT = (1<<6)
_GST_BUFFER_FLAG_DISCONT = (1 << 6)
_PLOT_SCRIPT_HEAD = Template(
'''
@ -82,6 +82,7 @@ _PLOT_SCRIPT_BODY = Template(
unset multiplot
''')
class TsPlot(Analyzer):
'''Generate a timestamp plots from a tracer log.
@ -123,18 +124,18 @@ class TsPlot(Analyzer):
data = self.ev_data.get(ix)
if not data:
return
l = self.ev_labels[ix]
line = self.ev_labels[ix]
ct = data['ct']
x1 = data['first-ts']
# TODO: scale 'y' according to max-y of buf or do a multiplot
y = (1 + data['ypos']) * -10
if ct == 1:
pad_file.write('%f %f %f %f "%s"\n' % (x1, x1, 0.0, y, l))
pad_file.write('%f %f %f %f "%s"\n' % (x1, x1, 0.0, y, line))
else:
x2 = data['last-ts']
xd = (x2 - x1)
xm = x1 + xd / 2
pad_file.write('%f %f %f %f "%s (%d)"\n' % (x1, xm, xd, y, l, ct))
pad_file.write('%f %f %f %f "%s (%d)"\n' % (x1, xm, xd, y, line, ct))
def _log_event(self, s):
# build a [ts, event-name] data file
@ -146,8 +147,8 @@ class TsPlot(Analyzer):
x = int(s.values['ts']) / 1e9
# some events fire often, labeling each would be unreadable
# so we aggregate a series of events of the same type
l = s.values['name']
if l == self.ev_labels.get(ix):
line = s.values['name']
if line == self.ev_labels.get(ix):
# count lines and track last ts
data = self.ev_data[ix]
data['ct'] += 1
@ -155,17 +156,17 @@ class TsPlot(Analyzer):
else:
self._log_event_data(pad_file, ix)
# start new data, assign a -y coord by event type
if not ix in self.ev_ypos:
if ix not in self.ev_ypos:
ypos = {}
self.ev_ypos[ix] = ypos
else:
ypos = self.ev_ypos[ix]
if l in ypos:
y = ypos[l]
if line in ypos:
y = ypos[line]
else:
y = len(ypos)
ypos[l] = y
self.ev_labels[ix] = l
ypos[line] = y
self.ev_labels[ix] = line
self.ev_data[ix] = {
'ct': 1,
'first-ts': x,
@ -187,7 +188,7 @@ class TsPlot(Analyzer):
cts = int(s.values['ts']) / 1e9
pts = int(s.values['buffer-pts']) / 1e9
dur = int(s.values['buffer-duration']) / 1e9
if not ix in self.buf_cts:
if ix not in self.buf_cts:
dcts = 0
else:
dcts = cts - self.buf_cts[ix]

View file

@ -1,8 +1,9 @@
try:
from tracer.parser import Parser
except:
except BaseException:
from parser import Parser
class AnalysisRunner(object):
"""
Runs several Analyzers over a log.
@ -26,9 +27,9 @@ class AnalysisRunner(object):
analyzer.handle_tracer_entry(event)
def is_tracer_class(self, event):
return (event[Parser.F_FILENAME] == 'gsttracerrecord.c' and
event[Parser.F_CATEGORY] == 'GST_TRACER' and
'.class' in event[Parser.F_MESSAGE])
return (event[Parser.F_FILENAME] == 'gsttracerrecord.c'
and event[Parser.F_CATEGORY] == 'GST_TRACER'
and '.class' in event[Parser.F_MESSAGE])
def is_tracer_entry(self, event):
return (not event[Parser.F_LINE] and not event[Parser.F_FILENAME])
@ -41,7 +42,7 @@ class AnalysisRunner(object):
self.handle_tracer_entry(event)
elif self.is_tracer_class(event):
self.handle_tracer_class(event)
#else:
# else:
# print("unhandled:", repr(event))
except StopIteration:
pass

View file

@ -8,11 +8,11 @@ def _log_line_regex():
# "0:00:00.777913000 "
TIME = r"(\d+:\d\d:\d\d\.\d+)\s+"
# "DEBUG "
#LEVEL = "([A-Z]+)\s+"
# LEVEL = "([A-Z]+)\s+"
LEVEL = "(TRACE)\s+"
# "0x8165430 "
THREAD = r"(0x[0-9a-f]+)\s+"
# "GST_REFCOUNTING ", "flacdec "
# "GST_REFCOUNTING ", "flacdec "
CATEGORY = "([A-Za-z0-9_-]+)\s+"
# " 3089 "
PID = r"(\d+)\s*"

View file

@ -30,16 +30,16 @@ class Structure(object):
@staticmethod
def _find_eos(s):
# find next '"' without preceeding '\'
l = 0
#logger.debug("find_eos: '%s'", s)
while 1: # faster than regexp for '[^\\]\"'
i = 0
# logger.debug("find_eos: '%s'", s)
while True: # faster than regexp for '[^\\]\"'
p = s.index('"')
l += p + 1
i += p + 1
if s[p - 1] != '\\':
#logger.debug("... ok : '%s'", s[p:])
return l
# logger.debug("... ok : '%s'", s[p:])
return i
s = s[(p + 1):]
#logger.debug("... : '%s'", s)
# logger.debug("... : '%s'", s)
return -1
@staticmethod
@ -47,7 +47,7 @@ class Structure(object):
types = {}
values = {}
scan = True
#logger.debug("===: '%s'", s)
# logger.debug("===: '%s'", s)
# parse id
p = s.find(',')
if p == -1:
@ -57,7 +57,7 @@ class Structure(object):
# parse fields
while scan:
s = s[(p + 2):] # skip 'name, ' / 'value, '
#logger.debug("...: '%s'", s)
# logger.debug("...: '%s'", s)
p = s.index('=')
k = s[:p]
if not s[p + 1] == '(':

View file

@ -51,7 +51,7 @@ def gi_get_value():
def perf(method, n, flavor):
t = timeit.timeit(method + '()', 'from __main__ import ' + method, number=n)
print("%6s: %lf s, (%lf calls/s)" % (flavor, t, (n/t)))
print("%6s: %lf s, (%lf calls/s)" % (flavor, t, (n / t)))
if __name__ == '__main__':

View file

@ -11,17 +11,18 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
@ -34,7 +35,7 @@ templates_path = ['_templates']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
@ -54,37 +55,37 @@ release = '1.0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
@ -96,26 +97,26 @@ html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@ -124,44 +125,44 @@ html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'gst-validate-launcherdoc'
@ -170,42 +171,42 @@ htmlhelp_basename = 'gst-validate-launcherdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'gst-validate-launcher.tex', u'gst-validate-launcher Documentation',
('index', 'gst-validate-launcher.tex', u'gst-validate-launcher Documentation',
u'Thibault Saunier', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
@ -218,7 +219,7 @@ man_pages = [
]
# If true, show URL addresses after external links.
#man_show_urls = False
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
@ -227,17 +228,17 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'gst-validate-launcher', u'gst-validate-launcher Documentation',
('index', 'gst-validate-launcher', u'gst-validate-launcher Documentation',
u'Thibault Saunier', 'gst-validate-launcher', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# texinfo_show_urls = 'footnote'
autoclass_content = 'both'

View file

@ -49,9 +49,11 @@ import time
_bandwidth = 0
class ThreadingSimpleServer(ThreadingMixIn, http.server.HTTPServer):
pass
class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
"""Simple HTTP request handler with GET and HEAD commands.
@ -70,7 +72,7 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
"""Serve a GET request."""
f, start_range, end_range = self.send_head()
print ("Got values of {} and {}".format(start_range, end_range))
print("Got values of {} and {}".format(start_range, end_range))
if f:
f.seek(start_range, 0)
chunk = 0x1000
@ -85,7 +87,7 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
try:
self.wfile.write(f.read(chunk))
except:
except Exception:
break
total += chunk
start_range += chunk
@ -136,8 +138,8 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
return (None, 0, 0)
if "Range" in self.headers:
self.send_response(206) #partial content response
else :
self.send_response(206) # partial content response
else:
self.send_response(200)
self.send_header("Content-type", ctype)
@ -148,7 +150,7 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
self.send_header("Accept-Ranges", "bytes")
if "Range" in self.headers:
s, e = self.headers['range'][6:].split('-', 1) #bytes:%d-%d
s, e = self.headers['range'][6:].split('-', 1) # bytes:%d-%d
sl = len(s)
el = len(e)
@ -163,7 +165,7 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
self.send_header("Content-Length", end_range - start_range)
self.end_headers()
print ("Sending bytes {} to {}...".format(start_range, end_range))
print("Sending bytes {} to {}...".format(start_range, end_range))
return (f, start_range, end_range)
def list_directory(self, path):
@ -180,7 +182,7 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
self.send_error(404, "Access Forbidden")
return None
lst.sort(key=lambda file_name : file_name.lower())
lst.sort(key=lambda file_name: file_name.lower())
html_text = []
displaypath = html.escape(urllib.parse.unquote(self.path))
@ -226,7 +228,7 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
probably be diagnosed.)
"""
#abandon query parameters
# abandon query parameters
path = path.split("?", 1)[0]
path = path.split("#", 1)[0]
path = posixpath.normpath(urllib.parse.unquote(path))
@ -237,11 +239,11 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
for word in words:
drive, word = os.path.splitdrive(word)
head, word = os.path.split(word)
if word in (os.curdir, os.pardir): continue
if word in (os.curdir, os.pardir):
continue
path = os.path.join(path, word)
return path
def guess_type(self, path):
"""Guess the type of a file.
@ -266,23 +268,24 @@ class RangeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
else:
return self.extension_map['']
if not mimetypes.inited:
if not mimetypes.inited:
mimetypes.init()
extension_map = mimetypes.types_map.copy()
extension_map.update({
'': 'application/octet-stream', # Default
'': 'application/octet-stream', # Default
'.py': 'text/plain',
'.c': 'text/plain',
'.h': 'text/plain',
'.mp4': 'video/mp4',
'.ogg': 'video/ogg',
'.java' : 'text/plain',
})
'.java': 'text/plain',
})
def test(handler_class = RangeHTTPRequestHandler,server_class = http.server.HTTPServer):
def test(handler_class=RangeHTTPRequestHandler, server_class=http.server.HTTPServer):
http.server.test(handler_class, server_class)
if __name__ == "__main__":
httpd = ThreadingSimpleServer(("0.0.0.0", int(sys.argv[1])), RangeHTTPRequestHandler)
httpd.serve_forever()

View file

@ -989,8 +989,9 @@ class GstValidateTest(Test):
for key, value in report.items():
if key == "type":
continue
res += '\n%s%s"%s": "%s",' % (" " * 12, "# " if key ==
"details" else "", key, value.replace('\n', '\\n'))
res += '\n%s%s"%s": "%s",' % (
" " * 12, "# " if key == "details" else "",
key, value.replace('\n', '\\n'))
res += "\n%s}," % (" " * 8)

View file

@ -22,7 +22,9 @@ import time
from . import loggable
import subprocess
import sys
import urllib.request, urllib.error, urllib.parse
import urllib.request
import urllib.error
import urllib.parse
logcat = "httpserver"

View file

@ -164,7 +164,7 @@ class TerminalController:
# terminal has no capabilities.
try:
curses.setupterm()
except:
except BaseException:
return
# Look up numeric capabilities.
@ -258,15 +258,15 @@ class ProgressBar:
self.cleared = 0
n = int((self.width - 10) * percent)
sys.stdout.write(
self.term.BOL + self.term.UP + self.term.CLEAR_EOL +
(self.bar % (100 * percent, '=' * n, '-' * (self.width - 10 - n))) +
self.term.CLEAR_EOL + message.center(self.width))
self.term.BOL + self.term.UP + self.term.CLEAR_EOL
+ (self.bar % (100 * percent, '=' * n, '-' * (self.width - 10 - n)))
+ self.term.CLEAR_EOL + message.center(self.width))
def clear(self):
if not self.cleared:
sys.stdout.write(self.term.BOL + self.term.CLEAR_EOL +
self.term.UP + self.term.CLEAR_EOL +
self.term.UP + self.term.CLEAR_EOL)
sys.stdout.write(self.term.BOL + self.term.CLEAR_EOL
+ self.term.UP + self.term.CLEAR_EOL
+ self.term.UP + self.term.CLEAR_EOL)
self.cleared = 1
@ -648,7 +648,7 @@ def _preformatLevels(enableColorOutput):
terminal_controller = TerminalController()
for level in ERROR, WARN, FIXME, INFO, DEBUG, LOG:
if enableColorOutput:
if type(terminal_controller.BOLD) == bytes:
if isinstance(terminal_controller.BOLD, bytes):
formatter = ''.join(
(terminal_controller.BOLD.decode(),
getattr(terminal_controller, COLORS[level]).decode(),

View file

@ -21,6 +21,7 @@ import os
import sys
import xml.etree.cElementTree
def extract_info(xmlfile):
e = xml.etree.cElementTree.parse(xmlfile).getroot()
r = {}
@ -28,6 +29,7 @@ def extract_info(xmlfile):
r[(i.get("classname"), i.get("name"))] = i
return r
if "__main__" == __name__:
if len(sys.argv) < 2:
print("Usage : %s [<old run xml>] <new run xml>" % sys.argv[0])
@ -58,12 +60,12 @@ if "__main__" == __name__:
if oldfile:
# tests that weren't present in old run
newtests = [x for x in newfile.keys() if not oldfile.has_key(x)]
newtests = [x for x in newfile.keys() if x not in oldfile]
# tests that are no longer present in new run
gonetests = [x for x in oldfile.keys() if not newfile.has_key(x)]
gonetests = [x for x in oldfile.keys() if x not in newfile]
# go over new tests
for k,v in newfile.iteritems():
for k, v in newfile.iteritems():
tn, fn = k
if not fn in allfiles:
allfiles.append(fn)
@ -75,10 +77,10 @@ if "__main__" == __name__:
rs = r.split('[')[1].split(']')[0].split(',')
for la in rs:
la = la.strip()
if not reasons.has_key(la):
if la not in reasons:
reasons[la] = []
reasons[la].append(k)
if not failedfiles.has_key(fn):
if fn not in failedfiles:
failedfiles[fn] = []
failedfiles[fn].append((tn, r))
@ -102,7 +104,6 @@ if "__main__" == __name__:
elif a.get("message") != b.get("message"):
failchange.append(k)
if newfail:
print("New failures", len(newfail))
newfail.sort()
@ -132,23 +133,21 @@ if "__main__" == __name__:
print " New message :", newfile[i].find("error").get("message")
print
for k,v in reasons.iteritems():
for k, v in reasons.iteritems():
print "Failure type : ", k, len(v)
v.sort()
for i in v:
print " %s : %s" % (i[0], i[1])
print
nofailfiles = [fn for fn in allfiles if not failedfiles.has_key(fn)]
nofailfiles.sort()
nofailfiles = sorted([fn for fn in allfiles if fn not in failedfiles])
if nofailfiles:
print "Files without failures", len(nofailfiles)
for f in nofailfiles:
print " ", f
print
for k,v in failedfiles.iteritems():
for k, v in failedfiles.iteritems():
print "Failed File :", k
for i in v:
print " %s : %s" % (i[0], i[1])

View file

@ -74,4 +74,4 @@ if "__main__" == __name__:
prof.dump_stats('gst-validate-launcher-runstats')
exit(res)
exit(main(libsdir))
exit(main(libsdir))