2016-11-04 21:04:37 +00:00
|
|
|
#!/usr/bin/env python3
|
2013-12-31 10:45:07 +00:00
|
|
|
#
|
|
|
|
# Copyright (c) 2013,Thibault Saunier <thibault.saunier@collabora.com>
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or
|
|
|
|
# modify it under the terms of the GNU Lesser General Public
|
|
|
|
# License as published by the Free Software Foundation; either
|
|
|
|
# version 2.1 of the License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
# Lesser General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Lesser General Public
|
|
|
|
# License along with this program; if not, write to the
|
|
|
|
# Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
|
|
|
|
# Boston, MA 02110-1301, USA.
|
|
|
|
""" Some utilities. """
|
|
|
|
|
2016-11-09 20:37:24 +00:00
|
|
|
# "config" provides build-time paths (BUILDDIR, SRCDIR, DATADIR) used below.
# Import it as a plain module when running uninstalled, falling back to a
# package-relative import otherwise.
try:
    import config
except ImportError:
    from . import config
|
|
|
|
|
2019-03-17 00:37:16 +00:00
|
|
|
import json
|
2013-12-31 10:45:07 +00:00
|
|
|
import os
|
2017-01-03 18:58:35 +00:00
|
|
|
import platform
|
2014-01-31 11:21:21 +00:00
|
|
|
import re
|
2016-11-07 20:20:09 +00:00
|
|
|
import shutil
|
2018-07-01 15:32:10 +00:00
|
|
|
import shlex
|
2014-12-05 11:16:36 +00:00
|
|
|
import signal
|
2016-11-07 20:20:09 +00:00
|
|
|
import subprocess
|
2014-07-16 08:12:04 +00:00
|
|
|
import sys
|
2016-11-07 20:20:09 +00:00
|
|
|
import tempfile
|
|
|
|
import time
|
|
|
|
import urllib.request
|
|
|
|
import urllib.error
|
2016-11-04 21:04:37 +00:00
|
|
|
import urllib.parse
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2016-11-07 20:20:09 +00:00
|
|
|
from .loggable import Loggable
|
2014-01-31 11:21:21 +00:00
|
|
|
from operator import itemgetter
|
2016-11-16 13:47:21 +00:00
|
|
|
from xml.etree import ElementTree
|
2019-03-17 00:37:16 +00:00
|
|
|
from collections import defaultdict
|
2014-01-31 11:21:21 +00:00
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2016-11-04 21:04:37 +00:00
|
|
|
# One GStreamer second, in nanoseconds (mirrors GST_SECOND from core).
GST_SECOND = int(1000000000)
# Default per-test timeout, in seconds.
DEFAULT_TIMEOUT = 30

# When running from a gst-build checkout, use the in-tree
# gst-integration-testsuites subproject for assets and testsuite files.
DEFAULT_MAIN_DIR = os.path.join(config.BUILDDIR, "subprojects", "gst-integration-testsuites")
DEFAULT_GST_QA_ASSETS = os.path.join(config.SRCDIR, "subprojects", "gst-integration-testsuites")
USING_SUBPROJECT = os.path.exists(os.path.join(config.BUILDDIR, "subprojects", "gst-integration-testsuites"))
if not USING_SUBPROJECT:
    # Standalone setup: keep everything under ~/gst-validate instead.
    DEFAULT_MAIN_DIR = os.path.join(os.path.expanduser("~"), "gst-validate")
    DEFAULT_GST_QA_ASSETS = os.path.join(DEFAULT_MAIN_DIR, "gst-integration-testsuites")

# The main directory can always be overridden from the environment.
DEFAULT_MAIN_DIR = os.environ.get('GST_VALIDATE_LAUNCHER_MAIN_DIR', DEFAULT_MAIN_DIR)
DEFAULT_TESTSUITES_DIRS = [os.path.join(DEFAULT_GST_QA_ASSETS, "testsuites")]


# Binary used to probe media file durations (see get_duration()).
DISCOVERER_COMMAND = "gst-discoverer-1.0"
# Use to set the duration from which a test is considered as being 'long'
LONG_TEST = 40
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
class Result(object):
    """Possible outcome states of a test run."""
    NOT_RUN = "Not run"
    FAILED = "Failed"
    TIMEOUT = "Timeout"
    PASSED = "Passed"
    SKIPPED = "Skipped"
    KNOWN_ERROR = "Known error"
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-11-28 21:42:47 +00:00
|
|
|
|
2014-01-30 15:58:58 +00:00
|
|
|
class Protocols(object):
    """String constants naming the streaming protocols the launcher handles."""
    HTTP = "http"
    FILE = "file"
    PUSHFILE = "pushfile"
    HLS = "hls"
    DASH = "dash"
    RTSP = "rtsp"

    @staticmethod
    def needs_clock_sync(protocol):
        """Return True for adaptive protocols that require clock synchronization."""
        return protocol in (Protocols.HLS, Protocols.DASH)
|
|
|
|
|
2014-01-30 15:58:58 +00:00
|
|
|
|
2019-03-19 15:15:35 +00:00
|
|
|
def is_tty():
    """Return True when stdout looks like an interactive terminal."""
    out = sys.stdout
    return hasattr(out, 'isatty') and out.isatty()
|
|
|
|
|
|
|
|
|
2017-08-26 13:50:44 +00:00
|
|
|
def supports_ansi_colors():
    """Return True when stdout is likely to honor ANSI color escapes.

    Colors are forced on when GST_VALIDATE_LAUNCHER_FORCE_COLORS is set
    (useful for CI logs that render ANSI); otherwise we require a
    color-capable platform (non-win32, or win32 with ANSICON) and a tty.
    """
    if 'GST_VALIDATE_LAUNCHER_FORCE_COLORS' in os.environ:
        return True

    # Bug fix: the local used to be named `platform`, shadowing the
    # imported `platform` module inside this function.
    plat = sys.platform
    supported_platform = plat != 'win32' or 'ANSICON' in os.environ
    if not supported_platform or not is_tty():
        return False
    return True
|
|
|
|
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
class Colors(object):
    """ANSI escape sequences used to colorize terminal output.

    All values are reset to '' by desactivate_colors() when the terminal
    does not support ANSI colors.
    """
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
|
|
|
|
|
|
|
|
|
2014-01-10 09:27:25 +00:00
|
|
|
def desactivate_colors():
    """Reset every Colors escape sequence to '' so output stays plain."""
    for attr in ('HEADER', 'OKBLUE', 'OKGREEN', 'WARNING', 'FAIL', 'ENDC'):
        setattr(Colors, attr, '')
|
|
|
|
|
2018-03-23 23:58:38 +00:00
|
|
|
|
2017-08-26 13:50:44 +00:00
|
|
|
# Strip the ANSI escapes once at import time when the environment cannot
# render them.
if not supports_ansi_colors():
    desactivate_colors()
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
def mkdir(directory):
    """Create *directory* (and any missing parents), ignoring OS errors
    such as the directory already existing."""
    try:
        os.makedirs(directory)
    except OSError:
        pass
|
|
|
|
|
|
|
|
|
2016-09-09 15:09:45 +00:00
|
|
|
def which(name, extra_path=None):
    """Locate executable *name*, searching *extra_path* before $PATH.

    Honors the Windows PATHEXT extension list when probing candidates.
    Returns the full path of the first executable match, or None when
    nothing matches.
    """
    # Candidate executable suffixes (Windows PATHEXT); empty elsewhere.
    exts = [_f for _f in os.environ.get('PATHEXT', '').split(os.pathsep) if _f]
    path = os.environ.get('PATH', '')
    if extra_path:
        path = extra_path + os.pathsep + path
    if not path:
        # Bug fix: this used to return [] while the not-found case below
        # returns None; keep a single "not found" type (callers only test
        # truthiness).
        return None

    for p in path.split(os.pathsep):
        p = os.path.join(p, name)
        if os.access(p, os.X_OK):
            return p
        for e in exts:
            pext = p + e
            if os.access(pext, os.X_OK):
                return pext
    return None
|
2014-01-09 14:17:53 +00:00
|
|
|
|
|
|
|
|
2014-01-15 15:11:39 +00:00
|
|
|
def get_color_for_result(result):
    """Map a Result state to the ANSI color used when printing it."""
    # Identity comparison (`is`) is kept on purpose, matching the original
    # behavior of matching the Result singletons themselves.
    for known_result, color in ((Result.FAILED, Colors.FAIL),
                                (Result.TIMEOUT, Colors.WARNING),
                                (Result.PASSED, Colors.OKGREEN)):
        if result is known_result:
            return color
    return Colors.OKBLUE
|
|
|
|
|
|
|
|
|
2018-03-23 23:58:38 +00:00
|
|
|
# Length of the last status line printed with end='\r' (used by printc()
# to pad the next line so the previous one is fully overwritten).
last_carriage_return_len = 0
|
|
|
|
|
|
|
|
|
2018-03-23 20:44:06 +00:00
|
|
|
def printc(message, color="", title=False, title_char='', end="\n"):
    """Print *message* to stdout, optionally colored and/or banner-formatted.

    message: object to print; if it exposes a `.result` attribute and no
        color was given, the color is derived from that result.
    color: ANSI escape prefix (one of the Colors constants) or "".
    title: when True, surround the message with lines of '='.
    title_char: when non-empty, underline the message with this character.
    end: line terminator; a carriage return enables in-place status
        updates on a tty.
    """
    global last_carriage_return_len
    if title or title_char:
        # Banner width: the longest line of the message (fall back to the
        # whole message length when splitting yields nothing longer).
        length = 0
        for l in message.split("\n"):
            if len(l) > length:
                length = len(l)
        if length == 0:
            length = len(message)

        # Pad so a previous carriage-return status line is overwritten.
        needed_spaces = ' ' * max(0, last_carriage_return_len - length)
        if title is True:
            message = length * "=" + needed_spaces + "\n" \
                + str(message) + "\n" + length * '='
        else:
            message = str(message) + needed_spaces + "\n" + \
                length * title_char

    if hasattr(message, "result") and color == '':
        color = get_color_for_result(message.result)

    if not is_tty():
        # Carriage-return tricks only make sense on a real terminal.
        end = "\n"

    message = str(message)
    message += ' ' * max(0, last_carriage_return_len - len(message))
    if end == '\r':
        # Truncate to the terminal width so the status line never wraps.
        term_width = shutil.get_terminal_size((80, 20))[0]
        if len(message) > term_width:
            message = message[0:term_width - 2] + '…'
        last_carriage_return_len = len(message)
    else:
        last_carriage_return_len = 0
    sys.stdout.write(color + str(message) + Colors.ENDC + end)
    sys.stdout.flush()
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
|
2014-04-28 11:08:09 +00:00
|
|
|
def launch_command(command, color=None, fails=False):
    """Echo *command* as a green banner, then run it through the shell.

    Raises subprocess.CalledProcessError on a non-zero exit status when
    *fails* is True; otherwise the status is ignored.
    """
    printc(command, Colors.OKGREEN, True)
    status = os.system(command)
    if fails is True and status != 0:
        raise subprocess.CalledProcessError(status, "%s failed" % command)
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2014-01-09 14:24:52 +00:00
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
def path2url(path):
    """Turn a local filesystem path into a file:// URL."""
    quoted = urllib.request.pathname2url(path)
    return urllib.parse.urljoin('file:', quoted)
|
2014-01-09 08:14:27 +00:00
|
|
|
|
|
|
|
|
2017-01-03 18:58:35 +00:00
|
|
|
def is_windows():
    """Return True on native Windows or in a MinGW environment."""
    sysname = platform.system().lower()
    return 'mingw' in sysname or sysname == 'windows'
|
|
|
|
|
|
|
|
|
2014-01-10 09:12:13 +00:00
|
|
|
def url2path(url):
    """Convert a URL (typically file://) back into a local path."""
    parsed_path = urllib.parse.urlparse(url).path
    if "win32" in sys.platform:
        if parsed_path[0] == '/':
            # We need to remove the first '/' on windows
            return parsed_path[1:]
    return urllib.parse.unquote(parsed_path)
|
2014-01-10 09:12:13 +00:00
|
|
|
|
|
|
|
|
|
|
|
def isuri(string):
    """Return True when *string* parses as a URI (has a scheme)."""
    url = urllib.parse.urlparse(string)
    # Bug fix: the original compared url.scheme against "" twice — the
    # duplicated operand was redundant; a non-empty scheme is the check.
    return url.scheme != ""
|
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-04-23 09:47:10 +00:00
|
|
|
def touch(fname, times=None):
    """Create *fname* if it does not exist and set its access/modification
    times (*times* as accepted by os.utime; None means "now")."""
    handle = open(fname, 'a')
    try:
        os.utime(fname, times)
    finally:
        handle.close()
|
2014-01-10 09:12:13 +00:00
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-04-30 13:40:10 +00:00
|
|
|
def get_subclasses(klass, env):
    """Return every value in the *env* mapping that is a strict subclass
    of *klass*; non-class entries are silently skipped."""
    found = []
    for _name, value in env.items():
        try:
            is_strict_subclass = issubclass(value, klass) and value is not klass
        except TypeError:
            continue
        if is_strict_subclass:
            found.append(value)
    return found
|
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-06-16 14:46:21 +00:00
|
|
|
def TIME_ARGS(time):
    """Format *time* (in nanoseconds) as "H:MM:SS.NNNNNNNNN".

    Uses integer floor division instead of true division so very large
    nanosecond values are not routed through a float (which loses
    precision) before being formatted with %u.
    """
    return "%u:%02u:%02u.%09u" % (time // (GST_SECOND * 60 * 60),
                                  (time // (GST_SECOND * 60)) % 60,
                                  (time // GST_SECOND) % 60,
                                  time % GST_SECOND)
|
|
|
|
|
2015-04-27 11:25:44 +00:00
|
|
|
|
|
|
|
def look_for_file_in_source_dir(subdir, name):
    """Return the path of *subdir*/*name* under the parent of this file's
    directory, or None when it does not exist there."""
    file_dir = os.path.dirname(os.path.abspath(__file__))
    root_dir = os.path.abspath(os.path.dirname(file_dir))
    candidate = os.path.join(root_dir, subdir, name)
    if os.path.exists(candidate):
        return candidate
    return None
|
|
|
|
|
|
|
|
|
2015-05-11 10:22:25 +00:00
|
|
|
def get_data_file(subdir, name):
    """Return $top_src_dir/*subdir*/*name* when running from source, or
    $DATADIR/gstreamer-1.0/validate/*name* when installed, else None."""
    # Are we running from sources?
    source_path = look_for_file_in_source_dir(subdir, name)
    if source_path:
        return source_path

    # Look in system data dirs
    installed_path = os.path.join(config.DATADIR, 'gstreamer-1.0', 'validate', name)
    return installed_path if os.path.exists(installed_path) else None
|
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
#
|
|
|
|
# Some utilities to parse gst-validate output #
|
|
|
|
#
|
|
|
|
|
|
|
|
|
2014-01-31 11:21:21 +00:00
|
|
|
def gsttime_from_tuple(stime):
    """Convert an (hours, minutes, seconds, nanoseconds) tuple of
    stringly-typed values into a single nanosecond count."""
    hours, minutes, seconds, nanoseconds = (int(v) for v in stime[:4])
    return (hours * 3600 + minutes * 60 + seconds) * GST_SECOND + nanoseconds
|
2014-01-31 11:21:21 +00:00
|
|
|
|
2018-03-23 23:58:38 +00:00
|
|
|
|
2014-01-31 11:21:21 +00:00
|
|
|
# Matches "H:MM:SS.NNNNNNNNN"-style times; the group names _0.._3 sort in
# field order so parse_gsttimeargs() can recover (hours, minutes, seconds,
# fraction) by sorting groupdict() keys.
timeregex = re.compile(r'(?P<_0>.+):(?P<_1>.+):(?P<_2>.+)\.(?P<_3>.+)')
|
2014-10-24 12:23:52 +00:00
|
|
|
|
|
|
|
|
2014-01-30 11:42:25 +00:00
|
|
|
def parse_gsttimeargs(time):
    """Parse a "H:MM:SS.NNNNNNNNN" time string (as printed by
    gst-discoverer) into nanoseconds."""
    groups = timeregex.match(time).groupdict()
    # Group names _0.._3 sort into (hours, minutes, seconds, fraction).
    fields = [value for _key, value in sorted(groups.items())]
    hours, minutes, seconds, fraction = (int(f) for f in fields)
    return (hours * 3600 + minutes * 60 + seconds) * GST_SECOND + fraction
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-01-09 14:23:38 +00:00
|
|
|
|
2014-01-09 14:24:52 +00:00
|
|
|
def get_duration(media_file):
    """Return the duration of *media_file* in nanoseconds, as reported by
    gst-discoverer-1.0, or 0 when it cannot be determined."""
    duration = 0
    output = ''
    try:
        output = subprocess.check_output(
            [DISCOVERER_COMMAND, media_file]).decode()
    except subprocess.CalledProcessError:
        # The discoverer exits non-zero e.g. when the media is not
        # seekable; we simply report an unknown duration in that case.
        pass

    for line in output.split('\n'):
        if "Duration: " in line:
            duration = parse_gsttimeargs(line.replace("Duration: ", ""))
            break

    return duration
|
2014-01-09 14:23:38 +00:00
|
|
|
|
2014-02-12 10:18:14 +00:00
|
|
|
|
|
|
|
def get_scenarios():
    """Ask gst-validate-1.0 to dump its scenario definitions.

    NOTE(review): the format string contains two '%s' placeholders but only
    one argument is supplied, so calling this raises TypeError — the output
    file argument appears to be missing. TODO: confirm the intended output
    path against callers before fixing.
    """
    GST_VALIDATE_COMMAND = "gst-validate-1.0"
    os.system("%s --scenarios-defs-output-file %s" % (GST_VALIDATE_COMMAND,
                                                      ))
|
2016-11-07 20:20:09 +00:00
|
|
|
|
|
|
|
|
2019-03-28 13:08:16 +00:00
|
|
|
def get_gst_build_valgrind_suppressions():
    """Collect the valgrind suppression files present in a gst-build
    checkout's subprojects. The list is computed once and cached on the
    function object."""
    cached = getattr(get_gst_build_valgrind_suppressions, "data", None)
    if cached is not None:
        return cached

    suppressions = []
    get_gst_build_valgrind_suppressions.data = suppressions
    if not os.path.exists(os.path.join(config.SRCDIR, "subprojects")):
        return suppressions

    candidates = ["gstreamer/tests/check/gstreamer.supp",
                  "gst-plugins-base/tests/check/gst-plugins-base.supp",
                  "gst-plugins-good/tests/check/gst-plugins-good.supp",
                  "gst-plugins-bad/tests/check/gst-plugins-bad.supp",
                  "gst-plugins-ugly/tests/check/gst-plugins-ugly.supp",
                  "gst-libav/tests/check/gst-libav.supp",
                  "gst-devtools/validate/data/gstvalidate.supp",
                  "libnice/tests/libnice.supp",
                  "libsoup/tests/libsoup.supp",
                  "glib/glib.supp",
                  "gst-python/testsuite/gstpython.supp",
                  "gst-python/testsuite/python.supp"]
    for relative_path in candidates:
        suppression = os.path.join(config.SRCDIR, "subprojects", relative_path)
        if os.path.exists(suppression):
            suppressions.append(suppression)

    return suppressions
|
|
|
|
|
|
|
|
|
2016-11-07 20:20:09 +00:00
|
|
|
class BackTraceGenerator(Loggable):
    """Singleton helper producing stack traces for test processes.

    Live processes are traced by attaching gdb; crashed ones by fetching
    the core dump through `coredumpctl` (spawned on the host when running
    inside a flatpak sandbox).
    """

    __instance = None
    # Parsers for `coredumpctl info` output.
    _command_line_regex = re.compile(r'Command Line: (.*)\n')
    _timestamp_regex = re.compile(r'Timestamp: .*\((\d*)s ago\)')
    _pid_regex = re.compile(r'PID: (\d+) \(.*\)')

    def __init__(self):
        Loggable.__init__(self)

        # Inside a flatpak sandbox coredumpctl must run on the host.
        self.in_flatpak = os.path.exists("/usr/manifest.json")
        if self.in_flatpak:
            coredumpctl = ['flatpak-spawn', '--host', 'coredumpctl']
        else:
            coredumpctl = ['coredumpctl']

        # Probe for a usable coredumpctl; keep None when unavailable.
        try:
            subprocess.check_output(coredumpctl)
            self.coredumpctl = coredumpctl
        except Exception as e:
            self.warning(e)
            self.coredumpctl = None
        self.gdb = shutil.which('gdb')

    @classmethod
    def get_default(cls):
        """Return the lazily-created shared instance."""
        if not cls.__instance:
            cls.__instance = BackTraceGenerator()

        return cls.__instance

    def get_trace(self, test):
        """Return a backtrace for *test*'s process, or None if unavailable."""
        if not test.process.returncode:
            # Process still running (or exited 0): attach to it live.
            return self.get_trace_on_running_process(test)

        if self.coredumpctl:
            return self.get_trace_from_systemd(test)

        self.debug("coredumpctl not present, and it is the only"
                   " supported way to get backtraces for now.")
        return None

    def get_trace_on_running_process(self, test):
        """Attach gdb to the live test process and dump all thread backtraces."""
        if not self.gdb:
            return "Can not generate stack trace as `gdb` is not" \
                "installed."

        gdb = ['gdb', '-ex', 't a a bt', '-batch',
               '-p', str(test.process.pid)]

        try:
            return subprocess.check_output(
                gdb, stderr=subprocess.STDOUT, timeout=30).decode()
        except Exception as e:
            return "Could not run `gdb` on process (pid: %d):\n%s" % (
                test.process.pid, e)

    def get_trace_from_systemd(self, test):
        """Retrieve the crash backtrace of *test* from systemd-coredump.

        Retries up to 10 times (sleeping in between) because the core dump
        may not have been written out yet when the test is reaped. Returns
        the `coredumpctl info` text, augmented with a gdb
        'thread apply all bt' when gdb is available, or None when no
        matching dump shows up.
        """
        for ntry in range(10):
            if ntry != 0:
                # Looping: the core dump might not be available yet.
                time.sleep(1)

            if not self.in_flatpak:
                coredumpctl = self.coredumpctl + ['info', str(test.process.pid)]
            else:
                # PIDs are not meaningful across the sandbox boundary;
                # match on executable name and start time instead.
                newer_than = time.strftime("%a %Y-%m-%d %H:%M:%S %Z", time.localtime(test._starting_time))
                coredumpctl = self.coredumpctl + ['info', os.path.basename(test.command[0]),
                                                  '--since', newer_than]

            try:
                info = subprocess.check_output(coredumpctl, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # The trace might not be ready yet
                time.sleep(1)
                continue

            info = info.decode()
            try:
                pid = self._pid_regex.findall(info)[0]
            except IndexError:
                self.debug("Backtrace could not be found yet, trying harder.")
                continue

            # Make sure the dump we found actually belongs to this test.
            application = test.process.args[0]
            command_line = BackTraceGenerator._command_line_regex.findall(info)[0]
            if shlex.split(command_line)[0] != application:
                self.debug("PID: %s -- executable %s != test application: %s" % (
                    pid, command_line[0], test.application))
                # The trace might not be ready yet
                continue

            if not BackTraceGenerator._timestamp_regex.findall(info):
                self.debug("Timestamp %s is more than 1min old",
                           re.findall(r'Timestamp: .*', info))
                # The trace might not be ready yet
                continue

            bt_all = None
            if self.gdb:
                try:
                    # Extract the core dump to a temp file so gdb can load
                    # it against the test binary.
                    with tempfile.NamedTemporaryFile() as stderr:
                        coredump = subprocess.check_output(self.coredumpctl + ['dump', pid],
                                                           stderr=stderr)

                    with tempfile.NamedTemporaryFile() as tf:
                        tf.write(coredump)
                        tf.flush()
                        gdb = ['gdb', '-ex', 't a a bt', '-ex', 'quit', application, tf.name]
                        bt_all = subprocess.check_output(
                            gdb, stderr=subprocess.STDOUT).decode()

                    info += "\nThread apply all bt:\n\n%s" % (
                        bt_all.replace('\n', '\n' + 15 * ' '))
                except Exception as e:
                    self.error("Could not get backtrace from gdb: %s" % e)

            return info

        return None
|
2016-11-16 13:47:21 +00:00
|
|
|
|
|
|
|
|
2019-03-17 00:37:16 +00:00
|
|
|
# Maps a gitlab issue API URL to the list of test-name regexes referencing
# it, across every check_bugs_resolution() call (avoids querying the same
# issue more than once per run).
ALL_GITLAB_ISSUES = defaultdict(list)
|
|
|
|
|
|
|
|
|
2016-11-16 13:47:21 +00:00
|
|
|
def check_bugs_resolution(bugs_definitions):
    """Verify that bugs referenced as 'known issues' are still open.

    bugs_definitions: iterable of (regex, bugs) pairs, where bugs is a
        bug-tracker URL or a list of them. Gitlab issue URLs and bugzilla
        show_bug URLs are understood; anything else is skipped.

    Prints a status line per bug and returns False when at least one
    referenced bug is already closed (the known-issue entry is stale),
    True otherwise. Unreachable servers are reported but not fatal.
    """
    bugz = {}
    gitlab_issues = defaultdict(list)

    regexes = {}
    for regex, bugs in bugs_definitions:
        if isinstance(bugs, str):
            bugs = [bugs]

        for bug in bugs:
            url = urllib.parse.urlparse(bug)

            if "gitlab" in url.netloc:
                components = [c for c in url.path.split('/') if c]
                if len(components) != 4:
                    printc("\n + %s \n --> bug: %s\n --> Status: Not a proper gitlab report" % (regex, bug),
                           Colors.WARNING)
                    continue
                project_id = components[0] + '%2F' + components[1]
                issue_id = components[3]

                gitlab_url = "https://%s/api/v4/projects/%s/issues/%s" % (url.hostname, project_id, issue_id)
                if gitlab_url in ALL_GITLAB_ISSUES:
                    # Already checked during a previous call.
                    continue
                gitlab_issues[gitlab_url].append(regex)
                ALL_GITLAB_ISSUES[gitlab_url].append(regex)
                continue

            if "bugzilla" not in url.netloc:
                continue

            query = urllib.parse.parse_qs(url.query)
            _id = query.get('id')
            if not _id:
                printc("\n + '%s' -- Can't check bug '%s'" %
                       (regex, bug), Colors.WARNING)
                continue

            if isinstance(_id, list):
                _id = _id[0]

            regexes[_id] = (regex, bug)
            # Group bugzilla ids per server so each server is queried once.
            url_parts = tuple(list(url)[:3] + ['', '', ''])
            ids = bugz.get(url_parts, [])
            ids.append(_id)
            bugz[url_parts] = ids

    res = True
    for gitlab_url, regexe in gitlab_issues.items():
        try:
            issue = json.load(urllib.request.urlopen(gitlab_url))
        except Exception as e:
            printc("\n + Could not properly check bugs status for: %s (%s)"
                   % (gitlab_url, e), Colors.FAIL)
            continue

        if issue['state'] in ['closed']:
            printc("\n + %s \n --> %s: '%s'\n ==> Bug CLOSED already (status: %s)" % (
                regexe, issue['web_url'], issue['title'], issue['state']), Colors.FAIL)

            res = False

    for url_parts, ids in bugz.items():
        url_parts = list(url_parts)
        query = {'id': ','.join(ids)}
        query['ctype'] = 'xml'
        url_parts[4] = urllib.parse.urlencode(query)
        try:
            # Bug fix: the HTTP reply used to be assigned to `res`,
            # clobbering the boolean tracking whether all bugs are still
            # open (and thus corrupting the return value).
            reply = urllib.request.urlopen(urllib.parse.urlunparse(url_parts))
        except Exception as e:
            printc("\n + Could not properly check bugs status for: %s (%s)"
                   % (urllib.parse.urlunparse(url_parts), e), Colors.FAIL)
            continue

        root = ElementTree.fromstring(reply.read())
        bugs = root.findall('./bug')

        if len(bugs) != len(ids):
            printc("\n + Could not properly check bugs status on server %s" %
                   urllib.parse.urlunparse(url_parts), Colors.FAIL)
            continue

        for bugelem in bugs:
            status = bugelem.findtext('./bug_status')
            bugid = bugelem.findtext('./bug_id')
            regex, bug = regexes[bugid]
            desc = bugelem.findtext('./short_desc')

            if not status:
                printc("\n + %s \n --> bug: %s\n --> Status: UNKNOWN" % (regex, bug),
                       Colors.WARNING)
                continue

            if not status.lower() in ['new', 'verified']:
                printc("\n + %s \n --> bug: #%s: '%s'\n ==> Bug CLOSED already (status: %s)" % (
                    regex, bugid, desc, status), Colors.WARNING)

                res = False

            printc("\n + %s \n --> bug: #%s: '%s'\n --> Status: %s" % (
                regex, bugid, desc, status), Colors.OKGREEN)

    if not res:
        printc("\n==> Some bugs marked as known issues have been closed!", Colors.FAIL)

    return res
|
2014-12-05 11:16:36 +00:00
|
|
|
|
|
|
|
|
|
|
|
def kill_subprocess(owner, process, timeout):
    """Forcefully terminate *process*, escalating until it exits.

    owner: object used for logging (needs a .debug() method); also named
        in the failure message.
    process: a subprocess.Popen instance, or None (no-op).
    timeout: seconds to keep trying before giving up.

    On POSIX, SIGINT is sent first so the child can clean up, switching to
    SIGKILL once a quarter of the timeout has elapsed; on Windows,
    `taskkill /F /T` is used. Returns the process exit code, or None when
    *process* was None or could not be killed within *timeout*.
    """
    if process is None:
        return

    stime = time.time()
    res = process.poll()
    waittime = 0.05
    killsig = None
    if not is_windows():
        killsig = signal.SIGINT
    while res is None:
        try:
            owner.debug("Subprocess is still alive, sending KILL signal")
            if is_windows():
                subprocess.call(
                    ['taskkill', '/F', '/T', '/PID', str(process.pid)])
            else:
                process.send_signal(killsig)
            # Exponential back-off between kill attempts.
            time.sleep(waittime)
            waittime *= 2
        except OSError:
            pass
        # Escalate to SIGKILL once gentle signalling has had its chance.
        if not is_windows() and time.time() - stime > timeout / 4:
            killsig = signal.SIGKILL
        if time.time() - stime > timeout:
            printc("Could not kill %s subprocess after %s second"
                   " Something is really wrong, => EXITING"
                   % (owner, timeout), Colors.FAIL)

            return
        res = process.poll()

    return res
|
2019-03-10 20:07:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
def format_config_template(extra_data, config_text, test_name):
    """Interpolate *config_text* (a %-style template) with variables taken
    from *extra_data*.

    When both validate-flow directory keys are present, a ready-made
    'validateflow' override string (with per-test expectations and
    actual-results directories derived from *test_name*) is added to the
    interpolation variables.
    """
    # Variables available for interpolation inside config blocks.
    template_vars = dict(extra_data)

    has_flow_dirs = ('validate-flow-expectations-dir' in template_vars
                     and 'validate-flow-actual-results-dir' in template_vars)
    if has_flow_dirs:
        subpath = test_name.replace('.', os.sep)
        expectations_dir = os.path.join(
            template_vars['validate-flow-expectations-dir'], subpath)
        actual_results_dir = os.path.join(
            template_vars['validate-flow-actual-results-dir'], subpath)
        template_vars['validateflow'] = (
            "validateflow, expectations-dir=\"%s\", actual-results-dir=\"%s\""
            % (expectations_dir, actual_results_dir))

    return config_text % template_vars
|