#!/usr/bin/env python3
#
# Copyright (c) 2013,Thibault Saunier <thibault.saunier@collabora.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301, USA.
""" Class representing tests and test managers. """
|
|
|
|
|
2019-11-11 21:57:27 +00:00
|
|
|
import importlib.util
|
2016-09-01 20:39:38 +00:00
|
|
|
import json
|
2013-12-31 10:45:07 +00:00
|
|
|
import os
|
2014-03-28 14:00:01 +00:00
|
|
|
import sys
|
2013-12-31 10:45:07 +00:00
|
|
|
import re
|
2016-09-02 20:37:24 +00:00
|
|
|
import copy
|
2018-04-21 02:57:32 +00:00
|
|
|
import shlex
|
2016-11-04 21:04:37 +00:00
|
|
|
import socketserver
|
2016-09-01 20:39:38 +00:00
|
|
|
import struct
|
2013-12-31 10:45:07 +00:00
|
|
|
import time
|
2016-11-04 21:04:37 +00:00
|
|
|
from . import utils
|
2014-04-22 08:49:10 +00:00
|
|
|
import signal
|
2016-11-04 21:04:37 +00:00
|
|
|
import urllib.parse
|
2013-12-31 10:45:07 +00:00
|
|
|
import subprocess
|
2015-01-12 12:09:33 +00:00
|
|
|
import threading
|
2016-11-04 21:04:37 +00:00
|
|
|
import queue
|
|
|
|
import configparser
|
2017-02-27 15:10:49 +00:00
|
|
|
import xml
|
2017-12-03 09:49:22 +00:00
|
|
|
import random
|
2019-01-29 18:59:44 +00:00
|
|
|
import shutil
|
2017-12-03 09:42:49 +00:00
|
|
|
import uuid
|
2020-01-12 04:00:06 +00:00
|
|
|
from itertools import cycle
|
2020-03-21 14:57:51 +00:00
|
|
|
from fractions import Fraction
|
2017-02-27 15:10:49 +00:00
|
|
|
|
2018-05-03 09:27:31 +00:00
|
|
|
from .utils import which
|
2017-02-27 15:10:49 +00:00
|
|
|
from . import reporters
|
2016-11-04 21:04:37 +00:00
|
|
|
from . import loggable
|
|
|
|
from .loggable import Loggable
|
2017-06-22 17:08:30 +00:00
|
|
|
|
2019-03-17 00:37:16 +00:00
|
|
|
from collections import defaultdict
|
2017-06-22 17:08:30 +00:00
|
|
|
try:
|
|
|
|
from lxml import etree as ET
|
2017-06-28 19:54:13 +00:00
|
|
|
except ImportError:
|
2017-06-22 17:08:30 +00:00
|
|
|
import xml.etree.cElementTree as ET
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2019-04-06 14:10:14 +00:00
|
|
|
|
2019-01-26 01:09:30 +00:00
|
|
|
from .vfb_server import get_virual_frame_buffer_server
|
|
|
|
from .httpserver import HTTPServer
|
2016-11-04 21:04:37 +00:00
|
|
|
from .utils import mkdir, Result, Colors, printc, DEFAULT_TIMEOUT, GST_SECOND, \
|
2016-11-16 13:47:21 +00:00
|
|
|
Protocols, look_for_file_in_source_dir, get_data_file, BackTraceGenerator, \
|
2019-03-19 15:15:35 +00:00
|
|
|
check_bugs_resolution, is_tty
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2015-03-19 15:06:54 +00:00
|
|
|
# The factor by which we increase the hard timeout when running inside
# Valgrind
GDB_TIMEOUT_FACTOR = VALGRIND_TIMEOUT_FACTOR = 20
RR_TIMEOUT_FACTOR = 2
# Global, user-overridable multiplier applied to every soft timeout.
TIMEOUT_FACTOR = float(os.environ.get("TIMEOUT_FACTOR", 1))
# The error reported by valgrind when detecting errors
VALGRIND_ERROR_CODE = 20

VALIDATE_OVERRIDE_EXTENSION = ".override"
# Negative return codes (as produced by subprocess) for signals that
# indicate a crash, mapped to the signal name.
EXITING_SIGNALS = dict([(-getattr(signal, s), s) for s in [
    'SIGQUIT', 'SIGILL', 'SIGABRT', 'SIGFPE', 'SIGSEGV', 'SIGBUS', 'SIGSYS',
    'SIGTRAP', 'SIGXCPU', 'SIGXFSZ', 'SIGIOT'] if hasattr(signal, s)])
# 139 is the shell-style exit status for SIGSEGV (128 + 11).
EXITING_SIGNALS.update({139: "SIGSEGV"})
# Make the mapping bidirectional: code -> name and name -> code.
EXITING_SIGNALS.update({(v, k) for k, v in EXITING_SIGNALS.items()})


# Base URL used to link logs when running in CI (None when unset locally).
CI_ARTIFACTS_URL = os.environ.get('CI_ARTIFACTS_URL')
|
|
|
|
|
|
|
|
|
2014-01-08 17:51:14 +00:00
|
|
|
class Test(Loggable):
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
""" A class representing a particular test. """
|
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
    def __init__(self, application_name, classname, options,
                 reporter, duration=0, timeout=DEFAULT_TIMEOUT,
                 hard_timeout=None, extra_env_variables=None,
                 expected_issues=None, is_parallel=True,
                 workdir=None):
        """
        @timeout: The timeout during which the value return by get_current_value
                  keeps being exactly equal
        @hard_timeout: Max time the test can take in absolute
        """
        Loggable.__init__(self)
        # Soft timeout is scaled by the global factor and the user-supplied one.
        self.timeout = timeout * TIMEOUT_FACTOR * options.timeout_factor
        if hard_timeout:
            self.hard_timeout = hard_timeout * TIMEOUT_FACTOR
            self.hard_timeout *= options.timeout_factor
        else:
            # No hard timeout requested (None / 0): keep it as-is.
            self.hard_timeout = hard_timeout
        self.classname = classname
        self.options = options
        self.application = application_name
        self.command = []
        self.server_command = None
        self.reporter = reporter
        self.process = None
        self.proc_env = None
        self.thread = None
        self.queue = None
        self.duration = duration
        self.stack_trace = None
        self._uuid = None
        # Normalize expected_issues so the attribute is always a list
        # (callers may pass None, a single issue dict, or a list).
        if expected_issues is None:
            self.expected_issues = []
        elif not isinstance(expected_issues, list):
            self.expected_issues = [expected_issues]
        else:
            self.expected_issues = expected_issues

        extra_env_variables = extra_env_variables or {}
        self.extra_env_variables = extra_env_variables
        self.optional = False
        self.is_parallel = is_parallel
        self.generator = None
        self.workdir = workdir
        self.max_retries = 0
        self.html_log = None
        self.rr_logdir = None

        # Initialize all per-run state (result, logfile, process, ...).
        self.clean()
|
2014-02-19 12:07:03 +00:00
|
|
|
|
2019-03-18 19:52:11 +00:00
|
|
|
    def _generate_expected_issues(self):
        """Hook for subclasses: extra expected-issue entries appended to the
        snippet produced by generate_expected_issues()."""
        return ''
|
|
|
|
|
2019-03-18 19:52:11 +00:00
|
|
|
    def generate_expected_issues(self):
        """Build a python snippet describing this test's failure as a 'known issue'.

        The returned text is meant to be pasted into a known-issues file so
        the failure can be whitelisted on later runs.
        """
        res = '%s"FIXME \'%s\' issues [REPORT A BUG ' % (" " * 4, self.classname) \
            + 'in https://gitlab.freedesktop.org/gstreamer/ '\
            + 'or use a proper bug description]": {'
        res += """
        "tests": [
            "%s"
        ],
        "issues": [""" % (self.classname)

        # A non-zero return code becomes a 'signame' entry when it maps to a
        # known crash signal, a raw 'returncode' entry otherwise.
        retcode = self.process.returncode if self.process else 0
        if retcode != 0:
            signame = EXITING_SIGNALS.get(retcode)
            val = "'" + signame + "'" if signame else retcode
            res += """\n            {
                '%s': %s,
                'sometimes': True,
            },""" % ("signame" if signame else "returncode", val)

        # Let subclasses contribute additional issue entries.
        res += self._generate_expected_issues()
        res += "\n%s],\n%s},\n" % (" " * 8, " " * 4)

        return res
|
2019-03-16 02:46:00 +00:00
|
|
|
|
2020-05-13 22:25:00 +00:00
|
|
|
def copy(self, nth=None):
|
|
|
|
copied_test = copy.copy(self)
|
|
|
|
if nth:
|
|
|
|
copied_test.classname += '_it' + str(nth)
|
|
|
|
copied_test.options = copy.copy(self.options)
|
|
|
|
copied_test.options.logsdir = os.path.join(copied_test.options.logsdir, str(nth))
|
|
|
|
os.makedirs(copied_test.options.logsdir, exist_ok=True)
|
|
|
|
|
|
|
|
return copied_test
|
|
|
|
|
2015-01-15 14:32:12 +00:00
|
|
|
def clean(self):
|
2016-09-02 20:37:24 +00:00
|
|
|
self.kill_subprocess()
|
2013-12-31 10:45:07 +00:00
|
|
|
self.message = ""
|
2014-01-22 23:15:54 +00:00
|
|
|
self.error_str = ""
|
2013-12-31 10:45:07 +00:00
|
|
|
self.time_taken = 0.0
|
2013-12-31 10:45:07 +00:00
|
|
|
self._starting_time = None
|
|
|
|
self.result = Result.NOT_RUN
|
2014-01-10 14:30:38 +00:00
|
|
|
self.logfile = None
|
2015-01-16 17:50:38 +00:00
|
|
|
self.out = None
|
2019-04-05 13:40:45 +00:00
|
|
|
self.extra_logfiles = set()
|
2015-06-12 09:17:43 +00:00
|
|
|
self.__env_variable = []
|
2016-09-02 20:39:50 +00:00
|
|
|
self.kill_subprocess()
|
2019-03-27 18:07:48 +00:00
|
|
|
self.process = None
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
    def __str__(self):
        # One-line human readable summary: classname, result, and — for
        # failures — the message and a pointer to the log file.
        string = self.classname
        if self.result != Result.NOT_RUN:
            string += ": " + self.result
            if self.result in [Result.FAILED, Result.TIMEOUT]:
                string += " '%s'" % self.message
                if not self.options.dump_on_failure:
                    if not self.options.redirect_logs and self.result != Result.PASSED:
                        string += self.get_logfile_repr()
        else:
            # Not run yet: rendered as a 'starting' banner.
            string = "\n==> %s" % string

        return string
|
|
|
|
|
2015-02-27 23:20:43 +00:00
|
|
|
    def add_env_variable(self, variable, value=None):
        """
        Only useful so that the gst-validate-launcher can print the exact
        right command line to reproduce the tests
        """
        # No explicit value: fall back to the current environment.
        if value is None:
            value = os.environ.get(variable, None)

        # Still unset anywhere -> nothing worth recording.
        if value is None:
            return

        # Only the name is stored; the value is looked up in proc_env when
        # the command line representation is built (_env_variable).
        self.__env_variable.append(variable)
|
|
|
|
|
|
|
|
    @property
    def _env_variable(self):
        # Build the "VAR='value' ..." prefix shown before the reproduced
        # command line. Shown when not verbose at all or very verbose (-vv);
        # at plain -v a placeholder message is emitted instead.
        res = ""
        if not self.options.verbose or self.options.verbose > 1:
            for var in set(self.__env_variable):
                if res:
                    res += " "
                value = self.proc_env.get(var, None)
                if value is not None:
                    res += "%s='%s'" % (var, value)
        else:
            res += "[Not displaying environment variables, rerun with -vv for the full command]"

        return res
|
2014-08-11 11:19:22 +00:00
|
|
|
|
2015-01-16 17:50:38 +00:00
|
|
|
    def open_logfile(self):
        """Open the destination for the test output (file, stdout or stderr).

        Idempotent: does nothing when an output is already open.
        """
        if self.out:
            return

        # One markdown log per test, mirroring the dotted classname as a
        # directory hierarchy under logsdir.
        path = os.path.join(self.options.logsdir,
                            self.classname.replace(".", os.sep) + '.md')
        mkdir(os.path.dirname(path))
        self.logfile = path

        if self.options.redirect_logs == 'stdout':
            self.out = sys.stdout
        elif self.options.redirect_logs == 'stderr':
            self.out = sys.stderr
        else:
            self.out = open(path, 'w+')
|
|
|
|
|
2019-04-05 13:41:14 +00:00
|
|
|
    def finalize_logfiles(self):
        """Finish the markdown log: duration, inlined extra logs, rr hint,
        then optionally render it to HTML."""
        self.out.write("\n**Duration**: %s" % self.time_taken)
        if not self.options.redirect_logs:
            self.out.flush()
            for logfile in self.extra_logfiles:
                # Only copy over extra logfile content if it's below a certain threshold
                # Avoid copying gigabytes of data if a lot of debugging is activated
                if os.path.getsize(logfile) < 500 * 1024:
                    self.out.write('\n\n## %s:\n\n```\n%s\n```\n' % (
                        os.path.basename(logfile), self.get_extra_log_content(logfile))
                    )
                else:
                    self.out.write('\n\n## %s:\n\n**Log file too big.**\n %s\n\n Check file content directly\n\n' % (
                        os.path.basename(logfile), logfile)
                    )

            if self.rr_logdir:
                # Tell the user how to replay the recorded execution.
                self.out.write('\n\n## rr trace:\n\n```\nrr replay %s/latest-trace\n```\n' % (
                    self.rr_logdir))

            self.out.flush()
            self.out.close()

        if self.options.html:
            # Render the markdown log to an .html file next to it.
            # commonmark is imported lazily so it is only required when
            # HTML output was requested.
            self.html_log = os.path.splitext(self.logfile)[0] + '.html'
            import commonmark
            parser = commonmark.Parser()
            with open(self.logfile) as f:
                ast = parser.parse(f.read())

            renderer = commonmark.HtmlRenderer()
            html = renderer.render(ast)
            with open(self.html_log, 'w') as f:
                f.write(html)

        self.out = None
|
2014-04-23 09:47:10 +00:00
|
|
|
|
2015-01-16 17:50:38 +00:00
|
|
|
def _get_file_content(self, file_name):
|
|
|
|
f = open(file_name, 'r+')
|
2014-04-23 09:47:10 +00:00
|
|
|
value = f.read()
|
|
|
|
f.close()
|
|
|
|
|
2014-04-26 07:16:26 +00:00
|
|
|
return value
|
2014-04-23 09:47:10 +00:00
|
|
|
|
2015-01-16 17:50:38 +00:00
|
|
|
    def get_log_content(self):
        """Return the content of the main log file."""
        return self._get_file_content(self.logfile)
|
|
|
|
|
|
|
|
def get_extra_log_content(self, extralog):
|
|
|
|
if extralog not in self.extra_logfiles:
|
|
|
|
return ""
|
|
|
|
|
|
|
|
return self._get_file_content(extralog)
|
|
|
|
|
2014-01-10 15:46:00 +00:00
|
|
|
def get_classname(self):
|
|
|
|
name = self.classname.split('.')[-1]
|
|
|
|
classname = self.classname.replace('.%s' % name, '')
|
|
|
|
|
|
|
|
return classname
|
|
|
|
|
|
|
|
def get_name(self):
|
|
|
|
return self.classname.split('.')[-1]
|
|
|
|
|
2017-12-03 09:42:49 +00:00
|
|
|
def get_uuid(self):
|
|
|
|
if self._uuid is None:
|
|
|
|
self._uuid = self.classname + str(uuid.uuid4())
|
|
|
|
return self._uuid
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def add_arguments(self, *args):
|
2016-12-22 13:07:58 +00:00
|
|
|
self.command += args
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
def build_arguments(self):
|
2015-02-27 23:20:43 +00:00
|
|
|
self.add_env_variable("LD_PRELOAD")
|
|
|
|
self.add_env_variable("DISPLAY")
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2016-11-07 20:20:09 +00:00
|
|
|
    def add_stack_trace_to_logfile(self):
        """Collect a backtrace of the test process and append it to the log."""
        self.debug("Adding stack trace")
        if self.options.rr:
            # Under rr the whole execution is recorded; a backtrace here
            # would be redundant.
            return

        trace_gatherer = BackTraceGenerator.get_default()
        stack_trace = trace_gatherer.get_trace(self)

        if not stack_trace:
            return

        info = "\n\n## Stack trace\n\n```\n%s\n```" % stack_trace
        if self.options.redirect_logs:
            print(info)
            return

        if self.options.xunit_file:
            # Keep it around so the xunit reporter can embed it.
            self.stack_trace = stack_trace

        self.out.write(info)
        self.out.flush()
|
2016-11-07 20:20:09 +00:00
|
|
|
|
2019-03-19 13:12:42 +00:00
|
|
|
    def add_known_issue_information(self):
        """Log the already-known issues plus a ready-to-paste snippet for
        marking the new failure as known."""
        if self.expected_issues:
            info = "\n\n## Already known issues\n\n``` python\n%s\n```\n\n" % (
                json.dumps(self.expected_issues, indent=4)
            )
        else:
            info = ""

        info += "\n\n**You can mark the issues as 'known' by adding the " \
            + " following lines to the list of known issues**\n" \
            + "\n\n``` python\n%s\n```" % (self.generate_expected_issues())

        if self.options.redirect_logs:
            print(info)
            return

        self.out.write(info)
|
2019-03-19 13:12:42 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
    def set_result(self, result, message="", error=""):
        """Record the final result of the test.

        On timeout with --debug, gives the user a chance to attach gdb;
        otherwise a stack trace is collected. Failing results also get the
        known-issue snippet appended to the log.
        """

        if not self.options.redirect_logs:
            # Close the output code fence opened when the test started.
            self.out.write("\n```\n")
            self.out.flush()

        self.debug("Setting result: %s (message: %s, error: %s)" % (result,
                   message, error))

        if result is Result.TIMEOUT:
            if self.options.debug is True:
                if self.options.gdb:
                    printc("Timeout, you should process <ctrl>c to get into gdb",
                           Colors.FAIL)
                    # and wait here until gdb exits
                    self.process.communicate()
                else:
                    pname = self.command[0]
                    input("%sTimeout happened on %s you can attach gdb doing:\n $gdb %s %d%s\n"
                          "Press enter to continue" % (Colors.FAIL, self.classname,
                                                       pname, self.process.pid, Colors.ENDC))
            else:
                self.add_stack_trace_to_logfile()

        self.result = result
        self.message = message
        self.error_str = error

        if result not in [Result.PASSED, Result.NOT_RUN, Result.SKIPPED]:
            self.add_known_issue_information()
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
    def check_results(self):
        """Translate the subprocess exit status into a test Result."""
        if self.result is Result.FAILED or self.result is Result.TIMEOUT:
            # The result was already decided (e.g. by the timeout logic).
            return

        self.debug("%s returncode: %s", self, self.process.returncode)
        if self.options.rr and self.process.returncode == -signal.SIGPIPE:
            self.set_result(Result.SKIPPED, "SIGPIPE received under `rr`, known issue.")
        elif self.process.returncode == 0:
            self.set_result(Result.PASSED)
        elif self.process.returncode in EXITING_SIGNALS:
            # Crash signal: grab a backtrace before reporting the failure.
            self.add_stack_trace_to_logfile()
            self.set_result(Result.FAILED,
                            "Application exited with signal %s" % (
                                EXITING_SIGNALS[self.process.returncode]))
        elif self.process.returncode == VALGRIND_ERROR_CODE:
            self.set_result(Result.FAILED, "Valgrind reported errors")
        else:
            self.set_result(Result.FAILED,
                            "Application returned %d" % (self.process.returncode))
|
2014-01-09 08:14:27 +00:00
|
|
|
|
|
|
|
    def get_current_value(self):
        """
        Lets subclasses implement a nicer timeout measurement method
        They should return some value with which we will compare
        the previous and timeout if they are equal during self.timeout
        seconds
        """
        return Result.NOT_RUN
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2015-01-16 17:57:06 +00:00
|
|
|
    def process_update(self):
        """
        Returns True when process has finished running or has timed out.
        """

        if self.process is None:
            # Process has not started running yet
            return False

        self.process.poll()
        if self.process.returncode is not None:
            return True

        val = self.get_current_value()

        self.debug("Got value: %s" % val)
        if val is Result.NOT_RUN:
            # The get_current_value logic is not implemented... dumb
            # timeout
            if time.time() - self.last_change_ts > self.timeout:
                self.set_result(Result.TIMEOUT,
                                "Application timed out: %s secs" %
                                self.timeout,
                                "timeout")
                return True
            return False
        elif val is Result.FAILED:
            return True
        elif val is Result.KNOWN_ERROR:
            return True

        self.log("New val %s" % val)

        if val == self.last_val:
            # No progress: time out once the value has been stable longer
            # than the soft timeout.
            delta = time.time() - self.last_change_ts
            self.debug("%s: Same value for %d/%d seconds" %
                       (self, delta, self.timeout))
            if delta > self.timeout:
                self.set_result(Result.TIMEOUT,
                                "Application timed out: %s secs" %
                                self.timeout,
                                "timeout")
                return True
        elif self.hard_timeout and time.time() - self.start_ts > self.hard_timeout:
            # Progress is being made but the absolute limit was reached.
            self.set_result(
                Result.TIMEOUT, "Hard timeout reached: %d secs" % self.hard_timeout)
            return True
        else:
            # Progress detected: reset the soft-timeout reference point.
            self.last_change_ts = time.time()
            self.last_val = val

        return False
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-03-26 19:09:12 +00:00
|
|
|
def get_subproc_env(self):
|
2017-02-21 16:39:37 +00:00
|
|
|
return os.environ.copy()
|
2014-03-26 19:09:12 +00:00
|
|
|
|
2015-01-16 18:08:19 +00:00
|
|
|
def kill_subprocess(self):
|
2020-03-10 14:52:35 +00:00
|
|
|
subprocs_id = None
|
|
|
|
if self.options.rr and self.process and self.process.returncode is None:
|
|
|
|
cmd = ["ps", "-o", "pid", "--ppid", str(self.process.pid), "--noheaders"]
|
|
|
|
try:
|
|
|
|
subprocs_id = [int(pid.strip('\n')) for
|
|
|
|
pid in subprocess.check_output(cmd).decode().split(' ') if pid]
|
|
|
|
except FileNotFoundError:
|
|
|
|
self.error("Ps not found, will probably not be able to get rr "
|
|
|
|
"working properly after we kill the process")
|
|
|
|
except subprocess.CalledProcessError as e:
|
|
|
|
self.error("Couldn't get rr subprocess pid: %s" % (e))
|
|
|
|
|
|
|
|
utils.kill_subprocess(self, self.process, DEFAULT_TIMEOUT, subprocs_id)
|
2014-12-08 13:37:15 +00:00
|
|
|
|
2018-04-21 02:57:32 +00:00
|
|
|
    def run_external_checks(self):
        """Hook for subclasses: extra validation run after a successful exit."""
        pass
|
|
|
|
|
2015-01-12 12:09:33 +00:00
|
|
|
    def thread_wrapper(self):
        """Worker-thread body: spawn the subprocess, wait for it, then signal
        the main loop through the queue."""
        def enable_sigint():
            # Restore the SIGINT handler for the child process (gdb) to ensure
            # it can handle it.
            signal.signal(signal.SIGINT, signal.SIG_DFL)

        # preexec_fn is POSIX-only, hence the os.name guard.
        if self.options.gdb and os.name != "nt":
            preexec_fn = enable_sigint
        else:
            preexec_fn = None

        self.process = subprocess.Popen(self.command,
                                        stderr=self.out,
                                        stdout=self.out,
                                        env=self.proc_env,
                                        cwd=self.workdir,
                                        preexec_fn=preexec_fn)
        self.process.wait()
        if self.result is not Result.TIMEOUT:
            if self.process.returncode == 0:
                self.run_external_checks()
            # Wake up the main loop blocked on the queue.
            self.queue.put(None)
|
2015-01-12 12:09:33 +00:00
|
|
|
|
2017-01-03 16:01:31 +00:00
|
|
|
def get_valgrind_suppression_file(self, subdir, name):
|
|
|
|
p = get_data_file(subdir, name)
|
|
|
|
if p:
|
|
|
|
return p
|
|
|
|
|
|
|
|
self.error("Could not find any %s file" % name)
|
|
|
|
|
2015-03-19 16:22:26 +00:00
|
|
|
    def get_valgrind_suppressions(self):
        """Return the list of valgrind suppression files to use."""
        return [self.get_valgrind_suppression_file('data', 'gstvalidate.supp')]
|
2015-03-19 16:22:26 +00:00
|
|
|
|
2014-12-05 11:16:36 +00:00
|
|
|
    def use_gdb(self, command):
        """Wrap *command* with gdb and relax the timeouts accordingly.

        In non-stop mode gdb runs, dumps a backtrace and quits by itself;
        otherwise the session is interactive and timeouts are effectively
        disabled.
        """
        if self.hard_timeout is not None:
            self.hard_timeout *= GDB_TIMEOUT_FACTOR
        self.timeout *= GDB_TIMEOUT_FACTOR

        if not self.options.gdb_non_stop:
            # Interactive session: never time out on the user.
            self.timeout = sys.maxsize
            self.hard_timeout = sys.maxsize

        args = ["gdb"]
        if self.options.gdb_non_stop:
            args += ["-ex", "run", "-ex", "backtrace", "-ex", "quit"]
        args += ["--args"] + command
        return args
|
2015-11-10 16:43:54 +00:00
|
|
|
|
2020-03-10 14:52:35 +00:00
|
|
|
    def use_rr(self, command, subenv):
        """Wrap *command* with `rr record` and point traces at a per-test dir."""
        command = ["rr", 'record', '-h'] + command

        self.timeout *= RR_TIMEOUT_FACTOR
        self.rr_logdir = os.path.join(self.options.logsdir, self.classname.replace(".", os.sep), 'rr-logs')
        subenv['_RR_TRACE_DIR'] = self.rr_logdir
        try:
            # Drop traces left over from a previous run.
            shutil.rmtree(self.rr_logdir, ignore_errors=False, onerror=None)
        except FileNotFoundError:
            pass
        self.add_env_variable('_RR_TRACE_DIR', self.rr_logdir)

        return command
|
|
|
|
|
2014-12-05 11:16:36 +00:00
|
|
|
    def use_valgrind(self, command, subenv):
        """Wrap *command* with valgrind/memcheck; tune env, logs and timeouts."""
        vglogsfile = os.path.splitext(self.logfile)[0] + '.valgrind'
        self.extra_logfiles.add(vglogsfile)

        vg_args = []

        for o, v in [('trace-children', 'yes'),
                     ('tool', 'memcheck'),
                     ('leak-check', 'full'),
                     ('leak-resolution', 'high'),
                     # TODO: errors-for-leak-kinds should be set to all instead of definite
                     # and all false positives should be added to suppression
                     # files.
                     ('errors-for-leak-kinds', 'definite,indirect'),
                     ('show-leak-kinds', 'definite,indirect'),
                     ('show-possibly-lost', 'no'),
                     ('num-callers', '20'),
                     ('error-exitcode', str(VALGRIND_ERROR_CODE)),
                     ('gen-suppressions', 'all')]:
            vg_args.append("--%s=%s" % (o, v))

        if not self.options.redirect_logs:
            # NOTE(review): vglogsfile was already computed and registered
            # above; this repetition is harmless (extra_logfiles is a set).
            vglogsfile = os.path.splitext(self.logfile)[0] + '.valgrind'
            self.extra_logfiles.add(vglogsfile)
            vg_args.append("--%s=%s" % ('log-file', vglogsfile))

        for supp in self.get_valgrind_suppressions():
            vg_args.append("--suppressions=%s" % supp)

        command = ["valgrind"] + vg_args + command

        # Tune GLib's memory allocator to be more valgrind friendly
        subenv['G_DEBUG'] = 'gc-friendly'
        subenv['G_SLICE'] = 'always-malloc'

        if self.hard_timeout is not None:
            self.hard_timeout *= VALGRIND_TIMEOUT_FACTOR
        self.timeout *= VALGRIND_TIMEOUT_FACTOR

        # Enable 'valgrind.config'
        self.add_validate_config(get_data_file(
            'data', 'valgrind.config'), subenv)
        # When operating directly on the subprocess env, also record the
        # variables for the reproduced command line.
        if subenv == self.proc_env:
            self.add_env_variable('G_DEBUG', 'gc-friendly')
            self.add_env_variable('G_SLICE', 'always-malloc')
            self.add_env_variable('GST_VALIDATE_CONFIG',
                                  self.proc_env['GST_VALIDATE_CONFIG'])

        return command
|
|
|
|
|
2018-04-13 02:05:01 +00:00
|
|
|
def add_validate_config(self, config, subenv=None):
|
|
|
|
if not subenv:
|
|
|
|
subenv = self.extra_env_variables
|
|
|
|
|
2020-05-05 22:09:08 +00:00
|
|
|
cconf = subenv.get('GST_VALIDATE_CONFIG', "")
|
|
|
|
paths = [c for c in cconf.split(os.pathsep) if c] + [config]
|
|
|
|
subenv['GST_VALIDATE_CONFIG'] = os.pathsep.join(paths)
|
2018-04-13 02:05:01 +00:00
|
|
|
|
2014-12-05 11:16:36 +00:00
|
|
|
    def launch_server(self):
        """Hook for subclasses: start a companion server, return its command."""
        return None
|
2015-05-08 14:33:50 +00:00
|
|
|
|
2017-06-06 16:45:31 +00:00
|
|
|
    def get_logfile_repr(self):
        """Return a printable pointer to the log (HTML log preferred, and
        rewritten as a CI artifact URL when available)."""
        if not self.options.redirect_logs:
            if self.html_log:
                log = self.html_log
            else:
                log = self.logfile

            if CI_ARTIFACTS_URL:
                log = CI_ARTIFACTS_URL + os.path.relpath(log, self.options.logsdir)

            return "\n    Log: %s" % (log)

        # Logs are redirected to stdout/stderr: nothing to point at.
        return ""
|
2017-06-06 16:45:31 +00:00
|
|
|
|
2017-06-20 14:43:54 +00:00
|
|
|
def get_command_repr(self):
|
2018-07-30 19:36:48 +00:00
|
|
|
message = "%s %s" % (self._env_variable, ' '.join(
|
|
|
|
shlex.quote(arg) for arg in self.command))
|
2017-06-20 14:43:54 +00:00
|
|
|
if self.server_command:
|
|
|
|
message = "%s & %s" % (self.server_command, message)
|
|
|
|
|
2018-07-30 19:36:48 +00:00
|
|
|
return message
|
2017-06-20 14:43:54 +00:00
|
|
|
|
2015-01-16 18:08:19 +00:00
|
|
|
    def test_start(self, queue):
        """Launch the test: build the command, apply gdb/valgrind/rr wrappers,
        and spawn the worker thread that runs the subprocess."""
        self.open_logfile()

        self.server_command = self.launch_server()
        self.queue = queue
        self.command = [self.application]
        self._starting_time = time.time()
        self.build_arguments()
        self.proc_env = self.get_subproc_env()

        for var, value in list(self.extra_env_variables.items()):
            # Extra variables are appended (pathsep-joined) to any existing value.
            value = self.proc_env.get(var, '') + os.pathsep + value
            self.proc_env[var] = value.strip(os.pathsep)
            self.add_env_variable(var, self.proc_env[var])

        if self.options.gdb:
            self.command = self.use_gdb(self.command)

            self.previous_sigint_handler = signal.getsignal(signal.SIGINT)
            # Make the gst-validate executable ignore SIGINT while gdb is
            # running.
            signal.signal(signal.SIGINT, signal.SIG_IGN)

        if self.options.valgrind:
            self.command = self.use_valgrind(self.command, self.proc_env)

        if self.options.rr:
            self.command = self.use_rr(self.command, self.proc_env)

        if not self.options.redirect_logs:
            # Markdown header of the per-test log file.
            self.out.write("# `%s`\n\n"
                           "## Command\n\n``` bash\n%s\n```\n\n" % (
                               self.classname, self.get_command_repr()))
            self.out.write("## %s output\n\n``` \n\n" % os.path.basename(self.application))
            self.out.flush()
        else:
            message = "Launching: %s%s\n" \
                "    Command: %s\n" % (Colors.ENDC, self.classname,
                                       self.get_command_repr())
            printc(message, Colors.OKBLUE)

        self.thread = threading.Thread(target=self.thread_wrapper)
        self.thread.start()

        # Reference points for the soft and hard timeout tracking.
        self.last_val = 0
        self.last_change_ts = time.time()
        self.start_ts = time.time()
|
2016-09-01 01:39:18 +00:00
|
|
|
def _dump_log_file(self, logfile):
|
2019-08-12 20:56:41 +00:00
|
|
|
if which('bat'):
|
|
|
|
try:
|
|
|
|
subprocess.check_call(['bat', '-H', '1', '--paging=never', logfile])
|
|
|
|
return
|
|
|
|
except (subprocess.CalledProcessError, FileNotFoundError):
|
|
|
|
pass
|
|
|
|
|
2016-09-01 01:39:18 +00:00
|
|
|
with open(logfile, 'r') as fin:
|
2019-08-12 20:56:41 +00:00
|
|
|
for line in fin.readlines():
|
|
|
|
print('> ' + line, end='')
|
2016-09-01 01:39:18 +00:00
|
|
|
|
|
|
|
    def _dump_log_files(self):
        """Dump the main test log file to stdout (see _dump_log_file)."""
        self._dump_log_file(self.logfile)
|
|
|
|
|
2019-03-27 15:36:16 +00:00
|
|
|
def copy_logfiles(self, extra_folder="flaky_tests"):
|
|
|
|
path = os.path.dirname(os.path.join(self.options.logsdir, extra_folder,
|
|
|
|
self.classname.replace(".", os.sep)))
|
|
|
|
mkdir(path)
|
|
|
|
self.logfile = shutil.copy(self.logfile, path)
|
|
|
|
extra_logs = []
|
|
|
|
for logfile in self.extra_logfiles:
|
|
|
|
extra_logs.append(shutil.copy(logfile, path))
|
|
|
|
self.extra_logfiles = extra_logs
|
|
|
|
|
2021-10-08 15:09:47 +00:00
|
|
|
    def test_end(self, retry_on_failures=False):
        """Finalize the test run after the subprocess exited.

        Stops the reader thread, restores SIGINT handling when gdb was
        used, dumps logs on final failures when requested, trims memory
        held by this object and returns the final result.
        """
        self.kill_subprocess()
        self.thread.join()
        self.time_taken = time.time() - self._starting_time

        if self.options.gdb:
            # Restore the SIGINT handler that was saved before it was set
            # to SIG_IGN while gdb was running.
            signal.signal(signal.SIGINT, self.previous_sigint_handler)

        self.finalize_logfiles()
        # Only dump logs when this is the last attempt (no retry pending)
        # and the result is an actual failure.
        if self.options.dump_on_failure and not retry_on_failures and not self.max_retries:
            if self.result not in [Result.PASSED, Result.KNOWN_ERROR, Result.NOT_RUN]:
                self._dump_log_files()

        # Only keep around env variables we need later
        clean_env = {}
        for n in self.__env_variable:
            clean_env[n] = self.proc_env.get(n, None)
        self.proc_env = clean_env

        # Don't keep around JSON report objects, they were processed
        # in check_results already
        self.reports = []

        return self.result
|
|
|
|
|
2018-03-23 21:02:43 +00:00
|
|
|
|
2017-12-03 09:42:49 +00:00
|
|
|
class GstValidateTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """TCP server dispatching each gst-validate client connection to its own thread."""
    pass
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2018-03-23 21:02:43 +00:00
|
|
|
|
2019-03-20 21:36:17 +00:00
|
|
|
class GstValidateListener(socketserver.BaseRequestHandler, Loggable):
    """Receives length-prefixed JSON messages from a gst-validate subprocess.

    Wire format: a 4-byte big-endian length followed by a UTF-8 JSON
    payload.  The first payload must carry the 'uuid' identifying which
    launcher test the connection belongs to; subsequent messages update
    that test (position, actions, reports, skip requests, ...).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        Loggable.__init__(self, "GstValidateListener")

    def handle(self):
        """Implements BaseRequestHandler handle method"""
        test = None
        self.logCategory = "GstValidateListener"
        while True:
            raw_len = self.request.recv(4)
            if raw_len == b'':
                return
            msglen = struct.unpack('>I', raw_len)[0]
            raw_msg = bytes()
            while msglen != len(raw_msg):
                chunk = self.request.recv(msglen - len(raw_msg))
                if chunk == b'':
                    # Peer closed the connection in the middle of a message:
                    # bail out instead of spinning forever on empty reads.
                    return
                raw_msg += chunk
            # 'ignore' makes the decode infallible, so no UnicodeDecodeError
            # handling is needed here.
            msg = raw_msg.decode('utf-8', 'ignore')

            if msg == '':
                return

            try:
                obj = json.loads(msg)
            except json.decoder.JSONDecodeError as e:
                self.error("%s Could not decode message: %s - %s" % (test.classname if test else "unknown", msg, e))
                continue

            if test is None:
                # First message must contain the uuid
                uuid = obj.get("uuid", None)
                if uuid is None:
                    return
                # Find test from launcher
                for t in self.server.launcher.tests:
                    if uuid == t.get_uuid():
                        test = t
                        break
                if test is None:
                    self.server.launcher.error(
                        "Could not find test for UUID %s" % uuid)
                    return

            obj_type = obj.get("type", '')
            if obj_type == 'position':
                test.set_position(obj['position'], obj['duration'],
                                  obj['speed'])
            elif obj_type == 'buffering':
                test.set_position(obj['position'], 100)
            elif obj_type == 'action':
                test.add_action_execution(obj)
                # Make sure that action is taken into account when checking if process
                # is updating
                test.position += 1
            elif obj_type == 'action-done':
                # Make sure that action end is taken into account when checking if process
                # is updating
                test.position += 1
                if test.actions_infos:
                    test.actions_infos[-1]['execution-duration'] = obj['execution-duration']
            elif obj_type == 'report':
                test.add_report(obj)
            elif obj_type == 'skip-test':
                test.set_result(Result.SKIPPED)
|
2016-09-01 20:39:38 +00:00
|
|
|
|
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
class GstValidateTest(Test):

    """ A class representing a particular test. """

    # A hard (force-kill) timeout defaults to this factor times the soft
    # timeout or the media duration (see __init__).
    HARD_TIMEOUT_FACTOR = 5
    # Marker gst-validate writes to the log when it traps a fatal signal;
    # scanned by check_results() when a test timed out.
    fault_sig_regex = re.compile("<Caught SIGNAL: .*>")
    # NOTE(review): appears to collect features requiring gst-inspect data;
    # usage is outside this chunk — confirm before relying on it.
    needs_gst_inspect = set()
|
2014-12-08 14:27:54 +00:00
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
def __init__(self, application_name, classname,
|
2014-05-07 09:30:09 +00:00
|
|
|
options, reporter, duration=0,
|
2015-05-13 13:29:43 +00:00
|
|
|
timeout=DEFAULT_TIMEOUT, scenario=None, hard_timeout=None,
|
2016-09-02 20:37:24 +00:00
|
|
|
media_descriptor=None, extra_env_variables=None,
|
2021-10-13 02:05:20 +00:00
|
|
|
expected_issues=None, workdir=None, **kwargs):
|
2015-08-15 14:40:11 +00:00
|
|
|
|
|
|
|
extra_env_variables = extra_env_variables or {}
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2014-12-08 14:27:54 +00:00
|
|
|
if not hard_timeout and self.HARD_TIMEOUT_FACTOR:
|
2014-12-08 17:23:10 +00:00
|
|
|
if timeout:
|
|
|
|
hard_timeout = timeout * self.HARD_TIMEOUT_FACTOR
|
|
|
|
elif duration:
|
2014-12-08 14:27:54 +00:00
|
|
|
hard_timeout = duration * self.HARD_TIMEOUT_FACTOR
|
|
|
|
else:
|
|
|
|
hard_timeout = None
|
|
|
|
|
2015-04-21 09:00:58 +00:00
|
|
|
# If we are running from source, use the -debug version of the
|
|
|
|
# application which is using rpath instead of libtool's wrappers. It's
|
|
|
|
# slightly faster to start and will not confuse valgrind.
|
|
|
|
debug = '%s-debug' % application_name
|
2015-04-27 11:25:44 +00:00
|
|
|
p = look_for_file_in_source_dir('tools', debug)
|
2015-04-21 09:00:58 +00:00
|
|
|
if p:
|
|
|
|
application_name = p
|
|
|
|
|
2016-09-01 20:39:38 +00:00
|
|
|
self.reports = []
|
|
|
|
self.position = -1
|
2016-09-02 20:37:24 +00:00
|
|
|
self.media_duration = -1
|
2016-09-01 20:39:38 +00:00
|
|
|
self.speed = 1.0
|
|
|
|
self.actions_infos = []
|
2015-08-15 14:23:02 +00:00
|
|
|
self.media_descriptor = media_descriptor
|
2016-09-02 20:39:50 +00:00
|
|
|
self.server = None
|
2019-03-16 02:46:00 +00:00
|
|
|
self.criticals = []
|
2015-08-15 14:23:02 +00:00
|
|
|
|
2015-08-15 14:40:11 +00:00
|
|
|
override_path = self.get_override_file(media_descriptor)
|
|
|
|
if override_path:
|
|
|
|
if extra_env_variables:
|
|
|
|
if extra_env_variables.get("GST_VALIDATE_OVERRIDE", ""):
|
2019-01-26 13:27:47 +00:00
|
|
|
extra_env_variables[
|
|
|
|
"GST_VALIDATE_OVERRIDE"] += os.path.pathsep
|
2015-08-15 14:40:11 +00:00
|
|
|
|
|
|
|
extra_env_variables["GST_VALIDATE_OVERRIDE"] = override_path
|
|
|
|
|
2021-10-13 02:05:20 +00:00
|
|
|
super().__init__(application_name,
|
|
|
|
classname,
|
|
|
|
options, reporter,
|
|
|
|
duration=duration,
|
|
|
|
timeout=timeout,
|
|
|
|
hard_timeout=hard_timeout,
|
|
|
|
extra_env_variables=extra_env_variables,
|
|
|
|
expected_issues=expected_issues,
|
|
|
|
workdir=workdir,
|
|
|
|
**kwargs)
|
2021-04-29 08:09:05 +00:00
|
|
|
if media_descriptor and media_descriptor.get_media_filepath():
|
|
|
|
config_file = os.path.join(media_descriptor.get_media_filepath() + '.config')
|
|
|
|
if os.path.isfile(config_file):
|
|
|
|
self.add_validate_config(config_file, extra_env_variables)
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2014-01-22 23:15:54 +00:00
|
|
|
if scenario is None or scenario.name.lower() == "none":
|
2014-01-09 08:14:27 +00:00
|
|
|
self.scenario = None
|
|
|
|
else:
|
|
|
|
self.scenario = scenario
|
2016-09-01 20:39:38 +00:00
|
|
|
|
|
|
|
    def kill_subprocess(self):
        """Kill the running subprocess (delegates to the Test base class)."""
        Test.kill_subprocess(self)
|
|
|
|
|
|
|
|
    def add_report(self, report):
        """Store a report dict received from the subprocess; the reports are
        later consumed by check_results()."""
        self.reports.append(report)
|
|
|
|
|
|
|
|
    def set_position(self, position, duration, speed=None):
        """Record the playback position/duration (and optionally the playback
        speed) reported by the subprocess."""
        self.position = position
        self.media_duration = duration
        # Zero/None speed reports are ignored, keeping the previous value.
        if speed:
            self.speed = speed
|
|
|
|
|
|
|
|
    def add_action_execution(self, action_infos):
        """Append the description of a scenario action the subprocess started
        executing."""
        self.actions_infos.append(action_infos)
|
|
|
|
|
2015-08-15 14:40:11 +00:00
|
|
|
def get_override_file(self, media_descriptor):
|
|
|
|
if media_descriptor:
|
|
|
|
if media_descriptor.get_path():
|
2018-03-23 21:02:43 +00:00
|
|
|
override_path = os.path.splitext(media_descriptor.get_path())[
|
|
|
|
0] + VALIDATE_OVERRIDE_EXTENSION
|
2015-08-15 14:40:11 +00:00
|
|
|
if os.path.exists(override_path):
|
|
|
|
return override_path
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
2016-09-01 20:39:38 +00:00
|
|
|
    def get_current_position(self):
        """Return the last playback position reported by the subprocess."""
        return self.position
|
|
|
|
|
2015-02-27 13:16:01 +00:00
|
|
|
    def get_current_value(self):
        """Return the progress value used to detect a stalled subprocess
        (the last reported position)."""
        return self.position
|
2015-02-27 13:16:01 +00:00
|
|
|
|
2014-03-26 19:09:12 +00:00
|
|
|
    def get_subproc_env(self):
        """Build the environment for the gst-validate subprocess.

        Starts from the current environment and adds the validate-specific
        variables: test uuid, logs dir, scenario, GST_DEBUG redirection,
        X threading flags and the dot-file dump directory.
        """
        subproc_env = os.environ.copy()

        if self.options.validate_default_config:
            self.add_validate_config(self.options.validate_default_config,
                                     subproc_env, )

        # The subprocess identifies itself back to the launcher's TCP
        # listener with this uuid (see GstValidateListener.handle).
        subproc_env["GST_VALIDATE_UUID"] = self.get_uuid()
        subproc_env["GST_VALIDATE_LOGSDIR"] = self.options.logsdir

        # Redirect GStreamer debug output to a per-test .gstdebug file
        # unless logs are being redirected elsewhere.
        if 'GST_DEBUG' in os.environ and not self.options.redirect_logs:
            gstlogsfile = os.path.splitext(self.logfile)[0] + '.gstdebug'
            self.extra_logfiles.add(gstlogsfile)
            subproc_env["GST_DEBUG_FILE"] = gstlogsfile

        if self.options.no_color:
            subproc_env["GST_DEBUG_NO_COLOR"] = '1'

        # Ensure XInitThreads is called, see bgo#731525
        subproc_env['GST_GL_XINITTHREADS'] = '1'
        self.add_env_variable('GST_GL_XINITTHREADS', '1')
        subproc_env['GST_XINITTHREADS'] = '1'
        self.add_env_variable('GST_XINITTHREADS', '1')

        if self.scenario is not None:
            scenario = self.scenario.get_execution_name()
            subproc_env["GST_VALIDATE_SCENARIO"] = scenario
            self.add_env_variable("GST_VALIDATE_SCENARIO",
                                  subproc_env["GST_VALIDATE_SCENARIO"])
        else:
            # Make sure a scenario inherited from the launcher's own
            # environment does not leak into a scenario-less test.
            try:
                del subproc_env["GST_VALIDATE_SCENARIO"]
            except KeyError:
                pass

        if not subproc_env.get('GST_DEBUG_DUMP_DOT_DIR'):
            dotfilesdir = os.path.join(self.options.logsdir,
                                       self.classname.replace(".", os.sep) + '.pipelines_dot_files')
            mkdir(dotfilesdir)
            subproc_env['GST_DEBUG_DUMP_DOT_DIR'] = dotfilesdir
            # On CI, expose where the dumped dot files will be published.
            if CI_ARTIFACTS_URL:
                dotfilesurl = CI_ARTIFACTS_URL + os.path.relpath(dotfilesdir,
                                                                 self.options.logsdir)
                subproc_env['GST_VALIDATE_DEBUG_DUMP_DOT_URL'] = dotfilesurl

        return subproc_env
|
|
|
|
|
2015-01-15 14:32:12 +00:00
|
|
|
def clean(self):
|
|
|
|
Test.clean(self)
|
2016-09-02 20:37:24 +00:00
|
|
|
self.reports = []
|
|
|
|
self.position = -1
|
|
|
|
self.media_duration = -1
|
|
|
|
self.speed = 1.0
|
|
|
|
self.actions_infos = []
|
2014-02-19 12:07:03 +00:00
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
def build_arguments(self):
|
2015-02-27 23:20:43 +00:00
|
|
|
super(GstValidateTest, self).build_arguments()
|
2014-08-11 11:19:22 +00:00
|
|
|
if "GST_VALIDATE" in os.environ:
|
|
|
|
self.add_env_variable("GST_VALIDATE", os.environ["GST_VALIDATE"])
|
|
|
|
|
|
|
|
if "GST_VALIDATE_SCENARIOS_PATH" in os.environ:
|
|
|
|
self.add_env_variable("GST_VALIDATE_SCENARIOS_PATH",
|
|
|
|
os.environ["GST_VALIDATE_SCENARIOS_PATH"])
|
2015-02-27 13:18:04 +00:00
|
|
|
|
2015-02-27 23:20:43 +00:00
|
|
|
self.add_env_variable("GST_VALIDATE_CONFIG")
|
|
|
|
self.add_env_variable("GST_VALIDATE_OVERRIDE")
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2014-04-23 09:47:10 +00:00
|
|
|
def get_extra_log_content(self, extralog):
|
2014-04-26 07:16:26 +00:00
|
|
|
value = Test.get_extra_log_content(self, extralog)
|
2014-04-23 09:47:10 +00:00
|
|
|
|
2014-04-26 07:16:26 +00:00
|
|
|
return value
|
2014-04-23 09:47:10 +00:00
|
|
|
|
2019-07-15 21:58:05 +00:00
|
|
|
def report_matches_expected_issues(self, report, expected_issue):
|
2016-11-24 13:29:53 +00:00
|
|
|
for key in ['bug', 'bugs', 'sometimes']:
|
2019-07-15 21:58:05 +00:00
|
|
|
if key in expected_issue:
|
|
|
|
del expected_issue[key]
|
2016-11-04 21:04:37 +00:00
|
|
|
for key, value in list(report.items()):
|
2019-07-15 21:58:05 +00:00
|
|
|
if key in expected_issue:
|
|
|
|
if not re.findall(expected_issue[key], str(value)):
|
2016-09-02 20:37:24 +00:00
|
|
|
return False
|
2019-07-15 21:58:05 +00:00
|
|
|
expected_issue.pop(key)
|
2016-09-02 20:37:24 +00:00
|
|
|
|
2019-07-15 21:58:05 +00:00
|
|
|
if "can-happen-several-times" in expected_issue:
|
|
|
|
expected_issue.pop("can-happen-several-times")
|
|
|
|
return not bool(expected_issue)
|
2016-09-02 20:37:24 +00:00
|
|
|
|
2019-03-20 00:21:09 +00:00
|
|
|
    def check_reported_issues(self, expected_issues):
        """Match the run's reports against @expected_issues.

        Returns a (criticals, remaining_expected_issues, expected_retcode)
        tuple: criticals is the list of unexpected critical reports (or
        None when there are none), remaining_expected_issues is what was
        not matched, and expected_retcode the acceptable return codes.
        """
        ret = []
        expected_retcode = [0]
        for report in self.reports:
            found = None
            for expected_issue in expected_issues:
                # Pass a copy: report_matches_expected_issues consumes it.
                if self.report_matches_expected_issues(report,
                                                       expected_issue.copy()):
                    found = expected_issue
                    break

            if found is not None:
                if not found.get('can-happen-several-times', False):
                    expected_issues.remove(found)
                if report['level'] == 'critical':
                    # NOTE(review): 18 appears to be gst-validate's exit code
                    # when criticals were reported — confirm against the tool.
                    # A 'sometimes' critical keeps 0 as acceptable as well.
                    if found.get('sometimes', True) and isinstance(expected_retcode, list):
                        expected_retcode.append(18)
                    else:
                        expected_retcode = [18]
            elif report['level'] == 'critical':
                # Critical report with no matching expectation: a failure.
                ret.append(report)

        if not ret:
            return None, expected_issues, expected_retcode

        return ret, expected_issues, expected_retcode
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2019-03-20 00:21:09 +00:00
|
|
|
    def check_expected_issue(self, expected_issue):
        """Verify @expected_issue's stack-trace symbols and sub-issues.

        Returns (msg, res): res is False when a mandatory expectation was
        not satisfied; msg describes what went wrong or was missing.
        """
        res = True
        msg = ''
        expected_symbols = expected_issue.get('stacktrace_symbols')
        if expected_symbols:
            trace_gatherer = BackTraceGenerator.get_default()
            stack_trace = trace_gatherer.get_trace(self)

            if stack_trace:
                if not isinstance(expected_symbols, list):
                    expected_symbols = [expected_symbols]

                not_found_symbols = [s for s in expected_symbols
                                     if s not in stack_trace]
                if not_found_symbols:
                    msg = " Expected symbols '%s' not found in stack trace " % (
                        not_found_symbols)
                    res = False
            else:
                # Without a trace the symbols cannot be verified; note it
                # but do not fail the expectation on that ground alone.
                msg += " No stack trace available, could not verify symbols "

        # Also verify any nested issue expectations; only those not marked
        # 'sometimes' are mandatory.
        _, not_found_expected_issues, _ = self.check_reported_issues(expected_issue.get('issues', []))
        if not_found_expected_issues:
            mandatory_failures = [f for f in not_found_expected_issues
                                  if not f.get('sometimes', True)]
            if mandatory_failures:
                msg = " (Expected issues not found: %s) " % mandatory_failures
                res = False

        return msg, res
|
2019-03-17 17:39:38 +00:00
|
|
|
|
|
|
|
def check_expected_timeout(self, expected_timeout):
|
|
|
|
msg = "Expected timeout happened. "
|
|
|
|
result = Result.PASSED
|
|
|
|
message = expected_timeout.get('message')
|
|
|
|
if message:
|
|
|
|
if not re.findall(message, self.message):
|
|
|
|
result = Result.FAILED
|
|
|
|
msg = "Expected timeout message: %s got %s " % (
|
|
|
|
message, self.message)
|
|
|
|
|
2019-03-20 00:21:09 +00:00
|
|
|
stack_msg, stack_res = self.check_expected_issue(expected_timeout)
|
2019-03-17 17:39:38 +00:00
|
|
|
if not stack_res:
|
|
|
|
result = Result.TIMEOUT
|
|
|
|
msg += stack_msg
|
2016-11-26 13:25:43 +00:00
|
|
|
|
|
|
|
return result, msg
|
|
|
|
|
2014-01-09 08:28:02 +00:00
|
|
|
    def check_results(self):
        """Compute the final Result of the run.

        Combines the subprocess return code, the reports received over the
        socket and the declared expected issues (timeouts, signals, return
        codes) into a single Result + message via set_result().
        """
        # Already decided (e.g. by the listener or an earlier check).
        if self.result in [Result.FAILED, Result.PASSED, Result.SKIPPED]:
            return

        self.debug("%s returncode: %s", self, self.process.returncode)
        # Work on a deep copy: matching consumes the expectation dicts.
        expected_issues = copy.deepcopy(self.expected_issues)
        if self.options.rr:
            # signal.SIGPPIPE is 13 but it sometimes isn't present in python for some reason.
            expected_issues.append({"returncode": -13, "sometimes": True})
        self.criticals, not_found_expected_issues, expected_returncode = self.check_reported_issues(expected_issues)
        expected_timeout = None
        expected_signal = None
        # Extract return-code / signal / timeout expectations from the
        # issues that were not matched by any report.
        for i, f in enumerate(not_found_expected_issues):
            returncode = f.get('returncode', [])
            if not isinstance(returncode, list):
                returncode = [returncode]

            if f.get('signame'):
                signames = f['signame']
                if not isinstance(signames, list):
                    signames = [signames]

                returncode = [EXITING_SIGNALS[signame] for signame in signames]

            if returncode:
                # A 'sometimes' expectation also accepts a clean exit.
                if 'sometimes' in f:
                    returncode.append(0)
                expected_returncode = returncode
                expected_signal = f
            elif f.get("timeout"):
                expected_timeout = f

        # Return-code/signal expectations were consumed above; keep only
        # genuine issue expectations for the "not found" reporting below.
        not_found_expected_issues = [f for f in not_found_expected_issues
                                     if not f.get('returncode') and not f.get('signame')]

        msg = ""
        result = Result.PASSED
        if self.result == Result.TIMEOUT:
            with open(self.logfile) as f:
                # A trapped fatal signal in the log means a crash, not a
                # plain timeout.
                signal_fault_info = self.fault_sig_regex.findall(f.read())
                if signal_fault_info:
                    result = Result.FAILED
                    msg = signal_fault_info[0]
                elif expected_timeout:
                    not_found_expected_issues.remove(expected_timeout)
                    result, msg = self.check_expected_timeout(expected_timeout)
                else:
                    # Unexpected timeout: keep the TIMEOUT result as is.
                    return
        elif self.process.returncode in EXITING_SIGNALS:
            msg = "Application exited with signal %s" % (
                EXITING_SIGNALS[self.process.returncode])
            if self.process.returncode not in expected_returncode:
                result = Result.FAILED
            else:
                if expected_signal:
                    stack_msg, stack_res = self.check_expected_issue(
                        expected_signal)
                    if not stack_res:
                        msg += stack_msg
                        result = Result.FAILED
            self.add_stack_trace_to_logfile()
        elif self.process.returncode == VALGRIND_ERROR_CODE:
            msg = "Valgrind reported errors "
            result = Result.FAILED
        elif self.process.returncode not in expected_returncode:
            msg = "Application returned %s " % self.process.returncode
            if expected_returncode != [0]:
                msg += "(expected %s) " % expected_returncode
            result = Result.FAILED

        if self.criticals:
            msg += "(critical errors: [%s]) " % ', '.join(set([c['summary']
                                                               for c in self.criticals]))
            result = Result.FAILED

        if not_found_expected_issues:
            # Only expectations not flagged 'sometimes' are mandatory.
            mandatory_failures = [f for f in not_found_expected_issues
                                  if not f.get('sometimes', True)]

            if mandatory_failures:
                msg += " (Expected errors not found: %s) " % mandatory_failures
                result = Result.FAILED
        elif self.expected_issues:
            # Every declared issue happened: a known error, not a failure.
            msg += ' %s(Expected errors occurred: %s)%s' % (Colors.OKBLUE,
                                                            self.expected_issues,
                                                            Colors.ENDC)
            result = Result.KNOWN_ERROR

        if result == Result.PASSED:
            for report in self.reports:
                if report["level"] == "expected":
                    result = Result.KNOWN_ERROR
                    break

        self.set_result(result, msg.strip())
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2019-03-18 19:52:11 +00:00
|
|
|
def _generate_expected_issues(self):
|
2019-03-17 17:39:38 +00:00
|
|
|
res = ""
|
|
|
|
self.criticals = self.criticals or []
|
2019-03-16 02:46:00 +00:00
|
|
|
if self.result == Result.TIMEOUT:
|
2019-03-18 19:52:11 +00:00
|
|
|
res += """ {
|
|
|
|
'timeout': True,
|
|
|
|
'sometimes': True,
|
|
|
|
},"""
|
2019-03-16 02:46:00 +00:00
|
|
|
|
|
|
|
for report in self.criticals:
|
2019-03-18 19:52:11 +00:00
|
|
|
res += "\n%s{" % (" " * 12)
|
2019-03-16 02:46:00 +00:00
|
|
|
|
|
|
|
for key, value in report.items():
|
|
|
|
if key == "type":
|
|
|
|
continue
|
2019-03-17 17:39:38 +00:00
|
|
|
if value is None:
|
|
|
|
continue
|
2019-03-16 15:21:34 +00:00
|
|
|
res += '\n%s%s"%s": "%s",' % (
|
2019-03-18 19:52:11 +00:00
|
|
|
" " * 16, "# " if key == "details" else "",
|
2019-03-16 15:21:34 +00:00
|
|
|
key, value.replace('\n', '\\n'))
|
2019-03-16 02:46:00 +00:00
|
|
|
|
2019-03-18 19:52:11 +00:00
|
|
|
res += "\n%s}," % (" " * 12)
|
2019-03-16 02:46:00 +00:00
|
|
|
|
|
|
|
return res
|
|
|
|
|
2015-04-20 08:53:29 +00:00
|
|
|
def get_valgrind_suppressions(self):
|
|
|
|
result = super(GstValidateTest, self).get_valgrind_suppressions()
|
2019-03-28 13:08:16 +00:00
|
|
|
result.extend(utils.get_gst_build_valgrind_suppressions())
|
2017-02-03 14:02:49 +00:00
|
|
|
return result
|
2015-03-19 16:22:26 +00:00
|
|
|
|
2014-01-30 11:42:25 +00:00
|
|
|
|
2014-07-16 10:03:14 +00:00
|
|
|
class GstValidateEncodingTestInterface(object):
    # Default slack allowed between the transcoded file's duration and the
    # source media duration (used as _duration_tolerance fallback).
    DURATION_TOLERANCE = GST_SECOND / 4
|
|
|
|
|
|
|
|
def __init__(self, combination, media_descriptor, duration_tolerance=None):
|
|
|
|
super(GstValidateEncodingTestInterface, self).__init__()
|
|
|
|
|
|
|
|
self.media_descriptor = media_descriptor
|
|
|
|
self.combination = combination
|
|
|
|
self.dest_file = ""
|
2014-01-30 11:42:25 +00:00
|
|
|
|
2014-07-16 10:03:14 +00:00
|
|
|
self._duration_tolerance = duration_tolerance
|
|
|
|
if duration_tolerance is None:
|
|
|
|
self._duration_tolerance = self.DURATION_TOLERANCE
|
|
|
|
|
|
|
|
def get_current_size(self):
|
2014-02-06 16:23:10 +00:00
|
|
|
try:
|
2016-11-04 21:04:37 +00:00
|
|
|
size = os.stat(urllib.parse.urlparse(self.dest_file).path).st_size
|
2014-10-24 12:38:00 +00:00
|
|
|
except OSError:
|
2014-07-16 10:03:14 +00:00
|
|
|
return None
|
2014-02-06 16:23:10 +00:00
|
|
|
|
2014-01-30 11:42:25 +00:00
|
|
|
self.debug("Size: %s" % size)
|
|
|
|
return size
|
2014-01-09 08:28:02 +00:00
|
|
|
|
2014-07-16 10:03:14 +00:00
|
|
|
def _get_profile_full(self, muxer, venc, aenc, video_restriction=None,
|
2014-10-24 12:23:52 +00:00
|
|
|
audio_restriction=None, audio_presence=0,
|
2020-03-19 21:48:08 +00:00
|
|
|
video_presence=0, variable_framerate=False):
|
2016-12-22 13:07:58 +00:00
|
|
|
ret = ""
|
2014-07-16 10:03:14 +00:00
|
|
|
if muxer:
|
|
|
|
ret += muxer
|
|
|
|
ret += ":"
|
|
|
|
if venc:
|
|
|
|
if video_restriction is not None:
|
|
|
|
ret = ret + video_restriction + '->'
|
|
|
|
ret += venc
|
2020-03-19 21:48:08 +00:00
|
|
|
props = ""
|
2014-07-16 10:03:14 +00:00
|
|
|
if video_presence:
|
2020-03-19 21:48:08 +00:00
|
|
|
props += 'presence=%s,' % str(video_presence)
|
|
|
|
if variable_framerate:
|
|
|
|
props += 'variable-framerate=true,'
|
|
|
|
if props:
|
|
|
|
ret = ret + '|' + props[:-1]
|
2014-07-16 10:03:14 +00:00
|
|
|
if aenc:
|
|
|
|
ret += ":"
|
|
|
|
if audio_restriction is not None:
|
|
|
|
ret = ret + audio_restriction + '->'
|
|
|
|
ret += aenc
|
|
|
|
if audio_presence:
|
|
|
|
ret = ret + '|' + str(audio_presence)
|
|
|
|
|
|
|
|
return ret.replace("::", ":")
|
|
|
|
|
2020-03-19 21:48:08 +00:00
|
|
|
def get_profile(self, video_restriction=None, audio_restriction=None,
|
|
|
|
variable_framerate=False):
|
2014-07-16 10:16:03 +00:00
|
|
|
vcaps = self.combination.get_video_caps()
|
|
|
|
acaps = self.combination.get_audio_caps()
|
2019-09-24 17:23:49 +00:00
|
|
|
if video_restriction is None:
|
|
|
|
video_restriction = self.combination.video_restriction
|
|
|
|
if audio_restriction is None:
|
|
|
|
audio_restriction = self.combination.audio_restriction
|
2014-07-16 10:03:14 +00:00
|
|
|
if self.media_descriptor is not None:
|
2020-03-21 14:57:51 +00:00
|
|
|
if self.combination.video == "theora":
|
|
|
|
# Theoraenc doesn't support variable framerate, make sure to avoid them
|
|
|
|
framerate = self.media_descriptor.get_framerate()
|
|
|
|
if framerate == Fraction(0, 1):
|
|
|
|
framerate = Fraction(30, 1)
|
|
|
|
restriction = utils.GstCaps.new_from_str(video_restriction or "video/x-raw")
|
|
|
|
for struct, _ in restriction:
|
|
|
|
if struct.get("framerate") is None:
|
|
|
|
struct.set("framerate", struct.FRACTION_TYPE, framerate)
|
|
|
|
video_restriction = str(restriction)
|
|
|
|
|
|
|
|
video_presence = self.media_descriptor.get_num_tracks("video")
|
|
|
|
if video_presence == 0:
|
2014-07-16 10:03:14 +00:00
|
|
|
vcaps = None
|
|
|
|
|
2020-03-21 14:57:51 +00:00
|
|
|
audio_presence = self.media_descriptor.get_num_tracks("audio")
|
|
|
|
if audio_presence == 0:
|
2014-07-16 10:03:14 +00:00
|
|
|
acaps = None
|
|
|
|
|
2014-07-16 10:16:03 +00:00
|
|
|
return self._get_profile_full(self.combination.get_muxer_caps(),
|
2014-07-16 10:03:14 +00:00
|
|
|
vcaps, acaps,
|
2020-03-21 14:57:51 +00:00
|
|
|
audio_presence=audio_presence,
|
|
|
|
video_presence=video_presence,
|
2014-07-16 10:03:14 +00:00
|
|
|
video_restriction=video_restriction,
|
2020-03-19 21:48:08 +00:00
|
|
|
audio_restriction=audio_restriction,
|
|
|
|
variable_framerate=variable_framerate)
|
2014-07-16 10:03:14 +00:00
|
|
|
|
2014-07-16 11:54:54 +00:00
|
|
|
def _clean_caps(self, caps):
|
|
|
|
"""
|
|
|
|
Returns a list of key=value or structure name, without "(types)" or ";" or ","
|
|
|
|
"""
|
|
|
|
return re.sub(r"\(.+?\)\s*| |;", '', caps).split(',')
|
|
|
|
|
2018-04-21 02:57:32 +00:00
|
|
|
# pylint: disable=E1101
|
2014-07-16 11:54:54 +00:00
|
|
|
def _has_caps_type_variant(self, c, ccaps):
|
|
|
|
"""
|
|
|
|
Handle situations where we can have application/ogg or video/ogg or
|
|
|
|
audio/ogg
|
|
|
|
"""
|
|
|
|
has_variant = False
|
|
|
|
media_type = re.findall("application/|video/|audio/", c)
|
|
|
|
if media_type:
|
|
|
|
media_type = media_type[0].replace('/', '')
|
|
|
|
possible_mtypes = ["application", "video", "audio"]
|
|
|
|
possible_mtypes.remove(media_type)
|
|
|
|
for tmptype in possible_mtypes:
|
|
|
|
possible_c_variant = c.replace(media_type, tmptype)
|
|
|
|
if possible_c_variant in ccaps:
|
2014-10-24 12:23:52 +00:00
|
|
|
self.info(
|
2015-03-18 10:05:08 +00:00
|
|
|
"Found %s in %s, good enough!", possible_c_variant, ccaps)
|
2014-07-16 11:54:54 +00:00
|
|
|
has_variant = True
|
|
|
|
|
|
|
|
return has_variant
|
|
|
|
|
2018-04-21 02:57:32 +00:00
|
|
|
    # pylint: disable=E1101
    def run_iqa_test(self, reference_file_uri):
        """Run an IQA (image quality assessment) comparison.

        Launches a gst-validate pipeline comparing the produced
        self.dest_file against @reference_file_uri using the `iqa`
        element; does nothing when the element is unavailable.
        """
        if not GstValidateBaseTestManager.has_feature('iqa'):
            self.debug('Iqa element not present, not running extra test.')
            return

        pipeline_desc = """
            uridecodebin uri=%s !
                iqa name=iqa do-dssim=true dssim-error-threshold=1.0 ! fakesink
            uridecodebin uri=%s ! iqa.
        """ % (reference_file_uri, self.dest_file)
        pipeline_desc = pipeline_desc.replace("\n", "")

        command = [GstValidateBaseTestManager.COMMAND] + \
            shlex.split(pipeline_desc)
        msg = "## Running IQA tests on results of: " \
            + "%s\n### Command: \n```\n%s\n```\n" % (
                self.classname, ' '.join(command))
        if not self.options.redirect_logs:
            self.out.write(msg)
            self.out.flush()
        else:
            printc(msg, Colors.OKBLUE)

        # The iqa pipeline reuses the test's log sink and environment; a
        # failure surfaces through self.process.returncode.
        self.process = subprocess.Popen(command,
                                        stderr=self.out,
                                        stdout=self.out,
                                        env=self.proc_env,
                                        cwd=self.workdir)
        self.process.wait()
|
|
|
|
|
2014-07-16 10:03:14 +00:00
|
|
|
def check_encoded_file(self):
|
2014-10-24 12:23:52 +00:00
|
|
|
result_descriptor = GstValidateMediaDescriptor.new_from_uri(
|
|
|
|
self.dest_file)
|
2014-09-19 07:13:13 +00:00
|
|
|
if result_descriptor is None:
|
|
|
|
return (Result.FAILED, "Could not discover encoded file %s"
|
|
|
|
% self.dest_file)
|
|
|
|
|
2014-07-16 11:54:54 +00:00
|
|
|
duration = result_descriptor.get_duration()
|
2014-07-16 10:03:14 +00:00
|
|
|
orig_duration = self.media_descriptor.get_duration()
|
|
|
|
tolerance = self._duration_tolerance
|
|
|
|
|
|
|
|
if orig_duration - tolerance >= duration <= orig_duration + tolerance:
|
2014-07-16 11:54:54 +00:00
|
|
|
os.remove(result_descriptor.get_path())
|
2019-10-04 12:59:57 +00:00
|
|
|
self.add_report(
|
|
|
|
{
|
|
|
|
'type': 'report',
|
|
|
|
'issue-id': 'transcoded-file-wrong-duration',
|
|
|
|
'summary': 'The duration of a transcoded file doesn\'t match the duration of the original file',
|
|
|
|
'level': 'critical',
|
|
|
|
'detected-on': 'pipeline',
|
|
|
|
'details': "Duration of encoded file is " " wrong (%s instead of %s)" % (
|
|
|
|
utils.TIME_ARGS(duration), utils.TIME_ARGS(orig_duration))
|
|
|
|
}
|
|
|
|
)
|
2014-07-16 10:03:14 +00:00
|
|
|
else:
|
2014-07-16 11:54:54 +00:00
|
|
|
all_tracks_caps = result_descriptor.get_tracks_caps()
|
|
|
|
container_caps = result_descriptor.get_caps()
|
|
|
|
if container_caps:
|
|
|
|
all_tracks_caps.insert(0, ("container", container_caps))
|
|
|
|
|
|
|
|
for track_type, caps in all_tracks_caps:
|
|
|
|
ccaps = self._clean_caps(caps)
|
|
|
|
wanted_caps = self.combination.get_caps(track_type)
|
|
|
|
cwanted_caps = self._clean_caps(wanted_caps)
|
|
|
|
|
2014-10-24 12:38:00 +00:00
|
|
|
if wanted_caps is None:
|
2014-07-16 11:54:54 +00:00
|
|
|
os.remove(result_descriptor.get_path())
|
2019-10-04 12:59:57 +00:00
|
|
|
self.add_report(
|
|
|
|
{
|
|
|
|
'type': 'report',
|
|
|
|
'issue-id': 'transcoded-file-wrong-stream-type',
|
|
|
|
'summary': 'Expected stream types during transcoding do not match expectations',
|
|
|
|
'level': 'critical',
|
|
|
|
'detected-on': 'pipeline',
|
|
|
|
'details': "Found a track of type %s in the encoded files"
|
|
|
|
" but none where wanted in the encoded profile: %s" % (
|
|
|
|
track_type, self.combination)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
return
|
2014-07-16 11:54:54 +00:00
|
|
|
|
|
|
|
for c in cwanted_caps:
|
|
|
|
if c not in ccaps:
|
2014-10-24 12:23:52 +00:00
|
|
|
if not self._has_caps_type_variant(c, ccaps):
|
2014-07-16 11:54:54 +00:00
|
|
|
os.remove(result_descriptor.get_path())
|
2019-10-04 12:59:57 +00:00
|
|
|
self.add_report(
|
|
|
|
{
|
|
|
|
'type': 'report',
|
|
|
|
'issue-id': 'transcoded-file-wrong-caps',
|
|
|
|
'summary': 'Expected stream caps during transcoding do not match expectations',
|
|
|
|
'level': 'critical',
|
|
|
|
'detected-on': 'pipeline',
|
2020-05-03 05:20:19 +00:00
|
|
|
'details': "Field: %s (from %s) not in caps of the outputted file %s" % (
|
2019-10-04 12:59:57 +00:00
|
|
|
wanted_caps, c, ccaps)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
return
|
2014-07-16 11:54:54 +00:00
|
|
|
|
|
|
|
os.remove(result_descriptor.get_path())
|
2014-07-16 10:03:14 +00:00
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2014-01-09 15:57:54 +00:00
|
|
|
class TestsManager(Loggable):
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
""" A class responsible for managing tests. """
|
|
|
|
|
2017-04-30 17:35:29 +00:00
|
|
|
name = "base"
|
2017-07-13 20:43:32 +00:00
|
|
|
loading_testsuite = None
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def __init__(self):
|
2014-01-09 15:57:54 +00:00
|
|
|
|
|
|
|
Loggable.__init__(self)
|
|
|
|
|
2014-07-23 08:54:37 +00:00
|
|
|
self.tests = []
|
|
|
|
self.unwanted_tests = []
|
2013-12-31 10:45:07 +00:00
|
|
|
self.options = None
|
|
|
|
self.args = None
|
|
|
|
self.reporter = None
|
|
|
|
self.wanted_tests_patterns = []
|
2014-01-22 23:15:54 +00:00
|
|
|
self.blacklisted_tests_patterns = []
|
2014-06-26 10:42:38 +00:00
|
|
|
self._generators = []
|
2015-03-13 17:09:08 +00:00
|
|
|
self.check_testslist = True
|
2015-03-14 15:08:12 +00:00
|
|
|
self.all_tests = None
|
2019-03-18 19:52:11 +00:00
|
|
|
self.expected_issues = {}
|
2016-11-16 13:47:21 +00:00
|
|
|
self.blacklisted_tests = []
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-01-09 14:17:53 +00:00
|
|
|
def init(self):
|
2017-04-30 17:35:29 +00:00
|
|
|
return True
|
2014-01-09 14:17:53 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def list_tests(self):
|
2017-02-21 16:39:37 +00:00
|
|
|
return sorted(list(self.tests), key=lambda x: x.classname)
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2018-04-13 02:11:04 +00:00
|
|
|
def find_tests(self, classname):
|
|
|
|
regex = re.compile(classname)
|
|
|
|
return [test for test in self.list_tests() if regex.findall(test.classname)]
|
|
|
|
|
2019-03-18 19:52:11 +00:00
|
|
|
def add_expected_issues(self, expected_issues):
|
|
|
|
for bugid, failure_def in list(expected_issues.items()):
|
|
|
|
tests_regexes = []
|
|
|
|
for test_name_regex in failure_def['tests']:
|
|
|
|
regex = re.compile(test_name_regex)
|
|
|
|
tests_regexes.append(regex)
|
|
|
|
for test in self.tests:
|
|
|
|
if regex.findall(test.classname):
|
2021-10-08 12:09:21 +00:00
|
|
|
max_retries = failure_def.get('allow_flakiness', failure_def.get('max_retries'))
|
|
|
|
if max_retries:
|
|
|
|
test.max_retries = int(max_retries)
|
|
|
|
self.debug(f"{test.classname} allow {test.max_retries}")
|
2019-03-27 15:36:16 +00:00
|
|
|
else:
|
2019-04-12 14:13:15 +00:00
|
|
|
for issue in failure_def['issues']:
|
|
|
|
issue['bug'] = bugid
|
2019-03-27 15:36:16 +00:00
|
|
|
test.expected_issues.extend(failure_def['issues'])
|
|
|
|
self.debug("%s added expected issues from %s" % (
|
|
|
|
test.classname, bugid))
|
2019-03-18 19:52:11 +00:00
|
|
|
failure_def['tests'] = tests_regexes
|
|
|
|
|
|
|
|
self.expected_issues.update(expected_issues)
|
2016-09-02 20:37:24 +00:00
|
|
|
|
2014-01-22 23:15:54 +00:00
|
|
|
def add_test(self, test):
|
2017-07-13 20:43:32 +00:00
|
|
|
if test.generator is None:
|
|
|
|
test.classname = self.loading_testsuite + '.' + test.classname
|
2019-03-18 19:52:11 +00:00
|
|
|
|
|
|
|
for bugid, failure_def in list(self.expected_issues.items()):
|
2019-04-12 14:13:15 +00:00
|
|
|
failure_def['bug'] = bugid
|
2019-03-18 19:52:11 +00:00
|
|
|
for regex in failure_def['tests']:
|
|
|
|
if regex.findall(test.classname):
|
2021-10-08 12:09:21 +00:00
|
|
|
max_retries = failure_def.get('allow_flakiness', failure_def.get('max_retries'))
|
|
|
|
if max_retries:
|
|
|
|
test.max_retries = int(max_retries)
|
2021-10-08 15:09:47 +00:00
|
|
|
self.debug(f"{test.classname} allow {test.max_retries} retries.")
|
2019-03-27 15:36:16 +00:00
|
|
|
else:
|
2019-04-12 14:13:15 +00:00
|
|
|
for issue in failure_def['issues']:
|
|
|
|
issue['bug'] = bugid
|
2019-03-27 15:36:16 +00:00
|
|
|
test.expected_issues.extend(failure_def['issues'])
|
|
|
|
self.debug("%s added expected issues from %s" % (
|
|
|
|
test.classname, bugid))
|
2016-09-02 20:37:24 +00:00
|
|
|
|
2014-01-22 23:15:54 +00:00
|
|
|
if self._is_test_wanted(test):
|
2014-10-24 12:38:00 +00:00
|
|
|
if test not in self.tests:
|
2014-07-23 08:54:37 +00:00
|
|
|
self.tests.append(test)
|
2014-01-22 23:15:54 +00:00
|
|
|
else:
|
2014-10-24 12:38:00 +00:00
|
|
|
if test not in self.tests:
|
2014-07-23 08:54:37 +00:00
|
|
|
self.unwanted_tests.append(test)
|
2014-01-22 23:15:54 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def get_tests(self):
|
|
|
|
return self.tests
|
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
def populate_testsuite(self):
|
|
|
|
pass
|
|
|
|
|
|
|
|
def add_generators(self, generators):
|
|
|
|
"""
|
|
|
|
@generators: A list of, or one single #TestsGenerator to be used to generate tests
|
|
|
|
"""
|
2017-07-13 20:43:32 +00:00
|
|
|
if not isinstance(generators, list):
|
|
|
|
generators = [generators]
|
|
|
|
self._generators.extend(generators)
|
|
|
|
for generator in generators:
|
|
|
|
generator.testsuite = self.loading_testsuite
|
2014-06-26 10:42:38 +00:00
|
|
|
|
2014-11-28 23:03:04 +00:00
|
|
|
self._generators = list(set(self._generators))
|
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
def get_generators(self):
|
|
|
|
return self._generators
|
|
|
|
|
2014-11-28 23:03:04 +00:00
|
|
|
def _add_blacklist(self, blacklisted_tests):
|
|
|
|
if not isinstance(blacklisted_tests, list):
|
|
|
|
blacklisted_tests = [blacklisted_tests]
|
|
|
|
|
|
|
|
for patterns in blacklisted_tests:
|
|
|
|
for pattern in patterns.split(","):
|
|
|
|
self.blacklisted_tests_patterns.append(re.compile(pattern))
|
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
def set_default_blacklist(self, default_blacklist):
|
2017-07-13 20:43:32 +00:00
|
|
|
for test_regex, reason in default_blacklist:
|
|
|
|
if not test_regex.startswith(self.loading_testsuite + '.'):
|
|
|
|
test_regex = self.loading_testsuite + '.' + test_regex
|
|
|
|
self.blacklisted_tests.append((test_regex, reason))
|
2019-04-17 00:31:30 +00:00
|
|
|
self._add_blacklist(test_regex)
|
2014-06-26 10:42:38 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def add_options(self, parser):
|
|
|
|
""" Add more arguments. """
|
|
|
|
pass
|
|
|
|
|
|
|
|
def set_settings(self, options, args, reporter):
|
|
|
|
""" Set properties after options parsing. """
|
|
|
|
self.options = options
|
|
|
|
self.args = args
|
|
|
|
self.reporter = reporter
|
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
self.populate_testsuite()
|
2015-04-27 12:04:05 +00:00
|
|
|
|
|
|
|
if self.options.valgrind:
|
|
|
|
self.print_valgrind_bugs()
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
if options.wanted_tests:
|
2014-01-22 23:15:54 +00:00
|
|
|
for patterns in options.wanted_tests:
|
|
|
|
for pattern in patterns.split(","):
|
|
|
|
self.wanted_tests_patterns.append(re.compile(pattern))
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-01-22 23:15:54 +00:00
|
|
|
if options.blacklisted_tests:
|
|
|
|
for patterns in options.blacklisted_tests:
|
2014-11-28 23:03:04 +00:00
|
|
|
self._add_blacklist(patterns)
|
2014-01-22 23:15:54 +00:00
|
|
|
|
2019-04-17 00:31:30 +00:00
|
|
|
def check_blacklists(self):
|
2016-11-16 13:47:21 +00:00
|
|
|
if self.options.check_bugs_status:
|
|
|
|
if not check_bugs_resolution(self.blacklisted_tests):
|
|
|
|
return False
|
|
|
|
|
2019-04-17 00:31:30 +00:00
|
|
|
return True
|
|
|
|
|
|
|
|
def log_blacklists(self):
|
|
|
|
if self.blacklisted_tests:
|
|
|
|
self.info("Currently 'hardcoded' %s blacklisted tests:" %
|
|
|
|
self.name)
|
|
|
|
|
2016-11-16 13:47:21 +00:00
|
|
|
for name, bug in self.blacklisted_tests:
|
|
|
|
if not self.options.check_bugs_status:
|
2019-01-30 00:24:16 +00:00
|
|
|
self.info(" + %s --> bug: %s" % (name, bug))
|
2016-11-16 13:47:21 +00:00
|
|
|
|
2019-03-18 19:52:11 +00:00
|
|
|
def check_expected_issues(self):
|
|
|
|
if not self.expected_issues or not self.options.check_bugs_status:
|
2016-11-24 13:29:53 +00:00
|
|
|
return True
|
|
|
|
|
2019-03-17 00:37:16 +00:00
|
|
|
bugs_definitions = defaultdict(list)
|
2019-03-18 19:52:11 +00:00
|
|
|
for bug, failure_def in list(self.expected_issues.items()):
|
|
|
|
tests_names = '|'.join(
|
|
|
|
[regex.pattern for regex in failure_def['tests']])
|
|
|
|
bugs_definitions[tests_names].extend([bug])
|
2016-11-24 13:29:53 +00:00
|
|
|
|
|
|
|
return check_bugs_resolution(bugs_definitions.items())
|
|
|
|
|
2014-01-22 23:15:54 +00:00
|
|
|
def _check_blacklisted(self, test):
|
|
|
|
for pattern in self.blacklisted_tests_patterns:
|
|
|
|
if pattern.findall(test.classname):
|
2016-03-23 18:34:10 +00:00
|
|
|
self.info("%s is blacklisted by %s", test.classname, pattern)
|
2014-01-22 23:15:54 +00:00
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2016-11-16 13:48:26 +00:00
|
|
|
def _check_whitelisted(self, test):
|
|
|
|
for pattern in self.wanted_tests_patterns:
|
|
|
|
if pattern.findall(test.classname):
|
2016-12-22 13:08:31 +00:00
|
|
|
if self._check_blacklisted(test):
|
|
|
|
# If explicitly white listed that specific test
|
|
|
|
# bypass the blacklisting
|
|
|
|
if pattern.pattern != test.classname:
|
|
|
|
return False
|
2016-11-16 13:48:26 +00:00
|
|
|
return True
|
2016-12-22 13:08:31 +00:00
|
|
|
return False
|
2016-11-16 13:48:26 +00:00
|
|
|
|
2016-12-23 17:58:56 +00:00
|
|
|
def _check_duration(self, test):
|
|
|
|
if test.duration > 0 and int(self.options.long_limit) < int(test.duration):
|
|
|
|
self.info("Not activating %s as its duration (%d) is superior"
|
|
|
|
" than the long limit (%d)" % (test, test.duration,
|
|
|
|
int(self.options.long_limit)))
|
|
|
|
return False
|
|
|
|
|
|
|
|
return True
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def _is_test_wanted(self, test):
|
2016-11-16 13:48:26 +00:00
|
|
|
if self._check_whitelisted(test):
|
2016-12-23 17:58:56 +00:00
|
|
|
if not self._check_duration(test):
|
|
|
|
return False
|
2016-11-16 13:48:26 +00:00
|
|
|
return True
|
|
|
|
|
2014-01-22 23:15:54 +00:00
|
|
|
if self._check_blacklisted(test):
|
|
|
|
return False
|
|
|
|
|
2016-12-23 17:58:56 +00:00
|
|
|
if not self._check_duration(test):
|
2014-05-07 09:30:09 +00:00
|
|
|
return False
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
if not self.wanted_tests_patterns:
|
|
|
|
return True
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
return False
|
|
|
|
|
2014-01-13 16:31:57 +00:00
|
|
|
def needs_http_server(self):
|
|
|
|
return False
|
|
|
|
|
2015-04-27 12:04:05 +00:00
|
|
|
def print_valgrind_bugs(self):
|
|
|
|
pass
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
class TestsGenerator(Loggable):
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
def __init__(self, name, test_manager, tests=[]):
|
|
|
|
Loggable.__init__(self)
|
|
|
|
self.name = name
|
|
|
|
self.test_manager = test_manager
|
2017-07-13 20:43:32 +00:00
|
|
|
self.testsuite = None
|
2014-06-26 10:42:38 +00:00
|
|
|
self._tests = {}
|
|
|
|
for test in tests:
|
|
|
|
self._tests[test.classname] = test
|
|
|
|
|
|
|
|
def generate_tests(self, *kwargs):
|
|
|
|
"""
|
|
|
|
Method that generates tests
|
|
|
|
"""
|
|
|
|
return list(self._tests.values())
|
|
|
|
|
|
|
|
def add_test(self, test):
|
2017-07-13 20:43:32 +00:00
|
|
|
test.generator = self
|
|
|
|
test.classname = self.testsuite + '.' + test.classname
|
2014-06-26 10:42:38 +00:00
|
|
|
self._tests[test.classname] = test
|
|
|
|
|
|
|
|
|
|
|
|
class GstValidateTestsGenerator(TestsGenerator):
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
def populate_tests(self, uri_minfo_special_scenarios, scenarios):
|
|
|
|
pass
|
|
|
|
|
|
|
|
def generate_tests(self, uri_minfo_special_scenarios, scenarios):
|
|
|
|
self.populate_tests(uri_minfo_special_scenarios, scenarios)
|
|
|
|
return super(GstValidateTestsGenerator, self).generate_tests()
|
|
|
|
|
|
|
|
|
2014-01-09 14:17:53 +00:00
|
|
|
class _TestsLauncher(Loggable):
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2019-01-26 01:13:28 +00:00
|
|
|
def __init__(self):
|
2014-01-09 14:17:53 +00:00
|
|
|
|
|
|
|
Loggable.__init__(self)
|
|
|
|
|
2014-01-14 17:07:46 +00:00
|
|
|
self.options = None
|
2013-12-31 10:45:07 +00:00
|
|
|
self.testers = []
|
|
|
|
self.tests = []
|
|
|
|
self.reporter = None
|
|
|
|
self._list_testers()
|
2015-03-14 15:08:12 +00:00
|
|
|
self.all_tests = None
|
2013-12-31 10:45:07 +00:00
|
|
|
self.wanted_tests_patterns = []
|
|
|
|
|
2017-12-02 12:36:27 +00:00
|
|
|
self.queue = queue.Queue()
|
|
|
|
self.jobs = []
|
|
|
|
self.total_num_tests = 0
|
2019-12-30 15:57:57 +00:00
|
|
|
self.current_progress = -1
|
2017-12-03 09:42:49 +00:00
|
|
|
self.server = None
|
2019-01-26 01:09:30 +00:00
|
|
|
self.httpsrv = None
|
|
|
|
self.vfb_server = None
|
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
def _list_app_dirs(self):
|
2014-10-23 19:36:03 +00:00
|
|
|
app_dirs = []
|
2019-01-26 01:13:28 +00:00
|
|
|
env_dirs = os.environ["GST_VALIDATE_APPS_DIR"]
|
2014-10-23 19:36:03 +00:00
|
|
|
if env_dirs is not None:
|
2019-08-24 11:57:23 +00:00
|
|
|
for dir_ in env_dirs.split(os.pathsep):
|
2014-10-24 12:23:52 +00:00
|
|
|
app_dirs.append(dir_)
|
2014-10-23 19:36:03 +00:00
|
|
|
|
|
|
|
return app_dirs
|
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
def _exec_app(self, app_dir, env):
|
2014-11-15 17:08:42 +00:00
|
|
|
try:
|
|
|
|
files = os.listdir(app_dir)
|
|
|
|
except OSError as e:
|
|
|
|
self.debug("Could not list %s: %s" % (app_dir, e))
|
|
|
|
files = []
|
|
|
|
for f in files:
|
2014-10-23 19:36:03 +00:00
|
|
|
if f.endswith(".py"):
|
2018-03-23 21:02:43 +00:00
|
|
|
exec(compile(open(os.path.join(app_dir, f)).read(),
|
|
|
|
os.path.join(app_dir, f), 'exec'), env)
|
2014-10-23 19:36:03 +00:00
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
def _exec_apps(self, env):
|
2014-10-23 19:36:03 +00:00
|
|
|
app_dirs = self._list_app_dirs()
|
|
|
|
for app_dir in app_dirs:
|
|
|
|
self._exec_app(app_dir, env)
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def _list_testers(self):
|
|
|
|
env = globals().copy()
|
2014-10-23 19:36:03 +00:00
|
|
|
self._exec_apps(env)
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-04-30 13:40:10 +00:00
|
|
|
testers = [i() for i in utils.get_subclasses(TestsManager, env)]
|
2014-01-09 14:17:53 +00:00
|
|
|
for tester in testers:
|
|
|
|
if tester.init() is True:
|
|
|
|
self.testers.append(tester)
|
|
|
|
else:
|
2014-01-10 09:27:25 +00:00
|
|
|
self.warning("Can not init tester: %s -- PATH is %s"
|
|
|
|
% (tester.name, os.environ["PATH"]))
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
def add_options(self, parser):
|
|
|
|
for tester in self.testers:
|
2014-04-30 13:40:10 +00:00
|
|
|
tester.add_options(parser)
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2016-12-22 13:08:21 +00:00
|
|
|
def _load_testsuite(self, testsuites):
|
|
|
|
exceptions = []
|
|
|
|
for testsuite in testsuites:
|
2014-11-28 23:03:04 +00:00
|
|
|
try:
|
|
|
|
sys.path.insert(0, os.path.dirname(testsuite))
|
2019-11-11 21:57:27 +00:00
|
|
|
spec = importlib.util.spec_from_file_location(os.path.basename(testsuite).replace(".py", ""), testsuite)
|
|
|
|
module = importlib.util.module_from_spec(spec)
|
|
|
|
spec.loader.exec_module(module)
|
|
|
|
return (module, None)
|
2014-11-28 23:03:04 +00:00
|
|
|
except Exception as e:
|
2016-12-22 13:08:21 +00:00
|
|
|
exceptions.append("Could not load %s: %s" % (testsuite, e))
|
2014-11-28 23:03:04 +00:00
|
|
|
continue
|
|
|
|
finally:
|
|
|
|
sys.path.remove(os.path.dirname(testsuite))
|
|
|
|
|
2016-12-22 13:08:21 +00:00
|
|
|
return (None, exceptions)
|
|
|
|
|
|
|
|
def _load_testsuites(self):
|
2020-02-17 13:32:48 +00:00
|
|
|
testsuites = {}
|
2016-12-22 13:08:21 +00:00
|
|
|
for testsuite in self.options.testsuites:
|
2019-03-13 22:08:25 +00:00
|
|
|
if testsuite.endswith('.py') and os.path.exists(testsuite):
|
2017-06-16 21:31:19 +00:00
|
|
|
testsuite = os.path.abspath(os.path.expanduser(testsuite))
|
2016-12-22 13:08:21 +00:00
|
|
|
loaded_module = self._load_testsuite([testsuite])
|
|
|
|
else:
|
|
|
|
possible_testsuites_paths = [os.path.join(d, testsuite + ".py")
|
2018-03-23 21:02:43 +00:00
|
|
|
for d in self.options.testsuites_dirs]
|
2016-12-22 13:08:21 +00:00
|
|
|
loaded_module = self._load_testsuite(possible_testsuites_paths)
|
|
|
|
|
|
|
|
module = loaded_module[0]
|
|
|
|
if not loaded_module[0]:
|
2017-07-13 20:43:32 +00:00
|
|
|
if "." in testsuite:
|
|
|
|
self.options.testsuites.append(testsuite.split('.')[0])
|
2018-03-23 21:02:43 +00:00
|
|
|
self.info("%s looks like a test name, trying that" %
|
|
|
|
testsuite)
|
2017-07-13 20:43:32 +00:00
|
|
|
self.options.wanted_tests.append(testsuite)
|
|
|
|
else:
|
2020-05-26 19:55:55 +00:00
|
|
|
if testsuite in testsuites:
|
|
|
|
self.info('Testuite %s was loaded previously', testsuite)
|
|
|
|
continue
|
2017-07-13 20:43:32 +00:00
|
|
|
printc("Could not load testsuite: %s, reasons: %s" % (
|
|
|
|
testsuite, loaded_module[1]), Colors.FAIL)
|
2016-12-22 13:08:21 +00:00
|
|
|
continue
|
|
|
|
|
2020-02-17 13:32:48 +00:00
|
|
|
if module.__name__ in testsuites:
|
|
|
|
self.info("Trying to load testsuite '%s' a second time?", module.__name__)
|
|
|
|
continue
|
|
|
|
|
|
|
|
testsuites[module.__name__] = module
|
2014-11-28 23:03:04 +00:00
|
|
|
if not hasattr(module, "TEST_MANAGER"):
|
|
|
|
module.TEST_MANAGER = [tester.name for tester in self.testers]
|
|
|
|
elif not isinstance(module.TEST_MANAGER, list):
|
|
|
|
module.TEST_MANAGER = [module.TEST_MANAGER]
|
|
|
|
|
2020-02-17 13:32:48 +00:00
|
|
|
self.options.testsuites = list(testsuites.values())
|
2014-11-28 23:03:04 +00:00
|
|
|
|
|
|
|
def _setup_testsuites(self):
|
|
|
|
for testsuite in self.options.testsuites:
|
|
|
|
loaded = False
|
|
|
|
wanted_test_manager = None
|
2018-10-28 17:27:22 +00:00
|
|
|
# TEST_MANAGER has been set in _load_testsuites()
|
|
|
|
assert hasattr(testsuite, "TEST_MANAGER")
|
|
|
|
wanted_test_manager = testsuite.TEST_MANAGER
|
|
|
|
if not isinstance(wanted_test_manager, list):
|
|
|
|
wanted_test_manager = [wanted_test_manager]
|
2014-11-28 23:03:04 +00:00
|
|
|
|
|
|
|
for tester in self.testers:
|
|
|
|
if wanted_test_manager is not None and \
|
|
|
|
tester.name not in wanted_test_manager:
|
|
|
|
continue
|
|
|
|
|
2019-01-26 01:27:07 +00:00
|
|
|
prev_testsuite_name = TestsManager.loading_testsuite
|
2016-03-23 19:02:47 +00:00
|
|
|
if self.options.user_paths:
|
2017-07-13 20:43:32 +00:00
|
|
|
TestsManager.loading_testsuite = tester.name
|
2015-07-17 07:45:35 +00:00
|
|
|
tester.register_defaults()
|
|
|
|
loaded = True
|
2017-07-13 20:43:32 +00:00
|
|
|
else:
|
|
|
|
TestsManager.loading_testsuite = testsuite.__name__
|
|
|
|
if testsuite.setup_tests(tester, self.options):
|
|
|
|
loaded = True
|
2019-01-26 01:27:07 +00:00
|
|
|
if prev_testsuite_name:
|
|
|
|
TestsManager.loading_testsuite = prev_testsuite_name
|
2014-11-28 23:03:04 +00:00
|
|
|
|
|
|
|
if not loaded:
|
|
|
|
printc("Could not load testsuite: %s"
|
|
|
|
" maybe because of missing TestManager"
|
|
|
|
% (testsuite), Colors.FAIL)
|
2016-09-05 15:16:59 +00:00
|
|
|
return False
|
2014-11-28 23:03:04 +00:00
|
|
|
|
2014-12-08 11:42:51 +00:00
|
|
|
def _load_config(self, options):
|
2014-11-28 23:03:04 +00:00
|
|
|
printc("Loading config files is DEPRECATED"
|
|
|
|
" you should use the new testsuite format now",)
|
|
|
|
|
|
|
|
for tester in self.testers:
|
|
|
|
tester.options = options
|
|
|
|
globals()[tester.name] = tester
|
|
|
|
globals()["options"] = options
|
|
|
|
c__file__ = __file__
|
|
|
|
globals()["__file__"] = self.options.config
|
2018-03-23 21:02:43 +00:00
|
|
|
exec(compile(open(self.options.config).read(),
|
|
|
|
self.options.config, 'exec'), globals())
|
2014-11-28 23:03:04 +00:00
|
|
|
globals()["__file__"] = c__file__
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def set_settings(self, options, args):
|
2016-09-06 19:21:05 +00:00
|
|
|
if options.xunit_file:
|
|
|
|
self.reporter = reporters.XunitReporter(options)
|
|
|
|
else:
|
|
|
|
self.reporter = reporters.Reporter(options)
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-01-14 17:07:46 +00:00
|
|
|
self.options = options
|
2013-12-31 10:45:07 +00:00
|
|
|
wanted_testers = None
|
|
|
|
for tester in self.testers:
|
|
|
|
if tester.name in args:
|
|
|
|
wanted_testers = tester.name
|
2014-11-28 23:03:04 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
if wanted_testers:
|
|
|
|
testers = self.testers
|
|
|
|
self.testers = []
|
|
|
|
for tester in testers:
|
|
|
|
if tester.name in args:
|
|
|
|
self.testers.append(tester)
|
|
|
|
args.remove(tester.name)
|
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
if options.config:
|
2014-12-08 11:42:51 +00:00
|
|
|
self._load_config(options)
|
2014-11-28 23:03:04 +00:00
|
|
|
|
|
|
|
self._load_testsuites()
|
2017-07-18 16:09:13 +00:00
|
|
|
if not self.options.testsuites:
|
|
|
|
printc("Not testsuite loaded!", Colors.FAIL)
|
|
|
|
return False
|
2014-06-26 10:42:38 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
for tester in self.testers:
|
|
|
|
tester.set_settings(options, args, self.reporter)
|
|
|
|
|
2014-11-28 23:03:04 +00:00
|
|
|
if not options.config and options.testsuites:
|
2016-09-05 15:16:59 +00:00
|
|
|
if self._setup_testsuites() is False:
|
|
|
|
return False
|
2016-11-16 13:47:21 +00:00
|
|
|
|
2019-03-17 00:37:16 +00:00
|
|
|
if self.options.check_bugs_status:
|
|
|
|
printc("-> Checking bugs resolution... ", end='')
|
|
|
|
|
2016-11-16 13:47:21 +00:00
|
|
|
for tester in self.testers:
|
2019-04-17 00:31:30 +00:00
|
|
|
if not tester.check_blacklists():
|
2016-11-16 13:47:21 +00:00
|
|
|
return False
|
|
|
|
|
2019-04-17 00:31:30 +00:00
|
|
|
tester.log_blacklists()
|
|
|
|
|
2019-03-18 19:52:11 +00:00
|
|
|
if not tester.check_expected_issues():
|
2016-11-24 13:29:53 +00:00
|
|
|
return False
|
|
|
|
|
2019-03-17 00:37:16 +00:00
|
|
|
if self.options.check_bugs_status:
|
|
|
|
printc("OK", Colors.OKGREEN)
|
|
|
|
|
2019-01-26 01:09:30 +00:00
|
|
|
if self.needs_http_server() or options.httponly is True:
|
|
|
|
self.httpsrv = HTTPServer(options)
|
|
|
|
self.httpsrv.start()
|
|
|
|
|
|
|
|
if options.no_display:
|
|
|
|
self.vfb_server = get_virual_frame_buffer_server(options)
|
2019-01-26 01:13:28 +00:00
|
|
|
res = self.vfb_server.start()
|
2019-01-26 01:09:30 +00:00
|
|
|
if res[0] is False:
|
|
|
|
printc("Could not start virtual frame server: %s" % res[1],
|
|
|
|
Colors.FAIL)
|
|
|
|
return False
|
2019-01-26 01:13:28 +00:00
|
|
|
os.environ["DISPLAY"] = self.vfb_server.display_id
|
2019-01-26 01:09:30 +00:00
|
|
|
|
2016-09-05 15:16:59 +00:00
|
|
|
return True
|
2014-11-28 23:03:04 +00:00
|
|
|
|
2015-03-14 15:40:17 +00:00
|
|
|
def _check_tester_has_other_testsuite(self, testsuite, tester):
|
2014-11-29 12:43:06 +00:00
|
|
|
if tester.name != testsuite.TEST_MANAGER[0]:
|
|
|
|
return True
|
|
|
|
|
|
|
|
for t in self.options.testsuites:
|
|
|
|
if t != testsuite:
|
2016-03-23 18:34:10 +00:00
|
|
|
for other_testmanager in t.TEST_MANAGER:
|
2014-11-29 12:43:06 +00:00
|
|
|
if other_testmanager == tester.name:
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
|
|
|
def _check_defined_tests(self, tester, tests):
|
2015-03-14 15:08:12 +00:00
|
|
|
if self.options.blacklisted_tests or self.options.wanted_tests:
|
2014-11-29 12:43:06 +00:00
|
|
|
return
|
|
|
|
|
|
|
|
tests_names = [test.classname for test in tests]
|
2016-02-23 10:29:56 +00:00
|
|
|
testlist_changed = False
|
2014-11-29 12:43:06 +00:00
|
|
|
for testsuite in self.options.testsuites:
|
2015-03-14 15:40:17 +00:00
|
|
|
if not self._check_tester_has_other_testsuite(testsuite, tester) \
|
2015-03-14 15:08:12 +00:00
|
|
|
and tester.check_testslist:
|
2014-11-29 12:43:06 +00:00
|
|
|
try:
|
|
|
|
testlist_file = open(os.path.splitext(testsuite.__file__)[0] + ".testslist",
|
2016-11-04 21:04:37 +00:00
|
|
|
'r+')
|
2014-11-29 12:43:06 +00:00
|
|
|
|
|
|
|
know_tests = testlist_file.read().split("\n")
|
|
|
|
testlist_file.close()
|
|
|
|
|
|
|
|
testlist_file = open(os.path.splitext(testsuite.__file__)[0] + ".testslist",
|
2015-01-20 08:59:23 +00:00
|
|
|
'w')
|
2014-11-29 12:43:06 +00:00
|
|
|
except IOError:
|
2014-12-05 11:16:36 +00:00
|
|
|
continue
|
2014-11-29 12:43:06 +00:00
|
|
|
|
2017-06-06 20:23:48 +00:00
|
|
|
optional_out = []
|
2014-11-29 12:43:06 +00:00
|
|
|
for test in know_tests:
|
2014-12-05 11:16:36 +00:00
|
|
|
if test and test.strip('~') not in tests_names:
|
|
|
|
if not test.startswith('~'):
|
|
|
|
testlist_changed = True
|
|
|
|
printc("Test %s Not in testsuite %s anymore"
|
2018-03-23 21:02:43 +00:00
|
|
|
% (test, testsuite.__file__), Colors.FAIL)
|
2017-06-06 20:23:48 +00:00
|
|
|
else:
|
|
|
|
optional_out.append((test, None))
|
2014-12-05 11:16:36 +00:00
|
|
|
|
2017-06-06 20:23:48 +00:00
|
|
|
tests_names = sorted([(test.classname, test) for test in tests] + optional_out,
|
|
|
|
key=lambda x: x[0].strip('~'))
|
|
|
|
|
|
|
|
for tname, test in tests_names:
|
|
|
|
if test and test.optional:
|
|
|
|
tname = '~' + tname
|
|
|
|
testlist_file.write("%s\n" % (tname))
|
|
|
|
if tname and tname not in know_tests:
|
2014-11-29 12:43:06 +00:00
|
|
|
printc("Test %s is NEW in testsuite %s"
|
2018-05-15 18:40:45 +00:00
|
|
|
% (tname, testsuite.__file__),
|
|
|
|
Colors.FAIL if self.options.fail_on_testlist_change else Colors.OKGREEN)
|
2016-02-23 10:29:56 +00:00
|
|
|
testlist_changed = True
|
2014-11-29 12:43:06 +00:00
|
|
|
|
|
|
|
testlist_file.close()
|
2016-02-23 10:29:56 +00:00
|
|
|
break
|
|
|
|
|
|
|
|
return testlist_changed
|
2014-11-29 12:43:06 +00:00
|
|
|
|
2020-01-12 04:00:06 +00:00
|
|
|
def _split_tests(self, num_groups):
|
|
|
|
groups = [[] for x in range(num_groups)]
|
|
|
|
group = cycle(groups)
|
|
|
|
for test in self.tests:
|
|
|
|
next(group).append(test)
|
|
|
|
return groups
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def list_tests(self):
|
|
|
|
for tester in self.testers:
|
2017-01-03 17:52:38 +00:00
|
|
|
if not self._tester_needed(tester):
|
|
|
|
continue
|
|
|
|
|
2014-11-29 12:43:06 +00:00
|
|
|
tests = tester.list_tests()
|
2016-02-23 10:29:56 +00:00
|
|
|
if self._check_defined_tests(tester, tests) and \
|
|
|
|
self.options.fail_on_testlist_change:
|
2018-05-15 18:40:45 +00:00
|
|
|
raise RuntimeError("Unexpected new test in testsuite.")
|
2016-02-23 10:29:56 +00:00
|
|
|
|
2014-11-29 12:43:06 +00:00
|
|
|
self.tests.extend(tests)
|
2019-02-09 20:25:03 +00:00
|
|
|
self.tests.sort(key=lambda test: test.classname)
|
2020-01-12 04:00:06 +00:00
|
|
|
|
|
|
|
if self.options.num_parts < 1:
|
|
|
|
raise RuntimeError("Tests must be split in positive number of parts.")
|
|
|
|
if self.options.num_parts > len(self.tests):
|
|
|
|
raise RuntimeError("Cannot have more parts then there exist tests.")
|
|
|
|
if self.options.part_index < 1 or self.options.part_index > self.options.num_parts:
|
|
|
|
raise RuntimeError("Part index is out of range")
|
|
|
|
|
|
|
|
self.tests = self._split_tests(self.options.num_parts)[self.options.part_index - 1]
|
2019-02-09 20:25:03 +00:00
|
|
|
return self.tests
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2016-12-22 13:08:23 +00:00
|
|
|
def _tester_needed(self, tester):
|
|
|
|
for testsuite in self.options.testsuites:
|
|
|
|
if tester.name in testsuite.TEST_MANAGER:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2017-12-03 09:42:49 +00:00
|
|
|
def server_wrapper(self, ready):
|
2018-03-23 20:44:06 +00:00
|
|
|
self.server = GstValidateTCPServer(
|
|
|
|
('localhost', 0), GstValidateListener)
|
2017-12-03 09:42:49 +00:00
|
|
|
self.server.socket.settimeout(None)
|
|
|
|
self.server.launcher = self
|
|
|
|
self.serverport = self.server.socket.getsockname()[1]
|
|
|
|
self.info("%s server port: %s" % (self, self.serverport))
|
|
|
|
ready.set()
|
|
|
|
|
|
|
|
self.server.serve_forever(poll_interval=0.05)
|
|
|
|
|
|
|
|
def _start_server(self):
|
|
|
|
self.info("Starting TCP Server")
|
|
|
|
ready = threading.Event()
|
|
|
|
self.server_thread = threading.Thread(target=self.server_wrapper,
|
|
|
|
kwargs={'ready': ready})
|
|
|
|
self.server_thread.start()
|
|
|
|
ready.wait()
|
|
|
|
os.environ["GST_VALIDATE_SERVER"] = "tcp://localhost:%s" % self.serverport
|
|
|
|
|
|
|
|
def _stop_server(self):
|
|
|
|
if self.server:
|
|
|
|
self.server.shutdown()
|
|
|
|
self.server_thread.join()
|
|
|
|
self.server.server_close()
|
|
|
|
self.server = None
|
|
|
|
|
2017-12-02 12:36:27 +00:00
|
|
|
def test_wait(self):
|
|
|
|
while True:
|
|
|
|
# Check process every second for timeout
|
|
|
|
try:
|
|
|
|
self.queue.get(timeout=1)
|
|
|
|
except queue.Empty:
|
|
|
|
pass
|
|
|
|
|
|
|
|
for test in self.jobs:
|
|
|
|
if test.process_update():
|
|
|
|
self.jobs.remove(test)
|
|
|
|
return test
|
|
|
|
|
|
|
|
def tests_wait(self):
|
|
|
|
try:
|
|
|
|
test = self.test_wait()
|
|
|
|
test.check_results()
|
|
|
|
except KeyboardInterrupt:
|
|
|
|
for test in self.jobs:
|
|
|
|
test.kill_subprocess()
|
|
|
|
raise
|
|
|
|
|
|
|
|
return test
|
|
|
|
|
|
|
|
def start_new_job(self, tests_left):
|
|
|
|
try:
|
|
|
|
test = tests_left.pop(0)
|
|
|
|
except IndexError:
|
|
|
|
return False
|
|
|
|
|
|
|
|
test.test_start(self.queue)
|
|
|
|
|
|
|
|
self.jobs.append(test)
|
|
|
|
|
|
|
|
return True
|
|
|
|
|
2021-10-08 15:09:47 +00:00
|
|
|
    def print_result(self, current_test_num, test, total_num_tests, retry_on_failures=False):
        """Print one test's outcome plus an overall progress bar.

        The failing test line is suppressed when the failure is about to be
        retried (retry_on_failures without remaining max_retries budget
        exempted), since the retry run will report it.
        """
        if test.result not in [Result.PASSED, Result.KNOWN_ERROR] and (not retry_on_failures or test.max_retries):
            printc(str(test), color=utils.get_color_for_result(test.result))

        length = 80
        progress = int(length * current_test_num // total_num_tests)
        bar = '█' * progress + '-' * (length - progress)
        if is_tty():
            # Interactive terminal: redraw the bar in place.
            printc('\r|%s| [%s/%s]' % (bar, current_test_num, total_num_tests), end='\r')
        else:
            # Non-tty (CI logs): only emit a new line when progress advanced,
            # to avoid flooding the log.
            if progress > self.current_progress:
                self.current_progress = progress
                printc('|%s| [%s/%s]' % (bar, current_test_num, total_num_tests))
|
2019-12-30 15:57:57 +00:00
|
|
|
|
2021-10-08 00:28:12 +00:00
|
|
|
    def _run_tests(self, running_tests=None, all_alone=False, retry_on_failures=False, total_num_tests=None):
        """Run @running_tests (default: self.tests) and return overall success.

        Parallel-capable tests are run with up to options.num_jobs workers,
        the rest sequentially. Failed tests eligible for retry are collected
        and re-run once, serially, at the end (all_alone=True) to detect
        flakiness.

        Args:
            running_tests: tests to run; falls back to self.tests.
            all_alone: force every test to run serially.
            retry_on_failures: re-run failures instead of reporting directly.
            total_num_tests: total for progress display; set by the retry
                recursion, otherwise derived from all known tests.
        Returns:
            False as soon as a test fails in forever/fatal-error mode or when
            the retry run fails; True otherwise.
        """
        if not self.all_tests:
            self.all_tests = self.list_tests()

        if not running_tests:
            running_tests = self.tests

        self.reporter.init_timer()
        alone_tests = []
        tests = []
        for test in running_tests:
            if test.is_parallel and not all_alone:
                tests.append(test)
            else:
                alone_tests.append(test)

        # use max to defend against the case where all tests are alone_tests
        max_num_jobs = max(min(self.options.num_jobs, len(tests)), 1)
        jobs_running = 0

        # In --forever mode, duplicate tests so that all job slots stay busy
        # even when fewer tests than workers exist.
        if self.options.forever and len(tests) < self.options.num_jobs and len(tests):
            max_num_jobs = self.options.num_jobs
            copied = []
            i = 0
            while (len(tests) + len(copied)) < max_num_jobs:
                copied.append(tests[i].copy(len(copied) + 1))

                i += 1
                if i >= len(tests):
                    i = 0
            tests += copied
            self.tests += copied

        self.total_num_tests = len(self.all_tests)
        # total_num_tests is only passed in by the retry recursion below.
        prefix = "=> Re-r" if total_num_tests else "R"
        total_num_tests = total_num_tests if total_num_tests else self.total_num_tests
        printc(f"\n{prefix}unning {total_num_tests} tests...", color=Colors.HEADER)
        # if order of test execution doesn't matter, shuffle
        # the order to optimize cpu usage
        if self.options.shuffle:
            random.shuffle(tests)
            random.shuffle(alone_tests)

        current_test_num = 1
        to_retry = []
        # First pass: parallel tests with max_num_jobs workers; second pass:
        # serial-only tests one at a time.
        for num_jobs, tests in [(max_num_jobs, tests), (1, alone_tests)]:
            tests_left = list(tests)
            # Prime the worker slots.
            for i in range(num_jobs):
                if not self.start_new_job(tests_left):
                    break
                jobs_running += 1

            while jobs_running != 0:
                test = self.tests_wait()
                jobs_running -= 1
                current_test_num += 1
                res = test.test_end(retry_on_failures=retry_on_failures)
                to_report = True
                if res not in [Result.PASSED, Result.SKIPPED, Result.KNOWN_ERROR]:
                    if self.options.forever or self.options.fatal_error:
                        # Abort the whole run on first failure in these modes.
                        self.print_result(current_test_num - 1, test, retry_on_failures=retry_on_failures,
                                          total_num_tests=total_num_tests)
                        self.reporter.after_test(test)
                        return False

                    if retry_on_failures or test.max_retries:
                        if not self.options.redirect_logs:
                            test.copy_logfiles()
                        to_retry.append(test)

                        # Not adding to final report if flakiness is tolerated
                        if test.max_retries:
                            test.max_retries -= 1
                            to_report = False
                self.print_result(current_test_num - 1, test,
                                  retry_on_failures=retry_on_failures,
                                  total_num_tests=total_num_tests)
                if to_report:
                    self.reporter.after_test(test)
                # Refill the slot this finished test freed.
                if self.start_new_job(tests_left):
                    jobs_running += 1

        if to_retry:
            printc("--> Rerunning the following tests to see if they are flaky:", Colors.WARNING)
            for test in to_retry:
                test.clean()
                printc(f' * {test.classname}')
            printc('')
            self.current_progress = -1
            # Retry serially with retries disabled so this recursion is final.
            res = self._run_tests(
                to_retry,
                all_alone=True,
                retry_on_failures=False,
                total_num_tests=len(to_retry),
            )

            return res

        return True
|
|
|
|
|
2019-04-12 16:33:25 +00:00
|
|
|
def clean_tests(self, stop_server=False):
|
2017-12-02 12:36:27 +00:00
|
|
|
for test in self.tests:
|
|
|
|
test.clean()
|
2019-04-12 16:33:25 +00:00
|
|
|
if stop_server:
|
|
|
|
self._stop_server()
|
2014-02-19 12:07:03 +00:00
|
|
|
|
2014-01-14 17:07:46 +00:00
|
|
|
    def run_tests(self):
        """Top-level entry point: start the server and run the test suite.

        Three modes, driven by self.options:
          * forever: loop until a run fails (always returns False on exit);
          * n_runs: run the suite that many times, False if any run failed;
          * default: a single run.
        The finally block always stops the HTTP/virtual-framebuffer helpers
        and the TCP server, whatever mode ran.
        """
        r = 0
        try:
            self._start_server()
            if self.options.forever:
                r = 1
                while True:
                    self.current_progress = -1
                    printc("-> Iteration %d" % r, end='\r')

                    # Stop at the first failing iteration.
                    if not self._run_tests():
                        break
                    r += 1
                    self.clean_tests()
                    msg = "-> Iteration %d... %sOK%s" % (r, Colors.OKGREEN, Colors.ENDC)
                    printc(msg, end="\r")

                return False
            elif self.options.n_runs:
                res = True
                for r in range(self.options.n_runs):
                    self.current_progress = -1
                    printc("-> Iteration %d" % r, end='\r')
                    if not self._run_tests(retry_on_failures=self.options.retry_on_failures):
                        res = False
                        printc("ERROR", Colors.FAIL, end="\r")
                    else:
                        printc("OK", Colors.OKGREEN, end="\r")
                    self.clean_tests()

                return res
            else:
                return self._run_tests(retry_on_failures=self.options.retry_on_failures)
        finally:
            if self.options.forever:
                printc("\n-> Ran %d times" % r)
            if self.httpsrv:
                self.httpsrv.stop()
            if self.vfb_server:
                self.vfb_server.stop()
            # Also shuts the TCP server down (stop_server=True).
            self.clean_tests(True)
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
    def final_report(self):
        """Delegate the end-of-run summary to the configured reporter."""
        return self.reporter.final_report()
|
2014-01-13 16:31:57 +00:00
|
|
|
|
|
|
|
def needs_http_server(self):
|
|
|
|
for tester in self.testers:
|
|
|
|
if tester.needs_http_server():
|
|
|
|
return True
|
2014-01-24 10:41:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
class NamedDic(object):
    """Tiny convenience object exposing a mapping's entries as attributes."""

    def __init__(self, props):
        # A falsy mapping (None or {}) yields an attribute-less instance.
        if props:
            self.__dict__.update(props)
|
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-01-30 15:56:51 +00:00
|
|
|
class Scenario(object):
    """A gst-validate scenario definition.

    Properties parsed from the .scenario file are attached as attributes
    (dashes converted to underscores); the accessors below expose them with
    sensible defaults when absent.
    """

    def __init__(self, name, props, path=None):
        self.name = name
        self.path = path

        # Property keys use dashes in the files; attributes cannot.
        for key, value in props:
            setattr(self, key.replace("-", "_"), value)

    def get_execution_name(self):
        """Name to pass to gst-validate: the file path when known."""
        return self.name if self.path is None else self.path

    def seeks(self):
        """Whether the scenario performs seeks."""
        return bool(getattr(self, "seek", False))

    def needs_clock_sync(self):
        """Whether the scenario requires clock synchronisation."""
        return bool(getattr(self, "need_clock_sync", False))

    def needs_live_content(self):
        """Whether the scenario can only be used on live content."""
        return bool(getattr(self, "live_content_required", False))

    def compatible_with_live_content(self):
        """Whether the scenario may run on live content."""
        # if a live content is required it is implicitly compatible with it
        if self.needs_live_content():
            return True
        return bool(getattr(self, "live_content_compatible", False))

    def get_min_media_duration(self):
        """Minimum media duration (seconds) required, 0 when unconstrained."""
        if hasattr(self, "min_media_duration"):
            return float(self.min_media_duration)
        return 0

    def does_reverse_playback(self):
        """Whether the scenario plays the media backwards."""
        return bool(getattr(self, "reverse_playback", False))

    def get_duration(self):
        """Declared scenario duration, 0 when not set."""
        if hasattr(self, "duration"):
            return float(self.duration)
        return 0

    def get_min_tracks(self, track_type):
        """Minimum number of @track_type tracks required, 0 by default."""
        attr = "min_%s_track" % track_type
        if hasattr(self, attr):
            return int(getattr(self, attr))
        return 0

    def __repr__(self):
        return "<Scenario %s>" % self.name
|
2014-02-12 10:18:14 +00:00
|
|
|
|
2014-12-05 11:16:36 +00:00
|
|
|
|
2014-03-28 14:00:01 +00:00
|
|
|
class ScenarioManager(Loggable):
    """Singleton resolving scenario names/paths into Scenario objects.

    Scenario definitions are produced by running gst-validate with
    --scenarios-defs-output-file and parsed back with configparser.
    """
    _instance = None
    system_scenarios = []
    special_scenarios = {}

    FILE_EXTENSION = "scenario"

    def __new__(cls, *args, **kwargs):
        # Classic singleton: the first instantiation wins and is reused.
        if not cls._instance:
            # object.__new__() rejects extra arguments in Python 3, so do not
            # forward *args/**kwargs (the previous code did and would raise).
            cls._instance = super(ScenarioManager, cls).__new__(cls)
            cls._instance.config = None
            cls._instance.discovered = False
            Loggable.__init__(cls._instance)

        return cls._instance

    def find_special_scenarios(self, mfile):
        """Return scenarios living next to media file @mfile.

        Such files are named ``<media-basename>.<name>.scenario``.
        """
        scenarios = []
        mfile_bname = os.path.basename(mfile)

        for f in os.listdir(os.path.dirname(mfile)):
            # Raw string so the escaped dots are real regex escapes.
            if re.findall(r"%s\..*\.%s$" % (re.escape(mfile_bname), self.FILE_EXTENSION), f):
                scenarios.append(os.path.join(os.path.dirname(mfile), f))

        if scenarios:
            scenarios = self.discover_scenarios(scenarios, mfile)

        return scenarios

    def discover_scenarios(self, scenario_paths=None, mfile=None):
        """
        Discover scenarios specified in scenario_paths or the default ones
        if nothing specified there.

        Args:
            scenario_paths: optional list of .scenario file paths to describe;
                when empty/None the system scenarios are discovered.
            mfile: media file the special scenarios belong to, used to derive
                the real scenario name (filename.REALNAME.scenario).
        """
        # None default instead of a shared mutable [] default argument.
        scenario_paths = scenario_paths or []
        scenarios = []
        scenario_defs = os.path.join(self.config.main_dir, "scenarios.def")
        log_path = os.path.join(self.config.logsdir, "scenarios_discovery.log")

        try:
            # 'with' guarantees the log handle is closed (it used to leak).
            with open(log_path, 'w') as logs:
                command = [GstValidateBaseTestManager.COMMAND,
                           "--scenarios-defs-output-file", scenario_defs]
                command.extend(scenario_paths)
                subprocess.check_call(command, stdout=logs, stderr=logs)
        except subprocess.CalledProcessError as e:
            # Best effort: fall through and parse whatever was produced.
            self.error(e)
            self.error('See %s' % log_path)

        config = configparser.RawConfigParser()
        # read_file() replaces readfp(), which was removed in Python 3.12;
        # 'with' also closes the definitions file.
        with open(scenario_defs) as f:
            config.read_file(f)

        for section in config.sections():
            name = None
            if scenario_paths:
                for scenario_path in scenario_paths:
                    if section == scenario_path:
                        if mfile is None:
                            name = os.path.basename(section).replace("." + self.FILE_EXTENSION, "")
                        else:
                            # The real name of the scenario is:
                            # filename.REALNAME.scenario
                            name = scenario_path.replace(mfile + ".", "").replace(
                                "." + self.FILE_EXTENSION, "")
                        path = scenario_path
                        break
            else:
                name = os.path.basename(section).replace("." + self.FILE_EXTENSION, "")
                path = None

            assert name

            props = config.items(section)
            scenario = Scenario(name, props, path)
            if scenario_paths:
                self.special_scenarios[path] = scenario
            scenarios.append(scenario)

        if not scenario_paths:
            self.discovered = True
            self.system_scenarios.extend(scenarios)

        return scenarios

    def get_scenario(self, name):
        """Return the Scenario called @name, the full system-scenario list
        when @name is None, or None when nothing matches."""
        if name is not None and os.path.isabs(name) and name.endswith(self.FILE_EXTENSION):
            scenario = self.special_scenarios.get(name)
            if scenario:
                return scenario

            scenarios = self.discover_scenarios([name])

            if scenarios:
                # Cache the Scenario object itself. The previous code cached
                # the whole list, so a second lookup of the same path
                # returned a list instead of a Scenario.
                self.special_scenarios[name] = scenarios[0]
                return scenarios[0]

        if self.discovered is False:
            self.discover_scenarios()

        if name is None:
            return self.system_scenarios

        try:
            return [scenario for scenario in self.system_scenarios if scenario.name == name][0]
        except IndexError:
            self.warning("Scenario: %s not found" % name)
            return None
|
2014-06-26 10:42:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
class GstValidateBaseTestManager(TestsManager):
    """Base TestsManager for gst-validate based testers.

    Holds the default scenarios and encoding formats that generated tests
    will use, plus class-level helpers to locate the gst-validate tools.
    """
    # Shared singleton used to resolve scenario names for every manager.
    scenarios_manager = ScenarioManager()
    # Cache of gst-inspect feature lookups, keyed by feature name.
    features_cache = {}

    def __init__(self):
        super(GstValidateBaseTestManager, self).__init__()
        self._scenarios = []
        self._encoding_formats = []

    @classmethod
    def update_commands(cls, extra_paths=None):
        """Locate the gst-validate tool suite and store each path as a
        ``<PREFIX>COMMAND`` class attribute (e.g. TRANSCODING_COMMAND).

        Args:
            extra_paths: extra directories to search in addition to PATH.
        """
        for varname, cmd in {'': 'gst-validate',
                             'TRANSCODING_': 'gst-validate-transcoding',
                             'MEDIA_CHECK_': 'gst-validate-media-check',
                             'RTSP_SERVER_': 'gst-validate-rtsp-server',
                             'INSPECT_': 'gst-inspect'}.items():
            setattr(cls, varname + 'COMMAND', which(cmd + '-1.0', extra_paths))

    @classmethod
    def has_feature(cls, featurename):
        """Return whether gst-inspect knows @featurename, caching the result."""
        try:
            return cls.features_cache[featurename]
        except KeyError:
            pass

        # gst-inspect exits non-zero when the feature does not exist.
        try:
            subprocess.check_output([cls.INSPECT_COMMAND, featurename])
            res = True
        except subprocess.CalledProcessError:
            res = False

        cls.features_cache[featurename] = res
        return res

    def add_scenarios(self, scenarios):
        """
        @scenarios: A list of, or one single, scenario name(s) to be run on the tests.
                    They are just the default scenarios, and then depending on
                    the TestsGenerator to be used you can have more fine grained
                    control on what to be run on each series of tests.
        """
        if isinstance(scenarios, list):
            self._scenarios.extend(scenarios)
        else:
            self._scenarios.append(scenarios)

        # Deduplicate; note this does not preserve insertion order.
        self._scenarios = list(set(self._scenarios))

    def set_scenarios(self, scenarios):
        """
        Override the scenarios
        """
        self._scenarios = []
        self.add_scenarios(scenarios)

    def get_scenarios(self):
        """Return the default scenario names."""
        return self._scenarios

    def add_encoding_formats(self, encoding_formats):
        """
        :param encoding_formats: A list or one single #MediaFormatCombinations describing wanted output
                                 formats for transcoding test.
                                 They are just the default encoding formats, and then depending on
                                 the TestsGenerator to be used you can have more fine grained
                                 control on what to be run on each series of tests.
        """
        if isinstance(encoding_formats, list):
            self._encoding_formats.extend(encoding_formats)
        else:
            self._encoding_formats.append(encoding_formats)

        # Deduplicate; note this does not preserve insertion order.
        self._encoding_formats = list(set(self._encoding_formats))

    def get_encoding_formats(self):
        """Return the default encoding format combinations."""
        return self._encoding_formats
|
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
|
2018-05-03 09:27:31 +00:00
|
|
|
# Resolve the gst-validate tool paths once at import time so the COMMAND
# class attributes are available before any manager is instantiated.
GstValidateBaseTestManager.update_commands()
|
|
|
|
|
|
|
|
|
2014-07-16 09:36:29 +00:00
|
|
|
class MediaDescriptor(Loggable):
    """Abstract description of a media asset.

    Concrete subclasses implement the accessors below; the base class only
    provides defaults and the scenario-compatibility logic.
    """

    def __init__(self):
        Loggable.__init__(self)

    def get_path(self):
        """Path of the descriptor file itself."""
        # Bug fix throughout this class: the original `raise NotImplemented`
        # raises the NotImplemented singleton, which is a TypeError at
        # runtime; NotImplementedError is the proper abstract-method marker.
        raise NotImplementedError

    def has_frames(self):
        """Whether per-frame information is available."""
        return False

    def get_framerate(self):
        """Framerate of the first video track exposing one, else 0/1."""
        for ttype, caps_str in self.get_tracks_caps():
            if ttype != "video":
                continue

            caps = utils.GstCaps.new_from_str(caps_str)
            if not caps:
                self.warning("Could not create caps for %s" % caps_str)
                continue

            framerate = caps[0].get("framerate")
            if framerate:
                return framerate

        return Fraction(0, 1)

    def get_media_filepath(self):
        raise NotImplementedError

    def skip_parsers(self):
        """Whether parsers should be skipped for this media."""
        return False

    def get_caps(self):
        raise NotImplementedError

    def get_uri(self):
        raise NotImplementedError

    def get_duration(self):
        raise NotImplementedError

    def get_protocol(self):
        raise NotImplementedError

    def is_seekable(self):
        raise NotImplementedError

    def is_live(self):
        raise NotImplementedError

    def is_image(self):
        raise NotImplementedError

    def get_num_tracks(self, track_type):
        raise NotImplementedError

    def get_tracks_caps(self):
        """List of (track_type, caps) tuples; empty by default."""
        return []

    def can_play_reverse(self):
        raise NotImplementedError

    def prerrols(self):
        """Whether the media is expected to preroll."""
        return True

    def is_compatible(self, scenario):
        """Return True when @scenario can meaningfully run on this media."""
        if scenario is None:
            return True

        if scenario.seeks() and (not self.is_seekable() or self.is_image()):
            self.debug("Do not run %s as %s does not support seeking",
                       scenario, self.get_uri())
            return False

        if self.is_image() and scenario.needs_clock_sync():
            self.debug("Do not run %s as %s is an image",
                       scenario, self.get_uri())
            return False

        if not self.can_play_reverse() and scenario.does_reverse_playback():
            return False

        if not self.is_live() and scenario.needs_live_content():
            self.debug("Do not run %s as %s is not a live content",
                       scenario, self.get_uri())
            return False

        if self.is_live() and not scenario.compatible_with_live_content():
            self.debug("Do not run %s as %s is a live content",
                       scenario, self.get_uri())
            return False

        if not self.prerrols() and getattr(scenario, 'needs_preroll', False):
            return False

        # Duration is in GStreamer clock units, hence the GST_SECOND divide.
        if self.get_duration() and self.get_duration() / GST_SECOND < scenario.get_min_media_duration():
            self.debug(
                "Do not run %s as %s is too short (%i < min media duation : %i",
                scenario, self.get_uri(),
                self.get_duration() / GST_SECOND,
                scenario.get_min_media_duration())
            return False

        for track_type in ['audio', 'subtitle', 'video']:
            if self.get_num_tracks(track_type) < scenario.get_min_tracks(track_type):
                self.debug("%s -- %s | At least %s %s track needed < %s"
                           % (scenario, self.get_uri(), track_type,
                              scenario.get_min_tracks(track_type),
                              self.get_num_tracks(track_type)))
                return False

        return True
|
|
|
|
|
|
|
|
|
|
|
|
class GstValidateMediaDescriptor(MediaDescriptor):
|
2014-07-16 08:10:44 +00:00
|
|
|
    # Some extension file for discovering results
    SKIPPED_MEDIA_INFO_EXT = "media_info.skipped"
    MEDIA_INFO_EXT = "media_info"
    PUSH_MEDIA_INFO_EXT = "media_info.push"
    STREAM_INFO_EXT = "stream_info"

    # Registry of every descriptor ever built, keyed by media-info path, so
    # the same file is parsed only once.
    __all_descriptors = {}

    @classmethod
    def get(cls, xml_path):
        """Return a (possibly cached) descriptor for @xml_path."""
        if xml_path in cls.__all_descriptors:
            return cls.__all_descriptors[xml_path]
        # Constructing one registers it in __all_descriptors (see __init__).
        return GstValidateMediaDescriptor(xml_path)
|
|
|
|
|
2014-07-16 08:10:44 +00:00
|
|
|
    def __init__(self, xml_path):
        """Build a descriptor from the media-info XML file at @xml_path.

        If a descriptor for the same path already exists, its parsed data is
        copied instead of re-parsing the file; otherwise this instance
        registers itself in the class-wide cache.

        Raises:
            xml.etree.ElementTree.ParseError: when the XML cannot be parsed.
        """
        super(GstValidateMediaDescriptor, self).__init__()

        self._media_file_path = None
        main_descriptor = self.__all_descriptors.get(xml_path)
        if main_descriptor:
            self._copy_data_from_main(main_descriptor)
        else:
            self.__all_descriptors[xml_path] = self

            self._xml_path = xml_path
            try:
                media_xml = ET.parse(xml_path).getroot()
            except xml.etree.ElementTree.ParseError:
                printc("Could not parse %s" % xml_path,
                       Colors.FAIL)
                raise
            self._extract_data(media_xml)

        # Derive the protocol from the URI scheme (push-file overrides this,
        # see set_protocol()).
        self.set_protocol(urllib.parse.urlparse(self.get_uri()).scheme)
|
2015-05-13 13:30:23 +00:00
|
|
|
|
2018-05-25 13:35:10 +00:00
|
|
|
    def skip_parsers(self):
        """True when the media-info was generated with parsers skipped."""
        return self._skip_parsers

    def has_frames(self):
        """True when per-frame data was recorded in the media-info."""
        return self._has_frames
|
|
|
|
|
2020-01-08 12:54:15 +00:00
|
|
|
def _copy_data_from_main(self, main_descriptor):
|
|
|
|
for attr in main_descriptor.__dict__.keys():
|
|
|
|
setattr(self, attr, getattr(main_descriptor, attr))
|
|
|
|
|
2017-12-18 08:48:21 +00:00
|
|
|
    def _extract_data(self, media_xml):
        # Extract the information we need from the xml
        self._caps = media_xml.findall("streams")[0].attrib["caps"]
        self._track_caps = []
        try:
            streams = media_xml.findall("streams")[0].findall("stream")
        except IndexError:
            pass
        else:
            for stream in streams:
                self._track_caps.append(
                    (stream.attrib["type"], stream.attrib["caps"]))

        self._skip_parsers = bool(int(media_xml.attrib.get('skip-parsers', 0)))
        self._has_frames = bool(int(media_xml.attrib["frame-detection"]))
        self._duration = int(media_xml.attrib["duration"])
        self._uri = media_xml.attrib["uri"]
        parsed_uri = urllib.parse.urlparse(self.get_uri())
        self._protocol = media_xml.get("protocol", parsed_uri.scheme)
        if parsed_uri.scheme == "file":
            # The media-info may have been generated elsewhere: if the
            # recorded file is gone but one sits next to this media-info,
            # rewrite the URI to point at the local copy.
            if not os.path.exists(parsed_uri.path) and os.path.exists(self.get_media_filepath()):
                self._uri = "file://" + self.get_media_filepath()
        elif parsed_uri.scheme == Protocols.IMAGESEQUENCE:
            # Same relocation logic for image sequences: keep the pattern
            # basename but anchor it in this media-info's directory.
            self._media_file_path = os.path.join(os.path.dirname(self.__cleanup_media_info_ext()), os.path.basename(parsed_uri.path))
            self._uri = parsed_uri._replace(path=os.path.join(os.path.dirname(self.__cleanup_media_info_ext()), os.path.basename(self._media_file_path))).geturl()
        self._is_seekable = media_xml.attrib["seekable"].lower() == "true"
        self._is_live = media_xml.get("live", "false").lower() == "true"
        self._is_image = False
        for stream in media_xml.findall("streams")[0].findall("stream"):
            if stream.attrib["type"] == "image":
                self._is_image = True
        self._track_types = []
        for stream in media_xml.findall("streams")[0].findall("stream"):
            self._track_types.append(stream.attrib["type"])
|
|
|
|
|
2020-02-11 19:01:07 +00:00
|
|
|
    def __cleanup_media_info_ext(self):
        """Return the media file path: the media-info path with its
        media-info extension (and the dot before it) stripped."""
        for ext in [self.MEDIA_INFO_EXT, self.PUSH_MEDIA_INFO_EXT, self.STREAM_INFO_EXT,
                    self.SKIPPED_MEDIA_INFO_EXT, ]:
            if self._xml_path.endswith(ext):
                # +1 also drops the '.' separating the media name from ext.
                return self._xml_path[:len(self._xml_path) - (len(ext) + 1)]

        # A descriptor path always carries one of the extensions above.
        assert "Not reached" == None  # noqa
|
2020-02-11 19:01:07 +00:00
|
|
|
|
2014-07-16 10:50:41 +00:00
|
|
|
@staticmethod
|
2020-01-07 18:46:21 +00:00
|
|
|
def new_from_uri(uri, verbose=False, include_frames=False, is_push=False, is_skipped=False):
|
2016-12-22 13:08:30 +00:00
|
|
|
"""
|
|
|
|
include_frames = 0 # Never
|
|
|
|
include_frames = 1 # always
|
|
|
|
include_frames = 2 # if previous file included them
|
|
|
|
|
|
|
|
"""
|
2014-07-16 10:50:41 +00:00
|
|
|
media_path = utils.url2path(uri)
|
2016-12-22 13:08:30 +00:00
|
|
|
|
2020-01-07 18:46:21 +00:00
|
|
|
ext = GstValidateMediaDescriptor.MEDIA_INFO_EXT
|
|
|
|
if is_push:
|
|
|
|
ext = GstValidateMediaDescriptor.PUSH_MEDIA_INFO_EXT
|
|
|
|
elif is_skipped:
|
|
|
|
ext = GstValidateMediaDescriptor.SKIPPED_MEDIA_INFO_EXT
|
2018-05-23 15:57:23 +00:00
|
|
|
descriptor_path = "%s.%s" % (media_path, ext)
|
2018-05-25 13:35:10 +00:00
|
|
|
args = GstValidateBaseTestManager.MEDIA_CHECK_COMMAND.split(" ")
|
2016-12-22 13:08:30 +00:00
|
|
|
if include_frames == 2:
|
|
|
|
try:
|
|
|
|
media_xml = ET.parse(descriptor_path).getroot()
|
2020-02-11 19:01:07 +00:00
|
|
|
prev_uri = urllib.parse.urlparse(media_xml.attrib['uri'])
|
|
|
|
if prev_uri.scheme == Protocols.IMAGESEQUENCE:
|
|
|
|
parsed_uri = urllib.parse.urlparse(uri)
|
|
|
|
uri = prev_uri._replace(path=os.path.join(os.path.dirname(parsed_uri.path), os.path.basename(prev_uri.path))).geturl()
|
2018-05-25 13:35:10 +00:00
|
|
|
include_frames = bool(int(media_xml.attrib["frame-detection"]))
|
2019-03-18 14:09:10 +00:00
|
|
|
if bool(int(media_xml.attrib.get("skip-parsers", 0))):
|
2018-05-25 13:35:10 +00:00
|
|
|
args.append("--skip-parsers")
|
2016-12-22 13:08:30 +00:00
|
|
|
except FileNotFoundError:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
include_frames = bool(include_frames)
|
2020-02-11 19:01:07 +00:00
|
|
|
args.append(uri)
|
2016-12-22 13:08:30 +00:00
|
|
|
|
2014-07-16 10:50:41 +00:00
|
|
|
args.extend(["--output-file", descriptor_path])
|
2016-12-22 13:08:30 +00:00
|
|
|
if include_frames:
|
2014-09-15 17:14:27 +00:00
|
|
|
args.extend(["--full"])
|
2014-07-16 10:50:41 +00:00
|
|
|
|
|
|
|
if verbose:
|
|
|
|
printc("Generating media info for %s\n"
|
2014-10-24 12:23:52 +00:00
|
|
|
" Command: '%s'" % (media_path, ' '.join(args)),
|
2014-07-16 10:50:41 +00:00
|
|
|
Colors.OKBLUE)
|
|
|
|
|
|
|
|
try:
|
2014-10-24 12:38:00 +00:00
|
|
|
subprocess.check_output(args, stderr=open(os.devnull))
|
2014-07-16 10:50:41 +00:00
|
|
|
except subprocess.CalledProcessError as e:
|
2014-07-17 14:48:21 +00:00
|
|
|
if verbose:
|
2014-07-16 10:50:41 +00:00
|
|
|
printc("Result: Failed", Colors.FAIL)
|
|
|
|
else:
|
2018-03-23 21:02:43 +00:00
|
|
|
loggable.warning("GstValidateMediaDescriptor",
|
|
|
|
"Exception: %s" % e)
|
2014-07-16 10:50:41 +00:00
|
|
|
return None
|
|
|
|
|
|
|
|
if verbose:
|
|
|
|
printc("Result: Passed", Colors.OKGREEN)
|
|
|
|
|
2016-11-30 17:07:04 +00:00
|
|
|
try:
|
|
|
|
return GstValidateMediaDescriptor(descriptor_path)
|
2017-08-14 19:39:56 +00:00
|
|
|
except (IOError, xml.etree.ElementTree.ParseError):
|
2016-11-30 17:07:04 +00:00
|
|
|
return None
|
2014-07-16 10:50:41 +00:00
|
|
|
|
|
|
|
    def get_path(self):
        """Path of the backing media-info XML file."""
        return self._xml_path

    def need_clock_sync(self):
        """Whether this media's protocol requires clock synchronisation."""
        return Protocols.needs_clock_sync(self.get_protocol())
|
|
|
|
|
2014-07-16 08:10:44 +00:00
|
|
|
def get_media_filepath(self):
|
2020-02-11 19:01:07 +00:00
|
|
|
if self._media_file_path is None:
|
|
|
|
self._media_file_path = self.__cleanup_media_info_ext()
|
|
|
|
return self._media_file_path
|
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
def get_caps(self):
|
2017-12-18 08:48:21 +00:00
|
|
|
return self._caps
|
2014-07-16 08:10:44 +00:00
|
|
|
|
2014-07-16 11:54:54 +00:00
|
|
|
def get_tracks_caps(self):
|
2017-12-18 08:48:21 +00:00
|
|
|
return self._track_caps
|
2014-07-16 11:54:54 +00:00
|
|
|
|
2014-07-16 08:10:44 +00:00
|
|
|
def get_uri(self):
|
2017-12-18 08:48:21 +00:00
|
|
|
return self._uri
|
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
def get_duration(self):
|
2017-12-18 08:48:21 +00:00
|
|
|
return self._duration
|
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
def set_protocol(self, protocol):
    """Set the media protocol.

    Descriptors stored with the push-media-info extension always use
    Protocols.PUSHFILE, overriding the requested value.
    """
    is_push_descriptor = self._xml_path.endswith(
        GstValidateMediaDescriptor.PUSH_MEDIA_INFO_EXT)
    self._protocol = Protocols.PUSHFILE if is_push_descriptor else protocol
|
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
def get_protocol(self):
|
2017-12-18 08:48:21 +00:00
|
|
|
return self._protocol
|
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
def is_seekable(self):
|
2017-12-18 08:48:21 +00:00
|
|
|
return self._is_seekable
|
2014-07-16 08:10:44 +00:00
|
|
|
|
2017-05-25 11:55:52 +00:00
|
|
|
def is_live(self):
|
2017-12-18 08:48:21 +00:00
|
|
|
return self._is_live
|
2017-05-25 11:55:52 +00:00
|
|
|
|
2014-12-13 15:01:49 +00:00
|
|
|
def can_play_reverse(self):
    """Whether reverse playback is supported (always True here)."""
    return True
|
|
|
|
|
2014-07-16 08:10:44 +00:00
|
|
|
def is_image(self):
|
2017-12-18 08:48:21 +00:00
|
|
|
return self._is_image
|
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
def get_num_tracks(self, track_type):
|
|
|
|
n = 0
|
2017-12-18 08:48:21 +00:00
|
|
|
for t in self._track_types:
|
|
|
|
if t == track_type:
|
2014-07-16 08:10:44 +00:00
|
|
|
n += 1
|
|
|
|
|
|
|
|
return n
|
2014-07-16 10:16:03 +00:00
|
|
|
|
2014-09-12 08:47:18 +00:00
|
|
|
def get_clean_name(self):
|
|
|
|
name = os.path.basename(self.get_path())
|
2020-01-08 18:26:41 +00:00
|
|
|
regex = '|'.join(['\\.%s$' % ext for ext in [self.SKIPPED_MEDIA_INFO_EXT, self.MEDIA_INFO_EXT, self.PUSH_MEDIA_INFO_EXT, self.STREAM_INFO_EXT]])
|
|
|
|
name = re.sub(regex, "", name)
|
2014-09-12 08:47:18 +00:00
|
|
|
|
|
|
|
return name.replace('.', "_")
|
2014-07-16 10:16:03 +00:00
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-07-16 10:16:03 +00:00
|
|
|
class MediaFormatCombination(object):
    """A combination of audio format, video format and container format,
    used to describe a transcoding test target."""

    # Mapping from short format names to the GStreamer caps string used
    # to request that format from an encoder or muxer.
    FORMATS = {"aac": "audio/mpeg,mpegversion=4",  # Audio
               "ac3": "audio/x-ac3",
               "vorbis": "audio/x-vorbis",
               "mp3": "audio/mpeg,mpegversion=1,layer=3",
               "opus": "audio/x-opus",
               "rawaudio": "audio/x-raw",

               # Video
               "h264": "video/x-h264",
               "h265": "video/x-h265",
               "vp8": "video/x-vp8",
               "vp9": "video/x-vp9",
               "theora": "video/x-theora",
               "prores": "video/x-prores",
               "jpeg": "image/jpeg",

               # Containers
               "webm": "video/webm",
               "ogg": "application/ogg",
               "mkv": "video/x-matroska",
               "mp4": "video/quicktime,variant=iso;",
               "quicktime": "video/quicktime;"}

    def __str__(self):
        return "%s and %s in %s" % (self.audio, self.video, self.container)

    def __init__(self, container, audio, video, duration_factor=1,
                 video_restriction=None, audio_restriction=None):
        """
        Describes a media format to be used for transcoding tests.

        :param container: A string defining the container format to be used, must be in self.FORMATS
        :param audio: A string defining the audio format to be used, must be in self.FORMATS
        :param video: A string defining the video format to be used, must be in self.FORMATS
        """
        self.container = container
        self.audio = audio
        self.video = video
        # Fix: duration_factor was accepted but silently dropped; store it
        # so callers can scale expected transcoded durations.
        self.duration_factor = duration_factor
        self.video_restriction = video_restriction
        self.audio_restriction = audio_restriction

    def get_caps(self, track_type):
        """Return the caps string for the configured format of
        ``track_type`` ("audio", "video" or "container"), or None when the
        format is unknown or unset."""
        try:
            # Look the configured short name up through the instance dict,
            # then translate it via FORMATS; missing attribute or unknown
            # format both raise KeyError and yield None.
            return self.FORMATS[self.__dict__[track_type]]
        except KeyError:
            return None

    def get_audio_caps(self):
        """Caps string for the audio format, or None."""
        return self.get_caps("audio")

    def get_video_caps(self):
        """Caps string for the video format, or None."""
        return self.get_caps("video")

    def get_muxer_caps(self):
        """Caps string for the container format, or None."""
        return self.get_caps("container")
|