2016-11-04 21:04:37 +00:00
|
|
|
#!/usr/bin/env python3
|
2013-12-31 10:45:07 +00:00
|
|
|
#
|
|
|
|
# Copyright (c) 2013,Thibault Saunier <thibault.saunier@collabora.com>
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or
|
|
|
|
# modify it under the terms of the GNU Lesser General Public
|
|
|
|
# License as published by the Free Software Foundation; either
|
|
|
|
# version 2.1 of the License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
# Lesser General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Lesser General Public
|
|
|
|
# License along with this program; if not, write to the
|
|
|
|
# Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
|
|
|
|
# Boston, MA 02110-1301, USA.
|
|
|
|
|
|
|
|
""" Class representing tests and test managers. """
|
|
|
|
|
2016-09-01 20:39:38 +00:00
|
|
|
import json
|
2013-12-31 10:45:07 +00:00
|
|
|
import os
|
2014-03-28 14:00:01 +00:00
|
|
|
import sys
|
2013-12-31 10:45:07 +00:00
|
|
|
import re
|
2016-09-02 20:37:24 +00:00
|
|
|
import copy
|
2016-11-04 21:04:37 +00:00
|
|
|
import socketserver
|
2016-09-01 20:39:38 +00:00
|
|
|
import struct
|
2013-12-31 10:45:07 +00:00
|
|
|
import time
|
2016-11-04 21:04:37 +00:00
|
|
|
from . import utils
|
2014-04-22 08:49:10 +00:00
|
|
|
import signal
|
2016-11-04 21:04:37 +00:00
|
|
|
import urllib.parse
|
2013-12-31 10:45:07 +00:00
|
|
|
import subprocess
|
2015-01-12 12:09:33 +00:00
|
|
|
import threading
|
2016-11-04 21:04:37 +00:00
|
|
|
import queue
|
|
|
|
import configparser
|
2017-02-27 15:10:49 +00:00
|
|
|
import xml
|
|
|
|
|
|
|
|
from . import reporters
|
2016-11-04 21:04:37 +00:00
|
|
|
from . import loggable
|
|
|
|
from .loggable import Loggable
|
2017-06-22 17:08:30 +00:00
|
|
|
|
|
|
|
try:
|
|
|
|
from lxml import etree as ET
|
2017-06-28 19:54:13 +00:00
|
|
|
except ImportError:
|
2017-06-22 17:08:30 +00:00
|
|
|
import xml.etree.cElementTree as ET
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2016-11-04 21:04:37 +00:00
|
|
|
from .utils import mkdir, Result, Colors, printc, DEFAULT_TIMEOUT, GST_SECOND, \
|
2016-11-16 13:47:21 +00:00
|
|
|
Protocols, look_for_file_in_source_dir, get_data_file, BackTraceGenerator, \
|
|
|
|
check_bugs_resolution
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2015-03-19 15:06:54 +00:00
|
|
|
# The factor by which we increase the hard timeout when running inside
# Valgrind (and, likewise, under gdb).
GDB_TIMEOUT_FACTOR = VALGRIND_TIMEOUT_FACTOR = 20
# Global multiplier applied to every soft timeout; overridable through the
# TIMEOUT_FACTOR environment variable (defaults to 1).
TIMEOUT_FACTOR = float(os.environ.get("TIMEOUT_FACTOR", 1))
# The error reported by valgrind when detecting errors
# (passed to valgrind via --error-exitcode).
VALGRIND_ERROR_CODE = 20

# File extension of the per-media GstValidate override files.
VALIDATE_OVERRIDE_EXTENSION = ".override"
# Negative return codes meaning the subprocess died on a core-dumping
# signal; 139 is also included (presumably 128 + SIGSEGV as reported by
# shells -- TODO confirm which caller produces it).
COREDUMP_SIGNALS = [-getattr(signal, s) for s in [
    'SIGQUIT', 'SIGILL', 'SIGABRT', 'SIGFPE', 'SIGSEGV', 'SIGBUS', 'SIGSYS',
    'SIGTRAP', 'SIGXCPU', 'SIGXFSZ', 'SIGIOT'] if hasattr(signal, s)] + [139]
|
2015-08-15 14:40:11 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-01-08 17:51:14 +00:00
|
|
|
class Test(Loggable):

    """ A class representing a particular test.

    Drives one application subprocess: builds its command line and
    environment, launches it in a thread, polls it for progress
    (process_update), and records the Result along with log files.
    """

    def __init__(self, application_name, classname, options,
                 reporter, duration=0, timeout=DEFAULT_TIMEOUT,
                 hard_timeout=None, extra_env_variables=None,
                 expected_failures=None, is_parallel=True):
        """
        @timeout: The timeout during which the value return by get_current_value
                  keeps being exactly equal
        @hard_timeout: Max time the test can take in absolute
        """
        Loggable.__init__(self)
        # Both multipliers scale the soft timeout: the TIMEOUT_FACTOR
        # environment variable and the launcher's timeout_factor option.
        self.timeout = timeout * TIMEOUT_FACTOR * options.timeout_factor
        if hard_timeout:
            self.hard_timeout = hard_timeout * TIMEOUT_FACTOR
            self.hard_timeout *= options.timeout_factor
        else:
            # Keep the caller's falsy value (None/0) meaning "no hard timeout".
            self.hard_timeout = hard_timeout
        self.classname = classname
        self.options = options
        self.application = application_name
        # Full command line, rebuilt for every run in test_start().
        self.command = []
        self.server_command = None
        self.reporter = reporter
        self.process = None
        self.proc_env = None
        self.thread = None
        self.queue = None
        self.duration = duration
        self.stack_trace = None
        # Normalize expected_failures to a list; callers may pass one item.
        if expected_failures is None:
            self.expected_failures = []
        elif not isinstance(expected_failures, list):
            self.expected_failures = [expected_failures]
        else:
            self.expected_failures = expected_failures

        # Avoid the shared-mutable-default pitfall.
        extra_env_variables = extra_env_variables or {}
        self.extra_env_variables = extra_env_variables
        self.optional = False
        self.is_parallel = is_parallel

        self.clean()

    def clean(self):
        """Reset all per-run state so the test can be executed (again)."""
        self.kill_subprocess()
        self.message = ""
        self.error_str = ""
        self.time_taken = 0.0
        self._starting_time = None
        self.result = Result.NOT_RUN
        self.logfile = None
        self.out = None
        self.extra_logfiles = []
        self.__env_variable = []
        # NOTE(review): kill_subprocess() is already called at the top of
        # this method -- this second call looks redundant; confirm.
        self.kill_subprocess()

    def __str__(self):
        # Human-readable summary: classname, result, and -- on failure --
        # the reproduction command plus the log file locations.
        string = self.classname
        if self.result != Result.NOT_RUN:
            string += ": " + self.result
            if self.result in [Result.FAILED, Result.TIMEOUT]:
                string += " '%s'\n" \
                          " You can reproduce with: %s\n" \
                    % (self.message, self.get_command_repr())

                string += self.get_logfile_repr()

        return string

    def add_env_variable(self, variable, value=None):
        """
        Only usefull so that the gst-validate-launcher can print the exact
        right command line to reproduce the tests
        """
        if value is None:
            value = os.environ.get(variable, None)

        if value is None:
            return

        self.__env_variable.append(variable)

    @property
    def _env_variable(self):
        """VAR='value' pairs (from proc_env) for the reproduction command."""
        res = ""
        for var in set(self.__env_variable):
            if res:
                res += " "
            value = self.proc_env.get(var, None)
            if value is not None:
                res += "%s='%s'" % (var, value)

        return res

    def open_logfile(self):
        """Create the per-test log destination (file, stdout or stderr)."""
        if self.out:
            # Already open; keep writing to the same destination.
            return

        # Turn "suite.case.name" into "suite/case/name" under logsdir.
        path = os.path.join(self.options.logsdir,
                            self.classname.replace(".", os.sep))
        mkdir(os.path.dirname(path))
        self.logfile = path

        if self.options.redirect_logs == 'stdout':
            self.out = sys.stdout
        elif self.options.redirect_logs == 'stderr':
            self.out = sys.stderr
        else:
            self.out = open(path, 'w+')

    def close_logfile(self):
        """Close the log file (never closes sys.stdout/stderr)."""
        if not self.options.redirect_logs:
            self.out.close()

        self.out = None

    def _get_file_content(self, file_name):
        # NOTE(review): opened with 'r+' (read/write) although only read --
        # 'r' would presumably suffice; confirm before changing.
        f = open(file_name, 'r+')
        value = f.read()
        f.close()

        return value

    def get_log_content(self):
        """Return the full text of the main log file."""
        return self._get_file_content(self.logfile)

    def get_extra_log_content(self, extralog):
        """Return the content of a registered extra log file, or ""."""
        if extralog not in self.extra_logfiles:
            return ""

        return self._get_file_content(extralog)

    def get_classname(self):
        """Everything before the last '.' of the dotted test name."""
        name = self.classname.split('.')[-1]
        classname = self.classname.replace('.%s' % name, '')

        return classname

    def get_name(self):
        """The last component of the dotted test name."""
        return self.classname.split('.')[-1]

    def add_arguments(self, *args):
        """Append command-line arguments for the subprocess."""
        self.command += args

    def build_arguments(self):
        """Hook for subclasses to extend the command line; the base only
        registers env variables worth showing in the repro command."""
        self.add_env_variable("LD_PRELOAD")
        self.add_env_variable("DISPLAY")

    def add_stack_trace_to_logfile(self):
        """Attach a backtrace of the (crashed/stuck) subprocess to the logs."""
        trace_gatherer = BackTraceGenerator.get_default()
        stack_trace = trace_gatherer.get_trace(self)

        if not stack_trace:
            return

        info = "\n\n== Stack trace: == \n%s" % stack_trace
        if self.options.redirect_logs:
            print(info)
        elif self.options.xunit_file:
            # Kept on the instance so the xunit reporter can embed it.
            self.stack_trace = stack_trace
        else:
            with open(self.logfile, 'a') as f:
                f.write(info)

    def set_result(self, result, message="", error=""):
        """Record the outcome; on TIMEOUT also gather debugging information."""
        self.debug("Setting result: %s (message: %s, error: %s)" % (result,
                   message, error))

        if result is Result.TIMEOUT:
            if self.options.debug is True:
                # Interactive debugging requested: hand control to the user.
                if self.options.gdb:
                    printc("Timeout, you should process <ctrl>c to get into gdb",
                           Colors.FAIL)
                    # and wait here until gdb exits
                    self.process.communicate()
                else:
                    pname = self.command[0]
                    input("%sTimeout happened you can attach gdb doing: $gdb %s %d%s\n"
                          "Press enter to continue" % (Colors.FAIL, pname, self.process.pid,
                                                       Colors.ENDC))
            else:
                self.add_stack_trace_to_logfile()

        self.result = result
        self.message = message
        self.error_str = error

    def check_results(self):
        """Translate the subprocess return code into a Result."""
        if self.result is Result.FAILED or self.result is Result.TIMEOUT:
            # Outcome already decided while the process was running.
            return

        self.debug("%s returncode: %s", self, self.process.returncode)
        if self.process.returncode == 0:
            self.set_result(Result.PASSED)
        elif self.process.returncode in [-signal.SIGSEGV, -signal.SIGABRT, 139]:
            # Crash: grab a backtrace before reporting the failure.
            self.add_stack_trace_to_logfile()
            self.set_result(Result.FAILED,
                            "Application segfaulted, returne code: %d" % (
                                self.process.returncode))
        elif self.process.returncode == VALGRIND_ERROR_CODE:
            self.set_result(Result.FAILED, "Valgrind reported errors")
        else:
            self.set_result(Result.FAILED,
                            "Application returned %d" % (self.process.returncode))

    def get_current_value(self):
        """
        Lets subclasses implement a nicer timeout measurement method
        They should return some value with which we will compare
        the previous and timeout if they are egual during self.timeout
        seconds
        """
        return Result.NOT_RUN

    def process_update(self):
        """
        Returns True when process has finished running or has timed out.
        """

        if self.process is None:
            # Process has not started running yet
            return False

        self.process.poll()
        if self.process.returncode is not None:
            return True

        val = self.get_current_value()

        self.debug("Got value: %s" % val)
        if val is Result.NOT_RUN:
            # The get_current_value logic is not implemented... dumb
            # timeout
            if time.time() - self.last_change_ts > self.timeout:
                self.set_result(Result.TIMEOUT,
                                "Application timed out: %s secs" %
                                self.timeout,
                                "timeout")
                return True
            return False
        elif val is Result.FAILED:
            return True
        elif val is Result.KNOWN_ERROR:
            return True

        self.log("New val %s" % val)

        if val == self.last_val:
            # No progress since last poll: soft timeout if this lasts
            # longer than self.timeout.
            delta = time.time() - self.last_change_ts
            self.debug("%s: Same value for %d/%d seconds" %
                       (self, delta, self.timeout))
            if delta > self.timeout:
                self.set_result(Result.TIMEOUT,
                                "Application timed out: %s secs" %
                                self.timeout,
                                "timeout")
                return True
        elif self.hard_timeout and time.time() - self.start_ts > self.hard_timeout:
            # Progress or not, the absolute budget is exhausted.
            self.set_result(
                Result.TIMEOUT, "Hard timeout reached: %d secs" % self.hard_timeout)
            return True
        else:
            # Progress was made: reset the soft-timeout clock.
            self.last_change_ts = time.time()
            self.last_val = val

        return False

    def get_subproc_env(self):
        """Environment for the subprocess; subclasses extend this."""
        return os.environ.copy()

    def kill_subprocess(self):
        """Terminate the subprocess (no-op when it is not running)."""
        utils.kill_subprocess(self, self.process, DEFAULT_TIMEOUT)

    def thread_wrapper(self):
        """Thread target: run the subprocess and wait for it to exit."""
        self.process = subprocess.Popen(self.command,
                                        stderr=self.out,
                                        stdout=self.out,
                                        env=self.proc_env)
        self.process.wait()
        if self.result is not Result.TIMEOUT:
            # Signal the manager that the process exited on its own
            # (the timeout path is reported elsewhere).
            self.queue.put(None)

    def get_valgrind_suppression_file(self, subdir, name):
        """Locate a suppression data file; logs and returns None if absent."""
        p = get_data_file(subdir, name)
        if p:
            return p

        self.error("Could not find any %s file" % name)

    def get_valgrind_suppressions(self):
        """Suppression files passed to valgrind; subclasses may extend."""
        return [self.get_valgrind_suppression_file('data', 'gstvalidate.supp')]

    def use_gdb(self, command):
        """Wrap @command in a non-interactive gdb run; relaxes timeouts."""
        if self.hard_timeout is not None:
            self.hard_timeout *= GDB_TIMEOUT_FACTOR
        self.timeout *= GDB_TIMEOUT_FACTOR
        return ["gdb", "-ex", "run", "-ex", "backtrace", "-ex", "quit", "--args"] + command

    def use_valgrind(self, command, subenv):
        """Wrap @command in valgrind, tuning @subenv and the timeouts."""
        vglogsfile = self.logfile + '.valgrind'
        # NOTE(review): vglogsfile is appended again below when logs are
        # not redirected -- this first append looks redundant; confirm.
        self.extra_logfiles.append(vglogsfile)

        vg_args = []

        for o, v in [('trace-children', 'yes'),
                     ('tool', 'memcheck'),
                     ('leak-check', 'full'),
                     ('leak-resolution', 'high'),
                     # TODO: errors-for-leak-kinds should be set to all instead of definite
                     # and all false positives should be added to suppression files.
                     ('errors-for-leak-kinds', 'definite'),
                     ('num-callers', '20'),
                     ('error-exitcode', str(VALGRIND_ERROR_CODE)),
                     ('gen-suppressions', 'all')]:
            vg_args.append("--%s=%s" % (o, v))

        if not self.options.redirect_logs:
            vglogsfile = self.logfile + '.valgrind'
            self.extra_logfiles.append(vglogsfile)
            vg_args.append("--%s=%s" % ('log-file', vglogsfile))

        for supp in self.get_valgrind_suppressions():
            vg_args.append("--suppressions=%s" % supp)

        command = ["valgrind"] + vg_args + command

        # Tune GLib's memory allocator to be more valgrind friendly
        subenv['G_DEBUG'] = 'gc-friendly'
        subenv['G_SLICE'] = 'always-malloc'

        # Everything is much slower under valgrind.
        if self.hard_timeout is not None:
            self.hard_timeout *= VALGRIND_TIMEOUT_FACTOR
        self.timeout *= VALGRIND_TIMEOUT_FACTOR

        # Enable 'valgrind.config'
        vg_config = get_data_file('data', 'valgrind.config')

        if self.proc_env.get('GST_VALIDATE_CONFIG'):
            # Append to any config the caller already set.
            subenv['GST_VALIDATE_CONFIG'] = '%s%s%s' % (self.proc_env['GST_VALIDATE_CONFIG'], os.pathsep, vg_config)
        else:
            subenv['GST_VALIDATE_CONFIG'] = vg_config

        if subenv == self.proc_env:
            # Register the variables so the repro command prints them.
            self.add_env_variable('G_DEBUG', 'gc-friendly')
            self.add_env_variable('G_SLICE', 'always-malloc')
            self.add_env_variable('GST_VALIDATE_CONFIG', self.proc_env['GST_VALIDATE_CONFIG'])

        return command

    def launch_server(self):
        """Hook: subclasses may start a companion server and return its
        command representation (used in the repro command)."""
        return None

    def get_logfile_repr(self):
        """Printable list of all log files associated with this run."""
        message = " Logs:\n"
        logfiles = self.extra_logfiles.copy()

        if not self.options.redirect_logs:
            logfiles.insert(0, self.logfile)

        for log in logfiles:
            message += "\n - %s" % log

        return message

    def get_command_repr(self):
        """Shell-ish one-liner reproducing this test (env + command)."""
        message = "%s %s" % (self._env_variable, ' '.join(self.command))
        if self.server_command:
            message = "%s & %s" % (self.server_command, message)

        return "'%s'" % message

    def test_start(self, queue):
        """Build the command/environment and launch the subprocess thread.

        @queue: queue.Queue the runner thread posts to on completion.
        """
        self.open_logfile()

        self.server_command = self.launch_server()
        self.queue = queue
        self.command = [self.application]
        self._starting_time = time.time()
        self.build_arguments()
        self.proc_env = self.get_subproc_env()

        # Merge per-test extra variables on top of whatever the base
        # environment already carries (pathsep-joined).
        for var, value in list(self.extra_env_variables.items()):
            value = self.proc_env.get(var, '') + os.pathsep + value
            self.proc_env[var] = value.strip(os.pathsep)
            self.add_env_variable(var, self.proc_env[var])

        if self.options.gdb:
            self.command = self.use_gdb(self.command)
        if self.options.valgrind:
            self.command = self.use_valgrind(self.command, self.proc_env)

        message = "Launching: %s%s\n" \
            "    Command: %s\n" % (Colors.ENDC, self.classname,
                                   self.get_command_repr())

        if not self.options.redirect_logs:
            message += self.get_logfile_repr()

        # Header written to the (possibly redirected) log destination.
        self.out.write("=================\n"
                       "Test name: %s\n"
                       "Command: '%s'\n"
                       "=================\n\n"
                       % (self.classname, ' '.join(self.command)))
        self.out.flush()

        printc(message, Colors.OKBLUE)

        self.thread = threading.Thread(target=self.thread_wrapper)
        self.thread.start()

        # Initialize the timeout bookkeeping used by process_update().
        self.last_val = 0
        self.last_change_ts = time.time()
        self.start_ts = time.time()

    def _dump_log_file(self, logfile):
        """Print the whole content of @logfile to stdout."""
        message = "Dumping contents of %s\n" % logfile
        printc(message, Colors.FAIL)

        with open(logfile, 'r') as fin:
            print(fin.read())

    def _dump_log_files(self):
        """Print every log file of this run (used on failure)."""
        printc("Dumping log files on failure\n", Colors.FAIL)
        self._dump_log_file(self.logfile)
        for logfile in self.extra_logfiles:
            self._dump_log_file(logfile)

    def test_end(self):
        """Stop the subprocess, report the outcome and return the Result."""
        self.kill_subprocess()
        self.thread.join()
        self.time_taken = time.time() - self._starting_time

        message = "%s: %s%s\n" % (self.classname, self.result,
                                  " (" + self.message + ")" if self.message else "")
        if not self.options.redirect_logs:
            message += self.get_logfile_repr()

        printc(message, color=utils.get_color_for_result(self.result))

        self.close_logfile()

        if self.options.dump_on_failure:
            if self.result is not Result.PASSED:
                self._dump_log_files()

        return self.result
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2016-11-04 21:04:37 +00:00
|
|
|
class GstValidateListener(socketserver.BaseRequestHandler):
    """TCP request handler receiving length-prefixed JSON messages from a
    running gst-validate subprocess and forwarding them to the Test object
    the server was tagged with (``self.server.test``)."""

    def _recv_all(self, size):
        """Read exactly `size` bytes from the connection.

        A single recv() on a TCP socket may legally return fewer bytes
        than requested, so loop until the whole payload arrived.  Returns
        b'' if the peer closes the connection before `size` bytes came in.
        """
        data = b''
        while len(data) < size:
            chunk = self.request.recv(size - len(data))
            if chunk == b'':
                return b''
            data += chunk
        return data

    def handle(self):
        """Implements BaseRequestHandler handle method"""
        while True:
            # Each message is prefixed by its length as a big-endian uint32.
            # Previously a bare recv(4)/recv(msglen) was used, which can
            # return short reads and desynchronize the stream.
            raw_len = self._recv_all(4)
            if raw_len == b'':
                return
            msglen = struct.unpack('>I', raw_len)[0]
            msg = self._recv_all(msglen).decode()
            if msg == '':
                return

            obj = json.loads(msg)
            test = getattr(self.server, "test")

            obj_type = obj.get("type", '')
            if obj_type == 'position':
                test.set_position(obj['position'], obj['duration'],
                                  obj['speed'])
            elif obj_type == 'buffering':
                test.set_position(obj['position'], 100)
            elif obj_type == 'action':
                test.add_action_execution(obj)
                # Make sure that action is taken into account when checking if process
                # is updating
                test.position += 1
            elif obj_type == 'action-done':
                # Make sure that action end is taken into account when checking if process
                # is updating
                test.position += 1
                test.actions_infos[-1]['execution-duration'] = obj['execution-duration']
            elif obj_type == 'report':
                test.add_report(obj)
|
2014-01-09 08:14:27 +00:00
|
|
|
class GstValidateTest(Test):
|
|
|
|
|
|
|
|
""" A class representing a particular test. """
|
2014-10-24 12:23:52 +00:00
|
|
|
findpos_regex = re.compile(
|
|
|
|
'.*position.*(\d+):(\d+):(\d+).(\d+).*duration.*(\d+):(\d+):(\d+).(\d+)')
|
|
|
|
findlastseek_regex = re.compile(
|
|
|
|
'seeking to.*(\d+):(\d+):(\d+).(\d+).*stop.*(\d+):(\d+):(\d+).(\d+).*rate.*(\d+)\.(\d+)')
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2014-12-08 14:27:54 +00:00
|
|
|
HARD_TIMEOUT_FACTOR = 5
|
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
    def __init__(self, application_name, classname,
                 options, reporter, duration=0,
                 timeout=DEFAULT_TIMEOUT, scenario=None, hard_timeout=None,
                 media_descriptor=None, extra_env_variables=None,
                 expected_failures=None):
        """Build a gst-validate based test.

        @scenario: Scenario object to run (None, or name "none", disables it)
        @media_descriptor: Descriptor of the media exercised by the test
        """

        # Avoid the shared-mutable-default pitfall.
        extra_env_variables = extra_env_variables or {}

        # Derive a hard timeout from the soft timeout or the media duration
        # when the caller did not provide one.
        if not hard_timeout and self.HARD_TIMEOUT_FACTOR:
            if timeout:
                hard_timeout = timeout * self.HARD_TIMEOUT_FACTOR
            elif duration:
                hard_timeout = duration * self.HARD_TIMEOUT_FACTOR
            else:
                hard_timeout = None

        # If we are running from source, use the -debug version of the
        # application which is using rpath instead of libtool's wrappers. It's
        # slightly faster to start and will not confuse valgrind.
        debug = '%s-debug' % application_name
        p = look_for_file_in_source_dir('tools', debug)
        if p:
            application_name = p

        # State updated by GstValidateListener while the subprocess runs.
        self.reports = []
        self.position = -1
        self.media_duration = -1
        self.speed = 1.0
        self.actions_infos = []
        self.media_descriptor = media_descriptor
        self.server = None

        # Point GST_VALIDATE_OVERRIDE at a media-specific override file, if any.
        override_path = self.get_override_file(media_descriptor)
        if override_path:
            if extra_env_variables:
                if extra_env_variables.get("GST_VALIDATE_OVERRIDE", ""):
                    extra_env_variables["GST_VALIDATE_OVERRIDE"] += os.path.pathsep

            # NOTE(review): this assignment overwrites any pre-existing
            # GST_VALIDATE_OVERRIDE value (the pathsep appended above is
            # lost) -- presumably "+=" was intended; confirm before changing.
            extra_env_variables["GST_VALIDATE_OVERRIDE"] = override_path

        super(GstValidateTest, self).__init__(application_name, classname,
                                              options, reporter,
                                              duration=duration,
                                              timeout=timeout,
                                              hard_timeout=hard_timeout,
                                              extra_env_variables=extra_env_variables,
                                              expected_failures=expected_failures)

        # Wall-clock time at which the scenario sent EOS (None until then);
        # set by add_action_execution(), read by get_current_value().
        self._sent_eos_time = None

        # A scenario named "none" means "no scenario".
        if scenario is None or scenario.name.lower() == "none":
            self.scenario = None
        else:
            self.scenario = scenario
|
2016-09-01 20:39:38 +00:00
|
|
|
|
|
|
|
def stop_server(self):
|
|
|
|
if self.server:
|
|
|
|
self.server.shutdown()
|
|
|
|
self.server_thread.join()
|
2016-11-30 10:40:05 +00:00
|
|
|
self.server.server_close()
|
2016-09-01 20:39:38 +00:00
|
|
|
self.server = None
|
|
|
|
|
|
|
|
    def kill_subprocess(self):
        """Kill the application subprocess, then tear down the TCP server
        it was reporting to."""
        Test.kill_subprocess(self)
        self.stop_server()
|
|
|
|
|
|
|
|
def add_report(self, report):
|
|
|
|
self.reports.append(report)
|
|
|
|
|
|
|
|
def set_position(self, position, duration, speed=None):
|
|
|
|
self.position = position
|
2016-09-02 20:37:24 +00:00
|
|
|
self.media_duration = duration
|
2016-09-01 20:39:38 +00:00
|
|
|
if speed:
|
|
|
|
self.speed = speed
|
|
|
|
|
|
|
|
def add_action_execution(self, action_infos):
|
|
|
|
if action_infos['action-type'] == 'eos':
|
2016-09-12 17:21:30 +00:00
|
|
|
self._sent_eos_time = time.time()
|
2016-09-01 20:39:38 +00:00
|
|
|
self.actions_infos.append(action_infos)
|
|
|
|
|
|
|
|
    def server_wrapper(self, ready):
        """Thread target: run the TCP server the subprocess reports to.

        @ready: threading.Event set once the server is listening, so the
                caller knows self.serverport is valid.
        """
        # Port 0 lets the OS pick a free port; the real one is read back below.
        self.server = socketserver.TCPServer(('localhost', 0), GstValidateListener)
        self.server.socket.settimeout(None)
        # Expose this test to the request handler (read as server.test).
        self.server.test = self
        self.serverport = self.server.socket.getsockname()[1]
        self.info("%s server port: %s" % (self, self.serverport))
        ready.set()

        # Blocks until stop_server() calls shutdown().
        self.server.serve_forever()
|
|
|
|
|
|
|
|
def test_start(self, queue):
|
|
|
|
ready = threading.Event()
|
|
|
|
self.server_thread = threading.Thread(target=self.server_wrapper,
|
|
|
|
kwargs={'ready': ready})
|
|
|
|
self.server_thread.start()
|
|
|
|
ready.wait()
|
|
|
|
|
|
|
|
Test.test_start(self, queue)
|
|
|
|
|
|
|
|
def test_end(self):
|
2016-09-02 20:37:24 +00:00
|
|
|
res = Test.test_end(self)
|
2016-09-01 20:39:38 +00:00
|
|
|
self.stop_server()
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2016-09-02 20:37:24 +00:00
|
|
|
return res
|
|
|
|
|
2015-08-15 14:40:11 +00:00
|
|
|
def get_override_file(self, media_descriptor):
|
|
|
|
if media_descriptor:
|
|
|
|
if media_descriptor.get_path():
|
|
|
|
override_path = os.path.splitext(media_descriptor.get_path())[0] + VALIDATE_OVERRIDE_EXTENSION
|
|
|
|
if os.path.exists(override_path):
|
|
|
|
return override_path
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
2016-09-01 20:39:38 +00:00
|
|
|
def get_current_position(self):
|
|
|
|
return self.position
|
|
|
|
|
2015-02-27 13:16:01 +00:00
|
|
|
    def get_current_value(self):
        """Progress value polled by process_update().

        Returns the last reported playback position, or a Result value when
        the outcome is already decided (no EOS 30 seconds after the
        scenario sent one).
        """
        if self.scenario:
            if self._sent_eos_time is not None:
                t = time.time()
                # Give the pipeline 30 seconds to shut down after EOS.
                if ((t - self._sent_eos_time)) > 30:
                    if self.media_descriptor.get_protocol() == Protocols.HLS:
                        # Missing EOS on HLS is a known, tolerated upstream
                        # issue -- treat it as a pass.
                        self.set_result(Result.PASSED,
                                        """Got no EOS 30 seconds after sending EOS,
                                        in HLS known and tolerated issue:
                                        https://bugzilla.gnome.org/show_bug.cgi?id=723868""")
                        return Result.KNOWN_ERROR

                    self.set_result(
                        Result.FAILED, "Pipeline did not stop 30 Seconds after sending EOS")

                    return Result.FAILED

        return self.position
|
2015-02-27 13:16:01 +00:00
|
|
|
|
2014-03-26 19:09:12 +00:00
|
|
|
def get_subproc_env(self):
|
|
|
|
subproc_env = os.environ.copy()
|
|
|
|
|
2016-09-01 20:39:38 +00:00
|
|
|
subproc_env["GST_VALIDATE_SERVER"] = "tcp://localhost:%s" % self.serverport
|
2014-04-23 09:47:10 +00:00
|
|
|
|
2015-03-02 16:32:56 +00:00
|
|
|
if 'GST_DEBUG' in os.environ and not self.options.redirect_logs:
|
2014-03-26 19:09:12 +00:00
|
|
|
gstlogsfile = self.logfile + '.gstdebug'
|
|
|
|
self.extra_logfiles.append(gstlogsfile)
|
|
|
|
subproc_env["GST_DEBUG_FILE"] = gstlogsfile
|
2015-03-02 16:32:56 +00:00
|
|
|
|
|
|
|
if self.options.no_color:
|
2014-11-27 12:48:17 +00:00
|
|
|
subproc_env["GST_DEBUG_NO_COLOR"] = '1'
|
2014-03-26 19:09:12 +00:00
|
|
|
|
2015-04-14 10:31:32 +00:00
|
|
|
# Ensure XInitThreads is called, see bgo#731525
|
|
|
|
subproc_env['GST_GL_XINITTHREADS'] = '1'
|
|
|
|
self.add_env_variable('GST_GL_XINITTHREADS', '1')
|
|
|
|
|
2015-04-16 10:02:11 +00:00
|
|
|
if self.scenario is not None:
|
2015-04-30 15:39:55 +00:00
|
|
|
scenario = self.scenario.get_execution_name()
|
|
|
|
if self.options.valgrind:
|
|
|
|
# Increase sink's max-lateness property when running inside
|
|
|
|
# Valgrind as it slows down everything quiet a lot.
|
|
|
|
scenario = "setup_sink_props_max_lateness:%s" % scenario
|
|
|
|
|
|
|
|
subproc_env["GST_VALIDATE_SCENARIO"] = scenario
|
2015-04-16 10:02:11 +00:00
|
|
|
self.add_env_variable("GST_VALIDATE_SCENARIO",
|
|
|
|
subproc_env["GST_VALIDATE_SCENARIO"])
|
|
|
|
else:
|
|
|
|
try:
|
|
|
|
del subproc_env["GST_VALIDATE_SCENARIO"]
|
|
|
|
except KeyError:
|
|
|
|
pass
|
|
|
|
|
2014-03-26 19:09:12 +00:00
|
|
|
return subproc_env
|
|
|
|
|
2015-01-15 14:32:12 +00:00
|
|
|
def clean(self):
|
|
|
|
Test.clean(self)
|
2016-09-12 17:21:30 +00:00
|
|
|
self._sent_eos_time = None
|
2016-09-02 20:37:24 +00:00
|
|
|
self.reports = []
|
|
|
|
self.position = -1
|
|
|
|
self.media_duration = -1
|
|
|
|
self.speed = 1.0
|
|
|
|
self.actions_infos = []
|
2014-02-19 12:07:03 +00:00
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
def build_arguments(self):
|
2015-02-27 23:20:43 +00:00
|
|
|
super(GstValidateTest, self).build_arguments()
|
2014-08-11 11:19:22 +00:00
|
|
|
if "GST_VALIDATE" in os.environ:
|
|
|
|
self.add_env_variable("GST_VALIDATE", os.environ["GST_VALIDATE"])
|
|
|
|
|
|
|
|
if "GST_VALIDATE_SCENARIOS_PATH" in os.environ:
|
|
|
|
self.add_env_variable("GST_VALIDATE_SCENARIOS_PATH",
|
|
|
|
os.environ["GST_VALIDATE_SCENARIOS_PATH"])
|
2015-02-27 13:18:04 +00:00
|
|
|
|
2015-02-27 23:20:43 +00:00
|
|
|
self.add_env_variable("GST_VALIDATE_CONFIG")
|
|
|
|
self.add_env_variable("GST_VALIDATE_OVERRIDE")
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2014-04-23 09:47:10 +00:00
|
|
|
def get_extra_log_content(self, extralog):
|
2014-04-26 07:16:26 +00:00
|
|
|
value = Test.get_extra_log_content(self, extralog)
|
2014-04-23 09:47:10 +00:00
|
|
|
|
2014-04-26 07:16:26 +00:00
|
|
|
return value
|
2014-04-23 09:47:10 +00:00
|
|
|
|
2016-09-02 20:37:24 +00:00
|
|
|
def report_matches_expected_failure(self, report, expected_failure):
|
2016-11-24 13:29:53 +00:00
|
|
|
for key in ['bug', 'bugs', 'sometimes']:
|
2016-09-02 20:37:24 +00:00
|
|
|
if key in expected_failure:
|
|
|
|
del expected_failure[key]
|
2016-11-04 21:04:37 +00:00
|
|
|
for key, value in list(report.items()):
|
2016-09-02 20:37:24 +00:00
|
|
|
if key in expected_failure:
|
2016-11-29 17:47:35 +00:00
|
|
|
if not re.findall(expected_failure[key], str(value)):
|
2016-09-02 20:37:24 +00:00
|
|
|
return False
|
|
|
|
expected_failure.pop(key)
|
|
|
|
|
|
|
|
return not bool(expected_failure)
|
|
|
|
|
|
|
|
    def check_reported_issues(self):
        """Cross-check subprocess reports against the declared expected failures.

        Returns a 3-tuple ``(criticals, not_found_expected_failures,
        expected_retcode)``:
          - criticals: summaries of critical reports that were NOT expected,
            or None when there are none;
          - not_found_expected_failures: expected failures that no report
            matched (caller decides whether those are mandatory);
          - expected_retcode: list of acceptable subprocess return codes.
        """
        ret = []
        # Work on a deep copy: matching consumes entries.
        expected_failures = copy.deepcopy(self.expected_failures)
        expected_retcode = [0]
        for report in self.reports:
            found = None
            for expected_failure in expected_failures:
                # .copy() because report_matches_expected_failure mutates its arg.
                if self.report_matches_expected_failure(report,
                                                        expected_failure.copy()):
                    found = expected_failure
                    break

            if found is not None:
                expected_failures.remove(found)
                if report['level'] == 'critical':
                    # An expected critical makes gst-validate exit with 18.
                    # 'sometimes' means both 0 and 18 are acceptable.
                    if found.get('sometimes') and isinstance(expected_retcode, list):
                        expected_retcode.append(18)
                    else:
                        expected_retcode = [18]
            elif report['level'] == 'critical':
                # Unexpected critical issue: that is a real failure.
                ret.append(report['summary'])

        if not ret:
            return None, expected_failures, expected_retcode

        return ret, expected_failures, expected_retcode
|
|
2016-11-26 13:25:43 +00:00
|
|
|
    def check_expected_timeout(self, expected_timeout):
        """Decide whether a timeout that occurred matches the expected one.

        @expected_timeout may constrain the timeout 'message' (regex matched
        against self.message) and 'stacktrace_symbols' that must appear in a
        stack trace of the hung process.  Returns ``(result, msg)`` where
        result is Result.PASSED when everything matches.
        """
        msg = "Expected timeout happened. "
        result = Result.PASSED
        message = expected_timeout.get('message')
        if message:
            if not re.findall(message, self.message):
                result = Result.FAILED
                msg = "Expected timeout message: %s got %s " % (
                    message, self.message)

        expected_symbols = expected_timeout.get('stacktrace_symbols')
        if expected_symbols:
            trace_gatherer = BackTraceGenerator.get_default()
            stack_trace = trace_gatherer.get_trace(self)

            if stack_trace:
                # Accept a single symbol as well as a list of symbols.
                if not isinstance(expected_symbols, list):
                    expected_symbols = [expected_symbols]

                not_found_symbols = [s for s in expected_symbols
                                     if s not in stack_trace]
                if not_found_symbols:
                    # Keep the TIMEOUT verdict: the hang is not the expected one.
                    result = Result.TIMEOUT
                    msg = "Expected symbols '%s' not found in stack trace " % (
                        not_found_symbols)
            else:
                # Best effort only: no trace means we cannot verify, not fail.
                msg += "No stack trace available, could not verify symbols "

        return result, msg
|
2014-01-09 08:28:02 +00:00
|
|
|
def check_results(self):
|
2016-11-26 13:25:43 +00:00
|
|
|
if self.result in [Result.FAILED, self.result is Result.PASSED]:
|
2014-01-09 08:28:02 +00:00
|
|
|
return
|
|
|
|
|
2017-02-07 16:12:09 +00:00
|
|
|
for report in self.reports:
|
|
|
|
if report.get('issue-id') == 'runtime::missing-plugin':
|
|
|
|
self.set_result(Result.SKIPPED, "%s\n%s" % (report['summary'],
|
|
|
|
report['details']))
|
|
|
|
return
|
|
|
|
|
2014-01-09 08:28:02 +00:00
|
|
|
self.debug("%s returncode: %s", self, self.process.returncode)
|
2015-03-04 16:30:41 +00:00
|
|
|
|
2016-09-02 20:37:24 +00:00
|
|
|
criticals, not_found_expected_failures, expected_returncode = self.check_reported_issues()
|
|
|
|
|
2016-11-26 13:25:43 +00:00
|
|
|
expected_timeout = None
|
2016-09-02 20:37:24 +00:00
|
|
|
for i, f in enumerate(not_found_expected_failures):
|
|
|
|
if len(f) == 1 and f.get("returncode"):
|
|
|
|
returncode = f['returncode']
|
|
|
|
if not isinstance(expected_returncode, list):
|
|
|
|
returncode = [expected_returncode]
|
|
|
|
if 'sometimes' in f:
|
|
|
|
returncode.append(0)
|
2016-11-26 13:25:43 +00:00
|
|
|
elif f.get("timeout"):
|
|
|
|
expected_timeout = f
|
2016-09-02 20:37:24 +00:00
|
|
|
|
|
|
|
not_found_expected_failures = [f for f in not_found_expected_failures
|
|
|
|
if not f.get('returncode')]
|
|
|
|
|
|
|
|
msg = ""
|
|
|
|
result = Result.PASSED
|
2016-11-26 13:25:43 +00:00
|
|
|
if self.result == Result.TIMEOUT:
|
|
|
|
if expected_timeout:
|
|
|
|
not_found_expected_failures.remove(expected_timeout)
|
|
|
|
result, msg = self.check_expected_timeout(expected_timeout)
|
|
|
|
else:
|
|
|
|
return
|
|
|
|
elif self.process.returncode in COREDUMP_SIGNALS:
|
2016-09-02 20:37:24 +00:00
|
|
|
result = Result.FAILED
|
|
|
|
msg = "Application segfaulted "
|
2016-11-07 20:20:09 +00:00
|
|
|
self.add_stack_trace_to_logfile()
|
2015-03-23 15:19:49 +00:00
|
|
|
elif self.process.returncode == VALGRIND_ERROR_CODE:
|
2016-09-02 20:37:24 +00:00
|
|
|
msg = "Valgrind reported errors "
|
|
|
|
result = Result.FAILED
|
|
|
|
elif self.process.returncode not in expected_returncode:
|
|
|
|
msg = "Application returned %s " % self.process.returncode
|
2016-09-21 17:10:53 +00:00
|
|
|
if expected_returncode != [0]:
|
2016-09-02 20:37:24 +00:00
|
|
|
msg += "(expected %s) " % expected_returncode
|
|
|
|
result = Result.FAILED
|
|
|
|
|
|
|
|
if criticals:
|
|
|
|
msg += "(critical errors: [%s]) " % ', '.join(criticals)
|
|
|
|
result = Result.FAILED
|
|
|
|
|
|
|
|
if not_found_expected_failures:
|
|
|
|
mandatory_failures = [f for f in not_found_expected_failures
|
|
|
|
if not f.get('sometimes')]
|
|
|
|
|
|
|
|
if mandatory_failures:
|
|
|
|
msg += "(Expected errors not found: %s) " % mandatory_failures
|
|
|
|
result = Result.FAILED
|
|
|
|
elif self.expected_failures:
|
|
|
|
msg += '%s(Expected errors occured: %s)%s' % (Colors.OKBLUE,
|
|
|
|
self.expected_failures,
|
|
|
|
Colors.ENDC)
|
|
|
|
|
|
|
|
self.set_result(result, msg.strip())
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2015-04-20 08:53:29 +00:00
|
|
|
def get_valgrind_suppressions(self):
|
|
|
|
result = super(GstValidateTest, self).get_valgrind_suppressions()
|
2017-02-03 14:02:49 +00:00
|
|
|
gst_sup = self.get_valgrind_suppression_file('common', 'gst.supp')
|
|
|
|
if gst_sup:
|
2017-02-08 20:46:23 +00:00
|
|
|
result.append(gst_sup)
|
2017-02-03 14:02:49 +00:00
|
|
|
return result
|
2015-03-19 16:22:26 +00:00
|
|
|
|
2014-01-30 11:42:25 +00:00
|
|
|
|
2014-07-16 10:03:14 +00:00
|
|
|
class GstValidateEncodingTestInterface(object):

    """Mixin adding helpers to verify the output file of an encoding test.

    Provides encoding-profile string construction and post-run checks of the
    encoded file (duration within tolerance, caps of every track).
    """

    # Allowed deviation between the source and the encoded duration.
    DURATION_TOLERANCE = GST_SECOND / 4

    def __init__(self, combination, media_descriptor, duration_tolerance=None):
        super(GstValidateEncodingTestInterface, self).__init__()

        self.media_descriptor = media_descriptor
        self.combination = combination
        self.dest_file = ""

        self._duration_tolerance = duration_tolerance
        if duration_tolerance is None:
            self._duration_tolerance = self.DURATION_TOLERANCE

    def get_current_size(self):
        """Return the current size of the encoded file, or None if unreadable."""
        try:
            size = os.stat(urllib.parse.urlparse(self.dest_file).path).st_size
        except OSError:
            # File not created yet (or already removed): best effort.
            return None

        self.debug("Size: %s" % size)
        return size

    def _get_profile_full(self, muxer, venc, aenc, video_restriction=None,
                          audio_restriction=None, audio_presence=0,
                          video_presence=0):
        """Assemble an encodebin profile string: muxer:[restr->]venc[|n]:[restr->]aenc[|n]."""
        ret = ""
        if muxer:
            ret += muxer
        ret += ":"
        if venc:
            if video_restriction is not None:
                ret = ret + video_restriction + '->'
            ret += venc
            if video_presence:
                ret = ret + '|' + str(video_presence)
        if aenc:
            ret += ":"
            if audio_restriction is not None:
                ret = ret + audio_restriction + '->'
            ret += aenc
            if audio_presence:
                ret = ret + '|' + str(audio_presence)

        # Collapse separators left by absent components.
        return ret.replace("::", ":")

    def get_profile(self, video_restriction=None, audio_restriction=None):
        """Build the profile string, dropping caps for track types the media lacks."""
        vcaps = self.combination.get_video_caps()
        acaps = self.combination.get_audio_caps()
        if self.media_descriptor is not None:
            if self.media_descriptor.get_num_tracks("video") == 0:
                vcaps = None

            if self.media_descriptor.get_num_tracks("audio") == 0:
                acaps = None

        return self._get_profile_full(self.combination.get_muxer_caps(),
                                      vcaps, acaps,
                                      video_restriction=video_restriction,
                                      audio_restriction=audio_restriction)

    def _clean_caps(self, caps):
        """
        Returns a list of key=value or structure name, without "(types)" or ";" or ","
        """
        return re.sub(r"\(.+?\)\s*| |;", '', caps).split(',')

    def _has_caps_type_variant(self, c, ccaps):
        """
        Handle situations where we can have application/ogg or video/ogg or
        audio/ogg
        """
        has_variant = False
        media_type = re.findall("application/|video/|audio/", c)
        if media_type:
            media_type = media_type[0].replace('/', '')
            possible_mtypes = ["application", "video", "audio"]
            possible_mtypes.remove(media_type)
            for tmptype in possible_mtypes:
                possible_c_variant = c.replace(media_type, tmptype)
                if possible_c_variant in ccaps:
                    self.info(
                        "Found %s in %s, good enough!", possible_c_variant, ccaps)
                    has_variant = True

        return has_variant

    def check_encoded_file(self):
        """Verify the encoded file: discoverable, duration in tolerance, caps match.

        Returns a ``(Result, message)`` tuple and removes the encoded file.
        """
        result_descriptor = GstValidateMediaDescriptor.new_from_uri(
            self.dest_file)
        if result_descriptor is None:
            return (Result.FAILED, "Could not discover encoded file %s"
                    % self.dest_file)

        duration = result_descriptor.get_duration()
        orig_duration = self.media_descriptor.get_duration()
        tolerance = self._duration_tolerance

        # FIX: original condition `orig - tol >= duration <= orig + tol`
        # only flagged too-short files; too-long encodes passed silently.
        # Fail whenever the duration is outside [orig - tol, orig + tol].
        if not (orig_duration - tolerance <= duration <= orig_duration + tolerance):
            os.remove(result_descriptor.get_path())
            return (Result.FAILED, "Duration of encoded file is "
                    " wrong (%s instead of %s)" %
                    (utils.TIME_ARGS(duration),
                     utils.TIME_ARGS(orig_duration)))
        else:
            all_tracks_caps = result_descriptor.get_tracks_caps()
            container_caps = result_descriptor.get_caps()
            if container_caps:
                all_tracks_caps.insert(0, ("container", container_caps))

            for track_type, caps in all_tracks_caps:
                ccaps = self._clean_caps(caps)
                wanted_caps = self.combination.get_caps(track_type)
                cwanted_caps = self._clean_caps(wanted_caps)

                if wanted_caps is None:
                    os.remove(result_descriptor.get_path())
                    return (Result.FAILED,
                            "Found a track of type %s in the encoded files"
                            " but none where wanted in the encoded profile: %s"
                            % (track_type, self.combination))

                for c in cwanted_caps:
                    if c not in ccaps:
                        if not self._has_caps_type_variant(c, ccaps):
                            os.remove(result_descriptor.get_path())
                            return (Result.FAILED,
                                    "Field: %s (from %s) not in caps of the outputed file %s"
                                    % (wanted_caps, c, ccaps))

            os.remove(result_descriptor.get_path())
            return (Result.PASSED, "")
|
2014-01-09 15:57:54 +00:00
|
|
|
class TestsManager(Loggable):

    """ A class responsible for managing tests. """

    # Identifier used by testsuites to select this manager.
    name = "base"

    def __init__(self):

        Loggable.__init__(self)

        self.tests = []                       # tests selected for execution
        self.unwanted_tests = []              # filtered-out tests (kept for listing)
        self.options = None
        self.args = None
        self.reporter = None
        self.wanted_tests_patterns = []
        self.blacklisted_tests_patterns = []
        self._generators = []
        self.queue = queue.Queue()            # wake-ups from running tests
        self.jobs = []                        # currently running tests
        self.total_num_tests = 0
        self.starting_test_num = 0
        self.check_testslist = True
        self.all_tests = None
        self.expected_failures = {}           # compiled-regex -> failure dicts
        self.blacklisted_tests = []           # (test-name, bug-url) tuples

    def init(self):
        """Return True when the manager is usable in this environment."""
        return True

    def list_tests(self):
        """Return all selected tests, sorted by classname."""
        return sorted(list(self.tests), key=lambda x: x.classname)

    def add_expected_issues(self, expected_failures):
        """Register expected failures, applying them to already-added tests.

        @expected_failures maps a test-name regex to a list of failure dicts.
        """
        expected_failures_re = {}
        for test_name_regex, failures in list(expected_failures.items()):
            regex = re.compile(test_name_regex)
            expected_failures_re[regex] = failures
            for test in self.tests:
                if regex.findall(test.classname):
                    test.expected_failures.extend(failures)

        self.expected_failures.update(expected_failures_re)

    def add_test(self, test):
        """Attach matching expected failures to @test and file it as wanted or not."""
        for regex, failures in list(self.expected_failures.items()):
            if regex.findall(test.classname):
                test.expected_failures.extend(failures)

        if self._is_test_wanted(test):
            if test not in self.tests:
                self.tests.append(test)
                self.tests.sort(key=lambda test: test.classname)
        else:
            if test not in self.tests:
                self.unwanted_tests.append(test)
                self.unwanted_tests.sort(key=lambda test: test.classname)

    def get_tests(self):
        return self.tests

    def populate_testsuite(self):
        """Hook for subclasses to create their tests."""
        pass

    def add_generators(self, generators):
        """
        @generators: A list of, or one single #TestsGenerator to be used to generate tests
        """
        if isinstance(generators, list):
            self._generators.extend(generators)
        else:
            self._generators.append(generators)

        # Deduplicate while keeping a plain list.
        self._generators = list(set(self._generators))

    def get_generators(self):
        return self._generators

    def _add_blacklist(self, blacklisted_tests):
        """Compile comma-separated blacklist pattern strings into regexes."""
        if not isinstance(blacklisted_tests, list):
            blacklisted_tests = [blacklisted_tests]

        for patterns in blacklisted_tests:
            for pattern in patterns.split(","):
                self.blacklisted_tests_patterns.append(re.compile(pattern))

    def set_default_blacklist(self, default_blacklist):
        """Extend the hardcoded blacklist with (name, bug) tuples."""
        self.blacklisted_tests += default_blacklist

    def add_options(self, parser):
        """ Add more arguments. """
        pass

    def set_settings(self, options, args, reporter):
        """ Set properties after options parsing. """
        self.options = options
        self.args = args
        self.reporter = reporter

        self.populate_testsuite()

        if self.options.valgrind:
            self.print_valgrind_bugs()

        if options.wanted_tests:
            for patterns in options.wanted_tests:
                for pattern in patterns.split(","):
                    self.wanted_tests_patterns.append(re.compile(pattern))

        if options.blacklisted_tests:
            for patterns in options.blacklisted_tests:
                self._add_blacklist(patterns)

    def set_blacklists(self):
        """Activate the hardcoded blacklist, optionally checking bug status.

        Returns False when bug-status checking rejects the blacklist.
        """
        if self.blacklisted_tests:
            printc("\nCurrently 'hardcoded' %s blacklisted tests:" %
                   self.name, Colors.WARNING, title_char='-')

        if self.options.check_bugs_status:
            if not check_bugs_resolution(self.blacklisted_tests):
                return False

        for name, bug in self.blacklisted_tests:
            self._add_blacklist(name)
            if not self.options.check_bugs_status:
                print(" + %s \n --> bug: %s\n" % (name, bug))

        return True

    def check_expected_failures(self):
        """Verify that every expected failure references a still-open bug."""
        if not self.expected_failures or not self.options.check_bugs_status:
            return True

        if self.expected_failures:
            printc("\nCurrently known failures in the %s testsuite:"
                   % self.name, Colors.WARNING, title_char='-')

        bugs_definitions = {}
        for regex, failures in list(self.expected_failures.items()):
            for failure in failures:
                bugs = failure.get('bug')
                if not bugs:
                    bugs = failure.get('bugs')
                if not bugs:
                    printc('+ %s:\n --> no bug reported associated with %s\n' % (
                        regex.pattern, failure), Colors.WARNING)
                    continue

                if not isinstance(bugs, list):
                    bugs = [bugs]
                cbugs = bugs_definitions.get(regex.pattern, [])
                # FIX: the original iterated `bugs` itself here
                # (`[b for b in bugs if b not in cbugs]`), duplicating its own
                # entries instead of merging the previously recorded ones.
                bugs.extend([b for b in cbugs if b not in bugs])
                bugs_definitions[regex.pattern] = bugs

        return check_bugs_resolution(bugs_definitions.items())

    def _check_blacklisted(self, test):
        for pattern in self.blacklisted_tests_patterns:
            if pattern.findall(test.classname):
                self.info("%s is blacklisted by %s", test.classname, pattern)
                return True

        return False

    def _check_whitelisted(self, test):
        for pattern in self.wanted_tests_patterns:
            if pattern.findall(test.classname):
                if self._check_blacklisted(test):
                    # If explicitly white listed that specific test
                    # bypass the blacklisting
                    if pattern.pattern != test.classname:
                        return False
                return True
        return False

    def _check_duration(self, test):
        """Reject tests longer than the configured long-test limit."""
        if test.duration > 0 and int(self.options.long_limit) < int(test.duration):
            self.info("Not activating %s as its duration (%d) is superior"
                      " than the long limit (%d)" % (test, test.duration,
                                                     int(self.options.long_limit)))
            return False

        return True

    def _is_test_wanted(self, test):
        """Combine whitelist, blacklist and duration checks into one verdict."""
        if self._check_whitelisted(test):
            if not self._check_duration(test):
                return False
            return True

        if self._check_blacklisted(test):
            return False

        if not self._check_duration(test):
            return False

        # With an explicit whitelist, anything not whitelisted is unwanted.
        if not self.wanted_tests_patterns:
            return True

        return False

    def test_wait(self):
        """Block until one of the running jobs finishes and return it."""
        while True:
            # Check process every second for timeout
            try:
                self.queue.get(timeout=1)
            except queue.Empty:
                pass

            for test in self.jobs:
                if test.process_update():
                    self.jobs.remove(test)
                    return test

    def tests_wait(self):
        """Wait for a test to finish; on Ctrl-C kill all jobs and re-raise."""
        try:
            test = self.test_wait()
            test.check_results()
        except KeyboardInterrupt:
            for test in self.jobs:
                test.kill_subprocess()
            raise

        return test

    def start_new_job(self, tests_left):
        """Pop and start the next test; return False when none are left."""
        try:
            test = tests_left.pop(0)
        except IndexError:
            return False

        self.print_test_num(test)
        test.test_start(self.queue)

        self.jobs.append(test)

        return True

    def run_tests(self, starting_test_num, total_num_tests):
        """Run all selected tests, parallel ones first, then serial-only ones.

        Returns the first non-PASSED result when --forever/--fatal-error is
        set, Result.PASSED otherwise.
        """
        self.total_num_tests = total_num_tests
        self.starting_test_num = starting_test_num

        # Split tests into those that can run in parallel and those that must
        # run alone (num_jobs forced to 1 for the latter batch).
        alone_tests = []
        tests = []
        for test in self.tests:
            if test.is_parallel:
                tests.append(test)
            else:
                alone_tests.append(test)

        max_num_jobs = min(self.options.num_jobs, len(tests))
        jobs_running = 0

        for num_jobs, tests in [(max_num_jobs, tests), (1, alone_tests)]:
            tests_left = list(tests)
            for i in range(num_jobs):
                if not self.start_new_job(tests_left):
                    break
                jobs_running += 1

            while jobs_running != 0:
                test = self.tests_wait()
                jobs_running -= 1
                self.print_test_num(test)
                res = test.test_end()
                self.reporter.after_test(test)
                if res != Result.PASSED and (self.options.forever or
                                             self.options.fatal_error):
                    return test.result
                if self.start_new_job(tests_left):
                    jobs_running += 1

        return Result.PASSED

    def print_test_num(self, test):
        """Print the "[n / total] " progress prefix for @test."""
        cur_test_num = self.starting_test_num + self.tests.index(test) + 1
        sys.stdout.write("[%d / %d] " % (cur_test_num, self.total_num_tests))

    def clean_tests(self):
        for test in self.tests:
            test.clean()

    def needs_http_server(self):
        return False

    def print_valgrind_bugs(self):
        """Hook: print known valgrind-related bugs for this manager."""
        pass
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
class TestsGenerator(Loggable):

    """Base class generating Test instances for a TestsManager.

    Tests are kept keyed by classname so re-adding a test replaces it.
    """

    def __init__(self, name, test_manager, tests=None):
        Loggable.__init__(self)
        self.name = name
        self.test_manager = test_manager
        self._tests = {}
        # FIX: the default used to be a shared mutable list (`tests=[]`).
        for test in (tests or []):
            self._tests[test.classname] = test

    def generate_tests(self, *args):
        """
        Method that generates tests
        """
        return list(self._tests.values())

    def add_test(self, test):
        self._tests[test.classname] = test
|
|
|
|
|
class GstValidateTestsGenerator(TestsGenerator):

    """TestsGenerator whose tests are built from media-info and scenario data."""

    def populate_tests(self, uri_minfo_special_scenarios, scenarios):
        # Hook for subclasses: register tests with add_test(); base does nothing.
        pass

    def generate_tests(self, uri_minfo_special_scenarios, scenarios):
        """Populate from the given media/scenario data, then return all tests."""
        self.populate_tests(uri_minfo_special_scenarios, scenarios)
        return super(GstValidateTestsGenerator, self).generate_tests()
|
|
class _TestsLauncher(Loggable):
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-08-11 18:19:02 +00:00
|
|
|
    def __init__(self, libsdir):
        """Create the launcher and discover all available testers.

        libsdir: directory containing the launcher's bundled "apps".
        """

        Loggable.__init__(self)

        self.libsdir = libsdir
        self.options = None          # parsed CLI options, set later
        self.testers = []            # TestsManager instances whose init() succeeded
        self.tests = []
        self.reporter = None
        # Side effect: execs app files and fills self.testers.
        self._list_testers()
        self.all_tests = None
        self.wanted_tests_patterns = []
|
|
2014-10-24 12:23:52 +00:00
|
|
|
def _list_app_dirs(self):
|
2014-10-23 19:36:03 +00:00
|
|
|
app_dirs = []
|
2014-10-24 12:23:52 +00:00
|
|
|
app_dirs.append(os.path.join(self.libsdir, "apps"))
|
2014-10-23 19:36:03 +00:00
|
|
|
env_dirs = os.environ.get("GST_VALIDATE_APPS_DIR")
|
|
|
|
if env_dirs is not None:
|
|
|
|
for dir_ in env_dirs.split(":"):
|
2014-10-24 12:23:52 +00:00
|
|
|
app_dirs.append(dir_)
|
2015-02-04 14:27:37 +00:00
|
|
|
sys.path.append(dir_)
|
2014-10-23 19:36:03 +00:00
|
|
|
|
|
|
|
return app_dirs
|
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
def _exec_app(self, app_dir, env):
|
2014-11-15 17:08:42 +00:00
|
|
|
try:
|
|
|
|
files = os.listdir(app_dir)
|
|
|
|
except OSError as e:
|
|
|
|
self.debug("Could not list %s: %s" % (app_dir, e))
|
|
|
|
files = []
|
|
|
|
for f in files:
|
2014-10-23 19:36:03 +00:00
|
|
|
if f.endswith(".py"):
|
2016-11-04 21:04:37 +00:00
|
|
|
exec(compile(open(os.path.join(app_dir, f)).read(), os.path.join(app_dir, f), 'exec'), env)
|
2014-10-23 19:36:03 +00:00
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
def _exec_apps(self, env):
|
2014-10-23 19:36:03 +00:00
|
|
|
app_dirs = self._list_app_dirs()
|
|
|
|
for app_dir in app_dirs:
|
|
|
|
self._exec_app(app_dir, env)
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def _list_testers(self):
|
|
|
|
env = globals().copy()
|
2014-10-23 19:36:03 +00:00
|
|
|
self._exec_apps(env)
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-04-30 13:40:10 +00:00
|
|
|
testers = [i() for i in utils.get_subclasses(TestsManager, env)]
|
2014-01-09 14:17:53 +00:00
|
|
|
for tester in testers:
|
|
|
|
if tester.init() is True:
|
|
|
|
self.testers.append(tester)
|
|
|
|
else:
|
2014-01-10 09:27:25 +00:00
|
|
|
self.warning("Can not init tester: %s -- PATH is %s"
|
|
|
|
% (tester.name, os.environ["PATH"]))
|
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
def add_options(self, parser):
|
|
|
|
for tester in self.testers:
|
2014-04-30 13:40:10 +00:00
|
|
|
tester.add_options(parser)
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2016-12-22 13:08:21 +00:00
|
|
|
def _load_testsuite(self, testsuites):
|
|
|
|
exceptions = []
|
|
|
|
for testsuite in testsuites:
|
2014-11-28 23:03:04 +00:00
|
|
|
try:
|
|
|
|
sys.path.insert(0, os.path.dirname(testsuite))
|
2016-12-22 13:08:21 +00:00
|
|
|
return (__import__(os.path.basename(testsuite).replace(".py", "")), None)
|
2014-11-28 23:03:04 +00:00
|
|
|
except Exception as e:
|
2016-12-22 13:08:21 +00:00
|
|
|
exceptions.append("Could not load %s: %s" % (testsuite, e))
|
2014-11-28 23:03:04 +00:00
|
|
|
continue
|
|
|
|
finally:
|
|
|
|
sys.path.remove(os.path.dirname(testsuite))
|
|
|
|
|
2016-12-22 13:08:21 +00:00
|
|
|
return (None, exceptions)
|
|
|
|
|
|
|
|
def _load_testsuites(self):
|
|
|
|
testsuites = []
|
|
|
|
for testsuite in self.options.testsuites:
|
2017-06-16 21:31:19 +00:00
|
|
|
if os.path.exists(testsuite):
|
|
|
|
testsuite = os.path.abspath(os.path.expanduser(testsuite))
|
2016-12-22 13:08:21 +00:00
|
|
|
loaded_module = self._load_testsuite([testsuite])
|
|
|
|
else:
|
|
|
|
possible_testsuites_paths = [os.path.join(d, testsuite + ".py")
|
|
|
|
for d in self.options.testsuites_dirs]
|
|
|
|
loaded_module = self._load_testsuite(possible_testsuites_paths)
|
|
|
|
|
|
|
|
module = loaded_module[0]
|
|
|
|
if not loaded_module[0]:
|
|
|
|
printc("Could not load testsuite: %s, reasons: %s" % (
|
|
|
|
testsuite, loaded_module[1]), Colors.FAIL)
|
|
|
|
continue
|
|
|
|
|
2014-11-28 23:03:04 +00:00
|
|
|
testsuites.append(module)
|
|
|
|
if not hasattr(module, "TEST_MANAGER"):
|
|
|
|
module.TEST_MANAGER = [tester.name for tester in self.testers]
|
|
|
|
elif not isinstance(module.TEST_MANAGER, list):
|
|
|
|
module.TEST_MANAGER = [module.TEST_MANAGER]
|
|
|
|
|
|
|
|
self.options.testsuites = testsuites
|
|
|
|
|
|
|
|
def _setup_testsuites(self):
|
|
|
|
for testsuite in self.options.testsuites:
|
|
|
|
loaded = False
|
|
|
|
wanted_test_manager = None
|
|
|
|
if hasattr(testsuite, "TEST_MANAGER"):
|
|
|
|
wanted_test_manager = testsuite.TEST_MANAGER
|
|
|
|
if not isinstance(wanted_test_manager, list):
|
|
|
|
wanted_test_manager = [wanted_test_manager]
|
|
|
|
|
|
|
|
for tester in self.testers:
|
|
|
|
if wanted_test_manager is not None and \
|
|
|
|
tester.name not in wanted_test_manager:
|
|
|
|
continue
|
|
|
|
|
2016-03-23 19:02:47 +00:00
|
|
|
if self.options.user_paths:
|
2015-07-17 07:45:35 +00:00
|
|
|
tester.register_defaults()
|
|
|
|
loaded = True
|
|
|
|
elif testsuite.setup_tests(tester, self.options):
|
2014-11-28 23:03:04 +00:00
|
|
|
loaded = True
|
|
|
|
|
|
|
|
if not loaded:
|
|
|
|
printc("Could not load testsuite: %s"
|
|
|
|
" maybe because of missing TestManager"
|
|
|
|
% (testsuite), Colors.FAIL)
|
2016-09-05 15:16:59 +00:00
|
|
|
return False
|
2014-11-28 23:03:04 +00:00
|
|
|
|
2014-12-08 11:42:51 +00:00
|
|
|
def _load_config(self, options):
    """Execute a legacy config file inside this module's global namespace.

    DEPRECATED in favor of the testsuite format; kept for backward
    compatibility with old config-file based setups.
    """
    printc("Loading config files is DEPRECATED"
           " you should use the new testsuite format now",)

    # Expose each tester (by name) and the options object as module
    # globals so the executed config script can reference them directly.
    for tester in self.testers:
        tester.options = options
        globals()[tester.name] = tester
    globals()["options"] = options
    # Temporarily point __file__ at the config so the script can locate
    # resources relative to its own path; restored afterwards.
    c__file__ = __file__
    globals()["__file__"] = self.options.config
    # NOTE(review): exec of an arbitrary file path -- acceptable for a
    # user-supplied local config, but never point this at untrusted input.
    exec(compile(open(self.options.config).read(), self.options.config, 'exec'), globals())
    globals()["__file__"] = c__file__
2013-12-31 10:45:07 +00:00
|
|
|
def set_settings(self, options, args):
    """Apply parsed command-line options to this manager and its testers.

    Chooses the reporter (xunit vs plain), restricts self.testers to any
    tester names present in `args`, loads the legacy config and/or
    testsuites, then lets every tester validate its blacklists and
    expected failures.

    Returns False on any setup failure, True otherwise.
    """
    # Select the reporter implementation based on whether an xunit
    # output file was requested.
    if options.xunit_file:
        self.reporter = reporters.XunitReporter(options)
    else:
        self.reporter = reporters.Reporter(options)

    self.options = options
    wanted_testers = None
    for tester in self.testers:
        if tester.name in args:
            wanted_testers = tester.name

    # If any tester was named on the command line, keep only the named
    # ones and consume their names from `args`.
    if wanted_testers:
        testers = self.testers
        self.testers = []
        for tester in testers:
            if tester.name in args:
                self.testers.append(tester)
                args.remove(tester.name)

    if options.config:
        self._load_config(options)

    self._load_testsuites()

    for tester in self.testers:
        tester.set_settings(options, args, self.reporter)

    # Testsuites configure the testers themselves unless a legacy config
    # file already did so.
    if not options.config and options.testsuites:
        if self._setup_testsuites() is False:
            return False

    for tester in self.testers:
        if not tester.set_blacklists():
            return False

        if not tester.check_expected_failures():
            return False

    return True
2014-11-28 23:03:04 +00:00
|
|
|
|
2015-03-14 15:40:17 +00:00
|
|
|
def _check_tester_has_other_testsuite(self, testsuite, tester):
|
2014-11-29 12:43:06 +00:00
|
|
|
if tester.name != testsuite.TEST_MANAGER[0]:
|
|
|
|
return True
|
|
|
|
|
|
|
|
for t in self.options.testsuites:
|
|
|
|
if t != testsuite:
|
2016-03-23 18:34:10 +00:00
|
|
|
for other_testmanager in t.TEST_MANAGER:
|
2014-11-29 12:43:06 +00:00
|
|
|
if other_testmanager == tester.name:
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
|
|
|
def _check_defined_tests(self, tester, tests):
    """Compare `tests` against the testsuite's on-disk .testslist file.

    Rewrites the .testslist file with the current set of tests (optional
    tests prefixed with '~') and reports tests that appeared or vanished.

    Returns True when the known-tests list changed; returns None (skip)
    when an explicit test selection is in use.
    """
    # Skip the consistency check when the user filtered tests explicitly.
    if self.options.blacklisted_tests or self.options.wanted_tests:
        return

    tests_names = [test.classname for test in tests]
    testlist_changed = False
    for testsuite in self.options.testsuites:
        # Only check testsuites this tester exclusively owns, and only
        # when the tester opted into the testslist check.
        if not self._check_tester_has_other_testsuite(testsuite, tester) \
                and tester.check_testslist:
            try:
                testlist_file = open(os.path.splitext(testsuite.__file__)[0] + ".testslist",
                                     'r+')

                know_tests = testlist_file.read().split("\n")
                testlist_file.close()

                # Reopen truncating: the file is rewritten from scratch below.
                testlist_file = open(os.path.splitext(testsuite.__file__)[0] + ".testslist",
                                     'w')
            except IOError:
                continue

            optional_out = []
            for test in know_tests:
                # A leading '~' marks an optional test in the list file.
                if test and test.strip('~') not in tests_names:
                    if not test.startswith('~'):
                        testlist_changed = True
                        printc("Test %s Not in testsuite %s anymore"
                               % (test, testsuite.__file__), Colors.FAIL)
                    else:
                        optional_out.append((test, None))

            # Merge current tests with previously-known optional entries,
            # sorted by name with the optional marker ignored.
            tests_names = sorted([(test.classname, test) for test in tests] + optional_out,
                                 key=lambda x: x[0].strip('~'))

            for tname, test in tests_names:
                if test and test.optional:
                    tname = '~' + tname
                testlist_file.write("%s\n" % (tname))
                if tname and tname not in know_tests:
                    printc("Test %s is NEW in testsuite %s"
                           % (tname, testsuite.__file__), Colors.OKGREEN)
                    testlist_changed = True

            testlist_file.close()
            break

    return testlist_changed
2014-11-29 12:43:06 +00:00
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
def list_tests(self):
    """Collect the tests from every needed tester, sorted by classname.

    Returns -1 when the testslist changed and the options require
    failing on such a change.
    """
    for tester in self.testers:
        if self._tester_needed(tester):
            discovered = tester.list_tests()
            if self._check_defined_tests(tester, discovered) and \
                    self.options.fail_on_testlist_change:
                return -1
            self.tests.extend(discovered)
    return sorted(list(self.tests), key=lambda t: t.classname)
2013-12-31 10:45:07 +00:00
|
|
|
|
2016-12-22 13:08:23 +00:00
|
|
|
def _tester_needed(self, tester):
|
|
|
|
for testsuite in self.options.testsuites:
|
|
|
|
if tester.name in testsuite.TEST_MANAGER:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2014-01-14 17:07:46 +00:00
|
|
|
def _run_tests(self):
|
2014-04-02 17:14:30 +00:00
|
|
|
cur_test_num = 0
|
2015-03-14 15:08:12 +00:00
|
|
|
|
|
|
|
if not self.all_tests:
|
2015-03-18 10:05:08 +00:00
|
|
|
total_num_tests = 1
|
2015-03-14 15:08:12 +00:00
|
|
|
self.all_tests = []
|
|
|
|
for tester in self.testers:
|
2016-12-22 13:08:23 +00:00
|
|
|
if self._tester_needed(tester):
|
|
|
|
self.all_tests.extend(tester.list_tests())
|
2015-03-14 15:08:12 +00:00
|
|
|
total_num_tests = len(self.all_tests)
|
2014-04-02 17:14:30 +00:00
|
|
|
|
2015-01-16 17:25:56 +00:00
|
|
|
self.reporter.init_timer()
|
2013-12-31 10:45:07 +00:00
|
|
|
for tester in self.testers:
|
2016-12-22 13:08:23 +00:00
|
|
|
if not self._tester_needed(tester):
|
|
|
|
continue
|
2015-03-30 07:00:09 +00:00
|
|
|
res = tester.run_tests(cur_test_num, total_num_tests)
|
|
|
|
cur_test_num += len(tester.list_tests())
|
2014-01-24 15:38:12 +00:00
|
|
|
if res != Result.PASSED and (self.options.forever or
|
2014-10-24 12:23:52 +00:00
|
|
|
self.options.fatal_error):
|
2014-01-14 17:07:46 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
return True
|
|
|
|
|
2016-09-02 20:39:50 +00:00
|
|
|
def clean_tests(self):
    """Ask every registered tester to clean up its generated tests."""
    for each in self.testers:
        each.clean_tests()
2014-01-14 17:07:46 +00:00
|
|
|
def run_tests(self):
    """Run the tests once, --n-runs times, or forever, per the options."""
    def banner(text):
        # Framed iteration header.
        bar = "=" * len(text)
        print("%s\n%s\n%s\n" % (bar, text, bar))

    if self.options.forever:
        iteration = 1
        while True:
            banner("Running iteration %d" % iteration)

            if not self._run_tests():
                break
            iteration += 1
            self.clean_tests()

        return False
    elif self.options.n_runs:
        overall = True
        for iteration in range(self.options.n_runs):
            banner("Running iteration %d" % iteration)
            if not self._run_tests():
                overall = False
            self.clean_tests()

        return overall
    else:
        return self._run_tests()
2013-12-31 10:45:07 +00:00
|
|
|
|
|
|
|
def final_report(self):
    """Delegate the end-of-run report to the active reporter."""
    self.reporter.final_report()
2014-01-13 16:31:57 +00:00
|
|
|
|
|
|
|
def needs_http_server(self):
    """Return True when any registered tester needs the local HTTP server.

    Fix: the negative case previously fell off the end of the function
    and returned None; the explicit False keeps the return type boolean
    (truthiness for existing callers is unchanged).
    """
    for tester in self.testers:
        if tester.needs_http_server():
            return True
    return False
2014-01-24 10:41:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
class NamedDic(object):
    """Lightweight object exposing a dict's entries as attributes."""

    def __init__(self, props):
        # A falsy mapping (None or {}) yields an attribute-less instance.
        if props:
            for key, val in props.items():
                setattr(self, key, val)
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-01-30 15:56:51 +00:00
|
|
|
class Scenario(object):
    """A gst-validate scenario loaded from a scenarios.def section.

    Every (property, value) pair becomes an attribute, with dashes
    converted to underscores.
    """

    def __init__(self, name, props, path=None):
        self.name = name
        self.path = path

        for key, val in props:
            setattr(self, key.replace("-", "_"), val)

    def get_execution_name(self):
        """Path when loaded from a file, otherwise the scenario name."""
        return self.path if self.path is not None else self.name

    def seeks(self):
        """Whether the scenario performs seeks."""
        return bool(self.seek) if hasattr(self, "seek") else False

    def needs_clock_sync(self):
        """Whether the scenario requires clock synchronization."""
        return bool(self.need_clock_sync) if hasattr(self, "need_clock_sync") else False

    def needs_live_content(self):
        # Scenarios that can only be used on live content
        return bool(self.live_content_required) if hasattr(self, "live_content_required") else False

    def compatible_with_live_content(self):
        # if a live content is required it's implicitely compatible with
        # live content
        if self.needs_live_content():
            return True
        return bool(self.live_content_compatible) if hasattr(self, "live_content_compatible") else False

    def get_min_media_duration(self):
        """Minimum media duration (seconds) the scenario needs, or 0."""
        return float(self.min_media_duration) if hasattr(self, "min_media_duration") else 0

    def does_reverse_playback(self):
        """Whether the scenario plays the media backwards."""
        return bool(self.reverse_playback) if hasattr(self, "reverse_playback") else False

    def get_duration(self):
        """The scenario's declared duration as a float, or 0 if absent."""
        try:
            return float(self.duration)
        except AttributeError:
            return 0

    def get_min_tracks(self, track_type):
        """Minimum number of `track_type` tracks required, or 0 if absent."""
        try:
            return int(getattr(self, "min_%s_track" % track_type))
        except AttributeError:
            return 0

    def __repr__(self):
        return "<Scenario %s>" % self.name
2014-02-12 10:18:14 +00:00
|
|
|
|
2014-12-05 11:16:36 +00:00
|
|
|
|
2014-03-28 14:00:01 +00:00
|
|
|
class ScenarioManager(Loggable):
    """Singleton that discovers and caches gst-validate scenarios.

    Fixes applied: configparser.read_file() replaces readfp() (removed in
    Python 3.12); file handles are closed via `with`; the mutable default
    argument of discover_scenarios is gone; the regex is a raw string;
    super().__new__ no longer forwards arbitrary args (a TypeError in
    Python 3 when args are given).
    """
    _instance = None
    all_scenarios = []

    FILE_EXTENSION = "scenario"
    GST_VALIDATE_COMMAND = ""

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            # object.__new__ accepts no extra arguments in Python 3.
            cls._instance = super(ScenarioManager, cls).__new__(cls)
            cls._instance.config = None
            cls._instance.discovered = False
            Loggable.__init__(cls._instance)

        return cls._instance

    def find_special_scenarios(self, mfile):
        """Return scenarios attached to a specific media file
        (named filename.REALNAME.scenario next to the media)."""
        scenarios = []
        mfile_bname = os.path.basename(mfile)

        for f in os.listdir(os.path.dirname(mfile)):
            # Raw string avoids invalid escape-sequence warnings.
            if re.findall(r"%s\..*\.%s$" % (re.escape(mfile_bname), self.FILE_EXTENSION), f):
                scenarios.append(os.path.join(os.path.dirname(mfile), f))

        if scenarios:
            scenarios = self.discover_scenarios(scenarios, mfile)

        return scenarios

    def discover_scenarios(self, scenario_paths=None, mfile=None):
        """
        Discover scenarios specified in scenario_paths or the default ones
        if nothing specified there
        """
        # No mutable default argument; normalize None to an empty list.
        scenario_paths = scenario_paths or []
        scenarios = []
        scenario_defs = os.path.join(self.config.main_dir, "scenarios.def")
        with open(os.path.join(self.config.logsdir,
                               "scenarios_discovery.log"), 'w') as logs:
            try:
                command = [self.GST_VALIDATE_COMMAND,
                           "--scenarios-defs-output-file", scenario_defs]
                command.extend(scenario_paths)
                subprocess.check_call(command, stdout=logs, stderr=logs)
            except subprocess.CalledProcessError:
                # The tool may fail on some scenarios; parse whatever it
                # managed to write.
                pass

        config = configparser.RawConfigParser()
        with open(scenario_defs) as f:
            # read_file() replaces readfp(), which was removed in 3.12.
            config.read_file(f)

        for section in config.sections():
            if scenario_paths:
                for scenario_path in scenario_paths:
                    if mfile is None:
                        name = section
                        path = scenario_path
                    elif section in scenario_path:
                        # The real name of the scenario is:
                        # filename.REALNAME.scenario
                        name = scenario_path.replace(mfile + ".", "").replace(
                            "." + self.FILE_EXTENSION, "")
                        path = scenario_path
            else:
                name = section
                path = None

            props = config.items(section)
            scenarios.append(Scenario(name, props, path))

        if not scenario_paths:
            self.discovered = True
            self.all_scenarios.extend(scenarios)

        return scenarios

    def get_scenario(self, name):
        """Return the Scenario matching `name`, all scenarios when `name`
        is None, or None when not found."""
        if name is not None and os.path.isabs(name) and name.endswith(self.FILE_EXTENSION):
            scenarios = self.discover_scenarios([name])

            if scenarios:
                return scenarios[0]

        if self.discovered is False:
            self.discover_scenarios()

        if name is None:
            return self.all_scenarios

        try:
            return [scenario for scenario in self.all_scenarios if scenario.name == name][0]
        except IndexError:
            self.warning("Scenario: %s not found" % name)
            return None
2014-06-26 10:42:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
class GstValidateBaseTestManager(TestsManager):
    """TestsManager flavour carrying default scenarios and encoding formats."""
    scenarios_manager = ScenarioManager()

    def __init__(self):
        super(GstValidateBaseTestManager, self).__init__()
        self._scenarios = []
        self._encoding_formats = []

    def add_scenarios(self, scenarios):
        """
        @scenarios A list or a unic scenario name(s) to be run on the tests.
                   They are just the default scenarios, and then depending on
                   the TestsGenerator to be used you can have more fine grained
                   control on what to be run on each serie of tests.
        """
        new_items = scenarios if isinstance(scenarios, list) else [scenarios]
        # Deduplicate while accepting either a single name or a list.
        self._scenarios = list(set(self._scenarios + new_items))

    def set_scenarios(self, scenarios):
        """
        Override the scenarios
        """
        self._scenarios = []
        self.add_scenarios(scenarios)

    def get_scenarios(self):
        return self._scenarios

    def add_encoding_formats(self, encoding_formats):
        """
        :param encoding_formats: A list or one single #MediaFormatCombinations describing wanted output
                                 formats for transcoding test.
                                 They are just the default encoding formats, and then depending on
                                 the TestsGenerator to be used you can have more fine grained
                                 control on what to be run on each serie of tests.
        """
        new_items = (encoding_formats if isinstance(encoding_formats, list)
                     else [encoding_formats])
        self._encoding_formats = list(set(self._encoding_formats + new_items))

    def get_encoding_formats(self):
        return self._encoding_formats
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
|
2014-07-16 09:36:29 +00:00
|
|
|
class MediaDescriptor(Loggable):
    """Abstract description of a media asset, used to decide which
    scenarios are compatible with it.

    Fix: abstract methods previously did `raise NotImplemented`;
    NotImplemented is not an exception, so raising it is a TypeError in
    Python 3.  They now raise NotImplementedError.
    """

    def __init__(self):
        Loggable.__init__(self)

    def get_path(self):
        raise NotImplementedError

    def get_media_filepath(self):
        raise NotImplementedError

    def get_caps(self):
        raise NotImplementedError

    def get_uri(self):
        raise NotImplementedError

    def get_duration(self):
        raise NotImplementedError

    def get_protocol(self):
        raise NotImplementedError

    def is_seekable(self):
        raise NotImplementedError

    def is_live(self):
        raise NotImplementedError

    def is_image(self):
        raise NotImplementedError

    def get_num_tracks(self, track_type):
        raise NotImplementedError

    def can_play_reverse(self):
        raise NotImplementedError

    def prerrols(self):
        # NOTE: name is a historical typo ("prerolls") kept for API
        # compatibility.
        return True

    def is_compatible(self, scenario):
        """Return True when `scenario` can be run against this media."""
        if scenario is None:
            return True

        if scenario.seeks() and (not self.is_seekable() or self.is_image()):
            self.debug("Do not run %s as %s does not support seeking",
                       scenario, self.get_uri())
            return False

        if self.is_image() and scenario.needs_clock_sync():
            self.debug("Do not run %s as %s is an image",
                       scenario, self.get_uri())
            return False

        if not self.can_play_reverse() and scenario.does_reverse_playback():
            return False

        if not self.is_live() and scenario.needs_live_content():
            self.debug("Do not run %s as %s is not a live content",
                       scenario, self.get_uri())
            return False

        if self.is_live() and not scenario.compatible_with_live_content():
            self.debug("Do not run %s as %s is a live content", scenario, self.get_uri())
            return False

        if not self.prerrols() and getattr(scenario, 'needs_preroll', False):
            return False

        # Durations are compared in seconds (GST_SECOND converts from ns).
        if self.get_duration() and self.get_duration() / GST_SECOND < scenario.get_min_media_duration():
            self.debug(
                "Do not run %s as %s is too short (%i < min media duation : %i",
                scenario, self.get_uri(),
                self.get_duration() / GST_SECOND,
                scenario.get_min_media_duration())
            return False

        for track_type in ['audio', 'subtitle', 'video']:
            if self.get_num_tracks(track_type) < scenario.get_min_tracks(track_type):
                self.debug("%s -- %s | At least %s %s track needed < %s"
                           % (scenario, self.get_uri(), track_type,
                              scenario.get_min_tracks(track_type),
                              self.get_num_tracks(track_type)))
                return False

        return True
|
|
|
|
|
|
|
|
|
|
|
class GstValidateMediaDescriptor(MediaDescriptor):
    """MediaDescriptor backed by a gst-validate-media-check XML file.

    Fixes applied: subprocess.DEVNULL replaces `open(os.devnull)` (which
    leaked a file descriptor and was opened read-only); the regex in
    get_clean_name() is a raw string.
    """
    # Some extension file for discovering results
    MEDIA_INFO_EXT = "media_info"
    STREAM_INFO_EXT = "stream_info"

    DISCOVERER_COMMAND = "gst-validate-media-check-1.0"
    if "win32" in sys.platform:
        DISCOVERER_COMMAND += ".exe"

    def __init__(self, xml_path):
        super(GstValidateMediaDescriptor, self).__init__()

        self._xml_path = xml_path
        try:
            self.media_xml = ET.parse(xml_path).getroot()
        except xml.etree.ElementTree.ParseError:
            printc("Could not parse %s" % xml_path,
                   Colors.FAIL)
            raise

        # Sanity checks: fail fast if mandatory attributes are missing.
        self.media_xml.attrib["duration"]
        self.media_xml.attrib["seekable"]

        self.set_protocol(urllib.parse.urlparse(urllib.parse.urlparse(self.get_uri()).scheme).scheme)

    @staticmethod
    def new_from_uri(uri, verbose=False, include_frames=False):
        """
        include_frames = 0 # Never
        include_frames = 1 # always
        include_frames = 2 # if previous file included them

        """
        media_path = utils.url2path(uri)

        descriptor_path = "%s.%s" % (
            media_path, GstValidateMediaDescriptor.MEDIA_INFO_EXT)
        if include_frames == 2:
            # Reuse the previous file's choice about frame inclusion.
            try:
                media_xml = ET.parse(descriptor_path).getroot()
                frames = media_xml.findall('streams/stream/frame')
                include_frames = bool(frames)
            except FileNotFoundError:
                pass
        else:
            include_frames = bool(include_frames)

        args = GstValidateMediaDescriptor.DISCOVERER_COMMAND.split(" ")
        args.append(uri)

        args.extend(["--output-file", descriptor_path])
        if include_frames:
            args.extend(["--full"])

        if verbose:
            printc("Generating media info for %s\n"
                   " Command: '%s'" % (media_path, ' '.join(args)),
                   Colors.OKBLUE)

        try:
            # DEVNULL instead of open(os.devnull): no leaked descriptor.
            subprocess.check_output(args, stderr=subprocess.DEVNULL)
        except subprocess.CalledProcessError as e:
            if verbose:
                printc("Result: Failed", Colors.FAIL)
            else:
                loggable.warning("GstValidateMediaDescriptor", "Exception: %s" % e)
            return None

        if verbose:
            printc("Result: Passed", Colors.OKGREEN)

        try:
            return GstValidateMediaDescriptor(descriptor_path)
        except FileNotFoundError:
            return None

    def get_path(self):
        return self._xml_path

    def need_clock_sync(self):
        return Protocols.needs_clock_sync(self.get_protocol())

    def get_media_filepath(self):
        if self.get_protocol() == Protocols.FILE:
            return self._xml_path.replace("." + self.MEDIA_INFO_EXT, "")
        else:
            return self._xml_path.replace("." + self.STREAM_INFO_EXT, "")

    def get_caps(self):
        return self.media_xml.findall("streams")[0].attrib["caps"]

    def get_tracks_caps(self):
        """Return a list of (track_type, caps) for every stream."""
        res = []
        try:
            streams = self.media_xml.findall("streams")[0].findall("stream")
        except IndexError:
            return res

        for stream in streams:
            res.append((stream.attrib["type"], stream.attrib["caps"]))

        return res

    def get_uri(self):
        return self.media_xml.attrib["uri"]

    def get_duration(self):
        return int(self.media_xml.attrib["duration"])

    def set_protocol(self, protocol):
        self.media_xml.attrib["protocol"] = protocol

    def get_protocol(self):
        return self.media_xml.attrib["protocol"]

    def is_seekable(self):
        return self.media_xml.attrib["seekable"].lower() == "true"

    def is_live(self):
        return self.media_xml.get("live", "false").lower() == "true"

    def can_play_reverse(self):
        return True

    def is_image(self):
        for stream in self.media_xml.findall("streams")[0].findall("stream"):
            if stream.attrib["type"] == "image":
                return True
        return False

    def get_num_tracks(self, track_type):
        n = 0
        for stream in self.media_xml.findall("streams")[0].findall("stream"):
            if stream.attrib["type"] == track_type:
                n += 1

        return n

    def get_clean_name(self):
        """Descriptor file name with its extension stripped and dots
        replaced, usable as an identifier."""
        name = os.path.basename(self.get_path())
        # Raw string avoids invalid escape-sequence warnings.
        name = re.sub(r"\.stream_info|\.media_info", "", name)

        return name.replace('.', "_")
2014-07-16 10:16:03 +00:00
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-07-16 10:16:03 +00:00
|
|
|
class MediaFormatCombination(object):
    """A (container, audio, video) format triple for transcoding tests."""
    FORMATS = {"aac": "audio/mpeg,mpegversion=4",  # Audio
               "ac3": "audio/x-ac3",
               "vorbis": "audio/x-vorbis",
               "mp3": "audio/mpeg,mpegversion=1,layer=3",
               "opus": "audio/x-opus",
               "rawaudio": "audio/x-raw",

               # Video
               "h264": "video/x-h264",
               "h265": "video/x-h265",
               "vp8": "video/x-vp8",
               "vp9": "video/x-vp9",
               "theora": "video/x-theora",
               "prores": "video/x-prores",
               "jpeg": "image/jpeg",

               # Containers
               "webm": "video/webm",
               "ogg": "application/ogg",
               "mkv": "video/x-matroska",
               "mp4": "video/quicktime,variant=iso;",
               "quicktime": "video/quicktime;"}

    def __init__(self, container, audio, video):
        """
        Describes a media format to be used for transcoding tests.

        :param container: A string defining the container format to be used, must bin in self.FORMATS
        :param audio: A string defining the audio format to be used, must bin in self.FORMATS
        :param video: A string defining the video format to be used, must bin in self.FORMATS
        """
        self.container = container
        self.audio = audio
        self.video = video

    def __str__(self):
        return "%s and %s in %s" % (self.audio, self.video, self.container)

    def get_caps(self, track_type):
        """Caps string for the requested track type, or None if unknown."""
        fmt = self.__dict__.get(track_type)
        return self.FORMATS.get(fmt)

    def get_audio_caps(self):
        return self.get_caps("audio")

    def get_video_caps(self):
        return self.get_caps("video")

    def get_muxer_caps(self):
        return self.get_caps("container")