2014-05-16 14:20:26 +00:00
|
|
|
#!/usr/bin/env python2
|
2013-12-31 10:45:07 +00:00
|
|
|
#
|
|
|
|
# Copyright (c) 2013,Thibault Saunier <thibault.saunier@collabora.com>
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or
|
|
|
|
# modify it under the terms of the GNU Lesser General Public
|
|
|
|
# License as published by the Free Software Foundation; either
|
|
|
|
# version 2.1 of the License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
# Lesser General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Lesser General Public
|
|
|
|
# License along with this program; if not, write to the
|
|
|
|
# Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
|
|
|
|
# Boston, MA 02110-1301, USA.
|
|
|
|
|
|
|
|
""" Class representing tests and test managers. """
|
|
|
|
|
|
|
|
import os
|
2014-03-28 14:00:01 +00:00
|
|
|
import sys
|
2013-12-31 10:45:07 +00:00
|
|
|
import re
|
|
|
|
import time
|
2014-01-15 15:11:39 +00:00
|
|
|
import utils
|
2014-04-22 08:49:10 +00:00
|
|
|
import signal
|
2014-01-30 11:42:25 +00:00
|
|
|
import urlparse
|
2013-12-31 10:45:07 +00:00
|
|
|
import subprocess
|
2015-01-12 12:09:33 +00:00
|
|
|
import threading
|
2015-01-16 18:03:07 +00:00
|
|
|
import Queue
|
2013-12-31 10:45:07 +00:00
|
|
|
import reporters
|
2014-02-12 10:18:14 +00:00
|
|
|
import ConfigParser
|
2014-07-22 13:49:09 +00:00
|
|
|
import loggable
|
2014-01-09 08:39:05 +00:00
|
|
|
from loggable import Loggable
|
2015-01-21 12:13:02 +00:00
|
|
|
import xml.etree.cElementTree as ET
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-07-16 08:10:44 +00:00
|
|
|
from utils import mkdir, Result, Colors, printc, DEFAULT_TIMEOUT, GST_SECOND, \
|
2015-05-11 10:22:25 +00:00
|
|
|
Protocols, look_for_file_in_source_dir, get_data_file
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2015-03-19 15:06:54 +00:00
|
|
|
# The factor by which we increase the hard timeout when running inside
# Valgrind
VALGRIND_TIMEOUT_FACTOR = 20

# The error reported by valgrind when detecting errors
# (passed to valgrind through --error-exitcode, see Test.use_valgrind)
VALGRIND_ERROR_CODE = 20

# Extension of the per-media GstValidate override file looked up next to the
# media file (see GstValidateTest.get_override_file)
VALIDATE_OVERRIDE_EXTENSION = ".override"
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-01-08 17:51:14 +00:00
|
|
|
class Test(Loggable):

    """ A class representing a particular test.

    A Test wraps one subprocess invocation: it builds the command line and
    environment, launches the application in a helper thread, monitors it
    for progress and (hard) timeouts, and records the final Result.
    """

    def __init__(self, application_name, classname, options,
                 reporter, duration=0, timeout=DEFAULT_TIMEOUT,
                 hard_timeout=None, extra_env_variables=None):
        """
        @application_name: executable to launch for this test
        @classname: dotted name identifying the test; also used to derive
                    the log file path (dots become path separators)
        @options: the launcher's parsed command line options
        @reporter: object in charge of publishing the test result
        @duration: expected test duration in seconds (0 if unknown)
        @timeout: The timeout during which the value returned by
                  get_current_value keeps being exactly equal
        @hard_timeout: Max time the test can take in absolute
        @extra_env_variables: dict of environment variables appended
                              (os.pathsep separated) to the subprocess env
        """
        Loggable.__init__(self)
        self.timeout = timeout
        self.hard_timeout = hard_timeout
        self.classname = classname
        self.options = options
        self.application = application_name
        self.command = ""
        self.reporter = reporter
        self.process = None   # subprocess.Popen, created in thread_wrapper()
        self.proc_env = None  # environment dict passed to the subprocess
        self.thread = None    # thread running thread_wrapper()
        self.queue = None     # queue used to signal the manager on exit
        self.duration = duration

        # Avoid sharing a mutable default argument between instances
        extra_env_variables = extra_env_variables or {}
        self.extra_env_variables = extra_env_variables

        self.clean()

    def clean(self):
        # Reset all per-run state so the test can be (re)launched
        self.message = ""
        self.error_str = ""
        self.time_taken = 0.0
        self._starting_time = None
        self.result = Result.NOT_RUN
        self.logfile = None
        self.out = None            # stream the subprocess writes to
        self.extra_logfiles = []   # additional log files to report
        self.__env_variable = []   # env var names to echo in repro lines

    def __str__(self):
        string = self.classname
        if self.result != Result.NOT_RUN:
            string += ": " + self.result
            if self.result in [Result.FAILED, Result.TIMEOUT]:
                # Include everything needed to reproduce the failure by hand
                string += " '%s'\n" \
                          " You can reproduce with: %s %s\n" \
                    % (self.message, self._env_variable, self.command)

                if not self.options.redirect_logs:
                    string += " You can find logs in:\n" \
                              " - %s" % (self.logfile)
                    for log in self.extra_logfiles:
                        string += "\n - %s" % log

        return string

    def add_env_variable(self, variable, value=None):
        """
        Register @variable as part of the reproduction command line.

        Only useful so that the gst-validate-launcher can print the exact
        right command line to reproduce the tests. When @value is None the
        variable is only registered if it is present in os.environ.
        """
        if value is None:
            value = os.environ.get(variable, None)

        if value is None:
            return

        self.__env_variable.append(variable)

    @property
    def _env_variable(self):
        # Space separated "VAR=value" pairs for the registered variables,
        # values taken from the subprocess environment.
        res = ""
        for var in set(self.__env_variable):
            if res:
                res += " "
            value = self.proc_env.get(var, None)
            if value:
                res += "%s=%s" % (var, value)

        return res

    def open_logfile(self):
        """Compute self.logfile from the classname and open self.out
        (a real file, or sys.stdout/stderr when logs are redirected)."""
        # e.g. classname "a.b.c" -> <logsdir>/a/b/c
        path = os.path.join(self.options.logsdir,
                            self.classname.replace(".", os.sep))
        mkdir(os.path.dirname(path))
        self.logfile = path

        if self.options.redirect_logs == 'stdout':
            self.out = sys.stdout
        elif self.options.redirect_logs == 'stderr':
            self.out = sys.stderr
        else:
            self.out = open(path, 'w+')

    def close_logfile(self):
        # Only close streams we opened ourselves, never sys.stdout/stderr
        if not self.options.redirect_logs:
            self.out.close()

        self.out = None

    def _get_file_content(self, file_name):
        """Return the whole content of @file_name as a string."""
        f = open(file_name, 'r+')
        value = f.read()
        f.close()

        return value

    def get_log_content(self):
        """Return the content of the main log file."""
        return self._get_file_content(self.logfile)

    def get_extra_log_content(self, extralog):
        """Return the content of @extralog, or "" if it is not one of the
        registered extra log files."""
        if extralog not in self.extra_logfiles:
            return ""

        return self._get_file_content(extralog)

    def get_classname(self):
        # Everything before the last dotted component,
        # e.g. "suite.case" -> "suite"
        name = self.classname.split('.')[-1]
        classname = self.classname.replace('.%s' % name, '')

        return classname

    def get_name(self):
        # The last dotted component, e.g. "suite.case" -> "case"
        return self.classname.split('.')[-1]

    def add_arguments(self, *args):
        """Append command line arguments to the command under construction."""
        for arg in args:
            self.command += " " + arg

    def build_arguments(self):
        """Build self.command; subclasses extend this to add their own
        arguments and environment variables."""
        self.add_env_variable("LD_PRELOAD")
        self.add_env_variable("DISPLAY")

    def set_result(self, result, message="", error=""):
        """Record the final @result with a human readable @message and a
        short machine oriented @error string."""
        self.debug("Setting result: %s (message: %s, error: %s)" % (result,
                   message, error))
        if result is Result.TIMEOUT and self.options.debug is True:
            # In debug mode, pause on timeout so a developer can attach gdb
            # to the still-running process (raw_input is the Python 2 builtin).
            pname = subprocess.check_output(("readlink -e /proc/%s/exe"
                                             % self.process.pid).split(' ')).replace('\n', '')
            raw_input("%sTimeout happened you can attach gdb doing: $gdb %s %d%s\n"
                      "Press enter to continue" % (Colors.FAIL, pname, self.process.pid,
                                                   Colors.ENDC))

        self.result = result
        self.message = message
        self.error_str = error

    def check_results(self):
        """Map the subprocess exit status to a Result, unless a result
        (failure/timeout) was already set while the process was running."""
        if self.result is Result.FAILED or self.result is Result.TIMEOUT:
            return

        self.debug("%s returncode: %s", self, self.process.returncode)
        if self.process.returncode == 0:
            self.set_result(Result.PASSED)
        elif self.process.returncode == VALGRIND_ERROR_CODE:
            self.set_result(Result.FAILED, "Valgrind reported errors")
        else:
            self.set_result(Result.FAILED,
                            "Application returned %d" % (self.process.returncode))

    def get_current_value(self):
        """
        Lets subclasses implement a nicer timeout measurement method

        They should return some value with which we will compare
        the previous one and time out if they are equal during self.timeout
        seconds
        """
        return Result.NOT_RUN

    def process_update(self):
        """
        Returns True when process has finished running or has timed out.
        """

        if self.process is None:
            # Process has not started running yet
            return False

        self.process.poll()
        if self.process.returncode is not None:
            return True

        val = self.get_current_value()

        self.debug("Got value: %s" % val)
        if val is Result.NOT_RUN:
            # The get_current_value logic is not implemented... dumb
            # timeout
            if time.time() - self.last_change_ts > self.timeout:
                self.set_result(Result.TIMEOUT,
                                "Application timed out: %s secs" %
                                self.timeout,
                                "timeout")
                return True
            return False
        elif val is Result.FAILED:
            return True
        elif val is Result.KNOWN_ERROR:
            return True

        self.log("New val %s" % val)

        if val == self.last_val:
            # Progress value has not changed: timeout when it stalls for
            # longer than self.timeout
            delta = time.time() - self.last_change_ts
            self.debug("%s: Same value for %d/%d seconds" %
                       (self, delta, self.timeout))
            if delta > self.timeout:
                self.set_result(Result.TIMEOUT,
                                "Application timed out: %s secs" %
                                self.timeout,
                                "timeout")
                return True
        elif self.hard_timeout and time.time() - self.start_ts > self.hard_timeout:
            # Progress or not, never let the test run past the hard timeout
            self.set_result(
                Result.TIMEOUT, "Hard timeout reached: %d secs" % self.hard_timeout)
            return True
        else:
            # Progress was made, restart the stall clock
            self.last_change_ts = time.time()
            self.last_val = val

        return False

    def get_subproc_env(self):
        """Return the environment to run the subprocess in; subclasses
        override this to add their own variables."""
        return os.environ

    def kill_subprocess(self):
        """Forcefully terminate the subprocess, retrying SIGKILL every
        second until it dies or DEFAULT_TIMEOUT is exceeded."""
        if self.process is None:
            return

        stime = time.time()
        res = self.process.poll()
        while res is None:
            try:
                self.debug("Subprocess is still alive, sending KILL signal")
                self.process.send_signal(signal.SIGKILL)
                time.sleep(1)
            except OSError:
                # Process may already be gone; poll below will confirm
                pass
            if time.time() - stime > DEFAULT_TIMEOUT:
                raise RuntimeError("Could not kill subprocess after %s second"
                                   " Something is really wrong, => EXITING"
                                   % DEFAULT_TIMEOUT)
            res = self.process.poll()

    def thread_wrapper(self):
        """Thread target: run the command and signal completion on the
        queue (unless the test already timed out)."""
        # "exec" makes the shell replace itself with the test binary, so the
        # pid we signal is the test process rather than an intermediate shell.
        self.process = subprocess.Popen("exec " + self.command,
                                        stderr=self.out,
                                        stdout=self.out,
                                        shell=True,
                                        env=self.proc_env)
        self.process.wait()
        if self.result is not Result.TIMEOUT:
            self.queue.put(None)

    def get_valgrind_suppressions(self):
        """Return the list of valgrind suppression files to use."""
        return [self.get_valgrind_suppression_file('data', 'gstvalidate.supp')]

    def use_valgrind(self):
        """Wrap self.command in a valgrind/memcheck invocation and adapt
        the environment and timeouts accordingly."""
        vglogsfile = self.logfile + '.valgrind'
        self.extra_logfiles.append(vglogsfile)

        vg_args = [
            ('trace-children', 'yes'),
            ('tool', 'memcheck'),
            ('leak-check', 'full'),
            ('leak-resolution', 'high'),
            # TODO: errors-for-leak-kinds should be set to all instead of definite
            # and all false positives should be added to suppression files.
            ('errors-for-leak-kinds', 'definite'),
            ('num-callers', '20'),
            ('log-file', '"' + vglogsfile + '"'),
            ('error-exitcode', str(VALGRIND_ERROR_CODE)),
        ]

        for supp in self.get_valgrind_suppressions():
            vg_args.append(('suppressions', supp))

        self.command = "valgrind %s %s" % (' '.join(map(lambda x: '--%s=%s' % (x[0], x[1]), vg_args)),
                                           self.command)

        # Tune GLib's memory allocator to be more valgrind friendly
        self.proc_env['G_DEBUG'] = 'gc-friendly'
        self.add_env_variable('G_DEBUG', 'gc-friendly')

        self.proc_env['G_SLICE'] = 'always-malloc'
        self.add_env_variable('G_SLICE', 'always-malloc')

        # Everything runs much slower under valgrind
        if self.hard_timeout is not None:
            self.hard_timeout *= VALGRIND_TIMEOUT_FACTOR
        self.timeout *= VALGRIND_TIMEOUT_FACTOR

        # Enable 'valgrind.config'
        vg_config = get_data_file('data', 'valgrind.config')

        if self.proc_env.get('GST_VALIDATE_CONFIG'):
            self.proc_env['GST_VALIDATE_CONFIG'] = '%s%s%s' % (self.proc_env['GST_VALIDATE_CONFIG'], os.pathsep, vg_config)
        else:
            self.proc_env['GST_VALIDATE_CONFIG'] = vg_config

        self.add_env_variable('GST_VALIDATE_CONFIG', self.proc_env['GST_VALIDATE_CONFIG'])

    def test_start(self, queue):
        """Build the command/environment, announce the test and launch the
        subprocess in a helper thread. @queue is used to signal the test
        manager when the process exits."""
        self.open_logfile()

        self.queue = queue
        self.command = "%s " % (self.application)
        self._starting_time = time.time()
        self.build_arguments()
        self.proc_env = self.get_subproc_env()

        # Append caller supplied variables to the subprocess environment
        for var, value in self.extra_env_variables.items():
            self.proc_env[var] = self.proc_env.get(var, '') + os.pathsep + value
            self.add_env_variable(var, self.proc_env[var])

        if self.options.valgrind:
            self.use_valgrind()

        message = "Launching: %s%s\n" \
            " Command: '%s %s'\n" % (Colors.ENDC, self.classname,
                                     self._env_variable, self.command)
        if not self.options.redirect_logs:
            message += " Logs:\n" \
                       " - %s" % (self.logfile)
            for log in self.extra_logfiles:
                message += "\n - %s" % log

        # Write a header to the log so separate runs are distinguishable
        self.out.write("=================\n"
                       "Test name: %s\n"
                       "Command: '%s'\n"
                       "=================\n\n"
                       % (self.classname, self.command))
        self.out.flush()

        printc(message, Colors.OKBLUE)

        self.thread = threading.Thread(target=self.thread_wrapper)
        self.thread.start()

        # Timestamps used by process_update() for stall/hard timeouts
        self.last_val = 0
        self.last_change_ts = time.time()
        self.start_ts = time.time()

    def test_end(self):
        """Tear the test down: kill the subprocess, join the helper
        thread, print and return the final result."""
        self.kill_subprocess()
        self.thread.join()
        self.time_taken = time.time() - self._starting_time

        printc("%s: %s%s\n" % (self.classname, self.result,
               " (" + self.message + ")" if self.message else ""),
               color=utils.get_color_for_result(self.result))

        self.close_logfile()

        return self.result
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
class GstValidateTest(Test):

    """ A Test running a GstValidate based application, parsing the
    validate log to measure progress and to collect critical issues. """

    # Matches "position: H:M:S.ns ... duration: H:M:S.ns" status lines
    # found in the validate logs
    findpos_regex = re.compile(
        '.*position.*(\d+):(\d+):(\d+).(\d+).*duration.*(\d+):(\d+):(\d+).(\d+)')
    # Matches "seeking to: H:M:S.ns ... stop: H:M:S.ns ... rate: x.y" lines
    findlastseek_regex = re.compile(
        'seeking to.*(\d+):(\d+):(\d+).(\d+).*stop.*(\d+):(\d+):(\d+).(\d+).*rate.*(\d+)\.(\d+)')

    # Multiplier applied to @timeout (or @duration) to derive a hard
    # timeout when none is given explicitly
    HARD_TIMEOUT_FACTOR = 5

    def __init__(self, application_name, classname,
                 options, reporter, duration=0,
                 timeout=DEFAULT_TIMEOUT, scenario=None, hard_timeout=None,
                 media_descriptor=None, extra_env_variables=None):
        """
        @scenario: GstValidate scenario to run, or None; a scenario whose
                   name is "none" (case insensitive) is treated as None
        @media_descriptor: descriptor of the media under test, used to look
                           up per-media override files and the protocol
        Remaining parameters match Test.__init__.
        """
        # Avoid sharing a mutable default argument between instances
        extra_env_variables = extra_env_variables or {}

        if not hard_timeout and self.HARD_TIMEOUT_FACTOR:
            if timeout:
                hard_timeout = timeout * self.HARD_TIMEOUT_FACTOR
            elif duration:
                hard_timeout = duration * self.HARD_TIMEOUT_FACTOR
            else:
                hard_timeout = None

        # If we are running from source, use the -debug version of the
        # application which is using rpath instead of libtool's wrappers. It's
        # slightly faster to start and will not confuse valgrind.
        debug = '%s-debug' % application_name
        p = look_for_file_in_source_dir('tools', debug)
        if p:
            application_name = p

        self.media_descriptor = media_descriptor

        # Chain a per-media .override file onto GST_VALIDATE_OVERRIDE
        override_path = self.get_override_file(media_descriptor)
        if override_path:
            if extra_env_variables:
                if extra_env_variables.get("GST_VALIDATE_OVERRIDE", ""):
                    # NOTE(review): the assignment below replaces the value
                    # just suffixed with a separator instead of appending to
                    # it — looks like it should be '+='; confirm intent.
                    extra_env_variables["GST_VALIDATE_OVERRIDE"] += os.path.pathsep

            extra_env_variables["GST_VALIDATE_OVERRIDE"] = override_path

        super(GstValidateTest, self).__init__(application_name, classname,
                                              options, reporter,
                                              duration=duration,
                                              timeout=timeout,
                                              hard_timeout=hard_timeout,
                                              extra_env_variables=extra_env_variables)

        # defines how much the process can be outside of the configured
        # segment / seek
        self._sent_eos_pos = None

        # Path of the validate log file (GST_VALIDATE_FILE), set in
        # get_subproc_env()
        self.validatelogs = None
        if scenario is None or scenario.name.lower() == "none":
            self.scenario = None
        else:
            self.scenario = scenario

    def get_override_file(self, media_descriptor):
        """Return the path of an existing .override file located next to
        the media file described by @media_descriptor, or None."""
        if media_descriptor:
            if media_descriptor.get_path():
                override_path = os.path.splitext(media_descriptor.get_path())[0] + VALIDATE_OVERRIDE_EXTENSION
                if os.path.exists(override_path):
                    return override_path

        return None

    def get_current_value(self):
        """Return the current playback position, failing the test when no
        EOS has been received 30 seconds after the scenario sent one
        (tolerated as a known issue for HLS)."""
        if self.scenario:
            sent_eos = self.sent_eos_position()
            if sent_eos is not None:
                t = time.time()
                if ((t - sent_eos)) > 30:
                    if self.media_descriptor.get_protocol() == Protocols.HLS:
                        self.set_result(Result.PASSED,
                                        """Got no EOS 30 seconds after sending EOS,
                                        in HLS known and tolerated issue:
                                        https://bugzilla.gnome.org/show_bug.cgi?id=723868""")
                        return Result.KNOWN_ERROR

                    self.set_result(
                        Result.FAILED, "Pipeline did not stop 30 Seconds after sending EOS")

                    return Result.FAILED

        return self.get_current_position()

    def get_subproc_env(self):
        """Build the subprocess environment: validate/GStreamer log files,
        color handling and the scenario to execute."""
        self.validatelogs = self.logfile + '.validate.logs'
        logfiles = self.validatelogs
        if self.options.redirect_logs:
            # GST_VALIDATE_FILE accepts several os.pathsep separated sinks;
            # strip the <>' brackets from the redirect target name
            logfiles += os.pathsep + \
                self.options.redirect_logs.replace("<", '').replace(">", '')

        subproc_env = os.environ.copy()

        # Make sure the log file exists even if the app writes nothing
        utils.touch(self.validatelogs)
        subproc_env["GST_VALIDATE_FILE"] = logfiles
        self.extra_logfiles.append(self.validatelogs)

        if 'GST_DEBUG' in os.environ and not self.options.redirect_logs:
            gstlogsfile = self.logfile + '.gstdebug'
            self.extra_logfiles.append(gstlogsfile)
            subproc_env["GST_DEBUG_FILE"] = gstlogsfile

        if self.options.no_color:
            subproc_env["GST_DEBUG_NO_COLOR"] = '1'

        # Ensure XInitThreads is called, see bgo#731525
        subproc_env['GST_GL_XINITTHREADS'] = '1'
        self.add_env_variable('GST_GL_XINITTHREADS', '1')

        if self.scenario is not None:
            scenario = self.scenario.get_execution_name()
            if self.options.valgrind:
                # Increase sink's max-lateness property when running inside
                # Valgrind as it slows down everything quiet a lot.
                scenario = "setup_sink_props_max_lateness:%s" % scenario

            subproc_env["GST_VALIDATE_SCENARIO"] = scenario
            self.add_env_variable("GST_VALIDATE_SCENARIO",
                                  subproc_env["GST_VALIDATE_SCENARIO"])
        else:
            # Make sure no scenario leaks in from the launcher's own env
            try:
                del subproc_env["GST_VALIDATE_SCENARIO"]
            except KeyError:
                pass

        return subproc_env

    def clean(self):
        Test.clean(self)
        self._sent_eos_pos = None

    def build_arguments(self):
        """Extend Test.build_arguments with the GstValidate specific
        environment variables to echo in reproduction lines."""
        super(GstValidateTest, self).build_arguments()
        if "GST_VALIDATE" in os.environ:
            self.add_env_variable("GST_VALIDATE", os.environ["GST_VALIDATE"])

        if "GST_VALIDATE_SCENARIOS_PATH" in os.environ:
            self.add_env_variable("GST_VALIDATE_SCENARIOS_PATH",
                                  os.environ["GST_VALIDATE_SCENARIOS_PATH"])

        self.add_env_variable("GST_VALIDATE_CONFIG")
        self.add_env_variable("GST_VALIDATE_OVERRIDE")

    def get_extra_log_content(self, extralog):
        value = Test.get_extra_log_content(self, extralog)

        # Strip the carriage-return "<position: .../>" status lines from the
        # validate log, they are progress noise
        if extralog == self.validatelogs:
            value = re.sub("<position:.*/>\r", "", value)

        return value

    def get_validate_criticals_errors(self):
        """Return the unique critical issues found in the validate log as
        a "[err1, err2]" string, or None when there are none."""
        ret = "["
        errors = []
        for l in open(self.validatelogs, 'r').readlines():
            if "critical : " in l:
                error = l.split("critical : ")[1].replace("\n", '')
                if error not in errors:
                    if ret != "[":
                        ret += ", "
                    ret += error
                    errors.append(error)

        if ret == "[":
            return None
        else:
            return ret + "]"

    def check_results(self):
        """Combine the exit status and the validate criticals into the
        final Result (keeping any result already set while running)."""
        if self.result is Result.FAILED or self.result is Result.PASSED or self.result is Result.TIMEOUT:
            return

        self.debug("%s returncode: %s", self, self.process.returncode)

        criticals = self.get_validate_criticals_errors()
        # 139 is the shell's 128 + SIGSEGV(11) exit status
        if self.process.returncode == 139:
            # FIXME Reimplement something like that if needed
            # self.get_backtrace("SEGFAULT")
            self.set_result(Result.FAILED,
                            "Application segfaulted",
                            "segfault")
        elif self.process.returncode == VALGRIND_ERROR_CODE:
            self.set_result(Result.FAILED, "Valgrind reported errors")
        elif criticals or self.process.returncode != 0:
            if criticals is None:
                criticals = "No criticals"
            self.set_result(Result.FAILED,
                            "Application returned %s (issues: %s)"
                            % (self.process.returncode, criticals))
        else:
            self.set_result(Result.PASSED)

    def _parse_position(self, p):
        """Parse a "<position: .../>" status line into a
        (position, duration) pair of GStreamer times, (0, 0) on failure."""
        self.log("Parsing %s" % p)
        times = self.findpos_regex.findall(p)

        if len(times) != 1:
            self.warning("Got a unparsable value: %s" % p)
            return 0, 0

        return (utils.gsttime_from_tuple(times[0][:4]),
                utils.gsttime_from_tuple(times[0][4:]))

    def _parse_buffering(self, b):
        # "buffering... N%" -> (N, 100) so buffering progress can be
        # compared like a position
        return b.split("buffering... ")[1].split("%")[0], 100

    def _get_position(self):
        """Scan the validate log backwards for the most recent position or
        buffering report; return (position, duration), (-1, -1) if none."""
        position = duration = -1

        self.debug("Getting position")
        m = None
        for l in reversed(open(self.validatelogs, 'r').readlines()):
            l = l.lower()
            if "<position:" in l or "buffering" in l:
                m = l
                break

        if m is None:
            self.debug("Could not fine any positionning info")
            return position, duration

        # Several status reports can share one line, separated by \r
        for j in m.split("\r"):
            j = j.lstrip().rstrip()
            if j.startswith("<position:") and j.endswith("/>"):
                position, duration = self._parse_position(j)
            elif j.startswith("buffering") and j.endswith("%"):
                position, duration = self._parse_buffering(j)
            else:
                self.log("No info in %s" % j)

        return position, duration

    def _get_last_seek_values(self):
        """Scan the validate log backwards for the most recent seek;
        return (start, stop, rate), all None when no seek was found."""
        m = None
        rate = start = stop = None

        for l in reversed(open(self.validatelogs, 'r').readlines()):
            l = l.lower()
            if "seeking to: " in l:
                m = l
                break

        if m is None:
            self.debug("Could not fine any seeking info")
            return start, stop, rate

        values = self.findlastseek_regex.findall(m)
        if len(values) != 1:
            self.warning("Got an unparsable seek value %s", m)
            return start, stop, rate

        v = values[0]
        return (utils.gsttime_from_tuple(v[:4]),
                utils.gsttime_from_tuple(v[4:8]),
                float(str(v[8]) + "." + str(v[9])))

    def sent_eos_position(self):
        """Return the wall-clock time at which a "sending eos" line was
        first noticed in the validate log, or None. Cached once found."""
        if self._sent_eos_pos is not None:
            return self._sent_eos_pos

        for l in reversed(open(self.validatelogs, 'r').readlines()):
            l = l.lower()
            if "sending eos" in l:
                self._sent_eos_pos = time.time()
                return self._sent_eos_pos

        return None

    def get_current_position(self):
        """Return the latest reported position (-1 when unknown)."""
        position, duration = self._get_position()
        # NOTE(review): both branches return position and duration is
        # unused; presumably the -1 case was meant to be handled
        # differently — confirm.
        if position == -1:
            return position

        return position

    def get_valgrind_suppression_file(self, subdir, name):
        """Return the path of suppression file @name under @subdir; logs
        an error and implicitly returns None when it cannot be found."""
        p = get_data_file(subdir, name)
        if p:
            return p

        self.error("Could not find any %s file" % name)

    def get_valgrind_suppressions(self):
        result = super(GstValidateTest, self).get_valgrind_suppressions()
        return result + [self.get_valgrind_suppression_file('common', 'gst.supp')]
|
2015-03-19 16:22:26 +00:00
|
|
|
|
2014-01-30 11:42:25 +00:00
|
|
|
|
2014-07-16 10:03:14 +00:00
|
|
|
class GstValidateEncodingTestInterface(object):

    """ Mixin providing encoding-profile helpers for transcoding/rendering
    tests, based on a muxer/audio/video caps @combination and the source
    @media_descriptor. """

    # Default tolerance used for self._duration_tolerance when none is
    # supplied (a quarter of a second in GStreamer time units)
    DURATION_TOLERANCE = GST_SECOND / 4

    def __init__(self, combination, media_descriptor, duration_tolerance=None):
        super(GstValidateEncodingTestInterface, self).__init__()

        self.media_descriptor = media_descriptor
        self.combination = combination
        # Destination URI of the rendered file; presumably set later by the
        # concrete test class — it is not set here.
        self.dest_file = ""

        self._duration_tolerance = duration_tolerance
        if duration_tolerance is None:
            self._duration_tolerance = self.DURATION_TOLERANCE

    def get_current_size(self):
        """Return the current size in bytes of the rendered file, or None
        when it cannot be stat'ed (e.g. not created yet)."""
        try:
            size = os.stat(urlparse.urlparse(self.dest_file).path).st_size
        except OSError:
            return None

        self.debug("Size: %s" % size)
        return size

    def _get_profile_full(self, muxer, venc, aenc, video_restriction=None,
                          audio_restriction=None, audio_presence=0,
                          video_presence=0):
        """Serialize an encodebin-style profile string of the form
        "muxer:[restriction->]venc[|presence]:[restriction->]aenc[|presence]"
        (quoted); empty components are collapsed."""
        ret = "\""
        if muxer:
            ret += muxer
        ret += ":"
        if venc:
            if video_restriction is not None:
                ret = ret + video_restriction + '->'
            ret += venc
            if video_presence:
                ret = ret + '|' + str(video_presence)
        if aenc:
            ret += ":"
            if audio_restriction is not None:
                ret = ret + audio_restriction + '->'
            ret += aenc
            if audio_presence:
                ret = ret + '|' + str(audio_presence)

        ret += "\""
        # Collapse the "::" produced when the video component is absent
        return ret.replace("::", ":")

    def get_profile(self, video_restriction=None, audio_restriction=None):
        """Return the encoding profile string for self.combination,
        dropping the audio/video caps when the source media has no track
        of that kind."""
        vcaps = self.combination.get_video_caps()
        acaps = self.combination.get_audio_caps()
        if self.media_descriptor is not None:
            if self.media_descriptor.get_num_tracks("video") == 0:
                vcaps = None

            if self.media_descriptor.get_num_tracks("audio") == 0:
                acaps = None

        return self._get_profile_full(self.combination.get_muxer_caps(),
                                      vcaps, acaps,
                                      video_restriction=video_restriction,
                                      audio_restriction=audio_restriction)
|
|
|
|
|
2014-07-16 11:54:54 +00:00
|
|
|
def _clean_caps(self, caps):
|
|
|
|
"""
|
|
|
|
Returns a list of key=value or structure name, without "(types)" or ";" or ","
|
|
|
|
"""
|
|
|
|
return re.sub(r"\(.+?\)\s*| |;", '', caps).split(',')
|
|
|
|
|
|
|
|
def _has_caps_type_variant(self, c, ccaps):
|
|
|
|
"""
|
|
|
|
Handle situations where we can have application/ogg or video/ogg or
|
|
|
|
audio/ogg
|
|
|
|
"""
|
|
|
|
has_variant = False
|
|
|
|
media_type = re.findall("application/|video/|audio/", c)
|
|
|
|
if media_type:
|
|
|
|
media_type = media_type[0].replace('/', '')
|
|
|
|
possible_mtypes = ["application", "video", "audio"]
|
|
|
|
possible_mtypes.remove(media_type)
|
|
|
|
for tmptype in possible_mtypes:
|
|
|
|
possible_c_variant = c.replace(media_type, tmptype)
|
|
|
|
if possible_c_variant in ccaps:
|
2014-10-24 12:23:52 +00:00
|
|
|
self.info(
|
2015-03-18 10:05:08 +00:00
|
|
|
"Found %s in %s, good enough!", possible_c_variant, ccaps)
|
2014-07-16 11:54:54 +00:00
|
|
|
has_variant = True
|
|
|
|
|
|
|
|
return has_variant
|
|
|
|
|
2014-07-16 10:03:14 +00:00
|
|
|
def check_encoded_file(self):
|
2014-10-24 12:23:52 +00:00
|
|
|
result_descriptor = GstValidateMediaDescriptor.new_from_uri(
|
|
|
|
self.dest_file)
|
2014-09-19 07:13:13 +00:00
|
|
|
if result_descriptor is None:
|
|
|
|
return (Result.FAILED, "Could not discover encoded file %s"
|
|
|
|
% self.dest_file)
|
|
|
|
|
2014-07-16 11:54:54 +00:00
|
|
|
duration = result_descriptor.get_duration()
|
2014-07-16 10:03:14 +00:00
|
|
|
orig_duration = self.media_descriptor.get_duration()
|
|
|
|
tolerance = self._duration_tolerance
|
|
|
|
|
|
|
|
if orig_duration - tolerance >= duration <= orig_duration + tolerance:
|
2014-07-16 11:54:54 +00:00
|
|
|
os.remove(result_descriptor.get_path())
|
2014-07-16 10:03:14 +00:00
|
|
|
return (Result.FAILED, "Duration of encoded file is "
|
|
|
|
" wrong (%s instead of %s)" %
|
2014-10-24 12:23:52 +00:00
|
|
|
(utils.TIME_ARGS(duration),
|
|
|
|
utils.TIME_ARGS(orig_duration)))
|
2014-07-16 10:03:14 +00:00
|
|
|
else:
|
2014-07-16 11:54:54 +00:00
|
|
|
all_tracks_caps = result_descriptor.get_tracks_caps()
|
|
|
|
container_caps = result_descriptor.get_caps()
|
|
|
|
if container_caps:
|
|
|
|
all_tracks_caps.insert(0, ("container", container_caps))
|
|
|
|
|
|
|
|
for track_type, caps in all_tracks_caps:
|
|
|
|
ccaps = self._clean_caps(caps)
|
|
|
|
wanted_caps = self.combination.get_caps(track_type)
|
|
|
|
cwanted_caps = self._clean_caps(wanted_caps)
|
|
|
|
|
2014-10-24 12:38:00 +00:00
|
|
|
if wanted_caps is None:
|
2014-07-16 11:54:54 +00:00
|
|
|
os.remove(result_descriptor.get_path())
|
|
|
|
return (Result.FAILED,
|
|
|
|
"Found a track of type %s in the encoded files"
|
|
|
|
" but none where wanted in the encoded profile: %s"
|
|
|
|
% (track_type, self.combination))
|
|
|
|
|
|
|
|
for c in cwanted_caps:
|
|
|
|
if c not in ccaps:
|
2014-10-24 12:23:52 +00:00
|
|
|
if not self._has_caps_type_variant(c, ccaps):
|
2014-07-16 11:54:54 +00:00
|
|
|
os.remove(result_descriptor.get_path())
|
|
|
|
return (Result.FAILED,
|
|
|
|
"Field: %s (from %s) not in caps of the outputed file %s"
|
|
|
|
% (wanted_caps, c, ccaps))
|
|
|
|
|
|
|
|
os.remove(result_descriptor.get_path())
|
2014-07-16 10:03:14 +00:00
|
|
|
return (Result.PASSED, "")
|
|
|
|
|
2014-01-09 08:14:27 +00:00
|
|
|
|
2014-01-09 15:57:54 +00:00
|
|
|
class TestsManager(Loggable):

    """ A class responsible for managing tests. """

    # Identifier under which this manager is selected on the command line
    # and referenced from testsuite files.
    name = ""

    def __init__(self):
        Loggable.__init__(self)

        # Tests selected to run, and tests explicitly filtered out.
        self.tests = []
        self.unwanted_tests = []
        self.options = None
        self.args = None
        self.reporter = None
        # Compiled regexes from --wanted-tests / blacklists.
        self.wanted_tests_patterns = []
        self.blacklisted_tests_patterns = []
        self._generators = []
        # Queue on which running tests signal activity; drained by test_wait().
        self.queue = Queue.Queue()
        # Tests currently running.
        self.jobs = []
        self.total_num_tests = 0
        self.starting_test_num = 0
        self.check_testslist = True
        self.all_tests = None

    def init(self):
        # Subclasses return True when the manager is usable on this system.
        return False

    def list_tests(self):
        return sorted(list(self.tests))

    def add_test(self, test):
        # Route the test to self.tests or self.unwanted_tests depending on
        # the wanted/blacklist patterns; keep both lists sorted by classname.
        if self._is_test_wanted(test):
            if test not in self.tests:
                self.tests.append(test)
                self.tests.sort(key=lambda test: test.classname)
        else:
            # NOTE(review): the membership check is done against self.tests,
            # not self.unwanted_tests — presumably intentional mirroring, but
            # it allows duplicates in unwanted_tests; verify.
            if test not in self.tests:
                self.unwanted_tests.append(test)
                self.unwanted_tests.sort(key=lambda test: test.classname)

    def get_tests(self):
        return self.tests

    def populate_testsuite(self):
        # Hook for subclasses to register their tests.
        pass

    def add_generators(self, generators):
        """
        @generators: A list of, or one single #TestsGenerator to be used to generate tests
        """
        if isinstance(generators, list):
            self._generators.extend(generators)
        else:
            self._generators.append(generators)

        # Deduplicate (order is not preserved).
        self._generators = list(set(self._generators))

    def get_generators(self):
        return self._generators

    def _add_blacklist(self, blacklisted_tests):
        # Accept a single comma-separated string or a list of them.
        if not isinstance(blacklisted_tests, list):
            blacklisted_tests = [blacklisted_tests]

        for patterns in blacklisted_tests:
            for pattern in patterns.split(","):
                self.blacklisted_tests_patterns.append(re.compile(pattern))

    def set_default_blacklist(self, default_blacklist):
        # @default_blacklist: iterable of (pattern, bug-url) tuples.
        msg = "\nCurrently 'hardcoded' %s blacklisted tests:\n\n" % self.name
        for name, bug in default_blacklist:
            self._add_blacklist(name)
            msg += " + %s \n --> bug: %s\n" % (name, bug)

        printc(msg, Colors.FAIL, True)

    def add_options(self, parser):
        """ Add more arguments. """
        pass

    def set_settings(self, options, args, reporter):
        """ Set properties after options parsing. """
        self.options = options
        self.args = args
        self.reporter = reporter

        self.populate_testsuite()

        if self.options.valgrind:
            self.print_valgrind_bugs()

        if options.wanted_tests:
            for patterns in options.wanted_tests:
                for pattern in patterns.split(","):
                    self.wanted_tests_patterns.append(re.compile(pattern))

        if options.blacklisted_tests:
            for patterns in options.blacklisted_tests:
                self._add_blacklist(patterns)

    def _check_blacklisted(self, test):
        for pattern in self.blacklisted_tests_patterns:
            if pattern.findall(test.classname):
                return True

        return False

    def _is_test_wanted(self, test):
        # Blacklist wins over everything else.
        if self._check_blacklisted(test):
            return False

        # Skip tests longer than --long-limit.
        if test.duration > 0 and int(self.options.long_limit) < int(test.duration):
            self.info("Not activating %s as its duration (%d) is superior"
                      " than the long limit (%d)" % (test, test.duration,
                                                     int(self.options.long_limit)))
            return False

        # No explicit selection means everything (not blacklisted) runs.
        if not self.wanted_tests_patterns:
            return True

        for pattern in self.wanted_tests_patterns:
            if pattern.findall(test.classname):
                return True

        return False

    def test_wait(self):
        """Block until one of the running jobs reports completion and
        return it (after removing it from self.jobs)."""
        while True:
            # Check process every second for timeout
            try:
                self.queue.get(timeout=1)
            except Queue.Empty:
                pass

            for test in self.jobs:
                if test.process_update():
                    self.jobs.remove(test)
                    return test

    def tests_wait(self):
        # Wait for a job to finish; on Ctrl-C kill every running subprocess
        # before re-raising so nothing is left behind.
        try:
            test = self.test_wait()
            test.check_results()
        except KeyboardInterrupt:
            for test in self.jobs:
                test.kill_subprocess()
            raise

        return test

    def start_new_job(self, tests_left):
        """Pop the next test off @tests_left and launch it.
        Returns False when no test is left."""
        try:
            test = tests_left.pop(0)
        except IndexError:
            return False

        self.print_test_num(test)
        test.test_start(self.queue)

        self.jobs.append(test)

        return True

    def run_tests(self, starting_test_num, total_num_tests):
        """Run all of self.tests with up to --num-jobs in parallel.
        Returns Result.PASSED, or the failing test's result when
        --forever/--fatal-error is set."""
        self.total_num_tests = total_num_tests
        self.starting_test_num = starting_test_num

        num_jobs = min(self.options.num_jobs, len(self.tests))
        tests_left = list(self.tests)
        jobs_running = 0

        # Prime the pool.
        for i in range(num_jobs):
            if not self.start_new_job(tests_left):
                break
            jobs_running += 1

        # Refill the pool as jobs complete.
        while jobs_running != 0:
            test = self.tests_wait()
            jobs_running -= 1
            self.print_test_num(test)
            res = test.test_end()
            self.reporter.after_test(test)
            if res != Result.PASSED and (self.options.forever or
                                         self.options.fatal_error):
                return test.result
            if self.start_new_job(tests_left):
                jobs_running += 1

        return Result.PASSED

    def print_test_num(self, test):
        # Progress prefix "[n / total] " without a trailing newline, so the
        # test's own output follows on the same line.
        cur_test_num = self.starting_test_num + self.tests.index(test) + 1
        sys.stdout.write("[%d / %d] " % (cur_test_num, self.total_num_tests))

    def clean_tests(self):
        for test in self.tests:
            test.clean()

    def needs_http_server(self):
        return False

    def print_valgrind_bugs(self):
        # Hook for subclasses to advertise known valgrind issues.
        pass
|
|
|
|
|
2013-12-31 10:45:07 +00:00
|
|
|
|
2014-06-26 10:42:38 +00:00
|
|
|
class TestsGenerator(Loggable):

    """Base class for objects that generate a set of tests for a TestsManager."""

    def __init__(self, name, test_manager, tests=None):
        """
        @name: name of this generator
        @test_manager: the TestsManager the generated tests belong to
        @tests: optional initial iterable of tests
        """
        Loggable.__init__(self)
        self.name = name
        self.test_manager = test_manager
        # Keyed by classname so adding the same test twice keeps one entry.
        self._tests = {}
        # FIX: the default used to be a shared mutable `[]`; use None to
        # avoid the mutable-default-argument pitfall.
        for test in (tests or []):
            self._tests[test.classname] = test

    def generate_tests(self, *args):
        """
        Method that generates tests
        """
        # FIX: the varargs parameter was misleadingly named *kwargs.
        return list(self._tests.values())

    def add_test(self, test):
        self._tests[test.classname] = test
|
|
|
|
|
|
|
|
|
|
|
|
class GstValidateTestsGenerator(TestsGenerator):

    """TestsGenerator that (re)populates its tests right before generating."""

    def populate_tests(self, uri_minfo_special_scenarios, scenarios):
        """Hook for subclasses: fill self._tests from media infos/scenarios."""
        pass

    def generate_tests(self, uri_minfo_special_scenarios, scenarios):
        """Populate then return the full list of generated tests."""
        self.populate_tests(uri_minfo_special_scenarios, scenarios)
        return super(GstValidateTestsGenerator, self).generate_tests()
|
|
|
|
|
|
|
|
|
2014-01-09 14:17:53 +00:00
|
|
|
class _TestsLauncher(Loggable):

    """Top-level driver: discovers TestsManager subclasses (including ones
    provided by external "apps"), loads testsuites, and runs every tester."""

    def __init__(self, libsdir):
        Loggable.__init__(self)

        self.libsdir = libsdir
        self.options = None
        self.testers = []
        self.tests = []
        self.reporter = None
        self._list_testers()
        self.all_tests = None
        self.wanted_tests_patterns = []

    def _list_app_dirs(self):
        # Directories that may contain extra launcher "apps": the bundled
        # apps dir plus any colon-separated GST_VALIDATE_APPS_DIR entries.
        app_dirs = []
        app_dirs.append(os.path.join(self.libsdir, "apps"))
        env_dirs = os.environ.get("GST_VALIDATE_APPS_DIR")
        if env_dirs is not None:
            for dir_ in env_dirs.split(":"):
                app_dirs.append(dir_)
                # Also make the dir importable by the apps themselves.
                sys.path.append(dir_)

        return app_dirs

    def _exec_app(self, app_dir, env):
        # Execute every .py file in @app_dir inside namespace @env so the
        # TestsManager subclasses they define become visible there.
        try:
            files = os.listdir(app_dir)
        except OSError as e:
            self.debug("Could not list %s: %s" % (app_dir, e))
            files = []
        for f in files:
            if f.endswith(".py"):
                execfile(os.path.join(app_dir, f), env)

    def _exec_apps(self, env):
        app_dirs = self._list_app_dirs()
        for app_dir in app_dirs:
            self._exec_app(app_dir, env)

    def _list_testers(self):
        # Collect every TestsManager subclass reachable from this module's
        # globals plus the executed apps, and keep those that init() fine.
        env = globals().copy()
        self._exec_apps(env)

        testers = [i() for i in utils.get_subclasses(TestsManager, env)]
        for tester in testers:
            if tester.init() is True:
                self.testers.append(tester)
            else:
                self.warning("Can not init tester: %s -- PATH is %s"
                             % (tester.name, os.environ["PATH"]))

    def add_options(self, parser):
        for tester in self.testers:
            tester.add_options(parser)

    def _load_testsuites(self):
        # Import each requested testsuite as a python module and normalize
        # its TEST_MANAGER attribute to a list of tester names.
        testsuites = []
        for testsuite in self.options.testsuites:
            if not os.path.isabs(testsuite):
                testsuite = os.path.join(self.options.testsuites_dir, testsuite + ".py")

            try:
                sys.path.insert(0, os.path.dirname(testsuite))
                module = __import__(os.path.basename(testsuite).replace(".py", ""))
            except Exception as e:
                printc("Could not load testsuite: %s, reason: %s"
                       % (testsuite, e), Colors.FAIL)
                continue
            finally:
                # Undo the sys.path tweak whether the import worked or not.
                sys.path.remove(os.path.dirname(testsuite))

            testsuites.append(module)
            if not hasattr(module, "TEST_MANAGER"):
                # No explicit manager: the testsuite applies to all testers.
                module.TEST_MANAGER = [tester.name for tester in self.testers]
            elif not isinstance(module.TEST_MANAGER, list):
                module.TEST_MANAGER = [module.TEST_MANAGER]

        # Replace the option's list of names with the loaded modules.
        self.options.testsuites = testsuites

    def _setup_testsuites(self):
        for testsuite in self.options.testsuites:
            loaded = False
            wanted_test_manager = None
            if hasattr(testsuite, "TEST_MANAGER"):
                wanted_test_manager = testsuite.TEST_MANAGER
                if not isinstance(wanted_test_manager, list):
                    wanted_test_manager = [wanted_test_manager]

            for tester in self.testers:
                if wanted_test_manager is not None and \
                        tester.name not in wanted_test_manager:
                    continue

                if self.options.paths:
                    # Explicit media paths on the command line override the
                    # testsuite's own setup.
                    tester.register_defaults()
                    loaded = True
                elif testsuite.setup_tests(tester, self.options):
                    loaded = True

            if not loaded:
                printc("Could not load testsuite: %s"
                       " maybe because of missing TestManager"
                       % (testsuite), Colors.FAIL)

    def _load_config(self, options):
        # Legacy config-file support: the config is exec'd with this module's
        # globals, with each tester and `options` injected by name.
        printc("Loading config files is DEPRECATED"
               " you should use the new testsuite format now",)

        for tester in self.testers:
            tester.options = options
            globals()[tester.name] = tester
        globals()["options"] = options
        # Temporarily swap __file__ so the config sees its own path.
        c__file__ = __file__
        globals()["__file__"] = self.options.config
        execfile(self.options.config, globals())
        globals()["__file__"] = c__file__

    def set_settings(self, options, args):
        self.reporter = reporters.XunitReporter(options)

        self.options = options
        wanted_testers = None
        for tester in self.testers:
            if tester.name in args:
                wanted_testers = tester.name

        # Tester names given on the command line restrict self.testers;
        # those names are consumed from args.
        if wanted_testers:
            testers = self.testers
            self.testers = []
            for tester in testers:
                if tester.name in args:
                    self.testers.append(tester)
                    args.remove(tester.name)

        if options.config:
            self._load_config(options)

        self._load_testsuites()

        for tester in self.testers:
            tester.set_settings(options, args, self.reporter)

        if not options.config and options.testsuites:
            self._setup_testsuites()

    def _check_tester_has_other_testsuite(self, testsuite, tester):
        # True when @tester is (or may be) driven by a testsuite other than
        # @testsuite, in which case the .testslist check must be skipped.
        if len(testsuite.TEST_MANAGER) > 1:
            return True

        if tester.name != testsuite.TEST_MANAGER[0]:
            return True

        for t in self.options.testsuites:
            if t != testsuite:
                for other_testmanager in testsuite.TEST_MANAGER:
                    if other_testmanager == tester.name:
                        return True

        return False

    def _check_defined_tests(self, tester, tests):
        """Compare @tests against the testsuite's .testslist companion file,
        report added/removed tests, and rewrite the file."""
        if self.options.blacklisted_tests or self.options.wanted_tests:
            # A filtered run is not representative; skip the check.
            return

        tests_names = [test.classname for test in tests]
        for testsuite in self.options.testsuites:
            if not self._check_tester_has_other_testsuite(testsuite, tester) \
                    and tester.check_testslist:
                try:
                    # NOTE(review): 'rw' is not a standard open() mode — on
                    # CPython 2 it behaves like 'r'; confirm and consider
                    # plain 'r' here.
                    testlist_file = open(os.path.splitext(testsuite.__file__)[0] + ".testslist",
                                         'rw')

                    know_tests = testlist_file.read().split("\n")
                    testlist_file.close()

                    # Reopen truncating, to write the fresh list below.
                    testlist_file = open(os.path.splitext(testsuite.__file__)[0] + ".testslist",
                                         'w')
                except IOError:
                    return

                for test in know_tests:
                    if test and test not in tests_names:
                        printc("Test %s Not in testsuite %s anymore"
                               % (test, testsuite.__file__), Colors.FAIL)

                for test in tests_names:
                    testlist_file.write("%s\n" % test)
                    if test and test not in know_tests:
                        printc("Test %s is NEW in testsuite %s"
                               % (test, testsuite.__file__), Colors.OKGREEN)

                testlist_file.close()
                return

    def list_tests(self):
        for tester in self.testers:
            tests = tester.list_tests()
            self._check_defined_tests(tester, tests)
            self.tests.extend(tests)
        return sorted(list(self.tests))

    def _run_tests(self):
        cur_test_num = 0

        # NOTE(review): if self.all_tests is already populated (second pass
        # of --forever), total_num_tests below is never assigned and the
        # tester.run_tests() call raises NameError — verify and hoist the
        # assignment if so.
        if not self.all_tests:
            total_num_tests = 1
            self.all_tests = []
            for tester in self.testers:
                self.all_tests.extend(tester.list_tests())
            total_num_tests = len(self.all_tests)

        self.reporter.init_timer()
        for tester in self.testers:
            res = tester.run_tests(cur_test_num, total_num_tests)
            cur_test_num += len(tester.list_tests())
            if res != Result.PASSED and (self.options.forever or
                                         self.options.fatal_error):
                return False

        return True

    def _clean_tests(self):
        for tester in self.testers:
            tester.clean_tests()

    def run_tests(self):
        if self.options.forever:
            # Loop until a run fails (then _run_tests returns False).
            while self._run_tests():
                self._clean_tests()

            return False
        else:
            return self._run_tests()

    def final_report(self):
        self.reporter.final_report()

    def needs_http_server(self):
        # Implicitly returns None (falsy) when no tester needs it.
        for tester in self.testers:
            if tester.needs_http_server():
                return True
|
2014-01-24 10:41:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
class NamedDic(object):

    """Tiny attribute bag: every key of @props becomes an attribute."""

    def __init__(self, props):
        if not props:
            # None / empty mapping: leave the instance empty.
            return
        for key, value in props.iteritems():
            setattr(self, key, value)
|
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-01-30 15:56:51 +00:00
|
|
|
class Scenario(object):

    """A gst-validate scenario: its name, optional file path, and the
    properties read from its section in scenarios.def (exposed as
    attributes, with '-' replaced by '_')."""

    def __init__(self, name, props, path=None):
        """
        @name: scenario name
        @props: iterable of (property, value) pairs
        @path: path of a "special" scenario file, or None for built-ins
        """
        self.name = name
        self.path = path

        for prop, value in props:
            # Scenario properties use '-', python attributes use '_'.
            setattr(self, prop.replace("-", "_"), value)

    def get_execution_name(self):
        """Special scenarios are referenced by path, standard ones by name."""
        if self.path is not None:
            return self.path
        else:
            return self.name

    def seeks(self):
        if hasattr(self, "seek"):
            return bool(self.seek)

        return False

    def needs_clock_sync(self):
        if hasattr(self, "need_clock_sync"):
            return bool(self.need_clock_sync)

        return False

    def get_min_media_duration(self):
        if hasattr(self, "min_media_duration"):
            return float(self.min_media_duration)

        return 0

    def does_reverse_playback(self):
        # FIX: this used to return bool(self.seek), which raised
        # AttributeError when only `reverse-playback` was set and reported
        # the wrong value otherwise.
        if hasattr(self, "reverse_playback"):
            return bool(self.reverse_playback)

        return False

    def get_duration(self):
        try:
            return float(getattr(self, "duration"))
        except AttributeError:
            return 0

    def get_min_tracks(self, track_type):
        try:
            return int(getattr(self, "min_%s_track" % track_type))
        except AttributeError:
            return 0
|
2014-04-25 11:17:39 +00:00
|
|
|
|
2014-02-12 10:18:14 +00:00
|
|
|
|
2014-03-28 14:00:01 +00:00
|
|
|
class ScenarioManager(Loggable):

    """Singleton that discovers (via gst-validate) and caches the available
    scenarios."""

    _instance = None
    # Cache of all default scenarios once discover_scenarios() ran.
    all_scenarios = []

    FILE_EXTENSION = "scenario"

    GST_VALIDATE_COMMAND = "gst-validate-1.0"
    if "win32" in sys.platform:
        GST_VALIDATE_COMMAND += ".exe"

    def __new__(cls, *args, **kwargs):
        # Classic singleton: every instantiation returns the same object.
        if not cls._instance:
            cls._instance = super(ScenarioManager, cls).__new__(
                cls, *args, **kwargs)
            cls._instance.config = None
            cls._instance.discovered = False
            Loggable.__init__(cls._instance)

        return cls._instance

    def find_special_scenarios(self, mfile):
        """Return the scenarios specific to media file @mfile, i.e. the
        sibling files named <media-file>.<scenario-name>.scenario."""
        scenarios = []
        mfile_bname = os.path.basename(mfile)

        for f in os.listdir(os.path.dirname(mfile)):
            if re.findall(r"%s\..*\.%s$" % (re.escape(mfile_bname), self.FILE_EXTENSION), f):
                scenarios.append(os.path.join(os.path.dirname(mfile), f))

        if scenarios:
            scenarios = self.discover_scenarios(scenarios, mfile)

        return scenarios

    def discover_scenarios(self, scenario_paths=None, mfile=None):
        """
        Discover scenarios specified in scenario_paths or the default ones
        if nothing specified there
        """
        # FIX: scenario_paths used to be a mutable `[]` default argument.
        if scenario_paths is None:
            scenario_paths = []
        scenarios = []
        scenario_defs = os.path.join(self.config.main_dir, "scenarios.def")

        # FIX: both the discovery log and the defs file are now closed
        # deterministically instead of leaking until garbage collection.
        with open(os.path.join(self.config.logsdir,
                               "scenarios_discovery.log"), 'w') as logs:
            try:
                command = [self.GST_VALIDATE_COMMAND,
                           "--scenarios-defs-output-file", scenario_defs]
                command.extend(scenario_paths)
                subprocess.check_call(command, stdout=logs, stderr=logs)
            except subprocess.CalledProcessError:
                # Discovery failures are tolerated; we parse whatever the
                # tool managed to write.
                pass

        config = ConfigParser.ConfigParser()
        with open(scenario_defs) as f:
            config.readfp(f)

        for section in config.sections():
            if scenario_paths:
                for scenario_path in scenario_paths:
                    if mfile is None:
                        name = section
                        path = scenario_path
                    elif section in scenario_path:
                        # The real name of the scenario is:
                        # filename.REALNAME.scenario
                        name = scenario_path.replace(mfile + ".", "").replace(
                            "." + self.FILE_EXTENSION, "")
                        path = scenario_path
            else:
                name = section
                path = None

            scenarios.append(Scenario(name, config.items(section), path))

        if not scenario_paths:
            # Only a full (default) discovery populates the shared cache.
            self.discovered = True
            self.all_scenarios.extend(scenarios)

        return scenarios

    def get_scenario(self, name):
        """Return the Scenario called @name, the full list when @name is
        None, or None when it cannot be found. An absolute path to a
        .scenario file is discovered on the fly."""
        if name is not None and os.path.isabs(name) and name.endswith(self.FILE_EXTENSION):
            scenarios = self.discover_scenarios([name])

            if scenarios:
                return scenarios[0]

        if self.discovered is False:
            self.discover_scenarios()

        if name is None:
            return self.all_scenarios

        try:
            return [scenario for scenario in self.all_scenarios if scenario.name == name][0]
        except IndexError:
            self.warning("Scenario: %s not found" % name)
            return None
|
2014-06-26 10:42:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
class GstValidateBaseTestManager(TestsManager):

    """TestsManager holding the default scenarios and encoding formats that
    generators use when building gst-validate tests."""

    scenarios_manager = ScenarioManager()

    def __init__(self):
        super(GstValidateBaseTestManager, self).__init__()
        self._scenarios = []
        self._encoding_formats = []

    def add_scenarios(self, scenarios):
        """
        @scenarios A list or a unic scenario name(s) to be run on the tests.
                   They are just the default scenarios, and then depending on
                   the TestsGenerator to be used you can have more fine grained
                   control on what to be run on each serie of tests.
        """
        new_scenarios = scenarios if isinstance(scenarios, list) else [scenarios]
        self._scenarios.extend(new_scenarios)

        # Deduplicate (order is not preserved).
        self._scenarios = list(set(self._scenarios))

    def get_scenarios(self):
        return self._scenarios

    def add_encoding_formats(self, encoding_formats):
        """
        :param encoding_formats: A list or one single #MediaFormatCombinations describing wanted output
                                 formats for transcoding test.
                                 They are just the default encoding formats, and then depending on
                                 the TestsGenerator to be used you can have more fine grained
                                 control on what to be run on each serie of tests.
        """
        new_formats = (encoding_formats if isinstance(encoding_formats, list)
                       else [encoding_formats])
        self._encoding_formats.extend(new_formats)

        # Deduplicate (order is not preserved).
        self._encoding_formats = list(set(self._encoding_formats))

    def get_encoding_formats(self):
        return self._encoding_formats
|
2014-07-16 08:10:44 +00:00
|
|
|
|
|
|
|
|
2014-07-16 09:36:29 +00:00
|
|
|
class MediaDescriptor(Loggable):
    """Abstract description of a media sample used by the tests.

    Subclasses must implement the accessors below; this base class only
    provides the compatibility check between a media sample and a scenario.
    """

    def __init__(self):
        Loggable.__init__(self)

    def get_path(self):
        # Fixed: the original `raise NotImplemented` raised a TypeError
        # (NotImplemented is a value, not an exception class); the intended
        # abstract-method behavior is NotImplementedError. Same fix applied
        # to every accessor below.
        raise NotImplementedError

    def get_media_filepath(self):
        raise NotImplementedError

    def get_caps(self):
        raise NotImplementedError

    def get_uri(self):
        raise NotImplementedError

    def get_duration(self):
        raise NotImplementedError

    def get_protocol(self):
        raise NotImplementedError

    def is_seekable(self):
        raise NotImplementedError

    def is_image(self):
        raise NotImplementedError

    def get_num_tracks(self, track_type):
        raise NotImplementedError

    def can_play_reverse(self):
        raise NotImplementedError

    def is_compatible(self, scenario):
        """Return True if @scenario can sensibly be run on this media.

        A None scenario is always compatible. Otherwise the scenario's
        requirements (seeking, clock sync, reverse playback, minimum
        duration, minimum track counts) are checked one by one.
        """
        if scenario is None:
            return True

        if scenario.seeks() and (not self.is_seekable() or self.is_image()):
            self.debug("Do not run %s as %s does not support seeking",
                       scenario, self.get_uri())
            return False

        if self.is_image() and scenario.needs_clock_sync():
            self.debug("Do not run %s as %s is an image",
                       scenario, self.get_uri())
            return False

        if not self.can_play_reverse() and scenario.does_reverse_playback():
            return False

        # Duration of 0 (or unset) skips the minimum-duration check.
        if self.get_duration() and self.get_duration() / GST_SECOND < scenario.get_min_media_duration():
            # Fixed message: "duation" typo and missing closing parenthesis.
            self.debug(
                "Do not run %s as %s is too short (%i < min media duration: %i)",
                scenario, self.get_uri(),
                self.get_duration() / GST_SECOND,
                scenario.get_min_media_duration())
            return False

        for track_type in ['audio', 'subtitle']:
            if self.get_num_tracks(track_type) < scenario.get_min_tracks(track_type):
                self.debug("%s -- %s | At least %s %s track needed < %s"
                           % (scenario, self.get_uri(), track_type,
                              scenario.get_min_tracks(track_type),
                              self.get_num_tracks(track_type)))
                return False

        return True
|
|
|
|
|
|
|
|
|
|
|
|
class GstValidateMediaDescriptor(MediaDescriptor):
    """MediaDescriptor backed by a gst-validate-media-check XML file."""

    # Some extension file for discovering results
    MEDIA_INFO_EXT = "media_info"
    STREAM_INFO_EXT = "stream_info"

    DISCOVERER_COMMAND = "gst-validate-media-check-1.0"
    if "win32" in sys.platform:
        DISCOVERER_COMMAND += ".exe"

    def __init__(self, xml_path):
        super(GstValidateMediaDescriptor, self).__init__()

        self._xml_path = xml_path
        self.media_xml = ET.parse(xml_path).getroot()

        # Sanity checks: fail early (KeyError) if mandatory attributes
        # are missing from the descriptor.
        self.media_xml.attrib["duration"]
        self.media_xml.attrib["seekable"]

        # NOTE(review): the scheme is run through urlparse twice,
        # presumably to unwrap nested URI schemes — confirm before
        # simplifying to a single urlparse() call.
        self.set_protocol(urlparse.urlparse(urlparse.urlparse(self.get_uri()).scheme).scheme)

    @staticmethod
    def new_from_uri(uri, verbose=False, full=False):
        """Run the media-check discoverer on @uri.

        :param uri: The URI to generate a media-info descriptor for.
        :param verbose: Print the command being run and its result.
        :param full: Pass --full to the discoverer.
        :returns: A GstValidateMediaDescriptor, or None on failure.
        """
        media_path = utils.url2path(uri)
        descriptor_path = "%s.%s" % (
            media_path, GstValidateMediaDescriptor.MEDIA_INFO_EXT)
        args = GstValidateMediaDescriptor.DISCOVERER_COMMAND.split(" ")
        args.append(uri)

        args.extend(["--output-file", descriptor_path])
        if full:
            args.extend(["--full"])

        if verbose:
            printc("Generating media info for %s\n"
                   " Command: '%s'" % (media_path, ' '.join(args)),
                   Colors.OKBLUE)

        try:
            # Fixed: os.devnull was opened in the default read mode, so the
            # child process could not actually write its stderr to it, and
            # the file object was never closed. Open for writing and close
            # it deterministically.
            with open(os.devnull, 'w') as devnull:
                subprocess.check_output(args, stderr=devnull)
        except subprocess.CalledProcessError as e:
            if verbose:
                printc("Result: Failed", Colors.FAIL)
            else:
                loggable.warning("GstValidateMediaDescriptor", "Exception: %s" % e)
            return None

        if verbose:
            printc("Result: Passed", Colors.OKGREEN)

        return GstValidateMediaDescriptor(descriptor_path)

    def get_path(self):
        return self._xml_path

    def need_clock_sync(self):
        return Protocols.needs_clock_sync(self.get_protocol())

    def get_media_filepath(self):
        # The media file lives next to its descriptor; strip the extension.
        if self.get_protocol() == Protocols.FILE:
            return self._xml_path.replace("." + self.MEDIA_INFO_EXT, "")
        else:
            return self._xml_path.replace("." + self.STREAM_INFO_EXT, "")

    def get_caps(self):
        return self.media_xml.findall("streams")[0].attrib["caps"]

    def get_tracks_caps(self):
        """Return a list of (stream type, caps) tuples, one per stream."""
        res = []
        try:
            streams = self.media_xml.findall("streams")[0].findall("stream")
        except IndexError:
            # No <streams> element at all.
            return res

        for stream in streams:
            res.append((stream.attrib["type"], stream.attrib["caps"]))

        return res

    def get_uri(self):
        return self.media_xml.attrib["uri"]

    def get_duration(self):
        # Duration is stored in nanoseconds; long() as this is Python 2.
        return long(self.media_xml.attrib["duration"])

    def set_protocol(self, protocol):
        self.media_xml.attrib["protocol"] = protocol

    def get_protocol(self):
        return self.media_xml.attrib["protocol"]

    def is_seekable(self):
        # NOTE(review): returns the raw XML attribute string, so even
        # "false" is truthy to callers — confirm whether this should be
        # converted to a real boolean.
        return self.media_xml.attrib["seekable"]

    def can_play_reverse(self):
        return True

    def is_image(self):
        for stream in self.media_xml.findall("streams")[0].findall("stream"):
            if stream.attrib["type"] == "image":
                return True
        return False

    def get_num_tracks(self, track_type):
        n = 0
        for stream in self.media_xml.findall("streams")[0].findall("stream"):
            if stream.attrib["type"] == track_type:
                n += 1

        return n

    def get_clean_name(self):
        """Return the descriptor's basename with the info extension and
        dots stripped, suitable for use as a test name component."""
        name = os.path.basename(self.get_path())
        # Raw string: keeps the regex escapes explicit and warning-free.
        name = re.sub(r"\.stream_info|\.media_info", "", name)

        return name.replace('.', "_")
|
2014-07-16 10:16:03 +00:00
|
|
|
|
2014-10-24 12:23:52 +00:00
|
|
|
|
2014-07-16 10:16:03 +00:00
|
|
|
class MediaFormatCombination(object):
    """A (container, audio, video) format triplet used by transcoding tests."""

    # Short format names mapped to their GStreamer caps strings.
    FORMATS = {"aac": "audio/mpeg,mpegversion=4",
               "ac3": "audio/x-ac3",
               "vorbis": "audio/x-vorbis",
               "mp3": "audio/mpeg,mpegversion=1,layer=3",
               "h264": "video/x-h264",
               "vp8": "video/x-vp8",
               "theora": "video/x-theora",
               "ogg": "application/ogg",
               "mkv": "video/x-matroska",
               "mp4": "video/quicktime,variant=iso;",
               "webm": "video/webm"}

    def __init__(self, container, audio, video):
        """
        Describes a media format to be used for transcoding tests.

        :param container: A string naming the container format, must be in self.FORMATS
        :param audio: A string naming the audio format, must be in self.FORMATS
        :param video: A string naming the video format, must be in self.FORMATS
        """
        self.container = container
        self.audio = audio
        self.video = video

    def __str__(self):
        return "%s and %s in %s" % (self.audio, self.video, self.container)

    def get_caps(self, track_type):
        """Caps string for @track_type ("audio"/"video"/"container"),
        or None when the track type or format name is unknown."""
        try:
            # Look the requested attribute up by name, then map its format
            # name to caps; either lookup may raise KeyError.
            return self.FORMATS[vars(self)[track_type]]
        except KeyError:
            return None

    def get_audio_caps(self):
        return self.get_caps("audio")

    def get_video_caps(self):
        return self.get_caps("video")

    def get_muxer_caps(self):
        return self.get_caps("container")
|