validate: Add a mechanism to rerun failing tests
And add a way to mark some 'flakes' as tolerated
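
For context, the new 'allow_flakiness' flag is read from the expected-issues definitions that a testsuite hands to the TestsManager (see the TestsManager hunks below). A minimal sketch of such an entry, assuming the usual dict layout keyed by bug id/URL; the URL, the variable name and the test regex here are placeholders, only the 'tests' and 'allow_flakiness' keys come from this diff:

    EXPECTED_ISSUES = {
        "https://gitlab.freedesktop.org/gstreamer/gst-devtools/issues/000": {
            # Regexes matched against Test.classname
            "tests": [
                "validate.file.playback.*some_flaky_media.*",
            ],
            # Tolerated flake: when the launcher reruns failures
            # (--retry-on-failures), a first failure of a matched test is
            # kept out of the final report and only counts if the rerun
            # fails too.
            "allow_flakiness": True,
        },
    }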
This commit is contained in:
parent 520c2102cc
commit 92f1979ec9

2 changed files with 72 additions and 23 deletions
@@ -124,6 +124,7 @@ class Test(Loggable):
         # String representation of the test number in the testsuite
         self.number = ""
         self.workdir = workdir
+        self.allow_flakiness = False
 
         self.clean()
 
@@ -227,6 +228,7 @@ class Test(Loggable):
 
     def close_logfile(self):
         if not self.options.redirect_logs:
+            self.out.flush()
             self.out.close()
 
         self.out = None
@@ -612,7 +614,17 @@ class Test(Loggable):
         for logfile in self.extra_logfiles:
             self._dump_log_file(logfile)
 
-    def test_end(self):
+    def copy_logfiles(self, extra_folder="flaky_tests"):
+        path = os.path.dirname(os.path.join(self.options.logsdir, extra_folder,
+                                            self.classname.replace(".", os.sep)))
+        mkdir(path)
+        self.logfile = shutil.copy(self.logfile, path)
+        extra_logs = []
+        for logfile in self.extra_logfiles:
+            extra_logs.append(shutil.copy(logfile, path))
+        self.extra_logfiles = extra_logs
+
+    def test_end(self, retry_on_failure=False):
         self.kill_subprocess()
         self.thread.join()
         self.time_taken = time.time() - self._starting_time
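
To make the path handling in copy_logfiles() concrete: it joins the logs directory, the extra folder and the dotted class name converted to a directory hierarchy, then strips the last component with os.path.dirname(). A small sketch with made-up values for logsdir and classname:

    import os

    logsdir = "/tmp/logs"                                # hypothetical options.logsdir
    classname = "validate.file.playback.reverse.sample"  # hypothetical Test.classname

    path = os.path.dirname(os.path.join(logsdir, "flaky_tests",
                                        classname.replace(".", os.sep)))
    print(path)  # /tmp/logs/flaky_tests/validate/file/playback/reverse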
@@ -620,15 +632,17 @@ class Test(Loggable):
         if self.options.gdb:
             signal.signal(signal.SIGINT, self.previous_sigint_handler)
 
-        if self.result != Result.PASSED:
-            message = str(self)
-            end = "\n"
+        message = None
+        end = "\n"
+        if self.result != Result.PASSED:
+            if not retry_on_failure:
+                message = str(self)
+                end = "\n"
         else:
-            message = "%s %s: %s%s" % (self.number, self.classname, self.result,
-                                       " (" + self.message + ")" if self.message else "")
-            end = "\r"
-            if not is_tty():
-                message = None
+            if is_tty():
+                message = "%s %s: %s%s" % (self.number, self.classname, self.result,
+                                           " (" + self.message + ")" if self.message else "")
+                end = "\r"
 
         if message is not None:
             printc(message, color=utils.get_color_for_result(
@@ -1303,9 +1317,13 @@ class TestsManager(Loggable):
                 tests_regexes.append(regex)
                 for test in self.tests:
                     if regex.findall(test.classname):
-                        test.expected_issues.extend(failure_def['issues'])
-                        self.debug("%s added expected issues from %s" % (
-                            test.classname, bugid))
+                        if failure_def.get('allow_flakiness'):
+                            test.allow_flakiness = True
+                            self.debug("%s allow flakyness" % (test.classname))
+                        else:
+                            test.expected_issues.extend(failure_def['issues'])
+                            self.debug("%s added expected issues from %s" % (
+                                test.classname, bugid))
             failure_def['tests'] = tests_regexes
 
         self.expected_issues.update(expected_issues)
@@ -1317,9 +1335,13 @@ class TestsManager(Loggable):
         for bugid, failure_def in list(self.expected_issues.items()):
             for regex in failure_def['tests']:
                 if regex.findall(test.classname):
-                    test.expected_issues.extend(failure_def['issues'])
-                    self.debug("%s added expected issues from %s" % (
-                        test.classname, bugid))
+                    if failure_def.get('allow_flakiness'):
+                        test.allow_flakiness = True
+                        self.debug("%s allow flakyness" % (test.classname))
+                    else:
+                        test.expected_issues.extend(failure_def['issues'])
+                        self.debug("%s added expected issues from %s" % (
+                            test.classname, bugid))
 
         if self._is_test_wanted(test):
             if test not in self.tests:
@@ -1874,9 +1896,13 @@ class _TestsLauncher(Loggable):
 
         return True
 
-    def _run_tests(self):
+    def _run_tests(self, running_tests=None, all_alone=False, retry_on_failures=False):
         if not self.all_tests:
             self.all_tests = self.list_tests()
+
+        if not running_tests:
+            running_tests = self.tests
+
         self.total_num_tests = len(self.all_tests)
         if not is_tty():
             printc("\nRunning %d tests..." % self.total_num_tests, color=Colors.HEADER)
@@ -1884,8 +1910,8 @@ class _TestsLauncher(Loggable):
         self.reporter.init_timer()
         alone_tests = []
         tests = []
-        for test in self.tests:
-            if test.is_parallel:
+        for test in running_tests:
+            if test.is_parallel and not all_alone:
                 tests.append(test)
             else:
                 alone_tests.append(test)
@@ -1900,6 +1926,7 @@ class _TestsLauncher(Loggable):
             random.shuffle(alone_tests)
 
         current_test_num = 1
+        to_retry = []
         for num_jobs, tests in [(max_num_jobs, tests), (1, alone_tests)]:
             tests_left = list(tests)
             for i in range(num_jobs):
@@ -1913,14 +1940,33 @@ class _TestsLauncher(Loggable):
                 test.number = "[%d / %d] " % (current_test_num,
                                               self.total_num_tests)
                 current_test_num += 1
-                res = test.test_end()
-                self.reporter.after_test(test)
-                if res != Result.PASSED and (self.options.forever
-                                             or self.options.fatal_error):
-                    return False
+                res = test.test_end(retry_on_failure=retry_on_failures)
+                to_report = True
+                if res != Result.PASSED:
+                    if self.options.forever or self.options.fatal_error:
+                        return False
+
+                    if retry_on_failures:
+                        if not self.options.redirect_logs:
+                            test.copy_logfiles()
+                        printc(test)
+                        test.clean()
+                        to_retry.append(test)
+
+                        # Not adding to final report if flakiness is tolerated
+                        to_report = not test.allow_flakiness
+                if to_report:
+                    self.reporter.after_test(test)
                 if self.start_new_job(tests_left):
                     jobs_running += 1
 
+        if to_retry:
+            printc("--> Rerunning the following tests to see if they are flaky:", Colors.WARNING)
+            for test in to_retry:
+                printc(' * %s' % test.classname)
+            printc('')
+            return self._run_tests(to_retry, all_alone=True, retry_on_failures=False)
+
         return True
 
     def clean_tests(self):
@@ -1957,7 +2003,7 @@ class _TestsLauncher(Loggable):
 
             return res
         else:
-            return self._run_tests()
+            return self._run_tests(retry_on_failures=self.options.retry_on_failures)
     finally:
         if self.httpsrv:
             self.httpsrv.stop()
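
Taken together, the _run_tests() changes implement a two-pass scheme: the first pass runs normally, collects every unexpected failure into to_retry (still reporting it right away unless the test tolerates flakiness), then calls itself once more with all_alone=True so the suspects run serially and with retry_on_failures=False so a second failure is final. A self-contained sketch of that control flow, using stand-in Test and report() helpers rather than the launcher's real classes (all_alone is kept only for shape, since this sketch has no parallel jobs):

    import random


    class Test:
        def __init__(self, name, allow_flakiness=False):
            self.name = name
            self.allow_flakiness = allow_flakiness

        def run(self):
            return random.random() > 0.1        # pretend ~10% of runs fail


    def report(test, passed):
        print("%s: %s" % (test.name, "PASSED" if passed else "FAILED"))


    def run_tests(tests, retry_on_failures=True, all_alone=False):
        to_retry = []
        for test in tests:                      # real launcher: parallel jobs unless all_alone
            passed = test.run()
            if not passed and retry_on_failures:
                to_retry.append(test)
                if test.allow_flakiness:
                    continue                    # tolerated flake: skip the report on the first pass
            report(test, passed)
        if to_retry:
            print("--> Rerunning %d suspected flaky tests" % len(to_retry))
            # Second pass: run them alone, and this time failures are final.
            return run_tests(to_retry, retry_on_failures=False, all_alone=True)
        return True


    run_tests([Test("t1"), Test("t2", allow_flakiness=True)])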
@@ -223,6 +223,7 @@ class LauncherConfig(Loggable):
         self.force_sync = False
         self.sync_all = False
         self.check_bugs_status = False
+        self.retry_on_failures = False
 
     def cleanup(self):
         """
@@ -480,6 +481,8 @@ class LauncherConfig(Loggable):
                             help="Runs the test in a random order. Can help speed up the overall"
                             " test time by running synchronized and unsynchronized tests"
                             " at the same time")
+        parser.add_argument('--retry-on-failures', dest="retry_on_failures", action="store_true",
+                            help="Re-try tests that produce unexpected results")
         dir_group = parser.add_argument_group(
             "Directories and files to be used by the launcher")
         dir_group.add_argument("-M", "--main-dir", dest="main_dir",
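
With the option wired into LauncherConfig and argparse, enabling the behaviour should only require passing the new flag on the command line, presumably through the usual gst-validate-launcher entry point (the binary name is not part of this diff):

    gst-validate-launcher --retry-on-failures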