tools: add more options to track flaky tests
Refs: #43929 (comment)

PR-URL: #43954
Backport-PR-URL: #45126
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Tobias Nießen <tniessen@tnie.de>
Reviewed-By: Feng Yu <F3n67u@outlook.com>
aduh95 authored and danielleadams committed Oct 26, 2022
1 parent 98c49d8 commit a1d5209
Showing 7 changed files with 40 additions and 24 deletions.
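
This commit adds two knobs to tools/test.py: a new keep_retrying value for --flaky-tests, which re-runs a failing test that is marked flaky up to 99 more times and records it as a hard failure only if it never passes, and a new --measure-flakiness <n> option, which re-runs any other failing test n more times and reports how many of the runs failed. The workflows below switch FLAKY_TESTS from dontcare to keep_retrying and append --measure-flakiness 9 to TEST_CI_ARGS. A minimal standalone sketch of the two new behaviours (not the code from tools/test.py; run_once, is_flaky and the string return values are illustrative assumptions):

# Simplified sketch only; run_once() is a hypothetical callable that
# returns True when a re-run of the failing test passes.
def classify_failure(run_once, is_flaky, flaky_tests_mode, measure_flakiness):
  if is_flaky and flaky_tests_mode == 'keep_retrying':
    for _ in range(99):
      if run_once():
        return 'flaky'   # passed on a re-run: recorded as flaky, not fatal
    return 'failed'      # never passed across 100 attempts in total
  if measure_flakiness:
    # Re-run the failing test n more times; the +1 counts the initial failure.
    failures = sum(1 for _ in range(measure_flakiness) if not run_once()) + 1
    print(" failed {} out of {}".format(failures, measure_flakiness + 1))
  return 'failed'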
7 changes: 3 additions & 4 deletions .github/workflows/build-tarball.yml
@@ -11,12 +11,11 @@ on:
       - v[0-9]+.x
 
 env:
-  FLAKY_TESTS: dontcare
+  PYTHON_VERSION: '3.10'
+  FLAKY_TESTS: keep_retrying
 
 jobs:
   build-tarball:
-    env:
-      PYTHON_VERSION: '3.10'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
@@ -71,4 +70,4 @@ jobs:
       - name: Test
         run: |
           cd $TAR_DIR
-          make run-ci -j2 V=1 TEST_CI_ARGS="-p dots"
+          make run-ci -j2 V=1 TEST_CI_ARGS="-p dots --measure-flakiness 9"
2 changes: 1 addition & 1 deletion .github/workflows/build-windows.yml
@@ -13,7 +13,7 @@ on:
 
 env:
   PYTHON_VERSION: '3.10'
-  FLAKY_TESTS: dontcare
+  FLAKY_TESTS: keep_retrying
 
 jobs:
   build-windows:
2 changes: 1 addition & 1 deletion .github/workflows/misc.yml
@@ -31,4 +31,4 @@ jobs:
           name: docs
           path: out/doc
       - name: Test
-        run: NODE=$(command -v node) make test-doc-ci TEST_CI_ARGS="-p actions"
+        run: NODE=$(command -v node) make test-doc-ci TEST_CI_ARGS="-p actions --measure-flakiness 9"
4 changes: 2 additions & 2 deletions .github/workflows/test-asan.yml
@@ -19,7 +19,7 @@ on:
 
 env:
   PYTHON_VERSION: '3.10'
-  FLAKY_TESTS: dontcare
+  FLAKY_TESTS: keep_retrying
 
 jobs:
   test-asan:
@@ -40,4 +40,4 @@ jobs:
       - name: Build
         run: make build-ci -j2 V=1
       - name: Test
-        run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions -t 300"
+        run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions -t 300 --measure-flakiness 9"
4 changes: 2 additions & 2 deletions .github/workflows/test-linux.yml
@@ -13,7 +13,7 @@ on:
 
 env:
   PYTHON_VERSION: '3.10'
-  FLAKY_TESTS: dontcare
+  FLAKY_TESTS: keep_retrying
 
 jobs:
   test-linux:
@@ -29,4 +29,4 @@ jobs:
       - name: Build
         run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn"
       - name: Test
-        run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions"
+        run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --measure-flakiness 9"
4 changes: 2 additions & 2 deletions .github/workflows/test-macos.yml
@@ -19,7 +19,7 @@ on:
 
 env:
   PYTHON_VERSION: '3.10'
-  FLAKY_TESTS: dontcare
+  FLAKY_TESTS: keep_retrying
 
 jobs:
   test-macOS:
@@ -35,4 +35,4 @@ jobs:
       - name: Build
         run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn"
       - name: Test
-        run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions"
+        run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --measure-flakiness 9"
41 changes: 29 additions & 12 deletions tools/test.py
@@ -96,10 +96,11 @@ def get_module(name, path):
 
 class ProgressIndicator(object):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases, flaky_tests_mode, measure_flakiness):
     self.cases = cases
     self.serial_id = 0
     self.flaky_tests_mode = flaky_tests_mode
+    self.measure_flakiness = measure_flakiness
     self.parallel_queue = Queue(len(cases))
     self.sequential_queue = Queue(len(cases))
     for case in cases:
@@ -211,10 +212,22 @@ def RunSingle(self, parallel, thread_id):
       if output.UnexpectedOutput():
         if FLAKY in output.test.outcomes and self.flaky_tests_mode == DONTCARE:
           self.flaky_failed.append(output)
+        elif FLAKY in output.test.outcomes and self.flaky_tests_mode == KEEP_RETRYING:
+          for _ in range(99):
+            if not case.Run().UnexpectedOutput():
+              self.flaky_failed.append(output)
+              break
+          else:
+            # If the test still does not pass after 100 tries, it is not flaky.
+            self.failed.append(output)
         else:
           self.failed.append(output)
           if output.HasCrashed():
             self.crashed += 1
+          if self.measure_flakiness:
+            outputs = [case.Run() for _ in range(self.measure_flakiness)]
+            # The +1s account for the initial failure that got us here.
+            print(" failed {} out of {}".format(len([i for i in outputs if i.UnexpectedOutput()]) + 1, self.measure_flakiness + 1))
       else:
         self.succeeded += 1
       self.remaining -= 1
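
The retry loop above relies on Python's for/else: the else branch runs only when the loop finishes without hitting break, i.e. when none of the 99 re-runs passed. A tiny illustration (the values are made up, not taken from the test runner):

for attempt in range(99):
  if attempt == 2:                          # pretend the third re-run passed
    print("recovered on attempt", attempt)  # mirrors the break path above
    break
else:
  print("never passed")                     # runs only if the loop never broke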
@@ -436,8 +449,8 @@ def Done(self):
 
 class CompactProgressIndicator(ProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode, templates):
-    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
+  def __init__(self, cases, flaky_tests_mode, measure_flakiness, templates):
+    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -492,29 +505,29 @@ def PrintProgress(self, name):
 
 class ColorProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases, flaky_tests_mode, measure_flakiness):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness, templates)
 
   def ClearLine(self, last_line_length):
     print("\033[1K\r", end='')
 
 
 class MonochromeProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases, flaky_tests_mode, measure_flakiness):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
       'stderr': '%s',
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness, templates)
 
   def ClearLine(self, last_line_length):
     print(("\r" + (" " * last_line_length) + "\r"), end='')
@@ -946,8 +959,8 @@ def GetVm(self, arch, mode):
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[ARCH_GUESS or 'ia32'][mode]
 
-def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
+def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode, measure_flakiness):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode, measure_flakiness)
   return progress.Run(tasks)
 
 # -------------------------------------------
@@ -965,6 +978,7 @@ def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
 SLOW = 'slow'
 FLAKY = 'flaky'
 DONTCARE = 'dontcare'
+KEEP_RETRYING = 'keep_retrying'
 
 class Expression(object):
   pass
@@ -1353,8 +1367,11 @@ def BuildOptions():
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
   result.add_option("--flaky-tests",
-      help="Regard tests marked as flaky (run|skip|dontcare)",
+      help="Regard tests marked as flaky (run|skip|dontcare|keep_retrying)",
       default="run")
+  result.add_option("--measure-flakiness",
+      help="When a test fails, re-run it x number of times",
+      default=0, type="int")
   result.add_option("--skip-tests",
       help="Tests that should not be executed (comma-separated)",
       default="")
@@ -1426,7 +1443,7 @@ def ProcessOptions(options):
   # tends to exaggerate the number of available cpus/cores.
   cores = os.environ.get('JOBS')
   options.j = int(cores) if cores is not None else multiprocessing.cpu_count()
-  if options.flaky_tests not in [RUN, SKIP, DONTCARE]:
+  if options.flaky_tests not in [RUN, SKIP, DONTCARE, KEEP_RETRYING]:
     print("Unknown flaky-tests mode %s" % options.flaky_tests)
     return False
   return True
@@ -1726,7 +1743,7 @@ def should_keep(case):
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
+      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests, options.measure_flakiness):
         result = 0
       else:
         result = 1
