
Add support for Bazel's TESTBRIDGE_TEST_ONLY env var
Closes #2490
horenmar committed Oct 18, 2022
1 parent 7d88207 commit 8f2a0db
Showing 4 changed files with 121 additions and 35 deletions.
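For context: per Bazel's test encyclopedia, the value of the `--test_filter` flag is forwarded to the test binary through the `TESTBRIDGE_TEST_ONLY` environment variable, and `XML_OUTPUT_FILE` names the path where Bazel expects the test's XML results. A minimal standalone sketch of the two lookups this commit wires into Catch2's config (illustrative only, not the patched code itself):

    #include <cstdlib>
    #include <iostream>

    int main() {
        // Bazel forwards --test_filter via TESTBRIDGE_TEST_ONLY.
        if (const char* spec = std::getenv("TESTBRIDGE_TEST_ONLY")) {
            std::cout << "test spec from Bazel: " << spec << '\n';
        }
        // Bazel expects XML results at this path; if the test binary
        // writes the file itself, Bazel skips its default XML output.
        if (const char* xmlPath = std::getenv("XML_OUTPUT_FILE")) {
            std::cout << "XML output path: " << xmlPath << '\n';
        }
    }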
89 changes: 54 additions & 35 deletions src/catch2/catch_config.cpp
@@ -15,7 +15,7 @@
 #include <catch2/interfaces/catch_interfaces_tag_alias_registry.hpp>
 
 namespace {
-    bool enableBazelEnvSupport() {
+    static bool enableBazelEnvSupport() {
 #if defined(CATCH_CONFIG_BAZEL_SUPPORT)
         return true;
 #elif defined(CATCH_PLATFORM_WINDOWS_UWP)
@@ -62,17 +62,6 @@ namespace Catch {
             elem = trim(elem);
         }
 
-
-        TestSpecParser parser(ITagAliasRegistry::get());
-        if (!m_data.testsOrTags.empty()) {
-            m_hasTestFilters = true;
-            for (auto const& testOrTags : m_data.testsOrTags) {
-                parser.parse(testOrTags);
-            }
-        }
-        m_testSpec = parser.testSpec();
-
-
         // Insert the default reporter if user hasn't asked for a specific one
         if ( m_data.reporterSpecifications.empty() ) {
             m_data.reporterSpecifications.push_back( {
@@ -85,29 +74,21 @@ namespace Catch {
             } );
         }
 
-#if !defined(CATCH_PLATFORM_WINDOWS_UWP)
-        if(enableBazelEnvSupport()){
-            // Register a JUnit reporter for Bazel. Bazel sets an environment
-            // variable with the path to XML output. If this file is written to
-            // during test, Bazel will not generate a default XML output.
-            // This allows the XML output file to contain higher level of detail
-            // than what is possible otherwise.
-#    if defined( _MSC_VER )
-            // On Windows getenv throws a warning as there is no input validation,
-            // since the key is hardcoded, this should not be an issue.
-#        pragma warning( push )
-#        pragma warning( disable : 4996 )
-#    endif
-            const auto bazelOutputFilePtr = std::getenv( "XML_OUTPUT_FILE" );
-#    if defined( _MSC_VER )
-#        pragma warning( pop )
-#    endif
-            if ( bazelOutputFilePtr != nullptr ) {
-                m_data.reporterSpecifications.push_back(
-                    { "junit", std::string( bazelOutputFilePtr ), {}, {} } );
+        if ( enableBazelEnvSupport() ) {
+            readBazelEnvVars();
+        }
+
+        // Bazel support can modify the test specs, so parsing has to happen
+        // after reading Bazel env vars.
+        TestSpecParser parser( ITagAliasRegistry::get() );
+        if ( !m_data.testsOrTags.empty() ) {
+            m_hasTestFilters = true;
+            for ( auto const& testOrTags : m_data.testsOrTags ) {
+                parser.parse( testOrTags );
             }
-        }
-#endif
+        }
+        m_testSpec = parser.testSpec();
+
 
         // We now fixup the reporter specs to handle default output spec,
         // default colour spec, etc
@@ -185,6 +166,44 @@ namespace Catch {
     unsigned int Config::benchmarkSamples() const { return m_data.benchmarkSamples; }
     double Config::benchmarkConfidenceInterval() const { return m_data.benchmarkConfidenceInterval; }
     unsigned int Config::benchmarkResamples() const { return m_data.benchmarkResamples; }
-    std::chrono::milliseconds Config::benchmarkWarmupTime() const { return std::chrono::milliseconds(m_data.benchmarkWarmupTime); }
+    std::chrono::milliseconds Config::benchmarkWarmupTime() const {
+        return std::chrono::milliseconds( m_data.benchmarkWarmupTime );
+    }
+
+    void Config::readBazelEnvVars() {
+#if defined( CATCH_PLATFORM_WINDOWS_UWP )
+        // We cannot read environment variables on UWP platforms
+#else
+
+#    if defined( _MSC_VER )
+#        pragma warning( push )
+#        pragma warning( disable : 4996 ) // use getenv_s instead of getenv
+#    endif
+
+        // Register a JUnit reporter for Bazel. Bazel sets an environment
+        // variable with the path to XML output. If this file is written to
+        // during test, Bazel will not generate a default XML output.
+        // This allows the XML output file to contain higher level of detail
+        // than what is possible otherwise.
+        const auto bazelOutputFile = std::getenv( "XML_OUTPUT_FILE" );
+        if ( bazelOutputFile ) {
+            m_data.reporterSpecifications.push_back(
+                { "junit", std::string( bazelOutputFile ), {}, {} } );
+        }
+
+        const auto bazelTestSpec = std::getenv( "TESTBRIDGE_TEST_ONLY" );
+        if ( bazelTestSpec ) {
+            // Presumably the test spec from the environment should overwrite
+            // the one we got from the CLI (if we got any)
+            m_data.testsOrTags.clear();
+            m_data.testsOrTags.push_back( bazelTestSpec );
+        }
+
+#    if defined( _MSC_VER )
+#        pragma warning( pop )
+#    endif
+
+#endif
+    }
 
 } // end namespace Catch
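Note the precedence choice in `readBazelEnvVars()` above: a test spec coming from `TESTBRIDGE_TEST_ONLY` first clears whatever specs were collected from the command line, so the environment always wins. A toy sketch of that override logic, using a hypothetical helper name for illustration (not part of the commit):

    #include <cstdlib>
    #include <string>
    #include <vector>

    // Hypothetical helper mirroring the override: an env-provided
    // test spec replaces any specs gathered from the CLI.
    void applyEnvTestSpec(std::vector<std::string>& testsOrTags) {
        if (const char* envSpec = std::getenv("TESTBRIDGE_TEST_ONLY")) {
            testsOrTags.clear();            // drop CLI-provided specs
            testsOrTags.push_back(envSpec); // env spec takes precedence
        }
    }

This is also why the constructor hunk above moves the `TestSpecParser` invocation: parsing now runs only after the Bazel variables have been read, otherwise an env-provided spec would never make it into `m_testSpec`.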
3 changes: 3 additions & 0 deletions src/catch2/catch_config.hpp
@@ -140,6 +140,9 @@ namespace Catch {
         std::chrono::milliseconds benchmarkWarmupTime() const override;
 
     private:
+        // Reads Bazel env vars and applies them to the config
+        void readBazelEnvVars();
+
         ConfigData m_data;
         std::vector<ProcessedReporterSpec> m_processedReporterSpecs;
         TestSpec m_testSpec;
12 changes: 12 additions & 0 deletions tests/ExtraTests/CMakeLists.txt
@@ -150,6 +150,18 @@ set_tests_properties(NO_CATCH_CONFIG_BAZEL_REPORTER-1
     ENVIRONMENT "BAZEL_TEST=1"
 )
 
+add_test(NAME BazelEnv::TESTBRIDGE_TEST_ONLY
+  COMMAND
+    "${PYTHON_EXECUTABLE}" "${CATCH_DIR}/tests/TestScripts/testBazelEnvTestSpec.py"
+    $<TARGET_FILE:BazelReporterNoCatchConfig>
+)
+set_tests_properties(BazelEnv::TESTBRIDGE_TEST_ONLY
+  PROPERTIES
+    LABELS "uses-python"
+    ENVIRONMENT "BAZEL_TEST=1"
+)
+
+
 # The default handler on Windows leads to the just-in-time debugger firing,
 # which makes this test unsuitable for CI and headless runs, as it opens
 # up an interactive dialog.
52 changes: 52 additions & 0 deletions tests/TestScripts/testBazelEnvTestSpec.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+
+# Copyright Catch2 Authors
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+#   https://www.boost.org/LICENSE_1_0.txt)
+
+# SPDX-License-Identifier: BSL-1.0
+
+import os
+import sys
+import subprocess
+
+"""
+Test that Catch2 recognizes the `TESTBRIDGE_TEST_ONLY` env variable and applies
+the provided test spec properly.
+Requires 1 argument, the path to the Catch2 binary to test.
+"""
+if len(sys.argv) != 2:
+    print("Wrong number of arguments: {}".format(len(sys.argv)))
+    print("Usage: {} test-bin-path".format(sys.argv[0]))
+    exit(1)
+
+
+bin_path = os.path.abspath(sys.argv[1])
+
+print('bin path:', bin_path)
+
+env = os.environ.copy()
+env["TESTBRIDGE_TEST_ONLY"] = "Passing test case"
+
+try:
+    ret = subprocess.run(
+        bin_path,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        check=True,
+        universal_newlines=True,
+        env=env
+    )
+    stdout = ret.stdout
+except subprocess.SubprocessError as ex:
+    print('Could not run "{}"'.format(bin_path))
+    print("Return code: {}".format(ex.returncode))
+    print("stdout: {}".format(ex.stdout))
+    print("stderr: {}".format(ex.stderr))
+    raise
+
+if 'All tests passed (1 assertion in 1 test case)' not in stdout:
+    print('Unexpected output: {}'.format(stdout))
+    exit(2)
