Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: Adding support for running system tests in parallel #1096

Open
wants to merge 6 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,4 @@ jobs:
env:
SPANNER_EMULATOR_HOST: localhost:9010
GOOGLE_CLOUD_PROJECT: emulator-test-project
GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE: false
4 changes: 2 additions & 2 deletions .kokoro/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,8 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
# Setup project id.
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")

# Use the existing Spanner instance for every system test run
# (a fresh instance is no longer created per run).
export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=false

# Remove old nox
python3 -m pip uninstall --yes --quiet nox-automation
Expand Down
5 changes: 3 additions & 2 deletions noxfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,7 @@ def install_systemtest_dependencies(session, *constraints):
# Exclude version 1.52.0rc1 which has a known issue.
# See https://github.com/grpc/grpc/issues/32163
session.install("--pre", "grpcio!=1.52.0rc1")
session.install("pytest-xdist")

session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints)

Expand Down Expand Up @@ -282,7 +283,7 @@ def system(session, database_dialect):
if system_test_exists:
session.run(
"py.test",
"--quiet",
"-n=12",
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_path,
*session.posargs,
Expand All @@ -294,7 +295,7 @@ def system(session, database_dialect):
if system_test_folder_exists:
session.run(
"py.test",
"--quiet",
"-n=12",
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_folder_path,
*session.posargs,
Expand Down
2 changes: 0 additions & 2 deletions owlbot.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,8 +145,6 @@ def get_staging_dirs(
".kokoro/build.sh",
"# Remove old nox",
"""\
# Set up creating a new instance for each system test run
export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=true

# Remove old nox""",
)
Expand Down
75 changes: 46 additions & 29 deletions samples/samples/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

import time
import uuid
from random import randrange

from google.api_core import exceptions

Expand All @@ -26,11 +27,16 @@
from google.cloud.spanner_v1 import instance
import pytest
from test_utils import retry
from test_utils import system

# Maximum time to block waiting for instance creation to complete.
INSTANCE_CREATION_TIMEOUT = 560  # seconds

# Maximum time to wait for long-running admin operations.
OPERATION_TIMEOUT_SECONDS = 120  # seconds

# When False, samples reuse the pre-existing instance named below instead of
# creating (and later deleting) a dedicated instance per test run.
CREATE_INSTANCE = False

# Id of the shared instance used when CREATE_INSTANCE is False.
INSTANCE_ID_DEFAULT = "test-instance"

# Retry helper for ResourceExhausted (HTTP 429) errors from the admin API.
retry_429 = retry.RetryErrors(exceptions.ResourceExhausted, delay=15)


Expand Down Expand Up @@ -76,23 +82,26 @@ def scrub_instance_ignore_not_found(to_scrub):
@pytest.fixture(scope="session")
def cleanup_old_instances(spanner_client):
    """Delete sample-created instances that are older than an hour.

    Only acts when CREATE_INSTANCE is set; when tests reuse a shared,
    pre-existing instance there is nothing to garbage-collect.
    """
    if CREATE_INSTANCE:
        # Instances labeled as sample-created and older than this cutoff
        # are considered leaked by earlier runs.
        cutoff = int(time.time()) - 1 * 60 * 60
        instance_filter = "labels.cloud_spanner_samples:true"

        for instance_pb in spanner_client.list_instances(filter_=instance_filter):
            inst = instance.Instance.from_pb(instance_pb, spanner_client)

            if "created" in inst.labels:
                create_time = int(inst.labels["created"])

                if create_time <= cutoff:
                    scrub_instance_ignore_not_found(inst)


@pytest.fixture(scope="module")
def instance_id():
    """Return the instance id used in samples.

    A unique per-run id when CREATE_INSTANCE is set (one fresh instance per
    test run); otherwise the shared default instance id.
    """
    if CREATE_INSTANCE:
        return f"test-instance-{uuid.uuid4().hex[:10]}"
    return INSTANCE_ID_DEFAULT


@pytest.fixture(scope="module")
Expand Down Expand Up @@ -121,31 +130,36 @@ def sample_instance(
instance_config,
sample_name,
):
sample_instance = spanner_client.instance(
instance_id,
instance_config,
labels={
"cloud_spanner_samples": "true",
"sample_name": sample_name,
"created": str(int(time.time())),
},
)
op = retry_429(sample_instance.create)()
op.result(INSTANCE_CREATION_TIMEOUT) # block until completion
if CREATE_INSTANCE:
sample_instance = spanner_client.instance(
instance_id,
instance_config,
labels={
"cloud_spanner_samples": "true",
"sample_name": sample_name,
"created": str(int(time.time())),
},
)
op = retry_429(sample_instance.create)()
op.result(INSTANCE_CREATION_TIMEOUT) # block until completion

# Eventual consistency check
retry_found = retry.RetryResult(bool)
retry_found(sample_instance.exists)()
# Eventual consistency check
retry_found = retry.RetryResult(bool)
retry_found(sample_instance.exists)()
else:
sample_instance = spanner_client.instance(instance_id)
sample_instance.reload()

yield sample_instance

for database_pb in sample_instance.list_databases():
database.Database.from_pb(database_pb, sample_instance).drop()
if CREATE_INSTANCE:
for database_pb in sample_instance.list_databases():
database.Database.from_pb(database_pb, sample_instance).drop()

for backup_pb in sample_instance.list_backups():
backup.Backup.from_pb(backup_pb, sample_instance).delete()
for backup_pb in sample_instance.list_backups():
backup.Backup.from_pb(backup_pb, sample_instance).delete()

sample_instance.delete()
sample_instance.delete()


@pytest.fixture(scope="module")
Expand Down Expand Up @@ -189,7 +203,7 @@ def database_id():

Sample testcase modules can override as needed.
"""
return "my-database-id"
return unique_id("dbapi-txn")


@pytest.fixture(scope="module")
Expand Down Expand Up @@ -291,3 +305,6 @@ def kms_key_name(spanner_client):
"spanner-test-keyring",
"spanner-test-cmek",
)

def unique_id(prefix, separator="-"):
    """Return *prefix* followed by a unique resource suffix and a random tail.

    The trailing ``randrange(100)`` adds extra entropy on top of
    ``system.unique_resource_id`` — presumably to reduce id collisions
    between parallel (pytest-xdist) workers; a 1-in-100 collision is still
    possible, so confirm this suffices.  # NOTE(review)
    """
    return f"{prefix}{system.unique_resource_id(separator)}{randrange(100)}"
1 change: 1 addition & 0 deletions samples/samples/noxfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,6 +230,7 @@ def _session_tests(

session.run(
"pytest",
"-n=12",
*(PYTEST_COMMON_ARGS + session.posargs + concurrent_args),
# Pytest will return 5 when no tests are collected. This can happen
# on travis where slow and flaky tests are excluded.
Expand Down
1 change: 1 addition & 0 deletions samples/samples/requirements-test.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@ pytest==8.0.0
pytest-dependency==0.6.0
mock==5.1.0
google-cloud-testutils==1.4.0
pytest-xdist==3.5.0
5 changes: 3 additions & 2 deletions tests/system/_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import operator
import os
import time
from random import randrange

from google.api_core import exceptions
from google.cloud.spanner_v1 import instance as instance_mod
Expand All @@ -24,7 +25,7 @@


CREATE_INSTANCE_ENVVAR = "GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE"
# A fresh instance is created only when the env var is exactly the string
# "true"; any other value (or the var being unset) reuses the existing
# instance identified below.
CREATE_INSTANCE = os.getenv(CREATE_INSTANCE_ENVVAR) == "true"

INSTANCE_ID_ENVVAR = "GOOGLE_CLOUD_TESTS_SPANNER_INSTANCE"
INSTANCE_ID_DEFAULT = "google-cloud-python-systest"
Expand Down Expand Up @@ -134,4 +135,4 @@ def cleanup_old_instances(spanner_client):


def unique_id(prefix, separator="-"):
    """Return *prefix* followed by a unique resource suffix and a random tail.

    NOTE(review): the ``randrange(100)`` tail presumably guards against id
    collisions between parallel (pytest-xdist) workers — confirm a 1-in-100
    collision chance is acceptable.
    """
    return f"{prefix}{system.unique_resource_id(separator)}{randrange(100)}"
10 changes: 5 additions & 5 deletions tests/system/test_database_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -342,7 +342,7 @@ def test_update_ddl_w_pitr_invalid(
databases_to_delete,
):
pool = spanner_v1.BurstyPool(labels={"testcase": "update_database_ddl_pitr"})
temp_db_id = _helpers.unique_id("pitr_upd_ddl_inv", separator="_")
temp_db_id = _helpers.unique_id("pitr_upd_ddl", separator="_")
retention_period = "0d"
temp_db = shared_instance.database(temp_db_id, pool=pool)

Expand All @@ -367,7 +367,7 @@ def test_update_ddl_w_pitr_success(
databases_to_delete,
):
pool = spanner_v1.BurstyPool(labels={"testcase": "update_database_ddl_pitr"})
temp_db_id = _helpers.unique_id("pitr_upd_ddl_inv", separator="_")
temp_db_id = _helpers.unique_id("pitr_upd_ddl", separator="_")
retention_period = "7d"
temp_db = shared_instance.database(temp_db_id, pool=pool)

Expand Down Expand Up @@ -399,7 +399,7 @@ def test_update_ddl_w_default_leader_success(
labels={"testcase": "update_database_ddl_default_leader"},
)

temp_db_id = _helpers.unique_id("dfl_ldrr_upd_ddl", separator="_")
temp_db_id = _helpers.unique_id("dfl_upd_ddl", separator="_")
default_leader = "us-east4"
temp_db = multiregion_instance.database(temp_db_id, pool=pool)

Expand Down Expand Up @@ -427,7 +427,7 @@ def test_create_role_grant_access_success(
creator_role_parent = _helpers.unique_id("role_parent", separator="_")
creator_role_orphan = _helpers.unique_id("role_orphan", separator="_")

temp_db_id = _helpers.unique_id("dfl_ldrr_upd_ddl", separator="_")
temp_db_id = _helpers.unique_id("dfl_ldrr_ddl", separator="_")
temp_db = shared_instance.database(temp_db_id, database_dialect=database_dialect)

create_op = temp_db.create()
Expand Down Expand Up @@ -487,7 +487,7 @@ def test_list_database_role_success(
creator_role_parent = _helpers.unique_id("role_parent", separator="_")
creator_role_orphan = _helpers.unique_id("role_orphan", separator="_")

temp_db_id = _helpers.unique_id("dfl_ldrr_upd_ddl", separator="_")
temp_db_id = _helpers.unique_id("dfl_ldrr_ddl", separator="_")
temp_db = shared_instance.database(temp_db_id, database_dialect=database_dialect)

create_op = temp_db.create()
Expand Down