diff --git a/.github/workflows/verify_library_generation.yaml b/.github/workflows/verify_library_generation.yaml
index 0b4ae1b8ed..f3dc825c85 100644
--- a/.github/workflows/verify_library_generation.yaml
+++ b/.github/workflows/verify_library_generation.yaml
@@ -13,9 +13,7 @@ jobs:
     strategy:
       matrix:
         java: [ 11 ]
-        os: [ ubuntu-22.04, macos-12 ]
-        post_processing: [ 'true', 'false' ]
-    runs-on: ${{ matrix.os }}
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-java@v3
@@ -45,35 +43,13 @@ jobs:
           pushd library_generation
           pip install -r requirements.in
           popd
-
-      - name: install utils (macos)
-        if: matrix.os == 'macos-12'
-        shell: bash
-        run: |
-          brew update --preinstall
-          # we need the `realpath` command to be available
-          brew install coreutils
-      - name: install docker (ubuntu)
-        if: matrix.os == 'ubuntu-22.04'
-        shell: bash
-        run: |
-          set -x
-          # install docker
-          sudo apt install containerd -y
-          sudo apt install -y docker.io docker-compose
-
-          # launch docker
-          sudo systemctl start docker
       - name: Run integration tests
-        # we don't run ITs with postprocessing on macos because one of its dependencies "synthtool" is designed to run on linux only
-        if: matrix.os == 'ubuntu-22.04' || matrix.post_processing == 'false'
         shell: bash
         run: |
+          set -x
           git config --global user.email "github-workflow@github.com"
           git config --global user.name "Github Workflow"
-          library_generation/test/generate_library_integration_test.sh \
-            --googleapis_gen_url https://cloud-java-bot:${{ secrets.CLOUD_JAVA_BOT_GITHUB_TOKEN }}@github.com/googleapis/googleapis-gen.git \
-            --enable_postprocessing "${{ matrix.post_processing }}"
+          python -m unittest library_generation/test/integration_tests.py
   unit_tests:
     strategy:
       matrix:
@@ -106,7 +82,7 @@ jobs:
       run: |
         set -x
         python -m unittest library_generation/test/unit_tests.py
-  lint:
+  lint-shell:
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v3
@@ -116,3 +92,20 @@
         scandir: 'library_generation'
         format: tty
         severity: error
+  lint-python:
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v3
+      - name: install python dependencies
+        shell: bash
+        run: |
+          set -ex
+          pushd library_generation
+          pip install -r requirements.in
+          popd
+      - name: Lint
+        shell: bash
+        run: |
+          # exclude generated golden files
+          # exclude owlbot until further refactoring
+          black --check library_generation --exclude "(library_generation/owlbot)|(library_generation/test/resources/goldens)"
diff --git a/.gitignore b/.gitignore
index 3da2d8a7d2..a9b6f36914 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,8 +19,12 @@ target/
 
 # Python
 **/__pycache__/
+.venv
 
 # library generation
 output/
 library_generation/output/
+library_generation/test/output
+library_generation/test/googleapis
+library_generation/test/resources/integration/golden
 showcase/scripts/output/
diff --git a/library_generation/__init__.py b/library_generation/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/library_generation/generate_composed_library.py b/library_generation/generate_composed_library.py
index d5beec733b..6f8216f511 100755
--- a/library_generation/generate_composed_library.py
+++ b/library_generation/generate_composed_library.py
@@ -1,3 +1,18 @@
+#!/usr/bin/env python3
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 """
 This script allows generation of libraries that are composed of more than one
 service version. It is achieved by calling `generate_library.sh` without
@@ -12,130 +27,146 @@
 - A "grafeas" folder found in the googleapis/googleapis repository
 Note: googleapis repo is found in https://github.com/googleapis/googleapis.
 """
-
-import click
-import utilities as util
 import os
-import sys
-import subprocess
-import json
-from model.GenerationConfig import GenerationConfig
-from model.LibraryConfig import LibraryConfig
-from model.ClientInputs import parse as parse_build_file
+from pathlib import Path
+from typing import List
+import library_generation.utilities as util
+from library_generation.model.generation_config import GenerationConfig
+from library_generation.model.gapic_config import GapicConfig
+from library_generation.model.gapic_inputs import GapicInputs
+from library_generation.model.library_config import LibraryConfig
+from library_generation.model.gapic_inputs import parse as parse_build_file
 
 script_dir = os.path.dirname(os.path.realpath(__file__))
 
-"""
-Main function in charge of generating libraries composed of more than one
-service or service version.
-Arguments
-  - config: a GenerationConfig object representing a parsed configuration
-  yaml
-  - library: a LibraryConfig object contained inside config, passed here for
-  convenience and to prevent all libraries to be processed
-  - enable_postprocessing: true if postprocessing should be done on the generated
-  libraries
-  - repository_path: path to the repository where the generated files will be
-  sent. If not specified, it will default to the one defined in the configuration yaml
-  and will be downloaded. The versions file will be inferred from this folder
-"""
+
 def generate_composed_library(
     config: GenerationConfig,
+    library_path: str,
     library: LibraryConfig,
-    repository_path: str,
-    enable_postprocessing: bool = True,
+    output_folder: str,
+    versions_file: str,
 ) -> None:
-    output_folder = util.sh_util('get_output_folder')
-
-    print(f'output_folder: {output_folder}')
-    print('library: ', library)
-    os.makedirs(output_folder, exist_ok=True)
-
-    googleapis_commitish = config.googleapis_commitish
-    if library.googleapis_commitish is not None:
-        googleapis_commitish = library.googleapis_commitish
-        print('using library-specific googleapis commitish: ' + googleapis_commitish)
-    else:
-        print('using common googleapis_commitish')
-
-    print('removing old googleapis folders and files')
-    util.delete_if_exists(f'{output_folder}/google')
-    util.delete_if_exists(f'{output_folder}/grafeas')
-
-    print('downloading googleapis')
-    util.sh_util(f'download_googleapis_files_and_folders "{output_folder}" "{googleapis_commitish}"')
-
-    is_monorepo = len(config.libraries) > 1
+    """
+    Generate a library composed of more than one service or service version
+    :param config: a GenerationConfig object representing a parsed configuration
+    yaml
+    :param library_path: the path to which the generated files go
+    :param library: a LibraryConfig object contained inside config, passed here
+    for convenience and to avoid processing all libraries
+    :param output_folder: the folder to which intermediate generation output goes
+    :param versions_file: the path to versions.txt used in post-processing
+    :return: None
+    """
+    util.pull_api_definition(
+        config=config, library=library, output_folder=output_folder
+    )
+
+    is_monorepo = util.check_monorepo(config=config)
+    base_arguments = __construct_tooling_arg(config=config)
+    owlbot_cli_source_folder = util.sh_util("mktemp -d")
+    os.makedirs(f"{library_path}", exist_ok=True)
+    for gapic in library.gapic_configs:
+        build_file_folder = Path(f"{output_folder}/{gapic.proto_path}").resolve()
+        print(f"build_file_folder: {build_file_folder}")
+        gapic_inputs = parse_build_file(build_file_folder, gapic.proto_path)
+        # generate prerequisite files (.repo-metadata.json, .OwlBot.yaml,
+        # owlbot.py) here because transport is parsed from BUILD.bazel,
+        # which lives in a versioned proto_path.
+        util.generate_prerequisite_files(
+            library=library,
+            proto_path=util.remove_version_from(gapic.proto_path),
+            transport=gapic_inputs.transport,
+            library_path=library_path,
+        )
+        service_version = gapic.proto_path.split("/")[-1]
+        temp_destination_path = f"java-{library.api_shortname}-{service_version}"
+        effective_arguments = __construct_effective_arg(
+            base_arguments=base_arguments,
+            gapic=gapic,
+            gapic_inputs=gapic_inputs,
+            temp_destination_path=temp_destination_path,
+        )
+        print("arguments: ")
+        print(effective_arguments)
+        print(f"Generating library from {gapic.proto_path} to {library_path}")
+        util.run_process_and_print_output(
+            ["bash", f"{script_dir}/generate_library.sh", *effective_arguments],
+            "Library generation",
+        )
+
+        util.sh_util(
+            f'build_owlbot_cli_source_folder "{library_path}"'
+            + f' "{owlbot_cli_source_folder}" "{output_folder}/{temp_destination_path}"'
+            + f' "{gapic.proto_path}"',
+            cwd=output_folder,
+        )
 
-    base_arguments = []
-    base_arguments += util.create_argument('gapic_generator_version', config)
-    base_arguments += util.create_argument('grpc_version', config)
-    base_arguments += util.create_argument('protobuf_version', config)
-
-    library_name = f'java-{library.api_shortname}'
-    library_path = None
-
-    versions_file = ''
-    if is_monorepo:
-        print('this is a monorepo library')
-        destination_path = config.destination_path + '/' + library_name
-        library_folder = destination_path.split('/')[-1]
-        if repository_path is None:
-            print(f'sparse_cloning monorepo with {library_name}')
-            repository_path = f'{output_folder}/{config.destination_path}'
-            clone_out = util.sh_util(f'sparse_clone "https://github.com/googleapis/{MONOREPO_NAME}.git" "{library_folder} google-cloud-pom-parent google-cloud-jar-parent versions.txt .github"', cwd=output_folder)
-            print(clone_out)
-        library_path = f'{repository_path}/{library_name}'
-        versions_file = f'{repository_path}/versions.txt'
-    else:
-        print('this is a HW library')
-        destination_path = library_name
-        if repository_path is None:
-            repository_path = f'{output_folder}/{destination_path}'
-            util.delete_if_exists(f'{output_folder}/{destination_path}')
-            clone_out = util.sh_util(f'git clone "https://github.com/googleapis/{destination_path}.git"', cwd=output_folder)
-            print(clone_out)
-        library_path = f'{repository_path}'
-        versions_file = f'{repository_path}/versions.txt'
-
-    owlbot_cli_source_folder = util.sh_util('mktemp -d')
-    for gapic in library.gapic_configs:
-
-        effective_arguments = list(base_arguments)
-        effective_arguments += util.create_argument('proto_path', gapic)
-
-        build_file_folder = f'{output_folder}/{gapic.proto_path}'
-        print(f'build_file_folder: {build_file_folder}')
-        client_inputs = parse_build_file(build_file_folder, gapic.proto_path)
-        effective_arguments += [
-            '--proto_only', client_inputs.proto_only,
-            '--gapic_additional_protos', client_inputs.additional_protos,
-            '--transport', client_inputs.transport,
-            '--rest_numeric_enums', client_inputs.rest_numeric_enum,
-            '--gapic_yaml', client_inputs.gapic_yaml,
-            '--service_config', client_inputs.service_config,
-            '--service_yaml', client_inputs.service_yaml,
-            '--include_samples', client_inputs.include_samples,
-        ]
-        service_version = gapic.proto_path.split('/')[-1]
-        temp_destination_path = f'java-{library.api_shortname}-{service_version}'
-        effective_arguments += [ '--destination_path', temp_destination_path ]
-        print('arguments: ')
-        print(effective_arguments)
-        print(f'Generating library from {gapic.proto_path} to {destination_path}...')
-        util.run_process_and_print_output(['bash', '-x', f'{script_dir}/generate_library.sh',
-          *effective_arguments], 'Library generation')
-
-
-        if enable_postprocessing:
-            util.sh_util(f'build_owlbot_cli_source_folder "{library_path}"'
-                + f' "{owlbot_cli_source_folder}" "{output_folder}/{temp_destination_path}"'
-                + f' "{gapic.proto_path}"',
-                cwd=output_folder)
-
-    if enable_postprocessing: # call postprocess library
-        util.run_process_and_print_output([f'{script_dir}/postprocess_library.sh',
-            f'{library_path}', '', versions_file, owlbot_cli_source_folder,
-            config.owlbot_cli_image, config.synthtool_commitish, str(is_monorepo).lower()], 'Library postprocessing')
+    util.run_process_and_print_output(
+        [
+            f"{script_dir}/postprocess_library.sh",
+            f"{library_path}",
+            "",
+            versions_file,
+            owlbot_cli_source_folder,
+            config.owlbot_cli_image,
+            config.synthtool_commitish,
+            str(is_monorepo).lower(),
+        ],
+        "Library postprocessing",
+    )
+
+
+def __construct_tooling_arg(config: GenerationConfig) -> List[str]:
+    """
+    Construct arguments for tooling versions used in generate_library.sh
+    :param config: the generation config
+    :return: arguments containing tooling versions
+    """
+    arguments = []
+    arguments += util.create_argument("gapic_generator_version", config)
+    arguments += util.create_argument("grpc_version", config)
+    arguments += util.create_argument("protobuf_version", config)
+
+    return arguments
+
+
+def __construct_effective_arg(
+    base_arguments: List[str],
+    gapic: GapicConfig,
+    gapic_inputs: GapicInputs,
+    temp_destination_path: str,
+) -> List[str]:
+    """
+    Construct arguments, consisting of attributes of a GAPIC library, used in
+    generate_library.sh
+    :param base_arguments: arguments consisting of tooling versions
+    :param gapic: an object of GapicConfig
+    :param gapic_inputs: an object of GapicInputs
+    :param temp_destination_path: the path to which the generated library goes
+    :return: arguments containing attributes to generate a GAPIC library
+    """
+    arguments = list(base_arguments)
+    arguments += util.create_argument("proto_path", gapic)
+    arguments += [
+        "--proto_only",
+        gapic_inputs.proto_only,
+        "--gapic_additional_protos",
+        gapic_inputs.additional_protos,
+        "--transport",
+        gapic_inputs.transport,
+        "--rest_numeric_enums",
+        gapic_inputs.rest_numeric_enum,
+        "--gapic_yaml",
+        gapic_inputs.gapic_yaml,
+        "--service_config",
+        gapic_inputs.service_config,
+        "--service_yaml",
+        gapic_inputs.service_yaml,
+        "--include_samples",
+        gapic_inputs.include_samples,
+    ]
+    arguments += ["--destination_path", temp_destination_path]
+    return arguments
diff --git a/library_generation/generate_repo.py b/library_generation/generate_repo.py
new file mode 100644
index 0000000000..85c10c029f
--- /dev/null
+++ b/library_generation/generate_repo.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import click
+import library_generation.utilities as util
+from library_generation.generate_composed_library import generate_composed_library
+from library_generation.model.generation_config import from_yaml
+
+
+@click.group(invoke_without_command=False)
+@click.pass_context
+@click.version_option(message="%(version)s")
+def main(ctx):
+    pass
+
+
+@main.command()
+@click.option(
+    "--generation-config-yaml",
+    required=True,
+    type=str,
+    help="""
+    Path to generation_config.yaml that contains the metadata about
+    library generation
+    """,
+)
+@click.option(
+    "--target-library-api-shortname",
+    required=False,
+    type=str,
+    help="""
+    If specified, only the `library` whose api_shortname equals
+    target-library-api-shortname will be generated.
+    If not specified, all libraries in the configuration yaml will be generated.
+    """,
+)
+@click.option(
+    "--repository-path",
+    required=False,
+    default=".",
+    type=str,
+    help="""
+    If specified, the generated files will be sent to this location.
+    If not specified, the repository will be generated in the current working
+    directory.
+    """,
+)
+def generate(
+    generation_config_yaml: str,
+    target_library_api_shortname: str,
+    repository_path: str,
+):
+    generate_from_yaml(
+        generation_config_yaml=generation_config_yaml,
+        repository_path=repository_path,
+        target_library_api_shortname=target_library_api_shortname,
+    )
+
+
+def generate_from_yaml(
+    generation_config_yaml: str,
+    repository_path: str,
+    target_library_api_shortname: str = None,
+) -> None:
+    """
+    Parses a config yaml and generates libraries via
+    generate_composed_library.py
+    """
+    config = from_yaml(generation_config_yaml)
+    target_libraries = config.libraries
+    if target_library_api_shortname is not None:
+        target_libraries = [
+            library
+            for library in config.libraries
+            if library.api_shortname == target_library_api_shortname
+        ]
+
+    repo_config = util.prepare_repo(
+        gen_config=config, library_config=target_libraries, repo_path=repository_path
+    )
+
+    for library_path, library in repo_config.libraries.items():
+        print(f"generating library {library.api_shortname}")
+
+        generate_composed_library(
+            config=config,
+            library_path=library_path,
+            library=library,
+            output_folder=repo_config.output_folder,
+            versions_file=repo_config.versions_file,
+        )
+
+    util.repo_level_post_process(
+        repository_path=repository_path, versions_file=repo_config.versions_file
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/library_generation/main.py b/library_generation/main.py
deleted file mode 100644
index 282e0283fd..0000000000
--- a/library_generation/main.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""
-Parses a config yaml and generates libraries via generate_composed_library.py
-"""
-
-import click
-from generate_composed_library import generate_composed_library
-from typing import Dict
-from model.GenerationConfig import GenerationConfig
-from collections.abc import Sequence
-from absl import app
-
-@click.group(invoke_without_command=False)
-@click.pass_context
-@click.version_option(message="%(version)s")
-def main(ctx):
-    pass
-
-@main.command()
-@click.option(
-    "--generation-config-yaml",
-    required=True,
-    type=str,
-    help="""
-    Path to generation_config.yaml that contains the metadata about library generation
-    """
-)
-@click.option(
-    "--enable-postprocessing",
-    required=False,
-    default=True,
-    type=bool,
-    help="""
-    Path to repository where generated files will be merged into, via owlbot copy-code.
- Specifying this option enables postprocessing - """ -) -@click.option( - "--target-library-api-shortname", - required=False, - type=str, - help=""" - If specified, only the `library` with api_shortname = target-library-api-shortname will - be generated. If not specified, all libraries in the configuration yaml will be generated - """ -) -@click.option( - "--repository-path", - required=False, - type=str, - help=""" - If specified, the generated files will be sent to this location. If not specified, the - repository will be pulled into output_folder and move the generated files there - """ -) -def generate_from_yaml( - generation_config_yaml: str, - enable_postprocessing: bool, - target_library_api_shortname: str, - repository_path: str -) -> None: - config = GenerationConfig.from_yaml(generation_config_yaml) - target_libraries = config.libraries - if target_library_api_shortname is not None: - target_libraries = [library for library in config.libraries - if library.api_shortname == target_library_api_shortname] - for library in target_libraries: - print(f'generating library {library.api_shortname}') - generate_composed_library( - config, library, repository_path, enable_postprocessing - ) - - - - - -if __name__ == "__main__": - main() diff --git a/library_generation/model/GapicConfig.py b/library_generation/model/GapicConfig.py deleted file mode 100644 index be99b0a35f..0000000000 --- a/library_generation/model/GapicConfig.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -Class that represents a GAPICs single entry, inside a `LibraryConfig` in a generation_config.yaml -""" -class GapicConfig: - def __init__( - self, - proto_path: str, - ): - self.proto_path = proto_path diff --git a/library_generation/model/GenerationConfig.py b/library_generation/model/GenerationConfig.py deleted file mode 100644 index 77273b10eb..0000000000 --- a/library_generation/model/GenerationConfig.py +++ /dev/null @@ -1,91 +0,0 @@ -""" -Class that represents the root of a generation_config.yaml -""" -import yaml -from typing import List, Optional, Dict -from .LibraryConfig import LibraryConfig -from .GapicConfig import GapicConfig - - -class GenerationConfig: - def __init__( - self, - gapic_generator_version: str, - grpc_version: Optional[str], - protobuf_version: Optional[str], - googleapis_commitish: str, - owlbot_cli_image: str, - synthtool_commitish: str, - destination_path: Optional[str], - libraries: List[LibraryConfig], - ): - self.gapic_generator_version = gapic_generator_version - self.grpc_version = grpc_version - self.protobuf_version = protobuf_version - self.googleapis_commitish = googleapis_commitish - self.owlbot_cli_image = owlbot_cli_image - self.synthtool_commitish = synthtool_commitish - self.destination_path = destination_path - self.libraries = libraries - - """ - Parses a yaml located in path_to_yaml. 
Returns the parsed configuration represented - by the "model" classes - """ - @staticmethod - def from_yaml(path_to_yaml: str): - config = None - with open(path_to_yaml, 'r') as file_stream: - config = yaml.load(file_stream, yaml.Loader) - - libraries = _required(config, 'libraries') - - parsed_libraries = list() - for library in libraries: - gapics = _required(library, 'GAPICs') - - parsed_gapics = list() - for gapic in gapics: - proto_path = _required(gapic, 'proto_path') - new_gapic = GapicConfig(proto_path) - parsed_gapics.append(new_gapic) - - new_library = LibraryConfig( - _required(library, 'api_shortname'), - _optional(library, 'name_pretty', None), - _required(library, 'library_type'), - _optional(library, 'artifact_id', None), - _optional(library, 'api_description', None), - _optional(library, 'product_documentation', None), - _optional(library, 'client_documentation', None), - _optional(library, 'rest_documentation', None), - _optional(library, 'rpc_documentation', None), - parsed_gapics, - _optional(library, 'googleapis_commitish', None), - _optional(library, 'group_id', 'com.google.cloud'), - _optional(library, 'requires_billing', None), - ) - parsed_libraries.append(new_library) - - parsed_config = GenerationConfig( - _required(config, 'gapic_generator_version'), - _optional(config, 'grpc_version', None), - _optional(config, 'protobuf_version', None), - _required(config, 'googleapis_commitish'), - _required(config, 'owlbot_cli_image'), - _required(config, 'synthtool_commitish'), - _optional(config, 'destination_path', None), - parsed_libraries - ) - - return parsed_config - -def _required(config: Dict, key: str): - if key not in config: - raise ValueError(f'required key {key} not found in yaml') - return config[key] - -def _optional(config: Dict, key: str, default: any): - if key not in config: - return default - return config[key] diff --git a/library_generation/model/Library.py b/library_generation/model/Library.py deleted file mode 100644 index e1449443ba..0000000000 --- a/library_generation/model/Library.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -Class that represents a library in a generation_config.yaml file -""" -from typing import Dict, List, Optional -from enum import Enum -from .GapicConfig import GapicConfig - -""" -Two possible library types: - - GAPIC_AUTO: pure generated library - - GAPIC_COMBO: generated library with a handwritten layer -""" -class _LibraryType(Enum): - GAPIC_AUTO = 1 - GAPIC_COMBO = 2 - -class LibraryConfig: - def __init__( - self, - api_shortname: str, - name_pretty: Optional[str], - library_type: _LibraryType, - artifact_id: Optional[str], - api_description: Optional[str], - product_documentation: Optional[str], - client_documentation: Optional[str], - rest_documentation: Optional[str], - rpc_documentation: Optional[str], - gapicConfigs: List[GapicConfig], - googleapis_commitish: Optional[str], - group_id: Optional[str] = 'com.google.cloud', - requires_billing: Optional[bool] = True, - ): - self.api_shortname = api_shortname - self.name_pretty = name_pretty - self.library_type = library_type - self.artifact_id = artifact_id - self.requires_billing = requires_billing - self.api_description = api_description - self.product_documentation = product_documentation - self.client_documentation = client_documentation - self.rest_documentation = rest_documentation - self.rpc_documentation = rpc_documentation - self.group_id = group_id - self.gapicConfigs = gapicConfigs - self.googleapis_commitish = googleapis_commitish diff --git 
a/library_generation/model/LibraryConfig.py b/library_generation/model/LibraryConfig.py deleted file mode 100644 index a0d09351ed..0000000000 --- a/library_generation/model/LibraryConfig.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -Class that represents a library in a generation_config.yaml file -""" -from typing import Dict, List, Optional -from enum import Enum -from .GapicConfig import GapicConfig - -""" -Two possible library types: - - GAPIC_AUTO: pure generated library - - GAPIC_COMBO: generated library with a handwritten layer -""" -class _LibraryType(Enum): - GAPIC_AUTO = 1 - GAPIC_COMBO = 2 - -class LibraryConfig: - def __init__( - self, - api_shortname: str, - name_pretty: Optional[str], - library_type: _LibraryType, - artifact_id: Optional[str], - api_description: Optional[str], - product_documentation: Optional[str], - client_documentation: Optional[str], - rest_documentation: Optional[str], - rpc_documentation: Optional[str], - gapic_configs: List[GapicConfig], - googleapis_commitish: Optional[str], - group_id: Optional[str] = 'com.google.cloud', - requires_billing: Optional[bool] = True, - ): - self.api_shortname = api_shortname - self.name_pretty = name_pretty - self.library_type = library_type - self.artifact_id = artifact_id - self.requires_billing = requires_billing - self.api_description = api_description - self.product_documentation = product_documentation - self.client_documentation = client_documentation - self.rest_documentation = rest_documentation - self.rpc_documentation = rpc_documentation - self.group_id = group_id - self.gapic_configs = gapic_configs - self.googleapis_commitish = googleapis_commitish diff --git a/library_generation/model/bom_config.py b/library_generation/model/bom_config.py new file mode 100644 index 0000000000..b562407eb7 --- /dev/null +++ b/library_generation/model/bom_config.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class BomConfig: + """ + Class that represents an entry in dependencyManagement section. 
+
+    def __init__(
+        self,
+        group_id: str,
+        artifact_id: str,
+        version: str,
+        version_annotation: str,
+        is_import: bool = True,
+    ):
+        self.group_id = group_id
+        self.artifact_id = artifact_id
+        self.version = version
+        self.version_annotation = version_annotation
+        self.is_import = is_import
+
+    def __lt__(self, another):
+        return self.group_id < another.group_id or (
+            self.group_id == another.group_id and self.artifact_id < another.artifact_id
+        )
+
+    def __eq__(self, another):
+        return (
+            self.group_id == another.group_id
+            and self.artifact_id == another.artifact_id
+        )
diff --git a/library_generation/model/gapic_config.py b/library_generation/model/gapic_config.py
new file mode 100644
index 0000000000..bec1645823
--- /dev/null
+++ b/library_generation/model/gapic_config.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class GapicConfig:
+    """
+    Class that represents a single GAPIC entry, inside a `LibraryConfig` in
+    a generation_config.yaml
+    """
+
+    def __init__(self, proto_path: str):
+        self.proto_path = proto_path
diff --git a/library_generation/model/ClientInputs.py b/library_generation/model/gapic_inputs.py
similarity index 78%
rename from library_generation/model/ClientInputs.py
rename to library_generation/model/gapic_inputs.py
index 38acdb316f..b6500a6f3d 100644
--- a/library_generation/model/ClientInputs.py
+++ b/library_generation/model/gapic_inputs.py
@@ -42,11 +42,12 @@
 include_samples_pattern = r"include_samples = True"
 
 
-class ClientInput:
+class GapicInputs:
     """
     A data class containing inputs to invoke generate_library.sh to generate
     a GAPIC library.
     """
+
     def __init__(
         self,
         proto_only="true",
@@ -69,16 +70,15 @@ def __init__(
 
 
 def parse(
-    build_path: Path,
-    versioned_path: str,
-    build_file_name: str = 'BUILD.bazel'
-) -> ClientInput:
+    build_path: Path, versioned_path: str, build_file_name: str = "BUILD.bazel"
+) -> GapicInputs:
     """
     Utility function to parse inputs of generate_library.sh from BUILD.bazel.
     :param build_path: the file path of BUILD.bazel
     :param versioned_path: a versioned path in googleapis repository, e.g.,
     google/cloud/asset/v1.
-    :return: an ClientInput object.
+    :param build_file_name: the name of the build file.
+    :return: a GapicInputs object.
""" with open(f"{build_path}/{build_file_name}") as build: content = build.read() @@ -86,20 +86,18 @@ def parse( proto_library_target = re.compile( proto_library_pattern, re.DOTALL | re.VERBOSE ).findall(content) - additional_protos = '' + additional_protos = "" if len(proto_library_target) > 0: - additional_protos = __parse_additional_protos(proto_library_target[0]) - gapic_target = re.compile(gapic_pattern, re.DOTALL | re.VERBOSE)\ - .findall(content) - assembly_target = re.compile(assembly_pattern, re.DOTALL | re.VERBOSE)\ - .findall(content) - include_samples = 'false' + additional_protos = __parse_additional_protos(proto_library_target[0]) + gapic_target = re.compile(gapic_pattern, re.DOTALL | re.VERBOSE).findall(content) + assembly_target = re.compile(assembly_pattern, re.DOTALL | re.VERBOSE).findall( + content + ) + include_samples = "false" if len(assembly_target) > 0: - include_samples = __parse_include_samples(assembly_target[0]) + include_samples = __parse_include_samples(assembly_target[0]) if len(gapic_target) == 0: - return ClientInput( - include_samples=include_samples - ) + return GapicInputs(include_samples=include_samples) transport = __parse_transport(gapic_target[0]) rest_numeric_enum = __parse_rest_numeric_enums(gapic_target[0]) @@ -107,15 +105,15 @@ def parse( service_config = __parse_service_config(gapic_target[0], versioned_path) service_yaml = __parse_service_yaml(gapic_target[0], versioned_path) - return ClientInput( - proto_only="false", - additional_protos=additional_protos, - transport=transport, - rest_numeric_enum=rest_numeric_enum, - gapic_yaml=gapic_yaml, - service_config=service_config, - service_yaml=service_yaml, - include_samples=include_samples, + return GapicInputs( + proto_only="false", + additional_protos=additional_protos, + transport=transport, + rest_numeric_enum=rest_numeric_enum, + gapic_yaml=gapic_yaml, + service_config=service_config, + service_yaml=service_yaml, + include_samples=include_samples, ) @@ -147,14 +145,23 @@ def __parse_gapic_yaml(gapic_target: str, versioned_path: str) -> str: def __parse_service_config(gapic_target: str, versioned_path: str) -> str: service_config = re.findall(service_config_pattern, gapic_target) - return f"{versioned_path}/{service_config[0]}".replace(':','') if len(service_config) != 0 \ + return ( + f"{versioned_path}/{service_config[0]}".replace(":", "") + if len(service_config) != 0 else "" + ) def __parse_service_yaml(gapic_target: str, versioned_path: str) -> str: service_yaml = re.findall(service_yaml_pattern, gapic_target) - return f"{versioned_path}/{service_yaml[0]}" if len(service_yaml) != 0 \ - else "" + if len(service_yaml) == 0: + return "" + res = str(service_yaml[0]) + if res.startswith("//"): + # special case if the service config starts with "//", is a Bazel + # target with an absolute path. + return res.replace("//", "").replace(":", "/") + return f"{versioned_path}/{res}" def __parse_include_samples(assembly_target: str) -> str: diff --git a/library_generation/model/generation_config.py b/library_generation/model/generation_config.py new file mode 100644 index 0000000000..1c3c62d1fb --- /dev/null +++ b/library_generation/model/generation_config.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python3 +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import yaml +from typing import List, Optional, Dict +from library_generation.model.library_config import LibraryConfig +from library_generation.model.gapic_config import GapicConfig + + +class GenerationConfig: + """ + Class that represents the root of a generation_config.yaml + """ + + def __init__( + self, + gapic_generator_version: str, + googleapis_commitish: str, + owlbot_cli_image: str, + synthtool_commitish: str, + libraries: List[LibraryConfig], + grpc_version: Optional[str] = None, + protobuf_version: Optional[str] = None, + ): + self.gapic_generator_version = gapic_generator_version + self.googleapis_commitish = googleapis_commitish + self.owlbot_cli_image = owlbot_cli_image + self.synthtool_commitish = synthtool_commitish + self.libraries = libraries + self.grpc_version = grpc_version + self.protobuf_version = protobuf_version + + +def from_yaml(path_to_yaml: str): + """ + Parses a yaml located in path_to_yaml. Returns the parsed configuration + represented by the "model" classes + """ + with open(path_to_yaml, "r") as file_stream: + config = yaml.safe_load(file_stream) + + libraries = __required(config, "libraries") + + parsed_libraries = list() + for library in libraries: + gapics = __required(library, "GAPICs") + + parsed_gapics = list() + for gapic in gapics: + proto_path = __required(gapic, "proto_path") + new_gapic = GapicConfig(proto_path) + parsed_gapics.append(new_gapic) + + new_library = LibraryConfig( + api_shortname=__required(library, "api_shortname"), + api_description=__required(library, "api_description"), + name_pretty=__required(library, "name_pretty"), + product_documentation=__required(library, "product_documentation"), + gapic_configs=parsed_gapics, + library_type=__optional(library, "library_type", "GAPIC_AUTO"), + release_level=__optional(library, "release_level", "preview"), + api_id=__optional(library, "api_id", None), + api_reference=__optional(library, "api_reference", None), + client_documentation=__optional(library, "client_documentation", None), + distribution_name=__optional(library, "distribution_name", None), + googleapis_commitish=__optional(library, "googleapis_commitish", None), + group_id=__optional(library, "group_id", "com.google.cloud"), + issue_tracker=__optional(library, "issue_tracker", None), + library_name=__optional(library, "library_name", None), + rest_documentation=__optional(library, "rest_documentation", None), + rpc_documentation=__optional(library, "rpc_documentation", None), + cloud_api=__optional(library, "cloud_api", True), + requires_billing=__optional(library, "requires_billing", True), + ) + parsed_libraries.append(new_library) + + parsed_config = GenerationConfig( + gapic_generator_version=__required(config, "gapic_generator_version"), + grpc_version=__optional(config, "grpc_version", None), + protobuf_version=__optional(config, "protobuf_version", None), + googleapis_commitish=__required(config, "googleapis_commitish"), + owlbot_cli_image=__required(config, "owlbot_cli_image"), + synthtool_commitish=__required(config, "synthtool_commitish"), + libraries=parsed_libraries, + ) + + return parsed_config + + 
+def __required(config: Dict, key: str): + if key not in config: + raise ValueError(f"required key {key} not found in yaml") + return config[key] + + +def __optional(config: Dict, key: str, default: any): + if key not in config: + return default + return config[key] diff --git a/library_generation/model/library_config.py b/library_generation/model/library_config.py new file mode 100644 index 0000000000..9e3f9c9c61 --- /dev/null +++ b/library_generation/model/library_config.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List, Optional +from library_generation.model.gapic_config import GapicConfig + + +class LibraryConfig: + """ + Class that represents a library in a generation_config.yaml file + """ + + def __init__( + self, + api_shortname: str, + api_description: str, + name_pretty: str, + product_documentation: str, + gapic_configs: List[GapicConfig], + library_type: Optional[str] = None, + release_level: Optional[str] = None, + api_id: Optional[str] = None, + api_reference: Optional[str] = None, + client_documentation: Optional[str] = None, + distribution_name: Optional[str] = None, + googleapis_commitish: Optional[str] = None, + group_id: Optional[str] = "com.google.cloud", + issue_tracker: Optional[str] = None, + library_name: Optional[str] = None, + rest_documentation: Optional[str] = None, + rpc_documentation: Optional[str] = None, + cloud_api: Optional[bool] = True, + requires_billing: Optional[bool] = True, + ): + self.api_shortname = api_shortname + self.api_description = api_description + self.name_pretty = name_pretty + self.product_documentation = product_documentation + self.gapic_configs = gapic_configs + self.library_type = library_type if library_type else "GAPIC_AUTO" + self.release_level = release_level if release_level else "preview" + self.api_id = api_id + self.api_reference = api_reference + self.client_documentation = client_documentation + self.distribution_name = distribution_name + self.googleapis_commitish = googleapis_commitish + self.group_id = group_id + self.issue_tracker = issue_tracker + self.library_name = library_name + self.rest_documentation = rest_documentation + self.rpc_documentation = rpc_documentation + self.cloud_api = cloud_api + self.requires_billing = requires_billing diff --git a/library_generation/model/repo_config.py b/library_generation/model/repo_config.py new file mode 100644 index 0000000000..7f42720fe3 --- /dev/null +++ b/library_generation/model/repo_config.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Dict +from library_generation.model.library_config import LibraryConfig + + +class RepoConfig: + """ + Class that represents a generated repository + """ + + def __init__( + self, + output_folder: str, + libraries: Dict[str, LibraryConfig], + versions_file: str, + ): + """ + Init a RepoConfig object + :param output_folder: the path to which the generated repo goes + :param libraries: a mapping from library_path to LibraryConfig object + :param versions_file: the path of versions.txt used in post-processing + """ + self.output_folder = output_folder + self.libraries = libraries + self.versions_file = versions_file diff --git a/library_generation/new_client/get_generator_version_from_workspace.sh b/library_generation/new_client/get_generator_version_from_workspace.sh deleted file mode 100755 index 0d8cac1f25..0000000000 --- a/library_generation/new_client/get_generator_version_from_workspace.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/usr/bin/env bash -curl --silent 'https://raw.githubusercontent.com/googleapis/googleapis/master/WORKSPACE' | perl -nle 'print $1 if m/_gapic_generator_java_version\s+=\s+\"(.+)\"/' \ No newline at end of file diff --git a/library_generation/new_client/new-client.py b/library_generation/new_client/new-client.py deleted file mode 100644 index 26d0afb7f3..0000000000 --- a/library_generation/new_client/new-client.py +++ /dev/null @@ -1,424 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import os -from pathlib import Path -import re -import subprocess -import sys -import click -import templates -from git import Repo -import shutil -current_dir = os.path.dirname(os.path.realpath(__file__)) -parent_dir = os.path.dirname(current_dir) -sys.path.append(parent_dir) -from model.ClientInputs import parse - - -@click.group(invoke_without_command=False) -@click.pass_context -@click.version_option(message="%(version)s") -def main(ctx): - pass - - -@main.command() -@click.option( - "--api_shortname", - required=True, - type=str, - prompt="Service name? (e.g. automl)", - help="Name for the new directory name and (default) artifact name" -) -@click.option( - "--name-pretty", - required=True, - type=str, - prompt="Pretty name? (e.g. 'Cloud AutoML')", - help="The human-friendly name that appears in README.md" -) -@click.option( - "--product-docs", - required=True, - type=str, - prompt="Product Documentation URL", - help="Documentation URL that appears in README.md" -) -@click.option( - "--api-description", - required=True, - type=str, - prompt="Description for README. 
The first sentence is prefixed by the " - "pretty name", - help="Description that appears in README.md" -) -@click.option( - "--release-level", - type=click.Choice(["stable", "preview"]), - default="preview", - show_default=True, - help="A label that appears in repo-metadata.json. The first library " - "generation is always 'preview'." -) -@click.option( - "--transport", - type=click.Choice(["grpc", "http", "both"]), - default="grpc", - show_default=True, - help="A label that appears in repo-metadata.json" -) -@click.option("--language", type=str, default="java", show_default=True) -@click.option( - "--distribution-name", - type=str, - help="Maven coordinates of the generated library. By default it's " - "com.google.cloud:google-cloud-" -) -@click.option( - "--api-id", - type=str, - help="The value of the apiid parameter used in README.md It has link to " - "https://console.cloud.google.com/flows/enableapi?apiid=" -) -@click.option( - "--requires-billing", - type=bool, - default=True, - show_default=True, - help="Based on this value, README.md explains whether billing setup is " - "needed or not." -) -@click.option( - "--destination-name", - type=str, - default=None, - help="The directory name of the new library. By default it's " - "java-" -) -@click.option( - "--proto-path", - required=True, - type=str, - default=None, - help="Path to proto file from the root of the googleapis repository to the" - "directory that contains the proto files (without the version)." - "For example, to generate the library for 'google/maps/routing/v2', " - "then you specify this value as 'google/maps/routing'" -) -@click.option( - "--cloud-api", - type=bool, - default=True, - show_default=True, - help="If true, the artifact ID of the library is 'google-cloud-'; " - "otherwise 'google-'" -) -@click.option( - "--group-id", - type=str, - default="com.google.cloud", - show_default=True, - help="The group ID of the artifact when distribution name is not set" -) -@click.option( - "--library-type", - type=str, - default="GAPIC_AUTO", - show_default=True, - help="A label that appear in repo-metadata.json to tell how the library is " - "maintained or generated" -) -@click.option( - "--googleapis-url", - type=str, - default="https://github.com/googleapis/googleapis.git", - show_default=True, - help="The URL of the repository that has proto service definition" -) -@click.option( - "--rest-docs", - type=str, - help="If it exists, link to the REST Documentation for a service" -) -@click.option( - "--rpc-docs", - type=str, - help="If it exists, link to the RPC Documentation for a service" -) -@click.option( - "--split-repo", - type=bool, - default=False, - help="Whether generating a library into a split repository" -) -def generate( - api_shortname, - name_pretty, - product_docs, - api_description, - release_level, - distribution_name, - api_id, - requires_billing, - transport, - language, - destination_name, - proto_path, - cloud_api, - group_id, - library_type, - googleapis_url, - rest_docs, - rpc_docs, - split_repo, -): - cloud_prefix = "cloud-" if cloud_api else "" - - output_name = destination_name if destination_name else api_shortname - if distribution_name is None: - distribution_name = f"{group_id}:google-{cloud_prefix}{output_name}" - - distribution_name_short = re.split(r"[:\/]", distribution_name)[-1] - - if api_id is None: - api_id = f"{api_shortname}.googleapis.com" - - if not product_docs.startswith("https"): - sys.exit("product_docs must starts with 'https://'") - - client_documentation = ( - 
f"https://cloud.google.com/{language}/docs/reference/{distribution_name_short}/latest/overview" - ) - - if api_shortname == "": - sys.exit("api_shortname is empty") - - repo = "googleapis/google-cloud-java" - if split_repo: - repo = f"{language}-{output_name}" - - repo_metadata = { - "api_shortname": api_shortname, - "name_pretty": name_pretty, - "product_documentation": product_docs, - "api_description": api_description, - "client_documentation": client_documentation, - "release_level": release_level, - "transport": transport, - "language": language, - "repo": f"{repo}", - "repo_short": f"{language}-{output_name}", - "distribution_name": distribution_name, - "api_id": api_id, - "library_type": library_type, - } - if requires_billing: - repo_metadata["requires_billing"] = True - - if rest_docs: - repo_metadata["rest_documentation"] = rest_docs - - if rpc_docs: - repo_metadata["rpc_documentation"] = rpc_docs - # Initialize workdir - workdir = Path(f"{sys.path[0]}/../../output/java-{output_name}").resolve() - if os.path.isdir(workdir): - sys.exit( - "Couldn't create the module because " - f"the module {workdir} already exists. In Java client library " - "generation, a new API version of an existing module does not " - "require new-client.py invocation. " - "See go/yoshi-java-new-client#adding-a-new-service-version-by-owlbot." - ) - print(f"Creating a new module {workdir}") - os.makedirs(workdir, exist_ok=False) - # write .repo-metadata.json file - with open(workdir / ".repo-metadata.json", "w") as fp: - json.dump(repo_metadata, fp, indent=2) - - template_excludes = [ - ".github/*", - ".kokoro/*", - "samples/*", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.md", - "LICENSE", - "SECURITY.md", - "java.header", - "license-checks.xml", - "renovate.json", - ".gitignore" - ] - # create owlbot.py - templates.render( - template_name="owlbot.py.j2", - output_name=str(workdir / "owlbot.py"), - should_include_templates=True, - template_excludes=template_excludes, - ) - - # In monorepo, .OwlBot.yaml needs to be in the directory of the module. - owlbot_yaml_location_from_module = ".OwlBot.yaml" - # create owlbot config - templates.render( - template_name="owlbot.yaml.monorepo.j2", - output_name=str(workdir / owlbot_yaml_location_from_module), - artifact_name=distribution_name_short, - proto_path=proto_path, - module_name=f"java-{output_name}", - api_shortname=api_shortname - ) - - print(f"Pulling proto from {googleapis_url}") - output_dir = Path(f"{sys.path[0]}/../../output").resolve() - __sparse_clone( - remote_url=googleapis_url, - dest=output_dir, - ) - # Find a versioned directory within proto_path - # We only need to generate one version of the library as OwlBot - # will copy other versions from googleapis-gen. 
- version = __find_version( - Path(f"{sys.path[0]}/../../output/{proto_path}").resolve() - ) - versioned_proto_path = f"{proto_path}/{version}" - print(f"Generating from {versioned_proto_path}") - # parse BUILD.bazel in proto_path - client_input = parse( - build_path=Path(f"{sys.path[0]}/../../output/{versioned_proto_path}") - .resolve(), - versioned_path=versioned_proto_path, - ) - repo_root_dir = Path(f"{sys.path[0]}/../../").resolve() - generator_version = subprocess.check_output( - ["library_generation/new_client/get_generator_version_from_workspace.sh"], - cwd=repo_root_dir - ).strip() - print(f"Generator version: {generator_version}") - # run generate_library.sh - subprocess.check_call([ - "library_generation/generate_library.sh", - "-p", - versioned_proto_path, - "-d", - f"java-{output_name}", - "--gapic_generator_version", - generator_version, - "--protobuf_version", - "23.2", - "--proto_only", - client_input.proto_only, - "--gapic_additional_protos", - client_input.additional_protos, - "--transport", - client_input.transport, - "--rest_numeric_enums", - client_input.rest_numeric_enum, - "--gapic_yaml", - client_input.gapic_yaml, - "--service_config", - client_input.service_config, - "--service_yaml", - client_input.service_yaml, - "--include_samples", - client_input.include_samples, - "--versions_file", - f"{repo_root_dir}/versions.txt"], - cwd=repo_root_dir - ) - - # Move generated module to repo root. - __move_modules( - source=output_dir, - dest=repo_root_dir, - name_prefix="java-" - ) - - # Repo level post process - script_dir = "library_generation/repo-level-postprocess" - - print("Regenerating root pom.xml") - subprocess.check_call( - [ - f"{script_dir}/generate_root_pom.sh", - f"{output_dir}" - ], - cwd=repo_root_dir, - ) - - if not split_repo: - print("Regenerating the GAPIC BOM") - subprocess.check_call( - [ - f"{script_dir}/generate_gapic_bom.sh", - f"{output_dir}" - ], - cwd=repo_root_dir, - ) - - print("Deleting temp files") - subprocess.check_call( - [ - "rm", - "-rf", - f"{output_dir}" - ], - cwd=repo_root_dir - ) - - print(f"Prepared new library in {workdir}") - print(f"Please create a pull request:\n" - f" $ git checkout -b new_module_java-{output_name}\n" - f" $ git add .\n" - f" $ git commit -m 'feat: [{api_shortname}] new module for {api_shortname}'\n" - f" $ gh pr create --title 'feat: [{api_shortname}] new module for {api_shortname}'") - - -def __sparse_clone( - remote_url: str, - dest: Path, - commit_hash: str = "master", -): - local_repo = Repo.init(dest) - origin = local_repo.create_remote( - name="origin", - url=remote_url - ) - - origin.fetch() - git = local_repo.git() - git.checkout(f"origin/{commit_hash}", "--", "google", "grafeas") - - -def __find_version(proto_path: Path) -> str: - for child in proto_path.iterdir(): - if child.is_dir() and re.search(r"v[1-9]", child.name) is not None: - return child.name - return "" - - -def __move_modules( - source: Path, - dest: Path, - name_prefix: str -) -> None: - for folder in source.iterdir(): - if folder.is_dir() and folder.name.startswith(name_prefix): - shutil.move(folder, dest) - - -if __name__ == "__main__": - main() diff --git a/library_generation/new_client/requirements.in b/library_generation/new_client/requirements.in deleted file mode 100644 index 2ff144604c..0000000000 --- a/library_generation/new_client/requirements.in +++ /dev/null @@ -1,8 +0,0 @@ -attr -attrs -black -click -jinja2 -lxml -typing -GitPython diff --git a/library_generation/new_client/requirements.txt 
b/library_generation/new_client/requirements.txt deleted file mode 100644 index 1012323e89..0000000000 --- a/library_generation/new_client/requirements.txt +++ /dev/null @@ -1,232 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --generate-hashes library_generation/new_client/requirements.in -# -attr==0.3.2 \ - --hash=sha256:1ceebca768181cdcce9827611b1d728e592be5d293911539ea3d0b0bfa1146f4 \ - --hash=sha256:4f4bffeea8c27387bde446675a7ac24f3b8fea1075f12d849b5f5c5181fc8336 - # via -r library_generation/new_client/requirements.in -attrs==23.2.0 \ - --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ - --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 - # via -r library_generation/new_client/requirements.in -black==23.12.1 \ - --hash=sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50 \ - --hash=sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f \ - --hash=sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e \ - --hash=sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec \ - --hash=sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055 \ - --hash=sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3 \ - --hash=sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5 \ - --hash=sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54 \ - --hash=sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b \ - --hash=sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e \ - --hash=sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e \ - --hash=sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba \ - --hash=sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea \ - --hash=sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59 \ - --hash=sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d \ - --hash=sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0 \ - --hash=sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9 \ - --hash=sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a \ - --hash=sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e \ - --hash=sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba \ - --hash=sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2 \ - --hash=sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2 - # via -r library_generation/new_client/requirements.in -click==8.1.7 \ - --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ - --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de - # via - # -r library_generation/new_client/requirements.in - # black -gitdb==4.0.11 \ - --hash=sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4 \ - --hash=sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b - # via gitpython -gitpython==3.1.40 \ - --hash=sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4 \ - --hash=sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a - # via -r library_generation/new_client/requirements.in -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - 
--hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 - # via -r library_generation/new_client/requirements.in -lxml==5.0.0 \ - --hash=sha256:016de3b29a262655fc3d2075dc1b2611f84f4c3d97a71d579c883d45e201eee4 \ - --hash=sha256:0326e9b8176ea77269fb39e7af4010906e73e9496a9f8eaf06d253b1b1231ceb \ - --hash=sha256:03290e2f714f2e7431c8430c08b48167f657da7bc689c6248e828ff3c66d5b1b \ - --hash=sha256:049fef98d02513c34f5babd07569fc1cf1ed14c0f2fbff18fe72597f977ef3c2 \ - --hash=sha256:07a900735bad9af7be3085480bf384f68ed5580ba465b39a098e6a882c060d6b \ - --hash=sha256:0d277d4717756fe8816f0beeff229cb72f9dd02a43b70e1d3f07c8efadfb9fe1 \ - --hash=sha256:173bcead3af5d87c7bca9a030675073ddaad8e0a9f0b04be07cd9390453e7226 \ - --hash=sha256:1ef0793e1e2dd221fce7c142177008725680f7b9e4a184ab108d90d5d3ab69b7 \ - --hash=sha256:21af2c3862db6f4f486cddf73ec1157b40d5828876c47cd880edcbad8240ea1b \ - --hash=sha256:2219cbf790e701acf9a21a31ead75f983e73daf0eceb9da6990212e4d20ebefe \ - --hash=sha256:2992591e2294bb07faf7f5f6d5cb60710c046404f4bfce09fb488b85d2a8f58f \ - --hash=sha256:3663542aee845129a981889c19b366beab0b1dadcf5ca164696aabfe1aa51667 \ - --hash=sha256:3e6cbb68bf70081f036bfc018649cf4b46c4e7eaf7860a277cae92dee2a57f69 \ - --hash=sha256:3f908afd0477cace17f941d1b9cfa10b769fe1464770abe4cfb3d9f35378d0f8 \ - --hash=sha256:3ffa066db40b0347e48334bd4465de768e295a3525b9a59831228b5f4f93162d \ - --hash=sha256:405e3760f83a8ba3bdb6e622ec79595cdc20db916ce37377bbcb95b5711fa4ca \ - --hash=sha256:44fa9afd632210f1eeda51cf284ed8dbab0c7ec8b008dd39ba02818e0e114e69 \ - --hash=sha256:4786b0af7511ea614fd86407a52a7bc161aa5772d311d97df2591ed2351de768 \ - --hash=sha256:4a45a278518e4308865c1e9dbb2c42ce84fb154efb03adeb16fdae3c1687c7c9 \ - --hash=sha256:4b9d5b01900a760eb3acf6cef50aead4ef2fa79e7ddb927084244e41dfe37b65 \ - --hash=sha256:4e69c36c8618707a90ed3fb6f48a6cc9254ffcdbf7b259e439a5ae5fbf9c5206 \ - --hash=sha256:52a9ab31853d3808e7cf0183b3a5f7e8ffd622ea4aee1deb5252dbeaefd5b40d \ - --hash=sha256:52c0acc2f29b0a204efc11a5ed911a74f50a25eb7d7d5069c2b1fd3b3346ce11 \ - --hash=sha256:5382612ba2424cea5d2c89e2c29077023d8de88f8d60d5ceff5f76334516df9e \ - --hash=sha256:581a78f299a9f5448b2c3aea904bfcd17c59bf83016d221d7f93f83633bb2ab2 \ - --hash=sha256:583c0e15ae06adc81035346ae2abb2e748f0b5197e7740d8af31222db41bbf7b \ - --hash=sha256:59cea9ba1c675fbd6867ca1078fc717a113e7f5b7644943b74137b7cc55abebf \ - --hash=sha256:5b39f63edbe7e018c2ac1cf0259ee0dd2355274e8a3003d404699b040782e55e \ - --hash=sha256:5eff173f0ff408bfa578cbdafd35a7e0ca94d1a9ffe09a8a48e0572d0904d486 \ - --hash=sha256:5fb988e15378d6e905ca8f60813950a0c56da9469d0e8e5d8fe785b282684ec5 \ - --hash=sha256:6507c58431dbd95b50654b3313c5ad54f90e54e5f2cdacf733de61eae478eec5 \ - --hash=sha256:6a2de85deabf939b0af89e2e1ea46bfb1239545e2da6f8ac96522755a388025f \ - --hash=sha256:6a5501438dd521bb7e0dde5008c40c7bfcfaafaf86eccb3f9bd27509abb793da \ - --hash=sha256:6bba06d8982be0f0f6432d289a8d104417a0ab9ed04114446c4ceb6d4a40c65d \ - --hash=sha256:70ab4e02f7aa5fb4131c8b222a111ce7676f3767e36084fba3a4e7338dc82dcd \ - --hash=sha256:7188495c1bf71bfda87d78ed50601e72d252119ce11710d6e71ff36e35fea5a0 \ - --hash=sha256:71a7cee869578bc17b18050532bb2f0bc682a7b97dda77041741a1bd2febe6c7 \ - --hash=sha256:73bfab795d354aaf2f4eb7a5b0db513031734fd371047342d5803834ce19ec18 \ - --hash=sha256:766868f729f3ab84125350f1a0ea2594d8b1628a608a574542a5aff7355b9941 \ - --hash=sha256:77b73952534967a4497d9e4f26fbeebfba19950cbc66b7cc3a706214429d8106 \ - --hash=sha256:78d6d8e5b54ed89dc0f0901eaaa579c384ad8d59fa43cc7fb06e9bb89115f8f4 \ 
- --hash=sha256:793be9b4945c2dfd69828fb5948d7d9569b78e0599e4a2e88d92affeb0ff3aa3 \ - --hash=sha256:7ba26a7dc929a1b3487d51bbcb0099afed2fc06e891b82845c8f37a2d7d7fbbd \ - --hash=sha256:7df433d08d4587dc3932f7fcfc3194519a6824824104854e76441fd3bc000d29 \ - --hash=sha256:80209b31dd3908bc5b014f540fd192c97ea52ab179713a730456c5baf7ce80c1 \ - --hash=sha256:8134d5441d1ed6a682e3de3d7a98717a328dce619ee9c4c8b3b91f0cb0eb3e28 \ - --hash=sha256:81509dffd8aba3bdb43e90cbd218c9c068a1f4047d97bc9546b3ac9e3a4ae81d \ - --hash=sha256:88f559f8beb6b90e41a7faae4aca4c8173a4819874a9bf8e74c8d7c1d51f3162 \ - --hash=sha256:894c5f71186b410679aaab5774543fcb9cbabe8893f0b31d11cf28a0740e80be \ - --hash=sha256:8cc0a951e5616ac626f7036309c41fb9774adcd4aa7db0886463da1ce5b65edb \ - --hash=sha256:8ce8b468ab50f9e944719d1134709ec11fe0d2840891a6cae369e22141b1094c \ - --hash=sha256:904d36165848b59c4e04ae5b969072e602bd987485076fca8ec42c6cd7a7aedc \ - --hash=sha256:96095bfc0c02072fc89afa67626013a253596ea5118b8a7f4daaae049dafa096 \ - --hash=sha256:980ba47c8db4b9d870014c7040edb230825b79017a6a27aa54cdb6fcc02d8cc0 \ - --hash=sha256:992029258ed719f130d5a9c443d142c32843046f1263f2c492862b2a853be570 \ - --hash=sha256:99cad5c912f359e59e921689c04e54662cdd80835d80eeaa931e22612f515df7 \ - --hash=sha256:9b59c429e1a2246da86ae237ffc3565efcdc71c281cd38ca8b44d5fb6a3b993a \ - --hash=sha256:9ca498f8554a09fbc3a2f8fc4b23261e07bc27bef99b3df98e2570688033f6fc \ - --hash=sha256:9cd3d6c2c67d4fdcd795e4945e2ba5434909c96640b4cc09453bd0dc7e8e1bac \ - --hash=sha256:a85136d0ee18a41c91cc3e2844c683be0e72e6dda4cb58da9e15fcaef3726af7 \ - --hash=sha256:ac21aace6712472e77ea9dfc38329f53830c4259ece54c786107105ebb069053 \ - --hash=sha256:aebd8fd378e074b22e79cad329dcccd243c40ff1cafaa512d19276c5bb9554e1 \ - --hash=sha256:affdd833f82334fdb10fc9a1c7b35cdb5a86d0b672b4e14dd542e1fe7bcea894 \ - --hash=sha256:b6d4e148edee59c2ad38af15810dbcb8b5d7b13e5de3509d8cf3edfe74c0adca \ - --hash=sha256:bb58e8f4b2cfe012cd312239b8d5139995fe8f5945c7c26d5fbbbb1ddb9acd47 \ - --hash=sha256:bfdc4668ac56687a89ca3eca44231144a2e9d02ba3b877558db74ba20e2bd9fa \ - --hash=sha256:c1249aa4eaced30b59ecf8b8cae0b1ccede04583c74ca7d10b6f8bbead908b2c \ - --hash=sha256:c7cfb6af73602c8d288581df8a225989d7e9d5aab0a174be0e19fcfa800b6797 \ - --hash=sha256:c7fe19abb3d3c55a9e65d289b12ad73b3a31a3f0bda3c539a890329ae9973bd6 \ - --hash=sha256:c8954da15403db1acfc0544b3c3f963a6ef4e428283ab6555e3e298bbbff1cf6 \ - --hash=sha256:c90c593aa8dd57d5dab0ef6d7d64af894008971d98e6a41b320fdd75258fbc6e \ - --hash=sha256:cb564bbe55ff0897d9cf1225041a44576d7ae87f06fd60163544c91de2623d3f \ - --hash=sha256:cfa8a4cdc3765574b7fd0c7cfa5fbd1e2108014c9dfd299c679e5152bea9a55e \ - --hash=sha256:d1bb64646480c36a4aa1b6a44a5b6e33d0fcbeab9f53f1b39072cd3bb2c6243a \ - --hash=sha256:dac2733fe4e159b0aae0439db6813b7b1d23ff96d0b34c0107b87faf79208c4e \ - --hash=sha256:db40e85cffd22f7d65dcce30e85af565a66401a6ed22fc0c56ed342cfa4ffc43 \ - --hash=sha256:dd39ef87fd1f7bb5c4aa53454936e6135cbfe03fe3744e8218be193f9e4fef16 \ - --hash=sha256:de1a8b54170024cf1c0c2718c82412bca42cd82e390556e3d8031af9541b416f \ - --hash=sha256:e675a4b95208e74c34ac0751cc4bab9170e7728b61601fb0f4746892c2bb7e0b \ - --hash=sha256:e6bb39d91bf932e7520cb5718ae3c2f498052aca53294d5d59fdd9068fe1a7f2 \ - --hash=sha256:e8c63f5c7d87e7044880b01851ac4e863c3349e6f6b6ab456fe218d9346e816d \ - --hash=sha256:ea56825c1e23c9c8ea385a191dac75f9160477057285b88c88736d9305e6118f \ - --hash=sha256:ee60f33456ff34b2dd1d048a740a2572798356208e4c494301c931de3a0ab3a2 \ - 
--hash=sha256:f15844a1b93dcaa09c2b22e22a73384f3ae4502347c3881cfdd674e14ac04e21 \ - --hash=sha256:f298ac9149037d6a3d5c74991bded39ac46292520b9c7c182cb102486cc87677 \ - --hash=sha256:f30e697b6215e759d0824768b2c5b0618d2dc19abe6c67eeed2b0460f52470d1 \ - --hash=sha256:f92d73faa0b1a76d1932429d684b7ce95829e93c3eef3715ec9b98ab192c9d31 \ - --hash=sha256:fef10f27d6318d2d7c88680e113511ddecf09ee4f9559b3623b73ee89fa8f6cc - # via -r library_generation/new_client/requirements.in -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - 
--hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 - # via jinja2 -mypy-extensions==1.0.0 \ - --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ - --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 - # via black -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 - # via black -pathspec==0.12.1 \ - --hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \ - --hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712 - # via black -platformdirs==4.1.0 \ - --hash=sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380 \ - --hash=sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420 - # via black -smmap==5.0.1 \ - --hash=sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62 \ - --hash=sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da - # via gitdb -typing==3.7.4.3 \ - --hash=sha256:1187fb9c82fd670d10aa07bbb6cfcfe4bdda42d6fab8d5134f04e8c4d0b71cc9 \ - --hash=sha256:283d868f5071ab9ad873e5e52268d611e851c870a2ba354193026f2dfb29d8b5 - # via -r library_generation/new_client/requirements.in diff --git a/library_generation/new_client/templates.py b/library_generation/new_client/templates.py deleted file mode 100644 index 5b0282ce03..0000000000 --- a/library_generation/new_client/templates.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2020 Google LLC -# -# Licensed under the 
Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from jinja2 import Environment, FileSystemLoader -import os -import pathlib - -root_directory = pathlib.Path( - os.path.realpath(os.path.dirname(os.path.realpath(__file__))) -) -print(root_directory) -jinja_env = Environment(loader=FileSystemLoader(str(root_directory / "templates"))) - - -def render(template_name: str, output_name: str, **kwargs): - template = jinja_env.get_template(template_name) - t = template.stream(kwargs) - directory = os.path.dirname(output_name) - if not os.path.isdir(directory): - os.makedirs(directory) - t.dump(str(output_name)) diff --git a/library_generation/owlbot/bin/entrypoint.sh b/library_generation/owlbot/bin/entrypoint.sh index 26ed707591..a26eaec996 100755 --- a/library_generation/owlbot/bin/entrypoint.sh +++ b/library_generation/owlbot/bin/entrypoint.sh @@ -61,7 +61,7 @@ function processModule() { # ensure formatting on all .java files in the repository echo "Reformatting source..." - mvn fmt:format + mvn fmt:format -q echo "...done" } diff --git a/library_generation/postprocess_library.sh b/library_generation/postprocess_library.sh index f7035ec6c8..d46a9c890c 100755 --- a/library_generation/postprocess_library.sh +++ b/library_generation/postprocess_library.sh @@ -20,7 +20,7 @@ # provided # 7 - is_monorepo: whether this library is a monorepo, which implies slightly # different logic -set -xeo pipefail +set -eo pipefail scripts_root=$(dirname "$(readlink -f "$0")") postprocessing_target=$1 @@ -50,22 +50,6 @@ do fi done - -# ensure pyenv scripts are available -eval "$(pyenv init --path)" -eval "$(pyenv init -)" -eval "$(pyenv virtualenv-init -)" - -# create and activate the python virtualenv -python_version=$(cat "${scripts_root}/configuration/python-version") -if [ $(pyenv versions | grep "${python_version}" | wc -l) -eq 0 ]; then - pyenv install "${python_version}" -fi -if [ $(pyenv virtualenvs | grep "${python_version}" | grep "postprocessing" | wc -l) -eq 0 ];then - pyenv virtualenv "${python_version}" "postprocessing" -fi -pyenv activate "postprocessing" - if [[ -z "${owlbot_cli_source_folder}" ]]; then owlbot_cli_source_folder=$(mktemp -d) build_owlbot_cli_source_folder "${postprocessing_target}" "${owlbot_cli_source_folder}" "${preprocessed_sources_path}" @@ -81,7 +65,7 @@ else fi docker run --rm \ - --user $(id -u):$(id -g) \ + --user "$(id -u)":"$(id -g)" \ -v "${postprocessing_target}:/repo" \ -v "${owlbot_cli_source_folder}:/pre-processed-libraries" \ -w /repo \ diff --git a/library_generation/repo-level-postprocess/generate_gapic_bom.sh b/library_generation/repo-level-postprocess/generate_gapic_bom.sh deleted file mode 100755 index ad37553d58..0000000000 --- a/library_generation/repo-level-postprocess/generate_gapic_bom.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash - -set -e - -# Generate BOM of the artifacts in this repository -GENERATION_DIR=$1 -bom_lines="" -# For modules that produce BOMs -for bom_directory in $(find . 
-maxdepth 3 -name 'google-*-bom' | sort --dictionary-order); do - if [[ "${bom_directory}" = *gapic-libraries-bom ]] || [[ "${bom_directory}" = *google-cloud-core* ]]; then - continue - fi - pom_file="${bom_directory}/pom.xml" - groupId_line=$(grep --max-count=1 'groupId' "${pom_file}") - artifactId_line=$(grep --max-count=1 'artifactId' "${pom_file}") - version_line=$(grep --max-count=1 'x-version-update' "${pom_file}") - - if [[ "$groupId_line" == *"com.google.cloud"* - || "$groupId_line" == *"com.google.analytic"* - || "$groupId_line" == *"com.google.area120"* - || "$groupId_line" == *"io.grafeas"* ]]; then - # The gapic bom mainly includes cloud libraries and ones that have been included already. - # Let's avoid adding com.google.maps and com.google.shopping for now. We may decide to - # add them later. It's more difficult to remove them later without impacting users. - bom_lines+=" \n\ - ${groupId_line}\n\ - ${artifactId_line}\n\ - ${version_line}\n\ - pom\n\ - import\n\ - \n" - fi -done - -# For originally-handwritten modules that do not produce a BOM -for module in $(find . -mindepth 2 -maxdepth 2 -name pom.xml |sort --dictionary-order | xargs dirname); do - if ls "${module}"/*-bom 1> /dev/null 2>&1; then - continue - fi - if ! test -f "${module}/.repo-metadata.json"; then - continue - fi - - pom_file="${module}/pom.xml" - groupId_line=$(grep --max-count=1 'groupId' "${pom_file}") - artifactId_line=$(grep --max-count=1 'artifactId' "${pom_file}") - version_line=$(grep --max-count=1 'x-version-update' "${pom_file}") - bom_lines+=" \n\ - ${groupId_line}\n\ - ${artifactId_line}\n\ - ${version_line}\n\ - \n" -done - -mkdir -p gapic-libraries-bom - -perl -0pe 's/.*<\/dependencies>/\nBOM_ARTIFACT_LIST\n <\/dependencies>/s' "${GENERATION_DIR}/../gapic-libraries-bom/pom.xml" > "${GENERATION_DIR}/bom.pom.xml" -awk -v "dependencyManagements=${bom_lines}" '{gsub(/BOM_ARTIFACT_LIST/,dependencyManagements)}1' \ - "${GENERATION_DIR}/bom.pom.xml" > gapic-libraries-bom/pom.xml -rm "${GENERATION_DIR}/bom.pom.xml" \ No newline at end of file diff --git a/library_generation/repo-level-postprocess/generate_root_pom.sh b/library_generation/repo-level-postprocess/generate_root_pom.sh deleted file mode 100755 index 2cac682c94..0000000000 --- a/library_generation/repo-level-postprocess/generate_root_pom.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -set -e - -GENERATION_DIR=$1; - -# Find all Maven modules (a directory that contains pom.xml) -find . -mindepth 2 -maxdepth 2 -name pom.xml |sort --dictionary-order | xargs dirname \ - |sed -e 's|./||' | xargs -I '{}' echo " {}" > /tmp/repo-modules.txt - -perl -0pe 's/.*<\/modules>/\n <\/modules>/s' ${GENERATION_DIR}/../pom.xml > ${GENERATION_DIR}/parent.pom.xml -awk -v MODULES="`awk -v ORS='\\\\n' '1' /tmp/repo-modules.txt`" '1;//{print MODULES}' ${GENERATION_DIR}/parent.pom.xml > pom.xml -rm ${GENERATION_DIR}/parent.pom.xml \ No newline at end of file diff --git a/library_generation/templates/gapic-libraries-bom.xml.j2 b/library_generation/templates/gapic-libraries-bom.xml.j2 new file mode 100644 index 0000000000..45dbdf42ce --- /dev/null +++ b/library_generation/templates/gapic-libraries-bom.xml.j2 @@ -0,0 +1,37 @@ + + + 4.0.0 + com.google.cloud + gapic-libraries-bom + pom + {{ monorepo_version }} + Google Cloud Java BOM + + BOM for the libraries in google-cloud-java repository. Users should not + depend on this artifact explicitly because this BOM is an implementation + detail of the Libraries BOM. 
+ + + + google-cloud-pom-parent + com.google.cloud + {{ monorepo_version }} + ../google-cloud-pom-parent/pom.xml + + + + + {%- for bom_config in bom_configs %} + + {{ bom_config.group_id }} + {{ bom_config.artifact_id }} + {{ bom_config.version }} + {%- if bom_config.is_import %} + pom + import + {%- endif %} + + {%- endfor %} + + + diff --git a/library_generation/new_client/templates/owlbot.py.j2 b/library_generation/templates/owlbot.py.j2 similarity index 100% rename from library_generation/new_client/templates/owlbot.py.j2 rename to library_generation/templates/owlbot.py.j2 diff --git a/library_generation/new_client/templates/owlbot.yaml.monorepo.j2 b/library_generation/templates/owlbot.yaml.monorepo.j2 similarity index 99% rename from library_generation/new_client/templates/owlbot.yaml.monorepo.j2 rename to library_generation/templates/owlbot.yaml.monorepo.j2 index 3cfcc46aaf..5267a6f8a3 100644 --- a/library_generation/new_client/templates/owlbot.yaml.monorepo.j2 +++ b/library_generation/templates/owlbot.yaml.monorepo.j2 @@ -31,6 +31,6 @@ deep-copy-regex: dest: "/owl-bot-staging/{{ module_name }}/$1/{{ artifact_name }}/src" - source: "/{{ proto_path }}/(v.*)/.*-java/samples/snippets/generated" dest: "/owl-bot-staging/{{ module_name }}/$1/samples/snippets/generated" -{% endif %} +{%- endif %} api-name: {{ api_shortname }} diff --git a/library_generation/templates/root-pom.xml.j2 b/library_generation/templates/root-pom.xml.j2 new file mode 100644 index 0000000000..c48e8b71ef --- /dev/null +++ b/library_generation/templates/root-pom.xml.j2 @@ -0,0 +1,88 @@ + + + 4.0.0 + google-cloud-java + com.google.cloud + 0.201.0 + pom + + + true + + + + gapic-libraries-bom + google-cloud-jar-parent + google-cloud-pom-parent + {%- for module in modules %} + {{ module }} + {%- endfor %} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 3.1.1 + + true + + + + + + + + release-staging-repository + + + + !gpg.executable + + + + + sonatype-nexus-snapshots + https://google.oss.sonatype.org/content/repositories/snapshots + + + sonatype-nexus-staging + https://google.oss.sonatype.org/service/local/staging/deploy/maven2/ + + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.13 + true + + sonatype-nexus-staging + https://google.oss.sonatype.org/ + false + + + + + + + release-non-google-oss-sonatype + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + ossrh + https://oss.sonatype.org/ + + + + + + + diff --git a/library_generation/test/compare_poms.py b/library_generation/test/compare_poms.py index 94c94ae128..f58953505d 100644 --- a/library_generation/test/compare_poms.py +++ b/library_generation/test/compare_poms.py @@ -4,112 +4,117 @@ The only comparison points are: element path (e.g. project/dependencies) and element text There is a special case for `dependency`, where the maven coordinates are prepared as well """ -import xml.etree.ElementTree as ET +from library_generation.utilities import eprint +import xml.etree.ElementTree as et from collections import Counter import sys import os + current = os.path.dirname(os.path.realpath(__file__)) parent = os.path.dirname(current) sys.path.append(parent) -from utilities import eprint -""" -Convenience method to access a node's child elements via path and get its text -""" def get_text_from_element(node, element_name, namespace): - child = node.find(namespace + element_name) - return child.text if child is not None else '' + """ + Convenience method to access a node's child elements via path and get + its text. 
+ """ + child = node.find(namespace + element_name) + return child.text if child is not None else "" + -""" -Convenience method to pretty print the contents of a Counter (or dict) -""" def print_counter(counter): - for key, value in counter.items(): - eprint(f'{key}: {value}') + """ + Convenience method to pretty print the contents of a Counter (or dict) + """ + for key, value in counter.items(): + eprint(f"{key}: {value}") + -""" -Recursively traverses a node tree and appends element text to a given -`elements` array. If the element tag is `dependency` -then the maven coordinates for its children will be computed as well -""" def append_to_element_list(node, path, elements): - namespace_start, namespace_end, tag_name = node.tag.rpartition('}') - namespace = namespace_start + namespace_end - if tag_name == 'dependency': - group_id = get_text_from_element(node, 'groupId', namespace) - artifact_id = get_text_from_element(node, 'artifactId', namespace) - artifact_str = '' - artifact_str += group_id - artifact_str += ':' + artifact_id - elements.append(path + '/' + tag_name + '=' + artifact_str) - if node.text and len(node.text.strip()) > 0: - elements.append(path + '/' + tag_name + '=' + node.text) - - if tag_name == 'version': - # versions may be yet to be processed, we disregard them - return elements + """ + Recursively traverses a node tree and appends element text to a given + `elements` array. If the element tag is `dependency` + then the maven coordinates for its children will be computed as well + """ + namespace_start, namespace_end, tag_name = node.tag.rpartition("}") + namespace = namespace_start + namespace_end + if tag_name == "dependency": + group_id = get_text_from_element(node, "groupId", namespace) + artifact_id = get_text_from_element(node, "artifactId", namespace) + artifact_str = "" + artifact_str += group_id + artifact_str += ":" + artifact_id + elements.append(path + "/" + tag_name + "=" + artifact_str) + if node.text and len(node.text.strip()) > 0: + elements.append(path + "/" + tag_name + "=" + node.text) + + if tag_name == "version": + # versions may be yet to be processed, we disregard them + return elements + + for child in node: + child_path = path + "/" + tag_name + append_to_element_list(child, child_path, elements) - for child in node: - child_path = path + '/' + tag_name - append_to_element_list(child, child_path, elements) + return elements - return elements -""" -compares two XMLs for content differences -the argument print_whole_trees determines if both trees should be printed -""" -def compare_xml(file1, file2, print_whole_trees): - try: - tree1 = ET.parse(file1) - tree2 = ET.parse(file2) - except ET.ParseError as e: - eprint(f'Error parsing XML') - raise e - except FileNotFoundError as e: - eprint(f'Error reading file') - raise e - - tree1_elements = [] - tree2_elements = [] - - append_to_element_list(tree1.getroot(), '/', tree1_elements) - append_to_element_list(tree2.getroot(), '/', tree2_elements) - - tree1_counter = Counter(tree1_elements) - tree2_counter = Counter(tree2_elements) - intersection = tree1_counter & tree2_counter - only_in_tree1 = tree1_counter - intersection - only_in_tree2 = tree2_counter - intersection - if print_whole_trees == 'true': - eprint('tree1') - print_counter(tree2_counter) - eprint('tree2') - print_counter(tree1_counter) - if len(only_in_tree1) > 0 or len(only_in_tree2) > 0: - eprint('only in ' + file1) - print_counter(only_in_tree1) - eprint('only in ' + file2) - print_counter(only_in_tree2) - return True - return False 
+def compare_xml(expected, actual, print_trees): + """ + compares two XMLs for content differences + the argument print_trees determines if both trees should be printed + """ + try: + expected_tree = et.parse(expected) + actual_tree = et.parse(actual) + except et.ParseError as e: + eprint(f"Error parsing XML") + raise e + except FileNotFoundError as e: + eprint(f"Error reading file") + raise e + + expected_elements = [] + actual_elements = [] + + append_to_element_list(expected_tree.getroot(), "/", expected_elements) + append_to_element_list(actual_tree.getroot(), "/", actual_elements) + + expected_counter = Counter(expected_elements) + actual_counter = Counter(actual_elements) + intersection = expected_counter & actual_counter + only_in_expected = expected_counter - intersection + only_in_actual = actual_counter - intersection + if print_trees: + eprint("expected") + print_counter(expected_counter) + eprint("actual") + print_counter(actual_counter) + if len(only_in_expected) > 0 or len(only_in_actual) > 0: + eprint("only in " + expected) + print_counter(only_in_expected) + eprint("only in " + actual) + print_counter(only_in_actual) + return True + return False if __name__ == "__main__": - if len(sys.argv) != 4: - eprint("Usage: python compare_xml.py ") - sys.exit(1) - - file1 = sys.argv[1] - file2 = sys.argv[2] - print_whole_trees = sys.argv[3] - has_diff = compare_xml(file1, file2, print_whole_trees) - - if has_diff: - eprint(f'The poms are different') - sys.exit(1) - eprint('The XML files are the same.') - sys.exit(0) - - + if len(sys.argv) != 4: + eprint( + "Usage: python compare_xml.py " + ) + sys.exit(1) + + file1 = sys.argv[1] + file2 = sys.argv[2] + print_whole_trees = sys.argv[3] + has_diff = compare_xml(file1, file2, print_whole_trees) + + if has_diff: + eprint(f"The poms are different") + sys.exit(1) + eprint("The XML files are the same.") + sys.exit(0) diff --git a/library_generation/test/generate_library_integration_test.sh b/library_generation/test/generate_library_integration_test.sh deleted file mode 100755 index 9b46304da3..0000000000 --- a/library_generation/test/generate_library_integration_test.sh +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/bin/env bash - -set -xeo pipefail - -# This script is used to test the result of `generate_library.sh` against generated -# source code in the specified repository. -# Specifically, this script will do -# 1. take a configuration yaml describing the structure of the libraries to -# generate -# 2. For each api_shortname, call generate_composed_library.py to generate the groups of libraries -# 3. After the generation is done, compare the resulting library with the -# corresponding cloned repository - -# defaults -googleapis_gen_url="git@github.com:googleapis/googleapis-gen.git" -enable_postprocessing="true" - -script_dir=$(dirname "$(readlink -f "$0")") -proto_path_list="${script_dir}/resources/proto_path_list.txt" -library_generation_dir="${script_dir}"/..
-source "${script_dir}/test_utilities.sh" -source "${script_dir}/../utilities.sh" -output_folder="$(pwd)/output" - - -while [[ $# -gt 0 ]]; do -key="$1" -case $key in - -p|--proto_path_list) - proto_path_list="$2" - shift - ;; - -e|--enable_postprocessing) - enable_postprocessing="$2" - shift - ;; - -g|--googleapis_gen_url) - googleapis_gen_url="$2" - shift - ;; - *) - echo "Invalid option: [$1]" - exit 1 - ;; -esac -shift # past argument or value -done - -mkdir -p "${output_folder}" - -if [ -f "${output_folder}/generation_times" ];then - rm "${output_folder}/generation_times" -fi - -declare -a configuration_yamls=( - "${script_dir}/resources/integration/java-bigtable/generation_config.yaml" - "${script_dir}/resources/integration/google-cloud-java/generation_config.yaml" -) - - -for configuration_yaml in "${configuration_yamls[@]}"; do - library_api_shortnames=$(py_util "get_configuration_yaml_library_api_shortnames" "${configuration_yaml}") - destination_path=$(py_util "get_configuration_yaml_destination_path" "${configuration_yaml}") - pushd "${output_folder}" - if [[ "${destination_path}" == *google-cloud-java* ]]; then - git clone "https://github.com/googleapis/google-cloud-java" - repository_path="${output_folder}/google-cloud-java" - else - git clone "https://github.com/googleapis/${destination_path}" - repository_path="${output_folder}/${destination_path}" - fi - popd - - for api_shortname in ${library_api_shortnames}; do - pushd "${output_folder}" - - echo "Generating library ${api_shortname}..." - generation_start=$(date "+%s") - python3 "${library_generation_dir}"/main.py generate-from-yaml \ - --generation-config-yaml "${configuration_yaml}" \ - --enable-postprocessing "${enable_postprocessing}" \ - --target-library-api-shortname "${api_shortname}" \ - --repository-path "${repository_path}" - generation_end=$(date "+%s") - - # some generations are less than 1 second (0 produces exit code 1 in `expr`) - generation_duration_seconds=$(expr "${generation_end}" - "${generation_start}" || true) - echo "Generation time for ${api_shortname} was ${generation_duration_seconds} seconds." - pushd "${output_folder}" - echo "${proto_path} ${generation_duration_seconds}" >> generation_times - - echo "Generate library finished." - echo "Compare generation result..." - if [ ${enable_postprocessing} == "true" ]; then - echo "Checking out repository..." - if [[ "${destination_path}" == *google-cloud-java* ]]; then - target_folder="${output_folder}/google-cloud-java/java-${api_shortname}" - else - target_folder="${output_folder}/java-${api_shortname}" - fi - - pushd "${target_folder}" - source_diff_result=0 - git diff \ - --ignore-space-at-eol \ - -r \ - --exit-code \ - -- \ - . \ - ':!*pom.xml' \ - ':!*README.md' \ - ':!*gapic_metadata.json' \ - ':!*reflect-config.json' \ - ':!*package-info.java' \ - || source_diff_result=$? - - pom_diff_result=$(compare_poms "${target_folder}") - popd # target_folder - if [[ ${source_diff_result} == 0 ]] && [[ ${pom_diff_result} == 0 ]] ; then - echo "SUCCESS: Comparison finished, no difference is found." - elif [ ${source_diff_result} != 0 ]; then - echo "FAILURE: Differences found in proto path: java-${api_shortname}." 
- exit "${source_diff_result}" - elif [ ${pom_diff_result} != 0 ]; then - echo "FAILURE: Differences found in generated java-${api_shortname}'s poms" - exit "${pom_diff_result}" - fi - elif [ "${enable_postprocessing}" == "false" ]; then - for proto_path in "${proto_paths[@]}"; do - destination_path=$(compute_destination_path "${proto_path}" "${output_folder}") - # include gapic_metadata.json and package-info.java after - # resolving https://github.com/googleapis/sdk-platform-java/issues/1986 - source_diff_result=0 - diff --strip-trailing-cr -r "googleapis-gen/${proto_path}/${destination_path}" "${output_folder}/${destination_path}" \ - -x "*gradle*" \ - -x "gapic_metadata.json" \ - -x "package-info.java" || source_diff_result=$? - if [ ${source_diff_result} == 0 ] ; then - echo "SUCCESS: Comparison finished, no difference is found." - else - echo "FAILURE: Differences found in proto path: ${proto_path}." - exit "${source_diff_result}" - fi - done - fi - - popd # output_folder - done -done -echo "ALL TESTS SUCCEEDED" -echo "generation times in seconds (does not consider repo checkout):" -cat "${output_folder}/generation_times" diff --git a/library_generation/test/integration_tests.py b/library_generation/test/integration_tests.py new file mode 100644 index 0000000000..8f0cb4a1ef --- /dev/null +++ b/library_generation/test/integration_tests.py @@ -0,0 +1,148 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import json +import os +import shutil +import unittest +from distutils.dir_util import copy_tree +from distutils.file_util import copy_file +from filecmp import dircmp + +from git import Repo +from pathlib import Path +from typing import List +from typing import Dict +from library_generation.generate_repo import generate_from_yaml +from library_generation.model.generation_config import from_yaml +from library_generation.test.compare_poms import compare_xml +from library_generation.utilities import get_library_name + +config_name = "generation_config.yaml" +script_dir = os.path.dirname(os.path.realpath(__file__)) +# for simplicity, the configuration files should be in a relative directory +# within config_dir named {repo}/generation_config.yaml, where repo is +# the name of the repository the target libraries live. 
+config_dir = f"{script_dir}/resources/integration" +golden_dir = f"{config_dir}/golden" +repo_prefix = "https://github.com/googleapis" +committish_list = ["chore/test-hermetic-build"] # google-cloud-java + + +class IntegrationTest(unittest.TestCase): + def test_generate_repo(self): + shutil.rmtree(f"{golden_dir}", ignore_errors=True) + os.makedirs(f"{golden_dir}", exist_ok=True) + config_files = self.__get_config_files(config_dir) + i = 0 + for repo, config_file in config_files.items(): + repo_dest = f"{golden_dir}/{repo}" + self.__pull_repo_to(Path(repo_dest), repo, committish_list[i]) + library_names = self.__get_library_names_from_config(config_file) + # prepare golden files + for library_name in library_names: + copy_tree(f"{repo_dest}/{library_name}", f"{golden_dir}/{library_name}") + copy_tree( + f"{repo_dest}/gapic-libraries-bom", f"{golden_dir}/gapic-libraries-bom" + ) + copy_file(f"{repo_dest}/pom.xml", golden_dir) + generate_from_yaml( + generation_config_yaml=config_file, repository_path=repo_dest + ) + # compare result + for library_name in library_names: + print( + f"Compare generation result: " + f"expected library in {golden_dir}/{library_name}, " + f"actual library in {repo_dest}/{library_name}." + ) + compare_result = dircmp( + f"{golden_dir}/{library_name}", + f"{repo_dest}/{library_name}", + ignore=[".repo-metadata.json"], + ) + # compare source code + self.assertEqual([], compare_result.left_only) + self.assertEqual([], compare_result.right_only) + self.assertEqual([], compare_result.diff_files) + print("Source code comparison succeed.") + # compare .repo-metadata.json + self.assertTrue( + self.__compare_json_files( + f"{golden_dir}/{library_name}/.repo-metadata.json", + f"{repo_dest}/{library_name}/.repo-metadata.json", + ), + msg=f"The generated {library_name}/.repo-metadata.json is different from golden.", + ) + print(".repo-metadata.json comparison succeed.") + # compare gapic-libraries-bom/pom.xml and pom.xml + self.assertFalse( + compare_xml( + f"{golden_dir}/gapic-libraries-bom/pom.xml", + f"{repo_dest}/gapic-libraries-bom/pom.xml", + False, + ) + ) + print("gapic-libraries-bom/pom.xml comparison succeed.") + self.assertFalse( + compare_xml( + f"{golden_dir}/pom.xml", + f"{repo_dest}/pom.xml", + False, + ) + ) + print("pom.xml comparison succeed.") + # remove google-cloud-java + i += 1 + + @classmethod + def __pull_repo_to(cls, dest: Path, repo: str, committish: str): + repo_url = f"{repo_prefix}/{repo}" + repo = Repo.clone_from(repo_url, dest) + repo.git.checkout(committish) + + @classmethod + def __get_library_names_from_config(cls, config_path: str) -> List[str]: + config = from_yaml(config_path) + library_names = [] + for library in config.libraries: + library_names.append(f"java-{get_library_name(library)}") + + return library_names + + @classmethod + def __get_config_files(cls, path: str) -> Dict[str, str]: + config_files = {} + for sub_dir in Path(path).resolve().iterdir(): + repo = sub_dir.name + # skip the split repo. 
+ if repo == "golden" or repo == "java-bigtable": + continue + config = f"{sub_dir}/{config_name}" + config_files[repo] = config + + return config_files + + @classmethod + def __compare_json_files(cls, expected: str, actual: str) -> bool: + return cls.__load_json_to_sorted_list( + expected + ) == cls.__load_json_to_sorted_list(actual) + + @classmethod + def __load_json_to_sorted_list(cls, path: str) -> List[tuple]: + with open(path) as f: + data = json.load(f) + res = [(key, value) for key, value in data.items()] + + return sorted(res, key=lambda x: x[0]) diff --git a/library_generation/test/resources/goldens/.OwlBot-golden.yaml b/library_generation/test/resources/goldens/.OwlBot-golden.yaml new file mode 100644 index 0000000000..225b4620bf --- /dev/null +++ b/library_generation/test/resources/goldens/.OwlBot-golden.yaml @@ -0,0 +1,35 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: +- "/java-bare-metal-solution/grpc-google-.*/src" +- "/java-bare-metal-solution/proto-google-.*/src" +- "/java-bare-metal-solution/google-.*/src" +- "/java-bare-metal-solution/samples/snippets/generated" + +deep-preserve-regex: +- "/java-bare-metal-solution/google-.*/src/test/java/com/google/cloud/.*/v.*/it/IT.*Test.java" + +deep-copy-regex: +- source: "/google/cloud/baremetalsolution/(v.*)/.*-java/proto-google-.*/src" + dest: "/owl-bot-staging/java-bare-metal-solution/$1/proto-google-cloud-bare-metal-solution-$1/src" +- source: "/google/cloud/baremetalsolution/(v.*)/.*-java/grpc-google-.*/src" + dest: "/owl-bot-staging/java-bare-metal-solution/$1/grpc-google-cloud-bare-metal-solution-$1/src" +- source: "/google/cloud/baremetalsolution/(v.*)/.*-java/gapic-google-.*/src" + dest: "/owl-bot-staging/java-bare-metal-solution/$1/google-cloud-bare-metal-solution/src" +- source: "/google/cloud/baremetalsolution/(v.*)/.*-java/samples/snippets/generated" + dest: "/owl-bot-staging/java-bare-metal-solution/$1/samples/snippets/generated" + +api-name: baremetalsolution \ No newline at end of file diff --git a/library_generation/test/resources/goldens/.repo-metadata-golden.json b/library_generation/test/resources/goldens/.repo-metadata-golden.json new file mode 100644 index 0000000000..88ee68b2e1 --- /dev/null +++ b/library_generation/test/resources/goldens/.repo-metadata-golden.json @@ -0,0 +1,18 @@ +{ + "api_shortname": "baremetalsolution", + "name_pretty": "Bare Metal Solution", + "product_documentation": "https://cloud.google.com/bare-metal/docs", + "api_description": "Bring your Oracle workloads to Google Cloud with Bare Metal Solution and jumpstart your cloud journey with minimal risk.", + "client_documentation": "https://cloud.google.com/java/docs/reference/google-cloud-bare-metal-solution/latest/overview", + "release_level": "preview", + "transport": "grpc", + "language": "java", + "repo": "googleapis/google-cloud-java", + "repo_short": "java-bare-metal-solution", + "distribution_name": "com.google.cloud:google-cloud-bare-metal-solution", + "api_id": 
"baremetalsolution.googleapis.com", + "library_type": "GAPIC_AUTO", + "requires_billing": true, + "rest_documentation": "https://cloud.google.com/bare-metal/docs/reference/rest", + "rpc_documentation": "https://cloud.google.com/bare-metal/docs/reference/rpc" +} \ No newline at end of file diff --git a/library_generation/test/resources/goldens/owlbot-golden.py b/library_generation/test/resources/goldens/owlbot-golden.py new file mode 100644 index 0000000000..c2c142892a --- /dev/null +++ b/library_generation/test/resources/goldens/owlbot-golden.py @@ -0,0 +1,36 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import synthtool as s +from synthtool.languages import java + + +for library in s.get_staging_dirs(): + # put any special-case replacements here + s.move(library) + +s.remove_staging_dirs() +java.common_templates(monorepo=True, excludes=[ + ".github/*", + ".kokoro/*", + "samples/*", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.md", + "LICENSE", + "SECURITY.md", + "java.header", + "license-checks.xml", + "renovate.json", + ".gitignore", +]) \ No newline at end of file diff --git a/library_generation/test/resources/integration/google-cloud-java/generation_config.yaml b/library_generation/test/resources/integration/google-cloud-java/generation_config.yaml index 7b73f329d0..67164271fb 100644 --- a/library_generation/test/resources/integration/google-cloud-java/generation_config.yaml +++ b/library_generation/test/resources/integration/google-cloud-java/generation_config.yaml @@ -1,90 +1,32 @@ -#Required. -gapic_generator_version: 2.32.0 -#Optional. -# grpc_version: 1.60.0 -#Optional. The protobuf version in googleapis (not sdk-platform-java) is the actual source of truth for generated protos in google-cloud-java -protobuf_version: 23.2 -#Required. -googleapis_commitish: 4512234113a18c1fda1fb0d0ceac8f4b4efe9801 -#Required. +gapic_generator_version: 2.34.0 +protobuf_version: 25.2 +googleapis_commitish: 1a45bf7393b52407188c82e63101db7dc9c72026 owlbot_cli_image: sha256:623647ee79ac605858d09e60c1382a716c125fb776f69301b72de1cd35d49409 -#Required. -synthtool_commitish: fac8444edd5f5526e804c306b766a271772a3e2f -#Required. The root folder name of generated client libraries. +synthtool_commitish: 6612ab8f3afcd5e292aecd647f0fa68812c9f5b5 destination_path: google-cloud-java -#Required. If the number of libraries is greater than 1, the scripts will treat the target repository as a monorepo, with a slightly different workflow mainly in the postprocessing stage libraries: - #Required. Can be used for populating the folder name java-{api_shortName}. This is also the destination-name in new-client.py. - - api_shortname: asset - #Optional. Overrides the root-level commit hash - googleapis_commitish: 4512234113a18c1fda1fb0d0ceac8f4b4efe9801 - #Optional. The default value is the title of service yaml - name_pretty: Cloud Asset - #Required. - library_type: GAPIC_AUTO - #Optional. The default value is com.google.cloud - group_id: com.google.cloud - #Optional. 
The default value is google.cloud.{api_shortname} - artifact_id: google.cloud.asset - #Optional. The default value is true. - requires_billing: true - #Optional. The default value is documentation.summary from service yaml - api_description: - #Optional. - product_documentation: - #Optional. - client_documentation: - #Optional. - rest_documentation: - #Optional. - rpc_documentation: - #Required. + - api_shortname: apigeeconnect + name_pretty: Apigee Connect + product_documentation: "https://cloud.google.com/apigee/docs/hybrid/v1.3/apigee-connect/" + api_description: "allows the Apigee hybrid management plane to connect securely to the MART service in the runtime plane without requiring you to expose the MART endpoint on the internet." + release_level: "stable" + library_name: "apigee-connect" + GAPICs: + - proto_path: google/cloud/apigeeconnect/v1 + + - api_shortname: cloudasset + name_pretty: Cloud Asset Inventory + product_documentation: "https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview" + api_description: "provides inventory services based on a time series database. This database keeps a five week history of Google Cloud asset metadata. The Cloud Asset Inventory export service allows you to export all asset metadata at a certain timestamp or export event change history during a timeframe." + library_name: "asset" + client_documentation: "https://cloud.google.com/java/docs/reference/google-cloud-asset/latest/overview" + distribution_name: "com.google.cloud:google-cloud-asset" + release_level: "stable" + issue_tracker: "https://issuetracker.google.com/issues/new?component=187210&template=0" + api_reference: "https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview" GAPICs: - #Required. This is a relative path to googleapis/googleapis. We'll parse all the parameters needed by generate_library.sh from BUILD.bazel in this folder. 
- proto_path: google/cloud/asset/v1 - proto_path: google/cloud/asset/v1p1beta1 - proto_path: google/cloud/asset/v1p2beta1 - proto_path: google/cloud/asset/v1p5beta1 - proto_path: google/cloud/asset/v1p7beta1 - - api_shortname: speech - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/cloud/speech/v1 - - proto_path: google/cloud/speech/v1p1beta1 - - proto_path: google/cloud/speech/v2 - - api_shortname: apigee-connect - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/cloud/apigeeconnect/v1 - - api_shortname: dialogflow - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/cloud/dialogflow/v2beta1 - - proto_path: google/cloud/dialogflow/v2 - - api_shortname: compute - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/cloud/compute/v1 - - api_shortname: kms - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/cloud/kms/v1 - - api_shortname: redis - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/cloud/redis/v1 - - proto_path: google/cloud/redis/v1beta1 - - api_shortname: containeranalysis - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/devtools/containeranalysis/v1 - - api_shortname: iam - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/iam/v1 - - proto_path: google/iam/v2 - - api_shortname: iamcredentials - library_type: GAPIC_AUTO - GAPICs: - - proto_path: google/iam/credentials/v1 - diff --git a/library_generation/test/resources/integration/java-bigtable/generation_config.yaml b/library_generation/test/resources/integration/java-bigtable/generation_config.yaml index 4a82a3e2c4..fcad57c819 100644 --- a/library_generation/test/resources/integration/java-bigtable/generation_config.yaml +++ b/library_generation/test/resources/integration/java-bigtable/generation_config.yaml @@ -6,9 +6,9 @@ owlbot_cli_image: sha256:623647ee79ac605858d09e60c1382a716c125fb776f69301b72de1c synthtool_commitish: 6612ab8f3afcd5e292aecd647f0fa68812c9f5b5 destination_path: java-bigtable libraries: - - api_shortname: bigtable - name_pretty: Cloud Bigtable - library_type: GAPIC_COMBO - GAPICs: - - proto_path: google/bigtable/admin/v2 - - proto_path: google/bigtable/v2 +- api_shortname: bigtable + name_pretty: Cloud Bigtable + library_type: GAPIC_COMBO + GAPICs: + - proto_path: google/bigtable/admin/v2 + - proto_path: google/bigtable/v2 diff --git a/library_generation/test/resources/misc/BUILD_service_config_relative_target.bazel b/library_generation/test/resources/misc/BUILD_service_config_relative_target.bazel new file mode 100644 index 0000000000..ccd59af2fb --- /dev/null +++ b/library_generation/test/resources/misc/BUILD_service_config_relative_target.bazel @@ -0,0 +1,3 @@ +java_gapic_library( + grpc_service_config = ":compute_grpc_service_config.json" +) diff --git a/library_generation/test/resources/misc/BUILD_service_yaml_absolute_target.bazel b/library_generation/test/resources/misc/BUILD_service_yaml_absolute_target.bazel new file mode 100644 index 0000000000..ded899dff7 --- /dev/null +++ b/library_generation/test/resources/misc/BUILD_service_yaml_absolute_target.bazel @@ -0,0 +1,3 @@ +java_gapic_library( + service_yaml = "//google/cloud/videointelligence:videointelligence_v1p3beta1.yaml", +) \ No newline at end of file diff --git a/library_generation/test/resources/misc/testversions.txt b/library_generation/test/resources/misc/versions.txt similarity index 100% rename from library_generation/test/resources/misc/testversions.txt rename to library_generation/test/resources/misc/versions.txt diff --git 
a/library_generation/test/resources/test_repo_level_postprocess/gapic-libraries-bom/pom-golden.xml b/library_generation/test/resources/test_repo_level_postprocess/gapic-libraries-bom/pom-golden.xml new file mode 100644 index 0000000000..304ee9b892 --- /dev/null +++ b/library_generation/test/resources/test_repo_level_postprocess/gapic-libraries-bom/pom-golden.xml @@ -0,0 +1,45 @@ + + + 4.0.0 + com.google.cloud + gapic-libraries-bom + pom + 1.29.0-SNAPSHOT + Google Cloud Java BOM + + BOM for the libraries in google-cloud-java repository. Users should not + depend on this artifact explicitly because this BOM is an implementation + detail of the Libraries BOM. + + + + google-cloud-pom-parent + com.google.cloud + 1.29.0-SNAPSHOT + ../google-cloud-pom-parent/pom.xml + + + + + + com.google.cloud + google-cloud-dns + 2.33.0-SNAPSHOT + + + com.google.cloud + google-cloud-service-control-bom + 1.35.0-SNAPSHOT + pom + import + + + com.google.cloud + google-cloud-tasks-bom + 2.35.0-SNAPSHOT + pom + import + + + + \ No newline at end of file diff --git a/library_generation/test/resources/test_repo_level_postprocess/java-dns/pom.xml b/library_generation/test/resources/test_repo_level_postprocess/java-dns/pom.xml new file mode 100644 index 0000000000..28bdaad76b --- /dev/null +++ b/library_generation/test/resources/test_repo_level_postprocess/java-dns/pom.xml @@ -0,0 +1,9 @@ + + + 4.0.0 + com.google.cloud + google-cloud-dns + jar + 2.33.0-SNAPSHOT + Google Cloud DNS Parent + diff --git a/library_generation/test/resources/test_repo_level_postprocess/java-service-control/google-cloud-service-control-bom/pom.xml b/library_generation/test/resources/test_repo_level_postprocess/java-service-control/google-cloud-service-control-bom/pom.xml new file mode 100644 index 0000000000..483838475d --- /dev/null +++ b/library_generation/test/resources/test_repo_level_postprocess/java-service-control/google-cloud-service-control-bom/pom.xml @@ -0,0 +1,8 @@ + + + 4.0.0 + com.google.cloud + google-cloud-service-control-bom + 1.35.0-SNAPSHOT + pom + diff --git a/library_generation/test/resources/test_repo_level_postprocess/java-tasks/google-cloud-tasks-bom/pom.xml b/library_generation/test/resources/test_repo_level_postprocess/java-tasks/google-cloud-tasks-bom/pom.xml new file mode 100644 index 0000000000..3138a26ce7 --- /dev/null +++ b/library_generation/test/resources/test_repo_level_postprocess/java-tasks/google-cloud-tasks-bom/pom.xml @@ -0,0 +1,8 @@ + + + 4.0.0 + com.google.cloud + google-cloud-tasks-bom + 2.35.0-SNAPSHOT + pom + diff --git a/library_generation/test/resources/test_repo_level_postprocess/pom-golden.xml b/library_generation/test/resources/test_repo_level_postprocess/pom-golden.xml new file mode 100644 index 0000000000..8347287bc3 --- /dev/null +++ b/library_generation/test/resources/test_repo_level_postprocess/pom-golden.xml @@ -0,0 +1,88 @@ + + + 4.0.0 + google-cloud-java + com.google.cloud + 0.201.0 + pom + + + true + + + + gapic-libraries-bom + google-cloud-jar-parent + google-cloud-pom-parent + java-dns + java-service-control + java-tasks + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 3.1.1 + + true + + + + + + + + release-staging-repository + + + + !gpg.executable + + + + + sonatype-nexus-snapshots + https://google.oss.sonatype.org/content/repositories/snapshots + + + sonatype-nexus-staging + https://google.oss.sonatype.org/service/local/staging/deploy/maven2/ + + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.13 + true + + sonatype-nexus-staging + 
https://google.oss.sonatype.org/ + false + + + + + + + release-non-google-oss-sonatype + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + ossrh + https://oss.sonatype.org/ + + + + + + + \ No newline at end of file diff --git a/library_generation/test/resources/test_repo_level_postprocess/versions.txt b/library_generation/test/resources/test_repo_level_postprocess/versions.txt new file mode 100644 index 0000000000..6a537f4a39 --- /dev/null +++ b/library_generation/test/resources/test_repo_level_postprocess/versions.txt @@ -0,0 +1,4 @@ +# Format: +# module:released-version:current-version + +google-cloud-java:1.28.0:1.29.0-SNAPSHOT diff --git a/library_generation/test/unit_tests.py b/library_generation/test/unit_tests.py index 13d2eaacf9..f819bae3e7 100644 --- a/library_generation/test/unit_tests.py +++ b/library_generation/test/unit_tests.py @@ -1,190 +1,371 @@ -""" -Unit tests for utilities.py -""" +#!/usr/bin/env python3 +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import unittest import os import io -import sys import contextlib import subprocess -current = os.path.dirname(os.path.realpath(__file__)) -parent = os.path.dirname(current) -sys.path.append(parent) -import utilities as util -from model.GapicConfig import GapicConfig -from model.GenerationConfig import GenerationConfig -from model.ClientInputs import parse as parse_build_file - -script_dir = os.path.dirname(os.path.realpath(__file__)) -resources_dir = os.path.join(script_dir, 'resources') - -class UtilitiesTest(unittest.TestCase): - - CONFIGURATION_YAML_PATH = os.path.join(current, 'resources', 'integration', - 'google-cloud-java', 'generation_config.yaml') - - def test_create_argument_valid_container_succeeds(self): - container_value = 'google/test/v1' - container = GapicConfig(container_value) - argument_key = 'proto_path' - result = util.create_argument(argument_key, container) - self.assertEqual([ f'--{argument_key}', container_value], result) - - def test_create_argument_empty_container_returns_empty_list(self): - container = dict() - argument_key = 'proto_path' - result = util.create_argument(argument_key, container) - self.assertEqual([], result) - - def test_create_argument_none_container_fails(self): - container = None - argument_key = 'proto_path' - result = util.create_argument(argument_key, container) - self.assertEqual([], result) - - def test_get_configuration_yaml_library_api_shortnames_valid_input_returns_valid_list(self): - result = util.get_configuration_yaml_library_api_shortnames(self.CONFIGURATION_YAML_PATH) - self.assertEqual('asset speech apigee-connect dialogflow compute kms ' - + 'redis containeranalysis iam iamcredentials', result) - - def test_get_configuration_yaml_destination_path_returns_valid_destination_path(self): - result = util.get_configuration_yaml_destination_path(self.CONFIGURATION_YAML_PATH) - self.assertEqual('google-cloud-java', result) - - def test_sh_util_existent_function_succeeds(self): - result = util.sh_util('extract_folder_name 
path/to/folder_name') - self.assertEqual('folder_name', result) - - def test_sh_util_nonexistent_function_fails(self): - with self.assertRaises(RuntimeError): - result = util.sh_util('nonexistent_function') - - def test_eprint_valid_input_succeeds(self): - test_input='This is some test input' - # create a stdio capture object - stderr_capture = io.StringIO() - # run eprint() with the capture object - with contextlib.redirect_stderr(stderr_capture): - util.eprint(test_input) - result = stderr_capture.getvalue() - # print() appends a `\n` each time it's called - self.assertEqual(test_input + '\n', result) - - def test_delete_if_exists_preexisting_temp_files_succeeds(self): - # create temporary directory - # also remove last character (\n) - temp_dir = subprocess.check_output(['mktemp', '-d']).decode()[:-1] - - # add a file and a folder to the temp dir - file = os.path.join(temp_dir, 'temp_file') - with open(file, 'a'): - os.utime(file, None) - folder = os.path.join(temp_dir, 'temp_child_dir') - os.mkdir(folder) - self.assertEqual(2, len(os.listdir(temp_dir))) - - # remove file and folder - util.delete_if_exists(file) - util.delete_if_exists(folder) - self.assertEqual(0, len(os.listdir(temp_dir))) - - def test_client_inputs_parse_grpc_only_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_grpc.bazel') - self.assertEqual('grpc', parsed.transport) - - def test_client_inputs_parse_grpc_only_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_grpc.bazel') - self.assertEqual('grpc', parsed.transport) - - def test_client_inputs_parse_grpc_rest_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_grpc_rest.bazel') - self.assertEqual('grpc+rest', parsed.transport) - - def test_client_inputs_parse_rest_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_rest.bazel') - self.assertEqual('rest', parsed.transport) - - def test_client_inputs_parse_empty_include_samples_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_include_samples_empty.bazel') - self.assertEqual('false', parsed.include_samples) - - def test_client_inputs_parse_include_samples_false_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_include_samples_false.bazel') - self.assertEqual('false', parsed.include_samples) - - def test_client_inputs_parse_include_samples_true_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_include_samples_true.bazel') - self.assertEqual('true', parsed.include_samples) - - def test_client_inputs_parse_empty_rest_numeric_enums_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_rest_numeric_enums_empty.bazel') - self.assertEqual('false', parsed.rest_numeric_enum) - - def test_client_inputs_parse_include_samples_false_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 'BUILD_rest_numeric_enums_false.bazel') - self.assertEqual('false', parsed.rest_numeric_enum) - - def test_client_inputs_parse_include_samples_true_suceeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, '', 
'BUILD_rest_numeric_enums_true.bazel') - self.assertEqual('true', parsed.rest_numeric_enum) - - def test_client_inputs_parse_no_gapic_library_returns_proto_only_true(self): - build_file = os.path.join(resources_dir, 'misc') - # include_samples_empty only has a gradle assembly rule - parsed = parse_build_file(build_file, '', 'BUILD_include_samples_empty.bazel') - self.assertEqual('true', parsed.proto_only) - - def test_client_inputs_parse_with_gapic_library_returns_proto_only_false(self): - build_file = os.path.join(resources_dir, 'misc') - # rest.bazel has a java_gapic_library rule - parsed = parse_build_file(build_file, '', 'BUILD_rest.bazel') - self.assertEqual('false', parsed.proto_only) - - def test_client_inputs_parse_gapic_yaml_succeeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_gapic_yaml.bazel') - self.assertEqual('test/versioned/path/test_gapic_yaml.yaml', parsed.gapic_yaml) - - def test_client_inputs_parse_no_gapic_yaml_returns_empty_string(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_no_gapic_yaml.bazel') - self.assertEqual('', parsed.gapic_yaml) - - def test_client_inputs_parse_service_config_succeeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_service_config.bazel') - self.assertEqual('test/versioned/path/test_service_config.json', parsed.service_config) - - def test_client_inputs_parse_no_service_config_returns_empty_string(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_no_service_config.bazel') - self.assertEqual('', parsed.service_config) - - def test_client_inputs_parse_service_yaml_succeeds(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_service_yaml.bazel') - self.assertEqual('test/versioned/path/test_service_yaml.yaml', parsed.service_yaml) - - def test_client_inputs_parse_no_service_yaml_returns_empty_string(self): - build_file = os.path.join(resources_dir, 'misc') - parsed = parse_build_file(build_file, 'test/versioned/path', 'BUILD_no_service_yaml.bazel') - self.assertEqual('', parsed.service_yaml) +from pathlib import Path +from difflib import unified_diff +from typing import List +from library_generation import utilities as util +from library_generation.model.gapic_config import GapicConfig +from library_generation.model.generation_config import GenerationConfig +from library_generation.model.gapic_inputs import parse as parse_build_file +from library_generation.model.library_config import LibraryConfig +script_dir = os.path.dirname(os.path.realpath(__file__)) +resources_dir = os.path.join(script_dir, "resources") +build_file = Path(os.path.join(resources_dir, "misc")).resolve() +library_1 = LibraryConfig( + api_shortname="baremetalsolution", + name_pretty="Bare Metal Solution", + product_documentation="https://cloud.google.com/bare-metal/docs", + api_description="Bring your Oracle workloads to Google Cloud with Bare Metal Solution and jumpstart your cloud journey with minimal risk.", + gapic_configs=list(), + library_name="bare-metal-solution", + rest_documentation="https://cloud.google.com/bare-metal/docs/reference/rest", + rpc_documentation="https://cloud.google.com/bare-metal/docs/reference/rpc", +) +library_2 = LibraryConfig( + 
api_shortname="secretmanager", + name_pretty="Secret Management", + product_documentation="https://cloud.google.com/solutions/secrets-management/", + api_description="allows you to encrypt, store, manage, and audit infrastructure and application-level secrets.", + gapic_configs=list(), +) +class UtilitiesTest(unittest.TestCase): + """ + Unit tests for utilities.py + """ + + CONFIGURATION_YAML_PATH = os.path.join( + script_dir, + "resources", + "integration", + "google-cloud-java", + "generation_config.yaml", + ) + + def test_create_argument_valid_container_succeeds(self): + container_value = "google/test/v1" + container = GapicConfig(container_value) + argument_key = "proto_path" + result = util.create_argument(argument_key, container) + self.assertEqual([f"--{argument_key}", container_value], result) + + def test_create_argument_empty_container_returns_empty_list(self): + container = dict() + argument_key = "proto_path" + result = util.create_argument(argument_key, container) + self.assertEqual([], result) + + def test_create_argument_none_container_fails(self): + container = None + argument_key = "proto_path" + result = util.create_argument(argument_key, container) + self.assertEqual([], result) + + def test_sh_util_existent_function_succeeds(self): + result = util.sh_util("extract_folder_name path/to/folder_name") + self.assertEqual("folder_name", result) + + def test_sh_util_nonexistent_function_fails(self): + with self.assertRaises(RuntimeError): + result = util.sh_util("nonexistent_function") + + def test_eprint_valid_input_succeeds(self): + test_input = "This is some test input" + # create a stdio capture object + stderr_capture = io.StringIO() + # run eprint() with the capture object + with contextlib.redirect_stderr(stderr_capture): + util.eprint(test_input) + result = stderr_capture.getvalue() + # print() appends a `\n` each time it's called + self.assertEqual(test_input + "\n", result) + + def test_gapic_inputs_parse_grpc_only_succeeds(self): + parsed = parse_build_file(build_file, "", "BUILD_grpc.bazel") + self.assertEqual("grpc", parsed.transport) + + def test_gapic_inputs_parse_grpc_rest_succeeds(self): + parsed = parse_build_file(build_file, "", "BUILD_grpc_rest.bazel") + self.assertEqual("grpc+rest", parsed.transport) + + def test_gapic_inputs_parse_rest_succeeds(self): + parsed = parse_build_file(build_file, "", "BUILD_rest.bazel") + self.assertEqual("rest", parsed.transport) + + def test_gapic_inputs_parse_empty_include_samples_succeeds(self): + parsed = parse_build_file(build_file, "", "BUILD_include_samples_empty.bazel") + self.assertEqual("false", parsed.include_samples) + + def test_gapic_inputs_parse_include_samples_false_succeeds(self): + parsed = parse_build_file(build_file, "", "BUILD_include_samples_false.bazel") + self.assertEqual("false", parsed.include_samples) + + def test_gapic_inputs_parse_include_samples_true_succeeds(self): + parsed = parse_build_file(build_file, "", "BUILD_include_samples_true.bazel") + self.assertEqual("true", parsed.include_samples) + + def test_gapic_inputs_parse_empty_rest_numeric_enums_succeeds(self): + parsed = parse_build_file( + build_file, "", "BUILD_rest_numeric_enums_empty.bazel" + ) + self.assertEqual("false", parsed.rest_numeric_enum) + + def test_gapic_inputs_parse_rest_numeric_enums_false_succeeds(self): + parsed = parse_build_file( + build_file, "", "BUILD_rest_numeric_enums_false.bazel" + ) + self.assertEqual("false", parsed.rest_numeric_enum) + + def test_gapic_inputs_parse_rest_numeric_enums_true_succeeds(self): + 
parsed = parse_build_file(build_file, "", "BUILD_rest_numeric_enums_true.bazel") + self.assertEqual("true", parsed.rest_numeric_enum) + + def test_gapic_inputs_parse_no_gapic_library_returns_proto_only_true(self): + # include_samples_empty only has a gradle assembly rule + parsed = parse_build_file(build_file, "", "BUILD_include_samples_empty.bazel") + self.assertEqual("true", parsed.proto_only) + + def test_gapic_inputs_parse_with_gapic_library_returns_proto_only_false(self): + # rest.bazel has a java_gapic_library rule + parsed = parse_build_file(build_file, "", "BUILD_rest.bazel") + self.assertEqual("false", parsed.proto_only) + + def test_gapic_inputs_parse_gapic_yaml_succeeds(self): + parsed = parse_build_file( + build_file, "test/versioned/path", "BUILD_gapic_yaml.bazel" + ) + self.assertEqual("test/versioned/path/test_gapic_yaml.yaml", parsed.gapic_yaml) + + def test_gapic_inputs_parse_no_gapic_yaml_returns_empty_string(self): + parsed = parse_build_file( + build_file, "test/versioned/path", "BUILD_no_gapic_yaml.bazel" + ) + self.assertEqual("", parsed.gapic_yaml) + + def test_gapic_inputs_parse_service_config_succeeds(self): + parsed = parse_build_file( + build_file, "test/versioned/path", "BUILD_service_config.bazel" + ) + self.assertEqual( + "test/versioned/path/test_service_config.json", parsed.service_config + ) + + def test_gapic_inputs_parse_service_yaml_relative_target(self): + parsed = parse_build_file( + build_file, + "google/cloud/compute/v1", + "BUILD_service_config_relative_target.bazel", + ) + self.assertEqual( + "google/cloud/compute/v1/compute_grpc_service_config.json", + parsed.service_config, + ) + + def test_gapic_inputs_parse_no_service_config_returns_empty_string(self): + parsed = parse_build_file( + build_file, "test/versioned/path", "BUILD_no_service_config.bazel" + ) + self.assertEqual("", parsed.service_config) + + def test_gapic_inputs_parse_service_yaml_succeeds(self): + parsed = parse_build_file( + build_file, "test/versioned/path", "BUILD_service_yaml.bazel" + ) + self.assertEqual( + "test/versioned/path/test_service_yaml.yaml", parsed.service_yaml + ) + + def test_gapic_inputs_parse_service_yaml_absolute_target(self): + parsed = parse_build_file( + build_file, "", "BUILD_service_yaml_absolute_target.bazel" + ) + self.assertEqual( + "google/cloud/videointelligence/videointelligence_v1p3beta1.yaml", + parsed.service_yaml, + ) + + def test_gapic_inputs_parse_no_service_yaml_returns_empty_string(self): + parsed = parse_build_file( + build_file, "test/versioned/path", "BUILD_no_service_yaml.bazel" + ) + self.assertEqual("", parsed.service_yaml) + + def test_remove_version_from_returns_non_versioned_path(self): + proto_path = "google/cloud/aiplatform/v1" + self.assertEqual( + "google/cloud/aiplatform", util.remove_version_from(proto_path) + ) + + def test_remove_version_from_returns_self(self): + proto_path = "google/cloud/aiplatform" + self.assertEqual( + "google/cloud/aiplatform", util.remove_version_from(proto_path) + ) + + def test_get_version_from_returns_current(self): + versions_file = f"{resources_dir}/misc/versions.txt" + artifact = "gax-grpc" + self.assertEqual( + "2.33.1-SNAPSHOT", util.get_version_from(versions_file, artifact) + ) + + def test_get_version_from_returns_released(self): + versions_file = f"{resources_dir}/misc/versions.txt" + artifact = "gax-grpc" + self.assertEqual("2.34.0", util.get_version_from(versions_file, artifact, True)) + + def test_get_library_returns_library_name(self): + self.assertEqual("bare-metal-solution", 
util.get_library_name(library_1)) + + def test_get_library_returns_api_shortname(self): + self.assertEqual("secretmanager", util.get_library_name(library_2)) + + def test_generate_prerequisite_files_success(self): + library_path = f"{resources_dir}/goldens" + files = [ + f"{library_path}/.repo-metadata.json", + f"{library_path}/.OwlBot.yaml", + f"{library_path}/owlbot.py", + ] + self.__cleanup(files) + proto_path = "google/cloud/baremetalsolution/v2" + transport = "grpc" + util.generate_prerequisite_files( + library=library_1, + proto_path=proto_path, + transport=transport, + library_path=library_path, + ) + + self.__compare_files( + f"{library_path}/.repo-metadata.json", + f"{library_path}/.repo-metadata-golden.json", + ) + self.__compare_files( + f"{library_path}/.OwlBot.yaml", f"{library_path}/.OwlBot-golden.yaml" + ) + self.__compare_files( + f"{library_path}/owlbot.py", f"{library_path}/owlbot-golden.py" + ) + + def test_prepare_repo_monorepo_success(self): + gen_config = self.__get_a_gen_config(2) + repo_config = util.prepare_repo( + gen_config=gen_config, + library_config=gen_config.libraries, + repo_path=f"{resources_dir}/misc", + ) + self.assertEqual("output", Path(repo_config.output_folder).name) + library_path = sorted([Path(key).name for key in repo_config.libraries]) + self.assertEqual( + ["java-bare-metal-solution", "java-secretmanager"], library_path + ) + + def test_prepare_repo_monorepo_failed(self): + gen_config = self.__get_a_gen_config(2) + self.assertRaises( + FileNotFoundError, + util.prepare_repo, + gen_config, + gen_config.libraries, + f"{resources_dir}/non-exist", + ) + + def test_prepare_repo_split_repo_success(self): + gen_config = self.__get_a_gen_config(1) + repo_config = util.prepare_repo( + gen_config=gen_config, + library_config=gen_config.libraries, + repo_path=f"{resources_dir}/misc", + ) + self.assertEqual("output", Path(repo_config.output_folder).name) + library_path = sorted([Path(key).name for key in repo_config.libraries]) + self.assertEqual(["misc"], library_path) + + def test_repo_level_post_process_success(self): + repository_path = f"{resources_dir}/test_repo_level_postprocess" + versions_file = f"{repository_path}/versions.txt" + files = [ + f"{repository_path}/pom.xml", + f"{repository_path}/gapic-libraries-bom/pom.xml", + ] + self.__cleanup(files) + util.repo_level_post_process( + repository_path=repository_path, versions_file=versions_file + ) + self.__compare_files( + expect=f"{repository_path}/pom-golden.xml", + actual=f"{repository_path}/pom.xml", + ) + self.__compare_files( + expect=f"{repository_path}/gapic-libraries-bom/pom-golden.xml", + actual=f"{repository_path}/gapic-libraries-bom/pom.xml", + ) + + def __compare_files(self, expect: str, actual: str): + with open(expect, "r") as f: + expected_lines = f.readlines() + with open(actual, "r") as f: + actual_lines = f.readlines() + + diff = list(unified_diff(expected_lines, actual_lines)) + self.assertEqual( + first=[], second=diff, msg="Unexpected file contents:\n" + "".join(diff) + ) + + @staticmethod + def __get_a_gen_config(num: int): + """ + Returns an object of GenerationConfig with one or two of + LibraryConfig objects. Other attributes are set to empty str. + + :param num: the number of LibraryConfig objects associated with + the GenerationConfig. Only support one or two. 
+ :return: an object of GenerationConfig + """ + if num > 1: + libraries = [library_1, library_2] + else: + libraries = [library_1] + + return GenerationConfig( + gapic_generator_version="", + googleapis_commitish="", + owlbot_cli_image="", + synthtool_commitish="", + libraries=libraries, + ) + + @staticmethod + def __cleanup(files: List[str]): + for file in files: + path = Path(file).resolve() + if path.is_file(): + path.unlink() + elif path.is_dir(): + path.rmdir() if __name__ == "__main__": - unittest.main() + unittest.main() diff --git a/library_generation/utilities.py b/library_generation/utilities.py index 0772e8b260..1eff1ae947 100755 --- a/library_generation/utilities.py +++ b/library_generation/utilities.py @@ -1,125 +1,486 @@ - +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import json import sys import subprocess import os import shutil -from collections.abc import Sequence -from model.GenerationConfig import GenerationConfig +import re +from pathlib import Path +from lxml import etree +from library_generation.model.bom_config import BomConfig +from library_generation.model.generation_config import GenerationConfig +from library_generation.model.library_config import LibraryConfig from typing import List +from jinja2 import Environment, FileSystemLoader + +from library_generation.model.repo_config import RepoConfig script_dir = os.path.dirname(os.path.realpath(__file__)) +jinja_env = Environment(loader=FileSystemLoader(f"{script_dir}/templates")) +project_tag = "{http://maven.apache.org/POM/4.0.0}" +group_id_tag = "groupId" +artifact_tag = "artifactId" +version_tag = "version" + + +def __render(template_name: str, output_name: str, **kwargs): + template = jinja_env.get_template(template_name) + t = template.stream(kwargs) + directory = os.path.dirname(output_name) + if not os.path.isdir(directory): + os.makedirs(directory) + t.dump(str(output_name)) + + +def __search_for_java_modules( + repository_path: str, +) -> List[str]: + repo = Path(repository_path).resolve() + modules = [] + for sub_dir in repo.iterdir(): + if sub_dir.is_dir() and sub_dir.name.startswith("java-"): + modules.append(sub_dir.name) + return sorted(modules) + + +def __search_for_bom_artifact( + repository_path: str, +) -> List[BomConfig]: + repo = Path(repository_path).resolve() + module_exclusions = ["gapic-libraries-bom"] + group_id_inclusions = [ + "com.google.cloud", + "com.google.analytics", + "com.google.area120", + ] + bom_configs = [] + for module in repo.iterdir(): + if module.is_file() or module.name in module_exclusions: + continue + for sub_module in module.iterdir(): + if sub_module.is_dir() and sub_module.name.endswith("-bom"): + root = etree.parse(f"{sub_module}/pom.xml").getroot() + group_id = root.find(f"{project_tag}{group_id_tag}").text + if group_id not in group_id_inclusions: + continue + artifact_id = root.find(f"{project_tag}{artifact_tag}").text + version = root.find(f"{project_tag}{version_tag}").text + index = artifact_id.rfind("-") + version_annotation = 
artifact_id[:index] + bom_configs.append( + BomConfig( + group_id=group_id, + artifact_id=artifact_id, + version=version, + version_annotation=version_annotation, + ) + ) + # handle edge case: java-grafeas + bom_configs += __handle_special_bom( + repository_path=repository_path, + module="java-grafeas", + group_id="io.grafeas", + artifact_id="grafeas", + ) + # handle edge case: java-dns + bom_configs += __handle_special_bom( + repository_path=repository_path, + module="java-dns", + group_id="com.google.cloud", + artifact_id="google-cloud-dns", + ) + # handle edge case: java-notification + bom_configs += __handle_special_bom( + repository_path=repository_path, + module="java-notification", + group_id="com.google.cloud", + artifact_id="google-cloud-notification", + ) + + return sorted(bom_configs) + + +def __handle_special_bom( + repository_path: str, + module: str, + group_id: str, + artifact_id: str, +) -> List[BomConfig]: + pom = f"{repository_path}/{module}/pom.xml" + if not Path(pom).exists(): + return [] + root = etree.parse(pom).getroot() + version = root.find(f"{project_tag}{version_tag}").text + return [ + BomConfig( + group_id=group_id, + artifact_id=artifact_id, + version=version, + version_annotation=artifact_id, + is_import=False, + ) + ] -""" -Generates a list of two elements [argument, value], or returns -an empty array if arg_val is None -""" def create_argument(arg_key: str, arg_container: object) -> List[str]: - arg_val = getattr(arg_container, arg_key, None) - if arg_val is not None: - return [f'--{arg_key}', f'{arg_val}'] - return [] - -""" -For a given configuration yaml path, it returns a space-separated list of -the api_shortnames contained in such configuration_yaml -""" -def get_configuration_yaml_library_api_shortnames(generation_config_yaml: str) -> List[str]: - config = GenerationConfig.from_yaml(generation_config_yaml) - result = '' - for library in config.libraries: - result += f'{library.api_shortname} ' - return result[:-1] - -""" -For a given configuration yaml path, it returns the destination_path -entry at the root of the yaml -""" -def get_configuration_yaml_destination_path(generation_config_yaml: str) -> str: - config = GenerationConfig.from_yaml(generation_config_yaml) - return config.destination_path or '' - -""" -Runs a process with the given "arguments" list and prints its output. 
If the process -fails, then the whole program exits -""" -def run_process_and_print_output(arguments: List[str], job_name: str = 'Job'): - # check_output() raises an exception if it exited with a nonzero code - try: - output = subprocess.check_output(arguments, stderr=subprocess.STDOUT) - print(output.decode(), end='', flush=True) - print(f'{job_name} finished successfully') - except subprocess.CalledProcessError as ex: - print(ex.output.decode(), end='', flush=True) - print(f'{job_name} failed') - sys.exit(1) - - -""" -Calls a function defined in library_generation/utilities.sh -""" + """ + Generates a list of two elements [argument, value], or returns + an empty array if arg_val is None + """ + arg_val = getattr(arg_container, arg_key, None) + if arg_val is not None: + return [f"--{arg_key}", f"{arg_val}"] + return [] + + +def get_library_name( + library: LibraryConfig, +) -> str: + """ + Return the library name of a given LibraryConfig object + :param library: an object of LibraryConfig + :return: the library name + """ + return library.library_name if library.library_name else library.api_shortname + + +def run_process_and_print_output(arguments: List[str], job_name: str = "Job"): + """ + Runs a process with the given "arguments" list and prints its output. + If the process fails, then the whole program exits + """ + # check_output() raises an exception if it exited with a nonzero code + try: + output = subprocess.check_output(arguments, stderr=subprocess.STDOUT) + print(output.decode(), end="", flush=True) + print(f"{job_name} finished successfully") + except subprocess.CalledProcessError as ex: + print(ex.output.decode(), end="", flush=True) + print(f"{job_name} failed") + sys.exit(1) + + def sh_util(statement: str, **kwargs) -> str: - if 'stdout' not in kwargs: - kwargs['stdout'] = subprocess.PIPE - if 'stderr' not in kwargs: - kwargs['stderr'] = subprocess.PIPE - output = '' - with subprocess.Popen( - ['bash', '-exc', f'source {script_dir}/utilities.sh && {statement}'], - **kwargs, - ) as proc: - print('command stderr:') - for line in proc.stderr: - print(line.decode(), end='', flush=True) - print('command stdout:') - for line in proc.stdout: - print(line.decode(), end='', flush=True) - output += line.decode() - proc.wait() - if proc.returncode != 0: - raise RuntimeError(f'function {statement} failed with exit code {proc.returncode}') - # captured stdout may contain a newline at the end, we remove it - if len(output) > 0 and output[-1] == '\n': - output = output[:-1] - return output - -""" -prints to stderr -""" + """ + Calls a function defined in library_generation/utilities.sh + """ + if "stdout" not in kwargs: + kwargs["stdout"] = subprocess.PIPE + if "stderr" not in kwargs: + kwargs["stderr"] = subprocess.PIPE + output = "" + with subprocess.Popen( + ["bash", "-exc", f"source {script_dir}/utilities.sh && {statement}"], + **kwargs, + ) as proc: + print("command stderr:") + for line in proc.stderr: + print(line.decode(), end="", flush=True) + print("command stdout:") + for line in proc.stdout: + print(line.decode(), end="", flush=True) + output += line.decode() + proc.wait() + if proc.returncode != 0: + raise RuntimeError( + f"function {statement} failed with exit code {proc.returncode}" + ) + # captured stdout may contain a newline at the end, we remove it + if len(output) > 0 and output[-1] == "\n": + output = output[:-1] + return output + + def eprint(*args, **kwargs): - print(*args, file=sys.stderr, **kwargs) + """ + prints to stderr + """ + print(*args, file=sys.stderr, 
**kwargs) + + +def remove_version_from(proto_path: str) -> str: + """ + Remove the version of a proto_path + :param proto_path: versioned proto_path + :return: the proto_path without version + """ + version_pattern = "^v[1-9]" + index = proto_path.rfind("/") + version = proto_path[index + 1 :] + if re.match(version_pattern, version): + return proto_path[:index] + return proto_path + + +def check_monorepo(config: GenerationConfig) -> bool: + """ + Check whether to generate a monorepo according to the + generation config. + :param config: the generation configuration + :return: True if it's to generate a monorepo + """ + return len(config.libraries) > 1 + + +def prepare_repo( + gen_config: GenerationConfig, + library_config: List[LibraryConfig], + repo_path: str, + language: str = "java", +) -> RepoConfig: + """ + Gather information of the generated repository. + + :param gen_config: a GenerationConfig object representing a parsed + configuration yaml + :param library_config: a LibraryConfig object contained inside config, + passed here for convenience and to prevent all libraries to be processed + :param repo_path: the path to which the generated repository goes + :param language: programming language of the library + :return: a RepoConfig object contained repository information + :raise FileNotFoundError if there's no versions.txt in repo_path + """ + output_folder = sh_util("get_output_folder") + print(f"output_folder: {output_folder}") + os.makedirs(output_folder, exist_ok=True) + is_monorepo = check_monorepo(gen_config) + libraries = {} + for library in library_config: + library_name = ( + f"{language}-{library.library_name}" + if library.library_name + else f"{language}-{library.api_shortname}" + ) + library_path = f"{repo_path}/{library_name}" if is_monorepo else f"{repo_path}" + # use absolute path because docker requires absolute path + # in volume name. + absolute_library_path = str(Path(library_path).resolve()) + libraries[absolute_library_path] = library + # remove existing .repo-metadata.json + json_name = ".repo-metadata.json" + if os.path.exists(f"{absolute_library_path}/{json_name}"): + os.remove(f"{absolute_library_path}/{json_name}") + versions_file = f"{repo_path}/versions.txt" + if not Path(versions_file).exists(): + raise FileNotFoundError(f"{versions_file} is not found.") + + return RepoConfig( + output_folder=output_folder, + libraries=libraries, + versions_file=str(Path(versions_file).resolve()), + ) + + +def pull_api_definition( + config: GenerationConfig, library: LibraryConfig, output_folder: str +) -> None: + """ + Pull APIs definition from googleapis/googleapis repository. + To avoid duplicated pulling, only perform pulling if the library uses a + different commitish than in generation config. 
+ :param config: a GenerationConfig object representing a parsed configuration + yaml + :param library: a LibraryConfig object contained inside config, passed here + for convenience and to prevent all libraries to be processed + :param output_folder: the folder to which APIs definition (proto files) goes + :return: None + """ + googleapis_commitish = config.googleapis_commitish + if library.googleapis_commitish: + googleapis_commitish = library.googleapis_commitish + print(f"using library-specific googleapis commitish: {googleapis_commitish}") + else: + print(f"using common googleapis_commitish: {config.googleapis_commitish}") + + if googleapis_commitish != config.googleapis_commitish: + print("removing existing APIs definition") + shutil.rmtree(f"{output_folder}/google", ignore_errors=True) + shutil.rmtree(f"{output_folder}/grafeas", ignore_errors=True) + + if not ( + os.path.exists(f"{output_folder}/google") + and os.path.exists(f"{output_folder}/grafeas") + ): + print("downloading googleapis") + sh_util( + f"download_googleapis_files_and_folders {output_folder} {googleapis_commitish}" + ) + + +def generate_prerequisite_files( + library: LibraryConfig, + proto_path: str, + transport: str, + library_path: str, + language: str = "java", + is_monorepo: bool = True, +) -> None: + """ + Generate prerequisite files for a library. + + Note that the version, if any, in the proto_path will be removed. + :param library: the library configuration + :param proto_path: the proto path + :param transport: transport supported by the library + :param library_path: the path to which the generated file goes + :param language: programming language of the library + :param is_monorepo: whether the library is in a monorepo + :return: None + """ + cloud_prefix = "cloud-" if library.cloud_api else "" + library_name = get_library_name(library) + distribution_name = ( + library.distribution_name + if library.distribution_name + else f"{library.group_id}:google-{cloud_prefix}{library_name}" + ) + distribution_name_short = re.split(r"[:/]", distribution_name)[-1] + repo = ( + "googleapis/google-cloud-java" if is_monorepo else f"{language}-{library_name}" + ) + api_id = ( + library.api_id if library.api_id else f"{library.api_shortname}.googleapis.com" + ) + client_documentation = ( + library.client_documentation + if library.client_documentation + else f"https://cloud.google.com/{language}/docs/reference/{distribution_name_short}/latest/overview" + ) + + # The mapping is needed because transport in .repo-metadata.json + # is one of grpc, http and both, + if transport == "grpc": + converted_transport = "grpc" + elif transport == "rest": + converted_transport = "http" + else: + converted_transport = "both" + repo_metadata = { + "api_shortname": library.api_shortname, + "name_pretty": library.name_pretty, + "product_documentation": library.product_documentation, + "api_description": library.api_description, + "client_documentation": client_documentation, + "release_level": library.release_level, + "transport": converted_transport, + "language": language, + "repo": repo, + "repo_short": f"{language}-{library_name}", + "distribution_name": distribution_name, + "api_id": api_id, + "library_type": library.library_type, + "requires_billing": library.requires_billing, + } -"""Deletes a file or folder if it exists. 
+ if library.api_reference: + repo_metadata["api_reference"] = library.api_reference + if library.issue_tracker: + repo_metadata["issue_tracker"] = library.issue_tracker + if library.rest_documentation: + repo_metadata["rest_documentation"] = library.rest_documentation + if library.rpc_documentation: + repo_metadata["rpc_documentation"] = library.rpc_documentation - Args: - path: The path to the file or folder. -""" -def delete_if_exists(path: str): - if os.path.isfile(path): # Check if it's a file - os.remove(path) - print(f"File deleted: {path}") - elif os.path.isdir(path): # Check if it's a directory - shutil.rmtree(path) - print(f"Folder deleted: {path}") - else: - print(f"Path does not exist: {path}") + # generate .repo-meta.json + json_file = ".repo-metadata.json" + # .repo-metadata.json is removed before generating the first version of + # a library. This check ensures no duplicated generation. + if not os.path.exists(f"{library_path}/{json_file}"): + with open(f"{library_path}/{json_file}", "w") as fp: + json.dump(repo_metadata, fp, indent=2) -def main(argv: Sequence[str]) -> None: - if len(argv) < 1: - raise ValueError('Usage: python generate_composed_library_args.py function_name arg1...argN') + # generate .OwlBot.yaml + yaml_file = ".OwlBot.yaml" + if not os.path.exists(f"{library_path}/{yaml_file}"): + __render( + template_name="owlbot.yaml.monorepo.j2", + output_name=f"{library_path}/{yaml_file}", + artifact_name=distribution_name_short, + proto_path=remove_version_from(proto_path), + module_name=repo_metadata["repo_short"], + api_shortname=library.api_shortname, + ) - function_name = argv[1] - arguments = argv[2:] - try: - function = getattr(sys.modules[__name__], function_name) - print(function(*arguments)) - except AttributeError: - print(f'function name "{function_name}" not found in utilities.py') - sys.exit(1) + # generate owlbot.py + py_file = "owlbot.py" + if not os.path.exists(f"{library_path}/{py_file}"): + template_excludes = [ + ".github/*", + ".kokoro/*", + "samples/*", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.md", + "LICENSE", + "SECURITY.md", + "java.header", + "license-checks.xml", + "renovate.json", + ".gitignore", + ] + __render( + template_name="owlbot.py.j2", + output_name=f"{library_path}/{py_file}", + should_include_templates=True, + template_excludes=template_excludes, + ) +def repo_level_post_process( + repository_path: str, + versions_file: str, +) -> None: + """ + Perform repository level post-processing + :param repository_path: the path of the repository + :param versions_file: the versions_txt contains version of modules + :return: None + """ + print("Regenerating root pom.xml") + modules = __search_for_java_modules(repository_path) + __render( + template_name="root-pom.xml.j2", + output_name=f"{repository_path}/pom.xml", + modules=modules, + ) + print("Regenerating gapic-libraries-bom") + bom_configs = __search_for_bom_artifact(repository_path) + monorepo_version = get_version_from( + versions_file=versions_file, + artifact_id="google-cloud-java", + ) + __render( + template_name="gapic-libraries-bom.xml.j2", + output_name=f"{repository_path}/gapic-libraries-bom/pom.xml", + monorepo_version=monorepo_version, + bom_configs=bom_configs, + ) -if __name__ == "__main__": - main(sys.argv) +def get_version_from( + versions_file: str, artifact_id: str, is_released: bool = False +) -> str: + """ + Get version of a given artifact from versions.txt + :param versions_file: the path of version.txt + :param artifact_id: the artifact id + :param is_released: 
whether to return the released or the current version + :return: the version of the artifact + """ + index = 1 if is_released else 2 + with open(versions_file, "r") as f: + for line in f.readlines(): + if artifact_id in line: + return line.split(":")[index].strip() diff --git a/library_generation/utilities.sh b/library_generation/utilities.sh index 965ed1fa0a..f0bdaeee01 100755 --- a/library_generation/utilities.sh +++ b/library_generation/utilities.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -set -xeo pipefail +set -eo pipefail utilities_script_dir=$(dirname "$(realpath "${BASH_SOURCE[0]}")") # Utility functions used in `generate_library.sh` and showcase generation.
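
As a companion to the versions.txt golden added under test_repo_level_postprocess and the get_version_from helper above, here is a minimal, illustrative sketch of how the "module:released-version:current-version" format can be parsed; the parse_versions helper and the __main__ usage are hypothetical and only mirror the line-by-line lookup that library_generation/utilities.py performs.

# Illustrative sketch only: parse a versions.txt file whose data lines follow
# "artifact-id:released-version:current-version", skipping comment lines, and
# build a lookup table comparable to what get_version_from() reads.
from typing import Dict, Tuple


def parse_versions(versions_file: str) -> Dict[str, Tuple[str, str]]:
    """Return {artifact_id: (released_version, current_version)}."""
    versions: Dict[str, Tuple[str, str]] = {}
    with open(versions_file, "r") as f:
        for raw_line in f:
            line = raw_line.strip()
            # skip the "# Format:" header and blank lines
            if not line or line.startswith("#"):
                continue
            artifact_id, released, current = line.split(":")
            versions[artifact_id] = (released, current)
    return versions


if __name__ == "__main__":
    # Hypothetical run against the golden file added in this change,
    # assuming the working directory is the repository root.
    parsed = parse_versions(
        "library_generation/test/resources/test_repo_level_postprocess/versions.txt"
    )
    print(parsed["google-cloud-java"])  # ('1.28.0', '1.29.0-SNAPSHOT')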
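
Similarly, a hedged usage sketch of two helpers defined in library_generation/utilities.py; the expected values are taken from the unit tests above, and the script assumes it is run from the repository root with the Python dependencies from requirements.in installed (the module imports lxml and jinja2 at import time).

# Illustrative usage only; not part of the change itself.
from library_generation import utilities as util

# remove_version_from() strips a trailing "v<digit>..." segment when present
# and otherwise returns the proto path unchanged.
assert util.remove_version_from("google/cloud/aiplatform/v1") == "google/cloud/aiplatform"
assert util.remove_version_from("google/cloud/aiplatform") == "google/cloud/aiplatform"

# get_version_from() reads versions.txt and returns the current version by
# default, or the released version when is_released=True.
versions_file = "library_generation/test/resources/misc/versions.txt"
print(util.get_version_from(versions_file, "gax-grpc"))        # 2.33.1-SNAPSHOT
print(util.get_version_from(versions_file, "gax-grpc", True))  # 2.34.0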
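
Finally, a small sketch of the transport mapping that generate_prerequisite_files applies when writing .repo-metadata.json (gRPC-only stays "grpc", REST-only becomes "http", everything else becomes "both"); the helper name below is invented purely for illustration.

def to_repo_metadata_transport(transport: str) -> str:
    # Hypothetical helper mirroring the inline mapping in
    # generate_prerequisite_files(): .repo-metadata.json expects
    # one of "grpc", "http" or "both".
    if transport == "grpc":
        return "grpc"
    if transport == "rest":
        return "http"
    return "both"


assert to_repo_metadata_transport("grpc") == "grpc"
assert to_repo_metadata_transport("rest") == "http"
assert to_repo_metadata_transport("grpc+rest") == "both"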