diff --git a/.github/workflows/verify_library_generation.yaml b/.github/workflows/verify_library_generation.yaml
index 521f586c3d..cdfde24b98 100644
--- a/.github/workflows/verify_library_generation.yaml
+++ b/.github/workflows/verify_library_generation.yaml
@@ -12,7 +12,7 @@ jobs:
integration_tests:
strategy:
matrix:
- java: [ 8 ]
+ java: [ 11 ]
os: [ ubuntu-22.04, macos-12 ]
post_processing: [ 'true', 'false' ]
runs-on: ${{ matrix.os }}
@@ -26,8 +26,23 @@ jobs:
- uses: actions/setup-python@v4
with:
python-version: '3.11'
+ - name: install pyenv
+ shell: bash
+ run: |
+ set -ex
+ curl https://pyenv.run | bash
+ # setup environment
+ export PYENV_ROOT="$HOME/.pyenv"
+ export PATH="$PYENV_ROOT/bin:$PATH"
+ echo "PYENV_ROOT=${PYENV_ROOT}" >> $GITHUB_ENV
+ echo "PATH=${PATH}" >> $GITHUB_ENV
+ # init pyenv
+ eval "$(pyenv init --path)"
+ eval "$(pyenv init -)"
+ set +ex
- name: install docker (ubuntu)
if: matrix.os == 'ubuntu-22.04'
+ shell: bash
run: |
set -x
# install docker
@@ -36,17 +51,11 @@ jobs:
# launch docker
sudo systemctl start docker
- - name: install docker (macos)
- if: matrix.os == 'macos-12'
- run: |
- brew update --preinstall
- brew install docker docker-compose qemu
- brew upgrade qemu
- colima start
- docker run --user $(id -u):$(id -g) --rm hello-world
- name: Run integration tests
+ # we don't run ITs with postprocessing on macos because one of its dependencies "synthtool" is designed to run on linux only
+ if: matrix.os == 'ubuntu-22.04' || matrix.post_processing == 'false'
+ shell: bash
run: |
- set -x
git config --global user.email "github-workflow@github.com"
git config --global user.name "Github Workflow"
library_generation/test/generate_library_integration_test.sh \
diff --git a/library_generation/README.md b/library_generation/README.md
index c2e9826b5b..e3b7e7d56c 100644
--- a/library_generation/README.md
+++ b/library_generation/README.md
@@ -1,4 +1,4 @@
-# Generate GAPIC Client Library without post-processing
+# Generate GAPIC Client Library with and without post-processing
The script, `generate_library.sh`, allows you to generate a GAPIC client library from proto files.
@@ -28,6 +28,12 @@ This repository will be the source of truth for pre-existing
pom.xml files, owlbot.py and .OwlBot.yaml files. See the option belows for
custom postprocessed generations (e.g. custom `versions.txt` file).
+Post-processing makes use of python scripts. The script will automatically use
+`pyenv` to select the python version specified in
+`library_generation/configuration/python-version`. Pyenv is therefore a
+requirement in the environment.
+
+
## Parameters to run `generate_library.sh`
You need to run the script with the following parameters.
@@ -225,3 +231,26 @@ library_generation/generate_library.sh \
--versions_file "path/to/versions.txt" \
--include_samples true
```
+
+# Owlbot Java Postprocessor
+
+We have transferred the
+[implementation](https://github.com/googleapis/synthtool/tree/59fe44fde9866a26e7ee4e4450fd79f67f8cf599/docker/owlbot/java)
+of Java Owlbot Postprocessor into `sdk-platform-java/library_generation`. The
+implementation in synthtool is still valid and used by other services, so we
+have two versions during a transition period.
+
+## Reflecting changes in synthtool/docker/owlbot/java into this repository
+The transfer was not a verbatim copy; rather, it had modifications:
+ * `format-source.sh` was replaced by a call to `mvn fmt:format`
+ * `entrypoint.sh` was modified to have input arguments and slightly modified
+ the way the helper scripts are called
+ * Other helper scripts were modified to have input arguments.
+ * `fix-poms.py` modified the way the monorepo is detected
+
+All these modifications imply that whenever we want to reflect a change from the
+original owlbot in synthtool we may be better off modifying the affected source
+files one by one. The mapping is from
+[`synthtool/docker/owlbot/java`](https://github.com/googleapis/synthtool/tree/59fe44fde9866a26e7ee4e4450fd79f67f8cf599/docker/owlbot/java)
+to
+[`sdk-platform-java/library_generation/owlbot`](https://github.com/googleapis/sdk-platform-java/tree/move-java-owlbot/library_generation/owlbot)
diff --git a/library_generation/configuration/python-version b/library_generation/configuration/python-version
new file mode 100644
index 0000000000..1e33456831
--- /dev/null
+++ b/library_generation/configuration/python-version
@@ -0,0 +1 @@
+3.11.2
diff --git a/library_generation/configuration/synthtool-commitish b/library_generation/configuration/synthtool-commitish
new file mode 100644
index 0000000000..5603b9055b
--- /dev/null
+++ b/library_generation/configuration/synthtool-commitish
@@ -0,0 +1 @@
+59fe44fde9866a26e7ee4e4450fd79f67f8cf599
diff --git a/library_generation/generate_library.sh b/library_generation/generate_library.sh
index 7df2b226e9..e3793b020f 100755
--- a/library_generation/generate_library.sh
+++ b/library_generation/generate_library.sh
@@ -133,7 +133,13 @@ if [ -z "${os_architecture}" ]; then
os_architecture=$(detect_os_architecture)
fi
+temp_destination_path="${output_folder}/temp_preprocessed"
mkdir -p "${output_folder}/${destination_path}"
+if [ -d "${temp_destination_path}" ]; then
+ # we don't want the preprocessed sources of a previous run
+ rm -rd "${temp_destination_path}"
+fi
+mkdir -p "${temp_destination_path}"
##################### Section 0 #####################
# prepare tooling
#####################################################
@@ -185,14 +191,14 @@ download_tools "${gapic_generator_version}" "${protobuf_version}" "${grpc_versio
if [[ ! "${transport}" == "rest" ]]; then
# do not need to generate grpc-* if the transport is `rest`.
"${protoc_path}"/protoc "--plugin=protoc-gen-rpc-plugin=protoc-gen-grpc-java-${grpc_version}-${os_architecture}.exe" \
- "--rpc-plugin_out=:${destination_path}/java_grpc.jar" \
+ "--rpc-plugin_out=:${temp_destination_path}/java_grpc.jar" \
${proto_files} # Do not quote because this variable should not be treated as one long string.
# unzip java_grpc.jar to grpc-*/src/main/java
- unzip_src_files "grpc"
+ unzip_src_files "grpc" "${temp_destination_path}"
# remove empty files in grpc-*/src/main/java
- remove_empty_files "grpc"
+ remove_empty_files "grpc" "${temp_destination_path}"
# remove grpc version in *ServiceGrpc.java file so the content is identical with bazel build.
- remove_grpc_version
+ remove_grpc_version "${temp_destination_path}"
fi
###################### Section 2 #####################
## generate gapic-*/, part of proto-*/, samples/
@@ -200,15 +206,15 @@ fi
if [[ "${proto_only}" == "false" ]]; then
"$protoc_path"/protoc --experimental_allow_proto3_optional \
"--plugin=protoc-gen-java_gapic=${script_dir}/gapic-generator-java-wrapper" \
- "--java_gapic_out=metadata:${destination_path}/java_gapic_srcjar_raw.srcjar.zip" \
+ "--java_gapic_out=metadata:${temp_destination_path}/java_gapic_srcjar_raw.srcjar.zip" \
"--java_gapic_opt=$(get_gapic_opts "${transport}" "${rest_numeric_enums}" "${gapic_yaml}" "${service_config}" "${service_yaml}")" \
${proto_files} ${gapic_additional_protos}
- unzip -o -q "${destination_path}/java_gapic_srcjar_raw.srcjar.zip" -d "${destination_path}"
+ unzip -o -q "${temp_destination_path}/java_gapic_srcjar_raw.srcjar.zip" -d "${temp_destination_path}"
# Sync'\''d to the output file name in Writer.java.
- unzip -o -q "${destination_path}/temp-codegen.srcjar" -d "${destination_path}/java_gapic_srcjar"
+ unzip -o -q "${temp_destination_path}/temp-codegen.srcjar" -d "${temp_destination_path}/java_gapic_srcjar"
# Resource name source files.
- proto_dir=${destination_path}/java_gapic_srcjar/proto/src/main/java
+ proto_dir=${temp_destination_path}/java_gapic_srcjar/proto/src/main/java
if [ ! -d "${proto_dir}" ]; then
# Some APIs don't have resource name helpers, like BigQuery v2.
# Create an empty file so we can finish building. Gating the resource name rule definition
@@ -218,14 +224,14 @@ if [[ "${proto_only}" == "false" ]]; then
touch "${proto_dir}"/PlaceholderFile.java
fi
# move java_gapic_srcjar/src/main to gapic-*/src.
- mv_src_files "gapic" "main"
+ mv_src_files "gapic" "main" "${temp_destination_path}"
# remove empty files in gapic-*/src/main/java
- remove_empty_files "gapic"
+ remove_empty_files "gapic" "${temp_destination_path}"
# move java_gapic_srcjar/src/test to gapic-*/src
- mv_src_files "gapic" "test"
+ mv_src_files "gapic" "test" "${temp_destination_path}"
if [ "${include_samples}" == "true" ]; then
# move java_gapic_srcjar/samples/snippets to samples/snippets
- mv_src_files "samples" "main"
+ mv_src_files "samples" "main" "${temp_destination_path}"
fi
fi
##################### Section 3 #####################
@@ -247,16 +253,16 @@ case "${proto_path}" in
proto_files="${proto_files//${removed_proto}/}"
;;
esac
-"$protoc_path"/protoc "--java_out=${destination_path}/java_proto.jar" ${proto_files}
+"$protoc_path"/protoc "--java_out=${temp_destination_path}/java_proto.jar" ${proto_files}
if [[ "${proto_only}" == "false" ]]; then
# move java_gapic_srcjar/proto/src/main/java (generated resource name helper class)
# to proto-*/src/main
- mv_src_files "proto" "main"
+ mv_src_files "proto" "main" "${temp_destination_path}"
fi
# unzip java_proto.jar to proto-*/src/main/java
-unzip_src_files "proto"
+unzip_src_files "proto" "${temp_destination_path}"
# remove empty files in proto-*/src/main/java
-remove_empty_files "proto"
+remove_empty_files "proto" "${temp_destination_path}"
case "${proto_path}" in
"google/cloud/aiplatform/v1beta1"*)
prefix="google/cloud/aiplatform/v1beta1/schema"
@@ -282,14 +288,14 @@ for proto_src in ${proto_files}; do
if [[ "${proto_src}" == "google/cloud/common/operation_metadata.proto" ]]; then
continue
fi
- mkdir -p "${destination_path}/proto-${folder_name}/src/main/proto"
- rsync -R "${proto_src}" "${destination_path}/proto-${folder_name}/src/main/proto"
+ mkdir -p "${temp_destination_path}/proto-${folder_name}/src/main/proto"
+ rsync -R "${proto_src}" "${temp_destination_path}/proto-${folder_name}/src/main/proto"
done
popd # output_folder
##################### Section 4 #####################
# rm tar files
#####################################################
-pushd "${output_folder}/${destination_path}"
+pushd "${temp_destination_path}"
rm -rf java_gapic_srcjar java_gapic_srcjar_raw.srcjar.zip java_grpc.jar java_proto.jar temp-codegen.srcjar
popd # destination path
##################### Section 5 #####################
@@ -298,6 +304,8 @@ popd # destination path
if [ "${enable_postprocessing}" != "true" ];
then
echo "post processing is disabled"
+ cp -r ${temp_destination_path}/* "${output_folder}/${destination_path}"
+ rm -rdf "${temp_destination_path}"
exit 0
fi
if [ -z "${versions_file}" ];then
@@ -311,21 +319,13 @@ fi
mkdir -p "${workspace}"
+# if destination_path is not empty, it will be used as a starting workspace for
+# postprocessing
+if [[ $(find "${output_folder}/${destination_path}" -mindepth 1 -maxdepth 1 -type d,f | wc -l) -gt 0 ]];then
+ workspace="${output_folder}/${destination_path}"
+fi
+
bash -x "${script_dir}/postprocess_library.sh" "${workspace}" \
- "${script_dir}" \
- "${destination_path}" \
- "${proto_path}" \
- "${versions_file}" \
- "${output_folder}"
+ "${temp_destination_path}" \
+ "${versions_file}"
-# for post-procesed libraries, remove pre-processed folders
-pushd "${output_folder}/${destination_path}"
-rm -rdf "proto-${folder_name}"
-rm -rdf "grpc-${folder_name}"
-rm -rdf "gapic-${folder_name}"
-if [ "${include_samples}" == "false" ]; then
- rm -rdf "samples"
-fi
-popd # output_folder
-# move contents of the post-processed library into destination_path
-cp -r ${workspace}/* "${output_folder}/${destination_path}"
diff --git a/library_generation/owlbot/bin/entrypoint.sh b/library_generation/owlbot/bin/entrypoint.sh
new file mode 100755
index 0000000000..f483f98cfd
--- /dev/null
+++ b/library_generation/owlbot/bin/entrypoint.sh
@@ -0,0 +1,113 @@
+#!/bin/bash
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is the entrypoint script for java owlbot. This is not intended to be
+# called directly but rather to be called from postprocess_library.sh
+# For reference, the positional arguments are
+# 1: scripts_root: location of postprocess_library.sh
+# 2: versions_file: points to a versions.txt containing versions to be applied
+# both to README and pom.xml files
+
+# The scripts assumes the CWD is the folder where postprocessing is going to be
+# applied
+
+set -ex
+scripts_root=$1
+versions_file=$2
+
+# Runs template and etc in current working directory
+function processModule() {
+ # templates as well as retrieving files from owl-bot-staging
+ echo "Generating templates and retrieving files from owl-bot-staging directory..."
+ if [ -f "owlbot.py" ]
+ then
+ # defaults to run owlbot.py
+ python3 owlbot.py
+ fi
+ echo "...done"
+
+ # write or restore pom.xml files
+ echo "Generating missing pom.xml..."
+ python3 "${scripts_root}/owlbot/src/fix-poms.py" "${versions_file}" "true"
+ echo "...done"
+
+ # write or restore clirr-ignored-differences.xml
+ echo "Generating clirr-ignored-differences.xml..."
+ ${scripts_root}/owlbot/bin/write_clirr_ignore.sh "${scripts_root}"
+ echo "...done"
+
+ # fix license headers
+ echo "Fixing missing license headers..."
+ python3 "${scripts_root}/owlbot/src/fix-license-headers.py"
+ echo "...done"
+
+ # TODO: re-enable this once we resolve thrashing
+ # restore license headers years
+ # echo "Restoring copyright years..."
+ # /owlbot/bin/restore_license_headers.sh
+ # echo "...done"
+
+ # ensure formatting on all .java files in the repository
+ echo "Reformatting source..."
+ mvn fmt:format
+ echo "...done"
+}
+
+if [ "$(ls */.OwlBot.yaml|wc -l)" -gt 1 ];then
+ # Monorepo (googleapis/google-cloud-java) has multiple OwlBot.yaml config
+ # files in the modules.
+ echo "Processing monorepo"
+ if [ -d owl-bot-staging ]; then
+ # The content of owl-bot-staging is controlled by Owlbot.yaml files in
+ # each module in the monorepo
+ echo "Extracting contents from owl-bot-staging"
+ for module in owl-bot-staging/* ; do
+ if [ ! -d "$module" ]; then
+ continue
+ fi
+ # This relocation allows us continue to use owlbot.py without modification
+ # after monorepo migration.
+ mv "owl-bot-staging/$module" "$module/owl-bot-staging"
+ pushd "$module"
+ processModule
+ popd
+ done
+ rm -r owl-bot-staging
+ else
+ echo "In monorepo but no owl-bot-staging." \
+ "Formatting changes in the last commit"
+ # Find the files that were touched by the last commit.
+ last_commit=$(git log -1 --format=%H)
+ # [A]dded, [C]reated, [M]odified, and [R]enamed
+ changed_files=$(git show --name-only --no-renames --diff-filter=ACMR \
+ "${last_commit}")
+ changed_modules=$(echo "$changed_files" |grep -E '.java$' |cut -d '/' -f 1 \
+ |sort -u)
+ for module in ${changed_modules}; do
+ if [ ! -f "$module/.OwlBot.yaml" ]; then
+ # Changes irrelevant to Owlbot-generated module (such as .github) do not
+ # need formatting
+ continue
+ fi
+ pushd "$module"
+ processModule
+ popd
+ done
+ fi
+else
+ # Split repository
+ echo "Processing a split repo"
+ processModule
+fi
diff --git a/library_generation/owlbot/bin/write_clirr_ignore.sh b/library_generation/owlbot/bin/write_clirr_ignore.sh
new file mode 100755
index 0000000000..d6925ef354
--- /dev/null
+++ b/library_generation/owlbot/bin/write_clirr_ignore.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+scripts_root=$1
+templates_dir=$(realpath $(dirname "${BASH_SOURCE[0]}")/../templates/clirr)
+
+# restore default clirr-ignored-differences.xml for protos if the file does not exist
+for dir in `ls -d proto-google-*`
+do
+ if [ ! -f "${dir}/clirr-ignored-differences.xml" ]
+ then
+ tmp_dir=$(mktemp -d -t ci-XXXXXXXXXX)
+ pushd ${dir}
+ pushd src/main/java
+ find * -name *OrBuilder.java | xargs dirname | sort -u | jq -Rns ' (inputs | rtrimstr("\n") | split("\n") ) as $data | {proto_paths: $data}' > ${tmp_dir}/paths.json
+ popd
+ python3 "${scripts_root}/owlbot/src/gen-template.py" --data=${tmp_dir}/paths.json --folder=${templates_dir}
+ popd
+ fi
+done
diff --git a/library_generation/owlbot/src/fix-license-headers.py b/library_generation/owlbot/src/fix-license-headers.py
new file mode 100644
index 0000000000..50f9f7fce0
--- /dev/null
+++ b/library_generation/owlbot/src/fix-license-headers.py
@@ -0,0 +1,30 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+import glob
+from synthtool.languages import java
+
+
+root = Path(".").resolve()
+
+# Until the generator generates license headers on generated proto
+# classes, add the license headers in
+for path in glob.glob("proto-google-*"):
+ java.fix_proto_headers(root / path)
+
+# Until the generator generates license headers on generated grpc
+# classes, add the license headers in
+for path in glob.glob("grpc-google-*"):
+ java.fix_grpc_headers(root / path, "unused")
diff --git a/library_generation/owlbot/src/fix-poms.py b/library_generation/owlbot/src/fix-poms.py
new file mode 100644
index 0000000000..e4617a5085
--- /dev/null
+++ b/library_generation/owlbot/src/fix-poms.py
@@ -0,0 +1,528 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import glob
+import inspect
+import itertools
+import json
+from lxml import etree
+import os
+import re
+from typing import List, Mapping
+from poms import module, templates
+
+
+def load_versions(filename: str, default_group_id: str) -> Mapping[str, module.Module]:
+ if not os.path.isfile(filename):
+ return {}
+ modules = {}
+ with open(filename, "r") as fp:
+ for line in fp:
+ line = line.strip()
+ if line.startswith("#"):
+ continue
+
+ parts = line.split(":")
+ if len(parts) == 3:
+ artifact_id = parts[0]
+ group_id = (
+ default_group_id
+ if artifact_id.startswith("google-")
+ else _proto_group_id(default_group_id)
+ )
+ modules[artifact_id] = module.Module(
+ group_id=group_id,
+ artifact_id=artifact_id,
+ release_version=parts[1],
+ version=parts[2],
+ )
+ return modules
+
+
+def _find_dependency_index(dependencies, group_id, artifact_id) -> int:
+ try:
+ return next(
+ i
+ for i, x in enumerate(dependencies.getchildren())
+ if _dependency_matches(x, group_id, artifact_id)
+ )
+ except StopIteration:
+ return -1
+
+
+def _dependency_matches(node, group_id, artifact_id) -> bool:
+ artifact_node = node.find("{http://maven.apache.org/POM/4.0.0}artifactId")
+ group_node = node.find("{http://maven.apache.org/POM/4.0.0}groupId")
+
+ if artifact_node is None or group_node is None:
+ return False
+
+ return artifact_node.text.startswith(artifact_id) and group_node.text.startswith(
+ group_id
+ )
+
+
+def _is_cloud_client(existing_modules: List[module.Module]) -> bool:
+ proto_modules_len = 0
+ grpc_modules_len = 0
+ for artifact in existing_modules:
+ if artifact.startswith("proto-"):
+ proto_modules_len += 1
+ if artifact.startswith("grpc-"):
+ grpc_modules_len += 1
+ return proto_modules_len > 0 or grpc_modules_len > 0
+
+
+def update_cloud_pom(
+ filename: str, proto_modules: List[module.Module], grpc_modules: List[module.Module]
+):
+ tree = etree.parse(filename)
+ root = tree.getroot()
+ dependencies = root.find("{http://maven.apache.org/POM/4.0.0}dependencies")
+
+ existing_dependencies = [
+ m.find("{http://maven.apache.org/POM/4.0.0}artifactId").text
+ for m in dependencies
+ if m.find("{http://maven.apache.org/POM/4.0.0}artifactId") is not None
+ ]
+
+ try:
+ grpc_index = _find_dependency_index(
+ dependencies, "com.google.api.grpc", "grpc-"
+ )
+ except StopIteration:
+ grpc_index = _find_dependency_index(dependencies, "junit", "junit")
+ # insert grpc dependencies after junit
+ for m in grpc_modules:
+ if m.artifact_id not in existing_dependencies:
+ print(f"adding new test dependency {m.artifact_id}")
+ new_dependency = etree.Element(
+ "{http://maven.apache.org/POM/4.0.0}dependency"
+ )
+ new_dependency.tail = "\n "
+ new_dependency.text = "\n "
+ new_group = etree.Element("{http://maven.apache.org/POM/4.0.0}groupId")
+ new_group.text = m.group_id
+ new_group.tail = "\n "
+ new_artifact = etree.Element(
+ "{http://maven.apache.org/POM/4.0.0}artifactId"
+ )
+ new_artifact.text = m.artifact_id
+ new_artifact.tail = "\n "
+ new_scope = etree.Element("{http://maven.apache.org/POM/4.0.0}scope")
+ new_scope.text = "test"
+ new_scope.tail = "\n "
+ new_dependency.append(new_group)
+ new_dependency.append(new_artifact)
+ new_dependency.append(new_scope)
+ dependencies.insert(grpc_index + 1, new_dependency)
+
+ try:
+ proto_index = _find_dependency_index(
+ dependencies, "com.google.api.grpc", "proto-"
+ )
+ except StopIteration:
+ print("after protobuf")
+ proto_index = _find_dependency_index(
+ dependencies, "com.google.protobuf", "protobuf-java"
+ )
+ # insert proto dependencies after protobuf-java
+ for m in proto_modules:
+ if m.artifact_id not in existing_dependencies:
+ if re.match(r"proto-.*-v\d+.*", m.artifact_id):
+ print(f"adding new dependency {m.artifact_id}")
+ new_dependency = etree.Element(
+ "{http://maven.apache.org/POM/4.0.0}dependency"
+ )
+ new_dependency.tail = "\n "
+ new_dependency.text = "\n "
+ new_group = etree.Element("{http://maven.apache.org/POM/4.0.0}groupId")
+ new_group.text = m.group_id
+ new_group.tail = "\n "
+ new_artifact = etree.Element(
+ "{http://maven.apache.org/POM/4.0.0}artifactId"
+ )
+ new_artifact.text = m.artifact_id
+ new_artifact.tail = "\n "
+ new_dependency.append(new_group)
+ new_dependency.append(new_artifact)
+ dependencies.insert(proto_index + 1, new_dependency)
+
+ tree.write(filename, pretty_print=True, xml_declaration=True, encoding="utf-8")
+
+
+def update_parent_pom(filename: str, modules: List[module.Module]):
+ tree = etree.parse(filename)
+ root = tree.getroot()
+
+ # BEGIN: update modules
+ existing = root.find("{http://maven.apache.org/POM/4.0.0}modules")
+
+ module_names = [m.artifact_id for m in modules]
+ extra_modules = [
+ m.text for i, m in enumerate(existing) if m.text not in module_names
+ ]
+
+ modules_to_write = module_names + extra_modules
+ num_modules = len(modules_to_write)
+
+ existing.clear()
+ existing.text = "\n "
+ for index, m in enumerate(modules_to_write):
+ new_module = etree.Element("{http://maven.apache.org/POM/4.0.0}module")
+ new_module.text = m
+ if index == num_modules - 1:
+ new_module.tail = "\n "
+ else:
+ new_module.tail = "\n "
+ existing.append(new_module)
+
+ existing.tail = "\n\n "
+ # END: update modules
+
+ # BEGIN: update versions in dependencyManagement
+ dependencies = root.find(
+ "{http://maven.apache.org/POM/4.0.0}dependencyManagement"
+ ).find("{http://maven.apache.org/POM/4.0.0}dependencies")
+
+ existing_dependencies = [
+ m.find("{http://maven.apache.org/POM/4.0.0}artifactId").text
+ for m in dependencies
+ if m.find("{http://maven.apache.org/POM/4.0.0}artifactId") is not None
+ ]
+ insert_index = 1
+
+ num_modules = len(modules)
+
+ for index, m in enumerate(modules):
+ if m.artifact_id in existing_dependencies:
+ continue
+
+ new_dependency = etree.Element("{http://maven.apache.org/POM/4.0.0}dependency")
+ new_dependency.tail = "\n "
+ new_dependency.text = "\n "
+ new_group = etree.Element("{http://maven.apache.org/POM/4.0.0}groupId")
+ new_group.text = m.group_id
+ new_group.tail = "\n "
+ new_artifact = etree.Element("{http://maven.apache.org/POM/4.0.0}artifactId")
+ new_artifact.text = m.artifact_id
+ new_artifact.tail = "\n "
+ new_version = etree.Element("{http://maven.apache.org/POM/4.0.0}version")
+ new_version.text = m.version
+ comment = etree.Comment(" {x-version-update:" + m.artifact_id + ":current} ")
+ comment.tail = "\n "
+ new_dependency.append(new_group)
+ new_dependency.append(new_artifact)
+ new_dependency.append(new_version)
+ new_dependency.append(comment)
+ new_dependency.tail = "\n "
+ dependencies.insert(1, new_dependency)
+
+ # END: update versions in dependencyManagement
+
+ tree.write(filename, pretty_print=True, xml_declaration=True, encoding="utf-8")
+
+
+def update_bom_pom(filename: str, modules: List[module.Module]):
+ tree = etree.parse(filename)
+ root = tree.getroot()
+ existing = root.find(
+ "{http://maven.apache.org/POM/4.0.0}dependencyManagement"
+ ).find("{http://maven.apache.org/POM/4.0.0}dependencies")
+
+ num_modules = len(modules)
+
+ existing.clear()
+ existing.text = "\n "
+ for index, m in enumerate(modules):
+ new_dependency = etree.Element("{http://maven.apache.org/POM/4.0.0}dependency")
+ new_dependency.tail = "\n "
+ new_dependency.text = "\n "
+ new_group = etree.Element("{http://maven.apache.org/POM/4.0.0}groupId")
+ new_group.text = m.group_id
+ new_group.tail = "\n "
+ new_artifact = etree.Element("{http://maven.apache.org/POM/4.0.0}artifactId")
+ new_artifact.text = m.artifact_id
+ new_artifact.tail = "\n "
+ new_version = etree.Element("{http://maven.apache.org/POM/4.0.0}version")
+ new_version.text = m.version
+ comment = etree.Comment(" {x-version-update:" + m.artifact_id + ":current} ")
+ comment.tail = "\n "
+ new_dependency.append(new_group)
+ new_dependency.append(new_artifact)
+ new_dependency.append(new_version)
+ new_dependency.append(comment)
+
+ if index == num_modules - 1:
+ new_dependency.tail = "\n "
+ else:
+ new_dependency.tail = "\n "
+ existing.append(new_dependency)
+
+ existing.tail = "\n "
+
+ tree.write(filename, pretty_print=True, xml_declaration=True, encoding="utf-8")
+
+
+# When generating non-cloud client library, the group id of proto/grpc artifacts
+# is prefixed with `{main_artifact_group_id}.api.grpc`, rather than
+# `com.google.api.grpc`.
+# https://github.com/googleapis/google-cloud-java/issues/9125
+# However, some exceptions are com.google.area120 and com.google.analytics.
+# https://github.com/googleapis/google-cloud-java/issues/9304
+def _proto_group_id(main_artifact_group_id: str) -> str:
+ prefix = "com.google"
+ list_of_group_id = ["com.google.cloud",
+ "com.google.area120",
+ "com.google.analytics"]
+ if main_artifact_group_id not in list_of_group_id:
+ prefix = main_artifact_group_id
+ return f"{prefix}.api.grpc"
+
+
+def main(versions_file, monorepo):
+ print(f"working directory: {os.getcwd()}")
+ with open(".repo-metadata.json", "r") as fp:
+ repo_metadata = json.load(fp)
+ group_id, artifact_id = repo_metadata["distribution_name"].split(":")
+ name = repo_metadata["name_pretty"]
+ existing_modules = load_versions(versions_file, group_id)
+ print(f"monorepo? {monorepo}")
+
+    # extra modules that need to be managed in versions.txt
+ if "extra_versioned_modules" in repo_metadata:
+ extra_managed_modules = repo_metadata["extra_versioned_modules"].split(",")
+ else:
+ extra_managed_modules = ""
+
+    # list of modules to be excluded from being added to poms
+ if "excluded_dependencies" in repo_metadata:
+ excluded_dependencies_list = repo_metadata["excluded_dependencies"].split(",")
+ else:
+ excluded_dependencies_list = ""
+
+    # list of poms that have to be excluded from post-processing
+ if "excluded_poms" in repo_metadata:
+ excluded_poms_list = repo_metadata["excluded_poms"].split(",")
+ else:
+ excluded_poms_list = ""
+
+ # Missing Case 1: When this library ('java-XXX' module) is new.
+ if artifact_id not in existing_modules:
+ existing_modules[artifact_id] = module.Module(
+ group_id=group_id,
+ artifact_id=artifact_id,
+ version="0.0.1-SNAPSHOT",
+ release_version="0.0.0",
+ )
+ main_module = existing_modules[artifact_id]
+
+ # Artifact ID is part of distribution name field in .repo-metadata.json
+ if artifact_id in ["grafeas", "google-cloud-dns",
+ "google-cloud-notification", "google-iam-policy"]:
+ # There are special libraries that are not automatically generated
+ print(f"Skipping a special case library {artifact_id} that do not have "
+ " the standard module structure.")
+ return
+
+ parent_artifact_id = f"{artifact_id}-parent"
+
+ if parent_artifact_id not in existing_modules:
+ existing_modules[parent_artifact_id] = module.Module(
+ group_id=group_id,
+ artifact_id=parent_artifact_id,
+ version=main_module.version,
+ release_version=main_module.release_version,
+ )
+ parent_module = existing_modules[parent_artifact_id]
+
+ required_dependencies = {}
+ for dependency_module in existing_modules:
+ if dependency_module in excluded_dependencies_list:
+ continue
+ dep_artifact_id = existing_modules[dependency_module].artifact_id
+ if monorepo and not os.path.isdir(dep_artifact_id):
+ # In monorepo, existing_modules are loaded from the root
+ # versions.txt and thus includes irrelevant artifacts
+ continue
+ required_dependencies[dependency_module] = existing_modules[dependency_module]
+
+ # Missing Case 2: There's a new proto-XXX and grpc-XXX directory. It's a new
+ # version in the proto file to a library. Both a new library and existing
+ # library.
+ for path in glob.glob("proto-google-*"):
+ if not path in existing_modules:
+ existing_modules[path] = module.Module(
+ group_id=_proto_group_id(group_id),
+ artifact_id=path,
+ version=main_module.version,
+ release_version=main_module.release_version,
+ )
+ if path not in excluded_dependencies_list \
+ and path not in main_module.artifact_id:
+ required_dependencies[path] = module.Module(
+ group_id=_proto_group_id(group_id),
+ artifact_id=path,
+ version=main_module.version,
+ release_version=main_module.release_version,
+ )
+ if not os.path.isfile(f"{path}/pom.xml"):
+ print(f"creating missing proto pom: {path}")
+ templates.render(
+ template_name="proto_pom.xml.j2",
+ output_name=f"{path}/pom.xml",
+ module=required_dependencies[path],
+ parent_module=parent_module,
+ main_module=main_module,
+ )
+ if path not in excluded_dependencies_list \
+ and path not in main_module.artifact_id:
+ required_dependencies[path] = module.Module(
+ group_id=_proto_group_id(group_id),
+ artifact_id=path,
+ version=main_module.version,
+ release_version=main_module.release_version,
+ )
+
+ for path in glob.glob("grpc-google-*"):
+ if not path in existing_modules:
+ existing_modules[path] = module.Module(
+ group_id=_proto_group_id(group_id),
+ artifact_id=path,
+ version=main_module.version,
+ release_version=main_module.release_version,
+ )
+ if path not in excluded_dependencies_list \
+ and path not in main_module.artifact_id:
+ required_dependencies[path] = module.Module(
+ group_id=_proto_group_id(group_id),
+ artifact_id=path,
+ version=main_module.version,
+ release_version=main_module.release_version,
+ )
+
+ if not os.path.isfile(f"{path}/pom.xml"):
+ proto_artifact_id = path.replace("grpc-", "proto-")
+ print(f"creating missing grpc pom: {path}")
+ templates.render(
+ template_name="grpc_pom.xml.j2",
+ output_name=f"{path}/pom.xml",
+ module=required_dependencies[path],
+ parent_module=parent_module,
+ main_module=main_module,
+ proto_module=existing_modules[proto_artifact_id],
+ )
+ if path not in excluded_dependencies_list \
+ and path not in main_module.artifact_id:
+ required_dependencies[path] = module.Module(
+ group_id=_proto_group_id(group_id),
+ artifact_id=path,
+ version=main_module.version,
+ release_version=main_module.release_version,
+ )
+ proto_modules = [
+ module
+ for module in required_dependencies.values()
+ if module.artifact_id.startswith("proto-")
+ and module.artifact_id not in parent_artifact_id
+ ]
+ grpc_modules = [
+ module
+ for module in required_dependencies.values()
+ if module.artifact_id.startswith("grpc-") \
+ and module.artifact_id not in parent_artifact_id
+ ]
+ if main_module in grpc_modules or main_module in proto_modules:
+ modules = grpc_modules + proto_modules
+ else:
+ modules = [main_module] + grpc_modules + proto_modules
+
+ if not _is_cloud_client(existing_modules):
+ print("no proto or grpc modules - probably not a cloud client")
+ return
+
+ if os.path.isfile(f"{artifact_id}/pom.xml"):
+ print("updating modules in cloud pom.xml")
+ if artifact_id not in excluded_poms_list:
+ update_cloud_pom(f"{artifact_id}/pom.xml", proto_modules, grpc_modules)
+ elif artifact_id not in excluded_poms_list:
+ print("creating missing cloud pom.xml")
+ templates.render(
+ template_name="cloud_pom.xml.j2",
+ output_name=f"{artifact_id}/pom.xml",
+ module=main_module,
+ parent_module=parent_module,
+ repo=repo_metadata["repo"],
+ name=name,
+ description=repo_metadata["api_description"],
+ proto_modules=proto_modules,
+ grpc_modules=grpc_modules,
+ )
+
+ if os.path.isfile(f"{artifact_id}-bom/pom.xml"):
+ print("updating modules in bom pom.xml")
+ if artifact_id+"-bom" not in excluded_poms_list:
+ update_bom_pom(f"{artifact_id}-bom/pom.xml", modules)
+ elif artifact_id+"-bom" not in excluded_poms_list:
+ print("creating missing bom pom.xml")
+ templates.render(
+ template_name="bom_pom.xml.j2",
+ output_name=f"{artifact_id}-bom/pom.xml",
+ repo=repo_metadata["repo"],
+ name=name,
+ modules=modules,
+ main_module=main_module,
+ )
+
+ if os.path.isfile("pom.xml"):
+ print("updating modules in parent pom.xml")
+ update_parent_pom("pom.xml", modules)
+ else:
+ print("creating missing parent pom.xml")
+ templates.render(
+ template_name="parent_pom.xml.j2",
+ output_name="./pom.xml",
+ repo=repo_metadata["repo"],
+ modules=modules,
+ main_module=main_module,
+ name=name,
+ )
+
+ # For monorepo, we use the versions.txt at the root. The "./" is needed
+ # for the templates.render(), which tries to create a directory.
+ versions_txt_file = "../versions.txt" if monorepo else "./versions.txt"
+ print(f"updating modules in {versions_txt_file}")
+ existing_modules.pop(parent_artifact_id)
+
+ # add extra modules to versions.txt
+ for dependency_module in extra_managed_modules:
+ if dependency_module not in existing_modules:
+ existing_modules[dependency_module] = module.Module(
+ group_id=_proto_group_id(group_id),
+ artifact_id=dependency_module,
+ version=main_module.version,
+ release_version=main_module.release_version,
+ )
+ templates.render(
+ template_name="versions.txt.j2", output_name=versions_txt_file, modules=existing_modules.values(),
+ )
+
+
+if __name__ == "__main__":
+ versions_file = sys.argv[1]
+ monorepo = sys.argv[2]
+ if monorepo == 'true':
+ monorepo = True
+ main(versions_file, monorepo)
diff --git a/library_generation/owlbot/src/gen-template.py b/library_generation/owlbot/src/gen-template.py
new file mode 100644
index 0000000000..fd3015ebf8
--- /dev/null
+++ b/library_generation/owlbot/src/gen-template.py
@@ -0,0 +1,81 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import glob
+import json
+from typing import List
+import os
+from pathlib import Path
+
+import click
+import jinja2
+
+
+@click.command()
+@click.option(
+ "--folder", help="Path to folder of templates",
+)
+@click.option("--file", help="Path to template file")
+@click.option(
+ "--data",
+ help="Path to JSON file with template values",
+ multiple=True,
+ required=True,
+)
+@click.option(
+ "--output", help="Path to output", default=".",
+)
+def main(folder: str, file: str, data: List[str], output: str):
+ """Generate templates"""
+ variables = {}
+ for data_file in data:
+ with open(data_file, "r") as fp:
+ variables = {**variables, **json.load(fp)}
+
+ if folder is not None:
+ location = Path(folder)
+ filenames = glob.glob(f"{folder}/**/*.j2", recursive=True)
+ elif file is not None:
+ location = Path(file).parent
+ filenames = [f"{file}.j2"]
+ else:
+ raise Exception("Need to specify either folder or file")
+
+ output_path = Path(output)
+
+ env = jinja2.Environment(
+ loader=jinja2.FileSystemLoader(str(location)),
+ autoescape=False,
+ keep_trailing_newline=True,
+ )
+
+ for filename in filenames:
+ template_name = Path(filename).relative_to(location)
+ template = env.get_template(str(template_name))
+ output = template.stream(**variables)
+
+ destination = output_path / os.path.splitext(template_name)[0]
+ destination.parent.mkdir(parents=True, exist_ok=True)
+
+ with destination.open("w") as fp:
+ output.dump(fp)
+
+ # Copy file mode over
+ source_path = Path(template.filename)
+ mode = source_path.stat().st_mode
+ destination.chmod(mode)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/library_generation/owlbot/src/poms/.gitignore b/library_generation/owlbot/src/poms/.gitignore
new file mode 100644
index 0000000000..c18dd8d83c
--- /dev/null
+++ b/library_generation/owlbot/src/poms/.gitignore
@@ -0,0 +1 @@
+__pycache__/
diff --git a/library_generation/owlbot/src/poms/module.py b/library_generation/owlbot/src/poms/module.py
new file mode 100644
index 0000000000..3beafd22b0
--- /dev/null
+++ b/library_generation/owlbot/src/poms/module.py
@@ -0,0 +1,50 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import attr
+from lxml import etree
+import os
+from typing import List, Optional
+
+
+@attr.s(auto_attribs=True)
+class Module:
+ group_id: str
+ artifact_id: str
+ version: str
+ release_version: Optional[str]
+
+
+def read_module(pom: str) -> Module:
+ tree = etree.parse(pom)
+ artifact_id = tree.find("{http://maven.apache.org/POM/4.0.0}artifactId").text
+ version = tree.find("{http://maven.apache.org/POM/4.0.0}version").text
+ group_id = (
+ "com.google.cloud"
+ if artifact_id.startswith("google-cloud")
+ else "com.google.api.grpc"
+ )
+ return Module(group_id=group_id, artifact_id=artifact_id, version=version,)
+
+
+def read_modules(service: str) -> List[Module]:
+ thedir = f"workspace/java-{service}/"
+ modules = []
+ for name in os.listdir(thedir):
+ dir = os.path.join(thedir, name)
+ pom = os.path.join(dir, "pom.xml")
+ if os.path.exists(pom):
+ modules.append(read_module(pom))
+
+ return modules
diff --git a/library_generation/owlbot/src/poms/templates.py b/library_generation/owlbot/src/poms/templates.py
new file mode 100644
index 0000000000..09a77e9ec7
--- /dev/null
+++ b/library_generation/owlbot/src/poms/templates.py
@@ -0,0 +1,36 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from jinja2 import Environment, FileSystemLoader
+import os
+import pathlib
+from typing import List
+
+root_directory = pathlib.Path(
+ os.path.realpath(os.path.dirname(os.path.realpath(__file__)))
+).parent.parent
+print(root_directory)
+jinja_env = Environment(
+ loader=FileSystemLoader(str(root_directory / "templates" / "poms")),
+ keep_trailing_newline=True,
+)
+
+
+def render(template_name: str, output_name: str, **kwargs):
+ template = jinja_env.get_template(template_name)
+ t = template.stream(kwargs)
+ directory = os.path.dirname(output_name)
+ if not os.path.isdir(directory):
+ os.makedirs(directory)
+ t.dump(str(output_name))
diff --git a/library_generation/owlbot/src/requirements.in b/library_generation/owlbot/src/requirements.in
new file mode 100644
index 0000000000..1dbbb3c666
--- /dev/null
+++ b/library_generation/owlbot/src/requirements.in
@@ -0,0 +1,11 @@
+attrs
+click
+jinja2
+lxml
+typing
+markupsafe
+colorlog
+protobuf
+watchdog
+requests
+pyyaml
\ No newline at end of file
diff --git a/library_generation/owlbot/src/requirements.txt b/library_generation/owlbot/src/requirements.txt
new file mode 100644
index 0000000000..d3b0f53dba
--- /dev/null
+++ b/library_generation/owlbot/src/requirements.txt
@@ -0,0 +1,353 @@
+#
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
+#
+# pip-compile requirements.in --generate-hashes --upgrade
+#
+attrs==23.1.0 \
+ --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
+ --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
+ # via -r requirements.in
+certifi==2023.7.22 \
+ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
+ # via requests
+charset-normalizer==3.2.0 \
+ --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \
+ --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \
+ --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \
+ --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \
+ --hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \
+ --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \
+ --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \
+ --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \
+ --hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \
+ --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \
+ --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \
+ --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \
+ --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \
+ --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \
+ --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \
+ --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \
+ --hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace \
+ --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \
+ --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \
+ --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \
+ --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \
+ --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \
+ --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \
+ --hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \
+ --hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \
+ --hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \
+ --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \
+ --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \
+ --hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \
+ --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \
+ --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \
+ --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \
+ --hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \
+ --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \
+ --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \
+ --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \
+ --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \
+ --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \
+ --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \
+ --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \
+ --hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \
+ --hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \
+ --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \
+ --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \
+ --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \
+ --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \
+ --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \
+ --hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \
+ --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \
+ --hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \
+ --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \
+ --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \
+ --hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \
+ --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \
+ --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \
+ --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \
+ --hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \
+ --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \
+ --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \
+ --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \
+ --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \
+ --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \
+ --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \
+ --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \
+ --hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \
+ --hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \
+ --hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \
+ --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \
+ --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \
+ --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \
+ --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \
+ --hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \
+ --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \
+ --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \
+ --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa
+ # via requests
+click==8.1.4 \
+ --hash=sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3 \
+ --hash=sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37
+ # via -r requirements.in
+colorlog==6.7.0 \
+ --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \
+ --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5
+ # via -r requirements.in
+idna==3.4 \
+ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
+ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
+ # via requests
+jinja2==3.1.2 \
+ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
+ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+ # via -r requirements.in
+lxml==4.9.3 \
+ --hash=sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3 \
+ --hash=sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d \
+ --hash=sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a \
+ --hash=sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120 \
+ --hash=sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305 \
+ --hash=sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287 \
+ --hash=sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23 \
+ --hash=sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52 \
+ --hash=sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f \
+ --hash=sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4 \
+ --hash=sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584 \
+ --hash=sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f \
+ --hash=sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693 \
+ --hash=sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef \
+ --hash=sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5 \
+ --hash=sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02 \
+ --hash=sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc \
+ --hash=sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7 \
+ --hash=sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da \
+ --hash=sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a \
+ --hash=sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40 \
+ --hash=sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8 \
+ --hash=sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd \
+ --hash=sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601 \
+ --hash=sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c \
+ --hash=sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be \
+ --hash=sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2 \
+ --hash=sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c \
+ --hash=sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129 \
+ --hash=sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc \
+ --hash=sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2 \
+ --hash=sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1 \
+ --hash=sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7 \
+ --hash=sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d \
+ --hash=sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477 \
+ --hash=sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d \
+ --hash=sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e \
+ --hash=sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7 \
+ --hash=sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2 \
+ --hash=sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574 \
+ --hash=sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf \
+ --hash=sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b \
+ --hash=sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98 \
+ --hash=sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12 \
+ --hash=sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42 \
+ --hash=sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35 \
+ --hash=sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d \
+ --hash=sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce \
+ --hash=sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d \
+ --hash=sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f \
+ --hash=sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db \
+ --hash=sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4 \
+ --hash=sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694 \
+ --hash=sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac \
+ --hash=sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2 \
+ --hash=sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7 \
+ --hash=sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96 \
+ --hash=sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d \
+ --hash=sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b \
+ --hash=sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a \
+ --hash=sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13 \
+ --hash=sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340 \
+ --hash=sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6 \
+ --hash=sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458 \
+ --hash=sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c \
+ --hash=sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c \
+ --hash=sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9 \
+ --hash=sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432 \
+ --hash=sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991 \
+ --hash=sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69 \
+ --hash=sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf \
+ --hash=sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb \
+ --hash=sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b \
+ --hash=sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833 \
+ --hash=sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76 \
+ --hash=sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85 \
+ --hash=sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e \
+ --hash=sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50 \
+ --hash=sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8 \
+ --hash=sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4 \
+ --hash=sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b \
+ --hash=sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5 \
+ --hash=sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190 \
+ --hash=sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7 \
+ --hash=sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa \
+ --hash=sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0 \
+ --hash=sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9 \
+ --hash=sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0 \
+ --hash=sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b \
+ --hash=sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5 \
+ --hash=sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7 \
+ --hash=sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4
+ # via -r requirements.in
+markupsafe==2.1.3 \
+ --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
+ --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
+ --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
+ --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
+ --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
+ --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
+ --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
+ --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
+ --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
+ --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
+ --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
+ --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
+ --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
+ --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
+ --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
+ --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
+ --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
+ --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
+ --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
+ --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
+ --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
+ --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
+ --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
+ --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
+ --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
+ --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
+ --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
+ --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
+ --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
+ --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
+ --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
+ --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
+ --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
+ --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
+ --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
+ --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
+ --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
+ --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
+ --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
+ --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
+ --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
+ --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
+ --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
+ --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
+ --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
+ --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
+ --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
+ --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
+ --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
+ --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2
+ # via
+ # -r requirements.in
+ # jinja2
+protobuf==4.23.4 \
+ --hash=sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474 \
+ --hash=sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2 \
+ --hash=sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b \
+ --hash=sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720 \
+ --hash=sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12 \
+ --hash=sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd \
+ --hash=sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0 \
+ --hash=sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e \
+ --hash=sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9 \
+ --hash=sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70 \
+ --hash=sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff \
+ --hash=sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597 \
+ --hash=sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a
+ # via -r requirements.in
+pyyaml==6.0 \
+ --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
+ --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
+ --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
+ --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
+ --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
+ --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
+ --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
+ --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
+ --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
+ --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
+ --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
+ --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
+ --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
+ --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
+ --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
+ --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
+ --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
+ --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
+ --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
+ --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
+ --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
+ --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
+ --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
+ --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
+ --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
+ --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
+ --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
+ --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
+ --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
+ --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
+ --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
+ --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
+ --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
+ --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
+ --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
+ --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
+ --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
+ --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
+ --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
+ --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
+ # via -r requirements.in
+requests==2.31.0 \
+ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
+ --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
+ # via -r requirements.in
+typing==3.7.4.3 \
+ --hash=sha256:1187fb9c82fd670d10aa07bbb6cfcfe4bdda42d6fab8d5134f04e8c4d0b71cc9 \
+ --hash=sha256:283d868f5071ab9ad873e5e52268d611e851c870a2ba354193026f2dfb29d8b5
+ # via -r requirements.in
+urllib3==2.0.7 \
+ --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \
+ --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e
+ # via requests
+watchdog==3.0.0 \
+ --hash=sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a \
+ --hash=sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100 \
+ --hash=sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8 \
+ --hash=sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc \
+ --hash=sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae \
+ --hash=sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41 \
+ --hash=sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0 \
+ --hash=sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f \
+ --hash=sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c \
+ --hash=sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9 \
+ --hash=sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3 \
+ --hash=sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709 \
+ --hash=sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83 \
+ --hash=sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759 \
+ --hash=sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9 \
+ --hash=sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3 \
+ --hash=sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7 \
+ --hash=sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f \
+ --hash=sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346 \
+ --hash=sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674 \
+ --hash=sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397 \
+ --hash=sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96 \
+ --hash=sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d \
+ --hash=sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a \
+ --hash=sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64 \
+ --hash=sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44 \
+ --hash=sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33
+ # via -r requirements.in
diff --git a/library_generation/owlbot/templates/clirr/clirr-ignored-differences.xml.j2 b/library_generation/owlbot/templates/clirr/clirr-ignored-differences.xml.j2
new file mode 100644
index 0000000000..6528981704
--- /dev/null
+++ b/library_generation/owlbot/templates/clirr/clirr-ignored-differences.xml.j2
@@ -0,0 +1,19 @@
+
+
+
+{% for proto_path in proto_paths %}
+ 7012
+ {{proto_path}}/*OrBuilder
+ * get*(*)
+
+
+ 7012
+ {{proto_path}}/*OrBuilder
+ boolean contains*(*)
+
+
+ 7012
+ {{proto_path}}/*OrBuilder
+ boolean has*(*)
+ {% endfor %}
+
diff --git a/library_generation/owlbot/templates/poms/bom_pom.xml.j2 b/library_generation/owlbot/templates/poms/bom_pom.xml.j2
new file mode 100644
index 0000000000..45e6d25253
--- /dev/null
+++ b/library_generation/owlbot/templates/poms/bom_pom.xml.j2
@@ -0,0 +1,34 @@
+
+
+ 4.0.0
+ {{main_module.group_id}}
+ {{main_module.artifact_id}}-bom
+ {{main_module.version}}
+ pom
+
+ com.google.cloud
+ google-cloud-shared-config
+ 1.5.3
+
+
+ Google {{name}} BOM
+
+ BOM for {{name}}
+
+
+
+
+ true
+
+
+
+ {% for module in modules %}
+
+ {{module.group_id}}
+ {{module.artifact_id}}
+ {{module.version}}
+ {% endfor %}
+
+
+
+
diff --git a/library_generation/owlbot/templates/poms/cloud_pom.xml.j2 b/library_generation/owlbot/templates/poms/cloud_pom.xml.j2
new file mode 100644
index 0000000000..6f999f4897
--- /dev/null
+++ b/library_generation/owlbot/templates/poms/cloud_pom.xml.j2
@@ -0,0 +1,134 @@
+
+
+ 4.0.0
+ {{module.group_id}}
+ {{module.artifact_id}}
+ {{module.version}}
+ jar
+ Google {{name}}
+ https://github.com/{{repo}}
+ {{name}} {{description}}
+
+ {{parent_module.group_id}}
+ {{parent_module.artifact_id}}
+ {{parent_module.version}}
+
+
+ {{module.artifact_id}}
+
+
+
+ io.grpc
+ grpc-api
+
+
+ io.grpc
+ grpc-stub
+
+
+ io.grpc
+ grpc-protobuf
+
+
+ com.google.api
+ api-common
+
+
+ com.google.protobuf
+ protobuf-java
+
+
+ com.google.api.grpc
+ proto-google-common-protos
+
+{% for module in proto_modules %}
+
+ {{module.group_id}}
+ {{module.artifact_id}}
+ {% endfor %}
+
+ com.google.guava
+ guava
+
+
+ com.google.api
+ gax
+
+
+ com.google.api
+ gax-grpc
+
+
+ com.google.api
+ gax-httpjson
+
+
+ com.google.api.grpc
+ proto-google-iam-v1
+
+
+ com.google.api.grpc
+ grpc-google-iam-v1
+
+
+ org.threeten
+ threetenbp
+
+
+
+
+ junit
+ junit
+ test
+
+{% for module in grpc_modules %}
+
+ {{module.group_id}}
+ {{module.artifact_id}}
+ test
+ {% endfor %}
+
+
+ com.google.api
+ gax
+ testlib
+ test
+
+
+ com.google.api
+ gax-grpc
+ testlib
+ test
+
+
+ com.google.api
+ gax-httpjson
+ testlib
+ test
+
+
+
+
+
+ java9
+
+ [9,)
+
+
+
+ javax.annotation
+ javax.annotation-api
+
+
+
+
+
+
+
+
+ org.codehaus.mojo
+ flatten-maven-plugin
+
+
+
+
diff --git a/library_generation/owlbot/templates/poms/grpc_pom.xml.j2 b/library_generation/owlbot/templates/poms/grpc_pom.xml.j2
new file mode 100644
index 0000000000..ad2b39c223
--- /dev/null
+++ b/library_generation/owlbot/templates/poms/grpc_pom.xml.j2
@@ -0,0 +1,69 @@
+
+ 4.0.0
+ {{module.group_id}}
+ {{module.artifact_id}}
+ {{module.version}}
+ {{module.artifact_id}}
+ GRPC library for {{main_module.artifact_id}}
+
+ {{parent_module.group_id}}
+ {{parent_module.artifact_id}}
+ {{parent_module.version}}
+
+
+
+ io.grpc
+ grpc-api
+
+
+ io.grpc
+ grpc-stub
+
+
+ io.grpc
+ grpc-protobuf
+
+
+ com.google.protobuf
+ protobuf-java
+
+
+ com.google.api.grpc
+ proto-google-common-protos
+
+
+ {{proto_module.group_id}}
+ {{proto_module.artifact_id}}
+
+
+ com.google.guava
+ guava
+
+
+
+
+
+ java9
+
+ [9,)
+
+
+
+ javax.annotation
+ javax.annotation-api
+
+
+
+
+
+
+
+
+ org.codehaus.mojo
+ flatten-maven-plugin
+
+
+
+
diff --git a/library_generation/owlbot/templates/poms/parent_pom.xml.j2 b/library_generation/owlbot/templates/poms/parent_pom.xml.j2
new file mode 100644
index 0000000000..cbe3f10913
--- /dev/null
+++ b/library_generation/owlbot/templates/poms/parent_pom.xml.j2
@@ -0,0 +1,43 @@
+
+
+ 4.0.0
+ {{main_module.group_id}}
+ {{main_module.artifact_id}}-parent
+ pom
+ {{main_module.version}}
+ Google {{name}} Parent
+
+ Java idiomatic client for Google Cloud Platform services.
+
+
+
+ com.google.cloud
+ google-cloud-shared-config
+ 1.5.3
+
+
+
+ UTF-8
+ UTF-8
+ github
+ {{main_module.artifact_id}}-parent
+
+
+
+
+{% for module in modules %}
+ {{module.group_id}}
+ {{module.artifact_id}}
+ {{module.version}}
+
+{% endfor %}
+
+
+
+
+
+{% for module in modules %} {{module.artifact_id}}
+{% endfor %} {{main_module.artifact_id}}-bom
+
+
+
diff --git a/library_generation/owlbot/templates/poms/proto_pom.xml.j2 b/library_generation/owlbot/templates/poms/proto_pom.xml.j2
new file mode 100644
index 0000000000..9c383533c7
--- /dev/null
+++ b/library_generation/owlbot/templates/poms/proto_pom.xml.j2
@@ -0,0 +1,46 @@
+
+ 4.0.0
+ {{module.group_id}}
+ {{module.artifact_id}}
+ {{module.version}}
+ {{module.artifact_id}}
+ Proto library for {{main_module.artifact_id}}
+
+ {{parent_module.group_id}}
+ {{parent_module.artifact_id}}
+ {{parent_module.version}}
+
+
+
+ com.google.protobuf
+ protobuf-java
+
+
+ com.google.api.grpc
+ proto-google-common-protos
+
+
+ com.google.api.grpc
+ proto-google-iam-v1
+
+
+ com.google.api
+ api-common
+
+
+ com.google.guava
+ guava
+
+
+
+
+
+
+ org.codehaus.mojo
+ flatten-maven-plugin
+
+
+
+
diff --git a/library_generation/owlbot/templates/poms/versions.txt.j2 b/library_generation/owlbot/templates/poms/versions.txt.j2
new file mode 100644
index 0000000000..2ebaf85d34
--- /dev/null
+++ b/library_generation/owlbot/templates/poms/versions.txt.j2
@@ -0,0 +1,4 @@
+# Format:
+# module:released-version:current-version
+{% for module in modules %}
+{{module.artifact_id}}:{% if module.release_version %}{{module.release_version}}{% else %}{{module.version}}{% endif %}:{{module.version}}{% endfor %}
diff --git a/library_generation/postprocess_library.sh b/library_generation/postprocess_library.sh
index bbb7789bd3..bb8415dda2 100755
--- a/library_generation/postprocess_library.sh
+++ b/library_generation/postprocess_library.sh
@@ -1,85 +1,84 @@
#!/bin/bash
#
-# Main functions to interact with owlbot post-processor and postprocessing
-# scripts
+# Main functions to interact with owlbot post-processor
-
-# Runs the owlbot post-processor docker image. The resulting post-processed
-# library gets stored in `${output_folder}/workspace`
+# Runs the java owlbot post-processor. The resulting post-processed
+# library is written into the folder given by the $postprocessing_target argument
# Arguments
-# 1 - workspace: the location of the grpc,proto and gapic libraries to be
-# processed
-# 2 - scripts_root: location of the generation scripts
-# 3 - destination_path: used to transfer the raw grpc, proto and gapic libraries
-# 4 - proto_path: googleapis path of the library. This is used to prepare the
-# folder structure to run `owlbot-cli copy-code`
-# 5 - versions_file: path to file containing versions to be applied to the poms
-# 6 - output_folder: main workspace of the generation process
-
-workspace=$1
-scripts_root=$2
-destination_path=$3
-proto_path=$4
-versions_file=$5
-output_folder=$6
+# 1 - postprocessing_target: path where the postprocessor will run. This folder
+# has the following requirements
+# - a .repo-metadata.json file must be present
+# - an owlbot.py file must be present
+# - an .OwlBot.yaml file must be present
+# 2 - preprocessed_sources_path: used to transfer the raw grpc, proto and gapic
+# libraries into the postprocessing_target via copy-code
+# 3 - versions_file: path to file containing versions to be applied to the poms
+set -xeo pipefail
+scripts_root=$(dirname "$(readlink -f "$0")")
+
+postprocessing_target=$1
+preprocessed_sources_path=$2
+versions_file=$3
source "${scripts_root}"/utilities.sh
-repository_root=$(echo "${destination_path}" | cut -d/ -f1)
-repo_metadata_json_path=$(get_repo_metadata_json "${destination_path}" "${output_folder}")
-owlbot_sha=$(get_owlbot_sha "${output_folder}" "${repository_root}")
-
-# read or infer owlbot sha
-
-cp "${repo_metadata_json_path}" "${workspace}"/.repo-metadata.json
+for owlbot_file in ".repo-metadata.json" "owlbot.py" ".OwlBot.yaml"
+do
+ if [[ $(find "${postprocessing_target}" -name "${owlbot_file}" | wc -l) -eq 0 ]]; then
+ echo "necessary file for postprocessing '${owlbot_file}' was not found in postprocessing_target"
+ echo "please provide a postprocessing_target folder that is java owlbot compatible"
+ exit 1
+ fi
+done
+
+proto_path=$(get_proto_path_from_preprocessed_sources "${preprocessed_sources_path}")
+
+# ensure pyenv scripts are available
+eval "$(pyenv init --path)"
+eval "$(pyenv init -)"
+eval "$(pyenv virtualenv-init -)"
+
+# create and activate the python virtualenv
+python_version=$(cat "${scripts_root}/configuration/python-version")
+if [ $(pyenv versions | grep "${python_version}" | wc -l) -eq 0 ]; then
+ pyenv install "${python_version}"
+fi
+if [ $(pyenv virtualenvs | grep "${python_version}" | grep "postprocessing" | wc -l) -eq 0 ];then
+ pyenv virtualenv "${python_version}" "postprocessing"
+fi
+pyenv activate "postprocessing"
# call owl-bot-copy
-owlbot_staging_folder="${workspace}/owl-bot-staging"
+owlbot_staging_folder="${postprocessing_target}/owl-bot-staging"
mkdir -p "${owlbot_staging_folder}"
-owlbot_postprocessor_image="gcr.io/cloud-devrel-public-resources/owlbot-java@sha256:${owlbot_sha}"
-
-
-
-# copy existing pom, owlbot and version files if the source of truth repo is present
-# pre-processed folders are ommited
-if [[ -d "${output_folder}/${destination_path}" ]]; then
- rsync -avm \
- --include='*/' \
- --include='*.xml' \
- --include='owlbot.py' \
- --include='.OwlBot.yaml' \
- --exclude='*' \
- "${output_folder}/${destination_path}/" \
- "${workspace}"
-fi
-
echo 'Running owl-bot-copy'
-pre_processed_libs_folder="${output_folder}/pre-processed"
+pre_processed_libs_folder=$(mktemp -d)
# By default (thanks to generation templates), .OwlBot.yaml `deep-copy` section
# references a wildcard pattern matching a folder
-# ending with `-java` at the leaf of proto_path.
+# ending with `-java` at the leaf of proto_path. We then use a generated-java
+# folder that will be picked up by copy-code
mkdir -p "${pre_processed_libs_folder}/${proto_path}/generated-java"
-folder_name=$(extract_folder_name "${destination_path}")
-copy_directory_if_exists "${output_folder}/${destination_path}/proto-${folder_name}" \
- "${pre_processed_libs_folder}/${proto_path}/generated-java/proto-google-cloud-${folder_name}"
-copy_directory_if_exists "${output_folder}/${destination_path}/grpc-${folder_name}" \
- "${pre_processed_libs_folder}/${proto_path}/generated-java/grpc-google-cloud-${folder_name}"
-copy_directory_if_exists "${output_folder}/${destination_path}/gapic-${folder_name}" \
- "${pre_processed_libs_folder}/${proto_path}/generated-java/gapic-google-cloud-${folder_name}"
-copy_directory_if_exists "${output_folder}/${destination_path}/samples" \
+copy_directory_if_exists "${preprocessed_sources_path}" "proto" \
+ "${pre_processed_libs_folder}/${proto_path}/generated-java/proto-google-cloud-library"
+copy_directory_if_exists "${preprocessed_sources_path}" "grpc" \
+ "${pre_processed_libs_folder}/${proto_path}/generated-java/grpc-google-cloud-library"
+copy_directory_if_exists "${preprocessed_sources_path}" "gapic" \
+ "${pre_processed_libs_folder}/${proto_path}/generated-java/gapic-google-cloud-library"
+copy_directory_if_exists "${preprocessed_sources_path}" "samples" \
"${pre_processed_libs_folder}/${proto_path}/generated-java/samples"
pushd "${pre_processed_libs_folder}"
-# create an empty repository so owl-bot-copy can process this as a repo
-# (cannot process non-git-repositories)
+# create an empty commit so owl-bot-copy can process this as a repo
+# (it cannot process non-git-repositories)
git init
git commit --allow-empty -m 'empty commit'
popd # pre_processed_libs_folder
owlbot_cli_image_sha=$(cat "${scripts_root}/configuration/owlbot-cli-sha" | grep "sha256")
+
docker run --rm \
--user $(id -u):$(id -g) \
- -v "${workspace}:/repo" \
+ -v "${postprocessing_target}:/repo" \
-v "${pre_processed_libs_folder}:/pre-processed-libraries" \
-w /repo \
--env HOME=/tmp \
@@ -89,15 +88,37 @@ docker run --rm \
--source-repo=/pre-processed-libraries \
--config-file=.OwlBot.yaml
+# if the postprocessing_target is a library of google-cloud-java, we have to "unpack" the
+# owl-bot-staging folder so it's properly processed by java owlbot
+if [[ $(basename $(dirname "${postprocessing_target}")) == "google-cloud-java" ]]; then
+ pushd "${postprocessing_target}"
+ mv owl-bot-staging/* temp
+ rm -rd owl-bot-staging/
+ mv temp owl-bot-staging
+ popd # postprocessing_target
+fi
-echo 'running owl-bot post-processor'
-versions_file_arg=""
-if [ -f "${versions_file}" ];then
- versions_file_arg="-v ${versions_file}:/versions.txt"
+# we clone the synthtool library and manually build it
+mkdir -p /tmp/synthtool
+pushd /tmp/synthtool
+if [ ! -d "synthtool" ]; then
+ git clone https://github.com/googleapis/synthtool.git
fi
+pushd "synthtool"
+synthtool_commitish=$(cat "${scripts_root}/configuration/synthtool-commitish")
+git reset --hard "${synthtool_commitish}"
+python3 -m pip install -e .
+python3 -m pip install -r requirements.in
+popd # synthtool
+popd # temp dir
+
+# we install the owlbot requirements
+pushd "${scripts_root}/owlbot/src/"
+python3 -m pip install -r requirements.in
+popd # owlbot/src
+
# run the postprocessor
-docker run --rm \
- -v "${workspace}:/workspace" \
- ${versions_file_arg} \
- --user $(id -u):$(id -g) \
- "${owlbot_postprocessor_image}"
+echo 'running owl-bot post-processor'
+pushd "${postprocessing_target}"
+bash "${scripts_root}/owlbot/bin/entrypoint.sh" "${scripts_root}" "${versions_file}"
+popd # postprocessing_target
diff --git a/library_generation/test/generate_library_integration_test.sh b/library_generation/test/generate_library_integration_test.sh
index 3cc5a1544c..f6084cf241 100755
--- a/library_generation/test/generate_library_integration_test.sh
+++ b/library_generation/test/generate_library_integration_test.sh
@@ -36,10 +36,6 @@ case $key in
enable_postprocessing="$2"
shift
;;
- -s|--owlbot_sha)
- owlbot_sha="$2"
- shift
- ;;
-g|--googleapis_gen_url)
googleapis_gen_url="$2"
shift
@@ -117,6 +113,7 @@ grep -v '^ *#' < "${proto_path_list}" | while IFS= read -r line; do
else
echo 'this is a monorepo library'
sparse_clone "https://github.com/googleapis/google-cloud-java.git" "${repository_path} google-cloud-pom-parent google-cloud-jar-parent versions.txt .github"
+
# compute path from output_folder to source of truth library location
# (e.g. google-cloud-java/java-compute)
repository_path="google-cloud-java/${repository_path}"
@@ -173,11 +170,6 @@ grep -v '^ *#' < "${proto_path_list}" | while IFS= read -r line; do
echo "Generate library finished."
echo "Compare generation result..."
if [ $enable_postprocessing == "true" ]; then
- if [ $(find "${output_folder}/workspace" -name '*.java' | wc -l) -eq 0 ];
- then
- echo 'no java files found in workspace. This probably means that owlbot copy failed'
- exit 1
- fi
echo "Checking out repository..."
pushd "${target_folder}"
source_diff_result=0
@@ -199,7 +191,7 @@ grep -v '^ *#' < "${proto_path_list}" | while IFS= read -r line; do
rm -rdf google-cloud-java
elif [ ${source_diff_result} != 0 ]; then
echo "FAILURE: Differences found in proto path: ${proto_path}."
- exit "${SOURCE_DIFF_RESULT}"
+ exit "${source_diff_result}"
elif [ ${pom_diff_result} != 0 ]; then
echo "FAILURE: Differences found in generated poms"
exit "${pom_diff_result}"
diff --git a/library_generation/test/generate_library_unit_tests.sh b/library_generation/test/generate_library_unit_tests.sh
index 8a6ef0f42d..6fde314788 100755
--- a/library_generation/test/generate_library_unit_tests.sh
+++ b/library_generation/test/generate_library_unit_tests.sh
@@ -77,7 +77,7 @@ get_gapic_opts_with_non_default_test() {
remove_grpc_version_test() {
local destination_path="${script_dir}/resources/gapic_options"
cp "${destination_path}/QueryServiceGrpc_copy.java" "${destination_path}/QueryServiceGrpc.java"
- remove_grpc_version
+ remove_grpc_version "${destination_path}"
local res=0
if ! grep -q 'value = "by gRPC proto compiler",' "${destination_path}/QueryServiceGrpc.java"; then
echo "Error: grpc version is not removed."
@@ -305,41 +305,11 @@ get_version_from_valid_WORKSPACE_test() {
assertEquals '2.25.1-SNAPSHOT' "${obtained_ggj_version}"
}
-get_repo_metadata_json_valid_repo_succeeds() {
- local output_folder="${script_dir}/resources"
- local repository_path="test-monorepo/test-service"
- local repo_metadata_json=$(get_repo_metadata_json "${repository_path}" "${output_folder}")
- assertEquals "${output_folder}/${repository_path}/.repo-metadata.json" \
- "${repo_metadata_json}"
-}
-
-get_repo_metadata_json_invalid_repo_fails() {
- local output_folder="${script_dir}/resources"
- local repository_path="test-monorepo/java-nonexistent"
- $(get_repo_metadata_json "${repository_path}" "${output_folder}") || res=$?
- assertEquals 1 ${res}
-}
-
-get_owlbot_sha_valid_repo_succeeds() {
- local output_folder="${script_dir}/resources"
- local repository_root="test-monorepo"
- local owlbot_sha=$(get_owlbot_sha "${output_folder}" "${repository_root}")
- assertEquals 'fb7584f6adb3847ac480ed49a4bfe1463965026b2919a1be270e3174f3ce1191' \
- "${owlbot_sha}"
-}
-
-get_owlbot_sha_invalid_repo_fails() {
- local output_folder="${script_dir}/resources"
- local repository_root="nonexistent-repo"
- $(get_owlbot_sha "${output_folder}" "${repository_root}") || res=$?
- assertEquals 1 ${res}
-}
-
copy_directory_if_exists_valid_folder_succeeds() {
local source_folder="${script_dir}/resources"
local destination="${script_dir}/test_destination_folder"
mkdir -p "${destination}"
- copy_directory_if_exists "${source_folder}" "${destination}/copied-folder"
+ copy_directory_if_exists "${source_folder}" "gapic" "${destination}/copied-folder"
n_matching_folders=$(ls "${destination}" | grep -e 'copied-folder' | wc -l)
rm -rdf "${destination}"
assertEquals 1 ${n_matching_folders}
@@ -349,12 +319,34 @@ copy_directory_if_exists_invalid_folder_does_not_copy() {
local source_folder="${script_dir}/non-existent"
local destination="${script_dir}/test_destination_folder"
mkdir -p "${destination}"
- copy_directory_if_exists "${source_folder}" "${destination}/copied-folder"
+ copy_directory_if_exists "${source_folder}" "gapic" "${destination}/copied-folder"
n_matching_folders=$(ls "${destination}" | grep -e 'copied-folder' | wc -l) || res=$?
rm -rdf "${destination}"
assertEquals 0 ${n_matching_folders}
}
+get_proto_path_from_preprocessed_sources_valid_library_succeeds() {
+ local sources="${script_dir}/resources/proto_path_library"
+ local proto_path=$(get_proto_path_from_preprocessed_sources "${sources}")
+ assertEquals "google/cloud/test/v1" ${proto_path}
+}
+
+get_proto_path_from_preprocessed_sources_empty_library_fails() {
+ local sources=$(mktemp -d)
+ (
+ get_proto_path_from_preprocessed_sources "${sources}"
+ ) || res=$?
+ assertEquals 1 ${res}
+}
+
+get_proto_path_from_preprocessed_sources_multiple_proto_dirs_fails() {
+ local sources="${script_dir}/resources/proto_path_library_multiple_protos"
+ (
+ get_proto_path_from_preprocessed_sources "${sources}"
+ ) || res=$?
+ assertEquals 1 ${res}
+}
+
# Execute tests.
# One line per test.
test_list=(
@@ -394,12 +386,11 @@ test_list=(
get_include_samples_from_BUILD_false_test
get_include_samples_from_BUILD_empty_test
get_version_from_valid_WORKSPACE_test
- get_repo_metadata_json_valid_repo_succeeds
- get_repo_metadata_json_invalid_repo_fails
- get_owlbot_sha_valid_repo_succeeds
- get_owlbot_sha_invalid_repo_fails
copy_directory_if_exists_valid_folder_succeeds
copy_directory_if_exists_invalid_folder_does_not_copy
+ get_proto_path_from_preprocessed_sources_valid_library_succeeds
+ get_proto_path_from_preprocessed_sources_empty_library_fails
+ get_proto_path_from_preprocessed_sources_multiple_proto_dirs_fails
)
pushd "${script_dir}"
diff --git a/library_generation/test/resources/proto_path_library/proto-test-library/src/main/proto/google/cloud/test/v1/empty.proto b/library_generation/test/resources/proto_path_library/proto-test-library/src/main/proto/google/cloud/test/v1/empty.proto
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/library_generation/test/resources/proto_path_library_multiple_protos/proto-1/fake.proto b/library_generation/test/resources/proto_path_library_multiple_protos/proto-1/fake.proto
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/library_generation/test/resources/proto_path_library_multiple_protos/proto-2/fake.proto b/library_generation/test/resources/proto_path_library_multiple_protos/proto-2/fake.proto
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/library_generation/test/resources/proto_path_list.txt b/library_generation/test/resources/proto_path_list.txt
index c85cbccbd9..5f82059e52 100755
--- a/library_generation/test/resources/proto_path_list.txt
+++ b/library_generation/test/resources/proto_path_list.txt
@@ -5,13 +5,13 @@
# google/bigtable/v2 java-bigtable true
google/cloud/apigeeconnect/v1 java-apigee-connect false
google/cloud/asset/v1p5beta1 java-asset false
-google/cloud/asset/v1p2beta1 java-asset false
+# google/cloud/asset/v1p2beta1 java-asset false
google/cloud/asset/v1p1beta1 java-asset false
google/cloud/asset/v1p7beta1 java-asset false
google/cloud/asset/v1 java-asset false
# google/cloud/dialogflow/v2beta1 java-dialogflow false
# google/cloud/dialogflow/v2 java-dialogflow false
-google/cloud/compute/v1 java-compute false
+# google/cloud/compute/v1 java-compute false
google/cloud/kms/v1 java-kms false
google/cloud/redis/v1 java-redis false
google/cloud/redis/v1beta1 java-redis false
diff --git a/library_generation/utilities.sh b/library_generation/utilities.sh
index 66e489d2bc..87feb3838c 100755
--- a/library_generation/utilities.sh
+++ b/library_generation/utilities.sh
@@ -11,6 +11,7 @@ extract_folder_name() {
remove_empty_files() {
local category=$1
+ local destination_path=$2
local file_num
find "${destination_path}/${category}-${folder_name}/src/main/java" -type f -size 0 | while read -r f; do rm -f "${f}"; done
# remove the directory if the directory has no files.
@@ -28,6 +29,7 @@ remove_empty_files() {
mv_src_files() {
local category=$1 # one of gapic, proto, samples
local type=$2 # one of main, test
+ local destination_path=$3
if [ "${category}" == "samples" ]; then
src_suffix="samples/snippets/generated/src/main/java/com"
folder_suffix="samples/snippets/generated"
@@ -48,6 +50,7 @@ mv_src_files() {
# unzip jar file
unzip_src_files() {
local category=$1
+ local destination_path=$2
local jar_file=java_${category}.jar
mkdir -p "${destination_path}/${category}-${folder_name}/src/main/java"
unzip -q -o "${destination_path}/${jar_file}" -d "${destination_path}/${category}-${folder_name}/src/main/java"
@@ -83,6 +86,7 @@ get_gapic_opts() {
}
remove_grpc_version() {
+ local destination_path=$1
find "${destination_path}" -type f -name "*Grpc.java" -exec \
sed -i.bak 's/value = \"by gRPC proto compiler.*/value = \"by gRPC proto compiler\",/g' {} \; -exec rm {}.bak \;
}
@@ -226,50 +230,41 @@ detect_os_architecture() {
echo "${os_architecture}"
}
-# returns the metadata json path if given, or defaults to the one found in
-# $repository_path
-# Arguments
-# 1 - repository_path: path from output_folder to the location of the library
-# containing .repo-metadata. It assumes the existence of google-cloud-java in
-# the output folder
-# 2 - output_folder: root for the generated libraries, used in conjunction with
-get_repo_metadata_json() {
- local repository_path=$1
- local output_folder=$2
- >&2 echo 'Attempting to obtain .repo-metadata.json from repository_path'
- local default_metadata_json_path="${output_folder}/${repository_path}/.repo-metadata.json"
- if [ -f "${default_metadata_json_path}" ]; then
- echo "${default_metadata_json_path}"
- else
- >&2 echo 'failed to obtain json from repository_path'
- exit 1
- fi
-}
-# returns the owlbot image sha contained in google-cloud-java. This is default
-# behavior that may be overriden by a custom value in the future.
-# Arguments
-# 1 - output_folder: root for the generated libraries, used in conjunction with
-# 2 - repository_root: usually "google-cloud-java". The .OwlBot.yaml
-# file is looked into its .github folder
-get_owlbot_sha() {
- local output_folder=$1
- local repository_root=$2
- if [ ! -d "${output_folder}/${repository_root}" ];
- then
- >&2 echo 'No repository to infer owlbot_sha was provided. This is necessary for post-processing' >&2
- exit 1
+# copies the folders inside $1 whose names start with the prefix $2 into $3, only if $1 exists
+copy_directory_if_exists() {
+ local base_folder=$1
+ local folder_prefix=$2
+ local destination_folder=$3
+ if [ ! -d "${base_folder}" ]; then
+ return
+ fi
+ pushd "${base_folder}"
+ if [[ $(find . -maxdepth 1 -type d -name "${folder_prefix}*" | wc -l ) -gt 0 ]]; then
+ cp -r ${base_folder}/${folder_prefix}* "${destination_folder}"
fi
- >&2 echo "Attempting to obtain owlbot_sha from monorepo folder"
- owlbot_sha=$(grep 'sha256' "${output_folder}/${repository_root}/.github/.OwlBot.lock.yaml" | cut -d: -f3)
- echo "${owlbot_sha}"
+ popd # base_folder
}
-# copies $1 as a folder as $2 only if $1 exists
-copy_directory_if_exists() {
- local source_folder=$1
- local destination_folder=$2
- if [ -d "${source_folder}" ]; then
- cp -r "${source_folder}" "${destination_folder}"
+# computes proto_path from a given folder of GAPIC sources
+# It will inspect the proto library to compute the path
+get_proto_path_from_preprocessed_sources() {
+ set -e
+ local sources=$1
+ pushd "${sources}" > /dev/null
+ local proto_library=$(find . -maxdepth 1 -type d -name 'proto-*' | sed 's/\.\///')
+ local found_libraries=$(echo "${proto_library}" | wc -l)
+ if [ -z ${proto_library} ]; then
+ echo "no proto libraries found in the supplied sources path"
+ exit 1
+ elif [ ${found_libraries} -gt 1 ]; then
+ echo "more than one proto library found in the supplied sources path"
+ echo "cannot decide for a service version"
+ exit 1
fi
+ pushd "$(pwd)/${proto_library}/src/main/proto" > /dev/null
+ local result=$(find . -type f -name '*.proto' | head -n 1 | xargs dirname | sed 's/\.\///')
+ popd > /dev/null # proto_library
+ popd > /dev/null # sources
+ echo "${result}"
}