Merge branch 'main' into bugfix/26206
Signed-off-by: Shaun Adams <shaun.adams@volunteers.acasi.info>
adams-shaun committed May 2, 2024
2 parents 7cd452b + 1ae957c commit 901fc18
Showing 308 changed files with 4,511 additions and 2,611 deletions.
22 changes: 18 additions & 4 deletions .azure-pipelines/cached.yml
@@ -1,4 +1,3 @@

parameters:
- name: arch
type: string
@@ -50,7 +49,12 @@ steps:
VSO_DEDUP_REDIRECT_TIMEOUT_IN_SEC: "${{ parameters.cacheTimeoutWorkaround }}"
displayName: "Cache (${{ parameters.cacheName }})"
inputs:
key: '${{ parameters.cacheName }} | "${{ parameters.version }}" | "${{ parameters.arch }}" | ${{ parameters.keyDocker }} | ${{ parameters.keyBazel }}'
key: >-
${{ parameters.cacheName }}
| "${{ parameters.version }}"
| "${{ parameters.arch }}"
| ${{ parameters.keyDocker }}
| ${{ parameters.keyBazel }}
path: "${{ parameters.pathTemp }}/all"
cacheHitVar: CACHE_RESTORED

@@ -81,9 +85,19 @@ steps:
BAZEL_RESTORED: $(BAZEL_CACHE_RESTORED)
displayName: "Cache/prime (Docker/Bazel)"
# TODO(phlax): figure if there is a way to test cache without downloading it
condition: and(not(canceled()), eq(${{ parameters.prime }}, true), eq('${{ parameters.cacheName }}', ''), or(ne(variables.DOCKER_CACHE_RESTORED, 'true'), ne(variables.BAZEL_CACHE_RESTORED, 'true')))
condition: >-
and(not(canceled()),
eq(${{ parameters.prime }}, true),
eq('${{ parameters.cacheName }}', ''),
or(ne(variables.DOCKER_CACHE_RESTORED, 'true'),
ne(variables.BAZEL_CACHE_RESTORED, 'true')))
# Load the caches for a job
- script: sudo .azure-pipelines/docker/load_caches.sh "$(Build.StagingDirectory)" "${{ parameters.pathTemp }}" "${{ parameters.pathDockerBind }}" "${{ parameters.tmpfsDockerDisabled }}"
- script: >-
sudo .azure-pipelines/docker/load_caches.sh
"$(Build.StagingDirectory)"
"${{ parameters.pathTemp }}"
"${{ parameters.pathDockerBind }}"
"${{ parameters.tmpfsDockerDisabled }}"
displayName: "Cache/restore"
condition: and(not(canceled()), eq(${{ parameters.prime }}, false))
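Not part of the commit, for context: the reflowed values in this file use YAML folded block scalars. A ">-" scalar joins its indented lines with single spaces and strips the trailing newline, so the multi-line cache key above evaluates to the same single-line string as the form it replaces. A minimal sketch with made-up values:

    # illustrative only; the cache name, version and arch are hypothetical
    key: >-
      exampleCache
      | "1.2.3"
      | "x64"
    # ...is equivalent to:
    key: 'exampleCache | "1.2.3" | "x64"'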
1 change: 0 additions & 1 deletion .azure-pipelines/gpg.yml
@@ -1,4 +1,3 @@

parameters:
- name: nameDownloadTask
type: string
8 changes: 7 additions & 1 deletion .azure-pipelines/pipelines.yml
@@ -57,7 +57,13 @@ variables:
value: /mnt/docker

- name: authGithubSSHKeyPublic
value: "github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk="
value: >-
github.com ssh-rsa
AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=
stages:
3 changes: 1 addition & 2 deletions .azure-pipelines/stage/checks.yml
@@ -1,4 +1,3 @@

parameters:
- name: bucketGCP
type: string
@@ -113,7 +112,7 @@ jobs:
# This condition ensures that this (required) check passes if all of
# the preceding checks either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(
eq(variables['Build.Reason'], 'PullRequest'),
6 changes: 4 additions & 2 deletions .azure-pipelines/stage/linux.yml
@@ -83,8 +83,10 @@ jobs:
# This condition ensures that this (required) job passes if all of
# the preceding jobs either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: and(eq(variables['Build.Reason'], 'PullRequest'), in(dependencies.release.result, 'Succeeded', 'SucceededWithIssues', 'Skipped'))
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(eq(variables['Build.Reason'], 'PullRequest'),
in(dependencies.release.result, 'Succeeded', 'SucceededWithIssues', 'Skipped'))
steps:
- checkout: none
- bash: |
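Background, not from the diff: the "passes if all of the preceding jobs either pass or are skipped" comments in these stages describe a standard Azure DevOps gating pattern, where a required summary job depends on its siblings and accepts the "Skipped" result alongside success. A minimal sketch with hypothetical job names:

    # illustrative only; "build" and "test" are hypothetical sibling jobs
    - job: completed
      displayName: "Required summary check"
      dependsOn: ["build", "test"]
      condition: |
        and(
          eq(variables['Build.Reason'], 'PullRequest'),
          in(dependencies.build.result, 'Succeeded', 'SucceededWithIssues', 'Skipped'),
          in(dependencies.test.result, 'Succeeded', 'SucceededWithIssues', 'Skipped'))
      steps:
      - checkout: none
      - bash: echo "All required jobs passed or were skipped"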
2 changes: 1 addition & 1 deletion .azure-pipelines/stage/prechecks.yml
@@ -165,7 +165,7 @@ jobs:
# This condition ensures that this (required) job passes if all of
# the preceding jobs either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(
eq(variables['Build.Reason'], 'PullRequest'),
3 changes: 1 addition & 2 deletions .azure-pipelines/stage/publish.yml
@@ -1,4 +1,3 @@

parameters:

- name: bucketGCP
@@ -253,7 +252,7 @@ jobs:
# This condition ensures that this (required) check passes if all of
# the preceding checks either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(
in(dependencies.docker.result, 'Succeeded', 'SucceededWithIssues', 'Skipped'),
17 changes: 13 additions & 4 deletions .azure-pipelines/stage/verify.yml
@@ -1,4 +1,3 @@

parameters:

# Auth
@@ -10,7 +9,12 @@ parameters:
jobs:
- job: packages_x64
displayName: Debs (x64)
condition: and(not(canceled()), succeeded(), ne(stageDependencies.env.repo.outputs['changed.mobileOnly'], 'true'), ne(stageDependencies.env.repo.outputs['changed.docsOnly'], 'true'), ne(stageDependencies.env.repo.outputs['changed.examplesOnly'], 'true'))
condition: |
and(not(canceled()),
succeeded(),
ne(stageDependencies.env.repo.outputs['changed.mobileOnly'], 'true'),
ne(stageDependencies.env.repo.outputs['changed.docsOnly'], 'true'),
ne(stageDependencies.env.repo.outputs['changed.examplesOnly'], 'true'))
timeoutInMinutes: 120
pool: envoy-x64-small
steps:
@@ -32,7 +36,12 @@ jobs:

- job: packages_arm64
displayName: Debs (arm64)
condition: and(not(canceled()), succeeded(), ne(stageDependencies.env.repo.outputs['changed.mobileOnly'], 'true'), ne(stageDependencies.env.repo.outputs['changed.docsOnly'], 'true'), ne(stageDependencies.env.repo.outputs['changed.examplesOnly'], 'true'))
condition: |
and(not(canceled()),
succeeded(),
ne(stageDependencies.env.repo.outputs['changed.mobileOnly'], 'true'),
ne(stageDependencies.env.repo.outputs['changed.docsOnly'], 'true'),
ne(stageDependencies.env.repo.outputs['changed.examplesOnly'], 'true'))
timeoutInMinutes: 120
pool: "envoy-arm-small"
steps:
@@ -63,7 +72,7 @@ jobs:
# This condition ensures that this (required) check passes if all of
# the preceding checks either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(
eq(variables['Build.Reason'], 'PullRequest'),
1 change: 0 additions & 1 deletion .azure-pipelines/stages.yml
@@ -1,4 +1,3 @@

parameters:
## Build stages
# NB: all stages _must_ depend on `env`
2 changes: 1 addition & 1 deletion .clang-format
@@ -1,5 +1,5 @@
---
Language: Cpp
Language: Cpp
AccessModifierOffset: -2
ColumnLimit: 100
DerivePointerAlignment: false
2 changes: 1 addition & 1 deletion .github/workflows/_precheck_deps.yml
@@ -55,4 +55,4 @@ jobs:
ref: ${{ fromJSON(inputs.request).request.sha }}
persist-credentials: false
- name: Dependency Review
uses: actions/dependency-review-action@5bbc3ba658137598168acb2ab73b21c432dd411b # v4.2.5
uses: actions/dependency-review-action@0c155c5e8556a497adf53f2c18edabf945ed8e70 # v4.3.2
7 changes: 5 additions & 2 deletions .github/workflows/codeql-daily.yml
@@ -46,7 +46,9 @@ jobs:
shell: bash
run: |
sudo apt-get update --error-on=any
sudo apt-get install --yes libtool cmake automake autoconf make ninja-build curl unzip virtualenv openjdk-11-jdk build-essential libc++1
sudo apt-get install --yes \
libtool cmake automake autoconf make ninja-build curl unzip \
virtualenv openjdk-11-jdk build-essential libc++1
# Note: the llvm/clang version should match the version specified in:
# - bazel/repository_locations.bzl
# - .github/workflows/codeql-push.yml
@@ -61,7 +63,8 @@ jobs:
run: |
bazel/setup_clang.sh bin/clang14
bazelisk shutdown
bazelisk build -c fastbuild --spawn_strategy=local --discard_analysis_cache --nouse_action_cache --config clang --config libc++ //source/common/http/...
bazelisk build -c fastbuild --spawn_strategy=local --discard_analysis_cache --nouse_action_cache --config clang --config libc++ \
//source/common/http/...
- name: Clean Artifacts
run: |
4 changes: 3 additions & 1 deletion .github/workflows/codeql-push.yml
@@ -77,7 +77,9 @@ jobs:
shell: bash
run: |
sudo apt-get update --error-on=any
sudo apt-get install --yes libtool cmake automake autoconf make ninja-build curl unzip virtualenv openjdk-11-jdk build-essential libc++1
sudo apt-get install --yes \
libtool cmake automake autoconf make ninja-build curl \
unzip virtualenv openjdk-11-jdk build-essential libc++1
# Note: the llvm/clang version should match the version specified in:
# - bazel/repository_locations.bzl
# - .github/workflows/codeql-daily.yml
7 changes: 5 additions & 2 deletions .github/workflows/envoy-dependency.yml
@@ -43,9 +43,12 @@ jobs:
github.event_name == 'workflow_dispatch'
&& startsWith(inputs.task, 'bazel')
}}
name: >-
name: >
Update dep
(${{ inputs.pr && 'PR/' || '' }}${{ inputs.task == 'bazel' && 'bazel' || 'bazel/api' }}/${{ inputs.dependency }}/${{ inputs.version }})
(${{ inputs.pr && 'PR/' || '' }}
${{ inputs.task == 'bazel' && 'bazel' || 'bazel/api' }}
/${{ inputs.dependency }}
/${{ inputs.version }})
runs-on: ubuntu-22.04
steps:
- id: appauth
2 changes: 1 addition & 1 deletion .github/workflows/envoy-publish.yml
@@ -55,7 +55,7 @@ jobs:
ENVOY_CI_SYNC_APP_ID: ${{ fromJSON(needs.load.outputs.trusted) && secrets.ENVOY_CI_SYNC_APP_ID || '' }}
ENVOY_CI_SYNC_APP_KEY: ${{ fromJSON(needs.load.outputs.trusted) && secrets.ENVOY_CI_SYNC_APP_KEY || '' }}
ENVOY_CI_PUBLISH_APP_ID: ${{ fromJSON(needs.load.outputs.trusted) && secrets.ENVOY_CI_PUBLISH_APP_ID || '' }}
ENVOY_CI_PUBLISH_APP_KEY: ${{ fromJSON(needs.load.outputs.trusted) && secrets.ENVOY_CI_PUBLISH_APP_KEY || '' }}
ENVOY_CI_PUBLISH_APP_KEY: ${{ fromJSON(needs.load.outputs.trusted) && secrets.ENVOY_CI_PUBLISH_APP_KEY || '' }}
permissions:
contents: read
packages: read
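Background, not part of the change: the `${{ fromJSON(...) && secrets.X || '' }}` expressions above use the GitHub Actions and/or idiom as a conditional, so a secret is forwarded only when the upstream job marked the run as trusted, and an empty string is passed otherwise. A minimal sketch with a hypothetical secret name:

    env:
      # EXAMPLE_TOKEN is hypothetical; forwarded only for trusted runs, empty otherwise
      EXAMPLE_TOKEN: ${{ fromJSON(needs.load.outputs.trusted) && secrets.EXAMPLE_TOKEN || '' }}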
6 changes: 6 additions & 0 deletions .yamllint
@@ -18,8 +18,14 @@ rules:
- "false"
# https://github.com/adrienverge/yamllint/issues/430
- "on"
- "off"

yaml-files:
- .clang-format
- "*.yml"
- "*.yaml"

ignore:
- "**/*template.yaml"
- examples/single-page-app/_github-clusters.yml
- test/config/integration/server_xds.cds.with_unknown_field.yaml
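Context, not from the diff: yamllint's truthy rule flags bare YAML booleans such as "on" and "off"; keeping them in the allowed-values list means files like GitHub workflows, whose top-level trigger key is literally "on", stay clean under this config. An illustrative fragment only:

    # hypothetical workflow fragment; "on" is an ordinary mapping key here
    on:
      push:
        branches: ["main"]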
3 changes: 0 additions & 3 deletions README.md
@@ -20,9 +20,6 @@ involved and how Envoy plays a role, read the CNCF
* [Official documentation](https://www.envoyproxy.io/)
* [FAQ](https://www.envoyproxy.io/docs/envoy/latest/faq/overview)
* [Unofficial Chinese documentation](https://cloudnative.to/envoy/)
* Watch [a video overview of Envoy](https://www.youtube.com/watch?v=RVZX4CwKhGE)
([transcript](https://www.microservices.com/talks/lyfts-envoy-monolith-service-mesh-matt-klein/))
to find out more about the origin story and design philosophy of Envoy
* [Blog](https://medium.com/@mattklein123/envoy-threading-model-a8d44b922310) about the threading model
* [Blog](https://medium.com/@mattklein123/envoy-hot-restart-1d16b14555b5) about hot restart
* [Blog](https://medium.com/@mattklein123/envoy-stats-b65c7f363342) about stats architecture
50 changes: 25 additions & 25 deletions SECURITY-INSIGHTS.yml
@@ -10,43 +10,43 @@ project-lifecycle:
status: active
bug-fixes-only: false
core-maintainers: # from https://github.com/envoyproxy/envoy/blob/main/OWNERS.md
# Senior maintainers
- github:mattklein123
- github:htuch
- github:alyssawilk
- github:zuercher
- github:lizan
- github:ggreenway
- github:yanavlasov
- github:phlax
- github:RyanTheOptimist
- github:wbpcode
# Maintainers
- github:jmarantz
- github:adisuissa
- github:KBaichoo
- github:keith
- github:kyessenov
- github:ravenblackx
- github:soulxu
- github:nezdolik
# Senior maintainers
- github:mattklein123
- github:htuch
- github:alyssawilk
- github:zuercher
- github:lizan
- github:ggreenway
- github:yanavlasov
- github:phlax
- github:RyanTheOptimist
- github:wbpcode
# Maintainers
- github:jmarantz
- github:adisuissa
- github:KBaichoo
- github:keith
- github:kyessenov
- github:ravenblackx
- github:soulxu
- github:nezdolik
contribution-policy:
accepts-pull-requests: true
accepts-automated-pull-requests: true
code-of-conduct: https://github.com/envoyproxy/envoy/blob/main/CODE_OF_CONDUCT.md
dependencies:
third-party-packages: true
dependencies-lists:
- https://www.envoyproxy.io/docs/envoy/latest/intro/arch_overview/security/external_deps
- https://www.envoyproxy.io/docs/envoy/latest/intro/arch_overview/security/external_deps
env-dependencies-policy:
policy-url: https://github.com/envoyproxy/envoy/blob/main/DEPENDENCY_POLICY.md
distribution-points:
- https://github.com/envoyproxy/envoy
- https://github.com/envoyproxy/envoy
documentation:
- https://www.envoyproxy.io/docs
- https://www.envoyproxy.io/docs
security-contacts:
- type: email
value: envoy-security@googlegroups.com
- type: email
value: envoy-security@googlegroups.com
security-testing:
- tool-type: sca
tool-name: Dependabot
3 changes: 0 additions & 3 deletions api/bazel/cc_proto_descriptor_library/builddefs.bzl
@@ -304,9 +304,6 @@ def _get_cc_proto_descriptor_library_aspect_provides():

cc_proto_descriptor_library_aspect = aspect(
attrs = _maybe_add({
#"_copts": attr.label(
# default = "//:upb_proto_library_copts__for_generated_code_only_do_not_use",
#),
"_gen_descriptor": attr.label(
executable = True,
cfg = "exec",
18 changes: 9 additions & 9 deletions api/bazel/repository_locations.bzl
@@ -105,9 +105,9 @@ REPOSITORY_LOCATIONS_SPEC = dict(
project_name = "Protobuf Rules for Bazel",
project_desc = "Protocol buffer rules for Bazel",
project_url = "https://github.com/bazelbuild/rules_proto",
version = "4.0.0",
sha256 = "66bfdf8782796239d3875d37e7de19b1d94301e8972b3cbd2446b332429b4df1",
release_date = "2021-09-15",
version = "5.3.0-21.7",
sha256 = "dc3fb206a2cb3441b485eb1e423165b231235a1ea9b031b4433cf7bc1fa460dd",
release_date = "2022-12-27",
strip_prefix = "rules_proto-{version}",
urls = ["https://github.com/bazelbuild/rules_proto/archive/refs/tags/{version}.tar.gz"],
use_category = ["api"],
@@ -118,9 +118,9 @@ REPOSITORY_LOCATIONS_SPEC = dict(
project_name = "OpenTelemetry Proto",
project_desc = "Language Independent Interface Types For OpenTelemetry",
project_url = "https://github.com/open-telemetry/opentelemetry-proto",
version = "1.2.0",
sha256 = "516dc94685dbaa14fb792788f31d2ef2b0c3ad08dfa8a9a8164e3cf60c1ab6f7",
release_date = "2024-04-05",
version = "1.3.0",
sha256 = "73a678b0ff7a29b581381566a2230fe2a00b864608786c99c050a4492e2bbafc",
release_date = "2024-04-24",
strip_prefix = "opentelemetry-proto-{version}",
urls = ["https://github.com/open-telemetry/opentelemetry-proto/archive/v{version}.tar.gz"],
use_category = ["api"],
@@ -131,11 +131,11 @@ REPOSITORY_LOCATIONS_SPEC = dict(
project_name = "buf",
project_desc = "A new way of working with Protocol Buffers.", # Used for breaking change detection in API protobufs
project_url = "https://buf.build",
version = "1.30.1",
sha256 = "4d329fdd44146616325c0ad1ee7ebd34886d915131f5457bbb5e0f2a54ae7963",
version = "1.31.0",
sha256 = "8a94dce37ce72c89c82e6c4baf77797a2a4a2eef3b02a7f39b40ef7fb0f39f94",
strip_prefix = "buf",
urls = ["https://github.com/bufbuild/buf/releases/download/v{version}/buf-Linux-x86_64.tar.gz"],
release_date = "2024-04-03",
release_date = "2024-04-23",
use_category = ["api"],
license = "Apache-2.0",
license_url = "https://github.com/bufbuild/buf/blob/v{version}/LICENSE",
@@ -23,7 +23,7 @@ option (xds.annotations.v3.file_status).work_in_progress = true;
// [#protodoc-title: Generic Proxy Route Action Configuration]

// Configuration for the route match action.
// [#next-free-field: 7]
// [#next-free-field: 8]
message RouteAction {
// The name of the route action. This should be unique across all route actions.
string name = 5;
@@ -53,6 +53,13 @@ message RouteAction {
// spans between the point at which the entire downstream request (i.e. end-of-stream) has been
// processed and when the upstream response has been completely processed. A value of 0 will
// disable the route's timeout.
// [#not-implemented-hide:]
google.protobuf.Duration timeout = 6;

// Specifies the retry policy for the route. If not specified, then no retries will be performed.
//
// .. note::
// Only the simplest retry policy is supported: only the ``num_retries`` field is used by the
// generic proxy. The default value of ``num_retries`` is 1, which means the request is tried
// once and no additional retries are performed.
config.core.v3.RetryPolicy retry_policy = 7;
}
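Illustration only, not part of the API change: rendered as YAML, a route action using the new field might look like the fragment below; the surrounding generic proxy matcher configuration is omitted and the names are hypothetical.

    name: example-route          # hypothetical route action name
    retry_policy:                # config.core.v3.RetryPolicy
      num_retries: 3             # per the note above, only this field is honored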
