Commit 1da6766
Merge branch 'main' into admin-stats-histograms
Signed-off-by: Joshua Marantz <[email protected]>
jmarantz committed Jun 21, 2024
2 parents 1dcd6e8 + bb4a76a commit 1da6766
Showing 3,889 changed files with 134,668 additions and 54,237 deletions.
The full diff is too large to display; only the first 3,000 changed files are shown.
22 changes: 18 additions & 4 deletions .azure-pipelines/cached.yml
@@ -1,4 +1,3 @@

parameters:
- name: arch
type: string
@@ -50,7 +49,12 @@ steps:
VSO_DEDUP_REDIRECT_TIMEOUT_IN_SEC: "${{ parameters.cacheTimeoutWorkaround }}"
displayName: "Cache (${{ parameters.cacheName }})"
inputs:
key: '${{ parameters.cacheName }} | "${{ parameters.version }}" | "${{ parameters.arch }}" | ${{ parameters.keyDocker }} | ${{ parameters.keyBazel }}'
key: >-
${{ parameters.cacheName }}
| "${{ parameters.version }}"
| "${{ parameters.arch }}"
| ${{ parameters.keyDocker }}
| ${{ parameters.keyBazel }}
path: "${{ parameters.pathTemp }}/all"
cacheHitVar: CACHE_RESTORED

@@ -81,9 +85,19 @@ steps:
BAZEL_RESTORED: $(BAZEL_CACHE_RESTORED)
displayName: "Cache/prime (Docker/Bazel)"
# TODO(phlax): figure if there is a way to test cache without downloading it
condition: and(not(canceled()), eq(${{ parameters.prime }}, true), eq('${{ parameters.cacheName }}', ''), or(ne(variables.DOCKER_CACHE_RESTORED, 'true'), ne(variables.BAZEL_CACHE_RESTORED, 'true')))
condition: >-
and(not(canceled()),
eq(${{ parameters.prime }}, true),
eq('${{ parameters.cacheName }}', ''),
or(ne(variables.DOCKER_CACHE_RESTORED, 'true'),
ne(variables.BAZEL_CACHE_RESTORED, 'true')))
# Load the caches for a job
- script: sudo .azure-pipelines/docker/load_caches.sh "$(Build.StagingDirectory)" "${{ parameters.pathTemp }}" "${{ parameters.pathDockerBind }}" "${{ parameters.tmpfsDockerDisabled }}"
- script: >-
sudo .azure-pipelines/docker/load_caches.sh
"$(Build.StagingDirectory)"
"${{ parameters.pathTemp }}"
"${{ parameters.pathDockerBind }}"
"${{ parameters.tmpfsDockerDisabled }}"
displayName: "Cache/restore"
condition: and(not(canceled()), eq(${{ parameters.prime }}, false))
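A recurring pattern in this commit is reflowing long single-line values, like the cache `key` and the `condition` above, into YAML block scalars. For reference (not part of the diff), a minimal sketch of the two scalar styles involved, with hypothetical keys:

```yaml
# Folded scalar (>-): lines are joined with single spaces into one string,
# so the key below becomes: cache-name | "1.2.3" | "x64"
example_key: >-
  cache-name
  | "1.2.3"
  | "x64"

# Literal scalar (|): newlines are preserved, which Azure Pipelines still
# accepts for a multi-line condition expression.
example_condition: |
  and(not(canceled()),
      eq(variables.SOME_FLAG, 'true'))
```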
15 changes: 11 additions & 4 deletions .azure-pipelines/ci.yml
@@ -3,6 +3,10 @@ parameters:
displayName: "CI target"
type: string
default: release
- name: artifactName
displayName: "Artifact name"
type: string
default: ""
- name: artifactSuffix
displayName: "Suffix of artifact"
type: string
@@ -318,11 +322,11 @@ steps:

- script: |
set -e
sudo .azure-pipelines/docker/save_cache.sh "$(Build.StagingDirectory)" /mnt/cache/all true true
sudo .azure-pipelines/docker/save_cache.sh "$(Build.StagingDirectory)" ${{ parameters.pathCacheTemp }}/all true true
if id -u vsts &> /dev/null; then
sudo chown -R vsts:vsts /mnt/cache/all
sudo chown -R vsts:vsts ${{ parameters.pathCacheTemp }}/all
else
sudo chown -R azure-pipelines:azure-pipelines /mnt/cache/all
sudo chown -R azure-pipelines:azure-pipelines ${{ parameters.pathCacheTemp }}/all
fi
displayName: "Cache/save (${{ parameters.cacheName}})"
@@ -339,6 +343,9 @@ steps:
- task: PublishBuildArtifacts@1
inputs:
pathtoPublish: "$(Build.StagingDirectory)/envoy"
artifactName: ${{ parameters.ciTarget }}
${{ if eq(parameters.artifactName, '') }}:
artifactName: ${{ parameters.ciTarget }}
${{ if ne(parameters.artifactName, '') }}:
artifactName: ${{ parameters.artifactName }}
timeoutInMinutes: 10
condition: eq(${{ parameters.publishEnvoy }}, 'true')
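The `artifactName` hunk above uses Azure Pipelines compile-time conditional insertion: a `${{ if ... }}:` key merges its child mapping into the parent only when the template expression evaluates to true at template-expansion time. A minimal sketch of that fallback pattern in isolation (the path and comments here are illustrative, not from the commit):

```yaml
- task: PublishBuildArtifacts@1
  inputs:
    pathtoPublish: "$(Build.StagingDirectory)/output"   # illustrative path
    # Exactly one of the two insertions below survives template expansion:
    ${{ if eq(parameters.artifactName, '') }}:
      artifactName: ${{ parameters.ciTarget }}          # fall back to the CI target
    ${{ if ne(parameters.artifactName, '') }}:
      artifactName: ${{ parameters.artifactName }}      # explicit override
```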
1 change: 0 additions & 1 deletion .azure-pipelines/gpg.yml
@@ -1,4 +1,3 @@

parameters:
- name: nameDownloadTask
type: string
8 changes: 7 additions & 1 deletion .azure-pipelines/pipelines.yml
@@ -57,7 +57,13 @@ variables:
value: /mnt/docker

- name: authGithubSSHKeyPublic
value: "github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk="
value: >-
github.com ssh-rsa
AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/
C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY
7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B
381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1v
N1/wsjk="
stages:
7 changes: 4 additions & 3 deletions .azure-pipelines/stage/checks.yml
@@ -1,4 +1,3 @@

parameters:
- name: bucketGCP
type: string
@@ -75,7 +74,8 @@ jobs:
api:
CI_TARGET: "api"
timeoutInMinutes: 180
pool: envoy-x64-small
pool:
vmImage: $(agentUbuntu)
steps:
- template: ../ci.yml
parameters:
@@ -86,6 +86,7 @@ jobs:
managedAgent: false
repoFetchDepth: $(REPO_FETCH_DEPTH)
repoFetchTags: $(REPO_FETCH_TAGS)
pathCacheTemp: /tmp/cache
publishTestResults: variables.PUBLISH_TEST_RESULTS
publishEnvoy: variables.PUBLISH_ENVOY
tmpfsDockerDisabled: true
@@ -111,7 +112,7 @@ jobs:
# This condition ensures that this (required) check passes if all of
# the preceding checks either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(
eq(variables['Build.Reason'], 'PullRequest'),
7 changes: 5 additions & 2 deletions .azure-pipelines/stage/linux.yml
@@ -63,6 +63,7 @@ jobs:
name: target
- template: ../ci.yml
parameters:
artifactName: release
managedAgent: ${{ parameters.managedAgent }}
ciTarget: $(target.value)
cacheName: "release"
@@ -82,8 +83,10 @@ jobs:
# This condition ensures that this (required) job passes if all of
# the preceeding jobs either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: and(eq(variables['Build.Reason'], 'PullRequest'), in(dependencies.release.result, 'Succeeded', 'SucceededWithIssues', 'Skipped'))
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(eq(variables['Build.Reason'], 'PullRequest'),
in(dependencies.release.result, 'Succeeded', 'SucceededWithIssues', 'Skipped'))
steps:
- checkout: none
- bash: |
2 changes: 1 addition & 1 deletion .azure-pipelines/stage/prechecks.yml
@@ -165,7 +165,7 @@ jobs:
# This condition ensures that this (required) job passes if all of
# the preceeding jobs either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(
eq(variables['Build.Reason'], 'PullRequest'),
9 changes: 4 additions & 5 deletions .azure-pipelines/stage/publish.yml
@@ -1,4 +1,3 @@

parameters:

- name: bucketGCP
@@ -10,12 +9,12 @@ parameters:
- name: timeoutDockerPublish
displayName: "Timout Docker publish"
type: number
# in seconds
default: 15
# in minutes
default: 18
- name: timeoutDockerBuild
displayName: "Timout Docker build"
type: number
default: 400
default: 500

# Auth
- name: authGCP
@@ -253,7 +252,7 @@ jobs:
# This condition ensures that this (required) check passes if all of
# the preceding checks either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(
in(dependencies.docker.result, 'Succeeded', 'SucceededWithIssues', 'Skipped'),
17 changes: 13 additions & 4 deletions .azure-pipelines/stage/verify.yml
@@ -1,4 +1,3 @@

parameters:

# Auth
@@ -10,7 +9,12 @@ parameters:
jobs:
- job: packages_x64
displayName: Debs (x64)
condition: and(not(canceled()), succeeded(), ne(stageDependencies.env.repo.outputs['changed.mobileOnly'], 'true'), ne(stageDependencies.env.repo.outputs['changed.docsOnly'], 'true'), ne(stageDependencies.env.repo.outputs['changed.examplesOnly'], 'true'))
condition: |
and(not(canceled()),
succeeded(),
ne(stageDependencies.env.repo.outputs['changed.mobileOnly'], 'true'),
ne(stageDependencies.env.repo.outputs['changed.docsOnly'], 'true'),
ne(stageDependencies.env.repo.outputs['changed.examplesOnly'], 'true'))
timeoutInMinutes: 120
pool: envoy-x64-small
steps:
@@ -32,7 +36,12 @@ jobs:

- job: packages_arm64
displayName: Debs (arm64)
condition: and(not(canceled()), succeeded(), ne(stageDependencies.env.repo.outputs['changed.mobileOnly'], 'true'), ne(stageDependencies.env.repo.outputs['changed.docsOnly'], 'true'), ne(stageDependencies.env.repo.outputs['changed.examplesOnly'], 'true'))
condition: |
and(not(canceled()),
succeeded(),
ne(stageDependencies.env.repo.outputs['changed.mobileOnly'], 'true'),
ne(stageDependencies.env.repo.outputs['changed.docsOnly'], 'true'),
ne(stageDependencies.env.repo.outputs['changed.examplesOnly'], 'true'))
timeoutInMinutes: 120
pool: "envoy-arm-small"
steps:
@@ -63,7 +72,7 @@ jobs:
# This condition ensures that this (required) check passes if all of
# the preceding checks either pass or are skipped
# adapted from:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
# https://learn.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops#job-to-job-dependencies-within-one-stage
condition: |
and(
eq(variables['Build.Reason'], 'PullRequest'),
1 change: 0 additions & 1 deletion .azure-pipelines/stages.yml
@@ -1,4 +1,3 @@

parameters:
## Build stages
# NB: all stages _must_ depend on `env`
65 changes: 42 additions & 23 deletions .bazelrc
@@ -55,6 +55,7 @@ build --incompatible_config_setting_private_default_visibility
build --incompatible_enforce_config_setting_visibility

test --test_verbose_timeout_warnings
test --experimental_ui_max_stdouterr_bytes=11712829 #default 1048576

# Allow tags to influence execution requirements
common --experimental_allow_tags_propagation
@@ -64,7 +65,8 @@ common --experimental_allow_tags_propagation
build:linux --copt=-fdebug-types-section
build:linux --copt=-fPIC
build:linux --copt=-Wno-deprecated-declarations
build:linux --cxxopt=-std=c++17 --host_cxxopt=-std=c++17
build:linux --cxxopt=-std=c++20 --host_cxxopt=-std=c++20
build:linux --cxxopt=-fsized-deallocation --host_cxxopt=-fsized-deallocation
build:linux --conlyopt=-fexceptions
build:linux --fission=dbg,opt
build:linux --features=per_object_debug_info
@@ -83,15 +85,19 @@ build:sanitizer --linkopt -ldl

# Common flags for Clang
build:clang --action_env=BAZEL_COMPILER=clang
build:clang --action_env=CC=clang --action_env=CXX=clang++
build:clang --linkopt=-fuse-ld=lld
build:clang --action_env=CC=clang --host_action_env=CC=clang
build:clang --action_env=CXX=clang++ --host_action_env=CXX=clang++

# Flags for Clang + PCH
build:clang-pch --spawn_strategy=local
build:clang-pch --define=ENVOY_CLANG_PCH=1

# Use gold linker for gcc compiler.
build:gcc --linkopt=-fuse-ld=gold
build:gcc --test_env=HEAPCHECK=
build:gcc --action_env=BAZEL_COMPILER=gcc
build:gcc --action_env=CC=gcc --action_env=CXX=g++

# Clang-tidy
# TODO(phlax): enable this, its throwing some errors as well as finding more issues
@@ -102,7 +108,6 @@ build:clang-tidy --build_tag_filters=-notidy
build:clang-tidy --build_tag_filters=-notidy

# Basic ASAN/UBSAN that works for gcc
build:asan --action_env=ENVOY_ASAN=1
build:asan --config=sanitizer
# ASAN install its signal handler, disable ours so the stacktrace will be printed by ASAN
build:asan --define signal_trace=disabled
@@ -125,14 +130,21 @@ build:asan --copt -O1
build:asan --copt -fno-optimize-sibling-calls

# Clang ASAN/UBSAN
build:clang-asan --config=clang
build:clang-asan --config=asan
build:clang-asan --linkopt -fuse-ld=lld
build:clang-asan --linkopt --rtlib=compiler-rt
build:clang-asan --linkopt --unwindlib=libgcc
build:clang-asan-common --config=clang
build:clang-asan-common --config=asan
build:clang-asan-common --linkopt -fuse-ld=lld
build:clang-asan-common --linkopt --rtlib=compiler-rt
build:clang-asan-common --linkopt --unwindlib=libgcc

build:clang-asan --config=clang-asan-common
build:clang-asan --linkopt=-l:libclang_rt.ubsan_standalone.a
build:clang-asan --linkopt=-l:libclang_rt.ubsan_standalone_cxx.a
build:clang-asan --action_env=ENVOY_UBSAN_VPTR=1
build:clang-asan --copt=-fsanitize=vptr,function
build:clang-asan --linkopt=-fsanitize=vptr,function

# macOS
build:macos --cxxopt=-std=c++17 --host_cxxopt=-std=c++17
build:macos --cxxopt=-std=c++20 --host_cxxopt=-std=c++20
build:macos --action_env=PATH=/opt/homebrew/bin:/opt/local/bin:/usr/local/bin:/usr/bin:/bin
build:macos --host_action_env=PATH=/opt/homebrew/bin:/opt/local/bin:/usr/local/bin:/usr/bin:/bin
build:macos --define tcmalloc=disabled
@@ -190,10 +202,11 @@ build:libc++ --action_env=BAZEL_CXXOPTS=-stdlib=libc++
build:libc++ --action_env=BAZEL_LINKLIBS=-l%:libc++.a:-l%:libc++abi.a
build:libc++ --action_env=BAZEL_LINKOPTS=-lm:-pthread
build:libc++ --define force_libcpp=enabled
build:clang-libc++ --config=libc++

build:libc++20 --config=libc++
# gRPC has a lot of deprecated-enum-enum-conversion warning. Remove once it is addressed
build:libc++20 --cxxopt=-std=c++20 --copt=-Wno-error=deprecated-enum-enum-conversion
build:libc++20 --copt=-Wno-error=deprecated-enum-enum-conversion

# Optimize build for binary size reduction.
build:sizeopt -c opt --copt -Os
@@ -226,7 +239,8 @@ build:coverage --instrumentation_filter="^//source(?!/common/quic/platform)[/:],
build:coverage --remote_download_minimal
build:coverage --define=tcmalloc=gperftools
build:coverage --define=no_debug_info=1
build:coverage --linkopt=-Wl,-s
# `--no-relax` is required for coverage to not err with `relocation R_X86_64_REX_GOTPCRELX`
build:coverage --linkopt=-Wl,-s,--no-relax
build:coverage --test_env=ENVOY_IP_TEST_VERSIONS=v4only

build:test-coverage --test_arg="-l trace"
@@ -314,6 +328,7 @@ build:remote-clang-libc++ --config=remote
build:remote-clang-libc++ --config=rbe-toolchain-clang-libc++

build:remote-gcc --config=remote
build:remote-gcc --config=gcc
build:remote-gcc --config=rbe-toolchain-gcc

build:remote-asan --config=remote
@@ -357,7 +372,7 @@ build:compile-time-options --@envoy//source/extensions/filters/http/kill_request

# Docker sandbox
# NOTE: Update this from https://github.com/envoyproxy/envoy-build-tools/blob/main/toolchains/rbe_toolchains_config.bzl#L8
build:docker-sandbox --experimental_docker_image=envoyproxy/envoy-build-ubuntu:fd9ec000fdd72d5c5e4e4ef16db4f9103058779e@sha256:1386a26f687826850ba488d66a6cd5337c5941b3b8793d08cfa6f9df12aa2fcf
build:docker-sandbox --experimental_docker_image=envoyproxy/envoy-build-ubuntu:f94a38f62220a2b017878b790b6ea98a0f6c5f9c@sha256:2dd96b6f43c08ccabd5f4747fce5854f5f96af509b32e5cf6493f136e9833649
build:docker-sandbox --spawn_strategy=docker
build:docker-sandbox --strategy=Javac=docker
build:docker-sandbox --strategy=Closure=docker
@@ -495,6 +510,7 @@ build:rbe-google --config=cache-google

build:rbe-google-bes --bes_backend=grpcs://buildeventservice.googleapis.com
build:rbe-google-bes --bes_results_url=https://source.cloud.google.com/results/invocations/
build:rbe-google-bes --bes_upload_mode=fully_async

# RBE (Engflow mobile)
build:rbe-engflow --google_default_credentials=false
@@ -507,18 +523,21 @@ build:rbe-engflow --grpc_keepalive_time=30s
build:rbe-engflow --remote_timeout=3600s
build:rbe-engflow --bes_timeout=3600s
build:rbe-engflow --bes_upload_mode=fully_async

build:rbe-envoy-engflow --google_default_credentials=false
build:rbe-envoy-engflow --remote_cache=grpcs://morganite.cluster.engflow.com
build:rbe-engflow --nolegacy_important_outputs

build:cache-envoy-engflow --google_default_credentials=false
build:cache-envoy-engflow --remote_cache=grpcs://morganite.cluster.engflow.com
build:cache-envoy-engflow --remote_timeout=3600s
build:cache-envoy-engflow --credential_helper=*.engflow.com=%workspace%/bazel/engflow-bazel-credential-helper.sh
build:cache-envoy-engflow --grpc_keepalive_time=30s
build:bes-envoy-engflow --bes_backend=grpcs://morganite.cluster.engflow.com/
build:bes-envoy-engflow --bes_results_url=https://morganite.cluster.engflow.com/invocation/
build:bes-envoy-engflow --bes_timeout=3600s
build:bes-envoy-engflow --bes_upload_mode=fully_async
build:rbe-envoy-engflow --config=cache-envoy-engflow
build:rbe-envoy-engflow --config=bes-envoy-engflow
build:rbe-envoy-engflow --remote_executor=grpcs://morganite.cluster.engflow.com
build:rbe-envoy-engflow --bes_backend=grpcs://morganite.cluster.engflow.com/
build:rbe-envoy-engflow --bes_results_url=https://morganite.cluster.engflow.com/invocation/
build:rbe-envoy-engflow --credential_helper=*.engflow.com=%workspace%/bazel/engflow-bazel-credential-helper.sh
build:rbe-envoy-engflow --grpc_keepalive_time=30s
build:rbe-envoy-engflow --remote_timeout=3600s
build:rbe-envoy-engflow --bes_timeout=3600s
build:rbe-envoy-engflow --bes_upload_mode=fully_async
build:rbe-envoy-engflow --remote_default_exec_properties=container-image=docker://docker.io/envoyproxy/envoy-build-ubuntu:fd9ec000fdd72d5c5e4e4ef16db4f9103058779e@sha256:1386a26f687826850ba488d66a6cd5337c5941b3b8793d08cfa6f9df12aa2fcf
build:rbe-envoy-engflow --remote_default_exec_properties=container-image=docker://docker.io/envoyproxy/envoy-build-ubuntu:f94a38f62220a2b017878b790b6ea98a0f6c5f9c@sha256:2dd96b6f43c08ccabd5f4747fce5854f5f96af509b32e5cf6493f136e9833649

#############################################################################
# debug: Various Bazel debugging flags
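Several of the .bazelrc changes above factor shared flags into base configs that other configs inherit via `--config=...` (for example `clang-asan-common`, and `cache-envoy-engflow`/`bes-envoy-engflow` composed into `rbe-envoy-engflow`). As a rough sketch of how that expansion composes, with made-up config names (not from the commit):

```
# Illustrative .bazelrc only; the config names here are hypothetical.
build:base-common --copt=-fno-omit-frame-pointer
build:base-common --copt=-O1

# --config=base-common expands to all build:base-common lines above, so
# `bazel build --config=derived //target` gets the shared flags plus its own.
build:derived --config=base-common
build:derived --copt=-fsanitize=address
build:derived --linkopt=-fsanitize=address
```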
2 changes: 1 addition & 1 deletion .bazelversion
@@ -1 +1 @@
6.3.2
6.5.0