From 5980c5ee9eaa9ca29fbaa030a03f4bfa00448d48 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 22 Jan 2025 14:36:11 +0100 Subject: [PATCH 01/16] Updated runner labels for the jobs --- .github/workflows/release_branches.yml | 48 +++++++++++++------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 0c7db644d025..661cb410906a 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -31,7 +31,7 @@ on: # yamllint disable-line rule:truthy jobs: # DockerHubPushAarch64: - # runs-on: [self-hosted, style-checker-aarch64] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none-aarch64] # steps: # - name: Check out repository code # uses: ClickHouse/checkout@v1 @@ -46,7 +46,7 @@ jobs: # path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json # Former DockerHubPushAmd64 DockerHubPush: - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code uses: ClickHouse/checkout@v1 @@ -68,7 +68,7 @@ jobs: path: ${{ runner.temp }}/docker_images_check/changed_images.json # DockerHubPush: # needs: [DockerHubPushAmd64, DockerHubPushAarch64] - # runs-on: [self-hosted, style-checker] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] # steps: # - name: Check out repository code # uses: ClickHouse/checkout@v1 @@ -93,7 +93,7 @@ jobs: # path: ${{ runner.temp }}/changed_images.json CompatibilityCheck: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -129,7 +129,7 @@ jobs: ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - runs-on: [self-hosted, builder] + runs-on: [self-hosted, altinity-type-ccx53, altinity-on-demand, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -225,7 +225,7 @@ jobs: needs: - BuilderDebRelease # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code uses: ClickHouse/checkout@v1 @@ -254,7 +254,7 @@ jobs: needs: - BuilderDebRelease # - BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] if: ${{ success() || failure() }} steps: - name: Set envs @@ -295,7 +295,7 @@ jobs: # needs: # # - BuilderBinDarwin # - BuilderBinDarwinAarch64 - # runs-on: [self-hosted, style-checker] + # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] # if: ${{ success() || failure() }} # steps: # - name: Set envs @@ -337,7 +337,7 @@ jobs: # - BuilderBinDarwinAarch64 - BuilderDebRelease # - 
BuilderDebAarch64 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code uses: ClickHouse/checkout@v1 @@ -352,7 +352,7 @@ jobs: ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash,altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -427,7 +427,7 @@ jobs: ############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-in-ash,altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -502,7 +502,7 @@ jobs: ############################################################################################# IntegrationTestsRelease0: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -539,7 +539,7 @@ jobs: sudo rm -fr "$TEMP_PATH" IntegrationTestsRelease1: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Set envs run: | @@ -590,7 +590,7 @@ jobs: matrix: SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -650,7 +650,7 @@ jobs: matrix: STORAGE: [minio, aws_s3, gcs] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -715,7 +715,7 @@ jobs: clickhouse_keeper_ssl: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -772,7 +772,7 @@ jobs: ./*/*/_instances/*.log key_value: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, 
altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -832,7 +832,7 @@ jobs: matrix: SUITE: [authentication, external_user_directory, role_mapping] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -888,7 +888,7 @@ jobs: parquet: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -945,7 +945,7 @@ jobs: parquet_minio: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -1003,7 +1003,7 @@ jobs: parquet_aws: needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -1069,7 +1069,7 @@ jobs: matrix: STORAGE: [minio, aws_s3, gcs] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -1138,7 +1138,7 @@ jobs: matrix: STORAGE: [minio, s3amazon, s3gcs] needs: [regression_start] - runs-on: [self-hosted, stress-tester] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} @@ -1266,7 +1266,7 @@ jobs: - parquet_aws - s3 - tiered_storage_s3 - runs-on: [self-hosted, style-checker] + runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code uses: ClickHouse/checkout@v1 From 122a1cd046fbfeb842f1d19b603da905362d3536 Mon Sep 17 00:00:00 2001 From: Vasily Nemkov Date: Wed, 22 Jan 2025 14:23:57 +0000 Subject: [PATCH 02/16] Attempt to fix binary-builder docker image --- docker/packager/binary/Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index de9310732a8b..b51bced31ac5 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -9,6 +9,7 @@ ENV CXX=clang++-${LLVM_VERSION} # libtapi is required to support .tbh format from recent 
MacOS SDKs
 RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \
     && cd apple-libtapi \
+    && git checkout 15dfc2a8c9a2a89d06ff227560a69f5265b692f9 \
     && INSTALLPREFIX=/cctools ./build.sh \
     && ./install.sh \
     && cd .. \
@@ -17,6 +18,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \
 # Build and install tools for cross-linking to Darwin (x86-64)
 RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \
     && cd cctools-port/cctools \
+    && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \
     && ./configure --prefix=/cctools --with-libtapi=/cctools \
         --target=x86_64-apple-darwin \
     && make install -j$(nproc) \
@@ -26,6 +28,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \
 # Build and install tools for cross-linking to Darwin (aarch64)
 RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \
     && cd cctools-port/cctools \
+    && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \
     && ./configure --prefix=/cctools --with-libtapi=/cctools \
         --target=aarch64-apple-darwin \
     && make install -j$(nproc) \

From b244a6215ac30752985da8e19dece92e1cf7d059 Mon Sep 17 00:00:00 2001
From: MyroTk <44327070+MyroTk@users.noreply.github.com>
Date: Wed, 22 Jan 2025 11:09:44 -0500
Subject: [PATCH 03/16] attempting to fix binary-builder

---
 docker/packager/binary/Dockerfile | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile
index b51bced31ac5..9e0b795fcf0e 100644
--- a/docker/packager/binary/Dockerfile
+++ b/docker/packager/binary/Dockerfile
@@ -7,7 +7,7 @@ ENV CC=clang-${LLVM_VERSION}
 ENV CXX=clang++-${LLVM_VERSION}
 
 # libtapi is required to support .tbh format from recent MacOS SDKs
-RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \
+RUN git clone https://github.com/tpoechtrager/apple-libtapi.git \
     && cd apple-libtapi \
     && git checkout 15dfc2a8c9a2a89d06ff227560a69f5265b692f9 \
     && INSTALLPREFIX=/cctools ./build.sh \
@@ -16,7 +16,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \
     && rm -rf apple-libtapi
 
 # Build and install tools for cross-linking to Darwin (x86-64)
-RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \
+RUN git clone https://github.com/tpoechtrager/cctools-port.git \
     && cd cctools-port/cctools \
     && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \
     && ./configure --prefix=/cctools --with-libtapi=/cctools \
@@ -26,7 +26,7 @@ RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \
     && rm -rf cctools-port
 
 # Build and install tools for cross-linking to Darwin (aarch64)
-RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \
+RUN git clone https://github.com/tpoechtrager/cctools-port.git \
     && cd cctools-port/cctools \
     && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \
     && ./configure --prefix=/cctools --with-libtapi=/cctools \

From 8529fe5d15ea67e4781351388507e95b83e2b42d Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 15:44:48 -0500
Subject: [PATCH 04/16] Fix docker images

---
 docker/images.json                | 5 -----
 docker/test/stateless/Dockerfile  | 12 ++++++++----
 tests/ci/tests/docker_images.json | 4 ----
 3 files changed, 8 insertions(+), 13 deletions(-)

diff --git a/docker/images.json b/docker/images.json
index 3e5f28aca883..0bbabfe9eb1f 100644
--- a/docker/images.json
+++ b/docker/images.json
@@ -130,11 +130,6 @@
             "docker/test/keeper-jepsen"
         ]
     },
-    "docker/test/integration/kerberized_hadoop": {
-        "only_amd64": true,
-        "name": "altinityinfra/kerberized-hadoop",
-        "dependent": []
-    },
     "docker/test/sqlancer": {
         "name": "altinityinfra/sqlancer-test",
         "dependent": []
diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile
index 58ac030e46c1..3ce249767dc9 100644
--- a/docker/test/stateless/Dockerfile
+++ b/docker/test/stateless/Dockerfile
@@ -1,6 +1,6 @@
 # rebuild in #33610
 # docker build -t clickhouse/stateless-test .
-ARG FROM_TAG=latest
+ARG FROM_TAG=600-b244a6215ac30752985da8e19dece92e1cf7d059-amd64
 FROM altinityinfra/test-base:$FROM_TAG
 
 ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"
@@ -52,7 +52,7 @@ RUN mkdir -p /tmp/clickhouse-odbc-tmp \
     && odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
     && rm -rf /tmp/clickhouse-odbc-tmp
 
-ENV TZ=Europe/Moscow
+ENV TZ=Europe/Amsterdam
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
 
 ENV NUM_TRIES=1
@@ -71,7 +71,7 @@ RUN arch=${TARGETARCH:-amd64} \
     && chmod +x ./mc ./minio
 
 
-RUN wget 'https://dlcdn.apache.org/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
+RUN wget --no-verbose 'https://archive.apache.org/dist/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
     && tar -xvf hadoop-3.3.1.tar.gz \
     && rm -rf hadoop-3.3.1.tar.gz
@@ -79,9 +79,13 @@ ENV MINIO_ROOT_USER="clickhouse"
 ENV MINIO_ROOT_PASSWORD="clickhouse"
 ENV EXPORT_S3_STORAGE_POLICIES=1
 
-RUN npm install -g azurite
+# TODO: check against 3.29.0, but 23.8.11 had azurite@3.28.0
+RUN npm install -g azurite@3.29.0 \
+    && npm install -g tslib@2.6.2
+
 COPY run.sh /
 COPY setup_minio.sh /
 COPY setup_hdfs_minicluster.sh /
+
 CMD ["/bin/bash", "/run.sh"]
diff --git a/tests/ci/tests/docker_images.json b/tests/ci/tests/docker_images.json
index 53ad258f6ec9..466bae288c56 100644
--- a/tests/ci/tests/docker_images.json
+++ b/tests/ci/tests/docker_images.json
@@ -108,10 +108,6 @@
             "docker/test/keeper-jepsen"
         ]
     },
-    "docker/test/integration/kerberized_hadoop": {
-        "name": "altinityinfra/kerberized-hadoop",
-        "dependent": []
-    },
     "docker/test/sqlancer": {
         "name": "altinityinfra/sqlancer-test",
         "dependent": []

From aca046109300259c46d7c0f7083c9bde5f7ff27a Mon Sep 17 00:00:00 2001
From: Stuart <146047128+strtgbb@users.noreply.github.com>
Date: Wed, 22 Jan 2025 16:21:02 -0500
Subject: [PATCH 05/16] pin altinityinfra/kerberized-hadoop tag

---
 .../runner/compose/docker_compose_kerberized_hdfs.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml b/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml
index 365821b3f5ea..b163936460c2 100644
--- a/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml
+++ b/docker/test/integration/runner/compose/docker_compose_kerberized_hdfs.yml
@@ -4,7 +4,8 @@ services:
   kerberizedhdfs1:
     cap_add:
       - DAC_READ_SEARCH
-    image: altinityinfra/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest}
+    # image: altinityinfra/kerberized-hadoop:${DOCKER_KERBERIZED_HADOOP_TAG:-latest}
+    image: altinityinfra/kerberized-hadoop:0-7fb126d648460c159657a337b2f0cc24fbbce2ee-amd64
     hostname: kerberizedhdfs1
     restart: always
     volumes:

From 20f89b78ee87516b0971d005b341759d4658c564 Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 19:02:24 -0500
Subject: [PATCH 06/16] test fixes

---
 .github/workflows/regression.yml | 623
++++++++++++++++++++ .github/workflows/release_branches.yml | 775 ++++--------------------- tests/ci/integration_test_check.py | 1 - tests/integration/ci-runner.py | 1 - tests/integration/runner | 2 - 5 files changed, 728 insertions(+), 674 deletions(-) create mode 100644 .github/workflows/regression.yml diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml new file mode 100644 index 000000000000..73728451316e --- /dev/null +++ b/.github/workflows/regression.yml @@ -0,0 +1,623 @@ +name: Regression test workflow - Release +'on': + workflow_call: + inputs: + runner_type: + description: the label of runner to use, can be a simple string or a comma-separated list + required: true + type: string + commit: + description: commit hash of the regression tests. + required: true + type: string + arch: + description: arch to run the tests on. + required: true + type: string + timeout_minutes: + description: Maximum number of minutes to let workflow run before GitHub cancels it. + default: 210 + type: number + build_sha: + description: commit sha of the workflow run for artifact upload. + required: true + type: string + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + submodules: + description: if the submodules should be checked out + required: false + type: boolean + default: false + additional_envs: + description: additional ENV variables to setup the job + type: string + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + AWS_SECRET_ACCESS_KEY: + description: the access key to the aws param store. + required: true + AWS_ACCESS_KEY_ID: + description: the access key id to the aws param store. + required: true + AWS_DEFAULT_REGION: + description: the region of the aws param store. + required: true + AWS_REPORT_KEY_ID: + description: aws s3 key id used for regression test reports. + required: true + AWS_REPORT_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression test reports. + required: true + AWS_REPORT_REGION: + description: aws s3 region used for regression test reports. + required: true + DOCKER_USERNAME: + description: username of the docker user. + required: true + DOCKER_PASSWORD: + description: password to the docker user. + required: true + REGRESSION_AWS_S3_BUCKET: + description: aws s3 bucket used for regression tests. + required: true + REGRESSION_AWS_S3_KEY_ID: + description: aws s3 key id used for regression tests. + required: true + REGRESSION_AWS_S3_SECRET_ACCESS_KEY: + description: aws s3 secret access key used for regression tests. + required: true + REGRESSION_AWS_S3_REGION: + description: aws s3 region used for regression tests. + required: true + REGRESSION_GCS_KEY_ID: + description: gcs key id used for regression tests. + required: true + REGRESSION_GCS_KEY_SECRET: + description: gcs key secret used for regression tests. + required: true + REGRESSION_GCS_URI: + description: gcs uri used for regression tests. 
+ required: true + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }} + CHECKS_DATABASE_USER: ${{ secrets.CHECKS_DATABASE_USER }} + CHECKS_DATABASE_PASSWORD: ${{ secrets.CHECKS_DATABASE_PASSWORD }} + args: --test-to-end + --no-colors + --local + --collect-service-logs + --output classic + --parallel 1 + --log raw.log + --with-analyzer + artifacts: builds + artifact_paths: | + ./report.html + ./*.log.txt + ./*.log + ./*.html + ./*/_instances/*.log + ./*/_instances/*/logs/*.log + ./*/*/_instances/*/logs/*.log + ./*/*/_instances/*.log + build_sha: ${{ inputs.build_sha }} + pr_number: ${{ github.event.number }} + event_name: ${{ github.event_name }} + +jobs: + runner_labels_setup: + name: Compute proper runner labels for the rest of the jobs + runs-on: ubuntu-latest + outputs: + runner_labels: ${{ steps.setVariables.outputs.runner_labels }} + steps: + - id: setVariables + name: Prepare runner_labels variables for the later steps + run: | + + # Prepend self-hosted + input="self-hosted, ${input}" + + # Remove all whitespace + input="$(echo ${input} | tr -d [:space:])" + # Make something like a JSON array from comma-separated list + input="[ '${input//\,/\'\, \'}' ]" + + echo "runner_labels=$input" >> ${GITHUB_OUTPUT} + env: + input: ${{ inputs.runner_type }} + + Common: + strategy: + fail-fast: false + matrix: + SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, data_types, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, key_value, lightweight_delete, memory, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || 
EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Alter: + strategy: + fail-fast: false + matrix: + ONLY: [replace, attach, move] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=alter + STORAGE=/${{ matrix.ONLY }}_partition + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u alter/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --only "/alter/${{ matrix.ONLY }} partition/*" + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: alter-${{ matrix.ONLY }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + Benchmark: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ontime_benchmark + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/benchmark.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ 
secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: benchmark-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ClickHouseKeeperSSL: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + SUITE=clickhouse_keeper + STORAGE=/ssl + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --ssl + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-ssl-artifacts + path: ${{ env.artifact_paths }} + + LDAP: + strategy: + fail-fast: false + matrix: + SUITE: [authentication, external_user_directory, role_mapping] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=ldap/${{ matrix.SUITE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ 
env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ldap-${{ matrix.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + Parquet: + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + ParquetS3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=parquet + 
STORAGE=${{ matrix.STORAGE}} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --storage ${{ matrix.STORAGE }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ env.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths }} + + S3: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, aws_s3, gcs, azure] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=s3 + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --storage ${{ matrix.STORAGE }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }} + --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --azure-account-name ${{ secrets.AZURE_ACCOUNT_NAME }} + --azure-storage-key ${{ secrets.AZURE_STORAGE_KEY }} + --azure-container ${{ secrets.AZURE_CONTAINER_NAME }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ 
env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} + + TieredStorage: + strategy: + fail-fast: false + matrix: + STORAGE: [minio, s3amazon, s3gcs] + needs: [runner_labels_setup] + runs-on: ${{ fromJson(needs.runner_labels_setup.outputs.runner_labels) }} + timeout-minutes: ${{ inputs.timeout_minutes }} + steps: + - name: Checkout regression repo + uses: actions/checkout@v4 + with: + repository: Altinity/clickhouse-regression + ref: ${{ inputs.commit }} + - name: Set envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{ runner.temp }}/reports_dir + SUITE=tiered_storage + STORAGE=/${{ matrix.STORAGE }} + EOF + - name: Download json reports + uses: actions/download-artifact@v4 + with: + path: ${{ env.REPORTS_PATH }} + name: build_report_package_${{ inputs.arch }} + - name: Rename reports + run: | + mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json + - name: Setup + run: .github/setup.sh + - name: Get deb url + run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV + - name: Run ${{ env.SUITE }} suite + run: EXITCODE=0; + python3 + -u ${{ env.SUITE }}/regression.py + --clickhouse-binary-path ${{ env.clickhouse_path }} + --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }} + --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }} + --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/ + --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }} + --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }} + --gcs-uri ${{ secrets.REGRESSION_GCS_URI }} + --with-${{ matrix.STORAGE }} + --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)" + ${{ env.args }} || EXITCODE=$?; + .github/add_link_to_logs.sh; + exit $EXITCODE + - name: Create and upload logs + if: always() + run: .github/create_and_upload_logs.sh 1 + - uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-${{ inputs.arch }}-artifacts + path: ${{ env.artifact_paths}} diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 661cb410906a..9c4c119bca93 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -6,11 +6,6 @@ env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - REGRESSION_RESULTS_URL: ${{github.event.number}}/${GITHUB_SHA}/testflows - REGRESSION_COMMON_COMMIT: 
1108c52f249af64885c255f39192ba3cc4c145ab - REGRESSION_PARQUET_COMMIT: 63a15b5dfc55badefcf4b869296e3ec99ca08141 - REGRESSION_KEY_VALUE_COMMIT: e072060fba19d3f81a96f4c5cbe9c5d0b1dcfa9d - on: # yamllint disable-line rule:truthy pull_request: @@ -34,7 +29,7 @@ jobs: # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none-aarch64] # steps: # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # - name: Images check # run: | # cd "$GITHUB_WORKSPACE/tests/ci" @@ -49,7 +44,7 @@ jobs: runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] steps: - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Images check @@ -62,7 +57,7 @@ jobs: run: | mv ${{ runner.temp }}/docker_images_check/changed_images_amd64.json ${{ runner.temp }}/docker_images_check/changed_images.json - name: Upload images files to artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: changed_images path: ${{ runner.temp }}/docker_images_check/changed_images.json @@ -71,14 +66,14 @@ jobs: # runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] # steps: # - name: Check out repository code - # uses: ClickHouse/checkout@v1 + # uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 # - name: Download changed aarch64 images - # uses: actions/download-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: changed_images_aarch64 # path: ${{ runner.temp }} # - name: Download changed amd64 images - # uses: actions/download-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: changed_images_amd64 # path: ${{ runner.temp }} @@ -103,11 +98,11 @@ jobs: REPORTS_PATH=${{runner.temp}}/reports_dir EOF - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true - name: Download json reports - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: ${{ env.REPORTS_PATH }} - name: CompatibilityCheck @@ -142,14 +137,14 @@ jobs: CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable EOF - name: Download changed images - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: changed_images path: ${{ env.IMAGES_PATH }} - name: Trust My Directory run: git config --global --add safe.directory * # https://stackoverflow.com/a/71940133 - name: Check out repository code - uses: ClickHouse/checkout@v1 + uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6 with: clear-repository: true submodules: true @@ -163,7 +158,7 @@ jobs: cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - name: Upload build URLs to artifacts if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ env.BUILD_URLS }} path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json @@ -189,12 +184,12 @@ jobs: # BUILD_NAME=package_aarch64 # EOF # - name: Download changed images - # uses: actions/download-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: changed_images # path: ${{ runner.temp 
}}/images_path
 #       - name: Check out repository code
-#         uses: ClickHouse/checkout@v1
+#         uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
 #         with:
 #           fetch-depth: 0 # otherwise we will have no info about contributors
 #       - name: Build
@@ -228,7 +223,7 @@
     runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
     steps:
       - name: Check out repository code
-        uses: ClickHouse/checkout@v1
+        uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
         with:
           clear-repository: true
           fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
@@ -267,11 +262,11 @@
           NEEDS_DATA_PATH=${{runner.temp}}/needs.json
           EOF
       - name: Download json reports
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
          path: ${{ env.REPORTS_PATH }}
       - name: Check out repository code
-        uses: ClickHouse/checkout@v1
+        uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
         with:
           clear-repository: true
       - name: Report Builder
@@ -307,11 +302,11 @@
 #           NEEDS_DATA_PATH=${{runner.temp}}/needs.json
 #           EOF
 #       - name: Download json reports
-#         uses: actions/download-artifact@v3
+#         uses: actions/download-artifact@v4
 #         with:
 #           path: ${{ env.REPORTS_PATH }}
 #       - name: Check out repository code
-#         uses: ClickHouse/checkout@v1
+#         uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
 #         with:
 #           clear-repository: true
 #       - name: Report Builder
@@ -340,7 +335,7 @@
     runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
     steps:
       - name: Check out repository code
-        uses: ClickHouse/checkout@v1
+        uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
         with:
           clear-repository: true
       - name: Mark Commit Release Ready
@@ -364,13 +359,28 @@
           KILL_TIMEOUT=10800
           EOF
       - name: Download json reports
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
       - name: Check out repository code
-        uses: ClickHouse/checkout@v1
+        uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
         with:
           clear-repository: true
+      - name: Docker IPv6 configuration
+        shell: bash
+        run: |
+          # make sure docker uses proper IPv6 config
+          sudo touch /etc/docker/daemon.json
+          sudo chown ubuntu:ubuntu /etc/docker/daemon.json
+          sudo cat <<EOT > /etc/docker/daemon.json
+          {
+            "ipv6": true,
+            "fixed-cidr-v6": "2001:3984:3989::/64"
+          }
+          EOT
+          sudo chown root:root /etc/docker/daemon.json
+          sudo systemctl restart docker
+          sudo systemctl status docker
       - name: Functional test
         run: |
           sudo rm -fr "$TEMP_PATH"
@@ -400,11 +410,11 @@
 #           KILL_TIMEOUT=10800
 #           EOF
 #       - name: Download json reports
-#         uses: actions/download-artifact@v3
+#         uses: actions/download-artifact@v4
 #         with:
 #           path: ${{ env.REPORTS_PATH }}
 #       - name: Check out repository code
-#         uses: ClickHouse/checkout@v1
+#         uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
 #         with:
 #           clear-repository: true
 #       - name: Functional test
@@ -439,13 +449,28 @@
           KILL_TIMEOUT=3600
           EOF
       - name: Download json reports
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
       - name: Check out repository code
-        uses: ClickHouse/checkout@v1
+        uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
         with:
           clear-repository: true
+      - name: Docker IPv6 configuration
+        shell: bash
+        run: |
+          # make sure docker uses proper IPv6 config
+          sudo touch /etc/docker/daemon.json
+          sudo chown ubuntu:ubuntu /etc/docker/daemon.json
+          sudo cat <<EOT > /etc/docker/daemon.json
+          {
+            "ipv6": true,
+            "fixed-cidr-v6": "2001:3984:3989::/64"
+          }
+          EOT
+          sudo chown root:root /etc/docker/daemon.json
+          sudo systemctl restart docker
+          sudo systemctl status docker
       - name: Functional test
         run: |
           sudo rm -fr "$TEMP_PATH"
@@ -475,11 +500,11 @@
 #           KILL_TIMEOUT=3600
 #           EOF
 #       - name: Download json reports
-#         uses: actions/download-artifact@v3
+#         uses: actions/download-artifact@v4
 #         with:
 #           path: ${{ env.REPORTS_PATH }}
 #       - name: Check out repository code
-#         uses: ClickHouse/checkout@v1
+#         uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
 #         with:
 #           clear-repository: true
 #       - name: Functional test
@@ -515,13 +540,28 @@
           RUN_BY_HASH_TOTAL=2
           EOF
       - name: Download json reports
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
       - name: Check out repository code
-        uses: ClickHouse/checkout@v1
+        uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
         with:
           clear-repository: true
+      - name: Docker IPv6 configuration
+        shell: bash
+        run: |
+          # make sure docker uses proper IPv6 config
+          sudo touch /etc/docker/daemon.json
+          sudo chown ubuntu:ubuntu /etc/docker/daemon.json
+          sudo cat <<EOT > /etc/docker/daemon.json
+          {
+            "ipv6": true,
+            "fixed-cidr-v6": "2001:db8:1::/64"
+          }
+          EOT
+          sudo chown root:root /etc/docker/daemon.json
+          sudo systemctl restart docker
+          sudo systemctl status docker
       - name: Integration test
         run: |
           sudo rm -fr "$TEMP_PATH"
@@ -552,13 +592,28 @@
           RUN_BY_HASH_TOTAL=2
           EOF
       - name: Download json reports
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
       - name: Check out repository code
-        uses: ClickHouse/checkout@v1
+        uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
         with:
           clear-repository: true
+      - name: Docker IPv6 configuration
+        shell: bash
+        run: |
+          # make sure docker uses proper IPv6 config
+          sudo touch /etc/docker/daemon.json
+          sudo chown ubuntu:ubuntu /etc/docker/daemon.json
+          sudo cat <<EOT > /etc/docker/daemon.json
+          {
+            "ipv6": true,
+            "fixed-cidr-v6": "2001:db8:1::/64"
+          }
+          EOT
+          sudo chown root:root /etc/docker/daemon.json
+          sudo systemctl restart docker
+          sudo systemctl status docker
       - name: Integration test
         run: |
           sudo rm -fr "$TEMP_PATH"
@@ -577,628 +632,17 @@
 #############################################################################################
 ##################################### REGRESSION TESTS ######################################
 #############################################################################################
-  regression_start:
-    ## Not depending on the tests above since they can fail at any given moment.
+ RegressionTestsRelease: needs: [BuilderDebRelease] - runs-on: ubuntu-latest - steps: - - run: true - - regression_common: - strategy: - fail-fast: false - matrix: - SUITE: [aes_encryption, aggregate_functions, atomic_insert, base_58, clickhouse_keeper, datetime64_extended_range, disk_level_encryption, dns, engines, example, extended_precision_data_types, kafka, kerberos, lightweight_delete, data_types, part_moves_between_shards, rbac, selects, session_timezone, ssl_server, tiered_storage, window_functions] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=${{ matrix.SUITE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV - - name: Run ${{ env.SUITE }} suite - run: python3 - -u ${{ env.SUITE }}/regression.py - --clickhouse-binary-path ${{ env.clickhouse_binary_path }} - --test-to-end - --local - --collect-service-logs - --output classic - --parallel 1 - --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)" - --log raw.log - - name: Create and upload logs - if: always() - run: .github/create_and_upload_logs.sh 1 - - uses: actions/upload-artifact@v3 - if: always() - with: - name: ${{ env.SUITE }}-artifacts - path: | - ./report.html - ./*.log.txt - ./*.log - ./*.html - ./*/_instances/*.log - ./*/_instances/*/logs/*.log - ./*/*/_instances/*/logs/*.log - ./*/*/_instances/*.log - - benchmark: - strategy: - fail-fast: false - matrix: - STORAGE: [minio, aws_s3, gcs] - needs: [regression_start] - runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none] - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }} - steps: - - name: Checkout regression repo - uses: actions/checkout@v3 - with: - repository: Altinity/clickhouse-regression - ref: ${{ env.REGRESSION_COMMON_COMMIT }} - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - REPORTS_PATH=${{runner.temp}}/reports_dir - SUITE=ontime_benchmark - STORAGE=/${{ matrix.STORAGE }} - artifacts=builds - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Setup - run: .github/setup.sh - - name: Get deb url - run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} 
--github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/benchmark.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --storage ${{ matrix.STORAGE }}
-          --gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
-          --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }}
-          --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }}
-          --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }}
-          --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }}
-          --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
-          --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-minio-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-
-  clickhouse_keeper_ssl:
-    needs: [regression_start]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-          ref: ${{ env.REGRESSION_COMMON_COMMIT }}
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=clickhouse_keeper
-          STORAGE=/ssl
-          artifacts=builds
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --ssl
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-  key_value:
-    needs: [regression_start]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-          ref: ${{ env.REGRESSION_KEY_VALUE_COMMIT }}
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=key_value
-          artifacts=public
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-
-  ldap:
-    strategy:
-      fail-fast: false
-      matrix:
-        SUITE: [authentication, external_user_directory, role_mapping]
-    needs: [regression_start]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-          ref: ${{ env.REGRESSION_COMMON_COMMIT }}
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=ldap/${{ matrix.SUITE }}
-          artifacts=builds
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ldap-authentication-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-
-  parquet:
-    needs: [regression_start]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-          ref: ${{ env.REGRESSION_PARQUET_COMMIT }}
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=parquet
-          STORAGE=/no_s3
-          artifacts=public
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-
-  parquet_minio:
-    needs: [regression_start]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-          ref: ${{ env.REGRESSION_PARQUET_COMMIT }}
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=parquet
-          STORAGE=/minio
-          artifacts=public
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-          --storage minio
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-minio-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-
-  parquet_aws:
-    needs: [regression_start]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-          ref: ${{ env.REGRESSION_PARQUET_COMMIT }}
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=parquet
-          STORAGE=/aws_s3
-          artifacts=public
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-          --storage aws_s3
-          --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }}
-          --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }}
-          --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
-          --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-aws_s3-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-
-  s3:
-    strategy:
-      fail-fast: false
-      matrix:
-        STORAGE: [minio, aws_s3, gcs]
-    needs: [regression_start]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-          ref: ${{ env.REGRESSION_COMMON_COMMIT }}
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=s3
-          STORAGE=/${{ matrix.STORAGE }}
-          artifacts=builds
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-          --storage ${{ matrix.STORAGE }}
-          --gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
-          --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }}
-          --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }}
-          --aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }}
-          --aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }}
-          --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
-          --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
-
-  tiered_storage_s3:
-    strategy:
-      fail-fast: false
-      matrix:
-        STORAGE: [minio, s3amazon, s3gcs]
-    needs: [regression_start]
-    runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
-    env:
-      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_REPORT_KEY_ID }}
-      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_REPORT_SECRET_ACCESS_KEY }}
-      AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
-    steps:
-      - name: Checkout regression repo
-        uses: actions/checkout@v3
-        with:
-          repository: Altinity/clickhouse-regression
-          ref: ${{ env.REGRESSION_COMMON_COMMIT }}
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          SUITE=tiered_storage
-          STORAGE=/${{ matrix.STORAGE }}
-          artifacts=builds
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Setup
-        run: .github/setup.sh
-      - name: Get deb url
-        run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
-      - name: Run ${{ env.SUITE }} suite
-        run: python3
-          -u ${{ env.SUITE }}/regression.py
-          --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
-          --test-to-end
-          --local
-          --collect-service-logs
-          --output classic
-          --parallel 1
-          --attr project="${GITHUB_REPOSITORY}" project.id="${GITHUB_REPOSITORY_ID}" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="${GITHUB_ACTOR}" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="${GITHUB_RUN_ID}" job.url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" arch="$(uname -i)"
-          --log raw.log
-          --aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
-          --aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
-          --aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/
-          --gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }}
-          --gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }}
-          --gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
-          --with-${{ matrix.STORAGE }}
-      - name: Create and upload logs
-        if: always()
-        run: .github/create_and_upload_logs.sh 1
-      - uses: actions/upload-artifact@v3
-        if: always()
-        with:
-          name: ${{ env.SUITE }}-${{ matrix.STORAGE }}-artifacts
-          path: |
-            ./report.html
-            ./*.log.txt
-            ./*.log
-            ./*.html
-            ./*/_instances/*.log
-            ./*/_instances/*/logs/*.log
-            ./*/*/_instances/*/logs/*.log
-            ./*/*/_instances/*.log
+    if: ${{ !failure() && !cancelled() }}
+    uses: ./.github/workflows/regression.yml
+    secrets: inherit
+    with:
+      runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression
+      commit: 217a4fde343586d50229ff5e24295a02412d1d98
+      arch: release
+      build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+      timeout_minutes: 300
 
   SignRelease:
     needs: [BuilderDebRelease]
@@ -1214,9 +658,9 @@ jobs:
         run: |
           sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
       - name: Download json reports
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
       - name: Sign release
@@ -1256,20 +700,11 @@ jobs:
       - IntegrationTestsRelease1
       - CompatibilityCheck
       - SignRelease
-      - regression_common
-      - benchmark
-      - clickhouse_keeper_ssl
-      - key_value
-      - ldap
-      - parquet
-      - parquet_minio
-      - parquet_aws
-      - s3
-      - tiered_storage_s3
+      - RegressionTestsRelease
     runs-on: [self-hosted, altinity-on-demand, altinity-type-cpx51, altinity-image-x86-snapshot-22.04-amd, altinity-startup-snapshot, altinity-setup-none]
    steps:
       - name: Check out repository code
-        uses: ClickHouse/checkout@v1
+        uses: Altinity/checkout@19599efdf36c4f3f30eb55d5bb388896faea69f6
        with:
          clear-repository: true
      - name: Finish label
diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py
index bd1513f87f22..957a782c779a 100644
--- a/tests/ci/integration_test_check.py
+++ b/tests/ci/integration_test_check.py
@@ -44,7 +44,6 @@
     "altinityinfra/postgresql-java-client",
     "altinityinfra/integration-test",
     "altinityinfra/kerberos-kdc",
-    "altinityinfra/kerberized-hadoop",
     "altinityinfra/integration-helper",
     "altinityinfra/dotnet-client",
 ]
diff --git a/tests/integration/ci-runner.py b/tests/integration/ci-runner.py
index 5f9b0619deca..adc3b3b94f49 100755
--- a/tests/integration/ci-runner.py
+++ b/tests/integration/ci-runner.py
@@ -282,7 +282,6 @@ def get_images_names():
         "altinityinfra/integration-helper",
         "altinityinfra/integration-test",
         "altinityinfra/integration-tests-runner",
-        "altinityinfra/kerberized-hadoop",
         "altinityinfra/kerberos-kdc",
         "altinityinfra/mysql-golang-client",
"altinityinfra/mysql-java-client", diff --git a/tests/integration/runner b/tests/integration/runner index 00022b511ba9..b26f87646dc2 100755 --- a/tests/integration/runner +++ b/tests/integration/runner @@ -321,8 +321,6 @@ if __name__ == "__main__": env_tags += "-e {}={} ".format("DOCKER_HELPER_TAG", tag) elif image == "altinityinfra/integration-test": env_tags += "-e {}={} ".format("DOCKER_BASE_TAG", tag) - elif image == "altinityinfra/kerberized-hadoop": - env_tags += "-e {}={} ".format("DOCKER_KERBERIZED_HADOOP_TAG", tag) elif image == "altinityinfra/kerberos-kdc": env_tags += "-e {}={} ".format("DOCKER_KERBEROS_KDC_TAG", tag) else: From 582abc8fa9ffeb22a5bf942469b77459e6c561a0 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 19:46:01 -0500 Subject: [PATCH 07/16] push docker images and make them amd only --- .github/workflows/release_branches.yml | 4 ++-- tests/ci/docker_server.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9c4c119bca93..9e6180ca9623 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -230,9 +230,9 @@ jobs: - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --no-push \ + python3 docker_server.py --release-type head push \ --image-repo altinity/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head --no-push \ + python3 docker_server.py --release-type head push \ --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py index 93d21ebc614d..ac1abd9831d1 100644 --- a/tests/ci/docker_server.py +++ b/tests/ci/docker_server.py @@ -31,7 +31,7 @@ ) TEMP_PATH = p.join(RUNNER_TEMP, "docker_images_check") -BUCKETS = {"amd64": "package_release", "arm64": "package_aarch64"} +BUCKETS = {"amd64": "package_release"} git = Git(ignore_no_tags=True) From 987cc8c56bb2bd9f0610d45d725367af757c713f Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 19:47:03 -0500 Subject: [PATCH 08/16] typo fix --- .github/workflows/release_branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 9e6180ca9623..d328de3d7ec0 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -230,9 +230,9 @@ jobs: - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head push \ + python3 docker_server.py --release-type head --push \ --image-repo altinity/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head push \ + python3 docker_server.py --release-type head --push \ --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() From 2b236e50773697a9cc3046b9756bd6404ff09b89 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 20:18:37 -0500 Subject: [PATCH 09/16] add retry script --- .github/retry.sh | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100755 .github/retry.sh diff --git a/.github/retry.sh b/.github/retry.sh new file mode 100755 index 000000000000..566c2cf11315 --- /dev/null +++ b/.github/retry.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# Execute command until exitcode is 0 
or +# maximum number of retries is reached +# Example: +# ./retry +retries=$1 +delay=$2 +command="${@:3}" +exitcode=0 +try=0 +until [ "$try" -ge $retries ] +do + echo "$command" + eval "$command" + exitcode=$? + if [ $exitcode -eq 0 ]; then + break + fi + try=$((try+1)) + sleep $2 +done +exit $exitcode From e40a197b932b90de09b23fd1c8a715a792b276d0 Mon Sep 17 00:00:00 2001 From: MyroTk Date: Wed, 22 Jan 2025 21:23:15 -0500 Subject: [PATCH 10/16] skip docker image rebuild --- .github/workflows/release_branches.yml | 12 +++--- tests/ci/docker_images_check.py | 54 ++++++++++++++++++-------- 2 files changed, 43 insertions(+), 23 deletions(-) diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index d328de3d7ec0..073a191ba583 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -35,7 +35,7 @@ jobs: # cd "$GITHUB_WORKSPACE/tests/ci" # python3 docker_images_check.py --suffix aarch64 # - name: Upload images files to artifacts - # uses: actions/upload-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: changed_images_aarch64 # path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json @@ -82,7 +82,7 @@ jobs: # cd "$GITHUB_WORKSPACE/tests/ci" # python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 # - name: Upload images files to artifacts - # uses: actions/upload-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: changed_images # path: ${{ runner.temp }}/changed_images.json @@ -201,7 +201,7 @@ jobs: # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" # cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" # - name: Upload build URLs to artifacts - # uses: actions/upload-artifact@v2 + # uses: actions/download-artifact@v4 # with: # name: ${{ env.BUILD_URLS }} # path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json @@ -230,9 +230,9 @@ jobs: - name: Check docker altinity/clickhouse-server building run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 docker_server.py --release-type head --push \ + python3 docker_server.py --release-type head \ --image-repo altinity/clickhouse-server --image-path docker/server - python3 docker_server.py --release-type head --push \ + python3 docker_server.py --release-type head \ --image-repo altinity/clickhouse-keeper --image-path docker/keeper - name: Cleanup if: always() @@ -672,7 +672,7 @@ jobs: cd "$GITHUB_WORKSPACE/tests/ci" python3 sign_release.py - name: Upload signed hashes - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: signed-hashes path: ${{ env.TEMP_PATH }}/*.gpg diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index 2f3859380cf7..affe37684555 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -106,23 +106,43 @@ def get_changed_docker_images( str(files_changed), ) - # Rebuild all images - changed_images = [DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False)) for dockerfile_dir, image_description in images_dict.items()] - - # for dockerfile_dir, image_description in images_dict.items(): - # for f in files_changed: - # if f.startswith(dockerfile_dir): - # name = image_description["name"] - # only_amd64 = image_description.get("only_amd64", False) - # logging.info( - # "Found changed file '%s' which affects " - # "docker image '%s' with path '%s'", - # f, - # name, - # dockerfile_dir, - # ) - # changed_images.append(DockerImage(dockerfile_dir, name, only_amd64)) - # break 
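Note on the helper above: it takes a retry count, a delay in seconds, and the command as the remaining arguments, looping until the command exits 0 or the attempts run out. A rough Python equivalent of the same retry-with-delay idea, purely as an illustration and not part of any patch (the pulled image name is invented):

import subprocess
import sys
import time

def retry(retries: int, delay: float, *command: str) -> int:
    # mirror .github/retry.sh: repeat until the command exits 0 or attempts run out
    exitcode = 1
    for _ in range(retries):
        exitcode = subprocess.run(command).returncode
        if exitcode == 0:
            break
        time.sleep(delay)
    return exitcode

if __name__ == "__main__":
    # hypothetical usage: 3 attempts with a 10-second pause between them
    sys.exit(retry(3, 10, "docker", "pull", "altinityinfra/clickhouse-server:head"))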
From e40a197b932b90de09b23fd1c8a715a792b276d0 Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 21:23:15 -0500
Subject: [PATCH 10/16] skip docker image rebuild

---
 .github/workflows/release_branches.yml | 12 +++---
 tests/ci/docker_images_check.py        | 54 ++++++++++++++++++--------
 2 files changed, 43 insertions(+), 23 deletions(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index d328de3d7ec0..073a191ba583 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -35,7 +35,7 @@ jobs:
 #          cd "$GITHUB_WORKSPACE/tests/ci"
 #          python3 docker_images_check.py --suffix aarch64
 #      - name: Upload images files to artifacts
-#        uses: actions/upload-artifact@v2
+#        uses: actions/download-artifact@v4
 #        with:
 #          name: changed_images_aarch64
 #          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
@@ -82,7 +82,7 @@ jobs:
 #          cd "$GITHUB_WORKSPACE/tests/ci"
 #          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
 #      - name: Upload images files to artifacts
-#        uses: actions/upload-artifact@v2
+#        uses: actions/download-artifact@v4
 #        with:
 #          name: changed_images
 #          path: ${{ runner.temp }}/changed_images.json
@@ -201,7 +201,7 @@ jobs:
 #          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
 #          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
 #      - name: Upload build URLs to artifacts
-#        uses: actions/upload-artifact@v2
+#        uses: actions/download-artifact@v4
 #        with:
 #          name: ${{ env.BUILD_URLS }}
 #          path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json
@@ -230,9 +230,9 @@ jobs:
       - name: Check docker altinity/clickhouse-server building
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_server.py --release-type head --push \
+          python3 docker_server.py --release-type head \
             --image-repo altinity/clickhouse-server --image-path docker/server
-          python3 docker_server.py --release-type head --push \
+          python3 docker_server.py --release-type head \
             --image-repo altinity/clickhouse-keeper --image-path docker/keeper
       - name: Cleanup
         if: always()
@@ -672,7 +672,7 @@ jobs:
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 sign_release.py
       - name: Upload signed hashes
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
           name: signed-hashes
           path: ${{ env.TEMP_PATH }}/*.gpg
diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py
index 2f3859380cf7..affe37684555 100644
--- a/tests/ci/docker_images_check.py
+++ b/tests/ci/docker_images_check.py
@@ -106,23 +106,43 @@ def get_changed_docker_images(
         str(files_changed),
     )
 
-    # Rebuild all images
-    changed_images = [DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False)) for dockerfile_dir, image_description in images_dict.items()]
-
-    # for dockerfile_dir, image_description in images_dict.items():
-    #     for f in files_changed:
-    #         if f.startswith(dockerfile_dir):
-    #             name = image_description["name"]
-    #             only_amd64 = image_description.get("only_amd64", False)
-    #             logging.info(
-    #                 "Found changed file '%s' which affects "
-    #                 "docker image '%s' with path '%s'",
-    #                 f,
-    #                 name,
-    #                 dockerfile_dir,
-    #             )
-    #             changed_images.append(DockerImage(dockerfile_dir, name, only_amd64))
-    #             break
+    # Find changed images
+    all_images = []
+    changed_images = []
+    for dockerfile_dir, image_description in images_dict.items():
+        all_images.append(DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False)))
+        for f in files_changed:
+            if f.startswith(dockerfile_dir):
+                name = image_description["name"]
+                only_amd64 = image_description.get("only_amd64", False)
+                logging.info(
+                    "Found changed file '%s' which affects "
+                    "docker image '%s' with path '%s'",
+                    f,
+                    name,
+                    dockerfile_dir,
+                )
+                changed_images.append(DockerImage(dockerfile_dir, name, only_amd64))
+                break
+
+    # Rebuild all images on push, release, or scheduled run
+    if pr_info.number in [0,1]:
+        changed_images = all_images
+
+    else:
+        # Rebuild all on opened PR
+        if pr_info.event['action'] in ['opened', 'reopened']:
+            changed_images = all_images
+
+        # Check that image for the PR exists
+        elif pr_info.event['action'] == 'synchronize':
+            unchanged_images = [
+                image for image in all_images if image not in changed_images
+            ]
+            logging.info(f"Unchanged images: {unchanged_images}")
+            for image in unchanged_images:
+                if subprocess.run(f"docker manifest inspect {image.repo}:{pr_info.number}", shell=True).returncode != 0:
+                    changed_images.append(image)
 
     # The order is important: dependents should go later than bases, so that
     # they are built with updated base versions.
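The rebuild-skipping logic introduced above leans on `docker manifest inspect` exiting non-zero when a tag has never been pushed to the registry. A minimal standalone sketch of that existence check, assuming the Docker CLI is installed (the repository and tag below are illustrative, not taken from the patch):

import subprocess

def image_exists(repo: str, tag: str) -> bool:
    # `docker manifest inspect` returns a non-zero exit code when the tag is absent
    result = subprocess.run(
        f"docker manifest inspect {repo}:{tag}",
        shell=True,
        capture_output=True,
    )
    return result.returncode == 0

# hypothetical: rebuild only when the PR-tagged image is missing
if not image_exists("altinityinfra/integration-test", "12345"):
    print("image missing, scheduling rebuild")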
From edb90737c6094932c67b2ac1b1523490c11551c6 Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 22:33:22 -0500
Subject: [PATCH 11/16] remove push-by-digest

---
 tests/ci/docker_images_check.py | 2 +-
 tests/ci/docker_server.py       | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py
index affe37684555..f1e506aa38a1 100644
--- a/tests/ci/docker_images_check.py
+++ b/tests/ci/docker_images_check.py
@@ -141,7 +141,7 @@ def get_changed_docker_images(
             ]
             logging.info(f"Unchanged images: {unchanged_images}")
             for image in unchanged_images:
-                if subprocess.run(f"docker manifest inspect {image.repo}:{pr_info.number}", shell=True).returncode != 0:
+                if subprocess.run(f"docker manifest inspect {image.repo}:{pr_info.number}-amd64", shell=True).returncode != 0:
                     changed_images.append(image)
 
     # The order is important: dependents should go later than bases, so that
diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py
index ac1abd9831d1..ecfd030360c4 100644
--- a/tests/ci/docker_server.py
+++ b/tests/ci/docker_server.py
@@ -242,7 +242,7 @@ def build_and_push_image(
     init_args = ["docker", "buildx", "build"]
     if push:
         init_args.append("--push")
-        init_args.append("--output=type=image,push-by-digest=true")
+        init_args.append("--output=type=image")
         init_args.append(f"--tag={image.repo}")
     else:
         init_args.append("--output=type=docker")

From 4f3b2cd63c9fda1f5ecd82d50c5eaeecdf9386ed Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 22:38:58 -0500
Subject: [PATCH 12/16] minor regression fix

---
 .github/workflows/regression.yml | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml
index 73728451316e..d2b144f5ba09 100644
--- a/.github/workflows/regression.yml
+++ b/.github/workflows/regression.yml
@@ -165,7 +165,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
@@ -216,7 +216,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
@@ -268,7 +268,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
@@ -323,7 +323,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
@@ -374,7 +374,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
@@ -420,7 +420,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
@@ -471,7 +471,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
@@ -527,7 +527,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
@@ -589,7 +589,7 @@ jobs:
         uses: actions/download-artifact@v4
         with:
           path: ${{ env.REPORTS_PATH }}
-          name: build_report_package_${{ inputs.arch }}
+          name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
           mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json

From b7b0fd92d4aecf4f9829cd9e1f7fe3954979d2c3 Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Wed, 22 Jan 2025 23:53:11 -0500
Subject: [PATCH 13/16] update build suffix and add hotfix to version helper

---
 .github/workflows/regression.yml       | 18 ++++-----
 .github/workflows/release_branches.yml |  2 +-
 tests/ci/docker_images_check.py        | 54 ++++++++------------------
 tests/ci/version_helper.py             |  3 +-
 4 files changed, 29 insertions(+), 48 deletions(-)

diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml
index d2b144f5ba09..db8d893ec430 100644
--- a/.github/workflows/regression.yml
+++ b/.github/workflows/regression.yml
@@ -168,7 +168,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
@@ -219,7 +219,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
@@ -271,7 +271,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
@@ -326,7 +326,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
@@ -377,7 +377,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
@@ -423,7 +423,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
@@ -474,7 +474,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
@@ -530,7 +530,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
@@ -592,7 +592,7 @@ jobs:
           name: build_urls_package_${{ inputs.arch }}
       - name: Rename reports
         run: |
-          mv ${{ env.REPORTS_PATH }}/build_report_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
+          mv ${{ env.REPORTS_PATH }}/build_urls_package_*.json ${{ env.REPORTS_PATH }}/build_report_package_${{ inputs.arch }}.json
       - name: Setup
         run: .github/setup.sh
       - name: Get deb url
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 073a191ba583..986556a1b3de 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -134,7 +134,7 @@ jobs:
           REPO_COPY=${{runner.temp}}/build_check/ClickHouse
           CACHES_PATH=${{runner.temp}}/../ccaches
           BUILD_NAME=package_release
-          CLICKHOUSE_STABLE_VERSION_SUFFIX=altinitystable
+          CLICKHOUSE_STABLE_VERSION_SUFFIX=altinityhotfix
           EOF
      - name: Download changed images
        uses: actions/download-artifact@v4
diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py
index f1e506aa38a1..2f3859380cf7 100644
--- a/tests/ci/docker_images_check.py
+++ b/tests/ci/docker_images_check.py
@@ -106,43 +106,23 @@ def get_changed_docker_images(
         str(files_changed),
     )
 
-    # Find changed images
-    all_images = []
-    changed_images = []
-    for dockerfile_dir, image_description in images_dict.items():
-        all_images.append(DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False)))
-        for f in files_changed:
-            if f.startswith(dockerfile_dir):
-                name = image_description["name"]
-                only_amd64 = image_description.get("only_amd64", False)
-                logging.info(
-                    "Found changed file '%s' which affects "
-                    "docker image '%s' with path '%s'",
-                    f,
-                    name,
-                    dockerfile_dir,
-                )
-                changed_images.append(DockerImage(dockerfile_dir, name, only_amd64))
-                break
-
-    # Rebuild all images on push, release, or scheduled run
-    if pr_info.number in [0,1]:
-        changed_images = all_images
-
-    else:
-        # Rebuild all on opened PR
-        if pr_info.event['action'] in ['opened', 'reopened']:
-            changed_images = all_images
-
-        # Check that image for the PR exists
-        elif pr_info.event['action'] == 'synchronize':
-            unchanged_images = [
-                image for image in all_images if image not in changed_images
-            ]
-            logging.info(f"Unchanged images: {unchanged_images}")
-            for image in unchanged_images:
-                if subprocess.run(f"docker manifest inspect {image.repo}:{pr_info.number}-amd64", shell=True).returncode != 0:
-                    changed_images.append(image)
+    # Rebuild all images
+    changed_images = [DockerImage(dockerfile_dir, image_description["name"], image_description.get("only_amd64", False)) for dockerfile_dir, image_description in images_dict.items()]
+
+    # for dockerfile_dir, image_description in images_dict.items():
+    #     for f in files_changed:
+    #         if f.startswith(dockerfile_dir):
+    #             name = image_description["name"]
+    #             only_amd64 = image_description.get("only_amd64", False)
+    #             logging.info(
+    #                 "Found changed file '%s' which affects "
+    #                 "docker image '%s' with path '%s'",
+    #                 f,
+    #                 name,
+    #                 dockerfile_dir,
+    #             )
+    #             changed_images.append(DockerImage(dockerfile_dir, name, only_amd64))
+    #             break
 
     # The order is important: dependents should go later than bases, so that
     # they are built with updated base versions.
diff --git a/tests/ci/version_helper.py b/tests/ci/version_helper.py
index bb9630c99afb..3b8b4bd04cc2 100755
--- a/tests/ci/version_helper.py
+++ b/tests/ci/version_helper.py
@@ -175,7 +175,8 @@ class VersionType:
     PRESTABLE = "prestable"
     STABLE = "altinitystable"
     TESTING = "testing"
-    VALID = (TESTING, PRESTABLE, STABLE, LTS)
+    HOTFIX = "altinityhotfix"
+    VALID = (TESTING, PRESTABLE, STABLE, LTS, HOTFIX)
 
 
 def validate_version(version: str):

From d2e6a05e986503991ec687ffc13bc80af7259deb Mon Sep 17 00:00:00 2001
From: MyroTk
Date: Thu, 23 Jan 2025 01:14:49 -0500
Subject: [PATCH 14/16] docker login

---
 .github/workflows/release_branches.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 986556a1b3de..e29a7eb92b62 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -229,6 +229,7 @@ jobs:
           fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
       - name: Check docker altinity/clickhouse-server building
         run: |
+          docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_server.py --release-type head \
             --image-repo altinity/clickhouse-server --image-path docker/server

From d6cf5c44b102590c914446c24da681ea54a591ab Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Thu, 23 Jan 2025 09:59:07 +0100
Subject: [PATCH 15/16] Fixed pushing docker images of the server

Pushing as `altinityinfra/clickhouse-server`
---
 .github/workflows/release_branches.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index e29a7eb92b62..d23baf2df93e 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -227,14 +227,14 @@ jobs:
         with:
           clear-repository: true
           fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
-      - name: Check docker altinity/clickhouse-server building
+      - name: Check docker altinityinfra/clickhouse-server building
         run: |
           docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_server.py --release-type head \
-            --image-repo altinity/clickhouse-server --image-path docker/server
+            --image-repo altinityinfra/clickhouse-server --image-path docker/server
           python3 docker_server.py --release-type head \
-            --image-repo altinity/clickhouse-keeper --image-path docker/keeper
+            --image-repo altinityinfra/clickhouse-keeper --image-path docker/keeper
       - name: Cleanup
         if: always()
         run: |

From 046b4ff1cf1521628ea47e1c01eac5423e77580b Mon Sep 17 00:00:00 2001
From: Vasily Nemkov
Date: Thu, 23 Jan 2025 10:59:31 +0100
Subject: [PATCH 16/16] Pushing docker image with exact version

---
 tests/ci/docker_server.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py
index ecfd030360c4..018f3d232cde 100644
--- a/tests/ci/docker_server.py
+++ b/tests/ci/docker_server.py
@@ -210,6 +210,7 @@ def gen_tags(version: ClickHouseVersion, release_type: str) -> List[str]:
             tags.append(".".join(parts[: i + 1]))
     elif release_type == "head":
         tags.append(release_type)
+        tags.append(version.string)
     else:
         raise ValueError(f"{release_type} is not valid release part")
     return tags
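With the last change above, a "head" release is tagged with both the moving "head" tag and the exact version string, so the same image can be pulled by either name. A minimal sketch of the resulting tag list under that assumption (the version value is invented for illustration, not taken from the patches):

# rough illustration of gen_tags() behaviour after the change
def gen_tags_sketch(version_string: str, release_type: str) -> list:
    tags = []
    if release_type == "head":
        tags.append(release_type)      # moving tag, e.g. :head
        tags.append(version_string)    # exact version tag, e.g. :24.3.5.48.altinityhotfix
    return tags

print(gen_tags_sketch("24.3.5.48.altinityhotfix", "head"))  # ['head', '24.3.5.48.altinityhotfix']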