diff --git a/.github/workflows-source/release-testing.yml b/.github/workflows-source/release-testing.yml index f047516356b..64bbc090a55 100644 --- a/.github/workflows-source/release-testing.yml +++ b/.github/workflows-source/release-testing.yml @@ -1,11 +1,12 @@ name: Release Testing on: - push: - branches: - - "hotfix-*" - - "rc--*" - workflow_dispatch: + # push: + # branches: + # - "hotfix-*" + # - "rc--*" + # workflow_dispatch: + pull_request: # new commits interrupt any running workflow on the same branch concurrency: @@ -59,10 +60,10 @@ anchors: DOCKER_HUB_PASSWORD_RO: ${{ secrets.DOCKER_HUB_PASSWORD_RO }} jobs: - ci-main: - name: CI Main - uses: ./.github/workflows/ci-main.yml - secrets: inherit + # ci-main: + # name: CI Main + # uses: ./.github/workflows/ci-main.yml + # secrets: inherit bazel-system-test-nightly: name: Bazel System Test Nightly @@ -76,7 +77,7 @@ jobs: uses: ./.github/actions/bazel-test-all/ with: BAZEL_COMMAND: "test" - BAZEL_TARGETS: "//rs/tests/..." + BAZEL_TARGETS: //rs/tests/cross_chain:ic_xc_ledger_suite_orchestrator_test_head_nns BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" BAZEL_EXTRA_ARGS: "--keep_going --test_tag_filters=system_test_nightly" HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} @@ -92,128 +93,128 @@ jobs: bazel-bep.pb profile.json - bazel-system-test-staging: - name: Bazel System Test Staging - continue-on-error: True - <<: *dind-large-setup - steps: - - <<: *checkout - - <<: *before-script - - <<: *docker-login - - name: Run Bazel System Test Staging - id: bazel-test-all - uses: ./.github/actions/bazel-test-all/ - with: - BAZEL_COMMAND: "test" - BAZEL_TARGETS: "//rs/tests/..." - BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" - BAZEL_EXTRA_ARGS: "--keep_going --test_tag_filters=system_test_staging" - HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} - - name: Upload bazel-bep - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ github.job }}-bep - retention-days: 14 - if-no-files-found: ignore - compression-level: 9 - path: | - bazel-bep.pb - profile.json + # bazel-system-test-staging: + # name: Bazel System Test Staging + # continue-on-error: True + # <<: *dind-large-setup + # steps: + # - <<: *checkout + # - <<: *before-script + # - <<: *docker-login + # - name: Run Bazel System Test Staging + # id: bazel-test-all + # uses: ./.github/actions/bazel-test-all/ + # with: + # BAZEL_COMMAND: "test" + # BAZEL_TARGETS: "//rs/tests/..." + # BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" + # BAZEL_EXTRA_ARGS: "--keep_going --test_tag_filters=system_test_staging" + # HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} + # - name: Upload bazel-bep + # uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ github.job }}-bep + # retention-days: 14 + # if-no-files-found: ignore + # compression-level: 9 + # path: | + # bazel-bep.pb + # profile.json - bazel-system-test-hotfix: - name: Bazel System Test Hotfix - <<: *dind-large-setup - timeout-minutes: 90 - steps: - - <<: *checkout - - <<: *before-script - - <<: *docker-login - - name: Run Bazel Test All - id: bazel-test-all - uses: ./.github/actions/bazel-test-all/ - with: - BAZEL_COMMAND: "test" - BAZEL_TARGETS: "//rs/tests/..." 
- BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" - BAZEL_EXTRA_ARGS_RULES: "--test_tag_filters=system_test_hotfix" - HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} - - name: Upload bazel-bep - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ github.job }}-bep - retention-days: 14 - if-no-files-found: ignore - compression-level: 9 - path: | - bazel-bep.pb - profile.json + # bazel-system-test-hotfix: + # name: Bazel System Test Hotfix + # <<: *dind-large-setup + # timeout-minutes: 90 + # steps: + # - <<: *checkout + # - <<: *before-script + # - <<: *docker-login + # - name: Run Bazel Test All + # id: bazel-test-all + # uses: ./.github/actions/bazel-test-all/ + # with: + # BAZEL_COMMAND: "test" + # BAZEL_TARGETS: "//rs/tests/..." + # BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" + # BAZEL_EXTRA_ARGS_RULES: "--test_tag_filters=system_test_hotfix" + # HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} + # - name: Upload bazel-bep + # uses: actions/upload-artifact@v4 + # if: always() + # with: + # name: ${{ github.job }}-bep + # retention-days: 14 + # if-no-files-found: ignore + # compression-level: 9 + # path: | + # bazel-bep.pb + # profile.json - dependency-scan-release-cut: - name: Dependency Scan for Release - <<: *dind-large-setup - timeout-minutes: 60 - env: - SHELL_WRAPPER: "/usr/bin/time" - CI_PROJECT_PATH: ${{ github.repository }} - CI_PIPELINE_ID: ${{ github.run_id }} - CI_COMMIT_SHA: ${{ github.sha }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} - SLACK_PSEC_BOT_OAUTH_TOKEN: ${{ secrets.SLACK_PSEC_BOT_OAUTH_TOKEN }} - steps: - - <<: *checkout - - <<: *before-script - - <<: *docker-login - - name: Setup python deps - id: setup-python-deps - shell: bash - run: | - pip3 install --ignore-installed -r requirements.txt - - name: Dependency Scan for Release - id: dependency-scan-release-cut - shell: bash - run: | - set -euo pipefail - export PYTHONPATH=$PWD/ci/src:$PWD/ci/src/dependencies - cd ci/src/dependencies/ - $SHELL_WRAPPER python3 job/bazel_rust_ic_scanner_release_job.py + # dependency-scan-release-cut: + # name: Dependency Scan for Release + # <<: *dind-large-setup + # timeout-minutes: 60 + # env: + # SHELL_WRAPPER: "/usr/bin/time" + # CI_PROJECT_PATH: ${{ github.repository }} + # CI_PIPELINE_ID: ${{ github.run_id }} + # CI_COMMIT_SHA: ${{ github.sha }} + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} + # SLACK_PSEC_BOT_OAUTH_TOKEN: ${{ secrets.SLACK_PSEC_BOT_OAUTH_TOKEN }} + # steps: + # - <<: *checkout + # - <<: *before-script + # - <<: *docker-login + # - name: Setup python deps + # id: setup-python-deps + # shell: bash + # run: | + # pip3 install --ignore-installed -r requirements.txt + # - name: Dependency Scan for Release + # id: dependency-scan-release-cut + # shell: bash + # run: | + # set -euo pipefail + # export PYTHONPATH=$PWD/ci/src:$PWD/ci/src/dependencies + # cd ci/src/dependencies/ + # $SHELL_WRAPPER python3 job/bazel_rust_ic_scanner_release_job.py - setup-guest-os-qualification: - name: Setting up guest os qualification pipeline - <<: *dind-large-setup - outputs: - matrix: ${{ steps.generate.outputs.output }} - steps: - - name: Sparse checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.workflow_run.head_branch }} - sparse-checkout: ".github/scripts/determine-initial-guest-os-versions.py" - - id: generate - name: Fetch beginning versions for qualification - shell: bash - run: | - set -euo 
pipefail - OUTPUT=$(python .github/scripts/determine-initial-guest-os-versions.py) - echo "output=$OUTPUT" >> $GITHUB_OUTPUT + # setup-guest-os-qualification: + # name: Setting up guest os qualification pipeline + # <<: *dind-large-setup + # outputs: + # matrix: ${{ steps.generate.outputs.output }} + # steps: + # - name: Sparse checkout + # uses: actions/checkout@v4 + # with: + # ref: ${{ github.event.workflow_run.head_branch }} + # sparse-checkout: ".github/scripts/determine-initial-guest-os-versions.py" + # - id: generate + # name: Fetch beginning versions for qualification + # shell: bash + # run: | + # set -euo pipefail + # OUTPUT=$(python .github/scripts/determine-initial-guest-os-versions.py) + # echo "output=$OUTPUT" >> $GITHUB_OUTPUT - guest-os-qualification: - name: Qualifying ${{ matrix.version }} -> ${{ github.sha }} - needs: setup-guest-os-qualification - strategy: - matrix: ${{ fromJson(needs.setup-guest-os-qualification.outputs.matrix) }} - <<: *dind-large-setup - steps: - - <<: *checkout - - <<: *before-script - - <<: *docker-login - - name: Run qualification for version ${{ matrix.version }} from the tip of the branch - uses: ./.github/actions/bazel-test-all/ - with: - BAZEL_COMMAND: "test" - BAZEL_TARGETS: "//rs/tests/dre:guest_os_qualification" - BAZEL_CI_CONFIG: "--config=systest --repository_cache=/cache/bazel" - BAZEL_EXTRA_ARGS_RULES: "--test_timeout=7200 --test_env=OLD_VERSION=${{ matrix.version }}" - HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} + # guest-os-qualification: + # name: Qualifying ${{ matrix.version }} -> ${{ github.sha }} + # needs: setup-guest-os-qualification + # strategy: + # matrix: ${{ fromJson(needs.setup-guest-os-qualification.outputs.matrix) }} + # <<: *dind-large-setup + # steps: + # - <<: *checkout + # - <<: *before-script + # - <<: *docker-login + # - name: Run qualification for version ${{ matrix.version }} from the tip of the branch + # uses: ./.github/actions/bazel-test-all/ + # with: + # BAZEL_COMMAND: "test" + # BAZEL_TARGETS: "//rs/tests/dre:guest_os_qualification" + # BAZEL_CI_CONFIG: "--config=systest --repository_cache=/cache/bazel" + # BAZEL_EXTRA_ARGS_RULES: "--test_timeout=7200 --test_env=OLD_VERSION=${{ matrix.version }}" + # HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} diff --git a/.github/workflows/release-testing.yml b/.github/workflows/release-testing.yml index a8304a2c426..03d0c0c5e04 100644 --- a/.github/workflows/release-testing.yml +++ b/.github/workflows/release-testing.yml @@ -1,10 +1,11 @@ name: Release Testing on: - push: - branches: - - "hotfix-*" - - "rc--*" - workflow_dispatch: + # push: + # branches: + # - "hotfix-*" + # - "rc--*" + # workflow_dispatch: + pull_request: # new commits interrupt any running workflow on the same branch concurrency: group: ${{ github.ref }} @@ -22,10 +23,10 @@ env: RUSTFLAGS: "--remap-path-prefix=${CI_PROJECT_DIR}=/ic" BUILDEVENT_DATASET: "github-ci-dfinity" jobs: - ci-main: - name: CI Main - uses: ./.github/workflows/ci-main.yml - secrets: inherit + # ci-main: + # name: CI Main + # uses: ./.github/workflows/ci-main.yml + # secrets: inherit bazel-system-test-nightly: name: Bazel System Test Nightly runs-on: @@ -57,7 +58,7 @@ jobs: uses: ./.github/actions/bazel-test-all/ with: BAZEL_COMMAND: "test" - BAZEL_TARGETS: "//rs/tests/..." 
+ BAZEL_TARGETS: //rs/tests/cross_chain:ic_xc_ledger_suite_orchestrator_test_head_nns BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" BAZEL_EXTRA_ARGS: "--keep_going --test_tag_filters=system_test_nightly" HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} @@ -72,205 +73,127 @@ jobs: path: | bazel-bep.pb profile.json - bazel-system-test-staging: - name: Bazel System Test Staging - continue-on-error: True - runs-on: - group: zh1 - labels: dind-large - container: - image: ghcr.io/dfinity/ic-build@sha256:1c0e901df3c7a97fc440c271881400ce6d2e586e2a89cdc39ec939e3dfe5de76 - options: >- - -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp - timeout-minutes: 180 # 3 hours - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.workflow_run.head_branch }} - - name: Before script - id: before-script - shell: bash - run: | - [ -n "${NODE_NAME:-}" ] && echo "Node: $NODE_NAME" - - name: Login to Dockerhub - shell: bash - run: ./ci/scripts/docker-login.sh - env: - DOCKER_HUB_USER: ${{ vars.DOCKER_HUB_USER }} - DOCKER_HUB_PASSWORD_RO: ${{ secrets.DOCKER_HUB_PASSWORD_RO }} - - name: Run Bazel System Test Staging - id: bazel-test-all - uses: ./.github/actions/bazel-test-all/ - with: - BAZEL_COMMAND: "test" - BAZEL_TARGETS: "//rs/tests/..." - BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" - BAZEL_EXTRA_ARGS: "--keep_going --test_tag_filters=system_test_staging" - HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} - - name: Upload bazel-bep - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ github.job }}-bep - retention-days: 14 - if-no-files-found: ignore - compression-level: 9 - path: | - bazel-bep.pb - profile.json - bazel-system-test-hotfix: - name: Bazel System Test Hotfix - runs-on: - group: zh1 - labels: dind-large - container: - image: ghcr.io/dfinity/ic-build@sha256:1c0e901df3c7a97fc440c271881400ce6d2e586e2a89cdc39ec939e3dfe5de76 - options: >- - -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp - timeout-minutes: 90 - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.workflow_run.head_branch }} - - name: Before script - id: before-script - shell: bash - run: | - [ -n "${NODE_NAME:-}" ] && echo "Node: $NODE_NAME" - - name: Login to Dockerhub - shell: bash - run: ./ci/scripts/docker-login.sh - env: - DOCKER_HUB_USER: ${{ vars.DOCKER_HUB_USER }} - DOCKER_HUB_PASSWORD_RO: ${{ secrets.DOCKER_HUB_PASSWORD_RO }} - - name: Run Bazel Test All - id: bazel-test-all - uses: ./.github/actions/bazel-test-all/ - with: - BAZEL_COMMAND: "test" - BAZEL_TARGETS: "//rs/tests/..." 
- BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" - BAZEL_EXTRA_ARGS_RULES: "--test_tag_filters=system_test_hotfix" - HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} - - name: Upload bazel-bep - uses: actions/upload-artifact@v4 - if: always() - with: - name: ${{ github.job }}-bep - retention-days: 14 - if-no-files-found: ignore - compression-level: 9 - path: | - bazel-bep.pb - profile.json - dependency-scan-release-cut: - name: Dependency Scan for Release - runs-on: - group: zh1 - labels: dind-large - container: - image: ghcr.io/dfinity/ic-build@sha256:1c0e901df3c7a97fc440c271881400ce6d2e586e2a89cdc39ec939e3dfe5de76 - options: >- - -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp - timeout-minutes: 60 - env: - SHELL_WRAPPER: "/usr/bin/time" - CI_PROJECT_PATH: ${{ github.repository }} - CI_PIPELINE_ID: ${{ github.run_id }} - CI_COMMIT_SHA: ${{ github.sha }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} - SLACK_PSEC_BOT_OAUTH_TOKEN: ${{ secrets.SLACK_PSEC_BOT_OAUTH_TOKEN }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.workflow_run.head_branch }} - - name: Before script - id: before-script - shell: bash - run: | - [ -n "${NODE_NAME:-}" ] && echo "Node: $NODE_NAME" - - name: Login to Dockerhub - shell: bash - run: ./ci/scripts/docker-login.sh - env: - DOCKER_HUB_USER: ${{ vars.DOCKER_HUB_USER }} - DOCKER_HUB_PASSWORD_RO: ${{ secrets.DOCKER_HUB_PASSWORD_RO }} - - name: Setup python deps - id: setup-python-deps - shell: bash - run: | - pip3 install --ignore-installed -r requirements.txt - - name: Dependency Scan for Release - id: dependency-scan-release-cut - shell: bash - run: | - set -euo pipefail - export PYTHONPATH=$PWD/ci/src:$PWD/ci/src/dependencies - cd ci/src/dependencies/ - $SHELL_WRAPPER python3 job/bazel_rust_ic_scanner_release_job.py - setup-guest-os-qualification: - name: Setting up guest os qualification pipeline - runs-on: - group: zh1 - labels: dind-large - container: - image: ghcr.io/dfinity/ic-build@sha256:1c0e901df3c7a97fc440c271881400ce6d2e586e2a89cdc39ec939e3dfe5de76 - options: >- - -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp - timeout-minutes: 180 # 3 hours - outputs: - matrix: ${{ steps.generate.outputs.output }} - steps: - - name: Sparse checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.workflow_run.head_branch }} - sparse-checkout: ".github/scripts/determine-initial-guest-os-versions.py" - - id: generate - name: Fetch beginning versions for qualification - shell: bash - run: | - set -euo pipefail - OUTPUT=$(python .github/scripts/determine-initial-guest-os-versions.py) - echo "output=$OUTPUT" >> $GITHUB_OUTPUT - guest-os-qualification: - name: Qualifying ${{ matrix.version }} -> ${{ github.sha }} - needs: setup-guest-os-qualification - strategy: - matrix: ${{ fromJson(needs.setup-guest-os-qualification.outputs.matrix) }} - runs-on: - group: zh1 - labels: dind-large - container: - image: ghcr.io/dfinity/ic-build@sha256:1c0e901df3c7a97fc440c271881400ce6d2e586e2a89cdc39ec939e3dfe5de76 - options: >- - -e NODE_NAME --privileged --cgroupns host -v /cache:/cache -v /var/sysimage:/var/sysimage -v /var/tmp:/var/tmp - timeout-minutes: 180 # 3 hours - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.workflow_run.head_branch }} - - name: Before script - id: before-script - shell: bash - run: 
| - [ -n "${NODE_NAME:-}" ] && echo "Node: $NODE_NAME" - - name: Login to Dockerhub - shell: bash - run: ./ci/scripts/docker-login.sh - env: - DOCKER_HUB_USER: ${{ vars.DOCKER_HUB_USER }} - DOCKER_HUB_PASSWORD_RO: ${{ secrets.DOCKER_HUB_PASSWORD_RO }} - - name: Run qualification for version ${{ matrix.version }} from the tip of the branch - uses: ./.github/actions/bazel-test-all/ - with: - BAZEL_COMMAND: "test" - BAZEL_TARGETS: "//rs/tests/dre:guest_os_qualification" - BAZEL_CI_CONFIG: "--config=systest --repository_cache=/cache/bazel" - BAZEL_EXTRA_ARGS_RULES: "--test_timeout=7200 --test_env=OLD_VERSION=${{ matrix.version }}" - HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} +# bazel-system-test-staging: +# name: Bazel System Test Staging +# continue-on-error: True +# <<: *dind-large-setup +# steps: +# - <<: *checkout +# - <<: *before-script +# - <<: *docker-login +# - name: Run Bazel System Test Staging +# id: bazel-test-all +# uses: ./.github/actions/bazel-test-all/ +# with: +# BAZEL_COMMAND: "test" +# BAZEL_TARGETS: "//rs/tests/..." +# BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" +# BAZEL_EXTRA_ARGS: "--keep_going --test_tag_filters=system_test_staging" +# HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} +# - name: Upload bazel-bep +# uses: actions/upload-artifact@v4 +# if: always() +# with: +# name: ${{ github.job }}-bep +# retention-days: 14 +# if-no-files-found: ignore +# compression-level: 9 +# path: | +# bazel-bep.pb +# profile.json +# bazel-system-test-hotfix: +# name: Bazel System Test Hotfix +# <<: *dind-large-setup +# timeout-minutes: 90 +# steps: +# - <<: *checkout +# - <<: *before-script +# - <<: *docker-login +# - name: Run Bazel Test All +# id: bazel-test-all +# uses: ./.github/actions/bazel-test-all/ +# with: +# BAZEL_COMMAND: "test" +# BAZEL_TARGETS: "//rs/tests/..." 
+# BAZEL_CI_CONFIG: "--config=ci --repository_cache=/cache/bazel" +# BAZEL_EXTRA_ARGS_RULES: "--test_tag_filters=system_test_hotfix" +# HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} +# - name: Upload bazel-bep +# uses: actions/upload-artifact@v4 +# if: always() +# with: +# name: ${{ github.job }}-bep +# retention-days: 14 +# if-no-files-found: ignore +# compression-level: 9 +# path: | +# bazel-bep.pb +# profile.json + +# dependency-scan-release-cut: +# name: Dependency Scan for Release +# <<: *dind-large-setup +# timeout-minutes: 60 +# env: +# SHELL_WRAPPER: "/usr/bin/time" +# CI_PROJECT_PATH: ${{ github.repository }} +# CI_PIPELINE_ID: ${{ github.run_id }} +# CI_COMMIT_SHA: ${{ github.sha }} +# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +# JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} +# SLACK_PSEC_BOT_OAUTH_TOKEN: ${{ secrets.SLACK_PSEC_BOT_OAUTH_TOKEN }} +# steps: +# - <<: *checkout +# - <<: *before-script +# - <<: *docker-login +# - name: Setup python deps +# id: setup-python-deps +# shell: bash +# run: | +# pip3 install --ignore-installed -r requirements.txt +# - name: Dependency Scan for Release +# id: dependency-scan-release-cut +# shell: bash +# run: | +# set -euo pipefail +# export PYTHONPATH=$PWD/ci/src:$PWD/ci/src/dependencies +# cd ci/src/dependencies/ +# $SHELL_WRAPPER python3 job/bazel_rust_ic_scanner_release_job.py + +# setup-guest-os-qualification: +# name: Setting up guest os qualification pipeline +# <<: *dind-large-setup +# outputs: +# matrix: ${{ steps.generate.outputs.output }} +# steps: +# - name: Sparse checkout +# uses: actions/checkout@v4 +# with: +# ref: ${{ github.event.workflow_run.head_branch }} +# sparse-checkout: ".github/scripts/determine-initial-guest-os-versions.py" +# - id: generate +# name: Fetch beginning versions for qualification +# shell: bash +# run: | +# set -euo pipefail +# OUTPUT=$(python .github/scripts/determine-initial-guest-os-versions.py) +# echo "output=$OUTPUT" >> $GITHUB_OUTPUT + +# guest-os-qualification: +# name: Qualifying ${{ matrix.version }} -> ${{ github.sha }} +# needs: setup-guest-os-qualification +# strategy: +# matrix: ${{ fromJson(needs.setup-guest-os-qualification.outputs.matrix) }} +# <<: *dind-large-setup +# steps: +# - <<: *checkout +# - <<: *before-script +# - <<: *docker-login +# - name: Run qualification for version ${{ matrix.version }} from the tip of the branch +# uses: ./.github/actions/bazel-test-all/ +# with: +# BAZEL_COMMAND: "test" +# BAZEL_TARGETS: "//rs/tests/dre:guest_os_qualification" +# BAZEL_CI_CONFIG: "--config=systest --repository_cache=/cache/bazel" +# BAZEL_EXTRA_ARGS_RULES: "--test_timeout=7200 --test_env=OLD_VERSION=${{ matrix.version }}" +# HONEYCOMB_API_TOKEN: ${{ secrets.HONEYCOMB_API_TOKEN }} diff --git a/rs/nervous_system/integration_tests/src/pocket_ic_helpers.rs b/rs/nervous_system/integration_tests/src/pocket_ic_helpers.rs index 3126a0c538d..93ea9384753 100644 --- a/rs/nervous_system/integration_tests/src/pocket_ic_helpers.rs +++ b/rs/nervous_system/integration_tests/src/pocket_ic_helpers.rs @@ -1,20 +1,20 @@ use candid::{Decode, Encode, Nat, Principal}; -use canister_test::Wasm; +use canister_test::{CanisterInstallMode, Wasm}; use ic_base_types::{CanisterId, PrincipalId, SubnetId}; use ic_ledger_core::Tokens; use ic_nervous_system_common::{E8, ONE_DAY_SECONDS}; use ic_nervous_system_common_test_keys::{TEST_NEURON_1_ID, TEST_NEURON_1_OWNER_PRINCIPAL}; +use ic_nervous_system_root::change_canister::ChangeCanisterRequest; use ic_nns_common::pb::v1::{NeuronId, ProposalId}; use 
ic_nns_constants::{ self, ALL_NNS_CANISTER_IDS, GOVERNANCE_CANISTER_ID, LEDGER_CANISTER_ID, LIFELINE_CANISTER_ID, REGISTRY_CANISTER_ID, ROOT_CANISTER_ID, SNS_WASM_CANISTER_ID, }; use ic_nns_governance_api::pb::v1::{ - install_code::CanisterInstallMode, manage_neuron_response, CreateServiceNervousSystem, + manage_neuron, manage_neuron_response, proposal, CreateServiceNervousSystem, ExecuteNnsFunction, GetNeuronsFundAuditInfoRequest, GetNeuronsFundAuditInfoResponse, - InstallCodeRequest, ListNeurons, ListNeuronsResponse, MakeProposalRequest, - ManageNeuronCommandRequest, ManageNeuronRequest, ManageNeuronResponse, NetworkEconomics, - NnsFunction, ProposalActionRequest, ProposalInfo, Topic, + ListNeurons, ListNeuronsResponse, ManageNeuron, ManageNeuronResponse, NetworkEconomics, + NnsFunction, Proposal, ProposalInfo, Topic, }; use ic_nns_test_utils::{ common::{ @@ -23,6 +23,7 @@ use ic_nns_test_utils::{ build_mainnet_registry_wasm, build_mainnet_root_wasm, build_mainnet_sns_wasms_wasm, build_registry_wasm, build_root_wasm, build_sns_wasms_wasm, NnsInitPayloadsBuilder, }, + governance::UpgradeRootProposal, sns_wasm::{ build_archive_sns_wasm, build_governance_sns_wasm, build_index_ng_sns_wasm, build_ledger_sns_wasm, build_mainnet_archive_sns_wasm, build_mainnet_governance_sns_wasm, @@ -166,16 +167,14 @@ pub fn add_wasm_via_nns_proposal( hash: hash.to_vec(), wasm: Some(wasm), }; - let proposal = MakeProposalRequest { + let proposal = Proposal { title: Some(format!("Add WASM for SNS canister type {}", canister_type)), summary: "summary".to_string(), url: "".to_string(), - action: Some(ProposalActionRequest::ExecuteNnsFunction( - ExecuteNnsFunction { - nns_function: NnsFunction::AddSnsWasm as i32, - payload: Encode!(&payload).expect("Error encoding proposal payload"), - }, - )), + action: Some(proposal::Action::ExecuteNnsFunction(ExecuteNnsFunction { + nns_function: NnsFunction::AddSnsWasm as i32, + payload: Encode!(&payload).expect("Error encoding proposal payload"), + })), }; nns::governance::propose_and_wait(pocket_ic, proposal) } @@ -184,13 +183,11 @@ pub fn propose_to_set_network_economics_and_wait( pocket_ic: &PocketIc, network_economics: NetworkEconomics, ) -> Result { - let proposal = MakeProposalRequest { + let proposal = Proposal { title: Some("Set NetworkEconomics.neurons_fund_economics {}".to_string()), summary: "summary".to_string(), url: "".to_string(), - action: Some(ProposalActionRequest::ManageNetworkEconomics( - network_economics, - )), + action: Some(proposal::Action::ManageNetworkEconomics(network_economics)), }; nns::governance::propose_and_wait(pocket_ic, proposal) } @@ -605,19 +602,36 @@ pub fn upgrade_nns_canister_to_tip_of_master_or_panic( return; } + let (payload, nns_function) = if canister_id == ROOT_CANISTER_ID { + let payload = UpgradeRootProposal { + wasm_module: wasm.bytes(), + module_arg: vec![], + stop_upgrade_start: true, + }; + ( + Encode!(&payload).unwrap(), + NnsFunction::NnsRootUpgrade as i32, + ) + } else { + let payload = ChangeCanisterRequest::new(true, CanisterInstallMode::Upgrade, canister_id) + .with_memory_allocation(ic_nns_constants::memory_allocation_of(canister_id)) + .with_wasm(wasm.bytes()); + ( + Encode!(&payload).unwrap(), + NnsFunction::NnsCanisterUpgrade as i32, + ) + }; + println!("Upgrading {} to the latest version.", label); let proposal_info = nns::governance::propose_and_wait( pocket_ic, - MakeProposalRequest { + Proposal { title: Some(format!("Upgrade {} to the latest version.", label)), summary: "".to_string(), url: "".to_string(), - 
action: Some(ProposalActionRequest::InstallCode(InstallCodeRequest { - canister_id: Some(canister_id.get()), - install_mode: Some(CanisterInstallMode::Upgrade as i32), - wasm_module: Some(wasm.bytes()), - arg: Some(vec![]), - skip_stopping_before_installing: None, + action: Some(proposal::Action::ExecuteNnsFunction(ExecuteNnsFunction { + nns_function, + payload, })), }, ) @@ -688,14 +702,14 @@ pub mod nns { pocket_ic: &PocketIc, sender: PrincipalId, neuron_id: NeuronId, - command: ManageNeuronCommandRequest, + command: manage_neuron::Command, ) -> ManageNeuronResponse { let result = pocket_ic .update_call( GOVERNANCE_CANISTER_ID.into(), Principal::from(sender), "manage_neuron", - Encode!(&ManageNeuronRequest { + Encode!(&ManageNeuron { id: Some(neuron_id), command: Some(command), neuron_id_or_subaccount: None @@ -712,12 +726,13 @@ pub mod nns { pub fn propose_and_wait( pocket_ic: &PocketIc, - proposal: MakeProposalRequest, + proposal: Proposal, ) -> Result { let neuron_id = NeuronId { id: TEST_NEURON_1_ID, }; - let command = ManageNeuronCommandRequest::MakeProposal(Box::new(proposal)); + let command: manage_neuron::Command = + manage_neuron::Command::MakeProposal(Box::new(proposal)); let response = manage_neuron( pocket_ic, *TEST_NEURON_1_OWNER_PRINCIPAL, @@ -848,11 +863,11 @@ pub mod nns { ) -> (DeployedSns, ProposalId) { let proposal_info = propose_and_wait( pocket_ic, - MakeProposalRequest { + Proposal { title: Some(format!("Create SNS #{}", sns_instance_label)), summary: "".to_string(), url: "".to_string(), - action: Some(ProposalActionRequest::CreateServiceNervousSystem( + action: Some(proposal::Action::CreateServiceNervousSystem( create_service_nervous_system, )), }, @@ -1152,18 +1167,24 @@ pub mod sns { post_upgrade_running_version.swap_wasm_hash.clone(), ) .wasm; + let upgrade_canister = ChangeCanisterRequest { + stop_before_installing: true, + mode: CanisterInstallMode::Upgrade, + canister_id: CanisterId::unchecked_from_principal(swap_canister_id), + wasm_module: wasm, + arg: vec![], + compute_allocation: None, + memory_allocation: None, + }; nns::governance::propose_and_wait( pocket_ic, - MakeProposalRequest { + Proposal { title: Some("Enable auto-finalization for the Swap canister".to_string()), summary: "".to_string(), url: "".to_string(), - action: Some(ProposalActionRequest::InstallCode(InstallCodeRequest { - canister_id: Some(swap_canister_id), - install_mode: Some(CanisterInstallMode::Upgrade as i32), - wasm_module: Some(wasm), - arg: Some(vec![]), - skip_stopping_before_installing: None, + action: Some(proposal::Action::ExecuteNnsFunction(ExecuteNnsFunction { + nns_function: NnsFunction::NnsCanisterUpgrade as i32, + payload: Encode!(&upgrade_canister).unwrap(), })), }, ) diff --git a/rs/nns/governance/src/governance.rs b/rs/nns/governance/src/governance.rs index 9f36d5c4411..56dceae1b48 100644 --- a/rs/nns/governance/src/governance.rs +++ b/rs/nns/governance/src/governance.rs @@ -397,14 +397,20 @@ impl NnsFunction { fn allowed_when_resources_are_low(&self) -> bool { matches!( self, - NnsFunction::ReviseElectedGuestosVersions | NnsFunction::DeployGuestosToAllSubnetNodes + NnsFunction::NnsRootUpgrade + | NnsFunction::NnsCanisterUpgrade + | NnsFunction::HardResetNnsRootToVersion + | NnsFunction::ReviseElectedGuestosVersions + | NnsFunction::DeployGuestosToAllSubnetNodes ) } fn can_have_large_payload(&self) -> bool { matches!( self, - NnsFunction::NnsCanisterInstall + NnsFunction::NnsCanisterUpgrade + | NnsFunction::NnsCanisterInstall + | NnsFunction::NnsRootUpgrade | 
NnsFunction::HardResetNnsRootToVersion
                 | NnsFunction::AddSnsWasm
         )
@@ -420,8 +426,6 @@ impl NnsFunction {
                 | NnsFunction::UpdateNodesHostosVersion
                 | NnsFunction::BlessReplicaVersion
                 | NnsFunction::RetireReplicaVersion
-                | NnsFunction::NnsCanisterUpgrade
-                | NnsFunction::NnsRootUpgrade
         )
     }
 }
diff --git a/rs/nns/governance/src/governance/tests/mod.rs b/rs/nns/governance/src/governance/tests/mod.rs
index c6ea2428622..2702b7c4d86 100644
--- a/rs/nns/governance/src/governance/tests/mod.rs
+++ b/rs/nns/governance/src/governance/tests/mod.rs
@@ -1588,6 +1588,10 @@ fn test_validate_execute_nns_function() {
             nns_function: NnsFunction::CreateSubnet as i32,
             payload: vec![1u8; PROPOSAL_EXECUTE_NNS_FUNCTION_PAYLOAD_BYTES_MAX],
         },
+        ExecuteNnsFunction {
+            nns_function: NnsFunction::NnsCanisterUpgrade as i32,
+            payload: vec![1u8; PROPOSAL_EXECUTE_NNS_FUNCTION_PAYLOAD_BYTES_MAX + 1],
+        },
         ExecuteNnsFunction {
             nns_function: NnsFunction::IcpXdrConversionRate as i32,
             payload: Encode!(&UpdateIcpXdrConversionRatePayload {
diff --git a/rs/nns/governance/tests/degraded_mode.rs b/rs/nns/governance/tests/degraded_mode.rs
index d7bf205be6c..4ac016f8686 100644
--- a/rs/nns/governance/tests/degraded_mode.rs
+++ b/rs/nns/governance/tests/degraded_mode.rs
@@ -6,21 +6,19 @@ use futures::future::FutureExt;
 use ic_base_types::{CanisterId, PrincipalId};
 use ic_nervous_system_common::{cmc::CMC, ledger::IcpLedger, NervousSystemError};
 use ic_nns_common::pb::v1::NeuronId;
-use ic_nns_constants::GOVERNANCE_CANISTER_ID;
 use ic_nns_governance::{
     governance::{
         Environment, Governance, HeapGrowthPotential, HEAP_SIZE_SOFT_LIMIT_IN_WASM32_PAGES,
     },
     pb::v1::{
         governance_error::ErrorType,
-        install_code::CanisterInstallMode,
         manage_neuron::{
             claim_or_refresh::{By, MemoAndController},
             ClaimOrRefresh, Command,
         },
         manage_neuron_response::Command as CommandResponse,
         neuron, proposal, ExecuteNnsFunction, Governance as GovernanceProto, GovernanceError,
-        InstallCode, ManageNeuron, ManageNeuronResponse, Motion, NetworkEconomics, Neuron,
+        ManageNeuron, ManageNeuronResponse, Motion, NetworkEconomics, Neuron,
         NnsFunction, Proposal,
     },
 };
@@ -181,12 +179,9 @@ async fn test_can_submit_nns_canister_upgrade_in_degraded_mode() {
         &Proposal {
             title: Some("A Reasonable Title".to_string()),
             summary: "proposal 1".to_string(),
-            action: Some(proposal::Action::InstallCode(InstallCode {
-                canister_id: Some(GOVERNANCE_CANISTER_ID.get()),
-                wasm_module: Some(vec![1, 2, 3]),
-                install_mode: Some(CanisterInstallMode::Upgrade as i32),
-                arg: Some(vec![4, 5, 6]),
-                skip_stopping_before_installing: None,
+            action: Some(proposal::Action::ExecuteNnsFunction(ExecuteNnsFunction {
+                nns_function: NnsFunction::NnsCanisterUpgrade as i32,
+                payload: Vec::new(),
             })),
             ..Default::default()
         },
diff --git a/rs/nns/governance/tests/governance.rs b/rs/nns/governance/tests/governance.rs
index ba533b6efa3..8f33377b72a 100644
--- a/rs/nns/governance/tests/governance.rs
+++ b/rs/nns/governance/tests/governance.rs
@@ -61,7 +61,6 @@ use ic_nns_governance::{
         governance_error::ErrorType::{
            self, InsufficientFunds, NotAuthorized, NotFound, PreconditionFailed, ResourceExhausted,
         },
-        install_code::CanisterInstallMode,
         manage_neuron::{
             self,
             claim_or_refresh::{By, MemoAndController},
@@ -79,8 +78,8 @@ use ic_nns_governance::{
         settle_neurons_fund_participation_request, swap_background_information,
         AddOrRemoveNodeProvider, Ballot, BallotChange, BallotInfo, BallotInfoChange,
         CreateServiceNervousSystem, Empty, ExecuteNnsFunction, Governance as GovernanceProto,
-        GovernanceChange, GovernanceError, IdealMatchedParticipationFunction, InstallCode,
-        KnownNeuron, KnownNeuronData, ListNeurons, ListNeuronsResponse, ListProposalInfo,
+        GovernanceChange, GovernanceError, IdealMatchedParticipationFunction, KnownNeuron,
+        KnownNeuronData, ListNeurons, ListNeuronsResponse, ListProposalInfo,
         ListProposalInfoResponse, ManageNeuron, ManageNeuronResponse, MonthlyNodeProviderRewards,
         Motion, NetworkEconomics, Neuron, NeuronChange, NeuronState, NeuronType, NeuronsFundData,
         NeuronsFundParticipation, NeuronsFundSnapshot, NnsFunction, NodeProvider, Proposal,
@@ -8160,7 +8159,7 @@ fn test_filter_proposals_excluding_topics() {
             proposer: Some(NeuronId { id: 1 }),
             proposal: Some(Proposal {
                 action: Some(proposal::Action::ExecuteNnsFunction(ExecuteNnsFunction {
-                    nns_function: NnsFunction::HardResetNnsRootToVersion as i32,
+                    nns_function: NnsFunction::NnsCanisterUpgrade as i32,
                     payload: Vec::new(),
                 })),
                 ..new_motion_proposal()
@@ -8194,7 +8193,7 @@ fn test_filter_proposals_excluding_topics() {
             &ListProposalInfo {
                 exclude_topic: vec![
                     Topic::NetworkEconomics as i32,
-                    Topic::ProtocolCanisterManagement as i32
+                    Topic::NetworkCanisterManagement as i32
                 ],
                 ..Default::default()
             },
@@ -8564,7 +8563,7 @@ async fn test_max_number_of_proposals_with_ballots() {
                 ..Default::default()
             },
         ), Err(GovernanceError{error_type, error_message: _}) if error_type==ResourceExhausted as i32);
-    // Let's try an Installcode for Governance itself. This proposal type is whitelisted, so it can
+    // Let's try a NnsCanisterUpgrade. This proposal type is whitelisted, so it can
     // be submitted even though the max is reached.
     assert_matches!(
         gov.make_proposal(
@@ -8573,14 +8572,10 @@ async fn test_max_number_of_proposals_with_ballots() {
             &principal(1),
             &Proposal {
                 title: Some("A Reasonable Title".to_string()),
-                summary: "InstallCode for Governance should go through despite the limit"
-                    .to_string(),
-                action: Some(proposal::Action::InstallCode(InstallCode {
-                    canister_id: Some(GOVERNANCE_CANISTER_ID.get()),
-                    wasm_module: Some(vec![1, 2, 3]),
-                    install_mode: Some(CanisterInstallMode::Upgrade as i32),
-                    arg: Some(vec![4, 5, 6]),
-                    skip_stopping_before_installing: None,
+                summary: "NnsCanisterUpgrade should go through despite the limit".to_string(),
+                action: Some(proposal::Action::ExecuteNnsFunction(ExecuteNnsFunction {
+                    nns_function: NnsFunction::NnsCanisterUpgrade as i32,
+                    payload: Vec::new(),
                 })),
                 ..Default::default()
             },
diff --git a/rs/nns/integration_tests/src/canister_upgrade.rs b/rs/nns/integration_tests/src/canister_upgrade.rs
index 6fd02fe22f0..6eed78aae89 100644
--- a/rs/nns/integration_tests/src/canister_upgrade.rs
+++ b/rs/nns/integration_tests/src/canister_upgrade.rs
@@ -16,7 +16,7 @@ use ic_nns_test_utils::{
     },
 };
 
-fn test_upgrade_canister(canister_id: CanisterId, canister_wasm: Wasm) {
+fn test_upgrade_canister(canister_id: CanisterId, canister_wasm: Wasm, use_proposal_action: bool) {
     let state_machine = state_machine_builder_for_nns_tests().build();
     let nns_init_payloads = NnsInitPayloadsBuilder::new().with_test_neurons().build();
     setup_nns_canisters(&state_machine, nns_init_payloads);
@@ -32,7 +32,7 @@ fn test_upgrade_canister(canister_id: CanisterId, canister_wasm: Wasm) {
         canister_id,
         modified_wasm.clone(),
         vec![],
-        true,
+        use_proposal_action,
     );
 
     let controller_canister_id = if canister_id == ROOT_CANISTER_ID {
@@ -48,29 +48,34 @@ fn test_upgrade_canister(canister_id: CanisterId, canister_wasm: Wasm) {
         controller_canister_id,
     );
 
-    let proposal_info =
-        nns_governance_get_proposal_info_as_anonymous(&state_machine, proposal_id.id);
-    let action = proposal_info.proposal.unwrap().action.unwrap();
-    if let Action::InstallCode(install_code) = action {
-        assert_eq!(
-            install_code.wasm_module_hash,
-            Some(Sha256::hash(&modified_wasm).to_vec())
-        );
-        assert_eq!(install_code.arg_hash, Some(vec![]));
-        assert_eq!(
-            install_code.install_mode,
-            Some(CanisterInstallMode::Upgrade as i32)
-        );
-        assert_eq!(install_code.canister_id, Some(canister_id.get()));
-        assert_eq!(install_code.skip_stopping_before_installing, None);
-    } else {
-        panic!("Unexpected action: {:?}", action);
+    if use_proposal_action {
+        let proposal_info =
+            nns_governance_get_proposal_info_as_anonymous(&state_machine, proposal_id.id);
+        let action = proposal_info.proposal.unwrap().action.unwrap();
+        if let Action::InstallCode(install_code) = action {
+            assert_eq!(
+                install_code.wasm_module_hash,
+                Some(Sha256::hash(&modified_wasm).to_vec())
+            );
+            assert_eq!(install_code.arg_hash, Some(vec![]));
+            assert_eq!(
+                install_code.install_mode,
+                Some(CanisterInstallMode::Upgrade as i32)
+            );
+            assert_eq!(install_code.canister_id, Some(canister_id.get()));
+            assert_eq!(install_code.skip_stopping_before_installing, None);
+        } else {
+            panic!("Unexpected action: {:?}", action);
+        }
     }
 }
 
 #[test]
 fn upgrade_canisters_by_proposal() {
-    test_upgrade_canister(GOVERNANCE_CANISTER_ID, build_governance_wasm());
-    test_upgrade_canister(ROOT_CANISTER_ID, build_root_wasm());
-    test_upgrade_canister(LIFELINE_CANISTER_ID, build_lifeline_wasm());
+    test_upgrade_canister(GOVERNANCE_CANISTER_ID, build_governance_wasm(), true);
+    test_upgrade_canister(GOVERNANCE_CANISTER_ID, build_governance_wasm(), false);
+    test_upgrade_canister(ROOT_CANISTER_ID, build_root_wasm(), false);
+    test_upgrade_canister(ROOT_CANISTER_ID, build_root_wasm(), true);
+    test_upgrade_canister(LIFELINE_CANISTER_ID, build_lifeline_wasm(), true);
+    test_upgrade_canister(LIFELINE_CANISTER_ID, build_lifeline_wasm(), false);
 }
diff --git a/rs/nns/integration_tests/src/copy_inactive_neurons_to_stable_memory.rs b/rs/nns/integration_tests/src/copy_inactive_neurons_to_stable_memory.rs
index c07b4bd7a5b..c2c371cff74 100644
--- a/rs/nns/integration_tests/src/copy_inactive_neurons_to_stable_memory.rs
+++ b/rs/nns/integration_tests/src/copy_inactive_neurons_to_stable_memory.rs
@@ -40,7 +40,7 @@ fn test_copy_inactive_neurons_to_stable_memory() {
         GOVERNANCE_CANISTER_ID, // Target, i.e. the canister that we want to upgrade.
        new_wasm_content, // The new code that we want the canister to start running.
         module_arg,
-        true,
+        false,
     );
     println!("Done proposing governance upgrade: {:?}", proposal_id);
 
diff --git a/rs/nns/integration_tests/src/governance_mem_test.rs b/rs/nns/integration_tests/src/governance_mem_test.rs
index 7f1914d2528..859c7558a67 100644
--- a/rs/nns/integration_tests/src/governance_mem_test.rs
+++ b/rs/nns/integration_tests/src/governance_mem_test.rs
@@ -85,7 +85,7 @@ fn governance_mem_test() {
         GOVERNANCE_CANISTER_ID,
         real_gov_wasm.bytes(),
         module_arg,
-        true,
+        false,
     );
     state_machine.tick();
 
diff --git a/rs/nns/integration_tests/src/lifeline.rs b/rs/nns/integration_tests/src/lifeline.rs
index 762f00f6206..987e79f8b76 100644
--- a/rs/nns/integration_tests/src/lifeline.rs
+++ b/rs/nns/integration_tests/src/lifeline.rs
@@ -9,13 +9,15 @@ use ic_nervous_system_common_test_keys::{
 };
 use ic_nns_common::pb::v1::NeuronId;
 use ic_nns_constants::{LIFELINE_CANISTER_ID, ROOT_CANISTER_ID};
-use ic_nns_governance_api::pb::v1::{
-    install_code::CanisterInstallMode as GovernanceCanisterInstallMode,
-    manage_neuron_response::Command as CommandResponse, InstallCodeRequest, MakeProposalRequest,
-    ProposalActionRequest, ProposalStatus, Vote,
+use ic_nns_governance_api::{
+    pb::v1::{
+        manage_neuron_response::Command as CommandResponse, NnsFunction, ProposalStatus, Vote,
+    },
+    proposal_helpers::create_external_update_proposal_candid,
 };
 use ic_nns_test_utils::{
     common::NnsInitPayloadsBuilder,
+    governance::UpgradeRootProposal,
     state_test_helpers::{
         get_pending_proposals, get_root_canister_status, nns_cast_vote,
         nns_governance_get_proposal_info_as_anonymous, nns_governance_make_proposal,
@@ -69,18 +71,17 @@ fn test_submit_and_accept_root_canister_upgrade_proposal() {
     let funny: u32 = 422557101; // just a funny number I came up with
     let magic = funny.to_le_bytes();
 
-    let proposal = MakeProposalRequest {
-        title: Some("Proposal to upgrade the root canister".to_string()),
-        summary: "".to_string(),
-        url: "".to_string(),
-        action: Some(ProposalActionRequest::InstallCode(InstallCodeRequest {
-            canister_id: Some(ROOT_CANISTER_ID.get()),
-            wasm_module: Some(wasm_module.clone()),
-            install_mode: Some(GovernanceCanisterInstallMode::Upgrade as i32),
-            arg: Some(magic.to_vec()),
-            skip_stopping_before_installing: None,
-        })),
-    };
+    let proposal = create_external_update_proposal_candid(
+        "Proposal to upgrade the root canister",
+        "",
+        "",
+        NnsFunction::NnsRootUpgrade,
+        UpgradeRootProposal {
+            wasm_module: wasm_module.clone(),
+            module_arg: magic.to_vec(),
+            stop_upgrade_start: true,
+        },
+    );
 
     let neuron_id = NeuronId {
         id: TEST_NEURON_2_ID,
     };
@@ -170,18 +171,17 @@ fn test_submit_and_accept_forced_root_canister_upgrade_proposal() {
 
     let init_arg: &[u8] = &[];
 
-    let proposal = MakeProposalRequest {
-        title: Some("Proposal to upgrade the root canister".to_string()),
-        summary: "".to_string(),
-        url: "".to_string(),
-        action: Some(ProposalActionRequest::InstallCode(InstallCodeRequest {
-            canister_id: Some(ROOT_CANISTER_ID.get()),
-            wasm_module: Some(empty_wasm.to_vec()),
-            install_mode: Some(GovernanceCanisterInstallMode::Upgrade as i32),
-            arg: Some(init_arg.to_vec()),
-            skip_stopping_before_installing: Some(true),
-        })),
-    };
+    let proposal = create_external_update_proposal_candid(
+        "Proposal to upgrade the root canister",
+        "",
+        "",
+        NnsFunction::NnsRootUpgrade,
+        UpgradeRootProposal {
+            wasm_module: empty_wasm.to_vec(),
+            module_arg: init_arg.to_vec(),
+            stop_upgrade_start: false,
+        },
+    );
 
     let neuron_id = NeuronId {
         id: TEST_NEURON_2_ID,
     };
@@ -272,18 +272,17 @@ fn
test_lifeline_canister_restarts_root_on_stop_canister_timeout() { state_machine.tick(); let root_wasm = Project::cargo_bin_maybe_from_env("root-canister", &[]).bytes(); - let proposal = MakeProposalRequest { - title: Some("Tea. Earl Grey. Hot.".to_string()), - summary: "Make It So".to_string(), - url: "".to_string(), - action: Some(ProposalActionRequest::InstallCode(InstallCodeRequest { - canister_id: Some(ROOT_CANISTER_ID.get()), - wasm_module: Some(root_wasm), - install_mode: Some(GovernanceCanisterInstallMode::Upgrade as i32), - arg: Some(vec![]), - skip_stopping_before_installing: None, - })), - }; + let proposal = create_external_update_proposal_candid( + "Tea. Earl Grey. Hot.", + "Make It So", + "", + NnsFunction::NnsRootUpgrade, + UpgradeRootProposal { + stop_upgrade_start: true, + wasm_module: root_wasm, + module_arg: vec![], + }, + ); let neuron_id = NeuronId { id: TEST_NEURON_1_ID, }; diff --git a/rs/nns/integration_tests/src/neuron_following.rs b/rs/nns/integration_tests/src/neuron_following.rs index c2338fe2c17..5adab60da6b 100644 --- a/rs/nns/integration_tests/src/neuron_following.rs +++ b/rs/nns/integration_tests/src/neuron_following.rs @@ -22,7 +22,7 @@ use ic_state_machine_tests::StateMachine; const VALID_TOPIC: i32 = Topic::ParticipantManagement as i32; const INVALID_TOPIC: i32 = 69420; -const PROTOCOAL_CANISTER_MANAGEMENT_TOPIC: i32 = Topic::ProtocolCanisterManagement as i32; +const NETWORK_CANISTER_MANAGEMENT_TOPIC: i32 = Topic::NetworkCanisterManagement as i32; const NEURON_MANAGEMENT_TOPIC: i32 = Topic::NeuronManagement as i32; const VOTING_POWER_NEURON_1: u64 = 1_404_004_106; const VOTING_POWER_NEURON_2: u64 = 140_400_410; @@ -210,7 +210,7 @@ fn vote_propagation_with_following() { &state_machine, &n1, &[n2.neuron_id], - PROTOCOAL_CANISTER_MANAGEMENT_TOPIC, + NETWORK_CANISTER_MANAGEMENT_TOPIC, ); // voting doesn't get propagated by mutating the following graph @@ -250,7 +250,7 @@ fn vote_propagation_with_following() { &state_machine, &n3, &[n2.neuron_id], - PROTOCOAL_CANISTER_MANAGEMENT_TOPIC, + NETWORK_CANISTER_MANAGEMENT_TOPIC, ); // make n2 follow n1 @@ -258,7 +258,7 @@ fn vote_propagation_with_following() { &state_machine, &n2, &[n1.neuron_id], - PROTOCOAL_CANISTER_MANAGEMENT_TOPIC, + NETWORK_CANISTER_MANAGEMENT_TOPIC, ); // now n1 and n2 follow each other (circle), and n3 follows n2 @@ -291,7 +291,7 @@ fn vote_propagation_with_following() { &state_machine, &n2, &[n1a.neuron_id], - PROTOCOAL_CANISTER_MANAGEMENT_TOPIC, + NETWORK_CANISTER_MANAGEMENT_TOPIC, ); // at this point n2 is not influential @@ -349,14 +349,14 @@ fn vote_propagation_with_following() { &state_machine, &n1a, &[n3.neuron_id], - PROTOCOAL_CANISTER_MANAGEMENT_TOPIC, + NETWORK_CANISTER_MANAGEMENT_TOPIC, ); set_followees_on_topic( &state_machine, &n3, &[n1.neuron_id], - PROTOCOAL_CANISTER_MANAGEMENT_TOPIC, + NETWORK_CANISTER_MANAGEMENT_TOPIC, ); // fire off a new proposal by n1, and see all neurons voting diff --git a/rs/nns/integration_tests/src/reinstall_and_upgrade.rs b/rs/nns/integration_tests/src/reinstall_and_upgrade.rs index 8e023b24c86..a39c28287ab 100644 --- a/rs/nns/integration_tests/src/reinstall_and_upgrade.rs +++ b/rs/nns/integration_tests/src/reinstall_and_upgrade.rs @@ -18,7 +18,7 @@ use ic_nns_test_utils::{ governance::{ bump_gzip_timestamp, get_pending_proposals, reinstall_nns_canister_by_proposal, submit_external_update_proposal, upgrade_nns_canister_by_proposal, - upgrade_nns_canister_with_arg_by_proposal, + upgrade_nns_canister_with_arg_by_proposal, 
upgrade_root_canister_by_proposal, }, itest_helpers::{state_machine_test_on_nns_subnet, NnsCanisters}, }; @@ -49,44 +49,54 @@ fn test_reinstall_and_upgrade_canisters_canonical_ordering() { for CanisterInstallInfo { wasm, - use_root: _, + use_root, canister, init_payload, mode, } in get_nns_canister_wasm(&nns_canisters, init_state).into_iter() { - if mode == CanisterInstallMode::Upgrade { - println!("[Update] Canister: {:?}", canister.canister_id()); - if canister.canister_id() == LIFELINE_CANISTER_ID { - let arg: Vec = vec![]; - upgrade_nns_canister_with_arg_by_proposal( - canister, - &nns_canisters.governance, - &nns_canisters.root, - bump_gzip_timestamp(&wasm), - Encode!(&arg).unwrap(), - ) - .await; - } else { - upgrade_nns_canister_by_proposal( + if use_root { + if mode == CanisterInstallMode::Upgrade { + println!("[Update] Canister: {:?}", canister.canister_id()); + if canister.canister_id() == LIFELINE_CANISTER_ID { + let arg: Vec = vec![]; + upgrade_nns_canister_with_arg_by_proposal( + canister, + &nns_canisters.governance, + &nns_canisters.root, + bump_gzip_timestamp(&wasm), + Encode!(&arg).unwrap(), + ) + .await; + } else { + upgrade_nns_canister_by_proposal( + canister, + &nns_canisters.governance, + &nns_canisters.root, + true, + // Method fails if wasm stays the same + bump_gzip_timestamp(&wasm), + None, + ) + .await; + } + } else if mode == CanisterInstallMode::Reinstall { + println!("[Reinstall] Canister: {:?}", canister.canister_id()); + reinstall_nns_canister_by_proposal( canister, &nns_canisters.governance, &nns_canisters.root, - true, - // Method fails if wasm stays the same - bump_gzip_timestamp(&wasm), - Some(Encode!(&()).unwrap()), + wasm, + init_payload, ) .await; } - } else if mode == CanisterInstallMode::Reinstall { - println!("[Reinstall] Canister: {:?}", canister.canister_id()); - reinstall_nns_canister_by_proposal( - canister, + } else { + // Root Upgrade via Lifeline + upgrade_root_canister_by_proposal( &nns_canisters.governance, &nns_canisters.root, wasm, - init_payload, ) .await; } @@ -186,22 +196,32 @@ fn test_reinstall_and_upgrade_canisters_with_state_changes() { // Upgrade for CanisterInstallInfo { wasm, - use_root: _, + use_root, canister, init_payload, mode, } in canister_install_info { if mode == CanisterInstallMode::Upgrade { - upgrade_nns_canister_by_proposal( - canister, - &nns_canisters.governance, - &nns_canisters.root, - false, - wasm, - Some(init_payload), - ) - .await; + if use_root { + upgrade_nns_canister_by_proposal( + canister, + &nns_canisters.governance, + &nns_canisters.root, + false, + wasm, + Some(init_payload), + ) + .await; + } else { + // Root Upgrade via Lifeline + upgrade_root_canister_by_proposal( + &nns_canisters.governance, + &nns_canisters.root, + wasm, + ) + .await; + } } } @@ -340,6 +360,13 @@ fn get_nns_canister_wasm<'a>( use_root: false, canister: &nns_canisters.root, init_payload: encoded_init_state[5].clone(), + mode: CanisterInstallMode::Reinstall, + }, + CanisterInstallInfo { + wasm: Project::cargo_bin_maybe_from_env("root-canister", &[]), + use_root: false, + canister: &nns_canisters.root, + init_payload: encoded_init_state[5].clone(), mode: CanisterInstallMode::Upgrade, }, CanisterInstallInfo { diff --git a/rs/nns/integration_tests/src/upgrade_canisters_with_golden_nns_state.rs b/rs/nns/integration_tests/src/upgrade_canisters_with_golden_nns_state.rs index 34866087bc6..8b44b4ee10a 100644 --- a/rs/nns/integration_tests/src/upgrade_canisters_with_golden_nns_state.rs +++ 
b/rs/nns/integration_tests/src/upgrade_canisters_with_golden_nns_state.rs @@ -237,7 +237,7 @@ fn test_upgrade_canisters_with_golden_nns_state() { *canister_id, wasm_content.clone(), module_arg.clone(), - true, + false, ); // Step 3: Verify result(s): In a short while, the canister should diff --git a/rs/nns/test_utils/src/governance.rs b/rs/nns/test_utils/src/governance.rs index 9795afde7bb..df1d2eec0d8 100644 --- a/rs/nns/test_utils/src/governance.rs +++ b/rs/nns/test_utils/src/governance.rs @@ -6,21 +6,25 @@ use canister_test::{Canister, Wasm}; use dfn_candid::{candid, candid_one}; use ic_btc_interface::SetConfigRequest; use ic_canister_client_sender::Sender; +use ic_management_canister_types::CanisterInstallMode; use ic_nervous_system_clients::{ canister_id_record::CanisterIdRecord, canister_status::{CanisterStatusResult, CanisterStatusType}, }; use ic_nervous_system_common_test_keys::{TEST_NEURON_1_ID, TEST_NEURON_1_OWNER_KEYPAIR}; +use ic_nervous_system_root::change_canister::ChangeCanisterRequest; use ic_nns_common::types::{NeuronId, ProposalId}; +use ic_nns_constants::ROOT_CANISTER_ID; use ic_nns_governance_api::{ bitcoin::{BitcoinNetwork, BitcoinSetConfigProposal}, pb::v1::{ - add_or_remove_node_provider::Change, install_code::CanisterInstallMode, - manage_neuron::NeuronIdOrSubaccount, manage_neuron_response::Command as CommandResponse, - AddOrRemoveNodeProvider, ExecuteNnsFunction, GovernanceError, InstallCodeRequest, - ListNodeProvidersResponse, MakeProposalRequest, ManageNeuronCommandRequest, - ManageNeuronRequest, ManageNeuronResponse, NnsFunction, NodeProvider, - ProposalActionRequest, ProposalInfo, ProposalStatus, + add_or_remove_node_provider::Change, + manage_neuron::{Command, NeuronIdOrSubaccount}, + manage_neuron_response::Command as CommandResponse, + proposal::Action, + AddOrRemoveNodeProvider, ExecuteNnsFunction, GovernanceError, ListNodeProvidersResponse, + ManageNeuron, ManageNeuronResponse, NnsFunction, NodeProvider, Proposal, ProposalInfo, + ProposalStatus, }, }; pub use ic_nns_handler_lifeline_interface::{ @@ -32,7 +36,7 @@ use std::time::Duration; /// serialization/deserialization pub async fn submit_proposal( governance_canister: &Canister<'_>, - proposal: &MakeProposalRequest, + proposal: &Proposal, ) -> ProposalId { governance_canister .update_("submit_proposal", candid_one, proposal) @@ -52,25 +56,23 @@ pub async fn submit_external_update_proposal_allowing_error( title: String, summary: String, ) -> Result { - let proposal = MakeProposalRequest { + let proposal = Proposal { title: Some(title), summary, url: "".to_string(), - action: Some(ProposalActionRequest::ExecuteNnsFunction( - ExecuteNnsFunction { - nns_function: nns_function as i32, - payload: Encode!(&nns_function_input).expect("Error encoding proposal payload"), - }, - )), + action: Some(Action::ExecuteNnsFunction(ExecuteNnsFunction { + nns_function: nns_function as i32, + payload: Encode!(&nns_function_input).expect("Error encoding proposal payload"), + })), }; let response: ManageNeuronResponse = governance_canister .update_from_sender( "manage_neuron", candid_one, - ManageNeuronRequest { + ManageNeuron { id: None, - command: Some(ManageNeuronCommandRequest::MakeProposal(Box::new(proposal))), + command: Some(Command::MakeProposal(Box::new(proposal))), neuron_id_or_subaccount: Some(NeuronIdOrSubaccount::NeuronId( proposer_neuron_id.into(), )), @@ -97,25 +99,23 @@ pub async fn submit_external_update_proposal( title: String, summary: String, ) -> ProposalId { - let proposal = MakeProposalRequest { + 
let proposal = Proposal { title: Some(title), summary, url: "".to_string(), - action: Some(ProposalActionRequest::ExecuteNnsFunction( - ExecuteNnsFunction { - nns_function: nns_function as i32, - payload: Encode!(&nns_function_input).expect("Error encoding proposal payload"), - }, - )), + action: Some(Action::ExecuteNnsFunction(ExecuteNnsFunction { + nns_function: nns_function as i32, + payload: Encode!(&nns_function_input).expect("Error encoding proposal payload"), + })), }; let response: ManageNeuronResponse = governance_canister .update_from_sender( "manage_neuron", candid_one, - ManageNeuronRequest { + ManageNeuron { id: None, - command: Some(ManageNeuronCommandRequest::MakeProposal(Box::new(proposal))), + command: Some(Command::MakeProposal(Box::new(proposal))), neuron_id_or_subaccount: Some(NeuronIdOrSubaccount::NeuronId( proposer_neuron_id.into(), )), @@ -134,6 +134,76 @@ pub async fn submit_external_update_proposal( } } +/// Wraps the given nns_function_input into a proposal; sends it to the governance +/// canister; returns the proposal id. +pub async fn submit_external_update_proposal_binary( + governance_canister: &Canister<'_>, + proposer: Sender, + proposer_neuron_id: NeuronId, + nns_function: NnsFunction, + nns_function_input: Vec, + title: String, + summary: String, +) -> ProposalId { + let response: ManageNeuronResponse = submit_external_update_proposal_binary_with_response( + governance_canister, + proposer, + proposer_neuron_id, + nns_function, + nns_function_input, + title, + summary, + ) + .await; + + match response + .panic_if_error("Error making proposal") + .command + .unwrap() + { + CommandResponse::MakeProposal(resp) => ProposalId::from(resp.proposal_id.unwrap()), + _ => panic!("Invalid response"), + } +} + +/// Wraps the given nns_function_input into a proposal; sends it to the governance +/// canister; returns the `ManageNeuronResponse` +pub async fn submit_external_update_proposal_binary_with_response( + governance_canister: &Canister<'_>, + proposer: Sender, + proposer_neuron_id: NeuronId, + nns_function: NnsFunction, + nns_function_input: Vec, + title: String, + summary: String, +) -> ManageNeuronResponse { + let proposal = Proposal { + title: Some(title), + summary, + url: "".to_string(), + action: Some(Action::ExecuteNnsFunction(ExecuteNnsFunction { + nns_function: nns_function as i32, + payload: nns_function_input, + })), + }; + + governance_canister + .update_from_sender( + "manage_neuron", + candid_one, + ManageNeuron { + id: None, + command: Some(Command::MakeProposal(Box::new(proposal))), + neuron_id_or_subaccount: Some(NeuronIdOrSubaccount::NeuronId( + proposer_neuron_id.into(), + )), + }, + &proposer, + ) + .await + .expect("Error calling the manage_neuron api.") +} + /// Thin-wrapper around get_proposal_info to handle /// serialization/deserialization pub async fn get_proposal_info( @@ -203,25 +273,21 @@ pub async fn add_node_provider(nns_canisters: &NnsCanisters<'_>, np: NodeProvide .update_from_sender( "manage_neuron", candid_one, - ManageNeuronRequest { + ManageNeuron { neuron_id_or_subaccount: Some(NeuronIdOrSubaccount::NeuronId( ic_nns_common::pb::v1::NeuronId { id: TEST_NEURON_1_ID, }, )), id: None, - command: Some(ManageNeuronCommandRequest::MakeProposal(Box::new( - MakeProposalRequest { - title: Some("Add a Node Provider".to_string()), - summary: "".to_string(), - url: "".to_string(), - action: Some(ProposalActionRequest::AddOrRemoveNodeProvider( - AddOrRemoveNodeProvider { - change: Some(Change::ToAdd(np)), - }, - )), - }, - ))), + command: 
Some(Command::MakeProposal(Box::new(Proposal { + title: Some("Add a Node Provider".to_string()), + summary: "".to_string(), + url: "".to_string(), + action: Some(Action::AddOrRemoveNodeProvider(AddOrRemoveNodeProvider { + change: Some(Change::ToAdd(np)), + })), + }))), }, &Sender::from_keypair(&TEST_NEURON_1_OWNER_KEYPAIR), ) @@ -290,6 +356,55 @@ pub fn bump_gzip_timestamp(wasm: &Wasm) -> Wasm { Wasm::from_bytes(new_wasm) } +/// Submits a proposal to upgrade the root canister. +pub async fn upgrade_root_canister_by_proposal( + governance: &Canister<'_>, + root: &Canister<'_>, + wasm: Wasm, +) { + let wasm = wasm.bytes(); + let new_module_hash = &ic_crypto_sha2::Sha256::hash(&wasm); + + let proposal_id = submit_external_update_proposal( + governance, + Sender::from_keypair(&TEST_NEURON_1_OWNER_KEYPAIR), + NeuronId(TEST_NEURON_1_ID), + NnsFunction::NnsRootUpgrade, + UpgradeRootProposal { + wasm_module: wasm, + module_arg: Vec::::new(), + stop_upgrade_start: false, + }, + "Upgrade Root Canister".to_string(), + "".to_string(), + ) + .await; + + assert_eq!( + wait_for_final_state(governance, proposal_id).await.status(), + ProposalStatus::Executed + ); + + for _ in 0..100 { + let Ok(status): Result = root + .update_( + "canister_status", + candid_one, + CanisterIdRecord::from(ROOT_CANISTER_ID), + ) + .await + else { + continue; + }; + if status.module_hash.unwrap().as_slice() == new_module_hash + && status.status == CanisterStatusType::Running + { + return; + } + } + panic!("Root canister upgrade did not complete in time."); +} + /// Perform a change on a canister by upgrading it or /// reinstalling entirely, depending on the `how` argument. /// Argument `wasm` is ensured to have a different @@ -320,77 +435,44 @@ async fn change_nns_canister_by_proposal( .await .unwrap(); let old_module_hash = status.module_hash.unwrap(); - assert_ne!( - old_module_hash.as_slice(), - new_module_hash, - "change_nns_canister_by_proposal: both module hashes prev, cur are \ - the same {:?}, but they should be different for upgrade", - old_module_hash - ); - - let proposal = MakeProposalRequest { - title: Some("Upgrade NNS Canister".to_string()), - summary: "".to_string(), - url: "".to_string(), - action: Some(ProposalActionRequest::InstallCode(InstallCodeRequest { - canister_id: Some(canister.canister_id().get()), - wasm_module: Some(wasm.clone()), - install_mode: Some(how as i32), - arg: Some(arg.unwrap_or_default()), - skip_stopping_before_installing: Some(stop_before_installing), - })), + assert_ne!(old_module_hash.as_slice(), new_module_hash, "change_nns_canister_by_proposal: both module hashes prev, cur are the same {:?}, but they should be different for upgrade", old_module_hash); + + let change_canister_request = + ChangeCanisterRequest::new(stop_before_installing, how, canister.canister_id()) + .with_memory_allocation(ic_nns_constants::memory_allocation_of( + canister.canister_id(), + )) + .with_wasm(wasm) + .with_arg(Encode!().unwrap()); + let change_canister_request = if let Some(arg) = arg { + change_canister_request.with_arg(arg) + } else { + change_canister_request }; // Submitting a proposal also implicitly records a vote from the proposer, // which with TEST_NEURON_1 is enough to trigger execution. 
- let response: ManageNeuronResponse = governance - .update_from_sender( - "manage_neuron", - candid_one, - ManageNeuronRequest { - id: None, - command: Some(ManageNeuronCommandRequest::MakeProposal(Box::new(proposal))), - neuron_id_or_subaccount: Some(NeuronIdOrSubaccount::NeuronId( - NeuronId(TEST_NEURON_1_ID).into(), - )), - }, - &Sender::from_keypair(&TEST_NEURON_1_OWNER_KEYPAIR), - ) - .await - .expect("Error calling the manage_neuron api."); - let proposal_id = match response - .panic_if_error("Error making proposal") - .command - .unwrap() - { - CommandResponse::MakeProposal(resp) => resp.proposal_id.expect("No proposal id"), - other => panic!("Unexpected response: {:?}", other), - }; - - // If the canister is the root canister, we need to wait for the proposal to be executed before - // starting to poll the status, otherwise the open call context will prevent the root canister - // from being stopped. - if canister.canister_id() == root.canister_id() { - assert_eq!( - wait_for_final_state(governance, ProposalId::from(proposal_id)) - .await - .status(), - ProposalStatus::Executed - ); - } + submit_external_update_proposal( + governance, + Sender::from_keypair(&TEST_NEURON_1_OWNER_KEYPAIR), + NeuronId(TEST_NEURON_1_ID), + NnsFunction::NnsCanisterUpgrade, + change_canister_request, + "Upgrade NNS Canister".to_string(), + "".to_string(), + ) + .await; // Wait 'till the hash matches and the canister is running again. loop { - let Ok(status): Result = root + let status: CanisterStatusResult = root .update_( "canister_status", candid_one, CanisterIdRecord::from(canister.canister_id()), ) .await - else { - continue; - }; + .unwrap(); if status.module_hash.unwrap().as_slice() == new_module_hash && status.status == CanisterStatusType::Running { diff --git a/rs/nns/test_utils/src/neuron_helpers.rs b/rs/nns/test_utils/src/neuron_helpers.rs index adb6763fe1b..5bb556d0e26 100644 --- a/rs/nns/test_utils/src/neuron_helpers.rs +++ b/rs/nns/test_utils/src/neuron_helpers.rs @@ -5,10 +5,9 @@ use ic_nervous_system_common_test_keys::{ TEST_NEURON_2_OWNER_PRINCIPAL, TEST_NEURON_3_ID, TEST_NEURON_3_OWNER_PRINCIPAL, }; use ic_nns_common::{pb::v1::NeuronId, types::ProposalId}; -use ic_nns_constants::ROOT_CANISTER_ID; use ic_nns_governance_api::pb::v1::{ - install_code::CanisterInstallMode, manage_neuron_response::Command, InstallCodeRequest, - MakeProposalRequest, Neuron, ProposalActionRequest, + manage_neuron_response::Command, ExecuteNnsFunction, MakeProposalRequest, Neuron, NnsFunction, + ProposalActionRequest, }; use ic_state_machine_tests::StateMachine; use std::collections::HashMap; @@ -71,13 +70,12 @@ pub fn get_some_proposal() -> MakeProposalRequest { title: Some("".to_string()), summary: "".to_string(), url: "".to_string(), - action: Some(ProposalActionRequest::InstallCode(InstallCodeRequest { - canister_id: Some(ROOT_CANISTER_ID.get()), - wasm_module: Some(vec![]), - install_mode: Some(CanisterInstallMode::Upgrade as i32), - arg: Some(vec![]), - skip_stopping_before_installing: None, - })), + action: Some(ProposalActionRequest::ExecuteNnsFunction( + ExecuteNnsFunction { + nns_function: NnsFunction::NnsRootUpgrade as i32, + payload: Vec::new(), + }, + )), } } diff --git a/rs/tests/src/ledger_tests/transaction_ledger_correctness.rs b/rs/tests/src/ledger_tests/transaction_ledger_correctness.rs index 848d6b24736..91b8dbf8b74 100644 --- a/rs/tests/src/ledger_tests/transaction_ledger_correctness.rs +++ b/rs/tests/src/ledger_tests/transaction_ledger_correctness.rs @@ -157,13 +157,12 @@ mod holder { } pub 
async fn upgrade(rt: &Runtime, nns_canister_id: &CanisterId) {
-        ic_nns_test_utils::governance::upgrade_nns_canister_by_proposal(
+        ic_nns_test_utils::itest_helpers::upgrade_nns_canister_by_proposal(
             &Canister::new(rt, *nns_canister_id),
             &Canister::new(rt, ic_nns_constants::GOVERNANCE_CANISTER_ID),
             &Canister::new(rt, ic_nns_constants::ROOT_CANISTER_ID),
             true,
             Wasm::from_bytes(HOLDER_CANISTER_WASM.to_vec()),
-            None,
         )
         .await;
     }