Skip to content

Commit

Permalink
Merge branch 'main' into update_gh_actions
Browse files Browse the repository at this point in the history
  • Loading branch information
saratomaz authored Jan 29, 2025
2 parents 305a0bd + 471b0a7 commit f897625
Show file tree
Hide file tree
Showing 26 changed files with 731 additions and 490 deletions.
10 changes: 10 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Set update schedule for GitHub Actions
# (scrape stripped the indentation; structure reconstructed per Dependabot schema)

version: 2
updates:
  - package-ecosystem: "github-actions"
    # Check the repository root for workflow files
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every day
      interval: "daily"
2 changes: 1 addition & 1 deletion .github/workflows/db_sync_full_sync.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: trigger the Buildkite pipeline - run db-sync full sync test
uses: 'buildkite/trigger-pipeline-action@v1.5.0'
uses: 'buildkite/trigger-pipeline-action@v2.3.0'
env:
BUILDKITE_API_ACCESS_TOKEN: ${{ secrets.BUILDKITE_API_ACCESS_TOKEN }}
PIPELINE: 'input-output-hk/db-sync-sync-tests'
Expand Down
27 changes: 27 additions & 0 deletions .github/workflows/nix_smoke.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Smoke-tests the Nix dev shell: builds .#python and checks sync_tests imports.
# (scrape stripped the indentation; structure reconstructed per GitHub Actions schema)
name: "Nix smoke test"

on:
  pull_request:
    branches: [ "main" ]
    # Only run when Nix inputs or this workflow itself change
    paths:
      - 'flake.lock'
      - '**.nix'
      - '.github/workflows/nix_smoke.yaml'
  workflow_dispatch:

jobs:
  nix_smoke:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Install Nix
        uses: cachix/install-nix-action@v30
        with:
          extra_nix_config: |
            access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
            trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ= iohk.cachix.org-1:DpRUyj7h7V830dp/i6Nti+NEO2/nhblbov/8MW7Rqoo=
            substituters = https://cache.nixos.org https://cache.iog.io https://iohk.cachix.org
            allow-import-from-derivation = true
      - name: Run nix shell
        run: nix develop --accept-flake-config .#python --command python -c "import sync_tests"
3 changes: 2 additions & 1 deletion .github/workflows/node_sync_test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ jobs:
fi
- name: Trigger the Buildkite pipeline - run sync tests on Mainnet
uses: 'buildkite/trigger-pipeline-action@v1.5.0'
uses: 'buildkite/trigger-pipeline-action@v2.3.0'
env:
BUILDKITE_API_ACCESS_TOKEN: ${{ secrets.BUILDKITE_API_ACCESS_TOKEN }}
PIPELINE: 'input-output-hk/node-sync-tests'
Expand Down Expand Up @@ -112,3 +112,4 @@ jobs:
"AWS_DB_NAME":"${{ secrets.AWS_DB_NAME }}",
"AWS_DB_HOSTNAME":"${{ secrets.AWS_DB_HOSTNAME }}"
}'

2 changes: 1 addition & 1 deletion .github/workflows/weekly_node_sync_test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ jobs:
echo "tag_no1=nightly_node_$current_date" >> $GITHUB_OUTPUT
- name: Trigger Buildkite pipeline for Mainnet sync tests
uses: 'buildkite/trigger-pipeline-action@v1.5.0'
uses: 'buildkite/trigger-pipeline-action@v2.3.0'
env:
BUILDKITE_API_ACCESS_TOKEN: ${{ secrets.BUILDKITE_API_ACCESS_TOKEN }}
PIPELINE: 'input-output-hk/node-sync-tests'
Expand Down
1 change: 0 additions & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
exclude: ^sync_tests/
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
Expand Down
6 changes: 4 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@
line-length = 100

[tool.ruff.lint]
select = ["ARG", "B", "C4", "C90", "D", "DTZ", "E", "EM", "F", "FURB", "I001", "ISC", "N", "PERF", "PIE", "PL", "PLE", "PLR", "PLW", "PT", "PTH", "Q", "RET", "RSE", "RUF", "SIM", "TRY", "UP", "W", "YTT"]
ignore = ["B905", "D10", "D203", "D212", "D213", "D214", "D215", "D404", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D413", "ISC001", "PLR0912", "PLR0913", "PLR0915", "PLR2004", "PT001", "PT007", "PT012", "PT018", "PT023", "PTH123", "RET504", "TRY002", "TRY301", "UP006", "UP007", "UP035"]
# TODO: add "PTH"
select = ["ARG", "B", "C4", "C90", "D", "DTZ", "E", "EM", "F", "FURB", "I001", "ISC", "N", "PERF", "PIE", "PL", "PLE", "PLR", "PLW", "PT", "Q", "RET", "RSE", "RUF", "SIM", "TRY", "UP", "W", "YTT"]
# TODO: remove "PLW0603", "SIM115", "SIM112", "N801", "PERF203", "C901"
ignore = ["B905", "D10", "D203", "D212", "D213", "D214", "D215", "D404", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D413", "ISC001", "PLR0912", "PLR0913", "PLR0915", "PLR2004", "PT001", "PT007", "PT012", "PT018", "PT023", "PTH123", "RET504", "TRY002", "TRY301", "UP006", "UP007", "UP035", "PLW0603", "SIM115", "SIM112", "N801", "PERF203", "C901"]

[tool.ruff.lint.isort]
force-single-line = true
Expand Down
1 change: 1 addition & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,4 @@
mypy
pre-commit
types-requests
types-PyMySQL
1 change: 0 additions & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ long_description_content_type = text/markdown
classifiers =
Topic :: Software Development :: Libraries :: Python Modules
Programming Language :: Python :: 3
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Intended Audience :: Developers
Expand Down
2 changes: 1 addition & 1 deletion sync_tests/schemas/expected_db_indexes.json
Original file line number Diff line number Diff line change
Expand Up @@ -45,4 +45,4 @@
"reference_tx_in": ["reference_tx_in_pkey"],
"redeemer_data": ["redeemer_data_pkey", "unique_redeemer_data"],
"reverse_index": ["reverse_index_pkey"]
}
}
2 changes: 1 addition & 1 deletion sync_tests/schemas/expected_db_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -445,4 +445,4 @@
{"inline_datum_id": "bigint"},
{"reference_script_id": "bigint"}
]
}
}
9 changes: 5 additions & 4 deletions sync_tests/scripts/db-sync-start.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,19 @@


cd cardano-db-sync
export PGPASSFILE=config/pgpass-$ENVIRONMENT
export PGPASSFILE="config/pgpass-$ENVIRONMENT"

if [[ $FIRST_START == "True" ]]; then
cd config
wget -O $ENVIRONMENT-db-config.json https://book.play.dev.cardano.org/environments/$ENVIRONMENT/db-sync-config.json
wget -O "$ENVIRONMENT-db-config.json" "https://book.play.dev.cardano.org/environments/$ENVIRONMENT/db-sync-config.json"
sed -i "s/NodeConfigFile.*/NodeConfigFile\": \"..\/..\/cardano-node\/$ENVIRONMENT-config.json\",/g" "$ENVIRONMENT-db-config.json"
cd ..
fi

# clear log file
cat /dev/null > $LOG_FILEPATH
cat /dev/null > "$LOG_FILEPATH"

# set abort on first error flag and start db-sync
export DbSyncAbortOnPanic=1
PGPASSFILE=$PGPASSFILE db-sync-node/bin/cardano-db-sync --config config/$ENVIRONMENT-db-config.json --socket-path ../cardano-node/db/node.socket --schema-dir schema/ --state-dir ledger-state/$ENVIRONMENT $DB_SYNC_START_ARGS >> $LOG_FILEPATH &
# shellcheck disable=SC2086
PGPASSFILE="$PGPASSFILE" db-sync-node/bin/cardano-db-sync --config "config/$ENVIRONMENT-db-config.json" --socket-path ../cardano-node/db/node.socket --schema-dir schema/ --state-dir "ledger-state/$ENVIRONMENT" ${DB_SYNC_START_ARGS} >> "$LOG_FILEPATH" &
2 changes: 1 addition & 1 deletion sync_tests/tests/db_tables_ddl.sql
Original file line number Diff line number Diff line change
Expand Up @@ -364,4 +364,4 @@ CREATE TABLE `shelley-qa_performance_stats_db_sync` (
`slot_no` int DEFAULT NULL,
`cpu_percent_usage` decimal(10,0) DEFAULT NULL,
`rss_mem_usage` bigint DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
59 changes: 37 additions & 22 deletions sync_tests/tests/full_sync_from_clean_state.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import argparse
import datetime
import json
import os
import sys
import typing as tp
from collections import OrderedDict
from datetime import datetime
from datetime import timedelta
from pathlib import Path

import matplotlib.pyplot as plt
Expand All @@ -27,7 +27,7 @@ def create_sync_stats_chart() -> None:
fig = plt.figure(figsize=(14, 10))

# define epochs sync times chart
ax_epochs = fig.add_axes([0.05, 0.05, 0.9, 0.35])
ax_epochs = fig.add_axes((0.05, 0.05, 0.9, 0.35))
ax_epochs.set(xlabel="epochs [number]", ylabel="time [min]")
ax_epochs.set_title("Epochs Sync Times")

Expand All @@ -39,7 +39,7 @@ def create_sync_stats_chart() -> None:
ax_epochs.bar(epochs, epoch_times)

# define performance chart
ax_perf = fig.add_axes([0.05, 0.5, 0.9, 0.45])
ax_perf = fig.add_axes((0.05, 0.5, 0.9, 0.45))
ax_perf.set(xlabel="time [min]", ylabel="RSS [B]")
ax_perf.set_title("RSS usage")

Expand All @@ -53,7 +53,7 @@ def create_sync_stats_chart() -> None:
fig.savefig(CHART)


def upload_sync_results_to_aws(env):
def upload_sync_results_to_aws(env: str) -> None:
os.chdir(utils_db_sync.ROOT_TEST_PATH)
os.chdir(Path.cwd() / "cardano-db-sync")

Expand All @@ -62,7 +62,9 @@ def upload_sync_results_to_aws(env):
sync_test_results_dict = json.load(json_file)

test_summary_table = env + "_db_sync"
test_id = str(int(aws_db_utils.get_last_identifier(test_summary_table).split("_")[-1]) + 1)
last_identifier = aws_db_utils.get_last_identifier(test_summary_table)
assert last_identifier is not None # TODO: refactor
test_id = str(int(last_identifier.split("_")[-1]) + 1)
identifier = env + "_" + test_id
sync_test_results_dict["identifier"] = identifier

Expand Down Expand Up @@ -131,7 +133,7 @@ def upload_sync_results_to_aws(env):
sys.exit(1)


def print_report(db_schema, db_indexes):
def print_report(db_schema: Exception | None, db_indexes: Exception | None) -> None:
log_errors = utils_db_sync.are_errors_present_in_db_sync_logs(utils_db_sync.DB_SYNC_LOG_FILE)
utils_db_sync.print_color_log(
utils_db_sync.sh_colors.WARNING, f"Are errors present: {log_errors}"
Expand Down Expand Up @@ -172,13 +174,13 @@ def print_report(db_schema, db_indexes):
utils_db_sync.print_color_log(utils_db_sync.sh_colors.WARNING, "NO Db indexes issues")


def main():
def main() -> None:
# system and software versions details
print("--- Sync from clean state - setup")
platform_system, platform_release, platform_version = utils.get_os_type()
print(f"Platform: {platform_system, platform_release, platform_version}")

start_test_time = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
start_test_time = datetime.datetime.now(tz=datetime.timezone.utc).strftime("%d/%m/%Y %H:%M:%S")
print(f"Test start time: {start_test_time}")

env = utils.get_arg_value(args=args, key="environment")
Expand Down Expand Up @@ -208,8 +210,8 @@ def main():
print(f"DB sync version: {db_sync_version_from_gh_action}")

# cardano-node setup
NODE_DIR = git_utils.clone_repo("cardano-node", node_version_from_gh_action)
os.chdir(NODE_DIR)
node_dir = git_utils.clone_repo("cardano-node", node_version_from_gh_action)
os.chdir(node_dir)
utils.execute_command("nix build -v .#cardano-node -o cardano-node-bin")
utils.execute_command("nix-build -v -A cardano-cli -o cardano-cli-bin")

Expand All @@ -224,8 +226,8 @@ def main():

# cardano-db sync setup
os.chdir(utils_db_sync.ROOT_TEST_PATH)
DB_SYNC_DIR = git_utils.clone_repo("cardano-db-sync", db_sync_version_from_gh_action.rstrip())
os.chdir(DB_SYNC_DIR)
db_sync_dir = git_utils.clone_repo("cardano-db-sync", db_sync_version_from_gh_action.rstrip())
os.chdir(db_sync_dir)
print("--- Db sync setup")
utils_db_sync.setup_postgres() # To login use: psql -h /path/to/postgres -p 5432 -e postgres
utils_db_sync.create_pgpass_file(env)
Expand All @@ -245,12 +247,15 @@ def main():
db_indexes = utils_db_sync.check_database(
utils_db_sync.get_db_indexes, "DB indexes are incorrect", EXPECTED_DB_INDEXES
)
epoch_no, block_no, slot_no = utils_db_sync.get_db_sync_tip(env)
end_test_time = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
db_sync_tip = utils_db_sync.get_db_sync_tip(env)
assert db_sync_tip is not None # TODO: refactor
epoch_no, block_no, slot_no = db_sync_tip
end_test_time = datetime.datetime.now(tz=datetime.timezone.utc).strftime("%d/%m/%Y %H:%M:%S")

print("--- Summary & Artifacts uploading")
print(
f"FINAL db-sync progress: {utils_db_sync.get_db_sync_progress(env)}, epoch: {epoch_no}, block: {block_no}"
f"FINAL db-sync progress: {utils_db_sync.get_db_sync_progress(env)}, "
f"epoch: {epoch_no}, block: {block_no}"
)
print(f"TOTAL sync time [sec]: {db_full_sync_time_in_secs}")

Expand All @@ -259,12 +264,12 @@ def main():
utils_db_sync.manage_process(proc_name="cardano-node", action="terminate")

# export test data as a json file
test_data = OrderedDict()
test_data: OrderedDict[str, tp.Any] = OrderedDict()
test_data["platform_system"] = platform_system
test_data["platform_release"] = platform_release
test_data["platform_version"] = platform_version
test_data["no_of_cpu_cores"] = os.cpu_count()
test_data["total_ram_in_GB"] = utils.get_total_ram_in_GB()
test_data["total_ram_in_GB"] = utils.get_total_ram_in_gb()
test_data["env"] = env
test_data["node_pr"] = node_pr
test_data["node_branch"] = node_branch
Expand All @@ -278,7 +283,9 @@ def main():
test_data["start_test_time"] = start_test_time
test_data["end_test_time"] = end_test_time
test_data["total_sync_time_in_sec"] = db_full_sync_time_in_secs
test_data["total_sync_time_in_h_m_s"] = str(timedelta(seconds=int(db_full_sync_time_in_secs)))
test_data["total_sync_time_in_h_m_s"] = str(
datetime.timedelta(seconds=int(db_full_sync_time_in_secs))
)
test_data["last_synced_epoch_no"] = epoch_no
test_data["last_synced_block_no"] = block_no
test_data["last_synced_slot_no"] = slot_no
Expand Down Expand Up @@ -333,7 +340,7 @@ def main():

if __name__ == "__main__":

def hyphenated(db_sync_start_args):
def hyphenated(db_sync_start_args: str) -> str:
start_args = db_sync_start_args.split(" ")
final_args_string = ""

Expand All @@ -349,13 +356,21 @@ def hyphenated(db_sync_start_args):
parser.add_argument(
"-nv",
"--node_version_gh_action",
help="node version - 1.33.0-rc2 (tag number) or 1.33.0 (release number - for released versions) or 1.33.0_PR2124 (for not released and not tagged runs with a specific node PR/version)",
help=(
"node version - 1.33.0-rc2 (tag number) or 1.33.0 "
"(release number - for released versions) or 1.33.0_PR2124 "
"(for not released and not tagged runs with a specific node PR/version)"
),
)
parser.add_argument("-dbr", "--db_sync_branch", help="db-sync branch or tag")
parser.add_argument(
"-dv",
"--db_sync_version_gh_action",
help="db-sync version - 12.0.0-rc2 (tag number) or 12.0.2 (release number - for released versions) or 12.0.2_PR2124 (for not released and not tagged runs with a specific db_sync PR/version)",
help=(
"db-sync version - 12.0.0-rc2 (tag number) or 12.0.2 "
"(release number - for released versions) or 12.0.2_PR2124 "
"(for not released and not tagged runs with a specific db_sync PR/version)"
),
)
parser.add_argument(
"-dsa",
Expand Down
Loading

0 comments on commit f897625

Please sign in to comment.