From f0187c2b4da206a281ff9dc0232b8b567d6126e9 Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Thu, 19 Oct 2023 17:56:49 -0300 Subject: [PATCH 1/6] Update actions; don't lock dependencies in CI (#874) --- .github/workflows/build.yml | 12 ++++++------ .github/workflows/docs.yml | 6 +++--- .github/workflows/installer.yml | 4 ++-- .github/workflows/release.yml | 16 ++++++++-------- .github/workflows/test.yml | 6 +++--- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index cb53dbb71..e8eb5def4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -26,18 +26,18 @@ jobs: steps: - name: Check out the repo - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: install: true - name: Log in to the registry - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: registry: ${{ env.DOCKER_REGISTRY }} username: ${{ github.actor }} @@ -45,7 +45,7 @@ jobs: - name: Set up metadata id: meta - uses: docker/metadata-action@v3 + uses: docker/metadata-action@v5 with: images: ${{ env.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE_NAME }} flavor: | @@ -55,7 +55,7 @@ jobs: type=ref,event=tag - name: Publish nightly image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: . file: Dockerfile diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 5cf16b51e..d1d824d0b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -35,10 +35,10 @@ jobs: # && sudo apt install gh -y - name: Check out the repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@main + uses: actions/setup-python@v4 with: python-version: '3.11' cache: 'pip' @@ -47,7 +47,7 @@ jobs: run: pip install pdm - name: Install project - run: pdm install + run: pdm sync - name: Clone frontend run: | diff --git a/.github/workflows/installer.yml b/.github/workflows/installer.yml index 6d3379640..679a03213 100644 --- a/.github/workflows/installer.yml +++ b/.github/workflows/installer.yml @@ -27,10 +27,10 @@ jobs: arch: arm64 steps: - name: Check out the repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@main + uses: actions/setup-python@v4 with: python-version: '3.11' cache: 'pip' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 99b3c70f5..e2bbf4011 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,27 +14,27 @@ jobs: steps: - name: Check out the repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 # NOTE: Fetch full history for Sentry release with: fetch-depth: 0 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: install: true - name: Log in to Docker Hub - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Log in to GHCR - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: registry: ${{ env.DOCKER_REGISTRY }} username: ${{ github.actor }} @@ -42,7 +42,7 @@ jobs: - name: Set up Docker metadata id: meta - uses: 
docker/metadata-action@v3 + uses: docker/metadata-action@v5 with: images: | dipdup/dipdup @@ -55,7 +55,7 @@ jobs: type=pep440,pattern={{major}}.{{minor}} - name: Set up Python - uses: actions/setup-python@main + uses: actions/setup-python@v4 with: python-version: '3.11' cache: 'pip' @@ -72,7 +72,7 @@ jobs: ALCHEMY_KEY: ${{ secrets.ALCHEMY_KEY }} - name: Publish stable image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: . file: Dockerfile diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 276ee4288..68f01d251 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,19 +28,19 @@ jobs: arch: arm64 steps: - name: Check out the repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install PDM run: pipx install pdm - name: Set up Python - uses: actions/setup-python@main + uses: actions/setup-python@v4 with: python-version: '3.11' cache: 'pip' - name: Run install - run: pdm install + run: pdm sync - name: Run lint run: pdm run lint From 21ad10f546ce3c6328e519dfe9f7685e7bc3b87d Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Fri, 20 Oct 2023 09:32:32 -0300 Subject: [PATCH 2/6] Helpers for testing DipDup projects (#864) --- src/dipdup/cli.py | 1 + src/dipdup/database.py | 35 +++- src/dipdup/hasura.py | 17 +- src/dipdup/test.py | 159 +++++++++++++++ tests/__init__.py | 4 +- tests/configs/test_postgres.yaml | 6 + tests/configs/test_postgres_immune.yaml | 11 ++ .../configs/{sqlite.yaml => test_sqlite.yaml} | 0 tests/configs/test_sqlite_immune.yaml | 11 ++ tests/test_demos.py | 120 +----------- tests/test_hasura.py | 69 +------ tests/test_index/test_tzkt_operations.py | 8 +- tests/test_schema.py | 183 ++++++++++++++++++ 13 files changed, 423 insertions(+), 201 deletions(-) create mode 100644 tests/configs/test_postgres.yaml create mode 100644 tests/configs/test_postgres_immune.yaml rename tests/configs/{sqlite.yaml => test_sqlite.yaml} (100%) create mode 100644 tests/configs/test_sqlite_immune.yaml create mode 100644 tests/test_schema.py diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index 9d49d9657..1a00cdc98 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -503,6 +503,7 @@ async def schema_wipe(ctx: click.Context, immune: bool, force: bool) -> None: models=models, timeout=config.database.connection_timeout, decimal_precision=config.advanced.decimal_precision, + unsafe_sqlite=config.advanced.unsafe_sqlite, ): conn = get_connection() await wipe_schema( diff --git a/src/dipdup/database.py b/src/dipdup/database.py index cff3cf138..da66b20f3 100644 --- a/src/dipdup/database.py +++ b/src/dipdup/database.py @@ -214,12 +214,40 @@ async def _pg_create_functions(conn: AsyncpgClient) -> None: await execute_sql(conn, sql_path) +async def get_tables() -> set[str]: + conn = get_connection() + if isinstance(conn, SqliteClient): + _, sqlite_res = await conn.execute_query('SELECT name FROM sqlite_master WHERE type = "table";') + return {row[0] for row in sqlite_res} + if isinstance(conn, AsyncpgClient): + _, postgres_res = await conn.execute_query( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_type = 'BASE TABLE'" + ) + return {row[0] for row in postgres_res} + + raise NotImplementedError + + async def _pg_create_views(conn: AsyncpgClient) -> None: sql_path = Path(__file__).parent / 'sql' / 'dipdup_head_status.sql' # TODO: Configurable interval await execute_sql(conn, sql_path, HEAD_STATUS_TIMEOUT) +# FIXME: Private but used in dipdup.hasura +async def 
_pg_get_views(conn: AsyncpgClient, schema_name: str) -> list[str]: + return [ + row[0] + for row in ( + await conn.execute_query( + "SELECT table_name FROM information_schema.views WHERE table_schema =" + f" '{schema_name}' UNION SELECT matviewname as table_name FROM pg_matviews" + f" WHERE schemaname = '{schema_name}'" + ) + )[1] + ] + + async def _pg_wipe_schema( conn: AsyncpgClient, schema_name: str, @@ -257,8 +285,11 @@ async def _sqlite_wipe_schema( # NOTE: Copy immune tables to the new database. master_query = 'SELECT name FROM sqlite_master WHERE type = "table"' result = await conn.execute_query(master_query) - for name in result[1]: - if name not in immune_tables: # type: ignore[comparison-overlap] + for row in result[1]: + name = row[0] + if name == 'sqlite_sequence': + continue + if name not in immune_tables: continue expr = f'CREATE TABLE {namespace}.{name} AS SELECT * FROM {name}' diff --git a/src/dipdup/hasura.py b/src/dipdup/hasura.py index 2fe04b260..f54885dde 100644 --- a/src/dipdup/hasura.py +++ b/src/dipdup/hasura.py @@ -19,6 +19,8 @@ from dipdup.config import HttpConfig from dipdup.config import PostgresDatabaseConfig from dipdup.config import ResolvedHttpConfig +from dipdup.database import AsyncpgClient +from dipdup.database import _pg_get_views from dipdup.database import get_connection from dipdup.database import iter_models from dipdup.exceptions import ConfigurationError @@ -328,18 +330,9 @@ async def _apply_custom_metadata(self) -> None: async def _get_views(self) -> list[str]: conn = get_connection() - views = [ - row[0] - for row in ( - await conn.execute_query( - "SELECT table_name FROM information_schema.views WHERE table_schema =" - f" '{self._database_config.schema_name}' UNION SELECT matviewname as table_name FROM pg_matviews" - f" WHERE schemaname = '{self._database_config.schema_name}'" - ) - )[1] - ] - self._logger.info('Found %s regular and materialized views', len(views)) - return views + if not isinstance(conn, AsyncpgClient): + raise HasuraError('Hasura integration requires `postgres` database client') + return await _pg_get_views(conn, self._database_config.schema_name) def _iterate_graphql_queries(self) -> Iterator[tuple[str, str]]: graphql_path = env.get_package_path(self._package) / 'graphql' diff --git a/src/dipdup/test.py b/src/dipdup/test.py index a71ec9957..ec5262d85 100644 --- a/src/dipdup/test.py +++ b/src/dipdup/test.py @@ -1,10 +1,30 @@ +"""This module contains helper functions for testing DipDup projects. + +These helpers are not part of the public API and can be changed without prior notice. 
+""" import asyncio +import atexit +import os +import tempfile +from collections.abc import AsyncIterator from contextlib import AsyncExitStack +from contextlib import asynccontextmanager +from pathlib import Path +from shutil import which +from typing import TYPE_CHECKING from typing import Any from dipdup.config import DipDupConfig +from dipdup.config import HasuraConfig +from dipdup.config import PostgresDatabaseConfig from dipdup.dipdup import DipDup +from dipdup.exceptions import FrameworkException from dipdup.index import Index +from dipdup.project import get_default_answers +from dipdup.yaml import DipDupYAMLConfig + +if TYPE_CHECKING: + from docker.client import DockerClient # type: ignore[import-untyped] async def create_dummy_dipdup( @@ -47,3 +67,142 @@ async def spawn_index(dipdup: DipDup, name: str) -> Index[Any, Any, Any]: index: Index[Any, Any, Any] = await dispatcher._ctx._spawn_index(name) dispatcher._indexes[name] = dispatcher._ctx._pending_indexes.pop() return index + + +def get_docker_client() -> 'DockerClient': + """Get Docker client instance if socket is available; skip test otherwise.""" + import _pytest.outcomes + from docker.client import DockerClient + + docker_socks = ( + Path('/var/run/docker.sock'), + Path.home() / 'Library' / 'Containers' / 'com.docker.docker' / 'Data' / 'vms' / '0' / 'docker.sock', + Path.home() / 'Library' / 'Containers' / 'com.docker.docker' / 'Data' / 'docker.sock', + ) + for path in docker_socks: + if path.exists(): + return DockerClient(base_url=f'unix://{path}') + + raise _pytest.outcomes.Skipped( # pragma: no cover + 'Docker socket not found', + allow_module_level=True, + ) + + +async def run_postgres_container() -> PostgresDatabaseConfig: + """Run Postgres container (destroyed on exit) and return database config with its IP.""" + docker = get_docker_client() + postgres_container = docker.containers.run( + image=get_default_answers()['postgres_image'], + environment={ + 'POSTGRES_USER': 'test', + 'POSTGRES_PASSWORD': 'test', + 'POSTGRES_DB': 'test', + }, + detach=True, + remove=True, + ) + atexit.register(postgres_container.stop) + postgres_container.reload() + postgres_ip = postgres_container.attrs['NetworkSettings']['IPAddress'] + + while not postgres_container.exec_run('pg_isready').exit_code == 0: + await asyncio.sleep(0.1) + + return PostgresDatabaseConfig( + kind='postgres', + host=postgres_ip, + port=5432, + user='test', + database='test', + password='test', + ) + + +async def run_hasura_container(postgres_ip: str) -> HasuraConfig: + """Run Hasura container (destroyed on exit) and return config with its IP.""" + docker = get_docker_client() + hasura_container = docker.containers.run( + image=get_default_answers()['hasura_image'], + environment={ + 'HASURA_GRAPHQL_DATABASE_URL': f'postgres://test:test@{postgres_ip}:5432', + }, + detach=True, + remove=True, + ) + atexit.register(hasura_container.stop) + hasura_container.reload() + hasura_ip = hasura_container.attrs['NetworkSettings']['IPAddress'] + + return HasuraConfig( + url=f'http://{hasura_ip}:8080', + source='new_source', + create_source=True, + ) + + +@asynccontextmanager +async def tmp_project( + config_paths: list[Path], + package: str, + exists: bool, + env: dict[str, str] | None = None, +) -> AsyncIterator[tuple[Path, dict[str, str]]]: + """Create a temporary isolated DipDup project.""" + with tempfile.TemporaryDirectory() as tmp_package_path: + # NOTE: Dump config + config, _ = DipDupYAMLConfig.load(config_paths, environment=False) + tmp_config_path = 
Path(tmp_package_path) / 'dipdup.yaml' + tmp_config_path.write_text(config.dump()) + + # NOTE: Symlink packages and executables + tmp_bin_path = Path(tmp_package_path) / 'bin' + tmp_bin_path.mkdir() + for executable in ('dipdup', 'datamodel-codegen'): + if (executable_path := which(executable)) is None: + raise FrameworkException(f'Executable `{executable}` not found') # pragma: no cover + os.symlink(executable_path, tmp_bin_path / executable) + + os.symlink( + Path(__file__).parent.parent / 'dipdup', + Path(tmp_package_path) / 'dipdup', + ) + + # NOTE: Ensure that `run` uses existing package and `init` creates a new one + if exists: + os.symlink( + Path(__file__).parent.parent / package, + Path(tmp_package_path) / package, + ) + + # NOTE: Prepare environment + env = { + **os.environ, + **(env or {}), + 'PATH': str(tmp_bin_path), + 'PYTHONPATH': str(tmp_package_path), + 'DIPDUP_TEST': '1', + 'DIPDUP_DEBUG': '1', + } + + yield Path(tmp_package_path), env + + +async def run_in_tmp( + tmp_path: Path, + env: dict[str, str], + *args: str, +) -> None: + """Run DipDup in existing temporary project.""" + tmp_config_path = Path(tmp_path) / 'dipdup.yaml' + + proc = await asyncio.subprocess.create_subprocess_shell( + f'dipdup -c {tmp_config_path} {" ".join(args)}', + cwd=tmp_path, + shell=True, + env=env, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + await proc.communicate() + assert proc.returncode == 0 diff --git a/tests/__init__.py b/tests/__init__.py index a3f76569c..48e92b0f7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -10,9 +10,7 @@ env.set_test() -CONFIGS_PATH = Path(__file__).parent / 'configs' -REPLAYS_PATH = Path(__file__).parent / 'replays' -SRC_PATH = Path(__file__).parent.parent / 'src' +TEST_CONFIGS = Path(__file__).parent / 'configs' @asynccontextmanager diff --git a/tests/configs/test_postgres.yaml b/tests/configs/test_postgres.yaml new file mode 100644 index 000000000..98615dd3e --- /dev/null +++ b/tests/configs/test_postgres.yaml @@ -0,0 +1,6 @@ +database: + kind: postgres + host: ${POSTGRES_HOST} + user: test + password: test + database: test \ No newline at end of file diff --git a/tests/configs/test_postgres_immune.yaml b/tests/configs/test_postgres_immune.yaml new file mode 100644 index 000000000..8fd521d66 --- /dev/null +++ b/tests/configs/test_postgres_immune.yaml @@ -0,0 +1,11 @@ +database: + kind: postgres + host: ${POSTGRES_HOST} + user: test + password: test + database: test + + immune_tables: + - tld + - domain + - test diff --git a/tests/configs/sqlite.yaml b/tests/configs/test_sqlite.yaml similarity index 100% rename from tests/configs/sqlite.yaml rename to tests/configs/test_sqlite.yaml diff --git a/tests/configs/test_sqlite_immune.yaml b/tests/configs/test_sqlite_immune.yaml new file mode 100644 index 000000000..95232dd9e --- /dev/null +++ b/tests/configs/test_sqlite_immune.yaml @@ -0,0 +1,11 @@ +database: + kind: sqlite + path: db.sqlite3 + + immune_tables: + - tld + - domain + - test + +advanced: + unsafe_sqlite: true \ No newline at end of file diff --git a/tests/test_demos.py b/tests/test_demos.py index 93d810cb9..28f92246a 100644 --- a/tests/test_demos.py +++ b/tests/test_demos.py @@ -1,80 +1,16 @@ -import os -import subprocess -import tempfile -from collections.abc import AsyncIterator from collections.abc import Awaitable from collections.abc import Callable -from contextlib import AbstractAsyncContextManager from contextlib import AsyncExitStack -from contextlib import asynccontextmanager from decimal import Decimal 
from functools import partial -from pathlib import Path -from shutil import which import pytest -from dipdup.database import get_connection from dipdup.database import tortoise_wrapper -from dipdup.exceptions import FrameworkException from dipdup.models.tezos_tzkt import TzktOperationType -from tests import CONFIGS_PATH -from tests import SRC_PATH - - -@asynccontextmanager -async def tmp_project(config_path: Path, package: str, exists: bool) -> AsyncIterator[tuple[Path, dict[str, str]]]: - with tempfile.TemporaryDirectory() as tmp_package_path: - # NOTE: Symlink configs, packages and executables - tmp_config_path = Path(tmp_package_path) / 'dipdup.yaml' - os.symlink(config_path, tmp_config_path) - - tmp_bin_path = Path(tmp_package_path) / 'bin' - tmp_bin_path.mkdir() - for executable in ('dipdup', 'datamodel-codegen'): - if (executable_path := which(executable)) is None: - raise FrameworkException(f'Executable `{executable}` not found') - os.symlink(executable_path, tmp_bin_path / executable) - - os.symlink( - SRC_PATH / 'dipdup', - Path(tmp_package_path) / 'dipdup', - ) - - # NOTE: Ensure that `run` uses existing package and `init` creates a new one - if exists: - os.symlink( - SRC_PATH / package, - Path(tmp_package_path) / package, - ) - - # NOTE: Prepare environment - env = { - **os.environ, - 'PATH': str(tmp_bin_path), - 'PYTHONPATH': str(tmp_package_path), - 'DIPDUP_TEST': '1', - } - - yield Path(tmp_package_path), env - - -async def run_in_tmp( - tmp_path: Path, - env: dict[str, str], - *cmd: str, -) -> None: - sqlite_config_path = Path(__file__).parent / 'configs' / 'sqlite.yaml' - tmp_config_path = Path(tmp_path) / 'dipdup.yaml' - - subprocess.run( - f'dipdup -c {tmp_config_path} -c {sqlite_config_path} {" ".join(cmd)}', - cwd=tmp_path, - check=True, - shell=True, - env=env, - capture_output=True, - ) +from dipdup.test import run_in_tmp +from dipdup.test import tmp_project +from tests import TEST_CONFIGS async def assert_run_token() -> None: @@ -262,11 +198,13 @@ async def test_run_init( cmd: str, assert_fn: Callable[[], Awaitable[None]], ) -> None: - config_path = CONFIGS_PATH / config + config_path = TEST_CONFIGS / config + env_config_path = TEST_CONFIGS / 'test_sqlite.yaml' + async with AsyncExitStack() as stack: tmp_package_path, env = await stack.enter_async_context( tmp_project( - config_path, + [config_path, env_config_path], package, exists=cmd != 'init', ), @@ -280,47 +218,3 @@ async def test_run_init( ) await assert_fn() - - -async def _count_tables() -> int: - conn = get_connection() - _, res = await conn.execute_query('SELECT count(name) FROM sqlite_master WHERE type = "table";') - return int(res[0][0]) - - -async def test_schema() -> None: - package = 'demo_token' - config_path = CONFIGS_PATH / f'{package}.yml' - - async with AsyncExitStack() as stack: - tmp_package_path, env = await stack.enter_async_context( - tmp_project( - config_path, - package, - exists=True, - ), - ) - - def tortoise() -> AbstractAsyncContextManager[None]: - return tortoise_wrapper( - f'sqlite://{tmp_package_path}/db.sqlite3', - f'{package}.models', - ) - - async with tortoise(): - conn = get_connection() - assert (await _count_tables()) == 0 - - await run_in_tmp(tmp_package_path, env, 'schema', 'init') - - async with tortoise(): - conn = get_connection() - assert (await _count_tables()) == 10 - await conn.execute_script('CREATE TABLE test (id INTEGER PRIMARY KEY);') - assert (await _count_tables()) == 11 - - await run_in_tmp(tmp_package_path, env, 'schema', 'wipe', '--force') - - async with 
tortoise(): - conn = get_connection() - assert (await _count_tables()) == 0 diff --git a/tests/test_hasura.py b/tests/test_hasura.py index ea233c607..bd48ce3f7 100644 --- a/tests/test_hasura.py +++ b/tests/test_hasura.py @@ -1,5 +1,3 @@ -import asyncio -import atexit import os from contextlib import AsyncExitStack from pathlib import Path @@ -9,7+7,6 @@ import pytest from aiohttp import web from aiohttp.pytest_plugin import AiohttpClient -from docker.client import DockerClient # type: ignore[import-untyped] from tortoise import Tortoise from dipdup.config import DipDupConfig @@ -20,76 +17,14 @@ from dipdup.hasura import HasuraGateway from dipdup.models import ReindexingAction from dipdup.models import ReindexingReason -from dipdup.project import get_default_answers from dipdup.test import create_dummy_dipdup +from dipdup.test import run_hasura_container +from dipdup.test import run_postgres_container if TYPE_CHECKING: from aiohttp.test_utils import TestClient -def get_docker_client() -> DockerClient: - docker_socks = ( - Path('/var/run/docker.sock'), - Path.home() / 'Library' / 'Containers' / 'com.docker.docker' / 'Data' / 'vms' / '0' / 'docker.sock', - Path.home() / 'Library' / 'Containers' / 'com.docker.docker' / 'Data' / 'docker.sock', - ) - for path in docker_socks: - if path.exists(): - return DockerClient(base_url=f'unix://{path}') - else: - pytest.skip('Docker socket not found', allow_module_level=True) - - -async def run_postgres_container() -> PostgresDatabaseConfig: - docker = get_docker_client() - postgres_container = docker.containers.run( - image=get_default_answers()['postgres_image'], - environment={ - 'POSTGRES_USER': 'test', - 'POSTGRES_PASSWORD': 'test', - 'POSTGRES_DB': 'test', - }, - detach=True, - remove=True, - ) - atexit.register(postgres_container.stop) - postgres_container.reload() - postgres_ip = postgres_container.attrs['NetworkSettings']['IPAddress'] - - while not postgres_container.exec_run('pg_isready').exit_code == 0: - await asyncio.sleep(0.1) - - return PostgresDatabaseConfig( - kind='postgres', - host=postgres_ip, - port=5432, - user='test', - database='test', - password='test', - ) - - -async def run_hasura_container(postgres_ip: str) -> HasuraConfig: - docker = get_docker_client() - hasura_container = docker.containers.run( - image=get_default_answers()['hasura_image'], - environment={ - 'HASURA_GRAPHQL_DATABASE_URL': f'postgres://test:test@{postgres_ip}:5432', - }, - detach=True, - remove=True, - ) - atexit.register(hasura_container.stop) - hasura_container.reload() - hasura_ip = hasura_container.attrs['NetworkSettings']['IPAddress'] - - return HasuraConfig( - url=f'http://{hasura_ip}:8080', - source='new_source', - create_source=True, - ) - - async def test_configure_hasura() -> None: if os.uname().sysname != 'Linux' or 'microsoft' in os.uname().release: # check for WSL, Windows, macOS and others pytest.skip('Test is not supported for os architecture', allow_module_level=True) diff --git a/tests/test_index/test_tzkt_operations.py b/tests/test_index/test_tzkt_operations.py index 3e4032e4a..dbb2a28a3 100644 --- a/tests/test_index/test_tzkt_operations.py +++ b/tests/test_index/test_tzkt_operations.py @@ -19,7 +19,7 @@ from dipdup.models.tezos_tzkt import TzktOperationType from dipdup.test import create_dummy_dipdup from dipdup.test import spawn_index -from tests import CONFIGS_PATH +from tests import TEST_CONFIGS from tests import tzkt_replay @@ -31,7 +31,7 @@ async def tzkt() -> AsyncIterator[TzktDatasource]: @pytest.fixture def index_config() -> 
TzktOperationsIndexConfig: - config = DipDupConfig.load([CONFIGS_PATH / 'operation_filters.yml'], True) + config = DipDupConfig.load([TEST_CONFIGS / 'operation_filters.yml'], True) config.initialize() return cast(TzktOperationsIndexConfig, config.indexes['test']) @@ -124,7 +124,7 @@ async def test_get_transaction_filters(tzkt: TzktDatasource, index_config: TzktO async def test_get_sync_level() -> None: - config = DipDupConfig.load([CONFIGS_PATH / 'demo_token.yml'], True) + config = DipDupConfig.load([TEST_CONFIGS / 'demo_token.yml'], True) async with AsyncExitStack() as stack: dipdup = await create_dummy_dipdup(config, stack) index = await spawn_index(dipdup, 'tzbtc_holders_mainnet') @@ -149,7 +149,7 @@ async def test_get_sync_level() -> None: async def test_realtime() -> None: from demo_token import models - config = DipDupConfig.load([CONFIGS_PATH / 'demo_token.yml'], True) + config = DipDupConfig.load([TEST_CONFIGS / 'demo_token.yml'], True) async with AsyncExitStack() as stack: dipdup = await create_dummy_dipdup(config, stack) await dipdup._set_up_datasources(stack) diff --git a/tests/test_schema.py b/tests/test_schema.py new file mode 100644 index 000000000..da62ed023 --- /dev/null +++ b/tests/test_schema.py @@ -0,0 +1,183 @@ +from contextlib import AbstractAsyncContextManager +from contextlib import AsyncExitStack + +from dipdup.database import get_connection +from dipdup.database import get_tables +from dipdup.database import tortoise_wrapper +from dipdup.test import run_in_tmp +from dipdup.test import run_postgres_container +from dipdup.test import tmp_project +from tests import TEST_CONFIGS + +_dipdup_tables = { + 'dipdup_contract_metadata', + 'dipdup_model_update', + 'dipdup_schema', + 'dipdup_contract', + 'dipdup_token_metadata', + 'dipdup_head', + 'dipdup_index', + 'dipdup_meta', +} + + +async def test_schema_sqlite() -> None: + package = 'demo_domains' + config_path = TEST_CONFIGS / f'{package}.yml' + env_config_path = TEST_CONFIGS / 'test_sqlite.yaml' + + async with AsyncExitStack() as stack: + tmp_package_path, env = await stack.enter_async_context( + tmp_project( + [config_path, env_config_path], + package, + exists=True, + ), + ) + + def tortoise() -> AbstractAsyncContextManager[None]: + return tortoise_wrapper( + f'sqlite://{tmp_package_path}/db.sqlite3', + f'{package}.models', + ) + + async with tortoise(): + conn = get_connection() + assert await get_tables() == set() + + await run_in_tmp(tmp_package_path, env, 'schema', 'init') + + async with tortoise(): + conn = get_connection() + assert await get_tables() == _dipdup_tables | {'tld', 'record', 'domain', 'sqlite_sequence'} + await conn.execute_script('CREATE TABLE test (id INTEGER PRIMARY KEY);') + assert await get_tables() == _dipdup_tables | {'tld', 'record', 'domain', 'sqlite_sequence', 'test'} + + await run_in_tmp(tmp_package_path, env, 'schema', 'wipe', '--force') + + async with tortoise(): + conn = get_connection() + assert await get_tables() == set() + + +async def test_schema_sqlite_immune() -> None: + package = 'demo_domains' + config_path = TEST_CONFIGS / f'{package}.yml' + env_config_path = TEST_CONFIGS / 'test_sqlite_immune.yaml' + + async with AsyncExitStack() as stack: + tmp_package_path, env = await stack.enter_async_context( + tmp_project( + [config_path, env_config_path], + package, + exists=True, + ), + ) + + def tortoise() -> AbstractAsyncContextManager[None]: + return tortoise_wrapper( + f'sqlite://{tmp_package_path}/db.sqlite3', + f'{package}.models', + ) + + async with tortoise(): + conn = 
get_connection() + assert await get_tables() == set() + + await run_in_tmp(tmp_package_path, env, 'schema', 'init') + + async with tortoise(): + conn = get_connection() + assert await get_tables() == _dipdup_tables | {'tld', 'record', 'domain', 'sqlite_sequence'} + await conn.execute_script('CREATE TABLE test (id INTEGER PRIMARY KEY);') + assert await get_tables() == _dipdup_tables | {'tld', 'record', 'domain', 'sqlite_sequence', 'test'} + + await run_in_tmp(tmp_package_path, env, 'schema', 'wipe', '--force') + + async with tortoise(): + conn = get_connection() + assert await get_tables() == {'dipdup_meta', 'test', 'domain', 'tld'} + + +async def test_schema_postgres() -> None: + package = 'demo_domains' + config_path = TEST_CONFIGS / f'{package}.yml' + env_config_path = TEST_CONFIGS / 'test_postgres.yaml' + + async with AsyncExitStack() as stack: + tmp_package_path, env = await stack.enter_async_context( + tmp_project( + [config_path, env_config_path], + package, + exists=True, + ), + ) + + database_config = await run_postgres_container() + env['POSTGRES_HOST'] = database_config.host + + def tortoise() -> AbstractAsyncContextManager[None]: + return tortoise_wrapper( + database_config.connection_string, + f'{package}.models', + ) + + async with tortoise(): + conn = get_connection() + assert await get_tables() == set() + + await run_in_tmp(tmp_package_path, env, 'schema', 'init') + + async with tortoise(): + conn = get_connection() + assert await get_tables() == _dipdup_tables | {'tld', 'record', 'domain'} + await conn.execute_script('CREATE TABLE test (id INTEGER PRIMARY KEY);') + assert await get_tables() == _dipdup_tables | {'tld', 'record', 'domain', 'test'} + + await run_in_tmp(tmp_package_path, env, 'schema', 'wipe', '--force') + + async with tortoise(): + conn = get_connection() + assert await get_tables() == {'dipdup_meta'} + + +async def test_schema_postgres_immune() -> None: + package = 'demo_domains' + config_path = TEST_CONFIGS / f'{package}.yml' + env_config_path = TEST_CONFIGS / 'test_postgres_immune.yaml' + + async with AsyncExitStack() as stack: + tmp_package_path, env = await stack.enter_async_context( + tmp_project( + [config_path, env_config_path], + package, + exists=True, + ), + ) + + database_config = await run_postgres_container() + env['POSTGRES_HOST'] = database_config.host + + def tortoise() -> AbstractAsyncContextManager[None]: + return tortoise_wrapper( + database_config.connection_string, + f'{package}.models', + ) + + async with tortoise(): + conn = get_connection() + assert await get_tables() == set() + + await run_in_tmp(tmp_package_path, env, 'schema', 'init') + + async with tortoise(): + conn = get_connection() + assert await get_tables() == _dipdup_tables | {'tld', 'record', 'domain'} + await conn.execute_script('CREATE TABLE test (id INTEGER PRIMARY KEY);') + assert await get_tables() == _dipdup_tables | {'tld', 'record', 'domain', 'test'} + + await run_in_tmp(tmp_package_path, env, 'schema', 'wipe', '--force') + + async with tortoise(): + conn = get_connection() + assert await get_tables() == {'dipdup_meta', 'test', 'domain', 'tld'} From 3e6cee8e9c66f4c4b0c0e97e3cee8d16ef957b86 Mon Sep 17 00:00:00 2001 From: Wizard1209 <34334729+Wizard1209@users.noreply.github.com> Date: Fri, 20 Oct 2023 09:34:50 -0300 Subject: [PATCH 3/6] Add balances index (#871) Co-authored-by: Vladimir Bobrikov --- CHANGELOG.md | 10 ++ docs/1.getting-started/7.indexes.md | 1 + docs/2.indexes/8.tezos_tzkt_token_balances.md | 19 +++ src/demo_token_balances/.dockerignore | 22 ++++ 
src/demo_token_balances/.gitignore | 29 +++++ src/demo_token_balances/README.md | 61 ++++++++++ src/demo_token_balances/__init__.py | 0 src/demo_token_balances/abi/.keep | 0 src/demo_token_balances/configs/.keep | 0 .../configs/dipdup.compose.yaml | 22 ++++ .../configs/dipdup.sqlite.yaml | 5 + .../configs/dipdup.swarm.yaml | 22 ++++ src/demo_token_balances/configs/replay.yaml | 15 +++ src/demo_token_balances/deploy/.env.default | 12 ++ src/demo_token_balances/deploy/.keep | 0 src/demo_token_balances/deploy/Dockerfile | 9 ++ .../deploy/compose.sqlite.yaml | 19 +++ .../deploy/compose.swarm.yaml | 92 ++++++++++++++ src/demo_token_balances/deploy/compose.yaml | 55 +++++++++ .../deploy/sqlite.env.default | 5 + .../deploy/swarm.env.default | 12 ++ src/demo_token_balances/dipdup.yaml | 21 ++++ src/demo_token_balances/graphql/.keep | 0 src/demo_token_balances/handlers/.keep | 0 .../handlers/on_balance_update.py | 14 +++ src/demo_token_balances/hasura/.keep | 0 src/demo_token_balances/hooks/.keep | 0 .../hooks/on_index_rollback.py | 16 +++ src/demo_token_balances/hooks/on_reindex.py | 7 ++ src/demo_token_balances/hooks/on_restart.py | 7 ++ .../hooks/on_synchronized.py | 7 ++ src/demo_token_balances/models/.keep | 0 src/demo_token_balances/models/__init__.py | 7 ++ src/demo_token_balances/py.typed | 0 src/demo_token_balances/pyproject.toml | 52 ++++++++ src/demo_token_balances/sql/.keep | 0 .../sql/on_index_rollback/.keep | 0 src/demo_token_balances/sql/on_reindex/.keep | 0 src/demo_token_balances/sql/on_restart/.keep | 0 .../sql/on_synchronized/.keep | 0 src/demo_token_balances/types/.keep | 0 src/dipdup/config/__init__.py | 15 ++- .../config/tezos_tzkt_token_balances.py | 80 ++++++++++++ src/dipdup/context.py | 7 +- src/dipdup/datasources/tezos_tzkt.py | 75 ++++++++++++ .../tezos_tzkt_token_balances/__init__.py | 0 .../tezos_tzkt_token_balances/index.py | 114 ++++++++++++++++++ .../tezos_tzkt_token_balances/matcher.py | 42 +++++++ src/dipdup/models/__init__.py | 1 + src/dipdup/models/tezos_tzkt.py | 77 +++++++++++- .../demo_token_balances/dipdup.yaml.j2 | 21 ++++ .../handlers/on_balance_update.py.j2 | 13 ++ .../demo_token_balances/models/__init__.py.j2 | 8 ++ .../projects/demo_token_balances/replay.yaml | 5 + tests/configs/demo_token_balances.yml | 23 ++++ tests/test_demos.py | 14 +++ 56 files changed, 1031 insertions(+), 5 deletions(-) create mode 100644 docs/2.indexes/8.tezos_tzkt_token_balances.md create mode 100644 src/demo_token_balances/.dockerignore create mode 100644 src/demo_token_balances/.gitignore create mode 100644 src/demo_token_balances/README.md create mode 100644 src/demo_token_balances/__init__.py create mode 100644 src/demo_token_balances/abi/.keep create mode 100644 src/demo_token_balances/configs/.keep create mode 100644 src/demo_token_balances/configs/dipdup.compose.yaml create mode 100644 src/demo_token_balances/configs/dipdup.sqlite.yaml create mode 100644 src/demo_token_balances/configs/dipdup.swarm.yaml create mode 100644 src/demo_token_balances/configs/replay.yaml create mode 100644 src/demo_token_balances/deploy/.env.default create mode 100644 src/demo_token_balances/deploy/.keep create mode 100644 src/demo_token_balances/deploy/Dockerfile create mode 100644 src/demo_token_balances/deploy/compose.sqlite.yaml create mode 100644 src/demo_token_balances/deploy/compose.swarm.yaml create mode 100644 src/demo_token_balances/deploy/compose.yaml create mode 100644 src/demo_token_balances/deploy/sqlite.env.default create mode 100644 
src/demo_token_balances/deploy/swarm.env.default create mode 100644 src/demo_token_balances/dipdup.yaml create mode 100644 src/demo_token_balances/graphql/.keep create mode 100644 src/demo_token_balances/handlers/.keep create mode 100644 src/demo_token_balances/handlers/on_balance_update.py create mode 100644 src/demo_token_balances/hasura/.keep create mode 100644 src/demo_token_balances/hooks/.keep create mode 100644 src/demo_token_balances/hooks/on_index_rollback.py create mode 100644 src/demo_token_balances/hooks/on_reindex.py create mode 100644 src/demo_token_balances/hooks/on_restart.py create mode 100644 src/demo_token_balances/hooks/on_synchronized.py create mode 100644 src/demo_token_balances/models/.keep create mode 100644 src/demo_token_balances/models/__init__.py create mode 100644 src/demo_token_balances/py.typed create mode 100644 src/demo_token_balances/pyproject.toml create mode 100644 src/demo_token_balances/sql/.keep create mode 100644 src/demo_token_balances/sql/on_index_rollback/.keep create mode 100644 src/demo_token_balances/sql/on_reindex/.keep create mode 100644 src/demo_token_balances/sql/on_restart/.keep create mode 100644 src/demo_token_balances/sql/on_synchronized/.keep create mode 100644 src/demo_token_balances/types/.keep create mode 100644 src/dipdup/config/tezos_tzkt_token_balances.py create mode 100644 src/dipdup/indexes/tezos_tzkt_token_balances/__init__.py create mode 100644 src/dipdup/indexes/tezos_tzkt_token_balances/index.py create mode 100644 src/dipdup/indexes/tezos_tzkt_token_balances/matcher.py create mode 100644 src/dipdup/projects/demo_token_balances/dipdup.yaml.j2 create mode 100644 src/dipdup/projects/demo_token_balances/handlers/on_balance_update.py.j2 create mode 100644 src/dipdup/projects/demo_token_balances/models/__init__.py.j2 create mode 100644 src/dipdup/projects/demo_token_balances/replay.yaml create mode 100644 tests/configs/demo_token_balances.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 451da2348..6ef2825f6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning]. +## Unreleased + +### Added + +- tezos.tzkt.token_balances: Added new index. + +### Fixed + +- tezos.tzkt.token_transfers: Fixed token_id handler in token transfers index. + ## [7.0.2] - 2023-10-10 ### Added diff --git a/docs/1.getting-started/7.indexes.md b/docs/1.getting-started/7.indexes.md index b023d6582..ddccbe2c7 100644 --- a/docs/1.getting-started/7.indexes.md +++ b/docs/1.getting-started/7.indexes.md @@ -18,6 +18,7 @@ Multiple indexes are available for different workloads. Every index is linked to | [tezos.tzkt.operations](../2.indexes/5.tezos_tzkt_operations.md) | Tezos | TzKT | typed operations | | [tezos.tzkt.operations_unfiltered](../2.indexes/6.tezos_tzkt_operations_unfiltered.md) | Tezos | TzKT | untyped operations | | [tezos.tzkt.token_transfers](../2.indexes/7.tezos_tzkt_token_transfers.md) | Tezos | TzKT | TZIP-12/16 token transfers | +| [tezos.tzkt.token_balances](../2.indexes/8.tezos_tzkt_token_balances.md) | Tezos | TzKT | TZIP-12/16 token balances | Indexes can join multiple contracts considered as a single application. Also, contracts can be used by multiple indexes of any kind, but make sure that they are independent of each other and that indexed data don't overlap. 
diff --git a/docs/2.indexes/8.tezos_tzkt_token_balances.md b/docs/2.indexes/8.tezos_tzkt_token_balances.md new file mode 100644 index 000000000..12f0ffa1a --- /dev/null +++ b/docs/2.indexes/8.tezos_tzkt_token_balances.md @@ -0,0 +1,19 @@ +--- +title: "Token balances" +description: "This index allows indexing token balances of contracts compatible with FA1.2 or FA2 standards." +network: "tezos" +--- + +# `tezos.tzkt.token_balances` index + +This index allows indexing token balances of contracts compatible with [FA1.2](https://gitlab.com/tzip/tzip/-/blob/master/proposals/tzip-7/README.md) or [FA2](https://gitlab.com/tzip/tzip/-/blob/master/proposals/tzip-12/tzip-12.md) standards. You can either index transfers and cumulatively calculate balances or use this index type to fetch the latest balance information directly. + +```yaml [dipdup.yaml] +{{ #include ../src/demo_token_balances/dipdup.yaml }} +``` + +The callback receives a `TzktTokenBalanceData` model that optionally contains the owner, token, and balance values. + +```python +{{ #include ../src/demo_token_balances/handlers/on_balance_update.py }} +``` diff --git a/src/demo_token_balances/.dockerignore b/src/demo_token_balances/.dockerignore new file mode 100644 index 000000000..861e17227 --- /dev/null +++ b/src/demo_token_balances/.dockerignore @@ -0,0 +1,22 @@ +# Ignore all +* + +# Add metadata and build files +!demo_token_balances +!pyproject.toml +!pdm.lock +!README.md + +# Add Python code +!**/*.py +**/.*_cache +**/__pycache__ + +# Add configs and scripts (but not env!) +!**/*.graphql +!**/*.json +!**/*.sql +!**/*.yaml +!**/*.yml +!**/*.j2 +!**/.keep \ No newline at end of file diff --git a/src/demo_token_balances/.gitignore b/src/demo_token_balances/.gitignore new file mode 100644 index 000000000..6961da918 --- /dev/null +++ b/src/demo_token_balances/.gitignore @@ -0,0 +1,29 @@ +# Ignore all +* +!*/ + +# Add metadata and build files +!demo_token_balances +!.gitignore +!.dockerignore +!py.typed +!**/Dockerfile +!**/Makefile +!**/pyproject.toml +!**/pdm.lock +!**/README.md +!**/.keep + +# Add Python code +!**/*.py +**/.*_cache +**/__pycache__ + +# Add configs and scripts (but not env!) +!**/*.graphql +!**/*.json +!**/*.sql +!**/*.yaml +!**/*.yml +!**/*.j2 +!**/*.env.default \ No newline at end of file diff --git a/src/demo_token_balances/README.md b/src/demo_token_balances/README.md new file mode 100644 index 000000000..2caca0555 --- /dev/null +++ b/src/demo_token_balances/README.md @@ -0,0 +1,61 @@ +# demo_token_balances + +TzBTC FA1.2 token balances + +## Installation + +This project is based on [DipDup](https://dipdup.io), a framework for building featureful dapps. + +You need a Linux/macOS system with Python 3.11 installed. Use our installer for easy setup: + +```bash +curl -Lsf https://dipdup.io/install.py | python3 +``` + +See the [Installation](https://dipdup.io/docs/installation) page for all options. + +## Usage + +Run the indexer in-memory: + +```bash +dipdup run +``` + +Store data in a SQLite database: + +```bash +dipdup -c . -c configs/dipdup.sqlite.yml run +``` + +Or spawn a docker-compose stack: + +```bash +cp deploy/.env.default deploy/.env +# Edit .env before running +docker-compose -f deploy/compose.yaml up +``` + +## Development setup + +We recommend [PDM](https://pdm.fming.dev/latest/) for managing Python projects. To set up the development environment: + +```bash +pdm install +$(pdm venv activate) +``` + +Some tools are included to help you keep the code quality high: black, ruff and mypy. 
+ +```bash +# Format code +pdm fmt + +# Lint code +pdm lint + +# Build Docker image +pdm image +``` + +Inspect the `pyproject.toml` file. It contains all the dependencies and tools used in the project. \ No newline at end of file diff --git a/src/demo_token_balances/__init__.py b/src/demo_token_balances/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/abi/.keep b/src/demo_token_balances/abi/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/configs/.keep b/src/demo_token_balances/configs/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/configs/dipdup.compose.yaml b/src/demo_token_balances/configs/dipdup.compose.yaml new file mode 100644 index 000000000..98824f36f --- /dev/null +++ b/src/demo_token_balances/configs/dipdup.compose.yaml @@ -0,0 +1,22 @@ +database: + kind: postgres + host: ${POSTGRES_HOST:-db} + port: 5432 + user: ${POSTGRES_USER:-dipdup} + password: ${POSTGRES_PASSWORD} + database: ${POSTGRES_DB:-dipdup} + +hasura: + url: http://${HASURA_HOST:-hasura}:8080 + admin_secret: ${HASURA_SECRET} + allow_aggregations: true + camel_case: true + +sentry: + dsn: ${SENTRY_DSN:-""} + environment: ${SENTRY_ENVIRONMENT:-""} + +prometheus: + host: 0.0.0.0 + +logging: ${LOGLEVEL:-INFO} \ No newline at end of file diff --git a/src/demo_token_balances/configs/dipdup.sqlite.yaml b/src/demo_token_balances/configs/dipdup.sqlite.yaml new file mode 100644 index 000000000..ec7006d3d --- /dev/null +++ b/src/demo_token_balances/configs/dipdup.sqlite.yaml @@ -0,0 +1,5 @@ +database: + kind: sqlite + path: ${SQLITE_PATH:-/tmp/demo_token_balances.sqlite} + +logging: ${LOGLEVEL:-INFO} \ No newline at end of file diff --git a/src/demo_token_balances/configs/dipdup.swarm.yaml b/src/demo_token_balances/configs/dipdup.swarm.yaml new file mode 100644 index 000000000..6c7c79b82 --- /dev/null +++ b/src/demo_token_balances/configs/dipdup.swarm.yaml @@ -0,0 +1,22 @@ +database: + kind: postgres + host: ${POSTGRES_HOST:-demo_token_balances_db} + port: 5432 + user: ${POSTGRES_USER:-dipdup} + password: ${POSTGRES_PASSWORD} + database: ${POSTGRES_DB:-dipdup} + +hasura: + url: http://${HASURA_HOST:-demo_token_balances_hasura}:8080 + admin_secret: ${HASURA_SECRET} + allow_aggregations: false + camel_case: true + +sentry: + dsn: ${SENTRY_DSN:-""} + environment: ${SENTRY_ENVIRONMENT:-""} + +prometheus: + host: 0.0.0.0 + +logging: ${LOGLEVEL:-INFO} \ No newline at end of file diff --git a/src/demo_token_balances/configs/replay.yaml b/src/demo_token_balances/configs/replay.yaml new file mode 100644 index 000000000..ef2c6ef31 --- /dev/null +++ b/src/demo_token_balances/configs/replay.yaml @@ -0,0 +1,15 @@ +# Run `dipdup new --replay configs/replay.yaml` to generate new project from this replay +spec_version: 2.0 +replay: + dipdup_version: 7 + template: demo_token_balances + package: demo_token_balances + version: 0.0.1 + description: TzBTC FA1.2 token balances + license: MIT + name: John Doe + email: john_doe@example.com + postgres_image: postgres:15 + postgres_data_path: /var/lib/postgresql/data + hasura_image: hasura/graphql-engine:latest + line_length: 120 diff --git a/src/demo_token_balances/deploy/.env.default b/src/demo_token_balances/deploy/.env.default new file mode 100644 index 000000000..00b262cb5 --- /dev/null +++ b/src/demo_token_balances/deploy/.env.default @@ -0,0 +1,12 @@ +# This env file was generated automatically by DipDup. Do not edit it! 
+# Create a copy with .env extension, fill it with your values and run DipDup with `--env-file` option. +# +HASURA_HOST=hasura +HASURA_SECRET= +LOGLEVEL=INFO +POSTGRES_DB=dipdup +POSTGRES_HOST=db +POSTGRES_PASSWORD= +POSTGRES_USER=dipdup +SENTRY_DSN="" +SENTRY_ENVIRONMENT="" diff --git a/src/demo_token_balances/deploy/.keep b/src/demo_token_balances/deploy/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/deploy/Dockerfile b/src/demo_token_balances/deploy/Dockerfile new file mode 100644 index 000000000..b9215c26f --- /dev/null +++ b/src/demo_token_balances/deploy/Dockerfile @@ -0,0 +1,9 @@ +FROM dipdup/dipdup:7 +# FROM ghcr.io/dipdup-io/dipdup:7 +# FROM ghcr.io/dipdup-io/dipdup:next + +# COPY --chown=dipdup pyproject.toml README.md . +# RUN pip install . + +COPY --chown=dipdup . demo_token_balances +WORKDIR demo_token_balances \ No newline at end of file diff --git a/src/demo_token_balances/deploy/compose.sqlite.yaml b/src/demo_token_balances/deploy/compose.sqlite.yaml new file mode 100644 index 000000000..1c7fc50ef --- /dev/null +++ b/src/demo_token_balances/deploy/compose.sqlite.yaml @@ -0,0 +1,19 @@ +version: "3.8" +name: demo_token_balances + +services: + dipdup: + build: + context: .. + dockerfile: deploy/Dockerfile + command: ["-c", "dipdup.yaml", "-c", "configs/dipdup.sqlite.yaml", "run"] + restart: always + env_file: .env + ports: + - 46339 + - 9000 + volumes: + - sqlite:${SQLITE_PATH:-/tmp/demo_token_balances.sqlite} + +volumes: + sqlite: \ No newline at end of file diff --git a/src/demo_token_balances/deploy/compose.swarm.yaml b/src/demo_token_balances/deploy/compose.swarm.yaml new file mode 100644 index 000000000..a64536e89 --- /dev/null +++ b/src/demo_token_balances/deploy/compose.swarm.yaml @@ -0,0 +1,92 @@ +version: "3.8" +name: demo_token_balances + +services: + dipdup: + image: ${IMAGE:-ghcr.io/dipdup-io/dipdup}:${TAG:-7} + depends_on: + - db + - hasura + command: ["-c", "dipdup.yaml", "-c", "configs/dipdup.swarm.yaml", "run"] + env_file: .env + networks: + - internal + - prometheus-private + deploy: + mode: replicated + replicas: ${INDEXER_ENABLED:-1} + labels: + - prometheus-job=${SERVICE} + - prometheus-port=8000 + placement: &placement + constraints: + - node.labels.${SERVICE} == true + logging: &logging + driver: "json-file" + options: + max-size: "10m" + max-file: "10" + tag: "\{\{.Name\}\}.\{\{.ImageID\}\}" + + db: + image: postgres:15 + volumes: + - db:/var/lib/postgresql/data + env_file: .env + environment: + - POSTGRES_USER=dipdup + - POSTGRES_DB=dipdup + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - internal + deploy: + mode: replicated + replicas: 1 + placement: *placement + logging: *logging + + hasura: + image: hasura/graphql-engine:latest + depends_on: + - db + environment: + - HASURA_GRAPHQL_DATABASE_URL=postgres://dipdup:${POSTGRES_PASSWORD}@demo_token_balances_db:5432/dipdup + - HASURA_GRAPHQL_ADMIN_SECRET=${HASURA_SECRET} + - HASURA_GRAPHQL_ENABLE_CONSOLE=true + - HASURA_GRAPHQL_DEV_MODE=false + - HASURA_GRAPHQL_LOG_LEVEL=warn + - HASURA_GRAPHQL_ENABLE_TELEMETRY=false + - HASURA_GRAPHQL_UNAUTHORIZED_ROLE=user + - HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES=true + networks: + - internal + - traefik-public + deploy: + mode: replicated + replicas: 1 + labels: + - traefik.enable=true + - traefik.http.services.${SERVICE}.loadbalancer.server.port=8080 + - "traefik.http.routers.${SERVICE}.rule=Host(`${HOST}`) && 
(PathPrefix(`/v1/graphql`) || PathPrefix(`/api/rest`))" + - traefik.http.routers.${SERVICE}.entrypoints=http,${INGRESS:-ingress} + - "traefik.http.routers.${SERVICE}-console.rule=Host(`${SERVICE}.${SWARM_ROOT_DOMAIN}`)" + - traefik.http.routers.${SERVICE}-console.entrypoints=https + - traefik.http.middlewares.${SERVICE}-console.headers.customrequestheaders.X-Hasura-Admin-Secret=${HASURA_SECRET} + - traefik.http.routers.${SERVICE}-console.middlewares=authelia@docker,${SERVICE}-console + placement: *placement + logging: *logging + +volumes: + db: + +networks: + internal: + traefik-public: + external: true + prometheus-private: + external: true \ No newline at end of file diff --git a/src/demo_token_balances/deploy/compose.yaml b/src/demo_token_balances/deploy/compose.yaml new file mode 100644 index 000000000..884b80f27 --- /dev/null +++ b/src/demo_token_balances/deploy/compose.yaml @@ -0,0 +1,55 @@ +version: "3.8" +name: demo_token_balances + +services: + dipdup: + build: + context: .. + dockerfile: deploy/Dockerfile + restart: always + env_file: .env + ports: + - 46339 + - 9000 + command: ["-c", "dipdup.yaml", "-c", "configs/dipdup.compose.yaml", "run"] + depends_on: + - db + - hasura + + db: + image: postgres:15 + ports: + - 5432 + volumes: + - db:/var/lib/postgresql/data + restart: always + env_file: .env + environment: + - POSTGRES_USER=dipdup + - POSTGRES_DB=dipdup + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + healthcheck: + test: ["CMD-SHELL", "pg_isready -U dipdup"] + interval: 10s + timeout: 5s + retries: 5 + + hasura: + image: hasura/graphql-engine:latest + ports: + - 8080 + depends_on: + - db + restart: always + environment: + - HASURA_GRAPHQL_DATABASE_URL=postgres://dipdup:${POSTGRES_PASSWORD}@db:5432/dipdup + - HASURA_GRAPHQL_ADMIN_SECRET=${HASURA_SECRET} + - HASURA_GRAPHQL_ENABLE_CONSOLE=true + - HASURA_GRAPHQL_DEV_MODE=true + - HASURA_GRAPHQL_LOG_LEVEL=info + - HASURA_GRAPHQL_ENABLE_TELEMETRY=false + - HASURA_GRAPHQL_UNAUTHORIZED_ROLE=user + - HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES=true + +volumes: + db: \ No newline at end of file diff --git a/src/demo_token_balances/deploy/sqlite.env.default b/src/demo_token_balances/deploy/sqlite.env.default new file mode 100644 index 000000000..4cacf6bc5 --- /dev/null +++ b/src/demo_token_balances/deploy/sqlite.env.default @@ -0,0 +1,5 @@ +# This env file was generated automatically by DipDup. Do not edit it! +# Create a copy with .env extension, fill it with your values and run DipDup with `--env-file` option. +# +LOGLEVEL=INFO +SQLITE_PATH=/tmp/demo_token_balances.sqlite diff --git a/src/demo_token_balances/deploy/swarm.env.default b/src/demo_token_balances/deploy/swarm.env.default new file mode 100644 index 000000000..c4811e380 --- /dev/null +++ b/src/demo_token_balances/deploy/swarm.env.default @@ -0,0 +1,12 @@ +# This env file was generated automatically by DipDup. Do not edit it! +# Create a copy with .env extension, fill it with your values and run DipDup with `--env-file` option. 
+# +HASURA_HOST=demo_token_balances_hasura +HASURA_SECRET= +LOGLEVEL=INFO +POSTGRES_DB=dipdup +POSTGRES_HOST=demo_token_balances_db +POSTGRES_PASSWORD= +POSTGRES_USER=dipdup +SENTRY_DSN="" +SENTRY_ENVIRONMENT="" diff --git a/src/demo_token_balances/dipdup.yaml b/src/demo_token_balances/dipdup.yaml new file mode 100644 index 000000000..da40f0441 --- /dev/null +++ b/src/demo_token_balances/dipdup.yaml @@ -0,0 +1,21 @@ +spec_version: 2.0 +package: demo_token_balances + +contracts: + tzbtc_mainnet: + kind: tezos + address: KT1PWx2mnDueood7fEmfbBDKx1D9BAnnXitn + typename: tzbtc + +datasources: + tzkt: + kind: tezos.tzkt + url: https://api.tzkt.io + +indexes: + tzbtc_holders_mainnet: + kind: tezos.tzkt.token_balances + datasource: tzkt + handlers: + - callback: on_balance_update + contract: tzbtc_mainnet \ No newline at end of file diff --git a/src/demo_token_balances/graphql/.keep b/src/demo_token_balances/graphql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/handlers/.keep b/src/demo_token_balances/handlers/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/handlers/on_balance_update.py b/src/demo_token_balances/handlers/on_balance_update.py new file mode 100644 index 000000000..ec6716d4d --- /dev/null +++ b/src/demo_token_balances/handlers/on_balance_update.py @@ -0,0 +1,14 @@ +from decimal import Decimal + +from demo_token_balances import models as models +from dipdup.context import HandlerContext +from dipdup.models.tezos_tzkt import TzktTokenBalanceData + + +async def on_balance_update( + ctx: HandlerContext, + token_balance: TzktTokenBalanceData, +) -> None: + holder, _ = await models.Holder.get_or_create(address=token_balance.contract_address) + holder.balance = Decimal(token_balance.balance_value or 0) / (10**8) + await holder.save() \ No newline at end of file diff --git a/src/demo_token_balances/hasura/.keep b/src/demo_token_balances/hasura/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/hooks/.keep b/src/demo_token_balances/hooks/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/hooks/on_index_rollback.py b/src/demo_token_balances/hooks/on_index_rollback.py new file mode 100644 index 000000000..3d38655e0 --- /dev/null +++ b/src/demo_token_balances/hooks/on_index_rollback.py @@ -0,0 +1,16 @@ +from dipdup.context import HookContext +from dipdup.index import Index + + +async def on_index_rollback( + ctx: HookContext, + index: Index, # type: ignore[type-arg] + from_level: int, + to_level: int, +) -> None: + await ctx.execute_sql('on_index_rollback') + await ctx.rollback( + index=index.name, + from_level=from_level, + to_level=to_level, + ) \ No newline at end of file diff --git a/src/demo_token_balances/hooks/on_reindex.py b/src/demo_token_balances/hooks/on_reindex.py new file mode 100644 index 000000000..0804aae37 --- /dev/null +++ b/src/demo_token_balances/hooks/on_reindex.py @@ -0,0 +1,7 @@ +from dipdup.context import HookContext + + +async def on_reindex( + ctx: HookContext, +) -> None: + await ctx.execute_sql('on_reindex') \ No newline at end of file diff --git a/src/demo_token_balances/hooks/on_restart.py b/src/demo_token_balances/hooks/on_restart.py new file mode 100644 index 000000000..2581b5be3 --- /dev/null +++ b/src/demo_token_balances/hooks/on_restart.py @@ -0,0 +1,7 @@ +from dipdup.context import HookContext + + +async def on_restart( + ctx: HookContext, +) -> None: + await ctx.execute_sql('on_restart') \ No 
newline at end of file diff --git a/src/demo_token_balances/hooks/on_synchronized.py b/src/demo_token_balances/hooks/on_synchronized.py new file mode 100644 index 000000000..09099e4b6 --- /dev/null +++ b/src/demo_token_balances/hooks/on_synchronized.py @@ -0,0 +1,7 @@ +from dipdup.context import HookContext + + +async def on_synchronized( + ctx: HookContext, +) -> None: + await ctx.execute_sql('on_synchronized') \ No newline at end of file diff --git a/src/demo_token_balances/models/.keep b/src/demo_token_balances/models/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/models/__init__.py b/src/demo_token_balances/models/__init__.py new file mode 100644 index 000000000..833a8deea --- /dev/null +++ b/src/demo_token_balances/models/__init__.py @@ -0,0 +1,7 @@ +from dipdup import fields +from dipdup.models import Model + + +class Holder(Model): + address = fields.TextField(pk=True) + balance = fields.DecimalField(decimal_places=8, max_digits=20, default=0) \ No newline at end of file diff --git a/src/demo_token_balances/py.typed b/src/demo_token_balances/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/pyproject.toml b/src/demo_token_balances/pyproject.toml new file mode 100644 index 000000000..32ad2b1ff --- /dev/null +++ b/src/demo_token_balances/pyproject.toml @@ -0,0 +1,52 @@ +[project] +name = "demo_token_balances" +version = "0.0.1" +description = "TzBTC FA1.2 token balances" +license = { text = "MIT" } +authors = [ + { name = "John Doe", email = "john_doe@example.com" } +] +readme = "README.md" +requires-python = ">=3.11,<3.12" +dependencies = [ + "dipdup>=7,<8" +] + +[tool.pdm.dev-dependencies] +dev = [ + "isort", + "black", + "ruff", + "mypy", +] + +[tool.pdm.scripts] +_isort = "isort ." +_black = "black ." +_ruff = "ruff check --fix ." +_mypy = "mypy --no-incremental --exclude demo_token_balances ." +all = { composite = ["fmt", "lint"] } +fmt = { composite = ["_isort", "_black"] } +lint = { composite = ["_ruff", "_mypy"] } +image = "docker buildx build . 
--load --progress plain -f deploy/Dockerfile -t demo_token_balances:latest" + +[tool.isort] +line_length = 120 +force_single_line = true + +[tool.black] +line-length = 120 +target-version = ['py311'] +skip-string-normalization = true + +[tool.ruff] +line-length = 120 +target-version = 'py311' + +[tool.mypy] +python_version = "3.11" +plugins = ["pydantic.mypy"] + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" \ No newline at end of file diff --git a/src/demo_token_balances/sql/.keep b/src/demo_token_balances/sql/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/sql/on_index_rollback/.keep b/src/demo_token_balances/sql/on_index_rollback/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/sql/on_reindex/.keep b/src/demo_token_balances/sql/on_reindex/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/sql/on_restart/.keep b/src/demo_token_balances/sql/on_restart/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/sql/on_synchronized/.keep b/src/demo_token_balances/sql/on_synchronized/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/demo_token_balances/types/.keep b/src/demo_token_balances/types/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/src/dipdup/config/__init__.py b/src/dipdup/config/__init__.py index 6ec3461a3..8740486aa 100644 --- a/src/dipdup/config/__init__.py +++ b/src/dipdup/config/__init__.py @@ -956,13 +956,20 @@ def _resolve_index_links(self, index_config: ResolvedIndexConfigU) -> None: handler_config.parent = index_config if isinstance(handler_config.contract, str): - handler_config.contract = self.get_contract(handler_config.contract) + handler_config.contract = self.get_tezos_contract(handler_config.contract) if isinstance(handler_config.from_, str): - handler_config.from_ = self.get_contract(handler_config.from_) + handler_config.from_ = self.get_tezos_contract(handler_config.from_) if isinstance(handler_config.to, str): - handler_config.to = self.get_contract(handler_config.to) + handler_config.to = self.get_tezos_contract(handler_config.to) + + elif isinstance(index_config, TzktTokenBalancesIndexConfig): + for handler_config in index_config.handlers: + handler_config.parent = index_config + + if isinstance(handler_config.contract, str): + handler_config.contract = self.get_tezos_contract(handler_config.contract) elif isinstance(index_config, TzktOperationsUnfilteredIndexConfig): index_config.handler_config.parent = index_config @@ -1025,6 +1032,7 @@ def _set_names(self) -> None: from dipdup.config.tezos_tzkt_operations import OperationsHandlerTransactionPatternConfig from dipdup.config.tezos_tzkt_operations import TzktOperationsIndexConfig from dipdup.config.tezos_tzkt_operations import TzktOperationsUnfilteredIndexConfig +from dipdup.config.tezos_tzkt_token_balances import TzktTokenBalancesIndexConfig from dipdup.config.tezos_tzkt_token_transfers import TzktTokenTransfersIndexConfig from dipdup.config.tzip_metadata import TzipMetadataDatasourceConfig @@ -1048,6 +1056,7 @@ def _set_names(self) -> None: | TzktOperationsIndexConfig | TzktOperationsUnfilteredIndexConfig | TzktTokenTransfersIndexConfig + | TzktTokenBalancesIndexConfig ) IndexConfigU = ResolvedIndexConfigU | IndexTemplateConfig diff --git a/src/dipdup/config/tezos_tzkt_token_balances.py b/src/dipdup/config/tezos_tzkt_token_balances.py new file mode 100644 index 000000000..3bb3fb754 --- 
/dev/null +++ b/src/dipdup/config/tezos_tzkt_token_balances.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING +from typing import Literal + +from pydantic.dataclasses import dataclass +from pydantic.fields import Field + +from dipdup.config import ContractConfig +from dipdup.config import HandlerConfig +from dipdup.config.tezos import TezosContractConfig +from dipdup.config.tezos_tzkt import TzktDatasourceConfig +from dipdup.config.tezos_tzkt import TzktIndexConfig +from dipdup.models.tezos_tzkt import TokenBalanceSubscription + +if TYPE_CHECKING: + from collections.abc import Iterator + + from dipdup.subscriptions import Subscription + + +@dataclass +class TzktTokenBalancesHandlerConfig(HandlerConfig): + """Token balance handler config + + :param callback: Callback name + :param contract: Filter by contract + :param token_id: Filter by token ID + """ + + contract: TezosContractConfig | None = None + token_id: int | None = None + + def iter_imports(self, package: str) -> Iterator[tuple[str, str]]: + """This iterator result will be used in codegen to generate handler(s) template""" + yield 'dipdup.context', 'HandlerContext' + yield 'dipdup.models.tezos_tzkt', 'TzktTokenBalanceData' + yield package, 'models as models' + + def iter_arguments(self) -> Iterator[tuple[str, str]]: + """This iterator result will be used in codegen to generate handler(s) template""" + yield 'ctx', 'HandlerContext' + yield 'token_balance', 'TzktTokenBalanceData' + + +@dataclass +class TzktTokenBalancesIndexConfig(TzktIndexConfig): + """Token balance index config + + :param kind: always `tezos.tzkt.token_balances` + :param datasource: Index datasource to use + :param handlers: Mapping of token transfer handlers + + :param first_level: Level to start indexing from + :param last_level: Level to stop indexing at + """ + + kind: Literal['tezos.tzkt.token_balances'] + datasource: TzktDatasourceConfig + handlers: tuple[TzktTokenBalancesHandlerConfig, ...] 
= Field(default_factory=tuple) + + first_level: int = 0 + last_level: int = 0 + + def get_subscriptions(self) -> set[Subscription]: + subs = super().get_subscriptions() + if self.datasource.merge_subscriptions: + subs.add(TokenBalanceSubscription()) + else: + for handler_config in self.handlers: + contract = ( + handler_config.contract.address if isinstance(handler_config.contract, ContractConfig) else None + ) + subs.add( + TokenBalanceSubscription( + contract=contract, + token_id=handler_config.token_id, + ) + ) + return subs diff --git a/src/dipdup/context.py b/src/dipdup/context.py index 1407e6116..32e24a810 100644 --- a/src/dipdup/context.py +++ b/src/dipdup/context.py @@ -33,6 +33,7 @@ from dipdup.config.tezos_tzkt_head import TzktHeadIndexConfig from dipdup.config.tezos_tzkt_operations import TzktOperationsIndexConfig from dipdup.config.tezos_tzkt_operations import TzktOperationsUnfilteredIndexConfig +from dipdup.config.tezos_tzkt_token_balances import TzktTokenBalancesIndexConfig from dipdup.config.tezos_tzkt_token_transfers import TzktTokenTransfersIndexConfig from dipdup.database import execute_sql from dipdup.database import execute_sql_query @@ -297,10 +298,11 @@ async def _spawn_index(self, name: str, state: Index | None = None) -> Any: from dipdup.indexes.tezos_tzkt_events.index import TzktEventsIndex from dipdup.indexes.tezos_tzkt_head.index import TzktHeadIndex from dipdup.indexes.tezos_tzkt_operations.index import TzktOperationsIndex + from dipdup.indexes.tezos_tzkt_token_balances.index import TzktTokenBalancesIndex from dipdup.indexes.tezos_tzkt_token_transfers.index import TzktTokenTransfersIndex index_config = cast(ResolvedIndexConfigU, self.config.get_index(name)) - index: TzktOperationsIndex | TzktBigMapsIndex | TzktHeadIndex | TzktTokenTransfersIndex | TzktEventsIndex | SubsquidEventsIndex + index: TzktOperationsIndex | TzktBigMapsIndex | TzktHeadIndex | TzktTokenBalancesIndex | TzktTokenTransfersIndex | TzktEventsIndex | SubsquidEventsIndex datasource_name = index_config.datasource.name datasource: TzktDatasource | SubsquidDatasource @@ -315,6 +317,9 @@ async def _spawn_index(self, name: str, state: Index | None = None) -> Any: elif isinstance(index_config, TzktHeadIndexConfig): datasource = self.get_tzkt_datasource(datasource_name) index = TzktHeadIndex(self, index_config, datasource) + elif isinstance(index_config, TzktTokenBalancesIndexConfig): + datasource = self.get_tzkt_datasource(datasource_name) + index = TzktTokenBalancesIndex(self, index_config, datasource) elif isinstance(index_config, TzktTokenTransfersIndexConfig): datasource = self.get_tzkt_datasource(datasource_name) index = TzktTokenTransfersIndex(self, index_config, datasource) diff --git a/src/dipdup/datasources/tezos_tzkt.py b/src/dipdup/datasources/tezos_tzkt.py index 00fd4d479..72bbebcf3 100644 --- a/src/dipdup/datasources/tezos_tzkt.py +++ b/src/dipdup/datasources/tezos_tzkt.py @@ -43,6 +43,7 @@ from dipdup.models.tezos_tzkt import TzktQuoteData from dipdup.models.tezos_tzkt import TzktRollbackMessage from dipdup.models.tezos_tzkt import TzktSubscription +from dipdup.models.tezos_tzkt import TzktTokenBalanceData from dipdup.models.tezos_tzkt import TzktTokenTransferData from dipdup.utils import split_by_chunks @@ -111,6 +112,18 @@ 'originationId', 'migrationId', ) +TOKEN_BALANCE_FIELDS = ( + 'id', + 'transfersCount', + 'firstLevel', + 'firstTime', + 'lastLevel', + 'lastTime', + 'account', + 'token', + 'balance', + 'balanceValue', +) EVENT_FIELDS = ( 'id', 'level', @@ -127,6 +140,7 @@ 
HeadCallback = Callable[['TzktDatasource', TzktHeadBlockData], Awaitable[None]] OperationsCallback = Callable[['TzktDatasource', tuple[TzktOperationData, ...]], Awaitable[None]] TokenTransfersCallback = Callable[['TzktDatasource', tuple[TzktTokenTransferData, ...]], Awaitable[None]] +TokenBalancesCallback = Callable[['TzktDatasource', tuple[TzktTokenBalanceData, ...]], Awaitable[None]] BigMapsCallback = Callable[['TzktDatasource', tuple[TzktBigMapData, ...]], Awaitable[None]] EventsCallback = Callable[['TzktDatasource', tuple[TzktEventData, ...]], Awaitable[None]] RollbackCallback = Callable[['TzktDatasource', MessageType, int, int], Awaitable[None]] @@ -236,6 +250,7 @@ def __init__( self._on_head_callbacks: set[HeadCallback] = set() self._on_operations_callbacks: set[OperationsCallback] = set() self._on_token_transfers_callbacks: set[TokenTransfersCallback] = set() + self._on_token_balances_callbacks: set[TokenBalancesCallback] = set() self._on_big_maps_callbacks: set[BigMapsCallback] = set() self._on_events_callbacks: set[EventsCallback] = set() self._on_rollback_callbacks: set[RollbackCallback] = set() @@ -341,6 +356,10 @@ async def emit_token_transfers(self, token_transfers: tuple[TzktTokenTransferDat for fn in self._on_token_transfers_callbacks: await fn(self, token_transfers) + async def emit_token_balances(self, token_balances: tuple[TzktTokenBalanceData, ...]) -> None: + for fn in self._on_token_balances_callbacks: + await fn(self, token_balances) + async def emit_big_maps(self, big_maps: tuple[TzktBigMapData, ...]) -> None: for fn in self._on_big_maps_callbacks: await fn(self, big_maps) @@ -897,6 +916,48 @@ async def iter_token_transfers( ): yield batch + async def get_token_balances( + self, + token_addresses: set[str], + token_ids: set[int], + first_level: int | None = None, + last_level: int | None = None, + offset: int | None = None, + limit: int | None = None, + ) -> tuple[TzktTokenBalanceData, ...]: + params = self._get_request_params( + first_level, + last_level, + offset=offset or 0, + limit=limit, + select=TOKEN_BALANCE_FIELDS, + values=True, + cursor=True, + **{ + 'token.contract.in': ','.join(token_addresses), + 'token.id.in': ','.join(str(token_id) for token_id in token_ids), + }, + ) + raw_token_balances = await self._request_values_dict('get', url='v1/tokens/balances', params=params) + return tuple(TzktTokenBalanceData.from_json(item) for item in raw_token_balances) + + async def iter_token_balances( + self, + token_addresses: set[str], + token_ids: set[int], + first_level: int | None = None, + last_level: int | None = None, + ) -> AsyncIterator[tuple[TzktTokenBalanceData, ...]]: + async for batch in self._iter_batches( + self.get_token_balances, + token_addresses, + token_ids, + first_level, + last_level, + cursor=True, + ): + yield batch + async def get_events( self, addresses: set[str], @@ -1073,6 +1134,7 @@ def _get_signalr_client(self) -> SignalRClient: self._signalr_client.on('operations', partial(self._on_message, TzktMessageType.operation)) self._signalr_client.on('transfers', partial(self._on_message, TzktMessageType.token_transfer)) + self._signalr_client.on('balances', partial(self._on_message, TzktMessageType.token_balance)) self._signalr_client.on('bigmaps', partial(self._on_message, TzktMessageType.big_map)) self._signalr_client.on('head', partial(self._on_message, TzktMessageType.head)) self._signalr_client.on('events', partial(self._on_message, TzktMessageType.event)) @@ -1146,6 +1208,8 @@ async def _on_message(self, type_: TzktMessageType, message: 
list[dict[str, Any] await self._process_operations_data(cast(list[dict[str, Any]], buffered_message.data)) elif buffered_message.type == TzktMessageType.token_transfer: await self._process_token_transfers_data(cast(list[dict[str, Any]], buffered_message.data)) + elif buffered_message.type == TzktMessageType.token_balance: + await self._process_token_balances_data(cast(list[dict[str, Any]], buffered_message.data)) elif buffered_message.type == TzktMessageType.big_map: await self._process_big_maps_data(cast(list[dict[str, Any]], buffered_message.data)) elif buffered_message.type == TzktMessageType.head: @@ -1182,6 +1246,17 @@ async def _process_token_transfers_data(self, data: list[dict[str, Any]]) -> Non for _level, token_transfers in level_token_transfers.items(): await self.emit_token_transfers(tuple(token_transfers)) + async def _process_token_balances_data(self, data: list[dict[str, Any]]) -> None: + """Parse and emit raw token balances from WS""" + level_token_balances: defaultdict[int, deque[TzktTokenBalanceData]] = defaultdict(deque) + + for token_balance_json in data: + token_balance = TzktTokenBalanceData.from_json(token_balance_json) + level_token_balances[token_balance.level].append(token_balance) + + for _level, token_balances in level_token_balances.items(): + await self.emit_token_balances(tuple(token_balances)) + async def _process_big_maps_data(self, data: list[dict[str, Any]]) -> None: """Parse and emit raw big map diffs from WS""" level_big_maps: defaultdict[int, deque[TzktBigMapData]] = defaultdict(deque) diff --git a/src/dipdup/indexes/tezos_tzkt_token_balances/__init__.py b/src/dipdup/indexes/tezos_tzkt_token_balances/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/dipdup/indexes/tezos_tzkt_token_balances/index.py b/src/dipdup/indexes/tezos_tzkt_token_balances/index.py new file mode 100644 index 000000000..710adf028 --- /dev/null +++ b/src/dipdup/indexes/tezos_tzkt_token_balances/index.py @@ -0,0 +1,114 @@ +from contextlib import ExitStack + +from dipdup.config.tezos_tzkt_token_balances import TzktTokenBalancesHandlerConfig +from dipdup.config.tezos_tzkt_token_balances import TzktTokenBalancesIndexConfig +from dipdup.datasources.tezos_tzkt import TzktDatasource +from dipdup.exceptions import ConfigInitializationException +from dipdup.exceptions import FrameworkException +from dipdup.index import Index +from dipdup.indexes.tezos_tzkt_token_balances.matcher import match_token_balances +from dipdup.models.tezos_tzkt import TzktMessageType +from dipdup.models.tezos_tzkt import TzktRollbackMessage +from dipdup.models.tezos_tzkt import TzktTokenBalanceData +from dipdup.prometheus import Metrics + +TokenBalanceQueueItem = tuple[TzktTokenBalanceData, ...] 
| TzktRollbackMessage + + +class TzktTokenBalancesIndex( + Index[TzktTokenBalancesIndexConfig, TokenBalanceQueueItem, TzktDatasource], + message_type=TzktMessageType.token_balance, +): + def push_token_balances(self, token_balances: TokenBalanceQueueItem) -> None: + self.push_realtime_message(token_balances) + + async def _synchronize(self, sync_level: int) -> None: + await self._enter_sync_state(sync_level) + await self._synchronize_actual(sync_level) + await self._exit_sync_state(sync_level) + + async def _synchronize_actual(self, head_level: int) -> None: + """Retrieve data for the current level""" + # TODO: think about logging and metrics + + addresses, token_ids = set(), set() + for handler in self._config.handlers: + if handler.contract and handler.contract.address is not None: + addresses.add(handler.contract.address) + if handler.token_id is not None: + token_ids.add(handler.token_id) + + async with self._ctx.transactions.in_transaction(head_level, head_level, self.name): + # NOTE: If index is out of date fetch balances as of the current head. + async for balances_batch in self._datasource.iter_token_balances( + addresses, token_ids, last_level=head_level + ): + matched_handlers = match_token_balances(self._config.handlers, balances_batch) + for handler_config, matched_balance_data in matched_handlers: + await self._call_matched_handler(handler_config, matched_balance_data) + + await self._update_state(level=head_level) + + async def _process_level_token_balances( + self, + token_balances: tuple[TzktTokenBalanceData, ...], + sync_level: int, + ) -> None: + if not token_balances: + return + + batch_level = token_balances[0].level + index_level = self.state.level + if batch_level <= index_level: + raise FrameworkException(f'Batch level is lower than index level: {batch_level} <= {index_level}') + + self._logger.debug('Processing token balances of level %s', batch_level) + matched_handlers = match_token_balances(self._config.handlers, token_balances) + + if Metrics.enabled: + Metrics.set_index_handlers_matched(len(matched_handlers)) + + # NOTE: We still need to bump index level but don't care if it will be done in existing transaction + if not matched_handlers: + await self._update_state(level=batch_level) + return + + async with self._ctx.transactions.in_transaction(batch_level, sync_level, self.name): + for handler_config, token_balance in matched_handlers: + await self._call_matched_handler(handler_config, token_balance) + await self._update_state(level=batch_level) + + async def _call_matched_handler( + self, handler_config: TzktTokenBalancesHandlerConfig, token_balance: TzktTokenBalanceData + ) -> None: + if not handler_config.parent: + raise ConfigInitializationException + + await self._ctx.fire_handler( + handler_config.callback, + handler_config.parent.name, + self.datasource, + # NOTE: missing `operation_id` field in API to identify operation + None, + token_balance, + ) + + async def _process_queue(self) -> None: + """Process WebSocket queue""" + if self._queue: + self._logger.debug('Processing websocket queue') + while self._queue: + message = self._queue.popleft() + if isinstance(message, TzktRollbackMessage): + await self._tzkt_rollback(message.from_level, message.to_level) + continue + + message_level = message[0].level + if message_level <= self.state.level: + self._logger.debug('Skipping outdated message: %s <= %s', message_level, self.state.level) + continue + + with ExitStack() as stack: + if Metrics.enabled: + 
stack.enter_context(Metrics.measure_level_realtime_duration()) + await self._process_level_token_balances(message, message_level) diff --git a/src/dipdup/indexes/tezos_tzkt_token_balances/matcher.py b/src/dipdup/indexes/tezos_tzkt_token_balances/matcher.py new file mode 100644 index 000000000..46b00a015 --- /dev/null +++ b/src/dipdup/indexes/tezos_tzkt_token_balances/matcher.py @@ -0,0 +1,42 @@ +import logging +from collections import deque +from collections.abc import Iterable + +from dipdup.config.tezos_tzkt_token_balances import TzktTokenBalancesHandlerConfig +from dipdup.models.tezos_tzkt import TzktTokenBalanceData + +_logger = logging.getLogger('dipdup.matcher') + +MatchedTokenBalancesT = tuple[TzktTokenBalancesHandlerConfig, TzktTokenBalanceData] + + +def match_token_balance( + handler_config: TzktTokenBalancesHandlerConfig, + token_balance: TzktTokenBalanceData, +) -> bool: + """Match single token balance with pattern""" + if handler_config.contract: + if handler_config.contract.address != token_balance.contract_address: + return False + if handler_config.token_id is not None: + if handler_config.token_id != token_balance.token_id: + return False + return True + + +def match_token_balances( + handlers: Iterable[TzktTokenBalancesHandlerConfig], token_balances: Iterable[TzktTokenBalanceData] +) -> deque[MatchedTokenBalancesT]: + """Try to match token balances with all index handlers.""" + + matched_handlers: deque[MatchedTokenBalancesT] = deque() + + for token_balance in token_balances: + for handler_config in handlers: + token_balance_matched = match_token_balance(handler_config, token_balance) + if not token_balance_matched: + continue + _logger.debug('%s: `%s` handler matched!', token_balance.level, handler_config.callback) + matched_handlers.append((handler_config, token_balance)) + + return matched_handlers diff --git a/src/dipdup/models/__init__.py b/src/dipdup/models/__init__.py index 85997fd3f..519188a48 100644 --- a/src/dipdup/models/__init__.py +++ b/src/dipdup/models/__init__.py @@ -53,6 +53,7 @@ class IndexType(Enum): tezos_tzkt_big_maps = 'tezos.tzkt.big_maps' tezos_tzkt_head = 'tezos.tzkt.head' tezos_tzkt_token_transfers = 'tezos.tzkt.token_transfers' + tezos_tzkt_token_balances = 'tezos.tzkt.token_balances' tezos_tzkt_events = 'tezos.tzkt.events' evm_subsquid_events = 'evm.subsquid.events' diff --git a/src/dipdup/models/tezos_tzkt.py b/src/dipdup/models/tezos_tzkt.py index dd9085180..b63a9679a 100644 --- a/src/dipdup/models/tezos_tzkt.py +++ b/src/dipdup/models/tezos_tzkt.py @@ -48,6 +48,7 @@ class TzktMessageType(MessageType, Enum): big_map = 'big_map' head = 'head' token_transfer = 'token_transfer' + token_balance = 'token_balance' event = 'event' @@ -119,7 +120,7 @@ class TokenTransferSubscription(TzktSubscription): def get_request(self) -> list[dict[str, Any]]: request: dict[str, Any] = {} if self.token_id: - request['token_id'] = self.token_id + request['tokenId'] = self.token_id if self.contract: request['contract'] = self.contract if self.from_: @@ -129,6 +130,22 @@ def get_request(self) -> list[dict[str, Any]]: return [request] +@dataclass(frozen=True) +class TokenBalanceSubscription(TzktSubscription): + type: Literal['token_balance'] = 'token_balance' + method: Literal['SubscribeToTokenBalances'] = 'SubscribeToTokenBalances' + contract: str | None = None + token_id: int | None = None + + def get_request(self) -> list[dict[str, Any]]: + request: dict[str, Any] = {} + if self.token_id: + request['tokenId'] = self.token_id + if self.contract: + 
request['contract'] = self.contract + return [request] + + @dataclass(frozen=True) class EventSubscription(TzktSubscription): type: Literal['event'] = 'event' @@ -537,6 +554,64 @@ def from_json(cls, token_transfer_json: dict[str, Any]) -> 'TzktTokenTransferDat ) +@dataclass(frozen=True) +class TzktTokenBalanceData(HasLevel): + """Basic structure for token transver received from TzKT SignalR API""" + + id: int + transfers_count: int + first_level: int + first_time: datetime + # level is not defined in tzkt balances data, so it is + # Level of the block where the token balance was last changed. + last_level: int + last_time: datetime + # owner account + account_address: str | None = None + account_alias: str | None = None + # token object + tzkt_token_id: int | None = None + contract_address: str | None = None + contract_alias: str | None = None + token_id: int | None = None + standard: TzktTokenStandard | None = None + metadata: dict[str, Any] | None = None + + balance: str | None = None + balance_value: float | None = None + + @property + def level(self) -> int: # type: ignore[override] + return self.last_level + + @classmethod + def from_json(cls, token_transfer_json: dict[str, Any]) -> 'TzktTokenBalanceData': + """Convert raw token transfer message from REST or WS into dataclass""" + token_json = token_transfer_json.get('token') or {} + standard = token_json.get('standard') + metadata = token_json.get('metadata') + contract_json = token_json.get('contract') or {} + + return TzktTokenBalanceData( + id=token_transfer_json['id'], + transfers_count=token_transfer_json['transfersCount'], + first_level=token_transfer_json['firstLevel'], + first_time=_parse_timestamp(token_transfer_json['firstTime']), + last_level=token_transfer_json['lastLevel'], + last_time=_parse_timestamp(token_transfer_json['lastTime']), + account_address=token_transfer_json.get('account', {}).get('address'), + account_alias=token_transfer_json.get('account', {}).get('alias'), + tzkt_token_id=token_json['id'], + contract_address=contract_json.get('address'), + contract_alias=contract_json.get('alias'), + token_id=token_json.get('tokenId'), + standard=TzktTokenStandard(standard) if standard else None, + metadata=metadata if isinstance(metadata, dict) else {}, + balance=token_transfer_json.get('balance'), + balance_value=token_transfer_json.get('balanceValue'), + ) + + @dataclass(frozen=True) class TzktEventData(HasLevel): """Basic structure for events received from TzKT REST API""" diff --git a/src/dipdup/projects/demo_token_balances/dipdup.yaml.j2 b/src/dipdup/projects/demo_token_balances/dipdup.yaml.j2 new file mode 100644 index 000000000..757f20925 --- /dev/null +++ b/src/dipdup/projects/demo_token_balances/dipdup.yaml.j2 @@ -0,0 +1,21 @@ +spec_version: 2.0 +package: {{ project.package }} + +contracts: + tzbtc_mainnet: + kind: tezos + address: KT1PWx2mnDueood7fEmfbBDKx1D9BAnnXitn + typename: tzbtc + +datasources: + tzkt: + kind: tezos.tzkt + url: https://api.tzkt.io + +indexes: + tzbtc_holders_mainnet: + kind: tezos.tzkt.token_balances + datasource: tzkt + handlers: + - callback: on_balance_update + contract: tzbtc_mainnet diff --git a/src/dipdup/projects/demo_token_balances/handlers/on_balance_update.py.j2 b/src/dipdup/projects/demo_token_balances/handlers/on_balance_update.py.j2 new file mode 100644 index 000000000..b4a13aee6 --- /dev/null +++ b/src/dipdup/projects/demo_token_balances/handlers/on_balance_update.py.j2 @@ -0,0 +1,13 @@ +from decimal import Decimal +from {{ project.package }} import models as models +from 
dipdup.context import HandlerContext +from dipdup.models.tezos_tzkt import TzktTokenBalanceData + + +async def on_balance_update( + ctx: HandlerContext, + token_balance: TzktTokenBalanceData, +) -> None: + holder, _ = await models.Holder.get_or_create(address=token_balance.contract_address) + holder.balance = Decimal(token_balance.balance_value or 0) / (10**8) + await holder.save() \ No newline at end of file diff --git a/src/dipdup/projects/demo_token_balances/models/__init__.py.j2 b/src/dipdup/projects/demo_token_balances/models/__init__.py.j2 new file mode 100644 index 000000000..876bfdd21 --- /dev/null +++ b/src/dipdup/projects/demo_token_balances/models/__init__.py.j2 @@ -0,0 +1,8 @@ +from dipdup import fields + +from dipdup.models import Model + + +class Holder(Model): + address = fields.TextField(pk=True) + balance = fields.DecimalField(decimal_places=8, max_digits=20, default=0) diff --git a/src/dipdup/projects/demo_token_balances/replay.yaml b/src/dipdup/projects/demo_token_balances/replay.yaml new file mode 100644 index 000000000..8c8703a05 --- /dev/null +++ b/src/dipdup/projects/demo_token_balances/replay.yaml @@ -0,0 +1,5 @@ +spec_version: 2.0 +replay: + description: TzBTC FA1.2 token balances + package: demo_token_balances + template: demo_token_balances \ No newline at end of file diff --git a/tests/configs/demo_token_balances.yml b/tests/configs/demo_token_balances.yml new file mode 100644 index 000000000..3eabede72 --- /dev/null +++ b/tests/configs/demo_token_balances.yml @@ -0,0 +1,23 @@ +spec_version: 2.0 +package: demo_token_balances + +contracts: + tzbtc_mainnet: + kind: tezos + address: KT1PWx2mnDueood7fEmfbBDKx1D9BAnnXitn + typename: tzbtc + +datasources: + tzkt: + kind: tezos.tzkt + url: https://api.tzkt.io + +indexes: + tzbtc_holders_mainnet: + kind: tezos.tzkt.token_balances + datasource: tzkt + first_level: 1366824 + last_level: 1366999 + handlers: + - callback: on_balance_update + contract: tzbtc_mainnet \ No newline at end of file diff --git a/tests/test_demos.py b/tests/test_demos.py index 28f92246a..7e5043f73 100644 --- a/tests/test_demos.py +++ b/tests/test_demos.py @@ -65,6 +65,18 @@ async def assert_run_token_transfers(expected_holders: int, expected_balance: st assert f'{random_balance:f}' == expected_balance +async def assert_run_balances() -> None: + import demo_token_balances.models + + holders = await demo_token_balances.models.Holder.filter().count() + holder = await demo_token_balances.models.Holder.first() + assert holder + random_balance = holder.balance + + assert holders == 1 + assert random_balance == 0 + + async def assert_run_big_maps() -> None: import demo_big_maps.models @@ -171,6 +183,8 @@ async def assert_run_dao() -> None: 'run', partial(assert_run_token_transfers, 2, '-0.02302128'), ), + ('demo_token_balances.yml', 'demo_token_balances', 'run', assert_run_balances), + ('demo_token_balances.yml', 'demo_token_balances', 'init', partial(assert_init, 'demo_token_balances')), ('demo_big_maps.yml', 'demo_big_maps', 'run', assert_run_big_maps), ('demo_big_maps.yml', 'demo_big_maps', 'init', partial(assert_init, 'demo_big_maps')), ('demo_domains.yml', 'demo_domains', 'run', assert_run_domains), From 02a26cdf79dbc394f81a009efe6637eea49c9f79 Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Fri, 20 Oct 2023 09:37:39 -0300 Subject: [PATCH 4/6] Ability to limit the scope of `init` command (#870) --- CHANGELOG.md | 5 +++-- src/dipdup/cli.py | 20 ++++++++++++++++++-- src/dipdup/codegen/__init__.py | 11 +++++++++-- src/dipdup/codegen/tezos_tzkt.py | 
2 ++ src/dipdup/dipdup.py | 15 +++++++++++++-- src/dipdup/project.py | 23 ++++++++++++++++++++--- 6 files changed, 65 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ef2825f6..b605e6df5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,15 +4,16 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning]. -## Unreleased +## [Unreleased] ### Added +- cli: Relative paths to be initialized now can be passed to the `init` command as arguments. - tezos.tzkt.token_balances: Added new index. ### Fixed -- tezos.tzkt.token_transfers: Fixed token_id handler in token transfers index. +- tezos.tzkt.token_transfers: Fixed filtering transfers by token_id. ## [7.0.2] - 2023-10-10 diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index 1a00cdc98..86223390e 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -288,9 +288,20 @@ async def run(ctx: click.Context) -> None: @cli.command() @click.option('--force', '-f', is_flag=True, help='Overwrite existing types and ABIs.') @click.option('--base', '-b', is_flag=True, help='Include template base: pyproject.toml, Dockerfile, etc.') +@click.argument( + 'include', + type=str, + nargs=-1, + metavar='PATH', +) @click.pass_context @_cli_wrapper -async def init(ctx: click.Context, force: bool, base: bool) -> None: +async def init( + ctx: click.Context, + force: bool, + base: bool, + include: list[str], +) -> None: """Generate project tree, typeclasses and callback stubs. This command is idempotent, meaning it won't overwrite previously generated files unless asked explicitly. @@ -299,7 +310,12 @@ async def init(ctx: click.Context, force: bool, base: bool) -> None: config: DipDupConfig = ctx.obj.config dipdup = DipDup(config) - await dipdup.init(force, base) + + await dipdup.init( + force=force, + base=base or bool(include), + include=set(include), + ) @cli.command() diff --git a/src/dipdup/codegen/__init__.py b/src/dipdup/codegen/__init__.py index ce2de0b97..933124a3b 100644 --- a/src/dipdup/codegen/__init__.py +++ b/src/dipdup/codegen/__init__.py @@ -38,10 +38,12 @@ def __init__( config: DipDupConfig, package: DipDupPackage, datasources: dict[str, Datasource[Any]], + include: set[str] | None = None, ) -> None: self._config = config self._package = package self._datasources = datasources + self._include = include or set() self._logger = _logger async def init( @@ -52,9 +54,14 @@ async def init( self._package.create() replay = self._package.replay - if base and replay: + if base: + if not replay: + raise FrameworkException('`--base` option passed but `configs/replay.yaml` file is missing') _logger.info('Recreating base template with replay.yaml') - render_base(replay, force) + render_base(replay, force, self._include) + + if self._include: + force = any(str(path).startswith('types') for path in self._include) await self.generate_abi() await self.generate_schemas(force) diff --git a/src/dipdup/codegen/tezos_tzkt.py b/src/dipdup/codegen/tezos_tzkt.py index 0b4a0082a..51ff82e54 100644 --- a/src/dipdup/codegen/tezos_tzkt.py +++ b/src/dipdup/codegen/tezos_tzkt.py @@ -90,11 +90,13 @@ def __init__( config: DipDupConfig, package: DipDupPackage, datasources: dict[str, Datasource[Any]], + include: set[str] | None = None, ) -> None: super().__init__( config=config, package=package, datasources=datasources, + include=include, ) self._schemas: dict[str, dict[str, dict[str, Any]]] = {} diff --git a/src/dipdup/dipdup.py b/src/dipdup/dipdup.py 
index 258f3b1b3..acff12dbd 100644 --- a/src/dipdup/dipdup.py +++ b/src/dipdup/dipdup.py @@ -20,6 +20,7 @@ from tortoise.exceptions import OperationalError from dipdup import env +from dipdup.codegen import CodeGenerator from dipdup.codegen import generate_environments from dipdup.config import DipDupConfig from dipdup.config import IndexTemplateConfig @@ -502,6 +503,7 @@ async def init( self, force: bool = False, base: bool = False, + include: set[str] | None = None, ) -> None: """Create new or update existing dipdup project""" from dipdup.codegen.evm_subsquid import SubsquidCodeGenerator @@ -515,8 +517,17 @@ async def init( package = DipDupPackage(self._config.package_path) - for codegen_cls in (TzktCodeGenerator, SubsquidCodeGenerator): - codegen = codegen_cls(self._config, package, self._datasources) + codegen_classes: tuple[type[CodeGenerator], ...] = ( + TzktCodeGenerator, + SubsquidCodeGenerator, + ) + for codegen_cls in codegen_classes: + codegen = codegen_cls( + config=self._config, + package=package, + datasources=self._datasources, + include=include, + ) await codegen.init( force=force, base=base, diff --git a/src/dipdup/project.py b/src/dipdup/project.py index 11c96abdb..58d46bad7 100644 --- a/src/dipdup/project.py +++ b/src/dipdup/project.py @@ -263,10 +263,17 @@ def render_project( def render_base( answers: Answers, force: bool = False, + include: set[str] | None = None, ) -> None: """Render base from template""" # NOTE: Common base - _render_templates(answers, Path('base'), force, refresh=True) + _render_templates( + answers=answers, + path=Path('base'), + force=force, + include=include, + exists=True, + ) _render( answers, @@ -276,7 +283,13 @@ def render_base( ) -def _render_templates(answers: Answers, path: Path, force: bool = False, refresh: bool = False) -> None: +def _render_templates( + answers: Answers, + path: Path, + force: bool = False, + include: set[str] | None = None, + exists: bool = False, +) -> None: from jinja2 import Template project_path = Path(__file__).parent / 'projects' / path @@ -284,7 +297,11 @@ def _render_templates(answers: Answers, path: Path, force: bool = False, refresh for path in project_paths: template_path = path.relative_to(Path(__file__).parent) - output_base = get_package_path(answers['package']) if refresh else Path(answers['package']) + + if include and not any(str(path).startswith(i) for i in include): + continue + + output_base = get_package_path(answers['package']) if exists else Path(answers['package']) output_path = Path( output_base, *path.relative_to(project_path).parts, From e9c99167d778e03722d03c0c48e3d5f653f7351d Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Fri, 20 Oct 2023 09:39:10 -0300 Subject: [PATCH 5/6] `--unsafe` and `--compose` flags for `config env` command (#853) --- CHANGELOG.md | 1 + src/dipdup/cli.py | 24 +++++++++++++++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b605e6df5..a9e25bf27 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ The format is based on [Keep a Changelog], and this project adheres to [Semantic ### Added +- cli: Added `--unsafe` and `--compose` flags to `config env` command. - cli: Relative paths to be initialized now can be passed to the `init` command as arguments. - tezos.tzkt.token_balances: Added new index. 
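The new flags are easiest to understand from a quick sketch of their output (illustrative only, not part of the patch; the variable names are borrowed from the demo `.env` shown earlier, and the exact set depends on the loaded config):

    $ dipdup config env
    HASURA_SECRET=
    POSTGRES_PASSWORD=
    POSTGRES_USER=dipdup

    $ dipdup config env --compose
    services:
      dipdup:
        environment:
          - HASURA_SECRET=${HASURA_SECRET}
          - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
          - POSTGRES_USER=${POSTGRES_USER:-dipdup}

Variables with a non-empty default are emitted as `${VAR:-default}`, empty ones as `${VAR}`; `--unsafe` additionally resolves values from the current environment, which is presumably why it is a separate opt-in flag.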
diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index 86223390e..bb35be168 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -362,20 +362,34 @@ async def config_export(ctx: click.Context, unsafe: bool, full: bool) -> None: @config.command(name='env') @click.option('--output', '-o', type=str, default=None, help='Output to file instead of stdout.') +@click.option('--unsafe', is_flag=True, help='Resolve environment variables or use default values from the config.') +@click.option('--compose', is_flag=True, help='Output in docker-compose format.') @click.pass_context @_cli_wrapper -async def config_env(ctx: click.Context, output: str | None) -> None: +async def config_env(ctx: click.Context, output: str | None, unsafe: bool, compose: bool) -> None: """Dump environment variables used in DipDup config. If variable is not set, default value will be used. """ - from dipdup.config import DipDupConfig + from dipdup.yaml import DipDupYAMLConfig - config = DipDupConfig.load( + config, environment = DipDupYAMLConfig.load( paths=ctx.obj.config._paths, - environment=True, + environment=unsafe, ) - content = '\n'.join(f'{k}={v}' for k, v in sorted(config._environment.items())) + if compose: + content = '\nservices:\n dipdup:\n environment:\n' + _tab = ' ' * 6 + for k, v in sorted(environment.items()): + line = f'{_tab}- {k}=' + '${' + k + if v: + line += ':-' + v + '}' + else: + line += '}' + + content += line + '\n' + else: + content = '\n'.join(f'{k}={v}' for k, v in sorted(environment.items())) if output: Path(output).write_text(content) else: From d9ff7122c022b6759af1e02ec65df27e9b7ee698 Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Fri, 20 Oct 2023 09:41:09 -0300 Subject: [PATCH 6/6] Add help messages to pyproject.toml scripts (#863) --- CHANGELOG.md | 1 + pdm.lock | 87 +++++++++---------- pyproject.toml | 73 +++++++++++----- requirements.dev.txt | 10 +-- requirements.txt | 8 +- src/demo_auction/models/__init__.py | 1 + src/demo_auction/pyproject.toml | 21 ++++- src/demo_big_maps/models/__init__.py | 1 + src/demo_big_maps/pyproject.toml | 21 ++++- src/demo_blank/pyproject.toml | 21 ++++- src/demo_dao/models/__init__.py | 1 + src/demo_dao/pyproject.toml | 21 ++++- src/demo_dex/models/__init__.py | 1 + src/demo_dex/pyproject.toml | 21 ++++- src/demo_domains/models/__init__.py | 1 + src/demo_domains/pyproject.toml | 21 ++++- src/demo_events/pyproject.toml | 21 ++++- src/demo_evm_events/pyproject.toml | 21 ++++- src/demo_factories/handlers/on_transfer.py | 5 +- src/demo_factories/models/__init__.py | 3 +- src/demo_factories/pyproject.toml | 21 ++++- src/demo_head/pyproject.toml | 21 ++++- src/demo_nft_marketplace/models/__init__.py | 2 + src/demo_nft_marketplace/pyproject.toml | 21 ++++- src/demo_raw/models/__init__.py | 3 +- src/demo_raw/pyproject.toml | 21 ++++- src/demo_token/models/__init__.py | 1 + src/demo_token/pyproject.toml | 21 ++++- src/demo_token_transfers/models/__init__.py | 1 + src/demo_token_transfers/pyproject.toml | 21 ++++- src/demo_uniswap/pyproject.toml | 21 ++++- src/dipdup/cli.py | 2 - src/dipdup/config/__init__.py | 4 + src/dipdup/project.py | 5 +- src/dipdup/projects/base/pyproject.toml.j2 | 21 ++++- .../projects/demo_uniswap/models/repo.py.j2 | 4 +- src/dipdup/sys.py | 3 + 37 files changed, 407 insertions(+), 146 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a9e25bf27..0d1510c5a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ The format is based on [Keep a Changelog], and this project adheres to [Semantic ### Fixed +- cli: Fixed 
`DIPDUP_DEBUG` not being applied to the package logger. - tezos.tzkt.token_transfers: Fixed filtering transfers by token_id. ## [7.0.2] - 2023-10-10 diff --git a/pdm.lock b/pdm.lock index a2c96f1ac..4195bbb51 100644 --- a/pdm.lock +++ b/pdm.lock @@ -236,7 +236,7 @@ files = [ [[package]] name = "black" -version = "23.9.1" +version = "23.10.0" requires_python = ">=3.8" summary = "The uncompromising code formatter." dependencies = [ @@ -247,13 +247,12 @@ dependencies = [ "platformdirs>=2", ] files = [ - {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, - {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, - {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, - {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, - {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, + {file = "black-23.10.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:30b78ac9b54cf87bcb9910ee3d499d2bc893afd52495066c49d9ee6b21eee06e"}, + {file = "black-23.10.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:0e232f24a337fed7a82c1185ae46c56c4a6167fb0fe37411b43e876892c76699"}, + {file = "black-23.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31946ec6f9c54ed7ba431c38bc81d758970dd734b96b8e8c2b17a367d7908171"}, + {file = "black-23.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:c870bee76ad5f7a5ea7bd01dc646028d05568d33b0b09b7ecfc8ec0da3f3f39c"}, + {file = "black-23.10.0-py3-none-any.whl", hash = "sha256:e223b731a0e025f8ef427dd79d8cd69c167da807f5710add30cdf131f13dd62e"}, + {file = "black-23.10.0.tar.gz", hash = "sha256:31b9f87b277a68d0e99d2905edae08807c007973eaa609da5f0c62def6b7c0bd"}, ] [[package]] @@ -954,21 +953,21 @@ files = [ [[package]] name = "orjson" -version = "3.9.8" +version = "3.9.9" requires_python = ">=3.8" summary = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" files = [ - {file = "orjson-3.9.8-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8a1c92f467f5fd0f8fb79273006b563364b1e45667b3760423498348dc2e22fa"}, - {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:742d4d16d66579ffff4b2048a8de4a0b03d731847233e92c4edd418a9c582d0f"}, - {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d1aab08b373232f568ea9ae048f9f77e09f389068afee6dd44bb6140e2c3ea3"}, - {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ed63273ec4ecdd7865e9d984d65a749c0d780882cf9dde6ab2bc6323f6471a"}, - {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d23edcb32383f3d86b2f4914f9825ce2d67625abd34be6e5ed1f59ec30127b7a"}, - {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a9bcd3a48b260d3dfe68b8ce93d11f99a70bd4c908efe22d195a1b1dcfb15ac2"}, - {file = "orjson-3.9.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9ce982f3c1df83f7dc74f3b2690605470ff4790d12558e44359f01e822c5cb08"}, - {file = "orjson-3.9.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4433dd903d5b022a64e9dd1dca94f08ab04d5d928a0ecd33dd46110468960879"}, - {file = "orjson-3.9.8-cp311-none-win32.whl", hash = "sha256:a119c73520192c2882d0549151b9cdd65e0bb5396bedf8951ba5f70d6a873879"}, - {file = "orjson-3.9.8-cp311-none-win_amd64.whl", hash = "sha256:764306f6370e6c76cbbf3139dd9b05be9c4481ee0b15966bd1907827a5777216"}, - {file = "orjson-3.9.8.tar.gz", hash = "sha256:ed1adc6db9841974170a5195b827ee4e392b1e8ca385b19fcdc3248489844059"}, + {file = "orjson-3.9.9-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:31d676bc236f6e919d100fb85d0a99812cff1ebffaa58106eaaec9399693e227"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:678ffb5c0a6b1518b149cc328c610615d70d9297e351e12c01d0beed5d65360f"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71b0cc21f2c324747bc77c35161e0438e3b5e72db6d3b515310457aba743f7f"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae72621f216d1d990468291b1ec153e1b46e0ed188a86d54e0941f3dabd09ee8"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:512e5a41af008e76451f5a344941d61f48dddcf7d7ddd3073deb555de64596a6"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f89dc338a12f4357f5bf1b098d3dea6072fb0b643fd35fec556f4941b31ae27"}, + {file = "orjson-3.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:957a45fb201c61b78bcf655a16afbe8a36c2c27f18a998bd6b5d8a35e358d4ad"}, + {file = "orjson-3.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1c01cf4b8e00c7e98a0a7cf606a30a26c32adf2560be2d7d5d6766d6f474b31"}, + {file = "orjson-3.9.9-cp311-none-win32.whl", hash = "sha256:397a185e5dd7f8ebe88a063fe13e34d61d394ebb8c70a443cee7661b9c89bda7"}, + {file = "orjson-3.9.9-cp311-none-win_amd64.whl", hash = "sha256:24301f2d99d670ded4fb5e2f87643bc7428a54ba49176e38deb2887e42fe82fb"}, + {file = "orjson-3.9.9.tar.gz", hash = "sha256:02e693843c2959befdd82d1ebae8b05ed12d1cb821605d5f9fe9f98ca5c9fd2b"}, ] [[package]] @@ -1458,40 +1457,40 @@ files = [ [[package]] name = "ruff" -version = "0.0.292" +version = "0.1.0" requires_python = ">=3.7" summary = "An extremely fast Python linter, written in Rust." 
files = [ - {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, - {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, - {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, - {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, - {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, - {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, + {file = "ruff-0.1.0-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:87114e254dee35e069e1b922d85d4b21a5b61aec759849f393e1dbb308a00439"}, + {file = "ruff-0.1.0-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:764f36d2982cc4a703e69fb73a280b7c539fd74b50c9ee531a4e3fe88152f521"}, + {file = "ruff-0.1.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65f4b7fb539e5cf0f71e9bd74f8ddab74cabdd673c6fb7f17a4dcfd29f126255"}, + {file = "ruff-0.1.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:299fff467a0f163baa282266b310589b21400de0a42d8f68553422fa6bf7ee01"}, + {file = "ruff-0.1.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d412678bf205787263bb702c984012a4f97e460944c072fd7cfa2bd084857c4"}, + {file = "ruff-0.1.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a5391b49b1669b540924640587d8d24128e45be17d1a916b1801d6645e831581"}, + {file = 
"ruff-0.1.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee8cd57f454cdd77bbcf1e11ff4e0046fb6547cac1922cc6e3583ce4b9c326d1"}, + {file = "ruff-0.1.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7aeed7bc23861a2b38319b636737bf11cfa55d2109620b49cf995663d3e888"}, + {file = "ruff-0.1.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04cd4298b43b16824d9a37800e4c145ba75c29c43ce0d74cad1d66d7ae0a4c5"}, + {file = "ruff-0.1.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7186ccf54707801d91e6314a016d1c7895e21d2e4cd614500d55870ed983aa9f"}, + {file = "ruff-0.1.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d88adfd93849bc62449518228581d132e2023e30ebd2da097f73059900d8dce3"}, + {file = "ruff-0.1.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ad2ccdb3bad5a61013c76a9c1240fdfadf2c7103a2aeebd7bcbbed61f363138f"}, + {file = "ruff-0.1.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b77f6cfa72c6eb19b5cac967cc49762ae14d036db033f7d97a72912770fd8e1c"}, + {file = "ruff-0.1.0-py3-none-win32.whl", hash = "sha256:480bd704e8af1afe3fd444cc52e3c900b936e6ca0baf4fb0281124330b6ceba2"}, + {file = "ruff-0.1.0-py3-none-win_amd64.whl", hash = "sha256:a76ba81860f7ee1f2d5651983f87beb835def94425022dc5f0803108f1b8bfa2"}, + {file = "ruff-0.1.0-py3-none-win_arm64.whl", hash = "sha256:45abdbdab22509a2c6052ecf7050b3f5c7d6b7898dc07e82869401b531d46da4"}, + {file = "ruff-0.1.0.tar.gz", hash = "sha256:ad6b13824714b19c5f8225871cf532afb994470eecb74631cd3500fe817e6b3f"}, ] [[package]] name = "sentry-sdk" -version = "1.31.0" +version = "1.32.0" summary = "Python client for Sentry (https://sentry.io)" dependencies = [ "certifi", "urllib3>=1.26.11; python_version >= \"3.6\"", ] files = [ - {file = "sentry-sdk-1.31.0.tar.gz", hash = "sha256:6de2e88304873484207fed836388e422aeff000609b104c802749fd89d56ba5b"}, - {file = "sentry_sdk-1.31.0-py2.py3-none-any.whl", hash = "sha256:64a7141005fb775b9db298a30de93e3b83e0ddd1232dc6f36eb38aebc1553291"}, + {file = "sentry-sdk-1.32.0.tar.gz", hash = "sha256:935e8fbd7787a3702457393b74b13d89a5afb67185bc0af85c00cb27cbd42e7c"}, + {file = "sentry_sdk-1.32.0-py2.py3-none-any.whl", hash = "sha256:eeb0b3550536f3bbc05bb1c7e0feb3a78d74acb43b607159a606ed2ec0a33a4d"}, ] [[package]] @@ -1820,7 +1819,7 @@ files = [ [[package]] name = "web3" -version = "6.10.0" +version = "6.11.0" requires_python = ">=3.7.2" summary = "web3.py" dependencies = [ @@ -1841,8 +1840,8 @@ dependencies = [ "websockets>=10.0.0", ] files = [ - {file = "web3-6.10.0-py3-none-any.whl", hash = "sha256:070625a0da4f0fcac090fa95186e0b865a1bbc43efb78fd2ee805f7bf9cd8986"}, - {file = "web3-6.10.0.tar.gz", hash = "sha256:ea89f8a6ee74b74c3ff21954eafe00ec914365adb904c6c374f559bc46d4a61c"}, + {file = "web3-6.11.0-py3-none-any.whl", hash = "sha256:44e79da6a4765eacf137f2f388e37aa0c1e24a93bdfb462cffe9441d1be3d509"}, + {file = "web3-6.11.0.tar.gz", hash = "sha256:050dea52ae73d787272e7ecba7249f096595938c90cce1a384c20375c6b0f720"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index d53fa72dc..28adf8991 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -106,30 +106,63 @@ dev = [ _black = "black src tests scripts" _ruff = "ruff check --fix src tests scripts" _mypy = "mypy src tests scripts" -all = { composite = ["fmt", "lint", "test"] } -fmt = { composite = ["_black"] } -lint = { composite = ["_ruff", "_mypy"] } -test = "pytest --cov-report=term-missing --cov=dipdup --cov-report=xml -n auto -s -v tests" -update = { shell = """ - pdm update - pdm export 
--without-hashes -f requirements --prod -o requirements.txt - pdm export --without-hashes -f requirements --dev -o requirements.dev.txt -""" } -image = "docker buildx build . --load --progress plain -t dipdup:latest" -clean = "git clean -xdf --exclude=.venv" -demos = { shell = """ + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint", "test"] + +[tool.pdm.scripts.demos] +help = "Recreate demo projects from templates" +shell = """ python scripts/update_demos.py python scripts/init_demos.py pdm run fmt - # FIXME: Run isort once to fix import sorting (bug in ruff) - isort . --force-single-line -l 120 pdm run lint -""" } -docs_build = "python scripts/docs.py --source docs --destination {args:../interface}/content/docs" -docs_serve = "python scripts/docs.py --source docs --destination {args:../interface}/content/docs --watch --serve" -docs_watch = "python scripts/docs.py --source docs --destination {args:../interface}/content/docs --watch" -docs_references = "python scripts/dump_references.py" -fixme = "grep -r -e 'FIXME: ' -e 'TODO: ' -e 'type: ignore' -n src/dipdup --color" +""" + +[tool.pdm.scripts.docs_build] +help = "Build docs" +cmd = "python scripts/docs.py --source docs --destination {args:../interface}/content/docs" + +[tool.pdm.scripts.docs_serve] +help = "Build and serve docs" +cmd = "python scripts/docs.py --source docs --destination {args:../interface}/content/docs --serve" + +[tool.pdm.scripts.docs_watch] +help = "Watch docs" +cmd = "python scripts/docs.py --source docs --destination {args:../interface}/content/docs --watch" + +[tool.pdm.scripts.docs_references] +help = "Dump references" +cmd = "python scripts/dump_references.py" + +[tool.pdm.scripts.fixme] +help = "Find FIXME and TODO comments" +cmd = "grep -r -e 'FIXME: ' -e 'TODO: ' -e 'type: ignore' -n src/dipdup --color" + +[tool.pdm.scripts.fmt] +help = "Format code with black" +composite = ["_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . 
--load --progress plain -t dipdup:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] + +[tool.pdm.scripts.test] +help = "Run tests" +cmd = "pytest --cov-report=term-missing --cov=dipdup --cov-report=xml -n auto -s -v tests" + +[tool.pdm.scripts.update] +help = "Update dependencies and dump requirements.txt" +shell = """ + pdm update + pdm export --without-hashes -f requirements --prod -o requirements.txt + pdm export --without-hashes -f requirements --dev -o requirements.dev.txt +""" [tool.pdm.build.targets.wheel] packages = ["src/dipdup"] diff --git a/requirements.dev.txt b/requirements.dev.txt index 642cc6d9a..0d8d8c184 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -16,7 +16,7 @@ asyncpg==0.28.0 attrs==23.1.0 babel==2.12.1 bitarray==2.8.1 -black==23.9.1 +black==23.10.0 certifi==2023.7.22 chardet==5.2.0 charset-normalizer==3.2.0 @@ -59,7 +59,7 @@ mypy-extensions==1.0.0 numpy==1.25.2 openapi-schema-validator==0.4.4 openapi-spec-validator==0.5.7 -orjson==3.9.8 +orjson==3.9.9 packaging==23.1 parsimonious==0.9.0 pathable==0.4.3 @@ -94,8 +94,8 @@ rfc3339-validator==0.1.4 rlp==3.0.0 ruamel-yaml==0.17.35 ruamel-yaml-clib==0.2.7 -ruff==0.0.292 -sentry-sdk==1.31.0 +ruff==0.1.0 +sentry-sdk==1.32.0 setuptools==68.2.2 six==1.16.0 sniffio==1.3.0 @@ -121,7 +121,7 @@ typing-extensions==4.7.1 tzlocal==5.0.1 urllib3==2.0.6 watchdog==3.0.0 -web3==6.10.0 +web3==6.11.0 websocket-client==1.6.1 websockets==10.4 yarl==1.9.2 diff --git a/requirements.txt b/requirements.txt index 7e9cbb23d..86c34bdc8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,7 +14,7 @@ asyncclick==8.1.3.4 asyncpg==0.28.0 attrs==23.1.0 bitarray==2.8.1 -black==23.9.1 +black==23.10.0 certifi==2023.7.22 chardet==5.2.0 charset-normalizer==3.2.0 @@ -50,7 +50,7 @@ mypy-extensions==1.0.0 numpy==1.25.2 openapi-schema-validator==0.4.4 openapi-spec-validator==0.5.7 -orjson==3.9.8 +orjson==3.9.9 packaging==23.1 parsimonious==0.9.0 pathable==0.4.3 @@ -77,7 +77,7 @@ rfc3339-validator==0.1.4 rlp==3.0.0 ruamel-yaml==0.17.35 ruamel-yaml-clib==0.2.7 -sentry-sdk==1.31.0 +sentry-sdk==1.32.0 setuptools==68.2.2 six==1.16.0 sniffio==1.3.0 @@ -90,6 +90,6 @@ tortoise-orm==0.19.3 typing-extensions==4.7.1 tzlocal==5.0.1 urllib3==2.0.6 -web3==6.10.0 +web3==6.11.0 websockets==10.4 yarl==1.9.2 diff --git a/src/demo_auction/models/__init__.py b/src/demo_auction/models/__init__.py index 34c8f6e15..81bbc1027 100644 --- a/src/demo_auction/models/__init__.py +++ b/src/demo_auction/models/__init__.py @@ -1,5 +1,6 @@ from enum import IntEnum + from dipdup import fields from dipdup.models import Model diff --git a/src/demo_auction/pyproject.toml b/src/demo_auction/pyproject.toml index 87a4c6ba1..e903c5409 100644 --- a/src/demo_auction/pyproject.toml +++ b/src/demo_auction/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_auction" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_auction ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . 
--load --progress plain -f deploy/Dockerfile -t demo_auction:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_auction:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_big_maps/models/__init__.py b/src/demo_big_maps/models/__init__.py index 7daaa1739..ed7d799e8 100644 --- a/src/demo_big_maps/models/__init__.py +++ b/src/demo_big_maps/models/__init__.py @@ -1,5 +1,6 @@ from dipdup import fields + from dipdup.models import Model diff --git a/src/demo_big_maps/pyproject.toml b/src/demo_big_maps/pyproject.toml index 0753b612d..0b8be9e98 100644 --- a/src/demo_big_maps/pyproject.toml +++ b/src/demo_big_maps/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_big_maps" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_big_maps ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_big_maps:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_big_maps:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_blank/pyproject.toml b/src/demo_blank/pyproject.toml index b2691dfc8..d7b9a5f2a 100644 --- a/src/demo_blank/pyproject.toml +++ b/src/demo_blank/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_blank" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_blank ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_blank:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . 
--load --progress plain -f deploy/Dockerfile -t demo_blank:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_dao/models/__init__.py b/src/demo_dao/models/__init__.py index 6ccb9d8cc..6afee512f 100644 --- a/src/demo_dao/models/__init__.py +++ b/src/demo_dao/models/__init__.py @@ -1,5 +1,6 @@ from dipdup import fields + from dipdup.models import Model diff --git a/src/demo_dao/pyproject.toml b/src/demo_dao/pyproject.toml index 314dcfdda..ed8e09859 100644 --- a/src/demo_dao/pyproject.toml +++ b/src/demo_dao/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_dao" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_dao ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_dao:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_dao:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_dex/models/__init__.py b/src/demo_dex/models/__init__.py index 4738efd55..abaa84fd9 100644 --- a/src/demo_dex/models/__init__.py +++ b/src/demo_dex/models/__init__.py @@ -1,6 +1,7 @@ from enum import IntEnum from dipdup import fields + from dipdup.models import Model diff --git a/src/demo_dex/pyproject.toml b/src/demo_dex/pyproject.toml index 3086a4c0a..7a78a63a4 100644 --- a/src/demo_dex/pyproject.toml +++ b/src/demo_dex/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_dex" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_dex ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_dex:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . 
--load --progress plain -f deploy/Dockerfile -t demo_dex:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_domains/models/__init__.py b/src/demo_domains/models/__init__.py index 0b8afea86..02e2e637c 100644 --- a/src/demo_domains/models/__init__.py +++ b/src/demo_domains/models/__init__.py @@ -1,5 +1,6 @@ from dipdup import fields + from dipdup.models import Model diff --git a/src/demo_domains/pyproject.toml b/src/demo_domains/pyproject.toml index ca4300aa0..83d7de114 100644 --- a/src/demo_domains/pyproject.toml +++ b/src/demo_domains/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_domains" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_domains ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_domains:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_domains:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_events/pyproject.toml b/src/demo_events/pyproject.toml index 3adf9be74..a0d28f383 100644 --- a/src/demo_events/pyproject.toml +++ b/src/demo_events/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_events" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_events ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_events:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_events:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_evm_events/pyproject.toml b/src/demo_evm_events/pyproject.toml index c597edef7..4a660e146 100644 --- a/src/demo_evm_events/pyproject.toml +++ b/src/demo_evm_events/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_evm_events" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_evm_events ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . 
--load --progress plain -f deploy/Dockerfile -t demo_evm_events:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_evm_events:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_factories/handlers/on_transfer.py b/src/demo_factories/handlers/on_transfer.py index 6c3881545..ef792c5b5 100644 --- a/src/demo_factories/handlers/on_transfer.py +++ b/src/demo_factories/handlers/on_transfer.py @@ -1,8 +1,9 @@ +from dipdup.context import HandlerContext +from dipdup.models.tezos_tzkt import TzktTransaction + import demo_factories.models as models from demo_factories.types.token.tezos_parameters.transfer import TransferParameter from demo_factories.types.token.tezos_storage import TokenStorage -from dipdup.context import HandlerContext -from dipdup.models.tezos_tzkt import TzktTransaction async def on_transfer( diff --git a/src/demo_factories/models/__init__.py b/src/demo_factories/models/__init__.py index 16efd6f76..d6749bf61 100644 --- a/src/demo_factories/models/__init__.py +++ b/src/demo_factories/models/__init__.py @@ -1,7 +1,8 @@ from dipdup import fields -from dipdup.models import Model +from dipdup.models import Model + class Transfer(Model): id = fields.IntField(pk=True) diff --git a/src/demo_factories/pyproject.toml b/src/demo_factories/pyproject.toml index cf8d5f71b..c87acd14f 100644 --- a/src/demo_factories/pyproject.toml +++ b/src/demo_factories/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_factories" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_factories ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_factories:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_factories:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_head/pyproject.toml b/src/demo_head/pyproject.toml index ce9961ad5..f6a880e16 100644 --- a/src/demo_head/pyproject.toml +++ b/src/demo_head/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_head" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_head ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . 
--load --progress plain -f deploy/Dockerfile -t demo_head:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_head:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_nft_marketplace/models/__init__.py b/src/demo_nft_marketplace/models/__init__.py index 8d6a83557..dabe43e54 100644 --- a/src/demo_nft_marketplace/models/__init__.py +++ b/src/demo_nft_marketplace/models/__init__.py @@ -1,6 +1,8 @@ from enum import IntEnum + from dipdup import fields + from dipdup.models import Model diff --git a/src/demo_nft_marketplace/pyproject.toml b/src/demo_nft_marketplace/pyproject.toml index ec5917fdc..e1456fa61 100644 --- a/src/demo_nft_marketplace/pyproject.toml +++ b/src/demo_nft_marketplace/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_nft_marketplace" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_nft_marketplace ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_nft_marketplace:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_nft_marketplace:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_raw/models/__init__.py b/src/demo_raw/models/__init__.py index 23be1468d..7611bc676 100644 --- a/src/demo_raw/models/__init__.py +++ b/src/demo_raw/models/__init__.py @@ -1,6 +1,7 @@ from dipdup import fields -from dipdup.models import Model + from dipdup.models.tezos_tzkt import TzktOperationType +from dipdup.models import Model class Operation(Model): diff --git a/src/demo_raw/pyproject.toml b/src/demo_raw/pyproject.toml index 1ad94b476..0a0e4da86 100644 --- a/src/demo_raw/pyproject.toml +++ b/src/demo_raw/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_raw" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_raw ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_raw:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . 
--load --progress plain -f deploy/Dockerfile -t demo_raw:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_token/models/__init__.py b/src/demo_token/models/__init__.py index 5b81565c2..e84aee5e5 100644 --- a/src/demo_token/models/__init__.py +++ b/src/demo_token/models/__init__.py @@ -1,4 +1,5 @@ from dipdup import fields + from dipdup.models import Model diff --git a/src/demo_token/pyproject.toml b/src/demo_token/pyproject.toml index 17f3695cc..8e991f514 100644 --- a/src/demo_token/pyproject.toml +++ b/src/demo_token/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_token" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_token ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_token:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_token:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_token_transfers/models/__init__.py b/src/demo_token_transfers/models/__init__.py index 5b81565c2..e84aee5e5 100644 --- a/src/demo_token_transfers/models/__init__.py +++ b/src/demo_token_transfers/models/__init__.py @@ -1,4 +1,5 @@ from dipdup import fields + from dipdup.models import Model diff --git a/src/demo_token_transfers/pyproject.toml b/src/demo_token_transfers/pyproject.toml index 8eed8325b..9ee68568a 100644 --- a/src/demo_token_transfers/pyproject.toml +++ b/src/demo_token_transfers/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_token_transfers" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_token_transfers ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_token_transfers:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_token_transfers:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/demo_uniswap/pyproject.toml b/src/demo_uniswap/pyproject.toml index 842c82de7..0edbcdadb 100644 --- a/src/demo_uniswap/pyproject.toml +++ b/src/demo_uniswap/pyproject.toml @@ -1,3 +1,4 @@ +# Generated by DipDup 7.0.2+editable [project] name = "demo_uniswap" version = "0.0.1" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude demo_uniswap ." 
-all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_uniswap:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t demo_uniswap:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = 120 diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index bb35be168..2fbb676a4 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -217,8 +217,6 @@ async def cli(ctx: click.Context, config: list[str], env_file: list[str]) -> Non from dipdup.sys import set_up_logging set_up_logging() - if env.DEBUG: - logging.getLogger('dipdup').setLevel(logging.DEBUG) env_file_paths = [Path(file) for file in env_file] config_paths = [Path(file) for file in config] diff --git a/src/dipdup/config/__init__.py b/src/dipdup/config/__init__.py index 8740486aa..f045c5199 100644 --- a/src/dipdup/config/__init__.py +++ b/src/dipdup/config/__init__.py @@ -737,6 +737,10 @@ def get_evm_node_datasource(self, name: str) -> EvmNodeDatasourceConfig: return datasource def set_up_logging(self) -> None: + if env.DEBUG: + logging.getLogger('dipdup').setLevel(logging.DEBUG) + logging.getLogger(self.package).setLevel(logging.DEBUG) + loglevels = self.logging if not isinstance(loglevels, dict): loglevels = { diff --git a/src/dipdup/project.py b/src/dipdup/project.py index 58d46bad7..8698b1efc 100644 --- a/src/dipdup/project.py +++ b/src/dipdup/project.py @@ -317,5 +317,8 @@ def _render(answers: Answers, template_path: Path, output_path: Path, force: boo _logger.info('Generating `%s`', output_path) template = load_template(str(template_path)) - content = template.render(project=answers) + content = template.render( + project=answers, + __version__=__version__, + ) write(output_path, content, overwrite=force) diff --git a/src/dipdup/projects/base/pyproject.toml.j2 b/src/dipdup/projects/base/pyproject.toml.j2 index 01a852880..77a96fb5b 100644 --- a/src/dipdup/projects/base/pyproject.toml.j2 +++ b/src/dipdup/projects/base/pyproject.toml.j2 @@ -1,3 +1,4 @@ +# Generated by DipDup {{ __version__ }} [project] name = "{{ project.package }}" version = "{{ project.version }}" @@ -25,10 +26,22 @@ _isort = "isort ." _black = "black ." _ruff = "ruff check --fix ." _mypy = "mypy --no-incremental --exclude {{ project.package }} ." -all = { composite = ["fmt", "lint"] } -fmt = { composite = ["_isort", "_black"] } -lint = { composite = ["_ruff", "_mypy"] } -image = "docker buildx build . --load --progress plain -f deploy/Dockerfile -t {{ project.package }}:latest" + +[tool.pdm.scripts.all] +help = "Run all checks" +composite = ["fmt", "lint"] + +[tool.pdm.scripts.fmt] +help = "Format code with isort and black" +composite = ["_isort", "_black"] + +[tool.pdm.scripts.image] +help = "Build Docker image" +cmd = "docker buildx build . 
--load --progress plain -f deploy/Dockerfile -t {{ project.package }}:latest" + +[tool.pdm.scripts.lint] +help = "Check code with ruff and mypy" +composite = ["_ruff", "_mypy"] [tool.isort] line_length = {{ project.line_length }} diff --git a/src/dipdup/projects/demo_uniswap/models/repo.py.j2 b/src/dipdup/projects/demo_uniswap/models/repo.py.j2 index e2e774ce5..6e0171630 100644 --- a/src/dipdup/projects/demo_uniswap/models/repo.py.j2 +++ b/src/dipdup/projects/demo_uniswap/models/repo.py.j2 @@ -2,7 +2,7 @@ from decimal import Decimal from typing import Any from typing import cast -from lru import LRU # type: ignore[import-untyped] +from lru import LRU # type: ignore[import-not-found] import {{ project.package }}.models as models from dipdup.config.evm import EvmContractConfig @@ -32,7 +32,7 @@ class ModelsRepo: self._pending_positions[idx] = position def get_pending_position(self, idx: str) -> dict[str, Any] | None: - return self._pending_positions.get(idx, None) + return self._pending_positions.get(idx, None) # type: ignore[no-any-return] async def get_ctx_factory(ctx: HandlerContext) -> models.Factory: diff --git a/src/dipdup/sys.py b/src/dipdup/sys.py index 11e52b9f8..a60a55f59 100644 --- a/src/dipdup/sys.py +++ b/src/dipdup/sys.py @@ -38,6 +38,9 @@ def set_up_logging() -> None: # NOTE: Nothing useful there logging.getLogger('tortoise').setLevel(logging.WARNING) + if env.DEBUG: + logging.getLogger('dipdup').setLevel(logging.DEBUG) + def set_up_process(signals: bool) -> None: """Set up interpreter process-wide state"""
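The hunks above move the debug toggle out of the CLI entrypoint and into logging setup, so the `dipdup` logger is switched to DEBUG wherever `set_up_logging()` runs. A minimal, self-contained sketch of that behaviour (illustrative only, not part of the patch; it stubs `env.DEBUG`, which in the real package is presumably read from the process environment):

    import logging


    class _Env:
        DEBUG = True  # stand-in for dipdup.env.DEBUG


    env = _Env()


    def set_up_logging() -> None:
        logging.basicConfig(level=logging.INFO)
        # Third-party chatter stays at WARNING unconditionally
        logging.getLogger('tortoise').setLevel(logging.WARNING)
        # With this change the framework logger goes to DEBUG whenever the flag
        # is set, instead of only when the CLI entrypoint handles it
        if env.DEBUG:
            logging.getLogger('dipdup').setLevel(logging.DEBUG)


    set_up_logging()
    logging.getLogger('dipdup').debug('visible only when env.DEBUG is truthy')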