From 34453e354a74005f9ae1c5711fa1c6402dcb2937 Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Fri, 29 Sep 2023 18:50:02 -0300 Subject: [PATCH 01/11] Fixed node log request, decimals in uniswap demo (#854) --- CHANGELOG.md | 7 + src/demo_uniswap/models/__init__.py | 138 +++++++++--------- .../indexes/evm_subsquid_events/index.py | 2 +- .../demo_uniswap/models/__init__.py.j2 | 138 +++++++++--------- 4 files changed, 146 insertions(+), 139 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ad293695f..47349e796 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning]. +## [Unreleased] + +### Fixed + +- demos: Fixed decimal overflow in `demo_uniswap` project. +- evm.node: Fixed incorrect log request parameters. + ## [7.0.0] - 2023-09-25 ### Fixed diff --git a/src/demo_uniswap/models/__init__.py b/src/demo_uniswap/models/__init__.py index 765edbe40..04e1288de 100644 --- a/src/demo_uniswap/models/__init__.py +++ b/src/demo_uniswap/models/__init__.py @@ -12,23 +12,23 @@ class Factory(CachedModel): # amount of transactions all time tx_count = fields.BigIntField(default=0) # total volume all time in derived USD - total_volume_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + total_volume_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # total volume all time in derived ETH - total_volume_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_volume_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # total swap fees all time in USD - total_fees_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + total_fees_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # total swap fees all time in USD - total_fees_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_fees_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all volume even through less reliable USD values - untracked_volume_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + untracked_volume_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # TVL derived in USD - total_value_locked_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + total_value_locked_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # TVL derived in ETH - total_value_locked_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # TVL derived in USD untracked - total_value_locked_usd_untracked = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + total_value_locked_usd_untracked = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # TVL derived in ETH untracked - total_value_locked_eth_untracked = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_eth_untracked = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # current owner of the factory owner = fields.TextField(default=ADDRESS_ZERO) @@ -44,25 +44,25 @@ class Token(CachedModel): # token total supply total_supply = fields.BigIntField() # volume in token units - volume = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # volume 
in derived USD - volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # volume in USD even on pools with less reliable USD values - untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # fees in USD - fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # transactions across all pools that include this token tx_count = fields.BigIntField(default=0) # number of pools containing this token pool_count = fields.BigIntField(default=0) # liquidity across all pools in token units - total_value_locked = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # liquidity across all pools in derived USD - total_value_locked_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # TVL derived in USD untracked - total_value_locked_usd_untracked = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_usd_untracked = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived price in ETH - derived_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + derived_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # pools token is in that are whitelisted for USD pricing whitelist_pools = fields.ArrayField(default=[]) @@ -89,41 +89,41 @@ class Pool(CachedModel): # tracker for global fee growth # fee_growth_global_1x128 = fields.BigIntField(default=0) # token0 per token1 - token0_price = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + token0_price = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # token1 per token0 - token1_price = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + token1_price = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # current tick tick = fields.BigIntField(null=True) # current observation index observation_index = fields.BigIntField(default=0) # all time token0 swapped - volume_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time token1 swapped - volume_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time USD swapped - volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time USD swapped, unfiltered for unreliable USD pools - untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # fees in USD - fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time number of transactions tx_count = fields.BigIntField(default=0) # all time fees collected token0 - collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + 
collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time fees collected token1 - collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time fees collected derived USD - collected_fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # total token 0 across all ticks - total_value_locked_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # total token 1 across all ticks - total_value_locked_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # tvl derived ETH - total_value_locked_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # tvl USD - total_value_locked_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # TVL derived in USD untracked - total_value_locked_usd_untracked = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_usd_untracked = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # Fields used to help derived relationship liquidity_provider_count = fields.BigIntField(default=0) # used to detect new exchanges @@ -142,25 +142,25 @@ class Tick(Model): # how much liquidity changes when tick crossed liquidity_net = fields.DecimalField(decimal_places=0, max_digits=76, default=0) # calculated price of token0 of tick within this pool - constant - price0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + price0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # calculated price of token1 of tick within this pool - constant - price1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + price1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # lifetime volume of token0 with this tick in range - volume_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # lifetime volume of token1 with this tick in range - volume_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # lifetime volume in derived USD with this tick in range - volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # lifetime volume in untracked USD with this tick in range - untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # fees in USD - fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token0 - collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token0 = 
fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token1 - collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in USD - collected_fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # created time created_at_timestamp = fields.BigIntField() # created block @@ -199,17 +199,17 @@ class Position(Model): # total position liquidity liquidity = fields.DecimalField(max_digits=76, decimal_places=0, default=0) # amount of token 0 ever deposited to position - deposited_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + deposited_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 ever deposited to position - deposited_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + deposited_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 0 ever withdrawn from position (without fees) - withdrawn_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + withdrawn_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 ever withdrawn from position (without fees) - withdrawn_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + withdrawn_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token0 - collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token1 - collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # vars needed for fee computation # fee_growth_inside_0_last_x128 = fields.BigIntField(default=0) # fee_growth_inside_1_last_x128 = fields.BigIntField(default=0) @@ -249,17 +249,17 @@ class PositionSnapshot(Model): # total position liquidity liquidity = fields.DecimalField(max_digits=76, decimal_places=0, default=0) # amount of token 0 ever deposited to position - deposited_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + deposited_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 ever deposited to position - deposited_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + deposited_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 0 ever withdrawn from position (without fees) - withdrawn_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + withdrawn_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 ever withdrawn from position (without fees) - withdrawn_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + withdrawn_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token0 - collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token1 - 
collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # internal vars needed for fee computation # fee_growth_inside_0_last_x128 = fields.BigIntField() # fee_growth_inside_1_last_x128 = fields.BigIntField() @@ -286,9 +286,9 @@ class Mint(Model): # amount of liquidity minted amount = fields.DecimalField(decimal_places=0, max_digits=76, default=0) # amount of token 0 minted - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 minted - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived amount based on available prices of tokens amount_usd = fields.DecimalField(decimal_places=18, max_digits=76, null=True) # lower tick of the position @@ -318,11 +318,11 @@ class Burn(Model): # amount of liquidity burned amount = fields.DecimalField(decimal_places=0, max_digits=76, default=0) # amount of token 0 burned - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 burned - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived amount based on available prices of tokens - amount_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + amount_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # lower tick of position tick_lower = fields.BigIntField() # upper tick of position @@ -350,11 +350,11 @@ class Swap(Model): # txn origin origin = fields.CharField(max_length=42) # the EOA that initiated the txn # delta of token0 swapped - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # delta of token1 swapped - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived info - amount_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + amount_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # The sqrt(price) of the pool after the swap, as a Q64.96 sqrt_price_x96 = fields.DecimalField(decimal_places=0, max_digits=76, default=0) # the tick after the swap @@ -374,9 +374,9 @@ class Collect(Model): # owner of position collect was performed on owner = fields.CharField(max_length=42, null=True) # amount of token0 collected - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token1 collected - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived amount based on available prices of tokens amount_usd = fields.DecimalField(decimal_places=18, max_digits=76, null=True) # lower tick of position @@ -400,14 +400,14 @@ class Flash(Model): # recipient of the flash recipient = fields.CharField(max_length=42) # amount of token0 flashed - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, 
max_digits=96, default=0) # amount of token1 flashed - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived amount based on available prices of tokens - amount_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + amount_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # amount token0 paid for flash - amount0_paid = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0_paid = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount token1 paid for flash - amount1_paid = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1_paid = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # index within the txn log_index = fields.BigIntField() \ No newline at end of file diff --git a/src/dipdup/indexes/evm_subsquid_events/index.py b/src/dipdup/indexes/evm_subsquid_events/index.py index 78070c8f9..b40484ed7 100644 --- a/src/dipdup/indexes/evm_subsquid_events/index.py +++ b/src/dipdup/indexes/evm_subsquid_events/index.py @@ -155,13 +155,13 @@ async def _synchronize(self, sync_level: int) -> None: for handler in self._config.handlers: typename = handler.contract.module_name topics.add(self.topics[typename][handler.name]) + # FIXME: This is terribly inefficient (but okay for the last mile); see advanced example in web3.py docs. for level in range(first_level, sync_level): # NOTE: Get random one every time level_logs = await self.random_node.get_logs( { 'fromBlock': hex(level), 'toBlock': hex(level), - 'topics': tuple(topics), } ) block = await self.random_node.get_block_by_level(level) diff --git a/src/dipdup/projects/demo_uniswap/models/__init__.py.j2 b/src/dipdup/projects/demo_uniswap/models/__init__.py.j2 index 1e3da899f..2dac7983f 100644 --- a/src/dipdup/projects/demo_uniswap/models/__init__.py.j2 +++ b/src/dipdup/projects/demo_uniswap/models/__init__.py.j2 @@ -12,23 +12,23 @@ class Factory(CachedModel): # amount of transactions all time tx_count = fields.BigIntField(default=0) # total volume all time in derived USD - total_volume_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + total_volume_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # total volume all time in derived ETH - total_volume_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_volume_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # total swap fees all time in USD - total_fees_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + total_fees_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # total swap fees all time in USD - total_fees_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_fees_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all volume even through less reliable USD values - untracked_volume_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + untracked_volume_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # TVL derived in USD - total_value_locked_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + total_value_locked_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # TVL derived in ETH - total_value_locked_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_eth = 
fields.DecimalField(decimal_places=18, max_digits=96, default=0) # TVL derived in USD untracked - total_value_locked_usd_untracked = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + total_value_locked_usd_untracked = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # TVL derived in ETH untracked - total_value_locked_eth_untracked = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_eth_untracked = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # current owner of the factory owner = fields.TextField(default=ADDRESS_ZERO) @@ -44,25 +44,25 @@ class Token(CachedModel): # token total supply total_supply = fields.BigIntField() # volume in token units - volume = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # volume in derived USD - volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # volume in USD even on pools with less reliable USD values - untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # fees in USD - fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # transactions across all pools that include this token tx_count = fields.BigIntField(default=0) # number of pools containing this token pool_count = fields.BigIntField(default=0) # liquidity across all pools in token units - total_value_locked = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # liquidity across all pools in derived USD - total_value_locked_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # TVL derived in USD untracked - total_value_locked_usd_untracked = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_usd_untracked = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived price in ETH - derived_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + derived_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # pools token is in that are whitelisted for USD pricing whitelist_pools = fields.ArrayField(default=[]) @@ -89,41 +89,41 @@ class Pool(CachedModel): # tracker for global fee growth # fee_growth_global_1x128 = fields.BigIntField(default=0) # token0 per token1 - token0_price = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + token0_price = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # token1 per token0 - token1_price = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + token1_price = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # current tick tick = fields.BigIntField(null=True) # current observation index observation_index = fields.BigIntField(default=0) # all time token0 swapped - volume_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time token1 swapped - volume_token1 = fields.DecimalField(decimal_places=18, 
max_digits=76, default=0) + volume_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time USD swapped - volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time USD swapped, unfiltered for unreliable USD pools - untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # fees in USD - fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time number of transactions tx_count = fields.BigIntField(default=0) # all time fees collected token0 - collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time fees collected token1 - collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time fees collected derived USD - collected_fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # total token 0 across all ticks - total_value_locked_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # total token 1 across all ticks - total_value_locked_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # tvl derived ETH - total_value_locked_eth = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_eth = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # tvl USD - total_value_locked_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # TVL derived in USD untracked - total_value_locked_usd_untracked = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + total_value_locked_usd_untracked = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # Fields used to help derived relationship liquidity_provider_count = fields.BigIntField(default=0) # used to detect new exchanges @@ -142,25 +142,25 @@ class Tick(Model): # how much liquidity changes when tick crossed liquidity_net = fields.DecimalField(decimal_places=0, max_digits=76, default=0) # calculated price of token0 of tick within this pool - constant - price0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + price0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # calculated price of token1 of tick within this pool - constant - price1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + price1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # lifetime volume of token0 with this tick in range - volume_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # lifetime volume of token1 with this tick in range - volume_token1 = 
fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # lifetime volume in derived USD with this tick in range - volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # lifetime volume in untracked USD with this tick in range - untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + untracked_volume_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # fees in USD - fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token0 - collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token1 - collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in USD - collected_fees_usd = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_usd = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # created time created_at_timestamp = fields.BigIntField() # created block @@ -199,17 +199,17 @@ class Position(Model): # total position liquidity liquidity = fields.DecimalField(max_digits=76, decimal_places=0, default=0) # amount of token 0 ever deposited to position - deposited_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + deposited_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 ever deposited to position - deposited_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + deposited_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 0 ever withdrawn from position (without fees) - withdrawn_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + withdrawn_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 ever withdrawn from position (without fees) - withdrawn_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + withdrawn_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token0 - collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token1 - collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # vars needed for fee computation # fee_growth_inside_0_last_x128 = fields.BigIntField(default=0) # fee_growth_inside_1_last_x128 = fields.BigIntField(default=0) @@ -249,17 +249,17 @@ class PositionSnapshot(Model): # total position liquidity liquidity = fields.DecimalField(max_digits=76, decimal_places=0, default=0) # amount of token 0 ever deposited to position - deposited_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + deposited_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 ever 
deposited to position - deposited_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + deposited_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 0 ever withdrawn from position (without fees) - withdrawn_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + withdrawn_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 ever withdrawn from position (without fees) - withdrawn_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + withdrawn_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token0 - collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # all time collected fees in token1 - collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + collected_fees_token1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # internal vars needed for fee computation # fee_growth_inside_0_last_x128 = fields.BigIntField() # fee_growth_inside_1_last_x128 = fields.BigIntField() @@ -286,9 +286,9 @@ class Mint(Model): # amount of liquidity minted amount = fields.DecimalField(decimal_places=0, max_digits=76, default=0) # amount of token 0 minted - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 minted - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived amount based on available prices of tokens amount_usd = fields.DecimalField(decimal_places=18, max_digits=76, null=True) # lower tick of the position @@ -318,11 +318,11 @@ class Burn(Model): # amount of liquidity burned amount = fields.DecimalField(decimal_places=0, max_digits=76, default=0) # amount of token 0 burned - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token 1 burned - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived amount based on available prices of tokens - amount_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + amount_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # lower tick of position tick_lower = fields.BigIntField() # upper tick of position @@ -350,11 +350,11 @@ class Swap(Model): # txn origin origin = fields.CharField(max_length=42) # the EOA that initiated the txn # delta of token0 swapped - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # delta of token1 swapped - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived info - amount_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + amount_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # The sqrt(price) of the pool after the swap, as a Q64.96 sqrt_price_x96 = fields.DecimalField(decimal_places=0, max_digits=76, default=0) # the tick after the swap @@ -374,9 
+374,9 @@ class Collect(Model): # owner of position collect was performed on owner = fields.CharField(max_length=42, null=True) # amount of token0 collected - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token1 collected - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived amount based on available prices of tokens amount_usd = fields.DecimalField(decimal_places=18, max_digits=76, null=True) # lower tick of position @@ -400,14 +400,14 @@ class Flash(Model): # recipient of the flash recipient = fields.CharField(max_length=42) # amount of token0 flashed - amount0 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount of token1 flashed - amount1 = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1 = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # derived amount based on available prices of tokens - amount_usd = fields.DecimalField(decimal_places=2, max_digits=16, default=0) + amount_usd = fields.DecimalField(decimal_places=2, max_digits=32, default=0) # amount token0 paid for flash - amount0_paid = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount0_paid = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # amount token1 paid for flash - amount1_paid = fields.DecimalField(decimal_places=18, max_digits=76, default=0) + amount1_paid = fields.DecimalField(decimal_places=18, max_digits=96, default=0) # index within the txn log_index = fields.BigIntField() \ No newline at end of file From 05ffa8b3ba4c15c0d2e711db234468b41237442a Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Sat, 30 Sep 2023 09:09:10 -0300 Subject: [PATCH 02/11] Fix issue with determining the last level when syncing with node (#856) * DIPDUP_DEBUG env * Fixed issue with determining the last level when syncing with node * Remove outdated check * Log realtime messages --- CHANGELOG.md | 5 +++ src/dipdup/cli.py | 4 ++- src/dipdup/config/evm_subsquid_events.py | 2 ++ src/dipdup/datasources/evm_node.py | 2 +- src/dipdup/env.py | 35 ++++++++++++------ src/dipdup/http.py | 3 +- .../indexes/evm_subsquid_events/index.py | 15 ++++---- tests/configs/demo_evm_events_node.yml | 36 +++++++++++++++++++ tests/test_demos.py | 1 + 9 files changed, 80 insertions(+), 23 deletions(-) create mode 100644 tests/configs/demo_evm_events_node.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 47349e796..38380eaae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,10 +6,15 @@ The format is based on [Keep a Changelog], and this project adheres to [Semantic ## [Unreleased] +### Added + +- env: Added `DIPDUP_DEBUG` environment variable to enable debug logging. + ### Fixed - demos: Fixed decimal overflow in `demo_uniswap` project. - evm.node: Fixed incorrect log request parameters. +- evm.subsquid.events: Fixed issue with determining the last level when syncing with node. 
## [7.0.0] - 2023-09-25 diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index 48f50ca0a..a2a32ce8a 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -18,6 +18,7 @@ import asyncclick as click from dipdup import __version__ +from dipdup import env from dipdup.install import EPILOG from dipdup.install import WELCOME_ASCII from dipdup.performance import metrics @@ -216,6 +217,8 @@ async def cli(ctx: click.Context, config: list[str], env_file: list[str]) -> Non from dipdup.sys import set_up_logging set_up_logging() + if env.DEBUG: + logging.getLogger('dipdup').setLevel(logging.DEBUG) env_file_paths = [Path(file) for file in env_file] config_paths = [Path(file) for file in config] @@ -232,7 +235,6 @@ async def cli(ctx: click.Context, config: list[str], env_file: list[str]) -> Non logging.getLogger('dipdup').setLevel(logging.INFO) return - from dipdup import env from dipdup.config import DipDupConfig from dipdup.exceptions import InitializationRequiredError from dipdup.package import DipDupPackage diff --git a/src/dipdup/config/evm_subsquid_events.py b/src/dipdup/config/evm_subsquid_events.py index 8c99cddb0..55f2ac254 100644 --- a/src/dipdup/config/evm_subsquid_events.py +++ b/src/dipdup/config/evm_subsquid_events.py @@ -58,6 +58,7 @@ class SubsquidEventsIndexConfig(IndexConfig): :param datasource: Subsquid datasource :param handlers: Event handlers :param abi: One or more `evm.abi` datasource(s) for the same network + :param node_only: Don't use Subsquid Archives API (dev only) :param first_level: Level to start indexing from :param last_level: Level to stop indexing and disable this index """ @@ -66,6 +67,7 @@ class SubsquidEventsIndexConfig(IndexConfig): datasource: SubsquidDatasourceConfig handlers: tuple[SubsquidEventsHandlerConfig, ...] = field(default_factory=tuple) abi: AbiDatasourceConfig | tuple[AbiDatasourceConfig, ...] 
| None = None + node_only: bool = False first_level: int = 0 last_level: int = 0 diff --git a/src/dipdup/datasources/evm_node.py b/src/dipdup/datasources/evm_node.py index 5a600f1d4..b0af3869d 100644 --- a/src/dipdup/datasources/evm_node.py +++ b/src/dipdup/datasources/evm_node.py @@ -267,7 +267,7 @@ async def _on_message(self, message: Message) -> None: if subscription_id not in self._subscription_ids: raise FrameworkException(f'{self.name}: Unknown subscription ID: {subscription_id}') subscription = self._subscription_ids[subscription_id] - self._logger.debug('Received subscription for channel %s', subscription_id) + self._logger.info('Received a message from channel %s', subscription_id) await self._handle_subscription(subscription, data['params']['result']) else: raise DatasourceError(f'Unknown method: {data["method"]}', self.name) diff --git a/src/dipdup/env.py b/src/dipdup/env.py index ffa1fffc2..90731e2ef 100644 --- a/src/dipdup/env.py +++ b/src/dipdup/env.py @@ -62,17 +62,30 @@ def get_path(key: str) -> Path | None: def set_test() -> None: global TEST, REPLAY_PATH TEST = True - REPLAY_PATH = str(Path(__file__).parent.parent.parent / 'tests' / 'replays') - env['DIPDUP_REPLAY_PATH'] = REPLAY_PATH + REPLAY_PATH = Path(__file__).parent.parent.parent / 'tests' / 'replays' -if get('CI') == 'true': - env['DIPDUP_CI'] = '1' -if platform.system() == 'Linux' and Path('/.dockerenv').exists(): - env['DIPDUP_DOCKER'] = '1' +CI: bool +DEBUG: bool +DOCKER: bool +NEXT: bool +REPLAY_PATH: Path | None +TEST: bool -CI = get_bool('DIPDUP_CI') -DOCKER = get_bool('DIPDUP_DOCKER') -NEXT = get_bool('DIPDUP_NEXT') -REPLAY_PATH = get_path('DIPDUP_REPLAY_PATH') -TEST = get_bool('DIPDUP_TEST') + +def read() -> None: + global CI, DEBUG, DOCKER, NEXT, REPLAY_PATH, TEST + CI = get_bool('DIPDUP_CI') + DEBUG = get_bool('DIPDUP_DEBUG') + DOCKER = get_bool('DIPDUP_DOCKER') + NEXT = get_bool('DIPDUP_NEXT') + REPLAY_PATH = get_path('DIPDUP_REPLAY_PATH') + TEST = get_bool('DIPDUP_TEST') + + if get('CI') == 'true': + CI = True + if platform.system() == 'Linux' and Path('/.dockerenv').exists(): + DOCKER = True + + +read() diff --git a/src/dipdup/http.py b/src/dipdup/http.py index 8f499edb8..7e7a7fa82 100644 --- a/src/dipdup/http.py +++ b/src/dipdup/http.py @@ -23,6 +23,7 @@ from aiolimiter import AsyncLimiter from dipdup import __version__ +from dipdup import env from dipdup.config import ResolvedHttpConfig from dipdup.exceptions import FrameworkException from dipdup.exceptions import InvalidRequestError @@ -143,7 +144,7 @@ async def _retry_request( """Retry a request in case of failure sleeping according to config""" attempt = 1 retry_sleep = self._config.retry_sleep - retry_count = self._config.retry_count + retry_count = 0 if env.TEST else self._config.retry_count retry_count_str = 'inf' if retry_count is sys.maxsize else str(retry_count) while True: diff --git a/src/dipdup/indexes/evm_subsquid_events/index.py b/src/dipdup/indexes/evm_subsquid_events/index.py index b40484ed7..47b59990b 100644 --- a/src/dipdup/indexes/evm_subsquid_events/index.py +++ b/src/dipdup/indexes/evm_subsquid_events/index.py @@ -99,13 +99,6 @@ async def _process_queue(self) -> None: break for message_level, level_logs in logs_by_level.items(): - # NOTE: If it's not a next block - resync with Subsquid - if message_level != self.state.level + 1: - self._logger.info('Not enough messages in queue; resyncing to %s', message_level) - self._queue.clear() - self.datasource.set_sync_level(None, message_level) - return - await 
self._process_level_events(tuple(level_logs), self.topics, message_level) def get_sync_level(self) -> int: @@ -147,16 +140,20 @@ async def _synchronize(self, sync_level: int) -> None: self._logger.info('Subsquid is %s levels behind; %s available', subsquid_lag, subsquid_available) if subsquid_available < NODE_SYNC_LIMIT: use_node = True + elif self._config.node_only: + self._logger.debug('Using node anyway') + use_node = True # NOTE: Fetch last blocks from node if there are not enough realtime messages in queue if use_node and self.node_datasources: - sync_level = node_sync_level + sync_level = min(sync_level, node_sync_level) + self._logger.debug('Using node datasource; sync level: %s', sync_level) topics = set() for handler in self._config.handlers: typename = handler.contract.module_name topics.add(self.topics[typename][handler.name]) # FIXME: This is terribly inefficient (but okay for the last mile); see advanced example in web3.py docs. - for level in range(first_level, sync_level): + for level in range(first_level, sync_level + 1): # NOTE: Get random one every time level_logs = await self.random_node.get_logs( { diff --git a/tests/configs/demo_evm_events_node.yml b/tests/configs/demo_evm_events_node.yml new file mode 100644 index 000000000..51b83c648 --- /dev/null +++ b/tests/configs/demo_evm_events_node.yml @@ -0,0 +1,36 @@ +spec_version: 2.0 +package: demo_evm_events + +datasources: + ethscan: + kind: abi.etherscan + + mainnet_node: + kind: evm.node + url: https://eth-mainnet.g.alchemy.com/v2/${ALCHEMY_KEY:-''} + ws_url: wss://eth-mainnet.g.alchemy.com/v2/${ALCHEMY_KEY:-''} + + mainnet_subsquid: + kind: evm.subsquid + url: ${ARCHIVE_URL:-https://v2.archive.subsquid.io/network/ethereum-mainnet} + node: mainnet_node + http: + replay_path: ${DIPDUP_REPLAY_PATH:-} + +contracts: + eth_usdt: + kind: evm + address: 0xdac17f958d2ee523a2206206994597c13d831ec7 + typename: eth_usdt + +indexes: + eth_usdt_events: + kind: evm.subsquid.events + datasource: mainnet_subsquid + handlers: + - callback: on_transfer + contract: eth_usdt + name: Transfer + first_level: 18077421 + last_level: 18077421 + node_only: true diff --git a/tests/test_demos.py b/tests/test_demos.py index 765482c8c..57ea16d67 100644 --- a/tests/test_demos.py +++ b/tests/test_demos.py @@ -241,6 +241,7 @@ async def assert_run_dao() -> None: ('demo_raw.yml', 'demo_raw', 'init', partial(assert_init, 'demo_raw')), ('demo_evm_events.yml', 'demo_evm_events', 'run', assert_run_evm_events), ('demo_evm_events.yml', 'demo_evm_events', 'init', partial(assert_init, 'demo_evm_events')), + ('demo_evm_events_node.yml', 'demo_evm_events', 'run', assert_run_evm_events), ) From bfe1389fc97b9255c1eba99f39228aafb4dae5f1 Mon Sep 17 00:00:00 2001 From: Wizard1209 <34334729+Wizard1209@users.noreply.github.com> Date: Sat, 30 Sep 2023 14:25:35 -0300 Subject: [PATCH 03/11] Warn about unusual paths in timescaledb-ha image (#858) Co-authored-by: Vladimir Bobrikov Co-authored-by: Lev Gorodetskiy --- CHANGELOG.md | 2 ++ docs/6.deployment/1.database.md | 27 +++++++++++++++++++++++++++ src/dipdup/project.py | 3 ++- 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 38380eaae..63ab5faab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,8 @@ The format is based on [Keep a Changelog], and this project adheres to [Semantic ### Fixed + +- cli: Added warning for timescaledb-ha users - demos: Fixed decimal overflow in `demo_uniswap` project. - evm.node: Fixed incorrect log request parameters. 
- evm.subsquid.events: Fixed issue with determining the last level when syncing with node. diff --git a/docs/6.deployment/1.database.md b/docs/6.deployment/1.database.md index ef222e131..93edd17cb 100644 --- a/docs/6.deployment/1.database.md +++ b/docs/6.deployment/1.database.md @@ -24,3 +24,30 @@ Immune tables support for SQLite is experimental and requires `advanced.unsafe_s Latest PostgreSQL and TimescaleDB versions are recommended due to significant performance improvements in recent releases (see [Feature matrix](https://www.postgresql.org/about/featurematrix/) page). Usually it's okay to use different database engines for development and production, but be careful with SQL scripts and column types that can behave differently. + +## TimescaleDB + +TimescaleDB is a PostgreSQL extension that provides time-series data management capabilities. You can use it with DipDup as a drop-in replacement for PostgreSQL. + +You can choose `timescale/timescaledb` or `timescale/timescaledb-ha` Docker images when initializing DipDup project. + +::banner{type="warning"} +Be careful! Unlike other PostgreSQL images `timescale/timescaledb-ha` uses `/home/postgres/pgdata/data` as a persistent volume. +:: + +Use SQL scripts in `on_reindex` directory to prepare the database. First, create a hypertable replacing primary key with composite one: + +```sql [sql/on_reindex/00_prepare_db.sql] +{{ #include ../src/demo_uniswap/sql/on_reindex/00_prepare_db.sql }} +``` + +Now you can create a continuous aggregate on top of the hypertable: + +```sql [sql/on_reindex/01_create_mv_token_price.sql] +{{ #include ../src/demo_uniswap/sql/on_reindex/01_create_mv_token_price.sql }} +``` + +For more information visit the official TimescaleDB documentation: + +- [Hypertables](https://docs.timescale.com/use-timescale/latest/hypertables/) +- [Continuous aggregates](https://docs.timescale.com/use-timescale/continuous-aggregates/) diff --git a/src/dipdup/project.py b/src/dipdup/project.py index 3f4729951..cb1ce7551 100644 --- a/src/dipdup/project.py +++ b/src/dipdup/project.py @@ -214,8 +214,9 @@ def answers_from_terminal() -> Answers: ), default=0, ) - if 'timescale-ha' in answers['postgres_image']: + if 'timescaledb-ha' in answers['postgres_image']: answers['postgres_data_path'] = '/home/postgres/pgdata/data' + echo('`timescaledb-ha` Docker image uses `/home/postgres/pgdata/data` as a data path; generated files were updated accordingly.', fg='yellow') big_yellow_echo('Miscellaneous tunables; leave default values if unsure') From ab00c3bffe87c120b01aae4f6b40fdba94649bfb Mon Sep 17 00:00:00 2001 From: Wizard1209 <34334729+Wizard1209@users.noreply.github.com> Date: Sat, 30 Sep 2023 15:00:49 -0300 Subject: [PATCH 04/11] Increase initial Hasura connection retry count (#857) Co-authored-by: Vladimir Bobrikov Co-authored-by: Lev Gorodetskiy --- CHANGELOG.md | 4 ++-- src/dipdup/hasura.py | 9 +++++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 63ab5faab..8ef7e91fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,11 +12,11 @@ The format is based on [Keep a Changelog], and this project adheres to [Semantic ### Fixed - -- cli: Added warning for timescaledb-ha users +- cli: Use correct data path with timescaledb-ha Docker image. - demos: Fixed decimal overflow in `demo_uniswap` project. - evm.node: Fixed incorrect log request parameters. - evm.subsquid.events: Fixed issue with determining the last level when syncing with node. 
+- hasura: Increated retry count for initial connection (healthcheck). ## [7.0.0] - 2023-09-25 diff --git a/src/dipdup/hasura.py b/src/dipdup/hasura.py index 617f9673f..22021c1d5 100644 --- a/src/dipdup/hasura.py +++ b/src/dipdup/hasura.py @@ -210,9 +210,11 @@ def _get_source(self, metadata: dict[str, Any], name: str) -> dict[str, Any] | N else: return None - async def _hasura_request(self, endpoint: str, json: dict[str, Any] | None = None) -> dict[str, Any]: + async def _hasura_request(self, endpoint: str, json: dict[str, Any] | None = None, retry_count: int | None = None) -> dict[str, Any]: self._logger.debug('Sending `%s` request: %s', endpoint, orjson.dumps(json)) try: + if retry_count is not None: + self._http_config.retry_count, retry_count = retry_count, self._http_config.retry_count result = await self.request( method='get' if json is None else 'post', url=f'v1/{endpoint}', @@ -221,6 +223,9 @@ async def _hasura_request(self, endpoint: str, json: dict[str, Any] | None = Non ) except ClientResponseError as e: raise HasuraError(f'{e.status} {e.message}') from e + finally: + if retry_count is not None: + self._http_config.retry_count, retry_count = retry_count, self._http_config.retry_count self._logger.debug('Response: %s', result) if errors := result.get('error') or result.get('errors'): @@ -230,7 +235,7 @@ async def _hasura_request(self, endpoint: str, json: dict[str, Any] | None = Non async def _healthcheck(self) -> None: self._logger.info('Connecting to Hasura instance') - version_json = await self._hasura_request('version') + version_json = await self._hasura_request('version', retry_count=20) version = version_json['version'] if version.startswith('v1'): raise UnsupportedAPIError( From e3b6e64140fef2c5d84fe69e9d05581f1878298b Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Sat, 30 Sep 2023 15:17:55 -0300 Subject: [PATCH 05/11] Bump version 7.0.1 (#859) --- CHANGELOG.md | 5 ++-- docs/9.release-notes/1.v7.0.md | 6 ++--- pdm.lock | 48 +++++++++++++++++----------------- pyproject.toml | 2 +- requirements.dev.txt | 4 +-- requirements.txt | 4 +-- 6 files changed, 35 insertions(+), 34 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8ef7e91fd..b295118a0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning]. -## [Unreleased] +## [7.0.1] - 2023-09-30 ### Added @@ -1193,7 +1193,8 @@ This release contains no changes except for the version number. [semantic versioning]: https://semver.org/spec/v2.0.0.html -[Unreleased]: https://github.com/dipdup-io/dipdup/compare/7.0.0...HEAD +[Unreleased]: https://github.com/dipdup-io/dipdup/compare/7.0.1...HEAD +[7.0.1]: https://github.com/dipdup-io/dipdup/compare/7.0.0...7.0.1 [7.0.0]: https://github.com/dipdup-io/dipdup/compare/7.0.0rc5...7.0.0 [7.0.0rc5]: https://github.com/dipdup-io/dipdup/compare/7.0.0rc4...7.0.0rc5 [6.5.11]: https://github.com/dipdup-io/dipdup/compare/6.5.10...6.5.11 diff --git a/docs/9.release-notes/1.v7.0.md b/docs/9.release-notes/1.v7.0.md index a2221882d..51331a354 100644 --- a/docs/9.release-notes/1.v7.0.md +++ b/docs/9.release-notes/1.v7.0.md @@ -1,9 +1,9 @@ --- -title: 7.0.0 -description: DipDup 7.0.0 release notes +title: 7.0 +description: DipDup 7.0 release notes --- -# Release Notes: 7.0.0 +# Release Notes: 7.0 Welcome, developers! 
Today we introduce 7.0, the most significant major release for DipDup in terms of both changes and developer hours. The new framework architecture allows to easily integrate new blockchains and data sources. EVM support is the first step in this direction; more to come soon. Also we have focused on improving developer experience, so you can initialize, extend and maintain DipDup projects with minimal effort. Finally, updated docs and new demo projects won't let you get lost. diff --git a/pdm.lock b/pdm.lock index 3f05f9d48..b25994e8c 100644 --- a/pdm.lock +++ b/pdm.lock @@ -1130,44 +1130,44 @@ files = [ [[package]] name = "pydantic" -version = "1.10.12" +version = "1.10.13" requires_python = ">=3.7" summary = "Data validation and settings management using python type hints" dependencies = [ "typing-extensions>=4.2.0", ] files = [ - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = 
"sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [[package]] name = "pydantic" -version = "1.10.12" +version = "1.10.13" extras = ["email"] requires_python = ">=3.7" summary = "Data validation and settings management using python type hints" dependencies = [ "email-validator>=1.0.3", - "pydantic==1.10.12", + "pydantic==1.10.13", ] files = [ - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [[package]] @@ -1430,15 +1430,15 @@ files = [ [[package]] name = "ruamel-yaml" -version = "0.17.32" +version = "0.17.33" requires_python = ">=3" summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" dependencies = [ "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.12\"", ] files = [ - {file = "ruamel.yaml-0.17.32-py3-none-any.whl", 
hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, - {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, + {file = "ruamel.yaml-0.17.33-py3-none-any.whl", hash = "sha256:2080c7a02b8a30fb3c06727cdf3e254a64055eedf3aa2d17c2b669639c04971b"}, + {file = "ruamel.yaml-0.17.33.tar.gz", hash = "sha256:5c56aa0bff2afceaa93bffbfc78b450b7dc1e01d5edb80b3a570695286ae62b1"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index 1bfacacf1..0a57083fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "dipdup" description = "Modular framework for creating selective indexers and featureful backends for dapps" -version = "7.0.0" +version = "7.0.1" license = { text = "MIT" } authors = [ { name = "Lev Gorodetskii", email = "dipdup@drsr.io" }, diff --git a/requirements.dev.txt b/requirements.dev.txt index 18039cb2e..9031e8d03 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -72,7 +72,7 @@ prometheus-client==0.17.1 protobuf==4.24.1 pyarrow==12.0.1 pycryptodome==3.19.0 -pydantic==1.10.12 +pydantic==1.10.13 Pygments==2.16.1 pyhumps==3.8.0 pypika-tortoise==0.1.6 @@ -92,7 +92,7 @@ regex==2023.8.8 requests==2.31.0 rfc3339-validator==0.1.4 rlp==3.0.0 -ruamel-yaml==0.17.32 +ruamel-yaml==0.17.33 ruamel-yaml-clib==0.2.7 ruff==0.0.291 sentry-sdk==1.31.0 diff --git a/requirements.txt b/requirements.txt index 998116691..4be97fc56 100644 --- a/requirements.txt +++ b/requirements.txt @@ -61,7 +61,7 @@ prometheus-client==0.17.1 protobuf==4.24.1 pyarrow==12.0.1 pycryptodome==3.19.0 -pydantic==1.10.12 +pydantic==1.10.13 pyhumps==3.8.0 pypika-tortoise==0.1.6 pyrsistent==0.19.3 @@ -75,7 +75,7 @@ regex==2023.8.8 requests==2.31.0 rfc3339-validator==0.1.4 rlp==3.0.0 -ruamel-yaml==0.17.32 +ruamel-yaml==0.17.33 ruamel-yaml-clib==0.2.7 sentry-sdk==1.31.0 setuptools==68.2.2 From 4af837647ebf91477ed377e676b9ff647be8051a Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Sat, 30 Sep 2023 18:07:55 -0300 Subject: [PATCH 06/11] Update 7.0 release notes (#860) --- docs/9.release-notes/1.v7.0.md | 33 ++++++++++++++++++--------------- docs/9.release-notes/2.v6.2.md | 2 +- 2 files changed, 19 insertions(+), 16 deletions(-) diff --git a/docs/9.release-notes/1.v7.0.md b/docs/9.release-notes/1.v7.0.md index 51331a354..740794a1a 100644 --- a/docs/9.release-notes/1.v7.0.md +++ b/docs/9.release-notes/1.v7.0.md @@ -1,5 +1,5 @@ --- -title: 7.0 +title: 7.0.0 description: DipDup 7.0 release notes --- @@ -9,11 +9,11 @@ Welcome, developers! Today we introduce 7.0, the most significant major release Key highlights: -- Support for EVM-compatible blockchains -- Updated project package structure with configs and deployment recipes -- Storage layer improvements; a path to DipDup ORM - A Python 3.11 environment, updated Docker images, and better performance. -- New convenient CLI commands +- Support for EVM-compatible blockchains. +- Updated project package structure with configs and deployment recipes. +- Storage layer improvements; first steps on the path to DipDup ORM. - A Python 3.11 environment, updated Docker images, and better performance. +- New convenient CLI commands. Join our socials to discuss this release and ask any questions! @@ -23,17 +23,17 @@ Join our socials to discuss this release and ask any questions! Now DipDup supports EVM-compatible blockchains in addition to Tezos. The new index allows you to process contract events from Ethereum and other EVM-compatible networks.
DipDup uses historical data from [Subsquid Archives](https://www.subsquid.io/), real-time data from RPC nodes, and ABIs from [Etherscan](https://etherscan.io/). All you need is to define an index in your config and implement handlers for each event. -We have two demo projects for EVM: a very basic USDt price indexer and a more complex one for Uniswap V3 protocol. Run the `dipdup new` command, choose "EVM" on the first page, then a template to use. +We have two demo projects for EVM: a very basic USDt price indexer and a more complex one for Uniswap v3 protocol. Run the `dipdup new` command, choose "EVM" on the first page, then a template to use. ## Project package -The project package structure was updated become more consistent and easier to extend. Every package now have a fixed structure with directories for code, configs, SQL, etc. It allows discovery of package contents and increases the overall readability. +The project package structure was updated to become more consistent and easier to extend. Every package now has a fixed structure with directories for code, configs, SQL, etc. It allows discovery of package contents and increases the overall readability. The DipDup package now includes three new sections: - `models` section replaces `models.py` module as a source of DipDup ORM models. You can use any structure inside; models will be discovered automatically. - `configs` directory contains files to extend the root config with environment-specific settings like database connection or logging. Keep these settings separated from the root config to make it more readable and declarative. -- `deploy` directory contains Dockerfiles, Compose stack definitions and other deployment recipes. Also, there are `.env.default` files for each config in the `configs` directory. Use them as a template to never miss or commit an environment variable again. +- `deploy` directory contains Dockerfiles, Compose stack definitions and other deployment recipes. Also, there are `.env.default` files for each config in the `configs` directory. Use them as a template never to miss or commit an environment variable again. The new `dipdup package tree` command allows inspecting the package structure and ensuring that everything is in place. @@ -68,7 +68,7 @@ curl -Lsf https://dipdup.io/install.py | python3 dipdup new ``` -Package and config discovery was improved, so managing a project is a bit easier now. You can omit default the `dipdup.y[a]ml` filename. Also, a package can be a workdir now saving you from typing lengthy paths. +Package and config discovery was improved, so managing a project is a bit easier now. You can omit the default `dipdup.y[a]ml` filename. Also, a package root can be a workdir now (optional; creates a magic symlink). ```shell [Terminal] dipdup -c . -c configs/dipdup.sqlite.yaml config export @@ -82,7 +82,9 @@ dipdup self install --force --ref next dipdup self uninstall ``` -Starting 7.0 we use [PDM](https://pdm.fming.dev/) as a default package manager. It's a swiss-knife to deal with Python's packaging pain with great PEP compatibility and a lot of features. Also, it can run scripts from pyproject.toml as npm does. We have updated the default template to use PDM instead of Poetry and GNU Make and included some scripts. +Starting with 7.0, we use [PDM](https://pdm.fming.dev/) as the default package manager. It's a Swiss Army knife for Python's packaging pain, with great PEP compatibility and a lot of features. Also, it can run scripts from pyproject.toml as npm does.
DipDup projects now use a single, PEP-compliant pyproject.toml for everything code-related. Explore it to find useful scripts for development and deployment. + +Poetry and GNU Make were removed from the default template, but you can still use them if you like. See the [Installation](../1.getting-started/1.installation.md) page in the docs. @@ -92,7 +94,7 @@ DipDup projects now run on Python 3.11. Performance improvements introduced in t We have improved pre-fetching and caching data during indexing to increase the indexing speed. -Docker images are now based on Debian 12. They are simple, secure and easy to extend - just run pip as a default user. Alpine images are no longer published, but migration should be seamless. +Docker images are now based on Debian 12. They are simple, secure and easy to extend - just run pip as a default user. Alpine images are no longer published due to the lack of support in one of the libraries we depend on, but migration should be seamless. See the [Docker](../6.deployment/2.docker.md) page in the docs. @@ -101,15 +103,16 @@ See the [Docker](../6.deployment/2.docker.md) page in the docs. Here are some other notable changes not covered above: - `dipdup_meta` internal table was added to the schema. You can store arbitrary JSON there and use it in your code, but don't touch records with `dipdup_` prefix. Survives reindexing. See the [Internal tables](../1.getting-started/6.internal-tables.md) page. -- Multiple feature flags were added for experimental and rarely used features. `metadata_interface` and `crash_reporting` flags (both disabled by default) were removed from the config. See the [Feature flags](../5.advanced/2.feature-flags.md) page. -- Saved crash- and performance reports in the home directory can be viewed with new `report ls` and `report show` commands. If you want to report an issue and share the report, just drop us a message on GitHub or Discord. +- Multiple feature flags were added for experimental and rarely used features. See the [Feature flags](../5.advanced/2.feature-flags.md) page. The `metadata_interface` flag was removed; now it's always enabled. +- We no longer accept crash reports. Enabling them required adding `crash_reporting: True` to the config. Set up your own Sentry instance; it's easy! +- Saved crash and performance reports in the home directory can be viewed with the new `report ls` and `report show` commands. If you want to open an issue and share this report, just drop us a message on GitHub or Discord. - You can use the long syntax to configure logging: a mapping of logger names to log levels. See the [Logging](../6.deployment/5.logging.md) page. -- YAML files in templates and examples use `.yaml` extension instead of `.yml` as recommended by the YAML spec, but you can use any. +- YAML files in templates and examples use the `.yaml` extension instead of `.yml`, as recommended by the YAML and Compose specs, but you can use any. - `report` command has been renamed to `report ls` for consistency with other command groups. ## Future of DipDup 6.5 -The previous version of the framework is powering dozens of APIs in production. We want to give those projects enough time for migration or to just wait a bit while the current branch is being ironed out. Given that, _DipDup 6.5 will be supported until March 2024_. We will continue to release bugfixes and security updates until that date. You can find the old documentation [here](https://docs.dipdup.io/), but some pages may be outdated. If you've found such page or other issue, please drop us a message.
+The previous version of the framework is powering dozens of APIs in production. We want to give those projects enough time for migration or to wait a bit while the current branch is being ironed out. Given that, **DipDup 6.5 will be supported until March 2024**. We will continue to release bugfixes and security updates until that date. You can find the old documentation [here](https://docs.dipdup.io/), but some pages may be outdated. If you've found such a page or other issue, please drop us a message. Of course, we encourage you migrating to 7.0 soon to explore all the cool stuff in this release. diff --git a/docs/9.release-notes/2.v6.2.md b/docs/9.release-notes/2.v6.2.md index 995d015e6..a373c69c0 100644 --- a/docs/9.release-notes/2.v6.2.md +++ b/docs/9.release-notes/2.v6.2.md @@ -1,6 +1,6 @@ --- title: 6.2.0 -description: DipDup v6.2 release notes +description: DipDup 6.2 release notes --- ## What's New From b3e40f450712cacb004508aa06f61f1850ecd523 Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Fri, 6 Oct 2023 22:28:51 -0300 Subject: [PATCH 07/11] Bump urllib3 and other deps (#861) --- pdm.lock | 58 +++++++++++++++++++++---------------------- pyproject.toml | 2 +- requirements.dev.txt | 8 +++--- requirements.txt | 6 ++--- src/dipdup/hasura.py | 7 +++++- src/dipdup/project.py | 5 +++- 6 files changed, 47 insertions(+), 39 deletions(-) diff --git a/pdm.lock b/pdm.lock index b25994e8c..187473c1a 100644 --- a/pdm.lock +++ b/pdm.lock @@ -6,7 +6,7 @@ groups = ["default", "dev"] cross_platform = true static_urls = false lock_version = "4.3" -content_hash = "sha256:68a71293f269a694f3b42a75257e265097e719ec917eb58cb61a5aeaaae65297" +content_hash = "sha256:efee5be5a71d12cb011518dfb65eeafb79905f8b94869c0867032ec6a4fa45c5" [[package]] name = "aiohttp" @@ -406,7 +406,7 @@ files = [ [[package]] name = "datamodel-code-generator" -version = "0.21.5" +version = "0.22.0" requires_python = ">=3.7,<4.0" summary = "Datamodel Code Generator" dependencies = [ @@ -424,8 +424,8 @@ dependencies = [ "toml<1.0.0,>=0.10.0", ] files = [ - {file = "datamodel_code_generator-0.21.5-py3-none-any.whl", hash = "sha256:b7fe5e83404600a24fec2d653d9db361976c768790729f5c80edbfacdd6d74e5"}, - {file = "datamodel_code_generator-0.21.5.tar.gz", hash = "sha256:be971a96de2b4e3639fab6b979f1fd538872a5a86bc7b78d710eb3591e4bc3ac"}, + {file = "datamodel_code_generator-0.22.0-py3-none-any.whl", hash = "sha256:5cf8fc4fb6fe7aa750595a558cd4fcd43e36e862f40b0fa4cc123b4548b16a1e"}, + {file = "datamodel_code_generator-0.22.0.tar.gz", hash = "sha256:73ebcefa498e39d0f210923856cb4a498bacc3b7bdea140cca7324e25f5c581b"}, ] [[package]] @@ -1430,15 +1430,15 @@ files = [ [[package]] name = "ruamel-yaml" -version = "0.17.33" +version = "0.17.35" requires_python = ">=3" summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" dependencies = [ - "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.12\"", + "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.13\"", ] files = [ - {file = "ruamel.yaml-0.17.33-py3-none-any.whl", hash = "sha256:2080c7a02b8a30fb3c06727cdf3e254a64055eedf3aa2d17c2b669639c04971b"}, - {file = "ruamel.yaml-0.17.33.tar.gz", hash = "sha256:5c56aa0bff2afceaa93bffbfc78b450b7dc1e01d5edb80b3a570695286ae62b1"}, + {file = "ruamel.yaml-0.17.35-py3-none-any.whl", hash = "sha256:b105e3e6fc15b41fdb201ba1b95162ae566a4ef792b9f884c46b4ccc5513a87a"}, + {file = 
"ruamel.yaml-0.17.35.tar.gz", hash = "sha256:801046a9caacb1b43acc118969b49b96b65e8847f29029563b29ac61d02db61b"}, ] [[package]] @@ -1458,27 +1458,27 @@ files = [ [[package]] name = "ruff" -version = "0.0.291" +version = "0.0.292" requires_python = ">=3.7" summary = "An extremely fast Python linter, written in Rust." files = [ - {file = "ruff-0.0.291-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b97d0d7c136a85badbc7fd8397fdbb336e9409b01c07027622f28dcd7db366f2"}, - {file = "ruff-0.0.291-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6ab44ea607967171e18aa5c80335237be12f3a1523375fa0cede83c5cf77feb4"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a04b384f2d36f00d5fb55313d52a7d66236531195ef08157a09c4728090f2ef0"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b727c219b43f903875b7503a76c86237a00d1a39579bb3e21ce027eec9534051"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87671e33175ae949702774071b35ed4937da06f11851af75cd087e1b5a488ac4"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b75f5801547f79b7541d72a211949754c21dc0705c70eddf7f21c88a64de8b97"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b09b94efdcd162fe32b472b2dd5bf1c969fcc15b8ff52f478b048f41d4590e09"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d5b56bc3a2f83a7a1d7f4447c54d8d3db52021f726fdd55d549ca87bca5d747"}, - {file = "ruff-0.0.291-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13f0d88e5f367b2dc8c7d90a8afdcfff9dd7d174e324fd3ed8e0b5cb5dc9b7f6"}, - {file = "ruff-0.0.291-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b3eeee1b1a45a247758ecdc3ab26c307336d157aafc61edb98b825cadb153df3"}, - {file = "ruff-0.0.291-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6c06006350c3bb689765d71f810128c9cdf4a1121fd01afc655c87bab4fb4f83"}, - {file = "ruff-0.0.291-py3-none-musllinux_1_2_i686.whl", hash = "sha256:fd17220611047de247b635596e3174f3d7f2becf63bd56301fc758778df9b629"}, - {file = "ruff-0.0.291-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5383ba67ad360caf6060d09012f1fb2ab8bd605ab766d10ca4427a28ab106e0b"}, - {file = "ruff-0.0.291-py3-none-win32.whl", hash = "sha256:1d5f0616ae4cdc7a938b493b6a1a71c8a47d0300c0d65f6e41c281c2f7490ad3"}, - {file = "ruff-0.0.291-py3-none-win_amd64.whl", hash = "sha256:8a69bfbde72db8ca1c43ee3570f59daad155196c3fbe357047cd9b77de65f15b"}, - {file = "ruff-0.0.291-py3-none-win_arm64.whl", hash = "sha256:d867384a4615b7f30b223a849b52104214442b5ba79b473d7edd18da3cde22d6"}, - {file = "ruff-0.0.291.tar.gz", hash = "sha256:c61109661dde9db73469d14a82b42a88c7164f731e6a3b0042e71394c1c7ceed"}, + {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, + {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, + {file = 
"ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, + {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, + {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, + {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, + {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, + {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, + {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, ] [[package]] @@ -1785,12 +1785,12 @@ files = [ [[package]] name = "urllib3" -version = "2.0.4" +version = "2.0.6" requires_python = ">=3.7" summary = "HTTP library with thread-safe connection pooling, file post, and more." 
files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, + {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, + {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index 0a57083fa..e1282a174 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ classifiers = [ dependencies = [ # NOTE: Pin core dependencies to minor versions; tortoise-orm to exact one "asyncpg~=0.28.0", - "datamodel-code-generator~=0.21.1", + "datamodel-code-generator~=0.22.0", "pydantic~=1.10.11", "tortoise-orm==0.19.3", diff --git a/requirements.dev.txt b/requirements.dev.txt index 9031e8d03..ad00f7d23 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -23,7 +23,7 @@ charset-normalizer==3.2.0 click==8.1.7 coverage==7.3.0 cytoolz==0.12.2 -datamodel-code-generator==0.21.5 +datamodel-code-generator==0.22.0 dnspython==2.4.2 docker==6.1.3 docutils==0.20.1 @@ -92,9 +92,9 @@ regex==2023.8.8 requests==2.31.0 rfc3339-validator==0.1.4 rlp==3.0.0 -ruamel-yaml==0.17.33 +ruamel-yaml==0.17.35 ruamel-yaml-clib==0.2.7 -ruff==0.0.291 +ruff==0.0.292 sentry-sdk==1.31.0 setuptools==68.2.2 six==1.16.0 @@ -119,7 +119,7 @@ types-pytz==2023.3.1.1 types-tabulate==0.9.0.3 typing-extensions==4.7.1 tzlocal==5.0.1 -urllib3==2.0.4 +urllib3==2.0.6 watchdog==3.0.0 web3==6.10.0 websocket-client==1.6.1 diff --git a/requirements.txt b/requirements.txt index 4be97fc56..140e0127d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,7 @@ chardet==5.2.0 charset-normalizer==3.2.0 click==8.1.7 cytoolz==0.12.2 -datamodel-code-generator==0.21.5 +datamodel-code-generator==0.22.0 dnspython==2.4.2 email-validator==2.0.0.post2 eth-abi==4.2.1 @@ -75,7 +75,7 @@ regex==2023.8.8 requests==2.31.0 rfc3339-validator==0.1.4 rlp==3.0.0 -ruamel-yaml==0.17.33 +ruamel-yaml==0.17.35 ruamel-yaml-clib==0.2.7 sentry-sdk==1.31.0 setuptools==68.2.2 @@ -89,7 +89,7 @@ toolz==0.12.0 tortoise-orm==0.19.3 typing-extensions==4.7.1 tzlocal==5.0.1 -urllib3==2.0.4 +urllib3==2.0.6 web3==6.10.0 websockets==10.4 yarl==1.9.2 diff --git a/src/dipdup/hasura.py b/src/dipdup/hasura.py index 22021c1d5..2fe04b260 100644 --- a/src/dipdup/hasura.py +++ b/src/dipdup/hasura.py @@ -210,7 +210,12 @@ def _get_source(self, metadata: dict[str, Any], name: str) -> dict[str, Any] | N else: return None - async def _hasura_request(self, endpoint: str, json: dict[str, Any] | None = None, retry_count: int | None = None) -> dict[str, Any]: + async def _hasura_request( + self, + endpoint: str, + json: dict[str, Any] | None = None, + retry_count: int | None = None, + ) -> dict[str, Any]: self._logger.debug('Sending `%s` request: %s', endpoint, orjson.dumps(json)) try: if retry_count is not None: diff --git a/src/dipdup/project.py b/src/dipdup/project.py index cb1ce7551..9bfc2bd8b 100644 --- a/src/dipdup/project.py +++ b/src/dipdup/project.py @@ -216,7 +216,10 @@ def answers_from_terminal() -> Answers: ) if 'timescaledb-ha' in answers['postgres_image']: answers['postgres_data_path'] = '/home/postgres/pgdata/data' - echo('`timescaledb-ha` Docker image uses `/home/postgres/pgdata/data` as a data path; generated files were updated accordingly.', fg='yellow') + echo( + '`timescaledb-ha` Docker image 
uses `/home/postgres/pgdata/data` as a data path; generated files were updated accordingly.', fg='yellow', ) big_yellow_echo('Miscellaneous tunables; leave default values if unsure') From aee2240f281064a03ea3af26dd7b58c99888861d Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Mon, 9 Oct 2023 18:33:08 -0300 Subject: [PATCH 08/11] Update functions to manage database schema (#852) --- CHANGELOG.md | 10 ++ docs/5.advanced/1.reindexing.md | 14 +-- .../{3.internal-tables.md => 3.sql.md} | 39 +++++- docs/5.advanced/6.sql.md | 22 ---- src/dipdup/cli.py | 4 +- src/dipdup/database.py | 64 ++++++---- src/dipdup/sql/dipdup_approve.sql | 7 ++ .../{truncate_schema.sql => dipdup_wipe.sql} | 10 +- tests/test_demos.py | 116 +++++++++++++----- 9 files changed, 186 insertions(+), 100 deletions(-) rename docs/5.advanced/{3.internal-tables.md => 3.sql.md} (54%) delete mode 100644 docs/5.advanced/6.sql.md create mode 100644 src/dipdup/sql/dipdup_approve.sql rename src/dipdup/sql/{truncate_schema.sql => dipdup_wipe.sql} (86%) diff --git a/CHANGELOG.md b/CHANGELOG.md index b295118a0..9ab70d6c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning]. +## [Unreleased] + +### Added + +- database: Added `dipdup_wipe` and `dipdup_approve` SQL functions to the schema. + +### Fixed + +- cli: Fixed `schema wipe` command for SQLite databases. + ## [7.0.1] - 2023-09-30 ### Added diff --git a/docs/5.advanced/1.reindexing.md b/docs/5.advanced/1.reindexing.md index c7bdd75b6..741964f5f 100644 --- a/docs/5.advanced/1.reindexing.md +++ b/docs/5.advanced/1.reindexing.md @@ -7,13 +7,13 @@ description: "In some cases, DipDup can't proceed with indexing without a full w In some cases, DipDup can't proceed with indexing without a full wipe. Several reasons trigger reindexing: -| reason | description | -| ----------------- | ------------------------------------------------------------------------------------------------------------------------- | -| `manual` | Reindexing triggered manually from callback with `ctx.reindex`. | -| `migration` | Applied migration requires reindexing. Check release notes before switching between major DipDup versions to be prepared. | -| `rollback` | Reorg message received from TzKT can not be processed. | -| `config_modified` | One of the index configs has been modified. | -| `schema_modified` | Database schema has been modified. Try to avoid manual schema modifications in favor of [sql](../5.advanced/6.sql.md). | +| reason | description | +| ----------------- | ------------------------------------------------------------------------------------------------------------------------------ | +| `manual` | Reindexing triggered manually from callback with `ctx.reindex`. | +| `migration` | Applied migration requires reindexing. Check release notes before switching between major DipDup versions to be prepared. | +| `rollback` | Reorg message received from a datasource cannot be processed. | +| `config_modified` | One of the index configs has been modified. | +| `schema_modified` | Database schema has been modified. Try to avoid manual schema modifications in favor of [SQL scripts](../5.advanced/3.sql.md). | It is possible to configure the desired action for reindexing triggered by each specific reason.
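For reference, here is a minimal sketch of such a mapping in the `advanced` section of the config. The reason names follow the table above, while the action values (`exception`, `wipe`, `ignore`) are assumed for this sketch; check the config reference before relying on them:

```yaml
advanced:
  reindex:
    manual: wipe              # drop the schema and index from scratch
    migration: exception      # stop and require operator attention
    rollback: ignore          # continue indexing despite the unprocessable reorg
    config_modified: exception
    schema_modified: exception
```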
diff --git a/docs/5.advanced/3.internal-tables.md b/docs/5.advanced/3.sql.md similarity index 54% rename from docs/5.advanced/3.internal-tables.md rename to docs/5.advanced/3.sql.md index 2b938bec6..1a9b1170c 100644 --- a/docs/5.advanced/3.internal-tables.md +++ b/docs/5.advanced/3.sql.md @@ -1,11 +1,13 @@ --- -title: "Internal tables" -description: "This page describes the internal tables used by DipDup. They are created automatically and are not intended to be modified by the user. However, they can be useful for external monitoring and debugging." +title: "Advanced SQL" +description: "Put your *.sql scripts to dipdup_indexer/sql. You can run these scripts from any callback with ctx.execute_sql('name'). If name is a directory, each script it contains will be executed." --- -# Internal tables +# Advanced SQL -This page describes the internal tables used by DipDup. They are created automatically and are not intended to be modified by the user. However, they can be useful for external monitoring and debugging. +## Internal tables + +Several tables with the `dipdup_` prefix are created by DipDup automatically and are not intended to be modified by the user. However, they can be useful for external monitoring and debugging. | table | description | |:-------------------------- |:----------------------------------------------------------------------------------------------------------------------------------------- | @@ -15,8 +17,8 @@ This page describes the internal tables used by DipDup. They are created automat | `dipdup_contract` | Info about contracts used by all indexes, including ones added in runtime. | | `dipdup_model_update` | Service table to store model diffs for database rollback. Configured by `advanced.rollback_depth` | | `dipdup_meta` | Arbitrary key-value storage for DipDup internal use. Survives reindexing. You can use it too, but don't touch keys with `dipdup_` prefix. | -| `dipdup_contract_metadata` | See Metadata interface page | -| `dipdup_token_metadata` | See Metadata interface page | +| `dipdup_contract_metadata` | See [Metadata interface](/docs/advanced/metadata-interface) | +| `dipdup_token_metadata` | See [Metadata interface](/docs/advanced/metadata-interface) | See [`dipdup.models` module](https://github.com/dipdup-io/dipdup/blob/next/src/dipdup/models/__init__.py) for exact table definitions. @@ -32,3 +34,28 @@ SELECT name, status FROM dipdup_index; -- Get last reindex time SELECT created_at FROM dipdup_schema WHERE name = 'public'; ``` + +## Scripts + +Put your `*.sql` scripts to `{{ project.package }}/sql`. You can run these scripts from any callback with `ctx.execute_sql('name')`. If `name` is a directory, each script it contains will be executed. + +Scripts are executed without being wrapped with SQL transactions. It's generally a good idea to avoid touching table data in scripts. + +By default, an empty `sql/` directory is generated for every hook in config during init. Remove the `ctx.execute_sql` call from the hook callback to avoid executing them.
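To make this concrete, here is a hypothetical script such a directory might contain. The view and table names below are invented for illustration (PostgreSQL syntax assumed) and are not part of any demo project:

```sql
-- Hypothetical {{ project.package }}/sql/on_reindex/00_create_views.sql
-- Scripts run outside a transaction, so idempotent DDL is the safest content.
CREATE OR REPLACE VIEW token_holder_count AS
SELECT token_id, COUNT(*) AS holders
FROM token_holder
GROUP BY token_id;
```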
+ +```python +# Execute all scripts in sql/my_hook directory +await ctx.execute_sql('my_hook') + +# Execute a single script +await ctx.execute_sql('my_hook/my_script.sql') +``` + +## Managing schema + +When using PostgreSQL as the database engine, you can use the `dipdup_approve` and `dipdup_wipe` functions to manage schema state from the SQL console if needed: + +```sql +SELECT dipdup_approve('public'); +SELECT dipdup_wipe('public'); +``` diff --git a/docs/5.advanced/6.sql.md b/docs/5.advanced/6.sql.md deleted file mode 100644 index 8580e4b65..000000000 --- a/docs/5.advanced/6.sql.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -title: "SQL scripts" -description: "Put your *.sql scripts to dipdup_indexer/sql. You can run these scripts from any callback with ctx.execute_sql('name'). If name is a directory, each script it contains will be executed." ---- - -# SQL scripts - -Put your `*.sql` scripts to `{{ project.package }}/sql`. You can run these scripts from any callback with `ctx.execute_sql('name')`. If `name` is a directory, each script it contains will be executed. - -Scripts are executed without being wrapped with SQL transactions. It's generally a good idea to avoid touching table data in scripts. - -By default, an empty `sql/` directory is generated for every hook in config during init. Remove `ctx.execute_sql` call from hook callback to avoid executing them. - -## Usage - -```python -# Execute all scripts in sql/my_hook directory -await ctx.execute_sql('my_hook') - -# Execute a single script -await ctx.execute_sql('my_hook/my_script.sql') -``` diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py index a2a32ce8a..9d49d9657 100644 --- a/src/dipdup/cli.py +++ b/src/dipdup/cli.py @@ -507,7 +507,9 @@ async def schema_wipe(ctx: click.Context, immune: bool, force: bool) -> None: conn = get_connection() await wipe_schema( conn=conn, - schema_name=config.database.schema_name, + schema_name=config.database.path + if isinstance(config.database, SqliteDatabaseConfig) + else config.database.schema_name, immune_tables=immune_tables, ) diff --git a/src/dipdup/database.py b/src/dipdup/database.py index 76f1c6c10..7777fd49d 100644 --- a/src/dipdup/database.py +++ b/src/dipdup/database.py @@ -194,44 +194,54 @@ async def generate_schema( conn: SupportedClient, name: str, ) -> None: - if isinstance(conn, AsyncpgClient): - await pg_create_schema(conn, name) + if isinstance(conn, SqliteClient): + await Tortoise.generate_schemas() + elif isinstance(conn, AsyncpgClient): + await _pg_create_schema(conn, name) + await Tortoise.generate_schemas() + await _pg_create_functions(conn) + await _pg_create_views(conn) + else: + raise NotImplementedError - await Tortoise.generate_schemas() - if isinstance(conn, AsyncpgClient): - # NOTE: Create a view for monitoring head status - sql_path = Path(__file__).parent / 'sql' / 'dipdup_head_status.sql' - # TODO: Configurable interval - await execute_sql(conn, sql_path, HEAD_STATUS_TIMEOUT) +async def _pg_create_functions(conn: AsyncpgClient) -> None: + for fn in ( + 'dipdup_approve.sql', + 'dipdup_wipe.sql', + ): + sql_path = Path(__file__).parent / 'sql' / fn + await execute_sql(conn, sql_path) -async def _wipe_schema_postgres( +async def _pg_create_views(conn: AsyncpgClient) -> None: + sql_path = Path(__file__).parent / 'sql' / 'dipdup_head_status.sql' + # TODO: Configurable interval + await execute_sql(conn, sql_path, HEAD_STATUS_TIMEOUT) + + +async def _pg_wipe_schema( conn: AsyncpgClient, schema_name: str, immune_tables: set[str], ) -> None: immune_schema_name = f'{schema_name}_immune' -
NOTE: Create a truncate_schema function to trigger cascade deletion - sql_path = Path(__file__).parent / 'sql' / 'truncate_schema.sql' - await execute_sql(conn, sql_path, schema_name, immune_schema_name) - # NOTE: Move immune tables to a separate schema - it's free! if immune_tables: - await pg_create_schema(conn, immune_schema_name) + await _pg_create_schema(conn, immune_schema_name) for table in immune_tables: - await pg_move_table(conn, table, schema_name, immune_schema_name) + await _pg_move_table(conn, table, schema_name, immune_schema_name) - await conn.execute_script(f"SELECT truncate_schema('{schema_name}')") + await conn.execute_script(f"SELECT dipdup_wipe('{schema_name}')") if immune_tables: for table in immune_tables: - await pg_move_table(conn, table, immune_schema_name, schema_name) - await pg_drop_schema(conn, immune_schema_name) + await _pg_move_table(conn, table, immune_schema_name, schema_name) + await _pg_drop_schema(conn, immune_schema_name) -async def _wipe_schema_sqlite( +async def _sqlite_wipe_schema( conn: SqliteClient, path: str, immune_tables: set[str], @@ -245,10 +255,10 @@ async def _wipe_schema_sqlite( await conn.execute_script(f'ATTACH DATABASE "{immune_path}" AS {namespace}') # NOTE: Copy immune tables to the new database. - master_query = 'SELECT name, type FROM sqlite_master' + master_query = 'SELECT name FROM sqlite_master WHERE type = "table"' result = await conn.execute_query(master_query) - for name, type_ in result[1]: - if type_ != 'table' or name not in immune_tables: + for name in result[1]: + if name not in immune_tables: continue expr = f'CREATE TABLE {namespace}.{name} AS SELECT * FROM {name}' @@ -271,23 +281,23 @@ async def wipe_schema( """Truncate schema preserving immune tables. Executes in a transaction""" async with conn._in_transaction() as conn: if isinstance(conn, SqliteClient): - await _wipe_schema_sqlite(conn, schema_name, immune_tables) + await _sqlite_wipe_schema(conn, schema_name, immune_tables) elif isinstance(conn, AsyncpgClient): - await _wipe_schema_postgres(conn, schema_name, immune_tables) + await _pg_wipe_schema(conn, schema_name, immune_tables) else: raise NotImplementedError -async def pg_create_schema(conn: AsyncpgClient, name: str) -> None: +async def _pg_create_schema(conn: AsyncpgClient, name: str) -> None: """Create PostgreSQL schema if not exists""" await conn.execute_script(f'CREATE SCHEMA IF NOT EXISTS {name}') -async def pg_drop_schema(conn: AsyncpgClient, name: str) -> None: +async def _pg_drop_schema(conn: AsyncpgClient, name: str) -> None: await conn.execute_script(f'DROP SCHEMA IF EXISTS {name}') -async def pg_move_table(conn: AsyncpgClient, name: str, schema: str, new_schema: str) -> None: +async def _pg_move_table(conn: AsyncpgClient, name: str, schema: str, new_schema: str) -> None: """Move table from one schema to another""" await conn.execute_script(f'ALTER TABLE {schema}.{name} SET SCHEMA {new_schema}') diff --git a/src/dipdup/sql/dipdup_approve.sql b/src/dipdup/sql/dipdup_approve.sql new file mode 100644 index 000000000..5691779e9 --- /dev/null +++ b/src/dipdup/sql/dipdup_approve.sql @@ -0,0 +1,7 @@ +CREATE OR REPLACE FUNCTION dipdup_approve(schema_name VARCHAR) RETURNS void AS $$ +BEGIN + UPDATE dipdup_index SET config_hash = null; + UPDATE dipdup_schema SET reindex = null, hash = null; + RETURN; +END; +$$ LANGUAGE plpgsql; diff --git a/src/dipdup/sql/truncate_schema.sql b/src/dipdup/sql/dipdup_wipe.sql similarity index 86% rename from src/dipdup/sql/truncate_schema.sql rename to 
src/dipdup/sql/dipdup_wipe.sql index 01f19f7ac..965fc951a 100644 --- a/src/dipdup/sql/truncate_schema.sql +++ b/src/dipdup/sql/dipdup_wipe.sql @@ -1,5 +1,5 @@ -- source of inspiration: https://stackoverflow.com/a/11462481 -CREATE OR REPLACE FUNCTION truncate_schema(schema_name VARCHAR) RETURNS void AS $$ +CREATE OR REPLACE FUNCTION dipdup_wipe(schema_name VARCHAR) RETURNS void AS $$ DECLARE rec RECORD; BEGIN @@ -63,14 +63,6 @@ BEGIN WHEN others THEN END; END LOOP; - -- BEGIN - -- CREATE EXTENSION IF NOT EXISTS pgcrypto; - -- CREATE EXTENSION IF NOT EXISTS timescaledb; - -- EXCEPTION - -- WHEN OTHERS THEN - -- NULL; - -- END; - RETURN; END; $$ LANGUAGE plpgsql; diff --git a/tests/test_demos.py b/tests/test_demos.py index 57ea16d67..93d810cb9 100644 --- a/tests/test_demos.py +++ b/tests/test_demos.py @@ -4,6 +4,7 @@ from collections.abc import AsyncIterator from collections.abc import Awaitable from collections.abc import Callable +from contextlib import AbstractAsyncContextManager from contextlib import AsyncExitStack from contextlib import asynccontextmanager from decimal import Decimal @@ -13,6 +14,7 @@ import pytest +from dipdup.database import get_connection from dipdup.database import tortoise_wrapper from dipdup.exceptions import FrameworkException from dipdup.models.tezos_tzkt import TzktOperationType @@ -21,50 +23,58 @@ @asynccontextmanager -async def run_dipdup_demo(config: str, package: str, cmd: str = 'run') -> AsyncIterator[Path]: - config_path = CONFIGS_PATH / config - dipdup_pkg_path = SRC_PATH / 'dipdup' - demo_pkg_path = SRC_PATH / package - sqlite_config_path = Path(__file__).parent / 'configs' / 'sqlite.yaml' - - with tempfile.TemporaryDirectory() as tmp_root_path: +async def tmp_project(config_path: Path, package: str, exists: bool) -> AsyncIterator[tuple[Path, dict[str, str]]]: + with tempfile.TemporaryDirectory() as tmp_package_path: # NOTE: Symlink configs, packages and executables - tmp_config_path = Path(tmp_root_path) / 'dipdup.yaml' + tmp_config_path = Path(tmp_package_path) / 'dipdup.yaml' os.symlink(config_path, tmp_config_path) - tmp_bin_path = Path(tmp_root_path) / 'bin' + tmp_bin_path = Path(tmp_package_path) / 'bin' tmp_bin_path.mkdir() for executable in ('dipdup', 'datamodel-codegen'): if (executable_path := which(executable)) is None: raise FrameworkException(f'Executable `{executable}` not found') os.symlink(executable_path, tmp_bin_path / executable) - tmp_dipdup_pkg_path = Path(tmp_root_path) / 'dipdup' - os.symlink(dipdup_pkg_path, tmp_dipdup_pkg_path) + os.symlink( + SRC_PATH / 'dipdup', + Path(tmp_package_path) / 'dipdup', + ) # NOTE: Ensure that `run` uses existing package and `init` creates a new one - if cmd == 'run': - tmp_demo_pkg_path = Path(tmp_root_path) / package - os.symlink(demo_pkg_path, tmp_demo_pkg_path) + if exists: + os.symlink( + SRC_PATH / package, + Path(tmp_package_path) / package, + ) # NOTE: Prepare environment env = { **os.environ, 'PATH': str(tmp_bin_path), - 'PYTHONPATH': str(tmp_root_path), + 'PYTHONPATH': str(tmp_package_path), 'DIPDUP_TEST': '1', } - subprocess.run( - f'dipdup -c {tmp_config_path} -c {sqlite_config_path} {cmd}', - cwd=tmp_root_path, - check=True, - shell=True, - env=env, - capture_output=True, - ) + yield Path(tmp_package_path), env - yield Path(tmp_root_path) + +async def run_in_tmp( + tmp_path: Path, + env: dict[str, str], + *cmd: str, +) -> None: + sqlite_config_path = Path(__file__).parent / 'configs' / 'sqlite.yaml' + tmp_config_path = Path(tmp_path) / 'dipdup.yaml' + + subprocess.run( + f'dipdup -c 
{tmp_config_path} -c {sqlite_config_path} {" ".join(cmd)}', + cwd=tmp_path, + check=True, + shell=True, + env=env, + capture_output=True, + ) async def assert_run_token() -> None: @@ -246,21 +256,71 @@ async def assert_run_dao() -> None: @pytest.mark.parametrize(test_args, test_params) -async def test_demos( +async def test_run_init( config: str, package: str, cmd: str, assert_fn: Callable[[], Awaitable[None]], ) -> None: + config_path = CONFIGS_PATH / config async with AsyncExitStack() as stack: - tmp_root_path = await stack.enter_async_context( - run_dipdup_demo(config, package, cmd), + tmp_package_path, env = await stack.enter_async_context( + tmp_project( + config_path, + package, + exists=cmd != 'init', + ), ) + await run_in_tmp(tmp_package_path, env, cmd) await stack.enter_async_context( tortoise_wrapper( - f'sqlite://{tmp_root_path}/db.sqlite3', + f'sqlite://{tmp_package_path}/db.sqlite3', f'{package}.models', ) ) await assert_fn() + + +async def _count_tables() -> int: + conn = get_connection() + _, res = await conn.execute_query('SELECT count(name) FROM sqlite_master WHERE type = "table";') + return int(res[0][0]) + + +async def test_schema() -> None: + package = 'demo_token' + config_path = CONFIGS_PATH / f'{package}.yml' + + async with AsyncExitStack() as stack: + tmp_package_path, env = await stack.enter_async_context( + tmp_project( + config_path, + package, + exists=True, + ), + ) + + def tortoise() -> AbstractAsyncContextManager[None]: + return tortoise_wrapper( + f'sqlite://{tmp_package_path}/db.sqlite3', + f'{package}.models', + ) + + async with tortoise(): + conn = get_connection() + assert (await _count_tables()) == 0 + + await run_in_tmp(tmp_package_path, env, 'schema', 'init') + + async with tortoise(): + conn = get_connection() + assert (await _count_tables()) == 10 + await conn.execute_script('CREATE TABLE test (id INTEGER PRIMARY KEY);') + assert (await _count_tables()) == 11 + + await run_in_tmp(tmp_package_path, env, 'schema', 'wipe', '--force') + + async with tortoise(): + conn = get_connection() + assert (await _count_tables()) == 0 From b930868f58c411686b0ad04bb9b2efda2ae22abf Mon Sep 17 00:00:00 2001 From: Igor Sereda Date: Wed, 11 Oct 2023 03:03:28 +0700 Subject: [PATCH 09/11] TzktDatasource get_transactions() fix (#865) --- src/dipdup/datasources/tezos_tzkt.py | 6 ++++-- src/dipdup/indexes/tezos_tzkt_operations/matcher.py | 2 ++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/dipdup/datasources/tezos_tzkt.py b/src/dipdup/datasources/tezos_tzkt.py index ee4a891da..288662b42 100644 --- a/src/dipdup/datasources/tezos_tzkt.py +++ b/src/dipdup/datasources/tezos_tzkt.py @@ -713,14 +713,16 @@ async def get_transactions( params = self._get_request_params( first_level=first_level, last_level=last_level, - offset=offset, limit=limit, select=TRANSACTION_OPERATION_FIELDS, values=True, - cursor=True, sort='level', status='applied', ) + + if offset is not None: + params['id.ge'] = offset + if addresses and not code_hashes: params[f'{field}.in'] = ','.join(addresses) elif code_hashes and not addresses: diff --git a/src/dipdup/indexes/tezos_tzkt_operations/matcher.py b/src/dipdup/indexes/tezos_tzkt_operations/matcher.py index 855dd5764..92cf2231a 100644 --- a/src/dipdup/indexes/tezos_tzkt_operations/matcher.py +++ b/src/dipdup/indexes/tezos_tzkt_operations/matcher.py @@ -208,6 +208,8 @@ def match_operation_subgroup( transaction = handler[2][-1] if isinstance(transaction, TzktOperationData): id_list.append(transaction.id) + elif isinstance(transaction, 
TzktOrigination): + id_list.append(transaction.data.id) elif isinstance(transaction, TzktTransaction): id_list.append(transaction.data.id) else: From 7d0a8668e47c8b0300620db02570836d5a1e7239 Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Tue, 10 Oct 2023 17:17:18 -0300 Subject: [PATCH 10/11] TzKT fix: add changelog, replace ge with gt (#866) --- CHANGELOG.md | 1 + src/dipdup/datasources/tezos_tzkt.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ab70d6c7..e619e7f1a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ The format is based on [Keep a Changelog], and this project adheres to [Semantic ### Fixed - cli: Fixed `schema wipe` command for SQLite databases. +- tezos.tzkt: Fixed regression in `get_transactions` method pagination. ## [7.0.1] - 2023-09-30 diff --git a/src/dipdup/datasources/tezos_tzkt.py b/src/dipdup/datasources/tezos_tzkt.py index 288662b42..00fd4d479 100644 --- a/src/dipdup/datasources/tezos_tzkt.py +++ b/src/dipdup/datasources/tezos_tzkt.py @@ -713,15 +713,17 @@ async def get_transactions( params = self._get_request_params( first_level=first_level, last_level=last_level, + # NOTE: This is intentional + offset=None, limit=limit, select=TRANSACTION_OPERATION_FIELDS, values=True, sort='level', status='applied', ) - + # TODO: TzKT doesn't support sort+cr currently if offset is not None: - params['id.ge'] = offset + params['id.gt'] = offset if addresses and not code_hashes: params[f'{field}.in'] = ','.join(addresses) From f1ad9cb983a1be6a7de6cde545abf242e80a461a Mon Sep 17 00:00:00 2001 From: Lev Gorodetskiy Date: Tue, 10 Oct 2023 19:45:44 -0300 Subject: [PATCH 11/11] Bump version 7.0.2 (#869) --- .github/workflows/docs.yml | 2 +- CHANGELOG.md | 8 +- pdm.lock | 84 +++++++++---------- pyproject.toml | 2 +- requirements.dev.txt | 8 +- requirements.txt | 6 +- scripts/dump_schema.py | 2 +- src/demo_uniswap/models/repo.py | 4 +- src/dipdup/database.py | 6 +- src/dipdup/datasources/evm_subsquid.py | 2 +- .../indexes/tezos_tzkt_operations/parser.py | 2 +- src/dipdup/performance.py | 4 +- src/dipdup/project.py | 2 +- .../projects/demo_uniswap/models/repo.py.j2 | 2 +- src/dipdup/scheduler.py | 10 +-- tests/profile_abi_decoding.py | 2 +- tests/test_hasura.py | 2 +- tests/test_introspection.py | 26 +++--- 18 files changed, 90 insertions(+), 84 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index ff7c66ef8..5cf16b51e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -17,7 +17,7 @@ on: env: FRONTEND_BRANCH: master - GH_TOKEN: ${{ secrets.API_TOKEN_EXT }} + GITHUB_TOKEN: ${{ secrets.DOCS_GITHUB_TOKEN }} jobs: docs: diff --git a/CHANGELOG.md b/CHANGELOG.md index e619e7f1a..451da2348 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning]. -## [Unreleased] +## [7.0.2] - 2023-10-10 ### Added @@ -15,6 +15,12 @@ The format is based on [Keep a Changelog], and this project adheres to [Semantic - cli: Fixed `schema wipe` command for SQLite databases. - tezos.tzkt: Fixed regression in `get_transactions` method pagination. +## [6.5.13] - 2023-10-10 + +### Fixed + +- tzkt: Fixed regression in `get_transactions` method pagination. 
+
 ## [7.0.1] - 2023-09-30

 ### Added

diff --git a/pdm.lock b/pdm.lock
index 187473c1a..a2c96f1ac 100644
--- a/pdm.lock
+++ b/pdm.lock
@@ -10,7 +10,7 @@ content_hash = "sha256:efee5be5a71d12cb011518dfb65eeafb79905f8b94869c0867032ec6a

 [[package]]
 name = "aiohttp"
-version = "3.8.5"
+version = "3.8.6"
 requires_python = ">=3.6"
 summary = "Async http client/server framework (asyncio)"
 dependencies = [
@@ -23,22 +23,22 @@ dependencies = [
     "yarl<2.0,>=1.0",
 ]
 files = [
-    {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"},
-    {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"},
-    {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"},
-    {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"},
-    {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"},
-    {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"},
+    {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"},
+    {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"},
+    {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"},
+    {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"},
+    {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"},
+    {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"},
 ]

 [[package]]
@@ -406,7 +406,7 @@ files = [

 [[package]]
 name = "datamodel-code-generator"
-version = "0.22.0"
+version = "0.22.1"
 requires_python = ">=3.7,<4.0"
 summary = "Datamodel Code Generator"
 dependencies = [
@@ -420,12 +420,12 @@ dependencies = [
     "openapi-spec-validator<=0.5.7,>=0.2.8",
     "packaging",
     "prance>=0.18.2",
-    "pydantic[email]<3.0,>=1.10.0; python_version >= \"3.11\" and python_version < \"4.0\"",
+    "pydantic[email]!=2.4.0,<3.0,>=1.10.0; python_version >= \"3.11\" and python_version < \"4.0\"",
     "toml<1.0.0,>=0.10.0",
 ]
 files = [
-    {file = "datamodel_code_generator-0.22.0-py3-none-any.whl", hash = "sha256:5cf8fc4fb6fe7aa750595a558cd4fcd43e36e862f40b0fa4cc123b4548b16a1e"},
-    {file = "datamodel_code_generator-0.22.0.tar.gz", hash = "sha256:73ebcefa498e39d0f210923856cb4a498bacc3b7bdea140cca7324e25f5c581b"},
+    {file = "datamodel_code_generator-0.22.1-py3-none-any.whl", hash = "sha256:ac1fbc4fa778c2a43f740740fd352ca4300f705044e112a0023af8d04f0b61af"},
+    {file = "datamodel_code_generator-0.22.1.tar.gz", hash = "sha256:48c8ce0b38b575bcc573237bb3faab696b072aa131b3f008c848d2c3b24a4417"},
 ]

 [[package]]
@@ -876,7 +876,7 @@ files = [

 [[package]]
 name = "mypy"
-version = "1.5.1"
+version = "1.6.0"
 requires_python = ">=3.8"
 summary = "Optional static typing for Python"
 dependencies = [
@@ -884,13 +884,13 @@ dependencies = [
     "typing-extensions>=4.1.0",
 ]
 files = [
-    {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"},
-    {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"},
-    {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"},
-    {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"},
-    {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"},
-    {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"},
-    {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"},
+    {file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"},
+    {file = "mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"},
+    {file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"},
+    {file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"},
+    {file = "mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"},
+    {file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"},
+    {file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"},
 ]

 [[package]]
@@ -954,21 +954,21 @@ files = [

 [[package]]
 name = "orjson"
-version = "3.9.7"
-requires_python = ">=3.7"
+version = "3.9.8"
+requires_python = ">=3.8"
 summary = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
 files = [
-    {file = "orjson-3.9.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1f8b47650f90e298b78ecf4df003f66f54acdba6a0f763cc4df1eab048fe3738"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f738fee63eb263530efd4d2e9c76316c1f47b3bbf38c1bf45ae9625feed0395e"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38e34c3a21ed41a7dbd5349e24c3725be5416641fdeedf8f56fcbab6d981c900"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21a3344163be3b2c7e22cef14fa5abe957a892b2ea0525ee86ad8186921b6cf0"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23be6b22aab83f440b62a6f5975bcabeecb672bc627face6a83bc7aeb495dc7e"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5205ec0dfab1887dd383597012199f5175035e782cdb013c542187d280ca443"},
-    {file = "orjson-3.9.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8769806ea0b45d7bf75cad253fba9ac6700b7050ebb19337ff6b4e9060f963fa"},
-    {file = "orjson-3.9.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f9e01239abea2f52a429fe9d95c96df95f078f0172489d691b4a848ace54a476"},
-    {file = "orjson-3.9.7-cp311-none-win32.whl", hash = "sha256:8bdb6c911dae5fbf110fe4f5cba578437526334df381b3554b6ab7f626e5eeca"},
-    {file = "orjson-3.9.7-cp311-none-win_amd64.whl", hash = "sha256:9d62c583b5110e6a5cf5169ab616aa4ec71f2c0c30f833306f9e378cf51b6c86"},
-    {file = "orjson-3.9.7.tar.gz", hash = "sha256:85e39198f78e2f7e054d296395f6c96f5e02892337746ef5b6a1bf3ed5910142"},
+    {file = "orjson-3.9.8-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8a1c92f467f5fd0f8fb79273006b563364b1e45667b3760423498348dc2e22fa"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:742d4d16d66579ffff4b2048a8de4a0b03d731847233e92c4edd418a9c582d0f"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d1aab08b373232f568ea9ae048f9f77e09f389068afee6dd44bb6140e2c3ea3"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ed63273ec4ecdd7865e9d984d65a749c0d780882cf9dde6ab2bc6323f6471a"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d23edcb32383f3d86b2f4914f9825ce2d67625abd34be6e5ed1f59ec30127b7a"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9bcd3a48b260d3dfe68b8ce93d11f99a70bd4c908efe22d195a1b1dcfb15ac2"},
+    {file = "orjson-3.9.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9ce982f3c1df83f7dc74f3b2690605470ff4790d12558e44359f01e822c5cb08"},
+    {file = "orjson-3.9.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4433dd903d5b022a64e9dd1dca94f08ab04d5d928a0ecd33dd46110468960879"},
+    {file = "orjson-3.9.8-cp311-none-win32.whl", hash = "sha256:a119c73520192c2882d0549151b9cdd65e0bb5396bedf8951ba5f70d6a873879"},
+    {file = "orjson-3.9.8-cp311-none-win_amd64.whl", hash = "sha256:764306f6370e6c76cbbf3139dd9b05be9c4481ee0b15966bd1907827a5777216"},
+    {file = "orjson-3.9.8.tar.gz", hash = "sha256:ed1adc6db9841974170a5195b827ee4e392b1e8ca385b19fcdc3248489844059"},
 ]

 [[package]]
diff --git a/pyproject.toml b/pyproject.toml
index e1282a174..d53fa72dc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "dipdup"
 description = "Modular framework for creating selective indexers and featureful backends for dapps"
-version = "7.0.1"
+version = "7.0.2"
 license = { text = "MIT" }
 authors = [
     { name = "Lev Gorodetskii", email = "dipdup@drsr.io" },
diff --git a/requirements.dev.txt b/requirements.dev.txt
index ad00f7d23..642cc6d9a 100644
--- a/requirements.dev.txt
+++ b/requirements.dev.txt
@@ -1,7 +1,7 @@
 # This file is @generated by PDM.
 # Please do not edit it manually.

-aiohttp==3.8.5
+aiohttp==3.8.6
 aiolimiter==1.1.0
 aiosignal==1.3.1
 aiosqlite==0.17.0
@@ -23,7 +23,7 @@ charset-normalizer==3.2.0
 click==8.1.7
 coverage==7.3.0
 cytoolz==0.12.2
-datamodel-code-generator==0.22.0
+datamodel-code-generator==0.22.1
 dnspython==2.4.2
 docker==6.1.3
 docutils==0.20.1
@@ -54,12 +54,12 @@ lru-dict==1.2.0
 MarkupSafe==2.1.3
 msgpack==1.0.5
 multidict==6.0.4
-mypy==1.5.1
+mypy==1.6.0
 mypy-extensions==1.0.0
 numpy==1.25.2
 openapi-schema-validator==0.4.4
 openapi-spec-validator==0.5.7
-orjson==3.9.7
+orjson==3.9.8
 packaging==23.1
 parsimonious==0.9.0
 pathable==0.4.3
diff --git a/requirements.txt b/requirements.txt
index 140e0127d..7e9cbb23d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
 # This file is @generated by PDM.
 # Please do not edit it manually.

-aiohttp==3.8.5
+aiohttp==3.8.6
 aiolimiter==1.1.0
 aiosignal==1.3.1
 aiosqlite==0.17.0
@@ -20,7 +20,7 @@ chardet==5.2.0
 charset-normalizer==3.2.0
 click==8.1.7
 cytoolz==0.12.2
-datamodel-code-generator==0.22.0
+datamodel-code-generator==0.22.1
 dnspython==2.4.2
 email-validator==2.0.0.post2
 eth-abi==4.2.1
@@ -50,7 +50,7 @@ mypy-extensions==1.0.0
 numpy==1.25.2
 openapi-schema-validator==0.4.4
 openapi-spec-validator==0.5.7
-orjson==3.9.7
+orjson==3.9.8
 packaging==23.1
 parsimonious==0.9.0
 pathable==0.4.3
diff --git a/scripts/dump_schema.py b/scripts/dump_schema.py
index b85b43d2b..55cc32f69 100644
--- a/scripts/dump_schema.py
+++ b/scripts/dump_schema.py
@@ -2,7 +2,7 @@
 from pathlib import Path

 import orjson
-from dc_schema import get_schema  # type: ignore[import]
+from dc_schema import get_schema  # type: ignore[import-not-found]

 from dipdup.config import DipDupConfig
diff --git a/src/demo_uniswap/models/repo.py b/src/demo_uniswap/models/repo.py
index b0d64779b..736fe328d 100644
--- a/src/demo_uniswap/models/repo.py
+++ b/src/demo_uniswap/models/repo.py
@@ -2,7 +2,7 @@
 from typing import Any
 from typing import cast

-from lru import LRU  # type: ignore[import]
+from lru import LRU  # type: ignore[import-not-found]

 import demo_uniswap.models as models
 from dipdup.config.evm import EvmContractConfig
@@ -32,7 +32,7 @@ def save_pending_position(self, idx: str, position: dict[str, Any]) -> None:
         self._pending_positions[idx] = position

     def get_pending_position(self, idx: str) -> dict[str, Any] | None:
-        return self._pending_positions.get(idx, None)
+        return self._pending_positions.get(idx, None)  # type: ignore[no-any-return]


 async def get_ctx_factory(ctx: HandlerContext) -> models.Factory:
diff --git a/src/dipdup/database.py b/src/dipdup/database.py
index 7777fd49d..cff3cf138 100644
--- a/src/dipdup/database.py
+++ b/src/dipdup/database.py
@@ -14,8 +14,8 @@
 from typing import Any
 from typing import cast

-import asyncpg.exceptions  # type: ignore[import]
-import sqlparse  # type: ignore[import]
+import asyncpg.exceptions  # type: ignore[import-untyped]
+import sqlparse  # type: ignore[import-untyped]
 from tortoise import Tortoise
 from tortoise.backends.asyncpg.client import AsyncpgDBClient
 from tortoise.backends.base.client import BaseDBAsyncClient
@@ -258,7 +258,7 @@ async def _sqlite_wipe_schema(
     master_query = 'SELECT name FROM sqlite_master WHERE type = "table"'
     result = await conn.execute_query(master_query)
     for name in result[1]:
-        if name not in immune_tables:
+        if name not in immune_tables:  # type: ignore[comparison-overlap]
             continue

         expr = f'CREATE TABLE {namespace}.{name} AS SELECT * FROM {name}'
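One detail worth calling out in the `_sqlite_wipe_schema` hunk above: the guard reads inverted at first glance. This loop is the copy-aside pass, so `continue` skips every non-immune table, and only tables listed in `immune_tables` are copied into the attached `{namespace}` schema before the wipe. A hedged sketch of the loop in isolation (`conn`, `namespace` and `immune_tables` come from the surrounding function; the execute call is assumed, since the hunk ends at the `expr` assignment):

    # Copy-aside pass: preserve immune tables, skip everything else.
    for name in result[1]:
        if name not in immune_tables:
            continue  # not immune: will be wiped, nothing to copy
        expr = f'CREATE TABLE {namespace}.{name} AS SELECT * FROM {name}'
        await conn.execute_script(expr)  # assumed; not shown in the hunk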
diff --git a/src/dipdup/datasources/evm_subsquid.py b/src/dipdup/datasources/evm_subsquid.py
index 1a866c558..e51ed26c1 100644
--- a/src/dipdup/datasources/evm_subsquid.py
+++ b/src/dipdup/datasources/evm_subsquid.py
@@ -7,7 +7,7 @@
 from io import BytesIO
 from typing import Any

-import pyarrow.ipc  # type: ignore[import]
+import pyarrow.ipc  # type: ignore[import-untyped]

 from dipdup.config import HttpConfig
 from dipdup.config.evm_subsquid import SubsquidDatasourceConfig
diff --git a/src/dipdup/indexes/tezos_tzkt_operations/parser.py b/src/dipdup/indexes/tezos_tzkt_operations/parser.py
index da8527f3d..85fac26da 100644
--- a/src/dipdup/indexes/tezos_tzkt_operations/parser.py
+++ b/src/dipdup/indexes/tezos_tzkt_operations/parser.py
@@ -26,7 +26,7 @@
 T = TypeVar('T', Hashable, type[BaseModel])


-def extract_root_outer_type(storage_type: type[BaseModel]) -> T:
+def extract_root_outer_type(storage_type: type[BaseModel]) -> T:  # type: ignore[type-var]
     """Extract Pydantic __root__ type"""
     root_field = storage_type.__fields__['__root__']
     if root_field.allow_none:
diff --git a/src/dipdup/performance.py b/src/dipdup/performance.py
index 9d6296db8..05ab7041a 100644
--- a/src/dipdup/performance.py
+++ b/src/dipdup/performance.py
@@ -27,7 +27,7 @@
 from typing import cast

 from async_lru import alru_cache
-from lru import LRU  # type: ignore[import]
+from lru import LRU  # type: ignore[import-not-found]

 from dipdup.exceptions import FrameworkException
@@ -42,7 +42,7 @@
 @asynccontextmanager
 async def with_pprofile(name: str) -> AsyncIterator[None]:
     try:
-        import pprofile  # type: ignore[import]
+        import pprofile  # type: ignore[import-untyped]

         _logger.warning('Full profiling is enabled, this will affect performance')
     except ImportError:
diff --git a/src/dipdup/project.py b/src/dipdup/project.py
index 9bfc2bd8b..11c96abdb 100644
--- a/src/dipdup/project.py
+++ b/src/dipdup/project.py
@@ -97,7 +97,7 @@ def prompt_anyof(
     default: int,
 ) -> tuple[int, str]:
     """Ask user to choose one of options; returns index and value"""
-    import survey  # type: ignore[import]
+    import survey  # type: ignore[import-untyped]

     table = tabulate(
         zip(options, comments, strict=True),
diff --git a/src/dipdup/projects/demo_uniswap/models/repo.py.j2 b/src/dipdup/projects/demo_uniswap/models/repo.py.j2
index d739e3cde..e2e774ce5 100644
--- a/src/dipdup/projects/demo_uniswap/models/repo.py.j2
+++ b/src/dipdup/projects/demo_uniswap/models/repo.py.j2
@@ -2,7 +2,7 @@ from decimal import Decimal
 from typing import Any
 from typing import cast

-from lru import LRU  # type: ignore[import]
+from lru import LRU  # type: ignore[import-untyped]

 import {{ project.package }}.models as models
 from dipdup.config.evm import EvmContractConfig
diff --git a/src/dipdup/scheduler.py b/src/dipdup/scheduler.py
index 3778b3429..b1555cd07 100644
--- a/src/dipdup/scheduler.py
+++ b/src/dipdup/scheduler.py
@@ -4,13 +4,13 @@
 from functools import partial
 from typing import Any

-from apscheduler.events import EVENT_JOB_ERROR  # type: ignore[import]
+from apscheduler.events import EVENT_JOB_ERROR  # type: ignore[import-untyped]
 from apscheduler.events import EVENT_JOB_EXECUTED
 from apscheduler.events import JobEvent
-from apscheduler.job import Job  # type: ignore[import]
-from apscheduler.schedulers.asyncio import AsyncIOScheduler  # type: ignore[import]
-from apscheduler.triggers.cron import CronTrigger  # type: ignore[import]
-from apscheduler.triggers.interval import IntervalTrigger  # type: ignore[import]
+from apscheduler.job import Job  # type: ignore[import-untyped]
+from apscheduler.schedulers.asyncio import AsyncIOScheduler  # type: ignore[import-untyped]
+from apscheduler.triggers.cron import CronTrigger  # type: ignore[import-untyped]
+from apscheduler.triggers.interval import IntervalTrigger  # type: ignore[import-untyped]

 from dipdup.config import JobConfig
 from dipdup.context import DipDupContext
diff --git a/tests/profile_abi_decoding.py b/tests/profile_abi_decoding.py
index 483fd16ed..ace6b6b9d 100644
--- a/tests/profile_abi_decoding.py
+++ b/tests/profile_abi_decoding.py
@@ -1,7 +1,7 @@
 import time
 from pathlib import Path

-import pprofile  # type: ignore[import]
+import pprofile  # type: ignore[import-untyped]

 from dipdup.indexes.evm_subsquid_events.matcher import decode_event_data
 from dipdup.package import EventAbiExtra
diff --git a/tests/test_hasura.py b/tests/test_hasura.py
index cc5e5b792..ea233c607 100644
--- a/tests/test_hasura.py
+++ b/tests/test_hasura.py
@@ -9,7 +9,7 @@
 import pytest
 from aiohttp import web
 from aiohttp.pytest_plugin import AiohttpClient
-from docker.client import DockerClient  # type: ignore[import]
+from docker.client import DockerClient  # type: ignore[import-untyped]
 from tortoise import Tortoise

 from dipdup.config import DipDupConfig
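The test changes below go in the opposite direction from the rest of the patch: they drop `# type: ignore[comparison-overlap]` comments, presumably because mypy 1.6 no longer flags these union comparisons (and under a warn-unused-ignores style configuration a stale ignore is itself an error). The runtime behaviour being asserted was always fine: on Python 3.10+ an `X | Y` expression produces a `types.UnionType` object that compares by value. A small self-contained illustration (assumed setup, not dipdup code):

    from types import UnionType

    maybe_str = str | None          # PEP 604 union object
    assert isinstance(maybe_str, UnionType)
    assert maybe_str == str | None  # the comparison older mypy flagged as non-overlapping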
diff --git a/tests/test_introspection.py b/tests/test_introspection.py
index 957cdd587..ecc4f759e 100644
--- a/tests/test_introspection.py
+++ b/tests/test_introspection.py
@@ -15,8 +15,8 @@ def test_list_simple_args() -> None:
     assert get_list_elt_type(list[str]) == str
     assert get_list_elt_type(list[int]) == int
     assert get_list_elt_type(list[bool]) == bool
-    assert get_list_elt_type(list[str | None]) == str | None  # type: ignore[comparison-overlap]
-    assert get_list_elt_type(list[str | int]) == str | int  # type: ignore[comparison-overlap]
+    assert get_list_elt_type(list[str | None]) == str | None
+    assert get_list_elt_type(list[str | int]) == str | int
     assert get_list_elt_type(list[tuple[str]]) == tuple[str]
     assert get_list_elt_type(list[list[str]]) == list[str]
     assert get_list_elt_type(list[dict[str, str]]) == dict[str, str]
@@ -27,8 +27,8 @@ class Class: ...

     assert get_list_elt_type(list[Class]) == Class
-    assert get_list_elt_type(list[Class | None]) == Class | None  # type: ignore[comparison-overlap]
-    assert get_list_elt_type(list[Class | int]) == Class | int  # type: ignore[comparison-overlap]
+    assert get_list_elt_type(list[Class | None]) == Class | None
+    assert get_list_elt_type(list[Class | int]) == Class | int
     assert get_list_elt_type(list[tuple[Class]]) == tuple[Class]
     assert get_list_elt_type(list[list[Class]]) == list[Class]
     assert get_list_elt_type(list[dict[str, Class]]) == dict[str, Class]
@@ -44,7 +44,7 @@ class SomethingElse(BaseModel):
     class OptionalList(BaseModel):
         __root__: list[str] | None

-    assert get_list_elt_type(ListOfMapsStorage) == int | dict[str, str]  # type: ignore[comparison-overlap]
+    assert get_list_elt_type(ListOfMapsStorage) == int | dict[str, str]

     with pytest.raises(IntrospectionError):
         get_list_elt_type(OptionalList)
@@ -57,8 +57,8 @@ def test_dict_simple_args() -> None:
     assert get_dict_value_type(dict[str, str]) == str
     assert get_dict_value_type(dict[str, int]) == int
     assert get_dict_value_type(dict[str, bool]) == bool
-    assert get_dict_value_type(dict[str, str | None]) == str | None  # type: ignore[comparison-overlap]
-    assert get_dict_value_type(dict[str, str | int]) == str | int  # type: ignore[comparison-overlap]
+    assert get_dict_value_type(dict[str, str | None]) == str | None
+    assert get_dict_value_type(dict[str, str | int]) == str | int
     assert get_dict_value_type(dict[str, tuple[str]]) == tuple[str]
     assert get_dict_value_type(dict[str, list[str]]) == list[str]
     assert get_dict_value_type(dict[str, dict[str, str]]) == dict[str, str]
@@ -69,8 +69,8 @@ class Class: ...

     assert get_dict_value_type(dict[str, Class]) == Class
-    assert get_dict_value_type(dict[str, Class | None]) == Class | None  # type: ignore[comparison-overlap]
-    assert get_dict_value_type(dict[str, Class | int]) == Class | int  # type: ignore[comparison-overlap]
+    assert get_dict_value_type(dict[str, Class | None]) == Class | None
+    assert get_dict_value_type(dict[str, Class | int]) == Class | int
     assert get_dict_value_type(dict[str, tuple[Class]]) == tuple[Class]
     assert get_dict_value_type(dict[str, list[Class]]) == list[Class]
     assert get_dict_value_type(dict[str, dict[str, Class]]) == dict[str, Class]
@@ -86,7 +86,7 @@ class SomethingElse(BaseModel):
     class OptionalDict(BaseModel):
         __root__: dict[str, str] | None

-    assert get_dict_value_type(DictOfMapsStorage) == int | dict[str, str]  # type: ignore[comparison-overlap]
+    assert get_dict_value_type(DictOfMapsStorage) == int | dict[str, str]

     with pytest.raises(IntrospectionError):
         get_dict_value_type(OptionalDict)
@@ -105,8 +105,8 @@ class Storage(BaseModel):
     assert get_dict_value_type(Storage, 'plain_str') == str
     assert get_dict_value_type(Storage, 'list_str') == list[str]
     assert get_dict_value_type(Storage, 'dict_of_lists') == dict[str, list[str]]
-    assert get_dict_value_type(Storage, 'optional_str') == str | None  # type: ignore[comparison-overlap]
-    assert get_dict_value_type(Storage, 'union_arg') == str | int  # type: ignore[comparison-overlap]
+    assert get_dict_value_type(Storage, 'optional_str') == str | None
+    assert get_dict_value_type(Storage, 'union_arg') == str | int


 def test_is_array() -> None:
@@ -144,6 +144,6 @@ class OptionalStr(BaseModel):
     class ListOfMapsStorage(BaseModel):
         __root__: list[int | dict[str, str]]

-    assert extract_root_outer_type(OptionalStr) == str | None  # type: ignore[comparison-overlap]
+    assert extract_root_outer_type(OptionalStr) == str | None
     # FIXME: left operand type: "Type[BaseModel]", right operand type: "Type[List[Any]]"
     assert extract_root_outer_type(ListOfMapsStorage) == list[int | dict[str, str]]  # type: ignore[comparison-overlap]
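The last hunk still carries a FIXME for `extract_root_outer_type`, whose signature gained a `type: ignore[type-var]` earlier in this patch. For context, here is a hedged, self-contained sketch of the behaviour these assertions pin down, following the parser.py snippet above (pydantic v1 custom-root models; `root_outer_type` is a stand-in name, not the real import path, and the Optional-widening branch is inferred from the `root_field.allow_none` check shown in the hunk):

    from pydantic import BaseModel  # pydantic v1 API

    class OptionalStr(BaseModel):
        __root__: str | None

    def root_outer_type(storage_type: type[BaseModel]) -> object:
        # Pydantic v1 strips None from outer_type_ and sets allow_none instead,
        # so the declared `str | None` has to be rebuilt from the field info.
        root_field = storage_type.__fields__['__root__']
        if root_field.allow_none:
            return root_field.outer_type_ | None
        return root_field.outer_type_

    assert root_outer_type(OptionalStr) == str | None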