From 705b706d5cbc638e8ab44c7c0a44dd7e04d04c0d Mon Sep 17 00:00:00 2001
From: Pia
Date: Tue, 23 Jul 2024 17:51:41 +0200
Subject: [PATCH] chore: fmt, feat: add request

---
 script/HdpExecutionStore.s.sol                |  21 +--
 src/HdpExecutionStore.sol                     | 139 +++++------
 .../datalake/BlockSampledDatalakeCodecs.sol   |  63 +++-----
 src/datatypes/datalake/ComputeCodecs.sol      |  15 +-
 .../TransactionsInBlockDatalakeCodecs.sol     |  74 ++++------
 src/interfaces/IAggregatorsFactory.sol        |   9 +-
 src/lib/Uint256Splitter.sol                   |   9 +-
 test/BlockSampledHdpExecutionStore.t.sol      | 108 +++++---------
 test/ModuleHdpExecutionStore.t.sol            | 105 +++-------
 ...TransactionsInBlockHdpExecutionStore.t.sol | 106 +++++--------
 10 files changed, 203 insertions(+), 446 deletions(-)

diff --git a/script/HdpExecutionStore.s.sol b/script/HdpExecutionStore.s.sol
index 3f6e46f..7d3d4d0 100644
--- a/script/HdpExecutionStore.s.sol
+++ b/script/HdpExecutionStore.s.sol
@@ -13,25 +13,14 @@ contract HdpExecutionStoreDeployer is Script {
         uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
         vm.startBroadcast(deployerPrivateKey);
 
-        IFactsRegistry factsRegistry = IFactsRegistry(
-            vm.envAddress("FACTS_REGISTRY_ADDRESS")
-        );
-        IAggregatorsFactory aggregatorsFactory = IAggregatorsFactory(
-            vm.envAddress("AGGREGATORS_FACTORY_ADDRESS")
-        );
+        IFactsRegistry factsRegistry = IFactsRegistry(vm.envAddress("FACTS_REGISTRY_ADDRESS"));
+        IAggregatorsFactory aggregatorsFactory = IAggregatorsFactory(vm.envAddress("AGGREGATORS_FACTORY_ADDRESS"));
 
         bytes32 programHash = _getProgramHash();
 
         // Deploy the HdpExecutionStore
-        HdpExecutionStore hdpExecutionStore = new HdpExecutionStore(
-            factsRegistry,
-            aggregatorsFactory,
-            programHash
-        );
-
-        console2.log(
-            "HdpExecutionStore deployed at: ",
-            address(hdpExecutionStore)
-        );
+        HdpExecutionStore hdpExecutionStore = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
+
+        console2.log("HdpExecutionStore deployed at: ", address(hdpExecutionStore));
 
         vm.stopBroadcast();
     }
diff --git a/src/HdpExecutionStore.sol b/src/HdpExecutionStore.sol
index 6f6c45f..0a8b81a 100644
--- a/src/HdpExecutionStore.sol
+++ b/src/HdpExecutionStore.sol
@@ -9,8 +9,12 @@ import {ISharpFactsAggregator} from "./interfaces/ISharpFactsAggregator.sol";
 import {IAggregatorsFactory} from "./interfaces/IAggregatorsFactory.sol";
 
 import {BlockSampledDatalake, BlockSampledDatalakeCodecs} from "./datatypes/datalake/BlockSampledDatalakeCodecs.sol";
-import {TransactionsInBlockDatalake, TransactionsInBlockDatalakeCodecs} from "./datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
+import {
+    TransactionsInBlockDatalake,
+    TransactionsInBlockDatalakeCodecs
+} from "./datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
 import {ComputationalTask, ComputationalTaskCodecs} from "./datatypes/datalake/ComputeCodecs.sol";
+import {Module, ModuleCodecs} from "./datatypes/module/ModuleCodecs.sol";
 
 /// Caller is not authorized to perform the action
 error Unauthorized();
@@ -31,6 +35,7 @@ contract HdpExecutionStore is AccessControl {
     using BlockSampledDatalakeCodecs for BlockSampledDatalake;
     using TransactionsInBlockDatalakeCodecs for TransactionsInBlockDatalake;
     using ComputationalTaskCodecs for ComputationalTask;
+    using ModuleCodecs for Module;
 
     /// @notice The status of a task
     enum TaskStatus {
@@ -49,16 +54,13 @@ contract HdpExecutionStore is AccessControl {
     event MmrRootCached(uint256 mmrId, uint256 mmrSize, bytes32 mmrRoot);
 
     /// @notice emitted when a new task with block sampled datalake is scheduled
-    event TaskWithBlockSampledDatalakeScheduled(
-        BlockSampledDatalake datalake,
-        ComputationalTask task
-    );
+    event TaskWithBlockSampledDatalakeScheduled(BlockSampledDatalake datalake, ComputationalTask task);
 
     /// @notice emitted when a new task with transactions in block datalake is scheduled
-    event TaskWithTransactionsInBlockDatalakeScheduled(
-        TransactionsInBlockDatalake datalake,
-        ComputationalTask task
-    );
+    event TaskWithTransactionsInBlockDatalakeScheduled(TransactionsInBlockDatalake datalake, ComputationalTask task);
+
+    /// @notice emitted when a new task with module is scheduled
+    event ModuleTaskScheduled(Module moduleTask);
 
     /// @notice constant representing role of operator
     bytes32 public constant OPERATOR_ROLE = keccak256("OPERATOR_ROLE");
@@ -79,14 +81,9 @@ contract HdpExecutionStore is AccessControl {
     mapping(bytes32 => TaskResult) public cachedTasksResult;
 
     /// @notice mapping of chain id => mmr id => mmr size => mmr root
-    mapping(uint256 => mapping(uint256 => mapping(uint256 => bytes32)))
-        public cachedMMRsRoots;
-
-    constructor(
-        IFactsRegistry factsRegistry,
-        IAggregatorsFactory aggregatorsFactory,
-        bytes32 programHash
-    ) {
+    mapping(uint256 => mapping(uint256 => mapping(uint256 => bytes32))) public cachedMMRsRoots;
+
+    constructor(IFactsRegistry factsRegistry, IAggregatorsFactory aggregatorsFactory, bytes32 programHash) {
         SHARP_FACTS_REGISTRY = factsRegistry;
         AGGREGATORS_FACTORY = aggregatorsFactory;
         PROGRAM_HASH = programHash;
@@ -105,20 +102,11 @@ contract HdpExecutionStore is AccessControl {
     /// @notice Caches the MMR root for a given MMR id
     /// @notice Get MMR size and root from the aggregator and cache it
     function cacheMmrRoot(uint256 mmrId) public {
-        ISharpFactsAggregator aggregator = AGGREGATORS_FACTORY.aggregatorsById(
-            mmrId
-        );
-        ISharpFactsAggregator.AggregatorState
-            memory aggregatorState = aggregator.aggregatorState();
-        cachedMMRsRoots[CHAIN_ID][mmrId][
-            aggregatorState.mmrSize
-        ] = aggregatorState.poseidonMmrRoot;
-
-        emit MmrRootCached(
-            mmrId,
-            aggregatorState.mmrSize,
-            aggregatorState.poseidonMmrRoot
-        );
+        ISharpFactsAggregator aggregator = AGGREGATORS_FACTORY.aggregatorsById(mmrId);
+        ISharpFactsAggregator.AggregatorState memory aggregatorState = aggregator.aggregatorState();
+        cachedMMRsRoots[CHAIN_ID][mmrId][aggregatorState.mmrSize] = aggregatorState.poseidonMmrRoot;
+
+        emit MmrRootCached(mmrId, aggregatorState.mmrSize, aggregatorState.poseidonMmrRoot);
     }
 
     /// @notice Requests the execution of a task with a block sampled datalake
@@ -137,15 +125,9 @@
         }
 
         // Store the task result
-        cachedTasksResult[taskCommitment] = TaskResult({
-            status: TaskStatus.SCHEDULED,
-            result: ""
-        });
-
-        emit TaskWithBlockSampledDatalakeScheduled(
-            blockSampledDatalake,
-            computationalTask
-        );
+        cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});
+
+        emit TaskWithBlockSampledDatalakeScheduled(blockSampledDatalake, computationalTask);
     }
 
     /// @notice Requests the execution of a task with a transactions in block datalake
@@ -164,15 +146,25 @@
         }
 
         // Store the task result
-        cachedTasksResult[taskCommitment] = TaskResult({
-            status: TaskStatus.SCHEDULED,
-            result: ""
-        });
-
-        emit TaskWithTransactionsInBlockDatalakeScheduled(
-            transactionsInBlockDatalake,
-            computationalTask
-        );
+        cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});
+
+        emit TaskWithTransactionsInBlockDatalakeScheduled(transactionsInBlockDatalake, computationalTask);
+    }
+
+    /// @notice Requests the execution of a task with a module
+    /// @param moduleTask module task
+    function requestExecutionOfModuleTask(Module calldata moduleTask) external {
+        bytes32 taskCommitment = moduleTask.commit();
+
+        // Ensure task is not already scheduled
+        if (cachedTasksResult[taskCommitment].status != TaskStatus.NONE) {
+            revert DoubleRegistration();
+        }
+
+        // Store the task result
+        cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});
+
+        emit ModuleTaskScheduled(moduleTask);
     }
 
     /// @notice Authenticates the execution of a task is finalized
@@ -222,9 +214,7 @@
         bytes32 programOutputHash = keccak256(abi.encodePacked(programOutput));
 
         // Compute GPS fact hash
-        bytes32 gpsFactHash = keccak256(
-            abi.encode(PROGRAM_HASH, programOutputHash)
-        );
+        bytes32 gpsFactHash = keccak256(abi.encode(PROGRAM_HASH, programOutputHash));
 
         // Ensure GPS fact is registered
         if (!SHARP_FACTS_REGISTRY.isValid(gpsFactHash)) {
@@ -238,63 +228,42 @@
             bytes32[] memory resultInclusionProof = resultsInclusionProofs[i];
 
             // Convert the low and high 128 bits to a single 256 bit value
-            bytes32 resultMerkleRoot = bytes32(
-                (resultMerkleRootHigh << 128) | resultMerkleRootLow
-            );
-            bytes32 taskMerkleRoot = bytes32(
-                (taskMerkleRootHigh << 128) | taskMerkleRootLow
-            );
+            bytes32 resultMerkleRoot = bytes32((resultMerkleRootHigh << 128) | resultMerkleRootLow);
+            bytes32 taskMerkleRoot = bytes32((taskMerkleRootHigh << 128) | taskMerkleRootLow);
 
             // Compute the Merkle leaf of the task
             bytes32 taskCommitment = taskCommitments[i];
             bytes32 taskMerkleLeaf = standardLeafHash(taskCommitment);
 
             // Ensure that the task is included in the batch, by verifying the Merkle proof
-            bool isVerifiedTask = taskInclusionProof.verify(
-                taskMerkleRoot,
-                taskMerkleLeaf
-            );
+            bool isVerifiedTask = taskInclusionProof.verify(taskMerkleRoot, taskMerkleLeaf);
 
             if (!isVerifiedTask) {
                 revert NotInBatch();
             }
 
             // Compute the Merkle leaf of the task result
-            bytes32 taskResultCommitment = keccak256(
-                abi.encode(taskCommitment, computationalTaskResult)
-            );
-            bytes32 taskResultMerkleLeaf = standardLeafHash(
-                taskResultCommitment
-            );
+            bytes32 taskResultCommitment = keccak256(abi.encode(taskCommitment, computationalTaskResult));
+            bytes32 taskResultMerkleLeaf = standardLeafHash(taskResultCommitment);
 
             // Ensure that the task result is included in the batch, by verifying the Merkle proof
-            bool isVerifiedResult = resultInclusionProof.verify(
-                resultMerkleRoot,
-                taskResultMerkleLeaf
-            );
+            bool isVerifiedResult = resultInclusionProof.verify(resultMerkleRoot, taskResultMerkleLeaf);
 
             if (!isVerifiedResult) {
                 revert NotInBatch();
             }
 
             // Store the task result
-            cachedTasksResult[taskCommitment] = TaskResult({
-                status: TaskStatus.FINALIZED,
-                result: computationalTaskResult
-            });
+            cachedTasksResult[taskCommitment] =
+                TaskResult({status: TaskStatus.FINALIZED, result: computationalTaskResult});
         }
     }
 
     /// @notice Load MMR root from cache with given mmrId and mmrSize
-    function loadMmrRoot(
-        uint256 mmrId,
-        uint256 mmrSize
-    ) public view returns (bytes32) {
+    function loadMmrRoot(uint256 mmrId, uint256 mmrSize) public view returns (bytes32) {
         return cachedMMRsRoots[CHAIN_ID][mmrId][mmrSize];
     }
 
     /// @notice Returns the result of a finalized task
-    function getFinalizedTaskResult(
-        bytes32 taskCommitment
-    ) external view returns (bytes32) {
+    function getFinalizedTaskResult(bytes32 taskCommitment) external view returns (bytes32) {
         // Ensure task is finalized
         if (cachedTasksResult[taskCommitment].status != TaskStatus.FINALIZED) {
             revert NotFinalized();
@@ -303,9 +272,7 @@
     }
 
     /// @notice Returns the status of a task
-    function getTaskStatus(
-        bytes32 taskCommitment
-    ) external view returns (TaskStatus) {
+    function getTaskStatus(bytes32 taskCommitment) external view returns (TaskStatus) {
         return cachedTasksResult[taskCommitment].status;
     }
 
diff --git a/src/datatypes/datalake/BlockSampledDatalakeCodecs.sol b/src/datatypes/datalake/BlockSampledDatalakeCodecs.sol
index 767dd79..72b49fd 100644
--- a/src/datatypes/datalake/BlockSampledDatalakeCodecs.sol
+++ b/src/datatypes/datalake/BlockSampledDatalakeCodecs.sol
@@ -21,61 +21,46 @@ struct BlockSampledDatalake {
 library BlockSampledDatalakeCodecs {
     /// @dev Encodes a BlockSampledDatalake.
     /// @param datalake The BlockSampledDatalake to encode.
-    function encode(
-        BlockSampledDatalake memory datalake
-    ) internal pure returns (bytes memory) {
-        return
-            abi.encode(
-                DatalakeCode.BlockSampled,
-                datalake.chainId,
-                datalake.blockRangeStart,
-                datalake.blockRangeEnd,
-                datalake.increment,
-                datalake.sampledProperty
-            );
+    function encode(BlockSampledDatalake memory datalake) internal pure returns (bytes memory) {
+        return abi.encode(
+            DatalakeCode.BlockSampled,
+            datalake.chainId,
+            datalake.blockRangeStart,
+            datalake.blockRangeEnd,
+            datalake.increment,
+            datalake.sampledProperty
+        );
     }
 
     /// @dev Get the commitment of a BlockSampledDatalake.
     /// @param datalake The BlockSampledDatalake to commit.
-    function commit(
-        BlockSampledDatalake memory datalake
-    ) internal pure returns (bytes32) {
+    function commit(BlockSampledDatalake memory datalake) internal pure returns (bytes32) {
         return keccak256(encode(datalake));
     }
 
     /// @dev Encodes a sampled property for a header property.
     /// @param headerPropId The header field from rlp decoded block header.
-    function encodeSampledPropertyForHeaderProp(
-        uint8 headerPropId
-    ) internal pure returns (bytes memory) {
+    function encodeSampledPropertyForHeaderProp(uint8 headerPropId) internal pure returns (bytes memory) {
         return abi.encodePacked(uint8(1), headerPropId);
     }
 
     /// @dev Encodes a sampled property for an account property.
     /// @param account The account address.
     /// @param propertyId The account field from rlp decoded account.
-    function encodeSampledPropertyForAccount(
-        address account,
-        uint8 propertyId
-    ) internal pure returns (bytes memory) {
+    function encodeSampledPropertyForAccount(address account, uint8 propertyId) internal pure returns (bytes memory) {
         return abi.encodePacked(uint8(2), account, propertyId);
     }
 
     /// @dev Encodes a sampled property for a storage.
     /// @param account The account address.
     /// @param slot The storage key.
-    function encodeSampledPropertyForStorage(
-        address account,
-        bytes32 slot
-    ) internal pure returns (bytes memory) {
+    function encodeSampledPropertyForStorage(address account, bytes32 slot) internal pure returns (bytes memory) {
         return abi.encodePacked(uint8(3), account, slot);
     }
 
     /// @dev Decodes a BlockSampledDatalake.
     /// @param data The encoded BlockSampledDatalake.
-    function decode(
-        bytes memory data
-    ) internal pure returns (BlockSampledDatalake memory) {
+    function decode(bytes memory data) internal pure returns (BlockSampledDatalake memory) {
         (
             ,
             uint256 chainId,
@@ -83,17 +68,13 @@ library BlockSampledDatalakeCodecs {
             uint256 blockRangeEnd,
             uint256 increment,
             bytes memory sampledProperty
-        ) = abi.decode(
-                data,
-                (DatalakeCode, uint256, uint256, uint256, uint256, bytes)
-            );
-        return
-            BlockSampledDatalake({
-                chainId: chainId,
-                blockRangeStart: blockRangeStart,
-                blockRangeEnd: blockRangeEnd,
-                increment: increment,
-                sampledProperty: sampledProperty
-            });
+        ) = abi.decode(data, (DatalakeCode, uint256, uint256, uint256, uint256, bytes));
+        return BlockSampledDatalake({
+            chainId: chainId,
+            blockRangeStart: blockRangeStart,
+            blockRangeEnd: blockRangeEnd,
+            increment: increment,
+            sampledProperty: sampledProperty
+        });
     }
 }
diff --git a/src/datatypes/datalake/ComputeCodecs.sol b/src/datatypes/datalake/ComputeCodecs.sol
index 37a2448..3e102fb 100644
--- a/src/datatypes/datalake/ComputeCodecs.sol
+++ b/src/datatypes/datalake/ComputeCodecs.sol
@@ -43,18 +43,7 @@ library ComputationalTaskCodecs {
     /// @notice The commitment embeds the datalake commitment.
     /// @param task The ComputationalTask to commit.
     /// @param datalakeCommitment The commitment of the datalake.
-    function commit(
-        ComputationalTask memory task,
-        bytes32 datalakeCommitment
-    ) internal pure returns (bytes32) {
-        return
-            keccak256(
-                abi.encode(
-                    datalakeCommitment,
-                    task.aggregateFnId,
-                    task.operatorId,
-                    task.valueToCompare
-                )
-            );
+    function commit(ComputationalTask memory task, bytes32 datalakeCommitment) internal pure returns (bytes32) {
+        return keccak256(abi.encode(datalakeCommitment, task.aggregateFnId, task.operatorId, task.valueToCompare));
     }
 }
diff --git a/src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol b/src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol
index 9e51269..d875580 100644
--- a/src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol
+++ b/src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol
@@ -25,51 +25,40 @@ library TransactionsInBlockDatalakeCodecs {
     /// @dev Encodes a TransactionsInBlockDatalake.
     /// @param datalake The TransactionsInBlockDatalake to encode.
-    function encode(
-        TransactionsInBlockDatalake memory datalake
-    ) internal pure returns (bytes memory) {
-        return
-            abi.encode(
-                DatalakeCode.TransactionsInBlock,
-                datalake.chainId,
-                datalake.targetBlock,
-                datalake.startIndex,
-                datalake.endIndex,
-                datalake.increment,
-                datalake.includedTypes,
-                datalake.sampledProperty
-            );
+    function encode(TransactionsInBlockDatalake memory datalake) internal pure returns (bytes memory) {
+        return abi.encode(
+            DatalakeCode.TransactionsInBlock,
+            datalake.chainId,
+            datalake.targetBlock,
+            datalake.startIndex,
+            datalake.endIndex,
+            datalake.increment,
+            datalake.includedTypes,
+            datalake.sampledProperty
+        );
     }
 
     /// @dev Get the commitment of a TransactionsInBlockDatalake.
     /// @param datalake The TransactionsInBlockDatalake to commit.
-    function commit(
-        TransactionsInBlockDatalake memory datalake
-    ) internal pure returns (bytes32) {
+    function commit(TransactionsInBlockDatalake memory datalake) internal pure returns (bytes32) {
         return keccak256(encode(datalake));
     }
 
     /// @dev Encodes a sampled property for a transaction property.
     /// @param txPropId The field from rlp decoded block tx.
-    function encodeSampledPropertyForTxProp(
-        uint8 txPropId
-    ) internal pure returns (bytes memory) {
+    function encodeSampledPropertyForTxProp(uint8 txPropId) internal pure returns (bytes memory) {
         return abi.encodePacked(uint8(1), txPropId);
     }
 
     /// @dev Encodes a sampled property for an transaction receipt property.
     /// @param txReceiptPropId The field from rlp decoded block transaction receipt.
-    function encodeSampledPropertyFortxReceipt(
-        uint8 txReceiptPropId
-    ) internal pure returns (bytes memory) {
+    function encodeSampledPropertyFortxReceipt(uint8 txReceiptPropId) internal pure returns (bytes memory) {
         return abi.encodePacked(uint8(2), txReceiptPropId);
     }
 
     /// @dev Decodes a TransactionsInBlockDatalake.
     /// @param data The encoded TransactionsInBlockDatalake.
-    function decode(
-        bytes memory data
-    ) internal pure returns (TransactionsInBlockDatalake memory) {
+    function decode(bytes memory data) internal pure returns (TransactionsInBlockDatalake memory) {
         (
             ,
             uint256 chainId,
@@ -79,28 +68,15 @@ library TransactionsInBlockDatalakeCodecs {
             uint256 increment,
             uint256 includedTypes,
             bytes memory sampledProperty
-        ) = abi.decode(
-                data,
-                (
-                    DatalakeCode,
-                    uint256,
-                    uint256,
-                    uint256,
-                    uint256,
-                    uint256,
-                    uint256,
-                    bytes
-                )
-            );
-        return
-            TransactionsInBlockDatalake({
-                chainId: chainId,
-                targetBlock: targetBlock,
-                startIndex: startIndex,
-                endIndex: endIndex,
-                increment: increment,
-                includedTypes: includedTypes,
-                sampledProperty: sampledProperty
-            });
+        ) = abi.decode(data, (DatalakeCode, uint256, uint256, uint256, uint256, uint256, uint256, bytes));
+        return TransactionsInBlockDatalake({
+            chainId: chainId,
+            targetBlock: targetBlock,
+            startIndex: startIndex,
+            endIndex: endIndex,
+            increment: increment,
+            includedTypes: includedTypes,
+            sampledProperty: sampledProperty
+        });
     }
 }
diff --git a/src/interfaces/IAggregatorsFactory.sol b/src/interfaces/IAggregatorsFactory.sol
index e6253cb..dd5afa9 100644
--- a/src/interfaces/IAggregatorsFactory.sol
+++ b/src/interfaces/IAggregatorsFactory.sol
@@ -5,12 +5,7 @@ import {ISharpFactsAggregator} from "./ISharpFactsAggregator.sol";
 
 /// @notice Aggregators factory interface.
 interface IAggregatorsFactory {
-    function createAggregator(
-        uint256 id,
-        ISharpFactsAggregator aggregator
-    ) external;
+    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external;
 
-    function aggregatorsById(
-        uint256 id
-    ) external view returns (ISharpFactsAggregator);
+    function aggregatorsById(uint256 id) external view returns (ISharpFactsAggregator);
 }
diff --git a/src/lib/Uint256Splitter.sol b/src/lib/Uint256Splitter.sol
index c2e4cfb..fe68ace 100644
--- a/src/lib/Uint256Splitter.sol
+++ b/src/lib/Uint256Splitter.sol
@@ -12,19 +12,14 @@ library Uint256Splitter {
 
     /// @notice Splits a uint256 into two uint128s (low, high) represented as uint256s.
     /// @param a The uint256 to split.
-    function split128(
-        uint256 a
-    ) internal pure returns (uint256 lower, uint256 upper) {
+    function split128(uint256 a) internal pure returns (uint256 lower, uint256 upper) {
         return (a & MASK, a >> 128);
     }
 
     /// @notice Merges two uint128s (low, high) into one uint256.
     /// @param lower The lower uint256. The caller is required to pass a value that is less than 2^128 - 1.
     /// @param upper The upper uint256.
-    function merge128(
-        uint256 lower,
-        uint256 upper
-    ) internal pure returns (uint256 a) {
+    function merge128(uint256 lower, uint256 upper) internal pure returns (uint256 a) {
         if (lower > MASK) {
             revert InvalidType();
         }
diff --git a/test/BlockSampledHdpExecutionStore.t.sol b/test/BlockSampledHdpExecutionStore.t.sol
index a7aa8f7..d643fe5 100644
--- a/test/BlockSampledHdpExecutionStore.t.sol
+++ b/test/BlockSampledHdpExecutionStore.t.sol
@@ -3,7 +3,9 @@ pragma solidity ^0.8.4;
 
 import {Test} from "forge-std/Test.sol";
 import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
-import {BlockSampledDatalake, BlockSampledDatalakeCodecs} from "../src/datatypes/datalake/BlockSampledDatalakeCodecs.sol";
+import {
+    BlockSampledDatalake, BlockSampledDatalakeCodecs
+} from "../src/datatypes/datalake/BlockSampledDatalakeCodecs.sol";
 import {ComputationalTask, ComputationalTaskCodecs} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {AggregateFn, Operator} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
@@ -22,10 +24,7 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(
-        uint256 id,
-        ISharpFactsAggregator aggregator
-    ) external {
+    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -40,13 +39,12 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return
-            AggregatorState({
-                poseidonMmrRoot: usedMmrRoot,
-                keccakMmrRoot: bytes32(0),
-                mmrSize: usedMmrSize,
-                continuableParentHash: bytes32(0)
-            });
+        return AggregatorState({
+            poseidonMmrRoot: usedMmrRoot,
+            keccakMmrRoot: bytes32(0),
+            mmrSize: usedMmrSize,
+            continuableParentHash: bytes32(0)
+        });
     }
 }
 
@@ -75,22 +73,19 @@ contract HdpExecutionStoreTest is Test {
     // !! If want to fetch different input, modify helpers/target/bs_cached_input.json && helpers/target/bs_cached_output.json
     // !! And construct corresponding BlockSampledDatalake and ComputationalTask here
-    BlockSampledDatalake datalake =
-        BlockSampledDatalake({
-            chainId: 11155111,
-            blockRangeStart: 5858987,
-            blockRangeEnd: 5858997,
-            increment: 2,
-            sampledProperty: BlockSampledDatalakeCodecs
-                .encodeSampledPropertyForHeaderProp(uint8(18))
-        });
-
-    ComputationalTask computationalTask =
-        ComputationalTask({
-            aggregateFnId: AggregateFn.SLR,
-            operatorId: Operator.NONE,
-            valueToCompare: uint256(10000000)
-        });
+    BlockSampledDatalake datalake = BlockSampledDatalake({
+        chainId: 11155111,
+        blockRangeStart: 5858987,
+        blockRangeEnd: 5858997,
+        increment: 2,
+        sampledProperty: BlockSampledDatalakeCodecs.encodeSampledPropertyForHeaderProp(uint8(18))
+    });
+
+    ComputationalTask computationalTask = ComputationalTask({
+        aggregateFnId: AggregateFn.SLR,
+        operatorId: Operator.NONE,
+        valueToCompare: uint256(10000000)
+    });
 
     function setUp() public {
         vm.chainId(11155111);
@@ -101,11 +96,7 @@ contract HdpExecutionStoreTest is Test {
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
 
-        hdp = new HdpExecutionStore(
-            factsRegistry,
-            aggregatorsFactory,
-            programHash
-        );
+        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
 
         // Parse from input file
         (
@@ -121,34 +112,24 @@ contract HdpExecutionStoreTest is Test {
         ) = _fetchCairoInput();
 
         bytes32 computedDatalakeCommitment = datalake.commit();
-        bytes32 computedTaskCommitment = computationalTask.commit(
-            computedDatalakeCommitment
-        );
+        bytes32 computedTaskCommitment = computationalTask.commit(computedDatalakeCommitment);
 
         assertEq(fetchedTasksCommitments[0], computedTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(
-            fetchedMmrRoots[0],
-            fetchedMmrSizes[0]
-        );
+        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(
-            fetchedMmrIds[0],
-            sharpFactsAggregator
-        );
+        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
 
         assertTrue(hdp.hasRole(keccak256("OPERATOR_ROLE"), address(this)));
         hdp.grantRole(keccak256("OPERATOR_ROLE"), proverAddress);
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
-            uint256(bytes32(fetchedTasksMerkleRoot))
-        );
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
 
-        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
-            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) =
+            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR root
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -179,18 +160,11 @@
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
-            fetchedTasksCommitments[0]
-        );
-        assertEq(
-            uint256(taskStatusAfter),
-            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
-        );
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
+        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(
-            fetchedTasksCommitments[0]
-        );
+        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
         assertEq(taskResult, fetchedResults[0]);
     }
 
@@ -205,9 +179,7 @@
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function bytesToString(
-        bytes memory _data
-    ) public pure returns (string memory) {
+    function bytesToString(bytes memory _data) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -262,17 +234,7 @@ contract HdpExecutionStoreTest is Test {
             taskResults
         ) = abi.decode(
             abiEncoded,
-            (
-                uint256[],
-                uint256[],
-                bytes32[],
-                bytes32,
-                bytes32,
-                bytes32[][],
-                bytes32[][],
-                bytes32[],
-                bytes32[]
-            )
+            (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
         );
     }
 }
diff --git a/test/ModuleHdpExecutionStore.t.sol b/test/ModuleHdpExecutionStore.t.sol
index deae424..2a7c3d2 100644
--- a/test/ModuleHdpExecutionStore.t.sol
+++ b/test/ModuleHdpExecutionStore.t.sol
@@ -20,10 +20,7 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(
-        uint256 id,
-        ISharpFactsAggregator aggregator
-    ) external {
+    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -38,13 +35,12 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return
-            AggregatorState({
-                poseidonMmrRoot: usedMmrRoot,
-                keccakMmrRoot: bytes32(0),
-                mmrSize: usedMmrSize,
-                continuableParentHash: bytes32(0)
-            });
+        return AggregatorState({
+            poseidonMmrRoot: usedMmrRoot,
+            keccakMmrRoot: bytes32(0),
+            mmrSize: usedMmrSize,
+            continuableParentHash: bytes32(0)
+        });
     }
 }
 
@@ -77,33 +73,14 @@ contract HdpExecutionStoreTest is Test {
         // !! And construct corresponding BlockSampledDatalake and ComputationalTask here
         bytes32[] memory moduleInputs = new bytes32[](3);
         moduleInputs[0] = bytes32(uint256(5186021));
-        assertEq(
-            moduleInputs[0],
-            bytes32(
-                0x00000000000000000000000000000000000000000000000000000000004f21e5
-            )
-        );
+        assertEq(moduleInputs[0], bytes32(0x00000000000000000000000000000000000000000000000000000000004f21e5));
         moduleInputs[1] = bytes32(uint256(5186024));
-        assertEq(
-            moduleInputs[1],
-            bytes32(
-                0x00000000000000000000000000000000000000000000000000000000004f21e8
-            )
-        );
-        moduleInputs[2] = bytes32(
-            uint256(113007187165825507614120510246167695609561346261)
-        );
-        assertEq(
-            moduleInputs[2],
-            bytes32(
-                0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5
-            )
-        );
+        assertEq(moduleInputs[1], bytes32(0x00000000000000000000000000000000000000000000000000000000004f21e8));
+        moduleInputs[2] = bytes32(uint256(113007187165825507614120510246167695609561346261));
+        assertEq(moduleInputs[2], bytes32(0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5));
 
         Module memory moduleTask = Module({
-            programHash: bytes32(
-                0x00af1333b8346c1ac941efe380f3122a71c1f7cbad19301543712e74f765bfca
-            ),
+            programHash: bytes32(0x00af1333b8346c1ac941efe380f3122a71c1f7cbad19301543712e74f765bfca),
             inputs: moduleInputs
         });
 
@@ -122,11 +99,7 @@ contract HdpExecutionStoreTest is Test {
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
 
-        hdp = new HdpExecutionStore(
-            factsRegistry,
-            aggregatorsFactory,
-            programHash
-        );
+        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
 
         // Parse from input file
         (
@@ -146,27 +119,19 @@ contract HdpExecutionStoreTest is Test {
         assertEq(fetchedTasksCommitments[0], moduleTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(
-            fetchedMmrRoots[0],
-            fetchedMmrSizes[0]
-        );
+        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(
-            fetchedMmrIds[0],
-            sharpFactsAggregator
-        );
+        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
 
         assertTrue(hdp.hasRole(keccak256("OPERATOR_ROLE"), address(this)));
         hdp.grantRole(keccak256("OPERATOR_ROLE"), proverAddress);
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
-            uint256(bytes32(fetchedTasksMerkleRoot))
-        );
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
 
-        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
-            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) =
+            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR root
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -197,18 +162,11 @@
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
-            fetchedTasksCommitments[0]
-        );
-        assertEq(
-            uint256(taskStatusAfter),
-            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
-        );
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
+        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(
-            fetchedTasksCommitments[0]
-        );
+        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
         assertEq(taskResult, fetchedResults[0]);
     }
 
@@ -223,10 +181,7 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function _callPreprocessCli(
-        bytes memory encodedTask,
-        bytes memory encodedDatalake
-    ) internal {
+    function _callPreprocessCli(bytes memory encodedTask, bytes memory encodedDatalake) internal {
         string[] memory inputs = new string[](4);
         inputs[0] = "node";
         inputs[1] = "./helpers/fetch_cairo_input.js";
@@ -235,9 +190,7 @@ contract HdpExecutionStoreTest is Test {
         vm.ffi(inputs);
     }
 
-    function bytesToString(
-        bytes memory _data
-    ) public pure returns (string memory) {
+    function bytesToString(bytes memory _data) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -292,17 +245,7 @@ contract HdpExecutionStoreTest is Test {
             taskResults
         ) = abi.decode(
             abiEncoded,
-            (
-                uint256[],
-                uint256[],
-                bytes32[],
-                bytes32,
-                bytes32,
-                bytes32[][],
-                bytes32[][],
-                bytes32[],
-                bytes32[]
-            )
+            (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
        );
    }
 }
diff --git a/test/TransactionsInBlockHdpExecutionStore.t.sol b/test/TransactionsInBlockHdpExecutionStore.t.sol
index 3d5312e..42ab54a 100644
--- a/test/TransactionsInBlockHdpExecutionStore.t.sol
+++ b/test/TransactionsInBlockHdpExecutionStore.t.sol
@@ -3,7 +3,10 @@ pragma solidity ^0.8.4;
 
 import {Test} from "forge-std/Test.sol";
 import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
-import {TransactionsInBlockDatalake, TransactionsInBlockDatalakeCodecs} from "../src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
+import {
+    TransactionsInBlockDatalake,
+    TransactionsInBlockDatalakeCodecs
+} from "../src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
 import {ComputationalTask, ComputationalTaskCodecs} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {AggregateFn, Operator} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
@@ -22,10 +25,7 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(
-        uint256 id,
-        ISharpFactsAggregator aggregator
-    ) external {
+    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -40,13 +40,12 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return
-            AggregatorState({
-                poseidonMmrRoot: usedMmrRoot,
-                keccakMmrRoot: bytes32(0),
-                mmrSize: usedMmrSize,
-                continuableParentHash: bytes32(0)
-            });
+        return AggregatorState({
+            poseidonMmrRoot: usedMmrRoot,
+            keccakMmrRoot: bytes32(0),
+            mmrSize: usedMmrSize,
+            continuableParentHash: bytes32(0)
+        });
     }
 }
 
@@ -75,24 +74,18 @@ contract HdpExecutionStoreTest is Test {
     // !! If want to fetch different input, modify helpers/target/tx_cached_input.json && helpers/target/tx_cached_output.json
     // !! And construct corresponding TransactionsInBlockDatalake and ComputationalTask here
-    TransactionsInBlockDatalake datalake =
-        TransactionsInBlockDatalake({
-            chainId: 11155111,
-            targetBlock: uint256(5605816),
-            startIndex: uint256(12),
-            endIndex: uint256(53),
-            increment: uint256(1),
-            includedTypes: uint256(0x00000101),
-            sampledProperty: TransactionsInBlockDatalakeCodecs
-                .encodeSampledPropertyFortxReceipt(uint8(0))
-        });
+    TransactionsInBlockDatalake datalake = TransactionsInBlockDatalake({
+        chainId: 11155111,
+        targetBlock: uint256(5605816),
+        startIndex: uint256(12),
+        endIndex: uint256(53),
+        increment: uint256(1),
+        includedTypes: uint256(0x00000101),
+        sampledProperty: TransactionsInBlockDatalakeCodecs.encodeSampledPropertyFortxReceipt(uint8(0))
+    });
 
     ComputationalTask computationalTask =
-        ComputationalTask({
-            aggregateFnId: AggregateFn.SLR,
-            operatorId: Operator.NONE,
-            valueToCompare: uint256(50)
-        });
+        ComputationalTask({aggregateFnId: AggregateFn.SLR, operatorId: Operator.NONE, valueToCompare: uint256(50)});
 
     function setUp() public {
         vm.chainId(11155111);
@@ -103,11 +96,7 @@ contract HdpExecutionStoreTest is Test {
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
 
-        hdp = new HdpExecutionStore(
-            factsRegistry,
-            aggregatorsFactory,
-            programHash
-        );
+        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
 
         // Parse from input file
         (
@@ -123,34 +112,24 @@ contract HdpExecutionStoreTest is Test {
         ) = _fetchCairoInput();
 
         bytes32 computedDatalakeCommitment = datalake.commit();
-        bytes32 computedTaskCommitment = computationalTask.commit(
-            computedDatalakeCommitment
-        );
+        bytes32 computedTaskCommitment = computationalTask.commit(computedDatalakeCommitment);
 
         assertEq(fetchedTasksCommitments[0], computedTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(
-            fetchedMmrRoots[0],
-            fetchedMmrSizes[0]
-        );
+        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(
-            fetchedMmrIds[0],
-            sharpFactsAggregator
-        );
+        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
 
         assertTrue(hdp.hasRole(keccak256("OPERATOR_ROLE"), address(this)));
         hdp.grantRole(keccak256("OPERATOR_ROLE"), proverAddress);
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
-            uint256(bytes32(fetchedTasksMerkleRoot))
-        );
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
 
-        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
-            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) =
+            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR roots
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -182,18 +161,11 @@
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
-            fetchedTasksCommitments[0]
-        );
-        assertEq(
-            uint256(taskStatusAfter),
-            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
-        );
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
+        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(
-            fetchedTasksCommitments[0]
-        );
+        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
         assertEq(taskResult, fetchedResults[0]);
     }
 
@@ -208,9 +180,7 @@
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function bytesToString(
-        bytes memory _data
-    ) public pure returns (string memory) {
+    function bytesToString(bytes memory _data) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -265,17 +235,7 @@ contract HdpExecutionStoreTest is Test {
             taskResults
         ) = abi.decode(
             abiEncoded,
-            (
-                uint256[],
-                uint256[],
-                bytes32[],
-                bytes32,
-                bytes32,
-                bytes32[][],
-                bytes32[][],
-                bytes32[],
-                bytes32[]
-            )
+            (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
         );
     }
 }
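
Reviewer note (not part of the patch): below is a minimal sketch of how the new requestExecutionOfModuleTask entrypoint added in src/HdpExecutionStore.sol could be driven from a Foundry script. The program hash and the three inputs are the values exercised in test/ModuleHdpExecutionStore.t.sol; the RequestModuleTask contract name and the HDP_EXECUTION_STORE_ADDRESS environment variable are hypothetical and not defined by this patch.

// SPDX-License-Identifier: MIT
pragma solidity ^0.8.4;

import {Script} from "forge-std/Script.sol";
import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
import {Module} from "../src/datatypes/module/ModuleCodecs.sol";

/// @notice Hypothetical helper script that schedules a module task on an
/// already-deployed HdpExecutionStore.
contract RequestModuleTask is Script {
    function run() external {
        // Placeholder env var; point it at a deployed HdpExecutionStore.
        HdpExecutionStore hdp = HdpExecutionStore(vm.envAddress("HDP_EXECUTION_STORE_ADDRESS"));

        // Inputs mirror test/ModuleHdpExecutionStore.t.sol: two block numbers
        // and an address, each left-padded into a bytes32 word.
        bytes32[] memory moduleInputs = new bytes32[](3);
        moduleInputs[0] = bytes32(uint256(5186021));
        moduleInputs[1] = bytes32(uint256(5186024));
        moduleInputs[2] = bytes32(uint256(113007187165825507614120510246167695609561346261));

        Module memory moduleTask = Module({
            programHash: bytes32(0x00af1333b8346c1ac941efe380f3122a71c1f7cbad19301543712e74f765bfca),
            inputs: moduleInputs
        });

        vm.startBroadcast(vm.envUint("PRIVATE_KEY"));
        // The store keys the task by ModuleCodecs.commit(moduleTask), so
        // re-submitting an identical task reverts with DoubleRegistration().
        hdp.requestExecutionOfModuleTask(moduleTask);
        vm.stopBroadcast();
    }
}

The request only marks the task commitment as SCHEDULED; the operator later flips the same cachedTasksResult slot to FINALIZED through authenticateTaskExecution once the corresponding SHARP fact is registered.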