From 46d7d3be781fe63fffaaec8172ce927dcf2ec3cc Mon Sep 17 00:00:00 2001 From: vincfurc <10850139+vincfurc@users.noreply.github.com> Date: Tue, 5 Nov 2024 12:34:11 +0000 Subject: [PATCH] Add DA references (#5652) --- .../other/da-beat/blockchain/avail/avail.ts | 14 ++++++++++ .../blockchain/avail/bridges/vector.ts | 12 +++++++- .../bridges/blobstream/SP1Blobstream.ts | 12 +++++++- .../da-beat/blockchain/celestia/celestia.ts | 23 +++++++++++++++ .../other/da-beat/blockchain/memo/memo.ts | 20 ++++++++++++- .../other/da-beat/blockchain/near/near.ts | 28 +++++++++++++++++-- .../da-beat/dac/bridges/eigenDABridge.ts | 12 +++++++- .../src/projects/other/da-beat/dac/eigenDA.ts | 18 ++++++++++-- .../projects/other/da-beat/dac/fraxtalDA.ts | 10 +++++++ .../projects/other/da-beat/dac/redstoneDA.ts | 12 +++++++- .../projects/other/da-beat/dac/xterioDA.ts | 10 +++++++ .../da-beat/templates/anytrust-template.ts | 6 ++++ .../da-beat/templates/polygoncdk-template.ts | 6 ++++ .../da-beat/templates/starkex-template.ts | 6 ++++ 14 files changed, 179 insertions(+), 10 deletions(-) diff --git a/packages/config/src/projects/other/da-beat/blockchain/avail/avail.ts b/packages/config/src/projects/other/da-beat/blockchain/avail/avail.ts index 2f8911e9038..3889d08dd98 100644 --- a/packages/config/src/projects/other/da-beat/blockchain/avail/avail.ts +++ b/packages/config/src/projects/other/da-beat/blockchain/avail/avail.ts @@ -75,6 +75,20 @@ export const avail: DaLayer = { App-specific data can be reconstructed by app clients, which request and assemble missing cells from the network to complete the data reconstruction process. `, + references: [ + { + text: 'Avail Documentation', + href: 'https://docs.availproject.org/docs/learn-about-avail/consensus/npos', + }, + { + text: 'Avail Light Client - Source Code', + href: 'https://github.com/availproject/avail-light/blob/main/core/src/light_client.rs', + }, + { + text: 'Avail App Client - Source Code', + href: 'https://github.com/availproject/avail-light/blob/a9e1741a6c7579d6ab1988eb409808b33f999180/core/src/app_client.rs', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/blockchain/avail/bridges/vector.ts b/packages/config/src/projects/other/da-beat/blockchain/avail/bridges/vector.ts index 3b262001809..28912d83068 100644 --- a/packages/config/src/projects/other/da-beat/blockchain/avail/bridges/vector.ts +++ b/packages/config/src/projects/other/da-beat/blockchain/avail/bridges/vector.ts @@ -84,10 +84,20 @@ export const vector = { The SP1 Vector bridge is composed of three main components: the **Vector** contract, the **Succinct Gateway** contracts, and the **Verifier** contracts.
By default, Vector operates asynchronously, handling requests in a fulfillment-based manner. First, zero-knowledge proofs of Avail block ranges are requested for proving. Requests can be submitted either off-chain through the Succinct API, or onchain through the requestCall() method of the Succinct Gateway smart contract. Alternatively, it is possible to run an SP1 Vector operator with local proving, allowing for self-generating the proofs. - Once a proving request is received, the off-chain prover generates the proof and submits it to the Vector contract. The Vector contract verifies the proof with the corresponding verifier contract and, if successful, stores the data commitment in storage.
+ Once a proving request is received, the off-chain prover generates the proof and relays it to the Vector contract. The Vector contract verifies the proof with the corresponding verifier contract and, if successful, stores the data commitment in storage.
By default, Vector on ${chainName} is updated by the Succinct operator at a cadence of approximately ${updateInterval} hours. `, + references: [ + { + text: 'SP1 Vector Operator', + href: 'https://github.com/succinctlabs/sp1-vector/blob/a9689768ff4052e0933cc575b79001d4bcfa0cd5/script/bin/operator.rs', + }, + { + text: 'Succinct Gateway - Etherscan', + href: 'https://etherscan.io/address/0x6c7a05e0AE641c6559fD76ac56641778B6eCd776#code#F1#L148', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/blockchain/celestia/bridges/blobstream/SP1Blobstream.ts b/packages/config/src/projects/other/da-beat/blockchain/celestia/bridges/blobstream/SP1Blobstream.ts index c8e759b0235..c2397ef5a94 100644 --- a/packages/config/src/projects/other/da-beat/blockchain/celestia/bridges/blobstream/SP1Blobstream.ts +++ b/packages/config/src/projects/other/da-beat/blockchain/celestia/bridges/blobstream/SP1Blobstream.ts @@ -87,12 +87,22 @@ export const SP1Blobstream = CELESTIA_BLOBSTREAM({ The Blobstream bridge is composed of three main components: the **Blobstream** contract, the **Succinct Gateway** contracts, and the **Verifier** contracts.
By default, Blobstream operates asynchronously, handling requests in a fulfillment-based manner. First, zero-knowledge proofs of Celestia block ranges are requested for proving. Requests can be submitted either off-chain through the Succinct API, or onchain through the requestCall() method of the Succinct Gateway smart contract. Alternatively, it is possible to run an SP1 Blobstream operator with local proving, allowing for self-generating the proofs. - Once a proving request is received, the off-chain prover generates the proof and submits it to Blobstream contract. The Blobstream contract verifies the proof with the corresponding verifier contract and, if successful, stores the data commitment in storage.
+ Once a proving request is received, the off-chain prover generates the proof and relays it to the Blobstream contract. The Blobstream contract verifies the proof with the corresponding verifier contract and, if successful, stores the data commitment in storage.
Verifying a header range includes verifying tendermint consensus (header signatures are 2/3 of stake) and verifying the data commitment root. By default, Blobstream on Ethereum is updated by the Succinct operator at a regular cadence of ${ethereumUpdateInterval} hour. For Blobstream on Arbitrum, the update interval is ${arbitrumUpdateInterval} hour, and for Blobstream on Base, the update interval is ${baseUpdateInterval} hour. `, + references: [ + { + text: 'SP1 Blobstream Operator', + href: 'https://github.com/succinctlabs/sp1-blobstream/blob/b35c92bfcfc9a1711ea014cc871d6dd610dd5392/script/bin/operator.rs', + }, + { + text: 'Succinct Gateway - Etherscan', + href: 'https://etherscan.io/address/0x6c7a05e0AE641c6559fD76ac56641778B6eCd776#code#F1#L148', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/blockchain/celestia/celestia.ts b/packages/config/src/projects/other/da-beat/blockchain/celestia/celestia.ts index 438fb066490..722deaa7ce6 100644 --- a/packages/config/src/projects/other/da-beat/blockchain/celestia/celestia.ts +++ b/packages/config/src/projects/other/da-beat/blockchain/celestia/celestia.ts @@ -64,6 +64,7 @@ export const celestia: DaLayer = { To ensure data availability, Celestia light nodes perform sampling on the 2k x 2k data matrix. Each light node randomly selects a set of unique coordinates within the extended matrix and requests the corresponding data shares and Merkle proofs from full nodes. Currently, a Celestia light node must perform a minimum of 16 samples before declaring that a block is available. This sampling rate ensures that given the minimum number of unavailable shares, a light client will sample at least one unavailable share with a 99% probability.\n + For more details on DAS probabilistic analysis, see the Fraud and Data Availability Proofs paper.\n ![DAS](/images/da-layer-technology/celestia/das.png#center) @@ -76,6 +77,28 @@ export const celestia: DaLayer = { Applications can then retrieve the data by querying the Celestia blockchain for the data root of the blob and the namespace of the application. The data can be reconstructed by querying the Celestia network for the shares of the data matrix and reconstructing the data using the erasure coding scheme. `, + references: [ + { + text: 'Celestia Specifications', + href: 'https://celestiaorg.github.io/celestia-app/specs/index.html', + }, + { + text: 'Celestia Core - CometBFT', + href: 'https://github.com/celestiaorg/celestia-core', + }, + { + text: 'Celestia Node - Data Retrieval', + href: 'https://github.com/celestiaorg/celestia-node/blob/9ff58570ef86e505b718abfc755fd18643a2284c/share/eds/retriever.go#L60', + }, + { + text: 'Bad Encoding Fraud Proofs', + href: 'https://github.com/celestiaorg/celestia-node/blob/main/docs/adr/adr-006-fraud-service.md', + }, + { + text: 'Fraud and Data Availability Proofs paper', + href: 'https://arxiv.org/pdf/1809.09044', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/blockchain/memo/memo.ts b/packages/config/src/projects/other/da-beat/blockchain/memo/memo.ts index 73d339f57f1..e60654f908b 100644 --- a/packages/config/src/projects/other/da-beat/blockchain/memo/memo.ts +++ b/packages/config/src/projects/other/da-beat/blockchain/memo/memo.ts @@ -53,12 +53,30 @@ export const memo: DaLayer = { If no light node challenges the commitment within the specified validity period, the proof of availability is considered successful.
Submitting a fraud proof is expected to require multiple rounds of interaction, with pledges and rewards for both the challenger and the challenged. - The proving smart contracts ([FileProof](https://scan.metamemo.one:8080/address/0x58C3Ab98546879a859EDBa3252A9d38E43C9cbee/), [ControlFileProof](https://scan.metamemo.one:8080/address/0x6eEc7578dBAD9dcc1CA159A9Df0A73233548b89a/), [ProxyFileProof](https://scan.metamemo.one:8080/address/0x0c7B5A9Ce5e33B4fa1BcFaF9e8722B1c1c23243B/)) are deployed to the Memo chain and their code is not public, so the logic and security of the proving contracts is not verifiable. + The proving smart contracts (FileProof, ControlFileProof, ProxyFileProof) are deployed to the Memo chain and their code is not public, so the logic and security of the proving contracts is not verifiable. ## L2s Data Availability L2s can upload transaction data to Meeda through the MemoDA RPC, and the Meeda operator will generate an aggregated KZG polynomial commitment based on the transaction data. Nodes can request transaction data on Meeda based on the commitment value of the transaction data. `, + references: [ + { + text: 'Meeda Documentation - Architecture', + href: 'https://memolabs.gitbook.io/meeda/readme/overview-of-meeda/whats-meeda', + }, + { + text: 'Meeda FileProof contract - Metamemo Scan', + href: 'https://scan.metamemo.one:8080/address/0x58C3Ab98546879a859EDBa3252A9d38E43C9cbee/', + }, + { + text: 'Meeda ControlFileProof contract - Metamemo Scan', + href: 'https://scan.metamemo.one:8080/address/0x6eEc7578dBAD9dcc1CA159A9Df0A73233548b89a/', + }, + { + text: 'Meeda ProxyFileProof contract - Metamemo Scan', + href: 'https://scan.metamemo.one:8080/address/0x0c7B5A9Ce5e33B4fa1BcFaF9e8722B1c1c23243B/', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/blockchain/near/near.ts b/packages/config/src/projects/other/da-beat/blockchain/near/near.ts index 063556708b8..78ae3da2666 100644 --- a/packages/config/src/projects/other/da-beat/blockchain/near/near.ts +++ b/packages/config/src/projects/other/da-beat/blockchain/near/near.ts @@ -67,9 +67,9 @@ export const near: DaLayer = { ![Near Chunks](/images/da-layer-technology/near/nearChunks.png#center)\n - Becoming a block producer requires locking (staking) a certain amount of tokens, currently 25,500 NEAR. Staking operates through a threshold PoS mechanism, where a user’s stake must exceed the protocol's seat price—determined by the total NEAR tokens staked by other validators—in order to become a validator. The largest stakers at the beginning of a particular epoch are selected as block producers for that epoch. Each block producer is randomly assigned a certain number of shards to manage. + Becoming a block producer requires locking (staking) a certain amount of tokens, currently around 11,000 NEAR. Staking operates through a threshold PoS mechanism, where a user’s stake must exceed the protocol's seat price—determined by the total NEAR tokens staked by other validators—in order to become a validator. The largest stakers at the beginning of a particular epoch are selected as block producers for that epoch. Each block producer is randomly assigned a certain number of shards to manage. Before the epoch starts, the block producer downloads the state of the shard(s) they are assigned to (they have 1 epoch to complete this download). Throughout the epoch, they collect transactions that affect their assigned shard(s) and apply them to the state. 
NEAR nodes have an automatic 'garbage collection' routine that deletes the state of previous shards after five epochs, freeing up unused storage. - Within an epoch (12 hours), the main chain block and shard block production schedule is determined by a randomness seed generated at the beginning of the epoch. For each block height, a main chain block producer is assigned. + Within an epoch (12 hours), the main chain block and shard block production schedule is determined by a randomness seed generated at the beginning of the epoch. For each block height, a main chain block producer is assigned. Validators participate in several validation rounds within the epoch. For each round, one validator in each shard is chosen to be the chunk producer, and one validator from the entire set is chosen to be the block producer. Validators can serve as both block and chunk producers, but they maintain separate stakes for these roles. The shard block producer is responsible for producing the part of the block related to their shard, known as a chunk. The chunk contains the list of transactions for the shard to be included in the block, as well as the Merkle root of the resulting state. Each main chain block contains either one or zero chunks per shard, depending on whether the shard can keep up with the main chain block production speed. @@ -96,12 +96,34 @@ export const near: DaLayer = { ### Finality - Finality is determined by the Nightshade Finality Gadget (NFG). A block is considered final after two consecutive blocks are built on the same fork, making the block that is two blocks behind (t-2) final. Reverting a finalized block would require slashing at least one-third of the total stake. + Finality is determined by a modified Doomslug finality gadget. A block is considered final after two consecutive blocks are built on the same fork, making the block that is two blocks behind (t-2) final. Reverting a finalized block would require slashing at least one-third of the total stake. ## L2s Data Availability A rollup can utilize a dedicated Data Availability (DA) smart contract on a NEAR shard, known as a Blob Store contract, where it posts data as standard NEAR transactions. All transactions are converted into Receipts, and depending on their actions, some receipts may be processed over two blocks. Regarding data retrieval, full nodes prune Receipts after 3 epochs (approximately 36 hours). Once the pruning window expires, the data remains accessible only through archive nodes. 
`, + references: [ + { + text: 'Near Nightshade Consensus', + href: 'https://pages.near.org/downloads/Nightshade.pdf', + }, + { + text: 'Near Doomslug Finality Gadget', + href: 'https://discovery-domain.org/papers/doomslug.pdf', + }, + { + text: 'Near documentation', + href: 'https://dev.near.org/documentation/', + }, + { + text: 'Near Core - Architecture', + href: 'https://near.github.io/nearcore/', + }, + { + text: 'Blob Store contract - Nuffle Labs', + href: 'https://github.com/Nuffle-Labs/data-availability/blob/5026b81aa5d941aaf4dd1b23bc219b9150e84405/contracts/blob-store/src/lib.rs', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/dac/bridges/eigenDABridge.ts b/packages/config/src/projects/other/da-beat/dac/bridges/eigenDABridge.ts index 38acdbb3845..a6ed7e9caf8 100644 --- a/packages/config/src/projects/other/da-beat/dac/bridges/eigenDABridge.ts +++ b/packages/config/src/projects/other/da-beat/dac/bridges/eigenDABridge.ts @@ -227,7 +227,7 @@ export const eigenDAbridge = { ![EigenDA architecture once stored](/images/da-bridge-technology/eigenda/architecture1.png#center) The EigenDAServiceManager acts as a DA bridge smart contract verifying data availability claims from operators via signature verification. - The checkSignature function checks that the signature of all signers plus non-signers is equal to the registered quorum aggregated public key from the BLS registry. The quorum aggregated public key gets updated every time an operator is registered. + The checkSignatures() function checks that the signature of all signers plus non-signers is equal to the registered quorum aggregated public key from the BLS registry. The quorum aggregated public key gets updated every time an operator is registered. The bridge requires a threshold of signatures to be met before the data commitment is accepted. To verify the threshold is met, the function takes the total stake at the reference block for the quorum from the StakeRegistry, and it subtracts the stake of non signers to get the signed stake. Finally, it checks that the signed stake over the total stake is more than the required stake threshold. @@ -241,6 +241,16 @@ export const eigenDAbridge = { Ejectors can eject maximum ${ejectableStakePercent}% of the total stake in a ${formatSeconds(ejectionRateLimitWindow[0])} window for the ETH quorum, and the same stake percentage over a ${formatSeconds(ejectionRateLimitWindow[1])} window for the EIGEN quorum. An ejected operator can rejoin the quorum after ${formatSeconds(ejectionCooldown)}. `, + references: [ + { + text: 'EigenDA Registry Coordinator - Etherscan', + href: 'https://etherscan.io/address/0xdcabf0be991d4609096cce316df08d091356e03f', + }, + { + text: 'EigenDA Service Manager - Etherscan', + href: 'https://etherscan.io/address/0x58fDE694Db83e589ABb21A6Fe66cb20Ce5554a07', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/dac/eigenDA.ts b/packages/config/src/projects/other/da-beat/dac/eigenDA.ts index 681ec300fc4..8943ee17dcf 100644 --- a/packages/config/src/projects/other/da-beat/dac/eigenDA.ts +++ b/packages/config/src/projects/other/da-beat/dac/eigenDA.ts @@ -44,14 +44,14 @@ export const eigenDA: DaLayer = { ### Operators Stake Update - EigenDA operators' stake for quorum verification is fetched from the EigenDA StakeRegistry contract. 
To keep the stake in sync with changes in share balances in the EigenLayer DelegationManager (e.g., due to tokens delegated/undelegated to operators), the permissionless updateOperatorStake() function on the RegistryCoordinator contract needs to be called periodically. This function updates the operators' quorum weight in the StakeRegistry contract based on the operators' shares in the EigenLayer DelegationManager contract. + EigenDA operators' stake for quorum verification is fetched from the EigenDA StakeRegistry contract. To keep the stake in sync with changes in share balances in the EigenLayer DelegationManager (e.g., due to tokens delegated/undelegated to operators), the permissionless updateOperators() function on the RegistryCoordinator contract needs to be called periodically. This function updates the operators' quorum weight in the StakeRegistry contract based on the operators' shares in the EigenLayer DelegationManager contract. ![EigenDA operator stake sync](/images/da-layer-technology/eigenda/stakesync.png#center) ### Operators Blob Storage and Retrieval The process of storing a blob on EigenDA works as follows. A sequencer submits blobs to the EigenDA Disperser, which erasure codes the blobs into chunks and generates KZG commitments and proofs for each chunk, certifying the correctness of the data. The disperser then sends the chunks, KZG commitments, and KZG proofs to the operators. Multiple operators are responsible for storing chunks of the encoded data blobs and their associated KZG commitment and proof. - Once the chunks, KZG commitments, and KZG proofs are sent to the operators, each of them generates a signature certifying that they have stored the data. These signatures are then sent to the Disperser which aggregates them and uploads them to Ethereum by sending a transaction to the EigenDAServiceManager (the DA bridge). + Once the chunks, KZG commitments, and KZG proofs are sent to the operators, each of them generates a signature certifying that they have stored the data. These signatures are then sent to the Disperser which aggregates them and submits them to Ethereum by sending a transaction to the EigenDAServiceManager (the DA bridge). ![EigenDA storing/retrieving](/images/da-layer-technology/eigenda/storing-retrieving.png#center) @@ -62,6 +62,20 @@ export const eigenDA: DaLayer = { The EigenDARollupUtils.sol library's verifyBlob() function can then be used by L2s to verify that a data blob is included within a confirmed batch in the EigenDAServiceManager. This function is not used by the EigenDAServiceManager contract itself, but rather by L2 systems to prove inclusion of the blob in the EigenDAServiceManager contract, and that their trust assumptions (i.e., batch confirmation threshold) were as expected. 
`, + references: [ + { + text: 'EigenDA - Documentation', + href: 'https://docs.eigenda.xyz/overview', + }, + { + text: 'EigenDA Disperser - Source Code', + href: 'https://github.com/Layr-Labs/eigenda/blob/2ed86a0c1dd730b56c8235031c19e08a9837bde8/disperser/batcher/batcher.go', + }, + { + text: 'EigenDA Rollup Utils - Source Code', + href: 'https://github.com/Layr-Labs/eigenda-utils/blob/c4cbc9ec078aeca3e4a04bd278e2fb136bf3e6de/src/libraries/EigenDARollupUtils.sol', + }, + ], risks: [ { category: 'Users can be censored if', diff --git a/packages/config/src/projects/other/da-beat/dac/fraxtalDA.ts b/packages/config/src/projects/other/da-beat/dac/fraxtalDA.ts index 6fd77308215..19e2b2c963a 100644 --- a/packages/config/src/projects/other/da-beat/dac/fraxtalDA.ts +++ b/packages/config/src/projects/other/da-beat/dac/fraxtalDA.ts @@ -40,6 +40,16 @@ export const fraxtalDA: DaLayer = { The sequencer attests to data availability by posting an IPFS hash to an on-chain inbox contract on Ethereum. L2 nodes derive the L2 chain from the L1 by reading transactions commitments from this sequencer inbox. When reading from the inbox, the op-node verifies that the commitment hash is a valid IPFS CID. If the data corresponding to the hash is missing from IPFS, the op-node will halt, preventing further derivation of the L2 chain. `, + references: [ + { + text: 'FraxtalDA Documentation', + href: 'https://docs.frax.com/fraxtal/network/data-availability', + }, + { + text: 'Fraxtal DA Follower - Source Code', + href: 'https://github.com/FraxFinance/fraxtal-da-follower/blob/791e849b41465e1e00377f57c8f0c49d4b13caa8/main.go', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/dac/redstoneDA.ts b/packages/config/src/projects/other/da-beat/dac/redstoneDA.ts index 6c4f58045cc..a710290385f 100644 --- a/packages/config/src/projects/other/da-beat/dac/redstoneDA.ts +++ b/packages/config/src/projects/other/da-beat/dac/redstoneDA.ts @@ -58,8 +58,18 @@ export const redstoneDA: DaLayer = { A challenge can be resolved by publishing the preimage data within an additional ${daResolveWindow}. In such case, a portion of the challenger bond is burned, with the exact amount estimated as the cost incurred by the resolver to publish the full data, meaning that the resolver and challenger will approximately lose the same amount of funds. The system is not secure if the malicious sequencer is able to outspend the altruistic challengers. - If instead, after a challenge, the preimage data is not published, the chain reorgs to the last fully derivable state. + If instead, after a challenge, the preimage data is not published, the chain reorgs to the last fully derivable state. `, + references: [ + { + text: 'Alt-DA Specification', + href: 'https://github.com/ethereum-optimism/specs/blob/main/specs/experimental/alt-da.md', + }, + { + text: 'Security Considerations - Ethresear.ch ', + href: 'https://ethresear.ch/t/universal-plasma-and-da-challenges/18629', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/dac/xterioDA.ts b/packages/config/src/projects/other/da-beat/dac/xterioDA.ts index 47aae5dbc06..8f91c8b58c9 100644 --- a/packages/config/src/projects/other/da-beat/dac/xterioDA.ts +++ b/packages/config/src/projects/other/da-beat/dac/xterioDA.ts @@ -61,6 +61,16 @@ export const xterioDA: DaLayer = { The system is not secure if the malicious sequencer is able to outspend the altruistic challengers. 
If instead, after a challenge, the preimage data is not published, the chain reorgs to the last fully derivable state. `, + references: [ + { + text: 'Alt-DA Specification', + href: 'https://github.com/ethereum-optimism/specs/blob/main/specs/experimental/alt-da.md', + }, + { + text: 'Security Considerations - Ethresear.ch ', + href: 'https://ethresear.ch/t/universal-plasma-and-da-challenges/18629', + }, + ], risks: [ { category: 'Funds can be lost if', diff --git a/packages/config/src/projects/other/da-beat/templates/anytrust-template.ts b/packages/config/src/projects/other/da-beat/templates/anytrust-template.ts index 23f3f43cc13..57186773076 100644 --- a/packages/config/src/projects/other/da-beat/templates/anytrust-template.ts +++ b/packages/config/src/projects/other/da-beat/templates/anytrust-template.ts @@ -184,6 +184,12 @@ export function AnytrustDAC(template: TemplateVars): DacDaLayer { display: layerDisplay, technology: { description: layerTechnology, + references: [ + { + text: 'Inside AnyTrust - Arbitrum Docs', + href: 'https://docs.arbitrum.io/how-arbitrum-works/inside-anytrust', + }, + ], risks: template.layer?.technology?.risks, }, usedIn, diff --git a/packages/config/src/projects/other/da-beat/templates/polygoncdk-template.ts b/packages/config/src/projects/other/da-beat/templates/polygoncdk-template.ts index 5be4394a2b1..580f5d1bef7 100644 --- a/packages/config/src/projects/other/da-beat/templates/polygoncdk-template.ts +++ b/packages/config/src/projects/other/da-beat/templates/polygoncdk-template.ts @@ -177,6 +177,12 @@ export function PolygoncdkDAC(template: TemplateVars): DacDaLayer { technology: { description: layerTechnology, risks: template.layer?.technology?.risks, + references: [ + { + text: 'Polygon CDK Validium Documentation', + href: 'https://docs.polygon.technology/cdk/architecture/cdk-validium/#data-availability-committee-dac', + }, + ], }, usedIn, bridges: [dacBridge], diff --git a/packages/config/src/projects/other/da-beat/templates/starkex-template.ts b/packages/config/src/projects/other/da-beat/templates/starkex-template.ts index fdf9b9e0b9e..e9136394679 100644 --- a/packages/config/src/projects/other/da-beat/templates/starkex-template.ts +++ b/packages/config/src/projects/other/da-beat/templates/starkex-template.ts @@ -182,6 +182,12 @@ export function StarkexDAC(template: TemplateVars): DacDaLayer { technology: { description: layerTechnology, risks: template.layer?.technology?.risks, + references: [ + { + text: 'StarkEx Committee Service - Source Code', + href: 'https://github.com/starkware-libs/starkex-data-availability-committee', + }, + ], }, usedIn, bridges: [dacBridge],
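Every `references` block introduced by this patch uses the same two-field entry shape. A minimal TypeScript sketch of that shape follows, with a hypothetical `validateReferences` helper; the interface name and helper are illustrative assumptions, not the repo's actual exported types.

```ts
// Illustrative sketch only: type and helper names are assumptions, not the
// repo's actual definitions. It mirrors the { text, href } shape used by
// every `references` array added in this patch.
interface DaReference {
  text: string // human-readable label, e.g. 'Celestia Specifications'
  href: string // link to documentation, source code, or a block explorer
}

// Example entry, copied from the avail.ts hunk above.
const availDocs: DaReference = {
  text: 'Avail Documentation',
  href: 'https://docs.availproject.org/docs/learn-about-avail/consensus/npos',
}

// Hypothetical config-time sanity check for newly added entries.
function validateReferences(refs: DaReference[]): void {
  for (const ref of refs) {
    if (ref.text.trim().length === 0 || !/^https?:\/\//.test(ref.href)) {
      throw new Error(`Malformed reference: ${JSON.stringify(ref)}`)
    }
  }
}

validateReferences([availDocs])
```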