Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Hip 551 hapi verbs #17390

Merged
merged 12 commits into from
Jan 29, 2025
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@
TOKEN_CANCEL_AIRDROP,
TOKEN_REJECT -> TokenService.NAME;

case UTIL_PRNG -> UtilService.NAME;
case UTIL_PRNG, ATOMIC_BATCH -> UtilService.NAME;

Check warning on line 109 in hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServiceScopeLookup.java

View check run for this annotation

Codecov / codecov/patch

hedera-node/hedera-app/src/main/java/com/hedera/node/app/services/ServiceScopeLookup.java#L109

Added line #L109 was not covered by tests

case SYSTEM_DELETE -> switch (txBody.systemDeleteOrThrow().id().kind()) {
case CONTRACT_ID -> ContractService.NAME;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright (C) 2024 Hedera Hashgraph, LLC
* Copyright (C) 2024-2025 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -220,6 +220,8 @@ public static TransactionResponse submit(
.cancelAirdrop(transaction);
case TokenClaimAirdrop -> clients.getTokenSvcStub(nodeAccountId, false, false)
.claimAirdrop(transaction);
case AtomicBatch -> clients.getUtilSvcStub(nodeAccountId, false, false)
.atomicBatch(transaction);
default -> throw new IllegalArgumentException(functionality + " is not a transaction");
};
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,7 @@ public abstract class HapiSpecOperation implements SpecOperation {
protected Optional<ControlForKey[]> controlOverrides = Optional.empty();
protected Map<Key, SigControl> overrides = Collections.EMPTY_MAP;

protected Optional<Function<HapiSpec, Key>> batchKey = Optional.empty();
protected Optional<Long> fee = Optional.empty();
protected List<Function<HapiSpec, CustomFeeLimit>> maxCustomFeeList = new ArrayList<>();
protected Optional<Long> validDurationSecs = Optional.empty();
Expand Down Expand Up @@ -292,6 +293,7 @@ protected Consumer<TransactionBody.Builder> bodyDef(final HapiSpec spec) {
Duration.newBuilder().setSeconds(s).build()));
genRecord.ifPresent(builder::setGenerateRecord);
memo.ifPresent(builder::setMemo);
batchKey.ifPresent(k -> builder.setBatchKey(k.apply(spec)));
};
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@
import com.hederahashgraph.api.proto.java.TransactionBody;
import com.hederahashgraph.api.proto.java.TransactionGetReceiptResponse;
import com.hederahashgraph.api.proto.java.TransactionReceipt;
import com.hederahashgraph.api.proto.java.TransactionRecord;
import com.hederahashgraph.api.proto.java.TransactionResponse;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
Expand Down Expand Up @@ -871,4 +872,19 @@ public boolean hasActualStatus() {
// Returns the status from the most recently resolved receipt.
// NOTE(review): assumes lastReceipt is already set — callers should guard with
// hasActualStatus() first, or this will NPE before the txn has resolved.
public ResponseCodeEnum getActualStatus() {
    return lastReceipt.getStatus();
}

/**
 * Adopts the receipt of an externally obtained transaction record (e.g. the record of an
 * inner transaction resolved by an atomic batch) as this operation's outcome, then runs the
 * standard post-resolution state update.
 *
 * @param record the record whose receipt should become this op's receipt
 * @param spec the active spec, forwarded to {@code updateStateOf}
 * @throws Throwable if the downstream state update fails
 */
public void updateStateFromRecord(TransactionRecord record, HapiSpec spec) throws Throwable {
    final var receipt = record.getReceipt();
    this.lastReceipt = receipt;
    this.actualStatus = receipt.getStatus();
    updateStateOf(spec);
}

/**
 * Designates the HIP-551 batch key for this operation, resolved lazily from the spec's key
 * registry when the transaction body is built.
 *
 * @param key the registry name under which the batch key is stored
 * @return this operation, for fluent chaining
 */
public T batchKey(String key) {
    this.batchKey = Optional.of(s -> s.registry().getKey(key));
    return self();
}

// Exposes the explicitly configured target node for this operation, if any —
// e.g. so HapiAtomicBatch can assign a default node to inner txns before signing.
public Optional<AccountID> getNode() {
    return node;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import com.hedera.services.bdd.spec.HapiSpec;
import com.hedera.services.bdd.spec.HapiSpecSetup;
import com.hedera.services.bdd.spec.utilops.mod.BodyMutation;
import com.hederahashgraph.api.proto.java.AtomicBatchTransactionBody;
import com.hederahashgraph.api.proto.java.ConsensusCreateTopicTransactionBody;
import com.hederahashgraph.api.proto.java.ConsensusDeleteTopicTransactionBody;
import com.hederahashgraph.api.proto.java.ConsensusSubmitMessageTransactionBody;
Expand Down Expand Up @@ -467,4 +468,8 @@ public Consumer<TokenClaimAirdropTransactionBody.Builder> defaultDefTokenClaimAi
// Default customization for a TokenAirdrop transaction body: leave the builder untouched.
public Consumer<TokenAirdropTransactionBody.Builder> defaultDefTokenAirdropTransactionBody() {
    return builder -> {};
}

// Default customization for an AtomicBatch (HIP-551) transaction body: leave the builder untouched.
public Consumer<AtomicBatchTransactionBody.Builder> defaultDefAtomicBatchTransactionBody() {
    return builder -> {};
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@
import com.hedera.services.bdd.spec.transactions.token.HapiTokenUpdateNfts;
import com.hedera.services.bdd.spec.transactions.token.HapiTokenWipe;
import com.hedera.services.bdd.spec.transactions.token.TokenMovement;
import com.hedera.services.bdd.spec.transactions.util.HapiAtomicBatch;
import com.hedera.services.bdd.spec.transactions.util.HapiUtilPrng;
import com.hedera.services.bdd.spec.utilops.CustomSpecAssert;
import com.hederahashgraph.api.proto.java.ContractCreateTransactionBody;
Expand Down Expand Up @@ -770,4 +771,8 @@ public static HapiUtilPrng hapiPrng() {
// Creates a UtilPrng op requesting a bounded pseudorandom number.
// NOTE(review): the exact bound semantics (inclusive/exclusive) are defined by
// HapiUtilPrng / the UtilPrng handler — confirm there.
public static HapiUtilPrng hapiPrng(int range) {
    return new HapiUtilPrng(range);
}

// Creates a HIP-551 atomic batch op wrapping the given inner operations, which are
// submitted together in a single AtomicBatch transaction.
public static HapiAtomicBatch atomicBatch(HapiTxnOp<?>... ops) {
    return new HapiAtomicBatch(ops);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,140 @@
/*
* Copyright (C) 2020-2025 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.spec.transactions.util;

import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord;
import static com.hedera.services.bdd.spec.transactions.TxnUtils.extractTxnId;
import static com.hedera.services.bdd.spec.transactions.TxnUtils.suFrom;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.SUCCESS;

import com.google.common.base.MoreObjects;
import com.hedera.node.app.hapi.fees.usage.BaseTransactionMeta;
import com.hedera.node.app.hapi.fees.usage.crypto.CryptoCreateMeta;
import com.hedera.node.app.hapi.fees.usage.state.UsageAccumulator;
import com.hedera.node.app.hapi.utils.fee.SigValueObj;
import com.hedera.services.bdd.spec.HapiSpec;
import com.hedera.services.bdd.spec.fees.AdapterUtils;
import com.hedera.services.bdd.spec.queries.meta.HapiGetTxnRecord;
import com.hedera.services.bdd.spec.transactions.HapiTxnOp;
import com.hederahashgraph.api.proto.java.AtomicBatchTransactionBody;
import com.hederahashgraph.api.proto.java.FeeData;
import com.hederahashgraph.api.proto.java.HederaFunctionality;
import com.hederahashgraph.api.proto.java.Key;
import com.hederahashgraph.api.proto.java.Transaction;
import com.hederahashgraph.api.proto.java.TransactionBody;
import com.hederahashgraph.api.proto.java.TransactionID;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * HAPI spec operation that submits a HIP-551 {@code AtomicBatch} transaction wrapping one or
 * more inner {@link HapiTxnOp}s. Each inner op is fully signed up-front and embedded in the
 * batch body; on a successful batch, every inner op's state is refreshed from its own
 * transaction record.
 */
public class HapiAtomicBatch extends HapiTxnOp<HapiAtomicBatch> {
    private static final Logger log = LogManager.getLogger(HapiAtomicBatch.class);

    // Sentinel node account applied to inner txns that never chose a target node.
    private static final String DEFAULT_NODE_ACCOUNT_ID = "0.0.0";
    // Inner operations in submission order; immutable snapshot of the constructor args.
    private final List<HapiTxnOp<?>> operationsToBatch;
    // Maps each inner transaction's id back to its op, so records can be matched up
    // after the batch resolves successfully.
    private final Map<TransactionID, HapiTxnOp<?>> operationsMap = new HashMap<>();

    public HapiAtomicBatch(HapiTxnOp<?>... ops) {
        this.operationsToBatch = List.of(ops);
    }

    @Override
    public HederaFunctionality type() {
        return HederaFunctionality.AtomicBatch;
    }

    @Override
    protected HapiAtomicBatch self() {
        return this;
    }

    @Override
    protected long feeFor(final HapiSpec spec, final Transaction txn, final int numPayerKeys) throws Throwable {
        // TODO: Implement proper estimate for AtomicBatch; flat 0.2 hbar placeholder until then.
        return 20_000_000L; // spec.fees().forActivityBasedOp(HederaFunctionality.AtomicBatch, this::usageEstimate, txn,
        // numPayerKeys);
    }

    // Currently unused: referenced only by the commented-out fee lookup above.
    private FeeData usageEstimate(final TransactionBody txn, final SigValueObj svo) {
        // TODO: check for correct estimation of the batch — this borrows CryptoCreate's usage math
        final var baseMeta = new BaseTransactionMeta(txn.getMemoBytes().size(), 0);
        final var opMeta = new CryptoCreateMeta(txn.getCryptoCreateAccount());
        final var accumulator = new UsageAccumulator();
        cryptoOpsUsage.cryptoCreateUsage(suFrom(svo), baseMeta, opMeta, accumulator);
        return AdapterUtils.feeDataFrom(accumulator);
    }

    /**
     * Builds the AtomicBatch body by signing every inner operation and appending its
     * serialized transaction, recording each inner txn id for later record retrieval.
     */
    @Override
    protected Consumer<TransactionBody.Builder> opBodyDef(final HapiSpec spec) throws Throwable {
        final AtomicBatchTransactionBody opBody = spec.txns()
                .<AtomicBatchTransactionBody, AtomicBatchTransactionBody.Builder>body(
                        AtomicBatchTransactionBody.class, b -> {
                            for (HapiTxnOp<?> op : operationsToBatch) {
                                try {
                                    // Inner txns must name a node; default unset ones to 0.0.0
                                    if (op.getNode().isEmpty()) {
                                        op.setNode(DEFAULT_NODE_ACCOUNT_ID);
                                    }
                                    // Fully sign the inner operation now
                                    final var transaction = op.signedTxnFor(spec);
                                    // Remember its id so the record can be fetched after success
                                    final var txnId = extractTxnId(transaction);
                                    operationsMap.put(txnId, op);
                                    // Embed the signed inner txn in the batch
                                    b.addTransactions(transaction);
                                } catch (Throwable e) {
                                    throw new RuntimeException(
                                            "Failed to prepare inner transaction for atomic batch", e);
                                }
                            }
                        });
        return b -> b.setAtomicBatch(opBody);
    }

    /**
     * After a successful batch, fetches the record of every inner transaction and pushes its
     * receipt/status back into the corresponding inner op.
     *
     * @throws Throwable if any inner record query fails
     */
    @Override
    public void updateStateOf(HapiSpec spec) throws Throwable {
        // Only a successful batch guarantees inner records exist
        if (actualStatus == SUCCESS) {
            for (Map.Entry<TransactionID, HapiTxnOp<?>> entry : operationsMap.entrySet()) {
                final TransactionID txnId = entry.getKey();
                final HapiTxnOp<?> op = entry.getValue();

                final HapiGetTxnRecord recordQuery =
                        getTxnRecord(txnId).noLogging().assertingNothing();
                final Optional<Throwable> error = recordQuery.execFor(spec);
                if (error.isPresent()) {
                    throw error.get();
                }
                op.updateStateFromRecord(recordQuery.getResponseRecord(), spec);
            }
        }
    }

    @Override
    protected List<Function<HapiSpec, Key>> defaultSigners() {
        // Only the payer signs the batch wrapper; inner txns were pre-signed in opBodyDef
        return List.of(spec -> spec.registry().getKey(effectivePayer(spec)));
    }

    @Override
    protected MoreObjects.ToStringHelper toStringHelper() {
        // Label the inner ops accurately ("range" was a copy-paste slip from HapiUtilPrng)
        return super.toStringHelper().add("operations", operationsToBatch);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/*
* Copyright (C) 2022-2025 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.hedera.services.bdd.suites.hip551;

import static com.hedera.services.bdd.spec.HapiSpec.hapiTest;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.atomicBatch;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.usableTxnIdNamed;
import static com.hedera.services.bdd.suites.HapiSuite.ONE_HBAR;

import com.hedera.services.bdd.junit.HapiTest;
import com.hedera.services.bdd.junit.HapiTestLifecycle;
import java.util.stream.Stream;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DynamicTest;

@HapiTestLifecycle
public class AtomicBatchTest {

    @HapiTest
    @Disabled
    // Verifies only that an atomic batch can be submitted end-to-end.
    // Disabled until handler logic lands; stream validation currently fails in CI.
    public Stream<DynamicTest> simpleBatchTest() {
        final var batchOperator = "batchOperator";
        final var innerTxnPayer = "innerPayer";
        final var innerTxnId = "innerId";

        // The inner transaction carries:
        //  - an explicit txn id, so its record can be queried afterwards
        //  - a batch key, which the batch operator must sign with
        //  - its own payer, distinct from the batch operator
        final var innerTxn = cryptoCreate("foo")
                .balance(ONE_HBAR)
                .txnId(innerTxnId)
                .batchKey(batchOperator)
                .payingWith(innerTxnPayer);

        return hapiTest(
                // account that signs and submits the batch itself
                cryptoCreate(batchOperator).balance(ONE_HBAR),
                // separate payer funding the inner transaction
                cryptoCreate(innerTxnPayer).balance(ONE_HBAR),
                // register the custom txn id so the record stays retrievable
                usableTxnIdNamed(innerTxnId).payerId(innerTxnPayer),
                // submit the batch containing the single inner txn
                atomicBatch(innerTxn).payingWith(batchOperator),
                // fetch and log the inner transaction's record
                getTxnRecord(innerTxnId).assertingNothingAboutHashes().logged(),
                // the inner crypto-create must have taken effect
                getAccountBalance("foo").hasTinyBars(ONE_HBAR));
    }
}