diff --git a/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/CommonPbjConverters.java b/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/CommonPbjConverters.java
index 4d70174339a3..9a878112bb66 100644
--- a/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/CommonPbjConverters.java
+++ b/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/CommonPbjConverters.java
@@ -2,6 +2,7 @@
package com.hedera.node.app.hapi.utils;
import static com.hedera.node.app.hapi.utils.ByteStringUtils.unwrapUnsafelyIfPossible;
+import static com.hedera.pbj.runtime.Codec.DEFAULT_MAX_DEPTH;
import static com.hederahashgraph.api.proto.java.HederaFunctionality.*;
import static java.util.Objects.requireNonNull;
@@ -47,6 +48,8 @@
import java.util.List;
public class CommonPbjConverters {
+ public static final int MAX_PBJ_RECORD_SIZE = 33554432;
+
public static @NonNull com.hederahashgraph.api.proto.java.Query fromPbj(@NonNull Query query) {
requireNonNull(query);
try {
@@ -427,7 +430,8 @@ public static Timestamp toPbj(@NonNull com.hederahashgraph.api.proto.java.Timest
requireNonNull(txBody);
try {
final var bytes = txBody.toByteArray();
- return TransactionBody.PROTOBUF.parse(BufferedData.wrap(bytes));
+ return TransactionBody.PROTOBUF.parse(
+ BufferedData.wrap(bytes), false, false, DEFAULT_MAX_DEPTH, MAX_PBJ_RECORD_SIZE);
} catch (ParseException e) {
throw new RuntimeException(e);
}
diff --git a/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/blocks/BlockStreamAccess.java b/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/blocks/BlockStreamAccess.java
index 1443d2ae303e..d386ecfe5ec9 100644
--- a/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/blocks/BlockStreamAccess.java
+++ b/hedera-node/hapi-utils/src/main/java/com/hedera/node/app/hapi/utils/blocks/BlockStreamAccess.java
@@ -1,7 +1,9 @@
// SPDX-License-Identifier: Apache-2.0
package com.hedera.node.app.hapi.utils.blocks;
+import static com.hedera.node.app.hapi.utils.CommonPbjConverters.MAX_PBJ_RECORD_SIZE;
import static com.hedera.node.app.hapi.utils.exports.recordstreaming.RecordStreamingUtils.SIDECAR_ONLY_TOKEN;
+import static com.hedera.pbj.runtime.Codec.DEFAULT_MAX_DEPTH;
import static java.util.Comparator.comparing;
import com.hedera.hapi.block.stream.Block;
@@ -159,10 +161,20 @@ public static Block blockFrom(@NonNull final Path path) {
try {
if (fileName.endsWith(".gz")) {
try (final GZIPInputStream in = new GZIPInputStream(Files.newInputStream(path))) {
- return Block.PROTOBUF.parse(Bytes.wrap(in.readAllBytes()));
+ return Block.PROTOBUF.parse(
+ Bytes.wrap(in.readAllBytes()).toReadableSequentialData(),
+ false,
+ false,
+ DEFAULT_MAX_DEPTH,
+ MAX_PBJ_RECORD_SIZE);
}
} else {
- return Block.PROTOBUF.parse(Bytes.wrap(Files.readAllBytes(path)));
+ return Block.PROTOBUF.parse(
+ Bytes.wrap(Files.readAllBytes(path)).toReadableSequentialData(),
+ false,
+ false,
+ DEFAULT_MAX_DEPTH,
+ MAX_PBJ_RECORD_SIZE);
}
} catch (IOException | ParseException e) {
throw new RuntimeException("Failed reading block @ " + path, e);
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/HistoryLibrary.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/HistoryLibrary.java
index 467d371dec22..c293533850c7 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/HistoryLibrary.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/HistoryLibrary.java
@@ -25,6 +25,11 @@ public interface HistoryLibrary {
*/
Bytes EMPTY_PUBLIC_KEY = Bytes.wrap(new byte[32]);
+ /**
+ * Placeholder metadata for the genesis WRAPS proof.
+ */
+ byte[] GENESIS_WRAPS_METADATA = new byte[1280];
+
/**
* An address book for use in the history library.
* @param weights the weights of the nodes in the address book
@@ -252,4 +257,9 @@ Proof constructIncrementalWrapsProof(
* @return true if the proof is valid; false otherwise
*/
boolean isValidWraps(byte[] compressedProof);
+
+ /**
+ * Returns whether the WRAPS prover is ready to be used.
+ */
+ boolean wrapsProverReady();
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/handlers/HistoryProofKeyPublicationHandler.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/handlers/HistoryProofKeyPublicationHandler.java
index fa0249018b76..fb7bff1363fd 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/handlers/HistoryProofKeyPublicationHandler.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/handlers/HistoryProofKeyPublicationHandler.java
@@ -13,7 +13,6 @@
import com.hedera.node.app.spi.workflows.PreHandleContext;
import com.hedera.node.app.spi.workflows.PureChecksContext;
import com.hedera.node.app.spi.workflows.TransactionHandler;
-import com.hedera.node.config.data.TssConfig;
import edu.umd.cs.findbugs.annotations.NonNull;
import javax.inject.Inject;
import javax.inject.Singleton;
@@ -63,8 +62,7 @@ public void handle(@NonNull final HandleContext context) throws HandleException
controllers.getAnyInProgress().ifPresent(controller -> {
final var publication =
new WrapsMessagePublication(nodeId, message, op.phase(), context.consensusNow());
- if (controller.addWrapsMessagePublication(
- publication, historyStore, context.configuration().getConfigData(TssConfig.class))) {
+ if (controller.addWrapsMessagePublication(publication, historyStore)) {
historyStore.addWrapsMessage(controller.constructionId(), publication);
}
});
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryLibraryImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryLibraryImpl.java
index 75f6d09db8eb..4a64bd72fe65 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryLibraryImpl.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryLibraryImpl.java
@@ -19,7 +19,6 @@
* Default implementation of the {@link HistoryLibrary}.
*/
public class HistoryLibraryImpl implements HistoryLibrary {
- private static final byte[] DUMMY_HINTS_KEY = new byte[1280];
public static final SplittableRandom RANDOM = new SplittableRandom();
public static final WRAPSLibraryBridge WRAPS = WRAPSLibraryBridge.getInstance();
@@ -131,7 +130,7 @@ public Proof constructGenesisWrapsProof(
addressBook.publicKeys(),
addressBook.weights(),
null,
- DUMMY_HINTS_KEY,
+ GENESIS_WRAPS_METADATA,
aggregatedSignature,
addressBook.signersMask(signers));
}
@@ -169,4 +168,9 @@ public boolean isValidWraps(@NonNull final byte[] compressedProof) {
requireNonNull(compressedProof);
return WRAPS.verifyCompressedProof(compressedProof);
}
+
+ @Override
+ public boolean wrapsProverReady() {
+ return WRAPSLibraryBridge.isProofSupported();
+ }
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryProver.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryProver.java
index 4e5c6f1bcfe9..c442503129f3 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryProver.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryProver.java
@@ -1,10 +1,9 @@
// SPDX-License-Identifier: Apache-2.0
package com.hedera.node.app.history.impl;
-import static java.util.Objects.requireNonNull;
-
import com.hedera.hapi.node.state.history.HistoryProof;
import com.hedera.hapi.node.state.history.HistoryProofConstruction;
+import com.hedera.hapi.node.state.history.HistoryProofVote;
import com.hedera.hapi.node.state.history.ProofKey;
import com.hedera.node.app.history.HistoryLibrary;
import com.hedera.node.app.history.ReadableHistoryStore.WrapsMessagePublication;
@@ -31,7 +30,7 @@
*
*
* Implementations are allowed to be completely asynchronous internally, and most implementations will likely converge
- * to an outcome by submitting votes via {@link HistorySubmissions#submitProofVote(long, HistoryProof)}. However, a
+ * to an outcome by submitting votes via {@link HistorySubmissions#submitExplicitProofVote(long, HistoryProof)}. However, a
* simple implementation could also return a completed proof from a synchronous call to {@link #advance}.
*
* Since implementations are expected to also be stateful, a {@link ProofController} will have a {@link HistoryProver}
@@ -44,6 +43,7 @@ public interface HistoryProver {
interface Factory {
HistoryProver create(
long selfId,
+ @NonNull TssConfig tssConfig,
@NonNull SchnorrKeyPair schnorrKeyPair,
@Nullable HistoryProof sourceProof,
@NonNull RosterTransitionWeights weights,
@@ -119,24 +119,28 @@ Outcome advance(
* @param constructionId the construction ID
* @param publication the WRAPS message publication
* @param writableHistoryStore the writable history store
- * @param tssConfig the TSS configuration
* @return true if the publication was needed by this prover, false otherwise
*/
boolean addWrapsSigningMessage(
long constructionId,
@NonNull WrapsMessagePublication publication,
- @NonNull WritableHistoryStore writableHistoryStore,
- @NonNull TssConfig tssConfig);
+ @NonNull WritableHistoryStore writableHistoryStore);
/**
* Replays a WRAPS message publication that previously reached consensus.
* @param constructionId the construction ID
* @param publication the WRAPS message publication
*/
- default void replayWrapsSigningMessage(
- final long constructionId, @NonNull final WrapsMessagePublication publication) {
- requireNonNull(publication);
- }
+ void replayWrapsSigningMessage(long constructionId, @NonNull WrapsMessagePublication publication);
+
+ /**
+ * Observes a proof vote.
+ *
+ * @param nodeId the node ID
+ * @param vote the vote
+ * @param proofFinalized whether this vote finalized the proof
+ */
+ void observeProofVote(long nodeId, @NonNull HistoryProofVote vote, boolean proofFinalized);
/**
* Returns a list of proof keys from the given map.
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryServiceImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryServiceImpl.java
index dca6a5310c07..630e3b09a609 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryServiceImpl.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistoryServiceImpl.java
@@ -90,7 +90,8 @@ public void reconcile(
construction,
historyStore,
activeHintsConstruction,
- historyStore.getActiveConstruction());
+ historyStore.getActiveConstruction(),
+ tssConfig);
controller.advanceConstruction(now, metadata, historyStore, isActive, tssConfig);
}
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistorySubmissions.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistorySubmissions.java
index a2d8b59bc81f..c728c308bbc9 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistorySubmissions.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/HistorySubmissions.java
@@ -76,11 +76,26 @@ public CompletableFuture submitWrapsSigningMessage(
* @param proof history proof to vote for
* @return a future that completes with the submission
*/
- public CompletableFuture submitProofVote(final long constructionId, @NonNull final HistoryProof proof) {
+ public CompletableFuture submitExplicitProofVote(
+ final long constructionId, @NonNull final HistoryProof proof) {
requireNonNull(proof);
logger.info("Submitting proof vote for construction #{}", constructionId);
final var vote = HistoryProofVote.newBuilder().proof(proof).build();
return submitIfActive(
b -> b.historyProofVote(new HistoryProofVoteTransactionBody(constructionId, vote)), onFailure);
}
+
+ /**
+ * Submits a history proof vote to the network.
+ * @param constructionId the construction id to vote on
+ * @param congruentNodeId the node id that has already voted for the same proof
+ * @return a future that completes with the submission
+ */
+ public CompletableFuture submitCongruentProofVote(final long constructionId, final long congruentNodeId) {
+ logger.info("Submitting proof vote congruent to node{} for construction #{}", congruentNodeId, constructionId);
+ final var vote =
+ HistoryProofVote.newBuilder().congruentNodeId(congruentNodeId).build();
+ return submitIfActive(
+ b -> b.historyProofVote(new HistoryProofVoteTransactionBody(constructionId, vote)), onFailure);
+ }
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/InertProofController.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/InertProofController.java
index 5faf74838ecf..9f1c621c8f80 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/InertProofController.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/InertProofController.java
@@ -60,11 +60,9 @@ public void addProofVote(
@Override
public boolean addWrapsMessagePublication(
@NonNull final WrapsMessagePublication publication,
- @NonNull final WritableHistoryStore writableHistoryStore,
- @NonNull final TssConfig tssConfig) {
+ @NonNull final WritableHistoryStore writableHistoryStore) {
requireNonNull(publication);
requireNonNull(writableHistoryStore);
- requireNonNull(tssConfig);
return false;
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofController.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofController.java
index 37cd539e587c..0a51bfa185be 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofController.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofController.java
@@ -58,12 +58,9 @@ void advanceConstruction(
*
* @param publication the proof key publication
* @param writableHistoryStore the writable history store
- * @param tssConfig the TSS configuration
*/
boolean addWrapsMessagePublication(
- @NonNull WrapsMessagePublication publication,
- @NonNull WritableHistoryStore writableHistoryStore,
- @NonNull TssConfig tssConfig);
+ @NonNull WrapsMessagePublication publication, @NonNull WritableHistoryStore writableHistoryStore);
/**
* If this controller's construction is not already complete, considers updating its state with this history
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofControllerImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofControllerImpl.java
index 14e23dd6d6f0..91b523351a1f 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofControllerImpl.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofControllerImpl.java
@@ -76,13 +76,17 @@ public ProofControllerImpl(
@NonNull final RosterTransitionWeights weights,
@NonNull final Executor executor,
@NonNull final HistorySubmissions submissions,
+ @NonNull final WrapsMpcStateMachine machine,
@NonNull final List keyPublications,
@NonNull final List wrapsMessagePublications,
@NonNull final Map votes,
@NonNull final HistoryService historyService,
@NonNull final HistoryLibrary historyLibrary,
@NonNull final HistoryProver.Factory proverFactory,
- @Nullable final HistoryProof sourceProof) {
+ @Nullable final HistoryProof sourceProof,
+ @NonNull final TssConfig tssConfig) {
+ requireNonNull(machine);
+ requireNonNull(tssConfig);
this.selfId = selfId;
this.executor = requireNonNull(executor);
this.submissions = requireNonNull(submissions);
@@ -107,6 +111,7 @@ public ProofControllerImpl(
: sourceProof.targetProofKeys().stream().collect(toMap(ProofKey::nodeId, ProofKey::key));
this.prover = proverFactory.create(
selfId,
+ tssConfig,
schnorrKeyPair,
sourceProof,
weights,
@@ -149,7 +154,6 @@ public void advanceConstruction(
if (isActive) {
ensureProofKeyPublished();
}
- log.info("Construction #{} still waiting for hinTS verification key", construction.constructionId());
return;
}
// Have the hinTS verification key, but not yet assembling the history or computing the WRAPS proof
@@ -192,16 +196,13 @@ public void addProofKeyPublication(@NonNull final ProofKeyPublication publicatio
@Override
public boolean addWrapsMessagePublication(
@NonNull final WrapsMessagePublication publication,
- @NonNull final WritableHistoryStore writableHistoryStore,
- @NonNull final TssConfig tssConfig) {
+ @NonNull final WritableHistoryStore writableHistoryStore) {
requireNonNull(publication);
requireNonNull(writableHistoryStore);
- requireNonNull(tssConfig);
if (construction.hasTargetProof()) {
return false;
}
- return requireNonNull(prover)
- .addWrapsSigningMessage(constructionId(), publication, writableHistoryStore, tssConfig);
+ return requireNonNull(prover).addWrapsSigningMessage(constructionId(), publication, writableHistoryStore);
}
@Override
@@ -230,6 +231,8 @@ public void addProofVote(
.map(Map.Entry::getKey)
.findFirst();
maybeWinningProof.ifPresent(proof -> finishProof(historyStore, proof));
+ // Let our prover know about the vote to optimize its choice of explicit or congruent voting
+ requireNonNull(prover).observeProofVote(nodeId, vote, maybeWinningProof.isPresent());
}
@Override
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofControllers.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofControllers.java
index 36d0a38fa125..a99936376f90 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofControllers.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ProofControllers.java
@@ -12,10 +12,12 @@
import com.hedera.node.app.history.ReadableHistoryStore;
import com.hedera.node.app.service.roster.impl.ActiveRosters;
import com.hedera.node.app.spi.info.NodeInfo;
+import com.hedera.node.config.data.TssConfig;
import com.hedera.pbj.runtime.io.buffer.Bytes;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.Supplier;
import javax.inject.Inject;
@@ -30,6 +32,7 @@ public class ProofControllers {
private final HistoryLibrary historyLibrary;
private final HistoryService historyService;
private final HistorySubmissions submissions;
+ private final WrapsMpcStateMachine machine;
private final Supplier selfNodeInfoSupplier;
/**
@@ -46,13 +49,15 @@ public ProofControllers(
@NonNull final HistoryLibrary historyLibrary,
@NonNull final HistorySubmissions submissions,
@NonNull final Supplier selfNodeInfoSupplier,
- @NonNull final HistoryService historyService) {
+ @NonNull final HistoryService historyService,
+ @NonNull final WrapsMpcStateMachine machine) {
this.executor = requireNonNull(executor);
this.keyAccessor = requireNonNull(keyAccessor);
this.historyLibrary = requireNonNull(historyLibrary);
this.submissions = requireNonNull(submissions);
this.selfNodeInfoSupplier = requireNonNull(selfNodeInfoSupplier);
this.historyService = requireNonNull(historyService);
+ this.machine = requireNonNull(machine);
}
/**
@@ -63,6 +68,7 @@ public ProofControllers(
* @param historyStore the history store
* @param activeHintsConstruction the active hinTS construction, if any
* @param activeProofConstruction the active proof construction, if any
+ * @param tssConfig the TSS configuration
* @return the result of the operation
*/
public @NonNull ProofController getOrCreateFor(
@@ -70,7 +76,8 @@ public ProofControllers(
@NonNull final HistoryProofConstruction construction,
@NonNull final ReadableHistoryStore historyStore,
@Nullable final HintsConstruction activeHintsConstruction,
- @NonNull final HistoryProofConstruction activeProofConstruction) {
+ @NonNull final HistoryProofConstruction activeProofConstruction,
+ @NonNull final TssConfig tssConfig) {
requireNonNull(activeRosters);
requireNonNull(construction);
requireNonNull(historyStore);
@@ -80,7 +87,12 @@ public ProofControllers(
controller.cancelPendingWork();
}
controller = newControllerFor(
- activeRosters, construction, historyStore, activeHintsConstruction, activeProofConstruction);
+ activeRosters,
+ construction,
+ historyStore,
+ activeHintsConstruction,
+ activeProofConstruction,
+ tssConfig);
}
return requireNonNull(controller);
}
@@ -114,6 +126,7 @@ public Optional getAnyInProgress() {
* @param historyStore the history store
* @param activeHintsConstruction the active hinTS construction, if any
* @param activeProofConstruction the active proof construction
+ * @param tssConfig the TSS configuration
* @return the controller
*/
private ProofController newControllerFor(
@@ -121,7 +134,8 @@ private ProofController newControllerFor(
@NonNull final HistoryProofConstruction construction,
@NonNull final ReadableHistoryStore historyStore,
@Nullable final HintsConstruction activeHintsConstruction,
- @NonNull final HistoryProofConstruction activeProofConstruction) {
+ @NonNull final HistoryProofConstruction activeProofConstruction,
+ @NonNull final TssConfig tssConfig) {
final var weights = activeRosters.transitionWeights(maybeWeightsFrom(activeHintsConstruction));
if (!weights.sourceNodesHaveTargetThreshold()) {
return new InertProofController(construction.constructionId());
@@ -133,6 +147,8 @@ private ProofController newControllerFor(
final var selfId = selfNodeInfoSupplier.get().nodeId();
final var schnorrKeyPair = keyAccessor.getOrCreateSchnorrKeyPair(construction.constructionId());
final var sourceProof = activeProofConstruction.targetProof();
+ final HistoryProver.Factory proverFactory = (s, t, k, p, w, r, x, l, m) -> new WrapsHistoryProver(
+ s, t.wrapsMessageGracePeriod(), k, p, w, r, CompletableFuture::delayedExecutor, x, l, m, machine);
return new ProofControllerImpl(
selfId,
schnorrKeyPair,
@@ -140,13 +156,15 @@ private ProofController newControllerFor(
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
votes,
historyService,
historyLibrary,
- WrapsHistoryProver::new,
- sourceProof);
+ proverFactory,
+ sourceProof,
+ tssConfig);
}
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ReadableHistoryStoreImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ReadableHistoryStoreImpl.java
index aa034970cfd3..88bdbd88377a 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ReadableHistoryStoreImpl.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/ReadableHistoryStoreImpl.java
@@ -4,7 +4,6 @@
import static com.hedera.hapi.util.HapiUtils.asInstant;
import static com.hedera.node.app.history.ReadableHistoryStore.WrapsMessagePublication.allFromHistory;
import static com.hedera.node.app.history.schemas.V059HistorySchema.ACTIVE_PROOF_CONSTRUCTION_STATE_ID;
-import static com.hedera.node.app.history.schemas.V059HistorySchema.HISTORY_SIGNATURES_STATE_ID;
import static com.hedera.node.app.history.schemas.V059HistorySchema.LEDGER_ID_STATE_ID;
import static com.hedera.node.app.history.schemas.V059HistorySchema.NEXT_PROOF_CONSTRUCTION_STATE_ID;
import static com.hedera.node.app.history.schemas.V059HistorySchema.PROOF_KEY_SETS_STATE_ID;
@@ -16,7 +15,6 @@
import com.hedera.hapi.node.state.history.HistoryProofConstruction;
import com.hedera.hapi.node.state.history.HistoryProofVote;
import com.hedera.hapi.node.state.history.ProofKeySet;
-import com.hedera.hapi.node.state.history.RecordedHistorySignature;
import com.hedera.hapi.node.state.history.WrapsMessageHistory;
import com.hedera.hapi.node.state.primitives.ProtoBytes;
import com.hedera.hapi.platform.state.NodeId;
@@ -43,7 +41,6 @@ public class ReadableHistoryStoreImpl implements ReadableHistoryStore {
private final ReadableSingletonState nextConstruction;
private final ReadableSingletonState activeConstruction;
private final ReadableKVState proofKeySets;
- private final ReadableKVState signatures;
private final ReadableKVState votes;
private final ReadableKVState wrapsMessageHistories;
@@ -53,7 +50,6 @@ public ReadableHistoryStoreImpl(@NonNull final ReadableStates states) {
this.nextConstruction = states.getSingleton(NEXT_PROOF_CONSTRUCTION_STATE_ID);
this.activeConstruction = states.getSingleton(ACTIVE_PROOF_CONSTRUCTION_STATE_ID);
this.proofKeySets = states.get(PROOF_KEY_SETS_STATE_ID);
- this.signatures = states.get(HISTORY_SIGNATURES_STATE_ID);
this.votes = states.get(PROOF_VOTES_STATE_ID);
this.wrapsMessageHistories = states.get(WRAPS_MESSAGE_HISTORIES_STATE_ID);
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WrapsHistoryProver.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WrapsHistoryProver.java
index 4cec87933126..65f56beb117f 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WrapsHistoryProver.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WrapsHistoryProver.java
@@ -6,11 +6,13 @@
import static com.hedera.hapi.node.state.history.WrapsPhase.R2;
import static com.hedera.hapi.node.state.history.WrapsPhase.R3;
import static com.hedera.hapi.util.HapiUtils.asInstant;
+import static com.hedera.node.app.hapi.utils.CommonUtils.noThrowSha384HashOf;
import static com.hedera.node.app.history.HistoryLibrary.EMPTY_PUBLIC_KEY;
+import static com.hedera.node.app.history.HistoryLibrary.GENESIS_WRAPS_METADATA;
import static com.hedera.node.app.history.impl.ProofControllers.isWrapsExtensible;
-import static com.hedera.node.app.service.roster.impl.RosterTransitionWeights.moreThanHalfOfTotal;
import static java.util.Collections.emptySortedMap;
import static java.util.Objects.requireNonNull;
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
import com.hedera.cryptography.wraps.Proof;
import com.hedera.hapi.block.stream.AggregatedNodeSignatures;
@@ -18,6 +20,7 @@
import com.hedera.hapi.node.state.history.History;
import com.hedera.hapi.node.state.history.HistoryProof;
import com.hedera.hapi.node.state.history.HistoryProofConstruction;
+import com.hedera.hapi.node.state.history.HistoryProofVote;
import com.hedera.hapi.node.state.history.WrapsPhase;
import com.hedera.hapi.node.state.history.WrapsSigningState;
import com.hedera.node.app.history.HistoryLibrary;
@@ -31,15 +34,19 @@
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.security.SecureRandom;
+import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.EnumMap;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.SortedMap;
-import java.util.TreeMap;
+import java.util.TreeSet;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
+import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -54,9 +61,11 @@ public class WrapsHistoryProver implements HistoryProver {
private static final Logger log = LogManager.getLogger(WrapsHistoryProver.class);
private final long selfId;
+ private final Duration wrapsMessageGracePeriod;
private final SchnorrKeyPair schnorrKeyPair;
private final Map proofKeys;
private final RosterTransitionWeights weights;
+ private final Delayer delayer;
private final Executor executor;
@Nullable
@@ -64,8 +73,11 @@ public class WrapsHistoryProver implements HistoryProver {
private final HistoryLibrary historyLibrary;
private final HistorySubmissions submissions;
+ private final WrapsMpcStateMachine machine;
+
private final Map> phaseMessages =
new EnumMap<>(WrapsPhase.class);
+ private final Map explicitHistoryProofHashes = new HashMap<>();
/**
* If not null, the WRAPS message being signed for the current construction.
@@ -80,11 +92,18 @@ public class WrapsHistoryProver implements HistoryProver {
private AddressBook targetAddressBook;
/**
- * If not null, the WRAPS message being signed for the current construction.
+ * If not null, the hash of the target address book.
*/
@Nullable
private byte[] targetAddressBookHash;
+ /**
+ * If non-null, the entropy used to generate the R1 message. (If this node rejoins the network
+ * after a restart, having lost its entropy, it cannot continue and the protocol will time out.)
+ */
+ @Nullable
+ private byte[] entropy;
+
/**
* Future that resolves on submission of this node's R1 signing message.
*/
@@ -104,24 +123,32 @@ public class WrapsHistoryProver implements HistoryProver {
private CompletableFuture r3Future;
/**
- * Future that resolves on submission of this node's vote for the aggregate signature.
+ * If non-null, the history proof we have constructed (recursive or otherwise).
*/
@Nullable
- private CompletableFuture voteFuture;
+ private HistoryProof historyProof;
/**
- * If non-null, the entropy used to generate the R1 message. (If this node rejoins the network
- * after a restart, having lost its entropy, it cannot continue and the protocol will time out.)
+ * Future that resolves on the completion of the vote decision post-jitter.
*/
@Nullable
- private byte[] entropy;
+ private CompletableFuture voteDecisionFuture;
+
+ /**
+ * Future that resolves on submission of this node's vote for the aggregate signature.
+ */
+ @Nullable
+ private CompletableFuture voteFuture;
/**
* The current WRAPS phase; starts with R1 and advances as messages are received.
*/
private WrapsPhase wrapsPhase = R1;
- private sealed interface WrapsPhaseOutput permits MessagePhaseOutput, ProofPhaseOutput, AggregatePhaseOutput {}
+ private sealed interface WrapsPhaseOutput
+ permits NoopOutput, MessagePhaseOutput, ProofPhaseOutput, AggregatePhaseOutput {}
+
+ private record NoopOutput(String reason) implements WrapsPhaseOutput {}
private record MessagePhaseOutput(byte[] message) implements WrapsPhaseOutput {}
@@ -129,23 +156,53 @@ private record AggregatePhaseOutput(byte[] signature, List nodeIds) implem
private record ProofPhaseOutput(byte[] compressed, byte[] uncompressed) implements WrapsPhaseOutput {}
+ private enum VoteChoice {
+ SUBMIT,
+ SKIP
+ }
+
+ private record VoteDecision(VoteChoice choice, @Nullable Long congruentNodeId) {
+ static VoteDecision explicit() {
+ return new VoteDecision(VoteChoice.SUBMIT, null);
+ }
+
+ static VoteDecision skip() {
+ return new VoteDecision(VoteChoice.SKIP, null);
+ }
+
+ static VoteDecision congruent(long nodeId) {
+ return new VoteDecision(VoteChoice.SUBMIT, nodeId);
+ }
+ }
+
+ public interface Delayer {
+ @NonNull
+ Executor delayedExecutor(long delay, @NonNull TimeUnit unit, @NonNull Executor executor);
+ }
+
public WrapsHistoryProver(
final long selfId,
+ @NonNull final Duration wrapsMessageGracePeriod,
@NonNull final SchnorrKeyPair schnorrKeyPair,
@Nullable final HistoryProof sourceProof,
@NonNull final RosterTransitionWeights weights,
@NonNull final Map proofKeys,
+ @NonNull final Delayer delayer,
@NonNull final Executor executor,
@NonNull final HistoryLibrary historyLibrary,
- @NonNull final HistorySubmissions submissions) {
+ @NonNull final HistorySubmissions submissions,
+ @NonNull final WrapsMpcStateMachine machine) {
this.selfId = selfId;
this.sourceProof = sourceProof;
+ this.wrapsMessageGracePeriod = requireNonNull(wrapsMessageGracePeriod);
this.schnorrKeyPair = requireNonNull(schnorrKeyPair);
this.weights = requireNonNull(weights);
this.proofKeys = requireNonNull(proofKeys);
+ this.delayer = requireNonNull(delayer);
this.executor = requireNonNull(executor);
this.historyLibrary = requireNonNull(historyLibrary);
this.submissions = requireNonNull(submissions);
+ this.machine = requireNonNull(machine);
}
@NonNull
@@ -180,7 +237,13 @@ public Outcome advance(
targetAddressBook = AddressBook.from(weights.targetNodeWeights(), nodeId -> targetProofKeys
.getOrDefault(nodeId, EMPTY_PUBLIC_KEY)
.toByteArray());
- wrapsMessage = historyLibrary.computeWrapsMessage(targetAddressBook, targetMetadata.toByteArray());
+ // In general the metadata in the WRAPS message is the target metadata (which is the hinTS verification
+ // key in the current TSS scheme); but for the special case of a source proof with a non-recursive
+ // proof, the additional signing work we need to do is actually over certain placeholder metadata
+ final var metadata = (tssConfig.wrapsEnabled() && proofIsWrapsGenesis())
+ ? GENESIS_WRAPS_METADATA
+ : targetMetadata.toByteArray();
+ wrapsMessage = historyLibrary.computeWrapsMessage(targetAddressBook, metadata);
targetAddressBookHash = historyLibrary.hashAddressBook(targetAddressBook);
}
publishIfNeeded(
@@ -193,17 +256,42 @@ public Outcome advance(
public boolean addWrapsSigningMessage(
final long constructionId,
@NonNull final WrapsMessagePublication publication,
- @NonNull final WritableHistoryStore writableHistoryStore,
- @NonNull final TssConfig tssConfig) {
+ @NonNull final WritableHistoryStore writableHistoryStore) {
requireNonNull(publication);
requireNonNull(writableHistoryStore);
- requireNonNull(tssConfig);
- return receiveWrapsSigningMessage(constructionId, publication, writableHistoryStore, tssConfig);
+ return receiveWrapsSigningMessage(constructionId, publication, writableHistoryStore);
}
@Override
public void replayWrapsSigningMessage(long constructionId, @NonNull WrapsMessagePublication publication) {
- receiveWrapsSigningMessage(constructionId, publication, null, null);
+ receiveWrapsSigningMessage(constructionId, publication, null);
+ }
+
+ @Override
+ public void observeProofVote(
+ final long nodeId, @NonNull final HistoryProofVote vote, final boolean proofFinalized) {
+ requireNonNull(vote);
+ // If we’ve already decided & sent our vote, nothing to do
+ if (voteDecisionFuture == null || voteDecisionFuture.isDone()) {
+ return;
+ }
+ if (proofFinalized) {
+ log.info("Observed finalized proof via node{}; skipping vote", nodeId);
+ tryCompleteVoteDecision(VoteDecision.skip());
+ return;
+ }
+ // Explicit vote case
+ if (vote.hasProof()) {
+ final var proof = vote.proofOrElse(HistoryProof.DEFAULT);
+ // Always store a hash – useful if we haven't finished our own proof yet.
+ final var hash = hashOf(proof);
+ explicitHistoryProofHashes.put(nodeId, hash);
+ // If we already have our proof, see if it matches.
+ if (historyProof != null && selfProofHashOrThrow().equals(hash)) {
+ log.info("Observed matching explicit proof from node{}; voting congruent instead", nodeId);
+ tryCompleteVoteDecision(VoteDecision.congruent(nodeId));
+ }
+ }
}
@Override
@@ -239,74 +327,28 @@ public boolean cancelPendingWork() {
private boolean receiveWrapsSigningMessage(
final long constructionId,
@NonNull final WrapsMessagePublication publication,
- @Nullable final WritableHistoryStore writableHistoryStore,
- @Nullable final TssConfig tssConfig) {
+ @Nullable final WritableHistoryStore writableHistoryStore) {
+ final var transition = machine.onNext(publication, wrapsPhase, weights, wrapsMessageGracePeriod, phaseMessages);
log.info(
- "Received {} message from node{} for construction #{} (current phase={})",
+ "Received {} message from node{} for construction #{} in phase={} -> {} (new phase={})",
publication.phase(),
publication.nodeId(),
constructionId,
- wrapsPhase);
- if (publication.phase() != wrapsPhase) {
- return false;
- }
- final var startPhase = wrapsPhase;
- final var messages = phaseMessages.computeIfAbsent(wrapsPhase, p -> new TreeMap<>());
- if (wrapsPhase == R1) {
- if (messages.putIfAbsent(publication.nodeId(), publication) != null) {
- return false;
- }
- final long r1Weight = messages.values().stream()
- .mapToLong(p -> weights.sourceWeightOf(p.nodeId()))
- .sum();
- log.info(
- "Total weight of {} messages is {} (of {} total)",
- wrapsPhase,
- r1Weight,
- weights.sourceNodeWeights().values().stream()
- .mapToLong(Long::longValue)
- .sum());
- if (r1Weight >= moreThanHalfOfTotal(weights.sourceNodeWeights())) {
- if (writableHistoryStore != null && tssConfig != null) {
+ wrapsPhase,
+ transition.publicationAccepted() ? "accepted" : "rejected",
+ transition.newCurrentPhase());
+ if (transition.publicationAccepted()) {
+ if (transition.newCurrentPhase() != wrapsPhase) {
+ wrapsPhase = transition.newCurrentPhase();
+ log.info("Advanced to {} for construction #{}", wrapsPhase, constructionId);
+ if (writableHistoryStore != null) {
writableHistoryStore.advanceWrapsSigningPhase(
- constructionId, R2, publication.receiptTime().plus(tssConfig.wrapsMessageGracePeriod()));
+ constructionId, wrapsPhase, transition.gracePeriodEndTimeUpdate());
}
- wrapsPhase = R2;
- }
- } else if (wrapsPhase == R2) {
- final var r1Nodes = phaseMessages.get(R1).keySet();
- if (!r1Nodes.contains(publication.nodeId())) {
- return false;
- }
- if (messages.putIfAbsent(publication.nodeId(), publication) != null) {
- return false;
- }
- if (messages.keySet().containsAll(r1Nodes)) {
- if (writableHistoryStore != null && tssConfig != null) {
- writableHistoryStore.advanceWrapsSigningPhase(
- constructionId, R3, publication.receiptTime().plus(tssConfig.wrapsMessageGracePeriod()));
- }
- wrapsPhase = R3;
- }
- } else {
- final var r1Nodes = phaseMessages.get(R1).keySet();
- if (!r1Nodes.contains(publication.nodeId())) {
- return false;
- }
- if (messages.putIfAbsent(publication.nodeId(), publication) != null) {
- return false;
- }
- if (messages.keySet().containsAll(r1Nodes)) {
- if (writableHistoryStore != null && tssConfig != null) {
- writableHistoryStore.advanceWrapsSigningPhase(constructionId, AGGREGATE, null);
- }
- wrapsPhase = AGGREGATE;
}
+ return true;
}
- if (wrapsPhase != startPhase) {
- log.info("Advanced to {} for construction #{}", wrapsPhase, constructionId);
- }
- return true;
+ return false;
}
/**
@@ -345,43 +387,39 @@ private void publishIfNeeded(
}
case AggregatePhaseOutput aggregatePhaseOutput -> {
// We are doing a non-recursive proof via an aggregate signature
- final var nonRecursiveProof = new AggregatedNodeSignatures(
+ final var aggregatedNodeSignatures = new AggregatedNodeSignatures(
Bytes.wrap(aggregatePhaseOutput.signature()),
new ArrayList<>(phaseMessages
.get(R1)
.keySet()));
- submissions
- .submitProofVote(
- constructionId,
- HistoryProof.newBuilder()
- .targetProofKeys(proofKeyList)
- .targetHistory(new History(
- Bytes.wrap(bookHash), targetMetadata))
- .chainOfTrustProof(
- ChainOfTrustProof.newBuilder()
- .aggregatedNodeSignatures(
- nonRecursiveProof))
- .build())
- .join();
+ final var proof = HistoryProof.newBuilder()
+ .targetProofKeys(proofKeyList)
+ .targetHistory(
+ new History(Bytes.wrap(bookHash), targetMetadata))
+ .chainOfTrustProof(ChainOfTrustProof.newBuilder()
+ .aggregatedNodeSignatures(aggregatedNodeSignatures))
+ .build();
+ scheduleVoteWithJitter(constructionId, tssConfig, proof);
}
case ProofPhaseOutput proofOutput -> {
// We have a WRAPS proof
final var recursiveProof = Bytes.wrap(proofOutput.compressed());
final var uncompressedProof = Bytes.wrap(proofOutput.uncompressed());
- submissions
- .submitProofVote(
- constructionId,
- HistoryProof.newBuilder()
- .targetProofKeys(proofKeyList)
- .targetHistory(new History(
- Bytes.wrap(bookHash), targetMetadata))
- .chainOfTrustProof(
- ChainOfTrustProof.newBuilder()
- .wrapsProof(recursiveProof))
- .uncompressedWrapsProof(uncompressedProof)
- .build())
- .join();
+ final var proof = HistoryProof.newBuilder()
+ .targetProofKeys(proofKeyList)
+ .targetHistory(
+ new History(Bytes.wrap(bookHash), targetMetadata))
+ .chainOfTrustProof(ChainOfTrustProof.newBuilder()
+ .wrapsProof(recursiveProof))
+ .uncompressedWrapsProof(uncompressedProof)
+ .build();
+ scheduleVoteWithJitter(constructionId, tssConfig, proof);
}
+ case NoopOutput noopOutput ->
+ log.info(
+ "Skipping publication of {} output: {}",
+ phase,
+ noopOutput.reason());
}
},
executor)
@@ -396,6 +434,62 @@ private void publishIfNeeded(
}
}
+ private void scheduleVoteWithJitter(
+ final long constructionId, @NonNull final TssConfig tssConfig, @NonNull final HistoryProof proof) {
+ this.historyProof = proof;
+
+ final var selfProofHash = hashOf(proof);
+ for (final var entry : explicitHistoryProofHashes.entrySet()) {
+ if (selfProofHash.equals(entry.getValue())) {
+ log.info("Already observed explicit proof from node{}; voting congruent immediately", entry.getKey());
+ this.voteDecisionFuture = CompletableFuture.completedFuture(VoteDecision.congruent(entry.getKey()));
+ this.voteFuture = submissions.submitCongruentProofVote(constructionId, entry.getKey());
+ return;
+ }
+ }
+
+ this.voteDecisionFuture = new CompletableFuture<>();
+
+ final long jitterMs = computeJitterMs(tssConfig, constructionId);
+ final var delayed = delayer.delayedExecutor(jitterMs, MILLISECONDS, executor);
+
+ // If this is the first thread to complete the vote decision, we submit an explicit vote
+ CompletableFuture.runAsync(() -> tryCompleteVoteDecision(VoteDecision.explicit()), delayed);
+
+ this.voteFuture = voteDecisionFuture.thenCompose(decision -> switch (decision.choice()) {
+ case SKIP -> CompletableFuture.completedFuture(null);
+ case SUBMIT -> {
+ final var congruentNodeId = decision.congruentNodeId();
+ if (congruentNodeId != null) {
+ log.info(
+ "Submitting congruent vote to node{} for construction #{}",
+ congruentNodeId,
+ constructionId);
+ yield submissions.submitCongruentProofVote(constructionId, congruentNodeId);
+ } else {
+ log.info("Submitting explicit proof vote for construction #{}", constructionId);
+ yield submissions.submitExplicitProofVote(constructionId, proof);
+ }
+ }
+ });
+ }
+
+ private void tryCompleteVoteDecision(VoteDecision decision) {
+ final var f = this.voteDecisionFuture;
+ if (f != null && !f.isDone()) {
+ f.complete(decision);
+ }
+ }
+
+ private long computeJitterMs(@NonNull final TssConfig tssConfig, final long constructionId) {
+ final var allNodes = new ArrayList<>(weights.targetNodeWeights().keySet());
+ final int n = allNodes.size();
+ final int selfIndex = allNodes.indexOf(selfId);
+ final int leaderIndex = Math.floorMod((int) constructionId, n);
+ final int rank = Math.floorMod(selfIndex - leaderIndex, n);
+ return tssConfig.wrapsVoteJitterPerRank().toMillis() * rank;
+ }
+
private CompletableFuture outputFuture(
@NonNull final WrapsPhase phase,
@NonNull final TssConfig tssConfig,
@@ -440,54 +534,88 @@ yield new MessagePhaseOutput(historyLibrary.runWrapsPhaseR3(
yield null;
}
case AGGREGATE -> {
- if (entropy != null) {
- final var signature = historyLibrary.runAggregationPhase(
- message,
- rawMessagesFor(R1),
- rawMessagesFor(R2),
- rawMessagesFor(R3),
- publicKeysForR1());
- // Sans source proof, we are at genesis and need an aggregate signature proof right away
- if (sourceProof == null || !tssConfig.wrapsEnabled()) {
- yield new AggregatePhaseOutput(
- signature,
- phaseMessages.get(R1).keySet().stream().toList());
- } else {
- Proof proof;
- if (!isWrapsExtensible(sourceProof)) {
- proof = historyLibrary.constructGenesisWrapsProof(
- requireNonNull(ledgerId).toByteArray(),
- signature,
- phaseMessages.get(R1).keySet(),
- targetBook);
- } else {
- proof = new Proof(
- sourceProof.uncompressedWrapsProof().toByteArray(),
- sourceProof
- .chainOfTrustProofOrThrow()
- .wrapsProofOrThrow()
- .toByteArray());
- }
- final var sourceBook = AddressBook.from(weights.sourceNodeWeights(), nodeId -> proofKeys
- .getOrDefault(nodeId, EMPTY_PUBLIC_KEY)
- .toByteArray());
- proof = historyLibrary.constructIncrementalWrapsProof(
+ final var signature = historyLibrary.runAggregationPhase(
+ message, rawMessagesFor(R1), rawMessagesFor(R2), rawMessagesFor(R3), publicKeysForR1());
+ // Sans source proof, we are at genesis and need an aggregate signature proof right away
+ if (sourceProof == null || !tssConfig.wrapsEnabled()) {
+ final var isValid =
+ historyLibrary.verifyAggregateSignature(message, publicKeysForR1(), signature);
+ if (!isValid) {
+ throw new IllegalStateException("Invalid aggregate signature using nodes "
+ + phaseMessages.get(R1).keySet());
+ }
+ yield new AggregatePhaseOutput(
+ signature,
+ phaseMessages.get(R1).keySet().stream().toList());
+ } else {
+ if (!historyLibrary.wrapsProverReady()) {
+ yield new NoopOutput("WRAPS library is not ready");
+ }
+ final var isValid =
+ historyLibrary.verifyAggregateSignature(message, publicKeysForR1(), signature);
+ if (!isValid) {
+ throw new IllegalStateException("Invalid aggregate signature using nodes "
+ + phaseMessages.get(R1).keySet());
+ }
+ Proof proof;
+ if (!isWrapsExtensible(sourceProof)) {
+ final long now = System.nanoTime();
+ log.info("Constructing genesis WRAPS proof...");
+ proof = historyLibrary.constructGenesisWrapsProof(
requireNonNull(ledgerId).toByteArray(),
- proof.uncompressed(),
- sourceBook,
- targetBook,
- targetMetadata.toByteArray(),
signature,
- phaseMessages.get(R1).keySet());
- yield new ProofPhaseOutput(proof.compressed(), proof.uncompressed());
+ phaseMessages.get(R1).keySet(),
+ targetBook);
+ logElapsed("constructing genesis WRAPS proof", now);
+ } else {
+ proof = new Proof(
+ sourceProof.uncompressedWrapsProof().toByteArray(),
+ sourceProof
+ .chainOfTrustProofOrThrow()
+ .wrapsProofOrThrow()
+ .toByteArray());
}
+ final var sourceBook = AddressBook.from(weights.sourceNodeWeights(), nodeId -> proofKeys
+ .getOrDefault(nodeId, EMPTY_PUBLIC_KEY)
+ .toByteArray());
+ final long now = System.nanoTime();
+ log.info(
+ "Constructing incremental WRAPS proof (WRAPS genesis? {})...",
+ proofIsWrapsGenesis());
+ final var effectiveSignature = proofIsWrapsGenesis()
+ ? sourceProof
+ .chainOfTrustProofOrThrow()
+ .aggregatedNodeSignaturesOrThrow()
+ .aggregatedSignature()
+ .toByteArray()
+ : signature;
+ final Set<Long> effectiveSigners = proofIsWrapsGenesis()
+ ? new TreeSet<>(sourceProof
+ .chainOfTrustProofOrThrow()
+ .aggregatedNodeSignaturesOrThrow()
+ .signingNodeIds())
+ : phaseMessages.get(R1).keySet();
+ proof = historyLibrary.constructIncrementalWrapsProof(
+ requireNonNull(ledgerId).toByteArray(),
+ proof.uncompressed(),
+ sourceBook,
+ targetBook,
+ targetMetadata.toByteArray(),
+ effectiveSignature,
+ effectiveSigners);
+ logElapsed("constructing incremental WRAPS proof", now);
+ yield new ProofPhaseOutput(proof.compressed(), proof.uncompressed());
}
- yield null;
}
},
executor);
}
+ private void logElapsed(@NonNull final String event, final long startNs) {
+ final var duration = Duration.ofNanos(System.nanoTime() - startNs);
+ log.info("FINISHED {} - took {}m {}s", event, duration.toMinutes(), duration.toSecondsPart());
+ }
+
private byte[][] publicKeysForR1() {
return phaseMessages.get(R1).keySet().stream()
.map(nodeId -> proofKeys.get(nodeId).toByteArray())
@@ -518,4 +646,16 @@ private Consumer> consumerOf(@NonNull final WrapsPhase p
case AGGREGATE -> f -> voteFuture = f;
};
}
+
+ private Bytes selfProofHashOrThrow() {
+ return explicitHistoryProofHashes.computeIfAbsent(selfId, k -> hashOf(requireNonNull(historyProof)));
+ }
+
+ private boolean proofIsWrapsGenesis() {
+ return sourceProof != null && !isWrapsExtensible(sourceProof);
+ }
+
+ private static Bytes hashOf(@NonNull final HistoryProof proof) {
+ return noThrowSha384HashOf(HistoryProof.PROTOBUF.toBytes(proof));
+ }
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WrapsMpcStateMachine.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WrapsMpcStateMachine.java
new file mode 100644
index 000000000000..ed42e1bb5bd0
--- /dev/null
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WrapsMpcStateMachine.java
@@ -0,0 +1,122 @@
+// SPDX-License-Identifier: Apache-2.0
+package com.hedera.node.app.history.impl;
+
+import static com.hedera.hapi.node.state.history.WrapsPhase.AGGREGATE;
+import static com.hedera.hapi.node.state.history.WrapsPhase.R1;
+import static com.hedera.hapi.node.state.history.WrapsPhase.R2;
+import static com.hedera.hapi.node.state.history.WrapsPhase.R3;
+import static com.hedera.node.app.service.roster.impl.RosterTransitionWeights.moreThanHalfOfTotal;
+import static java.util.Objects.requireNonNull;
+
+import com.hedera.hapi.node.state.history.WrapsPhase;
+import com.hedera.node.app.history.ReadableHistoryStore.WrapsMessagePublication;
+import com.hedera.node.app.service.roster.impl.RosterTransitionWeights;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.time.Duration;
+import java.time.Instant;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+/**
+ * State machine logic for the multi-party computation (MPC) phases of the WRAPS protocol.
+ */
+@Singleton
+public class WrapsMpcStateMachine {
+ @Inject
+ public WrapsMpcStateMachine() {
+ // Dagger2
+ }
+
+ /**
+ * Represents a transition of the WRAPS signing state machine; may be a no-op transition, and always is
+ * if the publication triggering the transition was not accepted (e.g., because it was for the wrong phase
+ * or was a duplicate).
+ * @param publicationAccepted whether the publication triggering the transition was accepted
+ * @param newCurrentPhase the new current phase after the transition
+ * @param gracePeriodEndTimeUpdate the new grace period end time after the transition, if applicable
+ */
+ public record Transition(
+ boolean publicationAccepted,
+ @NonNull WrapsPhase newCurrentPhase,
+ @Nullable Instant gracePeriodEndTimeUpdate) {
+ public Transition {
+ requireNonNull(newCurrentPhase);
+ }
+
+ public static Transition rejectedAt(@NonNull final WrapsPhase currentPhase) {
+ return new Transition(false, currentPhase, null);
+ }
+
+ public static Transition incorporatedIn(@NonNull final WrapsPhase currentPhase) {
+ return new Transition(true, currentPhase, null);
+ }
+
+ public static Transition advanceTo(
+ @NonNull final WrapsPhase currentPhase, @Nullable final Instant gracePeriodEndTimeUpdate) {
+ return new Transition(true, currentPhase, gracePeriodEndTimeUpdate);
+ }
+ }
+
+ /**
+ * Computes the next state of the WRAPS signing state machine given the current state and a new publication.
+ *
+ * Important: On acceptance, has the side effect of adding the publication to the phase messages map.
+ * @param publication the new publication
+ * @param currentPhase the current phase
+ * @param weights the weights for parties in the MPC
+ * @param gracePeriod the grace period for each phase of the protocol
+ * @param phaseMessages the map of phase messages published so far
+ * @return the transition
+ */
+ public Transition onNext(
+ @NonNull final WrapsMessagePublication publication,
+ @NonNull final WrapsPhase currentPhase,
+ @NonNull final RosterTransitionWeights weights,
+ @NonNull final Duration gracePeriod,
+ @NonNull final Map<WrapsPhase, SortedMap<Long, WrapsMessagePublication>> phaseMessages) {
+ requireNonNull(publication);
+ requireNonNull(currentPhase);
+ requireNonNull(weights);
+ requireNonNull(gracePeriod);
+ requireNonNull(phaseMessages);
+ // The final phase involves publishing votes, not messages, so abort
+ if (currentPhase == AGGREGATE) {
+ return Transition.rejectedAt(AGGREGATE);
+ }
+ // Otherwise the phase should match the current phase
+ if (publication.phase() != currentPhase) {
+ return Transition.rejectedAt(currentPhase);
+ }
+ final var messages = phaseMessages.computeIfAbsent(currentPhase, p -> new TreeMap<>());
+ if (currentPhase == R1) {
+ if (messages.putIfAbsent(publication.nodeId(), publication) != null) {
+ return Transition.rejectedAt(currentPhase);
+ }
+ final long r1Weight = messages.values().stream()
+ .mapToLong(p -> weights.sourceWeightOf(p.nodeId()))
+ .sum();
+ if (r1Weight >= moreThanHalfOfTotal(weights.sourceNodeWeights())) {
+ return Transition.advanceTo(R2, publication.receiptTime().plus(gracePeriod));
+ }
+ } else {
+ final var r1Nodes = phaseMessages.get(R1).keySet();
+ if (!r1Nodes.contains(publication.nodeId())) {
+ return Transition.rejectedAt(currentPhase);
+ }
+ if (messages.putIfAbsent(publication.nodeId(), publication) != null) {
+ return Transition.rejectedAt(currentPhase);
+ }
+ if (messages.keySet().containsAll(r1Nodes)) {
+ final var nextPhase = currentPhase == R2 ? R3 : AGGREGATE;
+ final var nextGracePeriodEndTime =
+ currentPhase == R2 ? publication.receiptTime().plus(gracePeriod) : null;
+ return Transition.advanceTo(nextPhase, nextGracePeriodEndTime);
+ }
+ }
+ return Transition.incorporatedIn(currentPhase);
+ }
+}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WritableHistoryStoreImpl.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WritableHistoryStoreImpl.java
index 45e63e76b8e7..295621333ca8 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WritableHistoryStoreImpl.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/history/impl/WritableHistoryStoreImpl.java
@@ -4,7 +4,6 @@
import static com.hedera.hapi.util.HapiUtils.asTimestamp;
import static com.hedera.node.app.history.impl.ProofControllers.isWrapsExtensible;
import static com.hedera.node.app.history.schemas.V059HistorySchema.ACTIVE_PROOF_CONSTRUCTION_STATE_ID;
-import static com.hedera.node.app.history.schemas.V059HistorySchema.HISTORY_SIGNATURES_STATE_ID;
import static com.hedera.node.app.history.schemas.V059HistorySchema.LEDGER_ID_STATE_ID;
import static com.hedera.node.app.history.schemas.V059HistorySchema.NEXT_PROOF_CONSTRUCTION_STATE_ID;
import static com.hedera.node.app.history.schemas.V059HistorySchema.PROOF_KEY_SETS_STATE_ID;
@@ -20,7 +19,6 @@
import com.hedera.hapi.node.state.history.HistoryProofConstruction;
import com.hedera.hapi.node.state.history.HistoryProofVote;
import com.hedera.hapi.node.state.history.ProofKeySet;
-import com.hedera.hapi.node.state.history.RecordedHistorySignature;
import com.hedera.hapi.node.state.history.WrapsMessageHistory;
import com.hedera.hapi.node.state.history.WrapsSigningState;
import com.hedera.hapi.node.state.primitives.ProtoBytes;
@@ -58,7 +56,6 @@ public class WritableHistoryStoreImpl extends ReadableHistoryStoreImpl implement
private final WritableSingletonState nextConstruction;
private final WritableSingletonState activeConstruction;
private final WritableKVState proofKeySets;
- private final WritableKVState signatures;
private final WritableKVState votes;
private final WritableKVState wrapsMessageHistories;
@@ -68,7 +65,6 @@ public WritableHistoryStoreImpl(@NonNull final WritableStates states) {
this.nextConstruction = states.getSingleton(NEXT_PROOF_CONSTRUCTION_STATE_ID);
this.activeConstruction = states.getSingleton(ACTIVE_PROOF_CONSTRUCTION_STATE_ID);
this.proofKeySets = states.get(PROOF_KEY_SETS_STATE_ID);
- this.signatures = states.get(HISTORY_SIGNATURES_STATE_ID);
this.votes = states.get(PROOF_VOTES_STATE_ID);
this.wrapsMessageHistories = states.get(WRAPS_MESSAGE_HISTORIES_STATE_ID);
}
@@ -315,7 +311,7 @@ private void logNewConstruction(
}
/**
- * Purges the votes for the given construction relative to the given roster.
+ * Purges the publications for the given construction relative to the given roster.
*
* @param sourceRoster the construction
*/
@@ -323,7 +319,6 @@ private void purgePublications(final long constructionId, @NonNull final Roster
sourceRoster.rosterEntries().forEach(entry -> {
final var key = new ConstructionNodeId(constructionId, entry.nodeId());
votes.remove(key);
- signatures.remove(key);
wrapsMessageHistories.remove(key);
});
}
diff --git a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/TransactionChecker.java b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/TransactionChecker.java
index 6ba71fe03d83..34dde1372fd4 100644
--- a/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/TransactionChecker.java
+++ b/hedera-node/hedera-app/src/main/java/com/hedera/node/app/workflows/TransactionChecker.java
@@ -21,8 +21,10 @@
import static com.hedera.node.app.spi.validation.PreCheckValidator.checkMemo;
import static com.hedera.node.app.spi.workflows.PreCheckException.validateFalsePreCheck;
import static com.hedera.node.app.spi.workflows.PreCheckException.validateTruePreCheck;
+import static com.hedera.pbj.runtime.Codec.DEFAULT_MAX_DEPTH;
import static java.util.Objects.requireNonNull;
+import com.google.common.annotations.VisibleForTesting;
import com.hedera.hapi.node.base.AccountID;
import com.hedera.hapi.node.base.HederaFunctionality;
import com.hedera.hapi.node.base.ResponseCodeEnum;
@@ -123,82 +125,65 @@ public TransactionChecker(@NonNull final ConfigProvider configProvider, @NonNull
/**
* Parses and checks the transaction encoded as protobuf in the given buffer.
- *
* @param buffer The buffer containing the protobuf bytes of the transaction
* @return The parsed {@link TransactionInfo}
* @throws PreCheckException If parsing fails or any of the checks fail.
*/
@NonNull
public TransactionInfo parseAndCheck(@NonNull final Bytes buffer) throws PreCheckException {
+ final int maxBytes = maxIngestParseSize();
// Fail fast if there are too many transaction bytes
- if (buffer.length() > maxIngestParseSize()) {
+ if (buffer.length() > maxBytes) {
throw new PreCheckException(TRANSACTION_OVERSIZE);
}
- final var tx = parse(buffer);
- return check(tx);
+ final var tx = parse(buffer, maxBytes);
+ return check(tx, maxBytes);
}
/**
* Parses and checks a signed transaction encoded as protobuf in the given buffer.
- *
* @param buffer The buffer containing the protobuf bytes of the signed transaction
- * @param maxBytes The maximum number of bytes that can exist in the transaction
* @return The parsed {@link TransactionInfo}
* @throws PreCheckException If parsing fails or any of the checks fail.
*/
@NonNull
- public TransactionInfo parseSignedAndCheck(@NonNull final Bytes buffer, final int maxBytes)
- throws PreCheckException {
- // Fail fast if there are too many transaction bytes
- if (buffer.length() > maxBytes) {
- throw new PreCheckException(TRANSACTION_OVERSIZE);
- }
- final var signedTx = parseSigned(buffer);
- return checkSigned(signedTx, buffer);
+ public TransactionInfo parseSignedAndCheck(@NonNull final Bytes buffer) throws PreCheckException {
+ return parseSignedAndCheck(buffer, maxIngestParseSize());
}
/**
* Parses and checks a signed transaction encoded as protobuf in the given buffer.
- *
* @param buffer The buffer containing the protobuf bytes of the signed transaction
+ * @param maxBytes The maximum number of bytes that can exist in the transaction
* @return The parsed {@link TransactionInfo}
* @throws PreCheckException If parsing fails or any of the checks fail.
*/
@NonNull
- public TransactionInfo parseSignedAndCheck(@NonNull final Bytes buffer) throws PreCheckException {
- return parseSignedAndCheck(buffer, maxIngestParseSize());
- }
-
- /**
- * Parse the given {@link Bytes} into a transaction.
- *
- * After verifying that the number of bytes comprising the transaction does not exceed the maximum allowed, the
- * transaction is parsed. A transaction can be checked with {@link #check(Transaction)}.
- *
- * @param buffer the {@code ByteBuffer} with the serialized transaction
- * @return an {@link TransactionInfo} with the parsed and checked entities
- * @throws PreCheckException if the data is not valid
- * @throws NullPointerException if one of the arguments is {@code null}
- */
- @NonNull
- public Transaction parse(@NonNull final Bytes buffer) throws PreCheckException {
- return parseStrict(buffer.toReadableSequentialData(), Transaction.PROTOBUF, INVALID_TRANSACTION);
+ public TransactionInfo parseSignedAndCheck(@NonNull final Bytes buffer, final int maxBytes)
+ throws PreCheckException {
+ // Fail fast if there are too many transaction bytes
+ if (buffer.length() > maxBytes) {
+ throw new PreCheckException(TRANSACTION_OVERSIZE);
+ }
+ final var signedTx = parseSigned(buffer, maxBytes);
+ return checkSigned(signedTx, buffer, maxBytes);
}
/**
* Parse the given {@link Bytes} into a signed transaction.
*
*
After verifying that the number of bytes comprising the transaction does not exceed the maximum allowed, the
- * transaction is parsed. A transaction can be checked with {@link #check(Transaction)}.
+ * transaction is parsed. A transaction can be checked with {@link #check(Transaction, int)}.
*
* @param buffer the {@code ByteBuffer} with the serialized transaction
+ * @param maxSize the maximum size of the data
* @return an {@link TransactionInfo} with the parsed and checked entities
* @throws PreCheckException if the data is not valid
* @throws NullPointerException if one of the arguments is {@code null}
*/
@NonNull
- public SignedTransaction parseSigned(@NonNull final Bytes buffer) throws PreCheckException {
- return parseStrict(buffer.toReadableSequentialData(), SignedTransaction.PROTOBUF, INVALID_TRANSACTION);
+ public SignedTransaction parseSigned(@NonNull final Bytes buffer, final int maxSize) throws PreCheckException {
+ return parseStrict(buffer.toReadableSequentialData(), SignedTransaction.PROTOBUF, INVALID_TRANSACTION, maxSize);
}
/**
@@ -232,13 +217,15 @@ public SignedTransaction parseSigned(@NonNull final Bytes buffer) throws PreChec
*
* Note this method is only used at HAPI ingest, since by the time a transaction has been submitted,
* it no longer has a {@link Transaction} wrapper and is a serialized {@link SignedTransaction}.
+ *
* @param tx the {@link Transaction} that needs to be checked
+ * @param maxSize the maximum size of the data
* @return an {@link TransactionInfo} with the parsed and checked entities
* @throws PreCheckException if the data is not valid
* @throws NullPointerException if one of the arguments is {@code null}
*/
@NonNull
- public TransactionInfo check(@NonNull final Transaction tx) throws PreCheckException {
+ public TransactionInfo check(@NonNull final Transaction tx, final int maxSize) throws PreCheckException {
// NOTE: Since we've already parsed the transaction, we assume that the
// transaction was not too many bytes. This is a safe assumption because
// the code that receives the transaction bytes and parses the transaction
@@ -250,7 +237,10 @@ public TransactionInfo check(@NonNull final Transaction tx) throws PreCheckExcep
if (tx.signedTransactionBytes().length() > 0) {
serializedSignedTx = tx.signedTransactionBytes();
signedTx = parseStrict(
- serializedSignedTx.toReadableSequentialData(), SignedTransaction.PROTOBUF, INVALID_TRANSACTION);
+ serializedSignedTx.toReadableSequentialData(),
+ SignedTransaction.PROTOBUF,
+ INVALID_TRANSACTION,
+ maxSize);
validateFalsePreCheck(
signedTx.useSerializedTxMessageHashAlgorithm(), INVALID_SERIALIZED_TX_MESSAGE_HASH_ALGORITHM);
} else {
@@ -260,7 +250,7 @@ public TransactionInfo check(@NonNull final Transaction tx) throws PreCheckExcep
if (!signedTx.hasSigMap()) {
throw new PreCheckException(INVALID_TRANSACTION_BODY);
}
- return check(signedTx, serializedSignedTx);
+ return check(signedTx, serializedSignedTx, maxSize);
}
/**
@@ -292,17 +282,18 @@ public TransactionInfo check(@NonNull final Transaction tx) throws PreCheckExcep
*
* @param signedTx the {@link SignedTransaction} that needs to be checked
* @param serializedSignedTx if set, the serialized transaction bytes to include in the {@link TransactionInfo}
+ * @param maxBytes the maximum size of the data
* @return an {@link TransactionInfo} with the parsed and checked entities
* @throws PreCheckException if the data is not valid
* @throws NullPointerException if one of the arguments is {@code null}
*/
@NonNull
public TransactionInfo checkSigned(
- @NonNull final SignedTransaction signedTx, @NonNull final Bytes serializedSignedTx)
+ @NonNull final SignedTransaction signedTx, @NonNull final Bytes serializedSignedTx, final int maxBytes)
throws PreCheckException {
requireNonNull(signedTx);
requireNonNull(serializedSignedTx);
- return check(signedTx, serializedSignedTx);
+ return check(signedTx, serializedSignedTx, maxBytes);
}
public TransactionInfo checkParsed(@NonNull final TransactionInfo txInfo) throws PreCheckException {
@@ -622,36 +613,44 @@ private long toSecondsDuration(final long validForSecs, final Instant validStart
* A utility method for strictly parsing a protobuf message, throwing {@link PreCheckException} if the message
* is malformed or contains unknown fields.
*
+ * @param The type of the message to parseStrict.
* @param data The protobuf data to parse.
* @param codec The codec to use for parsing
* @param parseErrorCode The error code to use if the data is malformed or contains unknown fields.
- * @param The type of the message to parseStrict.
+ * @param maxSize the maximum size of the data
* @return The parsed message.
* @throws PreCheckException if the data is malformed or contains unknown fields.
*/
@NonNull
- private T parseStrict(@NonNull ReadableSequentialData data, Codec codec, ResponseCodeEnum parseErrorCode)
+ private T parseStrict(
+ @NonNull final ReadableSequentialData data,
+ @NonNull final Codec codec,
+ @NonNull final ResponseCodeEnum parseErrorCode,
+ final int maxSize)
throws PreCheckException {
try {
- return codec.parseStrict(data);
+ return codec.parse(data, true, false, DEFAULT_MAX_DEPTH, maxSize);
} catch (ParseException e) {
if (e.getCause() instanceof UnknownFieldException) {
// We do not allow newer clients to send transactions to older networks.
throw new PreCheckException(TRANSACTION_HAS_UNKNOWN_FIELDS);
}
-
// Either the protobuf was malformed, or something else failed during parsing
logger.warn("ParseException while parsing protobuf", e);
throw new PreCheckException(parseErrorCode);
}
}
- private TransactionInfo check(@NonNull final SignedTransaction signedTx, @NonNull final Bytes serializedSignedTx)
+ private TransactionInfo check(
+ @NonNull final SignedTransaction signedTx, @NonNull final Bytes serializedSignedTx, final int maxSize)
throws PreCheckException {
validateTruePreCheck(signedTx.hasSigMap(), INVALID_TRANSACTION_BODY);
final var signatureMap = signedTx.sigMapOrThrow();
final var txBody = parseStrict(
- signedTx.bodyBytes().toReadableSequentialData(), TransactionBody.PROTOBUF, INVALID_TRANSACTION_BODY);
+ signedTx.bodyBytes().toReadableSequentialData(),
+ TransactionBody.PROTOBUF,
+ INVALID_TRANSACTION_BODY,
+ maxSize);
final HederaFunctionality functionality;
try {
functionality = HapiUtils.functionOf(txBody);
@@ -698,6 +697,23 @@ private void checkPrefixMismatch(@NonNull final List sigPairs) th
}
}
+ /**
+ * Parse the given {@link Bytes} into a transaction.
+ * After verifying that the number of bytes comprising the transaction does not exceed the maximum allowed, the
+ * transaction is parsed. A transaction can be checked with {@link #check(Transaction, int)}.
+ * @param buffer the {@code ByteBuffer} with the serialized transaction
+ * @param maxSize the maximum size of the data
+ * @return an {@link TransactionInfo} with the parsed and checked entities
+ * @throws PreCheckException if the data is not valid
+ * @throws NullPointerException if one of the arguments is {@code null}
+ */
+ @NonNull
+ @VisibleForTesting
+ Transaction parse(@NonNull final Bytes buffer, final int maxSize) throws PreCheckException {
+ requireNonNull(buffer);
+ return parseStrict(buffer.toReadableSequentialData(), Transaction.PROTOBUF, INVALID_TRANSACTION, maxSize);
+ }
+
/**
* Sorts the list of signature pairs by the prefix of the public key. Sort them such that shorter prefixes come
* before longer prefixes, and if two prefixes are the same length then sort them lexicographically (lower bytes
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/hints/impl/HintsContextTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/hints/impl/HintsContextTest.java
index ca16f2055655..dce9261b513d 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/hints/impl/HintsContextTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/hints/impl/HintsContextTest.java
@@ -76,7 +76,8 @@ private static TssConfig defaultConfig() {
false,
false,
false,
- 2);
+ 2,
+ Duration.ofSeconds(5));
}
@Test
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/handlers/HistoryProofKeyPublicationHandlerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/handlers/HistoryProofKeyPublicationHandlerTest.java
index b723baa66360..73ec7855341e 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/handlers/HistoryProofKeyPublicationHandlerTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/handlers/HistoryProofKeyPublicationHandlerTest.java
@@ -128,18 +128,15 @@ void wrapsMessageGivenToInProgressControllerAndPersistedWhenAccepted() {
given(context.storeFactory()).willReturn(factory);
given(factory.writableStore(WritableHistoryStore.class)).willReturn(store);
given(context.consensusNow()).willReturn(CONSENSUS_NOW);
- given(context.configuration()).willReturn(configuration);
- given(configuration.getConfigData(TssConfig.class)).willReturn(tssConfig);
given(controllers.getAnyInProgress()).willReturn(Optional.of(controller));
- given(controller.addWrapsMessagePublication(
- any(ReadableHistoryStore.WrapsMessagePublication.class), eq(store), eq(tssConfig)))
+ given(controller.addWrapsMessagePublication(any(ReadableHistoryStore.WrapsMessagePublication.class), eq(store)))
.willReturn(true);
given(controller.constructionId()).willReturn(42L);
subject.handle(context);
final var captor = ArgumentCaptor.forClass(ReadableHistoryStore.WrapsMessagePublication.class);
- verify(controller).addWrapsMessagePublication(captor.capture(), eq(store), eq(tssConfig));
+ verify(controller).addWrapsMessagePublication(captor.capture(), eq(store));
final var publication = captor.getValue();
assertEquals(NODE_ID, publication.nodeId());
assertEquals(WRAPS_MESSAGE, publication.message());
@@ -156,18 +153,14 @@ void doesNotPersistWrapsMessageIfControllerRejects() {
given(context.storeFactory()).willReturn(factory);
given(factory.writableStore(WritableHistoryStore.class)).willReturn(store);
given(context.consensusNow()).willReturn(CONSENSUS_NOW);
- given(context.configuration()).willReturn(configuration);
- given(configuration.getConfigData(TssConfig.class)).willReturn(tssConfig);
given(controllers.getAnyInProgress()).willReturn(Optional.of(controller));
- given(controller.addWrapsMessagePublication(
- any(ReadableHistoryStore.WrapsMessagePublication.class), eq(store), eq(tssConfig)))
+ given(controller.addWrapsMessagePublication(any(ReadableHistoryStore.WrapsMessagePublication.class), eq(store)))
.willReturn(false);
subject.handle(context);
verify(controller)
- .addWrapsMessagePublication(
- any(ReadableHistoryStore.WrapsMessagePublication.class), eq(store), eq(tssConfig));
+ .addWrapsMessagePublication(any(ReadableHistoryStore.WrapsMessagePublication.class), eq(store));
verify(store, never()).addWrapsMessage(anyLong(), any());
}
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistoryLibraryImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistoryLibraryImplTest.java
index 255857647cab..8a2c91948071 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistoryLibraryImplTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistoryLibraryImplTest.java
@@ -4,6 +4,7 @@
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
@@ -151,4 +152,9 @@ void wrapsPhasesAndVerificationCoverAllMethods() {
assertDoesNotThrow(() -> subject.verifyAggregateSignature(message, publicKeys, signature));
}
+
+ @Test
+ void wrapsLibraryBridgeIsNotReady() {
+ assertFalse(subject.wrapsProverReady());
+ }
}
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistoryServiceImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistoryServiceImplTest.java
index 2120fca306d5..bdd468424a0d 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistoryServiceImplTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistoryServiceImplTest.java
@@ -42,6 +42,7 @@ class HistoryServiceImplTest {
private static final Bytes CURRENT_VK = Bytes.wrap("Z");
private static final Metrics NO_OP_METRICS = new NoOpMetrics();
private static final Instant CONSENSUS_NOW = Instant.ofEpochSecond(1_234_567L, 890);
+ private static final TssConfig DEFAULT_TSS_CONFIG = DEFAULT_CONFIG.getConfigData(TssConfig.class);
@Mock
private AppContext appContext;
@@ -55,9 +56,6 @@ class HistoryServiceImplTest {
@Mock
private ProofController controller;
- @Mock
- private TssConfig tssConfig;
-
@Mock
private ActiveRosters activeRosters;
@@ -118,19 +116,19 @@ void usesComponentForHandlers() {
void handoffIsNoop() {
withMockSubject();
given(activeRosters.phase()).willReturn(HANDOFF);
- subject.reconcile(activeRosters, Bytes.EMPTY, store, CONSENSUS_NOW, tssConfig, true, null);
+ subject.reconcile(activeRosters, Bytes.EMPTY, store, CONSENSUS_NOW, DEFAULT_TSS_CONFIG, true, null);
}
@Test
void noopReconciliationIfBootstrapHasProof() {
withMockSubject();
given(activeRosters.phase()).willReturn(BOOTSTRAP);
- given(store.getOrCreateConstruction(activeRosters, CONSENSUS_NOW, tssConfig))
+ given(store.getOrCreateConstruction(activeRosters, CONSENSUS_NOW, DEFAULT_TSS_CONFIG))
.willReturn(HistoryProofConstruction.newBuilder()
.targetProof(HistoryProof.DEFAULT)
.build());
- subject.reconcile(activeRosters, null, store, CONSENSUS_NOW, tssConfig, true, null);
+ subject.reconcile(activeRosters, null, store, CONSENSUS_NOW, DEFAULT_TSS_CONFIG, true, null);
verifyNoMoreInteractions(component);
}
@@ -139,7 +137,7 @@ void noopReconciliationIfBootstrapHasProof() {
void activeReconciliationIfTransitionHasNoProofYet() {
withMockSubject();
given(activeRosters.phase()).willReturn(TRANSITION);
- given(store.getOrCreateConstruction(activeRosters, CONSENSUS_NOW, tssConfig))
+ given(store.getOrCreateConstruction(activeRosters, CONSENSUS_NOW, DEFAULT_TSS_CONFIG))
.willReturn(HistoryProofConstruction.DEFAULT);
given(store.getActiveConstruction()).willReturn(HistoryProofConstruction.DEFAULT);
given(component.controllers()).willReturn(controllers);
@@ -148,12 +146,14 @@ void activeReconciliationIfTransitionHasNoProofYet() {
HistoryProofConstruction.DEFAULT,
store,
HintsConstruction.DEFAULT,
- HistoryProofConstruction.DEFAULT))
+ HistoryProofConstruction.DEFAULT,
+ DEFAULT_CONFIG.getConfigData(TssConfig.class)))
.willReturn(controller);
- subject.reconcile(activeRosters, CURRENT_VK, store, CONSENSUS_NOW, tssConfig, true, HintsConstruction.DEFAULT);
+ subject.reconcile(
+ activeRosters, CURRENT_VK, store, CONSENSUS_NOW, DEFAULT_TSS_CONFIG, true, HintsConstruction.DEFAULT);
- verify(controller).advanceConstruction(CONSENSUS_NOW, CURRENT_VK, store, true, tssConfig);
+ verify(controller).advanceConstruction(CONSENSUS_NOW, CURRENT_VK, store, true, DEFAULT_TSS_CONFIG);
}
@Test
@@ -161,7 +161,8 @@ void doesNothingAfterIneffectualHandoff() {
withMockSubject();
given(activeRosters.phase()).willReturn(HANDOFF);
- subject.reconcile(activeRosters, null, store, CONSENSUS_NOW, tssConfig, true, HintsConstruction.DEFAULT);
+ subject.reconcile(
+ activeRosters, null, store, CONSENSUS_NOW, DEFAULT_TSS_CONFIG, true, HintsConstruction.DEFAULT);
verify(store, never()).getConstructionFor(activeRosters);
}
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistorySubmissionsTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistorySubmissionsTest.java
index ba90af774808..4bbeed516cee 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistorySubmissionsTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/HistorySubmissionsTest.java
@@ -124,7 +124,7 @@ void usesExpectedBodyForVote() {
given(appContext.configSupplier()).willReturn(() -> DEFAULT_CONFIG);
given(appContext.gossip()).willReturn(gossip);
- subject.submitProofVote(123L, HistoryProof.DEFAULT);
+ subject.submitExplicitProofVote(123L, HistoryProof.DEFAULT);
final ArgumentCaptor> captor = ArgumentCaptor.forClass(Consumer.class);
verify(gossip)
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/ProofControllerImplTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/ProofControllerImplTest.java
index 0c87a16007c2..0001a3fe0688 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/ProofControllerImplTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/ProofControllerImplTest.java
@@ -2,6 +2,7 @@
package com.hedera.node.app.history.impl;
import static com.hedera.hapi.node.state.history.WrapsPhase.R1;
+import static com.hedera.node.app.fixtures.AppTestBase.DEFAULT_CONFIG;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
@@ -42,9 +43,9 @@ class ProofControllerImplTest {
private static final long SELF_ID = 1L;
private static final long OTHER_NODE_ID = 2L;
private static final long CONSTRUCTION_ID = 100L;
-
private static final Bytes METADATA = Bytes.wrap("meta");
private static final Bytes PROOF_KEY_1 = Bytes.wrap("pk1");
+ private static final TssConfig DEFAULT_TSS_CONFIG = DEFAULT_CONFIG.getConfigData(TssConfig.class);
private Executor executor;
@@ -54,6 +55,9 @@ class ProofControllerImplTest {
@Mock
private HistorySubmissions submissions;
+ @Mock
+ private WrapsMpcStateMachine machine;
+
@Mock
private HistoryLibrary historyLibrary;
@@ -93,6 +97,7 @@ void setUp() {
given(proverFactory.create(
eq(SELF_ID),
+ eq(DEFAULT_TSS_CONFIG),
eq(keyPair),
any(),
eq(weights),
@@ -109,13 +114,15 @@ void setUp() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
}
@Test
@@ -142,13 +149,15 @@ void isStillInProgressFalseWhenHasTargetProof() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
assertFalse(subject.isStillInProgress());
}
@@ -167,13 +176,15 @@ void isStillInProgressFalseWhenHasFailureReason() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
assertFalse(subject.isStillInProgress());
}
@@ -192,13 +203,15 @@ void advanceConstructionReturnsEarlyWhenAlreadyFinished() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
subject.advanceConstruction(Instant.EPOCH, METADATA, writableHistoryStore, true, tssConfig);
@@ -238,13 +251,15 @@ void advanceConstructionDoesNothingWhenAssemblyStartedAndInactive() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
subject.advanceConstruction(Instant.EPOCH.plusSeconds(1), METADATA, writableHistoryStore, false, tssConfig);
@@ -265,13 +280,15 @@ void advanceConstructionDelegatesToProverWhenAssemblyStartedAndInProgress() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
given(writableHistoryStore.getLedgerId()).willReturn(Bytes.EMPTY);
given(prover.advance(any(), any(), any(), any(), eq(tssConfig), any()))
@@ -300,13 +317,15 @@ void advanceConstructionFinishesProofWhenProverCompletes() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
final var proof = HistoryProof.newBuilder().build();
@@ -338,13 +357,15 @@ void advanceConstructionFailsConstructionWhenProverFails() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
final var reason = "test-failure";
@@ -375,13 +396,15 @@ void addProofKeyPublicationIgnoredWhenNoGracePeriod() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
final var publication = new ProofKeyPublication(SELF_ID, PROOF_KEY_1, Instant.EPOCH);
@@ -423,35 +446,35 @@ void addWrapsMessagePublicationReturnsFalseWhenHasTargetProof() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
final var publication = new WrapsMessagePublication(SELF_ID, Bytes.EMPTY, R1, Instant.EPOCH);
- final var result = subject.addWrapsMessagePublication(publication, writableHistoryStore, tssConfig);
+ final var result = subject.addWrapsMessagePublication(publication, writableHistoryStore);
assertFalse(result);
- verify(prover, never()).addWrapsSigningMessage(anyLong(), any(), any(), any());
+ verify(prover, never()).addWrapsSigningMessage(anyLong(), any(), any());
}
@Test
void addWrapsMessagePublicationDelegatesToProverOtherwise() {
final var publication = new WrapsMessagePublication(SELF_ID, Bytes.EMPTY, R1, Instant.EPOCH);
- given(prover.addWrapsSigningMessage(
- eq(CONSTRUCTION_ID), eq(publication), eq(writableHistoryStore), eq(tssConfig)))
+ given(prover.addWrapsSigningMessage(eq(CONSTRUCTION_ID), eq(publication), eq(writableHistoryStore)))
.willReturn(true);
- final var result = subject.addWrapsMessagePublication(publication, writableHistoryStore, tssConfig);
+ final var result = subject.addWrapsMessagePublication(publication, writableHistoryStore);
assertTrue(result);
- verify(prover)
- .addWrapsSigningMessage(eq(CONSTRUCTION_ID), eq(publication), eq(writableHistoryStore), eq(tssConfig));
+ verify(prover).addWrapsSigningMessage(eq(CONSTRUCTION_ID), eq(publication), eq(writableHistoryStore));
}
@Test
@@ -468,13 +491,15 @@ void addProofVoteIgnoresWhenAlreadyCompleted() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
final var vote = HistoryProofVote.newBuilder()
.proof(HistoryProof.newBuilder().build())
@@ -515,13 +540,15 @@ void addProofVoteHandlesCongruentVotes() {
weights,
executor,
submissions,
+ machine,
keyPublications,
wrapsMessagePublications,
existingVotes,
historyService,
historyLibrary,
proverFactory,
- null);
+ null,
+ DEFAULT_TSS_CONFIG);
final var congruentVote =
HistoryProofVote.newBuilder().congruentNodeId(OTHER_NODE_ID).build();
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/ProofControllersTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/ProofControllersTest.java
index 31b8a2851213..90dfaa37d4db 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/ProofControllersTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/ProofControllersTest.java
@@ -1,6 +1,7 @@
// SPDX-License-Identifier: Apache-2.0
package com.hedera.node.app.history.impl;
+import static com.hedera.node.app.fixtures.AppTestBase.DEFAULT_CONFIG;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.BDDMockito.given;
@@ -12,6 +13,7 @@
import com.hedera.node.app.service.roster.impl.ActiveRosters;
import com.hedera.node.app.service.roster.impl.RosterTransitionWeights;
import com.hedera.node.app.spi.info.NodeInfo;
+import com.hedera.node.config.data.TssConfig;
import com.hedera.pbj.runtime.io.buffer.Bytes;
import java.util.concurrent.Executor;
import java.util.function.Supplier;
@@ -46,6 +48,9 @@ class ProofControllersTest {
@Mock
private HistorySubmissions submissions;
+ @Mock
+ private WrapsMpcStateMachine machine;
+
@Mock
private Supplier selfNodeInfoSupplier;
@@ -62,8 +67,8 @@ class ProofControllersTest {
@BeforeEach
void setUp() {
- subject =
- new ProofControllers(executor, keyAccessor, library, submissions, selfNodeInfoSupplier, historyService);
+ subject = new ProofControllers(
+ executor, keyAccessor, library, submissions, selfNodeInfoSupplier, historyService, machine);
}
@Test
@@ -79,7 +84,8 @@ void getsAndCreatesInertControllersAsExpected() {
ONE_CONSTRUCTION,
historyStore,
HintsConstruction.DEFAULT,
- HistoryProofConstruction.DEFAULT);
+ HistoryProofConstruction.DEFAULT,
+ DEFAULT_CONFIG.getConfigData(TssConfig.class));
assertTrue(subject.getAnyInProgress().isEmpty());
assertTrue(subject.getInProgressById(1L).isEmpty());
assertTrue(subject.getInProgressById(2L).isEmpty());
@@ -89,7 +95,8 @@ void getsAndCreatesInertControllersAsExpected() {
twoConstruction,
historyStore,
HintsConstruction.DEFAULT,
- HistoryProofConstruction.DEFAULT);
+ HistoryProofConstruction.DEFAULT,
+ DEFAULT_CONFIG.getConfigData(TssConfig.class));
assertNotSame(firstController, secondController);
assertInstanceOf(InertProofController.class, secondController);
}
@@ -106,7 +113,8 @@ void returnsActiveControllerWhenSourceNodesHaveTargetThresholdWeight() {
ONE_CONSTRUCTION,
historyStore,
HintsConstruction.DEFAULT,
- HistoryProofConstruction.DEFAULT);
+ HistoryProofConstruction.DEFAULT,
+ DEFAULT_CONFIG.getConfigData(TssConfig.class));
assertInstanceOf(ProofControllerImpl.class, controller);
}
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/WrapsHistoryProverTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/WrapsHistoryProverTest.java
index 7cdb5ec9781c..3892026297c7 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/WrapsHistoryProverTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/WrapsHistoryProverTest.java
@@ -15,10 +15,12 @@
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
+import com.hedera.hapi.block.stream.AggregatedNodeSignatures;
import com.hedera.hapi.block.stream.ChainOfTrustProof;
import com.hedera.hapi.node.base.Timestamp;
import com.hedera.hapi.node.state.history.HistoryProof;
import com.hedera.hapi.node.state.history.HistoryProofConstruction;
+import com.hedera.hapi.node.state.history.HistoryProofVote;
import com.hedera.hapi.node.state.history.WrapsPhase;
import com.hedera.hapi.node.state.history.WrapsSigningState;
import com.hedera.node.app.history.HistoryLibrary;
@@ -52,6 +54,7 @@ class WrapsHistoryProverTest {
private static final Bytes R1_MESSAGE = Bytes.wrap("r1");
private static final Bytes R2_MESSAGE = Bytes.wrap("r2");
private static final Bytes R3_MESSAGE = Bytes.wrap("r3");
+ private static final Duration GRACE_PERIOD = Duration.ofSeconds(5);
private static final ProofKeysAccessorImpl.SchnorrKeyPair KEY_PAIR =
new ProofKeysAccessorImpl.SchnorrKeyPair(Bytes.wrap("priv"), Bytes.wrap("pub"));
@@ -59,6 +62,8 @@ class WrapsHistoryProverTest {
@Mock
private Executor executor;
+ private final WrapsHistoryProver.Delayer delayer = (delay, unit, executor) -> executor;
+
@Mock
private HistoryLibrary historyLibrary;
@@ -97,7 +102,17 @@ void setUp() {
weights = new RosterTransitionWeights(sourceWeights, targetWeights);
subject = new WrapsHistoryProver(
- SELF_ID, KEY_PAIR, null, weights, proofKeys, executor, historyLibrary, submissions);
+ SELF_ID,
+ GRACE_PERIOD,
+ KEY_PAIR,
+ null,
+ weights,
+ proofKeys,
+ delayer,
+ executor,
+ historyLibrary,
+ submissions,
+ new WrapsMpcStateMachine());
}
private static HistoryProofConstruction constructionWithPhase(WrapsPhase phase, Instant graceEnd) {
@@ -117,7 +132,17 @@ void advanceFailsWhenNonGenesisWithoutLedgerId() {
.chainOfTrustProof(ChainOfTrustProof.DEFAULT)
.build();
subject = new WrapsHistoryProver(
- SELF_ID, KEY_PAIR, nonGenesisSourceProof, weights, proofKeys, executor, historyLibrary, submissions);
+ SELF_ID,
+ GRACE_PERIOD,
+ KEY_PAIR,
+ nonGenesisSourceProof,
+ weights,
+ proofKeys,
+ delayer,
+ executor,
+ historyLibrary,
+ submissions,
+ new WrapsMpcStateMachine());
final var outcome = subject.advance(
EPOCH, constructionWithPhase(R1, null), TARGET_METADATA, targetProofKeys, tssConfig, null);
@@ -135,13 +160,20 @@ void advanceFailsWhenGracePeriodExpired() {
final var construction = constructionWithPhase(R1, graceEnd);
subject = new WrapsHistoryProver(
- SELF_ID, KEY_PAIR, HistoryProof.DEFAULT, weights, proofKeys, executor, historyLibrary, submissions);
+ SELF_ID,
+ GRACE_PERIOD,
+ KEY_PAIR,
+ HistoryProof.DEFAULT,
+ weights,
+ proofKeys,
+ delayer,
+ executor,
+ historyLibrary,
+ submissions,
+ new WrapsMpcStateMachine());
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH), writableHistoryStore);
final var outcome = subject.advance(now, construction, TARGET_METADATA, targetProofKeys, tssConfig, LEDGER_ID);
@@ -153,7 +185,17 @@ void advanceFailsWhenGracePeriodExpired() {
@Test
void advanceInitializesWrapsMessageAndPublishesR1() {
subject = new WrapsHistoryProver(
- SELF_ID, KEY_PAIR, null, weights, proofKeys, runnable -> runnable.run(), historyLibrary, submissions);
+ SELF_ID,
+ GRACE_PERIOD,
+ KEY_PAIR,
+ null,
+ weights,
+ proofKeys,
+ delayer,
+ Runnable::run,
+ historyLibrary,
+ submissions,
+ new WrapsMpcStateMachine());
given(historyLibrary.hashAddressBook(any())).willReturn("HASH".getBytes(UTF_8));
given(historyLibrary.computeWrapsMessage(any(), any())).willReturn("MSG".getBytes(UTF_8));
given(historyLibrary.runWrapsPhaseR1(any(), any(), any())).willReturn(MESSAGE_BYTES.toByteArray());
@@ -173,7 +215,17 @@ void advanceInitializesWrapsMessageAndPublishesR1() {
@Test
void advancePublishesR3WhenEligible() {
subject = new WrapsHistoryProver(
- SELF_ID, KEY_PAIR, null, weights, proofKeys, runnable -> runnable.run(), historyLibrary, submissions);
+ SELF_ID,
+ GRACE_PERIOD,
+ KEY_PAIR,
+ null,
+ weights,
+ proofKeys,
+ delayer,
+ Runnable::run,
+ historyLibrary,
+ submissions,
+ new WrapsMpcStateMachine());
given(historyLibrary.hashAddressBook(any())).willReturn("HASH".getBytes(UTF_8));
given(historyLibrary.computeWrapsMessage(any(), any())).willReturn("MSG".getBytes(UTF_8));
given(historyLibrary.runWrapsPhaseR3(any(), any(), any(), any(), any(), any()))
@@ -183,26 +235,18 @@ void advancePublishesR3WhenEligible() {
setField("entropy", new byte[32]);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
final var construction = constructionWithPhase(R3, null);
final var outcome =
@@ -217,7 +261,17 @@ void advancePublishesR3WhenEligible() {
@Test
void advancePublishesR2WhenEligible() {
subject = new WrapsHistoryProver(
- SELF_ID, KEY_PAIR, null, weights, proofKeys, runnable -> runnable.run(), historyLibrary, submissions);
+ SELF_ID,
+ GRACE_PERIOD,
+ KEY_PAIR,
+ null,
+ weights,
+ proofKeys,
+ delayer,
+ Runnable::run,
+ historyLibrary,
+ submissions,
+ new WrapsMpcStateMachine());
given(historyLibrary.hashAddressBook(any())).willReturn("HASH".getBytes(UTF_8));
given(historyLibrary.computeWrapsMessage(any(), any())).willReturn("MSG".getBytes(UTF_8));
given(historyLibrary.runWrapsPhaseR2(any(), any(), any(), any(), any())).willReturn(R2_MESSAGE.toByteArray());
@@ -226,15 +280,11 @@ void advancePublishesR2WhenEligible() {
setField("entropy", new byte[32]);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
final var construction = constructionWithPhase(R2, null);
final var outcome =
@@ -250,26 +300,23 @@ void advancePublishesR2WhenEligible() {
void addWrapsSigningMessageRejectsWrongPhase() {
final var publication = new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R2, EPOCH);
- assertFalse(subject.addWrapsSigningMessage(CONSTRUCTION_ID, publication, writableHistoryStore, tssConfig));
+ assertFalse(subject.addWrapsSigningMessage(CONSTRUCTION_ID, publication, writableHistoryStore));
verifyNoInteractions(writableHistoryStore);
}
@Test
void r1PhaseAdvancesToR2WhenEnoughWeight() {
- given(tssConfig.wrapsMessageGracePeriod()).willReturn(Duration.ofSeconds(5));
-
final var first = new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH);
final var second = new WrapsMessagePublication(OTHER_NODE_ID, R1_MESSAGE, R1, EPOCH.plusSeconds(1));
- assertTrue(subject.addWrapsSigningMessage(CONSTRUCTION_ID, first, writableHistoryStore, tssConfig));
- assertTrue(subject.addWrapsSigningMessage(CONSTRUCTION_ID, second, writableHistoryStore, tssConfig));
+ assertTrue(subject.addWrapsSigningMessage(CONSTRUCTION_ID, first, writableHistoryStore));
+ assertTrue(subject.addWrapsSigningMessage(CONSTRUCTION_ID, second, writableHistoryStore));
// A third R1 message from any node should be rejected since only R1 messages from two nodes are allowed
assertFalse(subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(999L, R1_MESSAGE, R1, EPOCH.plusSeconds(2)),
- writableHistoryStore,
- tssConfig));
+ writableHistoryStore));
verify(writableHistoryStore).advanceWrapsSigningPhase(eq(CONSTRUCTION_ID), eq(R2), any());
}
@@ -279,20 +326,31 @@ void duplicateR1MessagesRejected() {
final var first = new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH);
final var duplicate = new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH.plusSeconds(1));
- assertTrue(subject.addWrapsSigningMessage(CONSTRUCTION_ID, first, writableHistoryStore, tssConfig));
- assertFalse(subject.addWrapsSigningMessage(CONSTRUCTION_ID, duplicate, writableHistoryStore, tssConfig));
+ assertTrue(subject.addWrapsSigningMessage(CONSTRUCTION_ID, first, writableHistoryStore));
+ assertFalse(subject.addWrapsSigningMessage(CONSTRUCTION_ID, duplicate, writableHistoryStore));
}
@Test
void aggregatePhasePublishesAggregateVoteWhenWrapsDisabledOrNoSourceProof() {
subject = new WrapsHistoryProver(
- SELF_ID, KEY_PAIR, null, weights, proofKeys, runnable -> runnable.run(), historyLibrary, submissions);
+ SELF_ID,
+ GRACE_PERIOD,
+ KEY_PAIR,
+ null,
+ weights,
+ proofKeys,
+ delayer,
+ Runnable::run,
+ historyLibrary,
+ submissions,
+ new WrapsMpcStateMachine());
given(historyLibrary.hashAddressBook(any())).willReturn("HASH".getBytes(UTF_8));
given(historyLibrary.computeWrapsMessage(any(), any())).willReturn("MSG".getBytes(UTF_8));
given(historyLibrary.runAggregationPhase(any(), any(), any(), any(), any()))
.willReturn(AGG_SIG.toByteArray());
- given(submissions.submitProofVote(eq(CONSTRUCTION_ID), any()))
+ given(submissions.submitExplicitProofVote(eq(CONSTRUCTION_ID), any()))
.willReturn(CompletableFuture.completedFuture(null));
+ given(historyLibrary.verifyAggregateSignature(any(), any(), any())).willReturn(true);
setField("entropy", new byte[32]);
subject.replayWrapsSigningMessage(CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH));
@@ -313,7 +371,7 @@ void aggregatePhasePublishesAggregateVoteWhenWrapsDisabledOrNoSourceProof() {
assertSame(HistoryProver.Outcome.InProgress.INSTANCE, outcome);
final var captor = ArgumentCaptor.forClass(HistoryProof.class);
- verify(submissions).submitProofVote(eq(CONSTRUCTION_ID), captor.capture());
+ verify(submissions).submitExplicitProofVote(eq(CONSTRUCTION_ID), captor.capture());
final var proof = captor.getValue();
final var chainOfTrust = proof.chainOfTrustProofOrThrow();
assertTrue(chainOfTrust.hasAggregatedNodeSignatures());
@@ -329,59 +387,52 @@ void aggregatePhasePublishesIncrementalWrapsVoteWhenSourceProofExtensible() {
subject = new WrapsHistoryProver(
SELF_ID,
+ GRACE_PERIOD,
KEY_PAIR,
sourceProof,
weights,
proofKeys,
- runnable -> runnable.run(),
+ delayer,
+ Runnable::run,
historyLibrary,
- submissions);
+ submissions,
+ new WrapsMpcStateMachine());
given(historyLibrary.hashAddressBook(any())).willReturn("HASH".getBytes(UTF_8));
given(historyLibrary.computeWrapsMessage(any(), any())).willReturn("MSG".getBytes(UTF_8));
given(historyLibrary.runAggregationPhase(any(), any(), any(), any(), any()))
.willReturn(AGG_SIG.toByteArray());
given(tssConfig.wrapsEnabled()).willReturn(true);
- given(submissions.submitProofVote(eq(CONSTRUCTION_ID), any()))
+ given(submissions.submitExplicitProofVote(eq(CONSTRUCTION_ID), any()))
.willReturn(CompletableFuture.completedFuture(null));
final var incremental =
new com.hedera.cryptography.wraps.Proof(UNCOMPRESSED.toByteArray(), COMPRESSED.toByteArray());
given(historyLibrary.constructIncrementalWrapsProof(any(), any(), any(), any(), any(), any(), any()))
.willReturn(incremental);
+ given(historyLibrary.wrapsProverReady()).willReturn(true);
+ given(historyLibrary.verifyAggregateSignature(any(), any(), any())).willReturn(true);
setField("entropy", new byte[32]);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R3_MESSAGE, R3, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R3_MESSAGE, R3, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R3_MESSAGE, R3, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
final var construction = constructionWithPhase(AGGREGATE, null);
final var outcome =
@@ -389,7 +440,7 @@ void aggregatePhasePublishesIncrementalWrapsVoteWhenSourceProofExtensible() {
assertSame(HistoryProver.Outcome.InProgress.INSTANCE, outcome);
final var captor = ArgumentCaptor.forClass(HistoryProof.class);
- verify(submissions).submitProofVote(eq(CONSTRUCTION_ID), captor.capture());
+ verify(submissions).submitExplicitProofVote(eq(CONSTRUCTION_ID), captor.capture());
final var proof = captor.getValue();
assertEquals(UNCOMPRESSED, proof.uncompressedWrapsProof());
final var chainOfTrust = proof.chainOfTrustProofOrThrow();
@@ -400,24 +451,28 @@ void aggregatePhasePublishesIncrementalWrapsVoteWhenSourceProofExtensible() {
void aggregatePhasePublishesGenesisWrapsVoteWhenSourceProofNotExtensible() {
final var sourceProof = HistoryProof.newBuilder()
.uncompressedWrapsProof(Bytes.EMPTY)
- .chainOfTrustProof(ChainOfTrustProof.DEFAULT)
+ .chainOfTrustProof(
+ ChainOfTrustProof.newBuilder().aggregatedNodeSignatures(AggregatedNodeSignatures.DEFAULT))
.build();
subject = new WrapsHistoryProver(
SELF_ID,
+ GRACE_PERIOD,
KEY_PAIR,
sourceProof,
weights,
proofKeys,
- runnable -> runnable.run(),
+ delayer,
+ Runnable::run,
historyLibrary,
- submissions);
+ submissions,
+ new WrapsMpcStateMachine());
given(historyLibrary.hashAddressBook(any())).willReturn("HASH".getBytes(UTF_8));
given(historyLibrary.computeWrapsMessage(any(), any())).willReturn("MSG".getBytes(UTF_8));
given(historyLibrary.runAggregationPhase(any(), any(), any(), any(), any()))
.willReturn(AGG_SIG.toByteArray());
given(tssConfig.wrapsEnabled()).willReturn(true);
- given(submissions.submitProofVote(eq(CONSTRUCTION_ID), any()))
+ given(submissions.submitExplicitProofVote(eq(CONSTRUCTION_ID), any()))
.willReturn(CompletableFuture.completedFuture(null));
final var genesis =
@@ -426,40 +481,30 @@ void aggregatePhasePublishesGenesisWrapsVoteWhenSourceProofNotExtensible() {
.willReturn(genesis);
given(historyLibrary.constructIncrementalWrapsProof(any(), any(), any(), any(), any(), any(), any()))
.willReturn(genesis);
+ given(historyLibrary.wrapsProverReady()).willReturn(true);
+ given(historyLibrary.verifyAggregateSignature(any(), any(), any())).willReturn(true);
setField("entropy", new byte[32]);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R3_MESSAGE, R3, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R3_MESSAGE, R3, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R3_MESSAGE, R3, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
final var construction = constructionWithPhase(AGGREGATE, null);
final var outcome =
@@ -467,7 +512,7 @@ void aggregatePhasePublishesGenesisWrapsVoteWhenSourceProofNotExtensible() {
assertSame(HistoryProver.Outcome.InProgress.INSTANCE, outcome);
final var captor = ArgumentCaptor.forClass(HistoryProof.class);
- verify(submissions).submitProofVote(eq(CONSTRUCTION_ID), captor.capture());
+ verify(submissions).submitExplicitProofVote(eq(CONSTRUCTION_ID), captor.capture());
final var proof = captor.getValue();
assertEquals(UNCOMPRESSED, proof.uncompressedWrapsProof());
final var chainOfTrust = proof.chainOfTrustProofOrThrow();
@@ -476,83 +521,53 @@ void aggregatePhasePublishesGenesisWrapsVoteWhenSourceProofNotExtensible() {
@Test
void r2PhaseRequiresR1ParticipationAndAdvancesToR3() {
- given(tssConfig.wrapsMessageGracePeriod()).willReturn(Duration.ofSeconds(5));
-
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
assertFalse(subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(999L, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig));
+ CONSTRUCTION_ID, new WrapsMessagePublication(999L, R2_MESSAGE, R2, EPOCH), writableHistoryStore));
assertTrue(subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig));
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH), writableHistoryStore));
// Second R2 from OTHER_NODE_ID is accepted and is what triggers the phase change
assertTrue(subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig));
+ writableHistoryStore));
verify(writableHistoryStore).advanceWrapsSigningPhase(eq(CONSTRUCTION_ID), eq(R3), any());
}
@Test
void r3PhaseRequiresR1ParticipationAndAdvancesToAggregate() {
- given(tssConfig.wrapsMessageGracePeriod()).willReturn(Duration.ofSeconds(5));
-
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R1_MESSAGE, R1, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R1_MESSAGE, R1, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig);
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R2_MESSAGE, R2, EPOCH), writableHistoryStore);
subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R2_MESSAGE, R2, EPOCH),
- writableHistoryStore,
- tssConfig);
+ writableHistoryStore);
assertFalse(subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(999L, R3_MESSAGE, R3, EPOCH),
- writableHistoryStore,
- tssConfig));
+ CONSTRUCTION_ID, new WrapsMessagePublication(999L, R3_MESSAGE, R3, EPOCH), writableHistoryStore));
assertTrue(subject.addWrapsSigningMessage(
- CONSTRUCTION_ID,
- new WrapsMessagePublication(SELF_ID, R3_MESSAGE, R3, EPOCH),
- writableHistoryStore,
- tssConfig));
+ CONSTRUCTION_ID, new WrapsMessagePublication(SELF_ID, R3_MESSAGE, R3, EPOCH), writableHistoryStore));
// Second R3 from OTHER_NODE_ID is accepted and is what triggers the phase change
assertTrue(subject.addWrapsSigningMessage(
CONSTRUCTION_ID,
new WrapsMessagePublication(OTHER_NODE_ID, R3_MESSAGE, R3, EPOCH),
- writableHistoryStore,
- tssConfig));
+ writableHistoryStore));
verify(writableHistoryStore).advanceWrapsSigningPhase(eq(CONSTRUCTION_ID), eq(AGGREGATE), isNull());
}
@@ -574,7 +589,17 @@ void cancelPendingWorkCancelsFutures() {
final var voteFuture = new CompletableFuture<Void>();
subject = new WrapsHistoryProver(
- SELF_ID, KEY_PAIR, null, weights, proofKeys, runnable -> runnable.run(), historyLibrary, submissions);
+ SELF_ID,
+ GRACE_PERIOD,
+ KEY_PAIR,
+ null,
+ weights,
+ proofKeys,
+ delayer,
+ Runnable::run,
+ historyLibrary,
+ submissions,
+ new WrapsMpcStateMachine());
setField("r1Future", r1Future);
setField("r2Future", r2Future);
@@ -593,6 +618,121 @@ void cancelPendingWorkReturnsFalseWhenNothingToCancel() {
assertFalse(subject.cancelPendingWork());
}
+ @Test
+ void observeProofVoteDoesNothingWhenVoteDecisionFutureIsNull() {
+ final var vote =
+ HistoryProofVote.newBuilder().proof(HistoryProof.DEFAULT).build();
+
+ // voteDecisionFuture is null by default, so this should return early
+ subject.observeProofVote(OTHER_NODE_ID, vote, false);
+
+ // No exception thrown, and no interactions with submissions
+ verifyNoInteractions(submissions);
+ }
+
+ @Test
+ void observeProofVoteDoesNothingWhenVoteDecisionFutureIsDone() {
+ final var completedFuture = CompletableFuture.completedFuture(null);
+ setField("voteDecisionFuture", completedFuture);
+
+ final var vote =
+ HistoryProofVote.newBuilder().proof(HistoryProof.DEFAULT).build();
+
+ subject.observeProofVote(OTHER_NODE_ID, vote, false);
+
+ // No exception thrown, and no interactions with submissions
+ verifyNoInteractions(submissions);
+ }
+
+ @Test
+ void observeProofVoteSkipsVoteWhenProofFinalized() {
+ final var pendingFuture = new CompletableFuture<>();
+ setField("voteDecisionFuture", pendingFuture);
+
+ final var vote =
+ HistoryProofVote.newBuilder().proof(HistoryProof.DEFAULT).build();
+
+ subject.observeProofVote(OTHER_NODE_ID, vote, true);
+
+ // The vote decision future should be completed
+ assertTrue(pendingFuture.isDone());
+ }
+
+ @Test
+ void observeProofVoteStoresHashWhenVoteHasProofButHistoryProofIsNull() {
+ final var pendingFuture = new CompletableFuture<>();
+ setField("voteDecisionFuture", pendingFuture);
+
+ final var proof = HistoryProof.newBuilder()
+ .chainOfTrustProof(ChainOfTrustProof.DEFAULT)
+ .build();
+ final var vote = HistoryProofVote.newBuilder().proof(proof).build();
+
+ subject.observeProofVote(OTHER_NODE_ID, vote, false);
+
+ // The vote decision future should NOT be completed since historyProof is null
+ assertFalse(pendingFuture.isDone());
+ }
+
+ @Test
+ void observeProofVoteCompletesWithCongruentWhenProofMatches() {
+ final var pendingFuture = new CompletableFuture<>();
+ setField("voteDecisionFuture", pendingFuture);
+
+ // Create a proof and set it as the historyProof
+ final var proof = HistoryProof.newBuilder()
+ .chainOfTrustProof(ChainOfTrustProof.DEFAULT)
+ .build();
+ setField("historyProof", proof);
+
+ // Create a vote with the same proof
+ final var vote = HistoryProofVote.newBuilder().proof(proof).build();
+
+ subject.observeProofVote(OTHER_NODE_ID, vote, false);
+
+ // The vote decision future should be completed since the proofs match
+ assertTrue(pendingFuture.isDone());
+ }
+
+ @Test
+ void observeProofVoteDoesNotCompleteWhenProofDoesNotMatch() {
+ final var pendingFuture = new CompletableFuture<>();
+ setField("voteDecisionFuture", pendingFuture);
+
+ // Create a proof and set it as the historyProof
+ final var selfProof = HistoryProof.newBuilder()
+ .chainOfTrustProof(ChainOfTrustProof.DEFAULT)
+ .uncompressedWrapsProof(Bytes.wrap("selfProofData"))
+ .build();
+ setField("historyProof", selfProof);
+
+ // Create a vote with a different proof
+ final var otherProof = HistoryProof.newBuilder()
+ .chainOfTrustProof(ChainOfTrustProof.DEFAULT)
+ .uncompressedWrapsProof(Bytes.wrap("otherProofData"))
+ .build();
+ final var vote = HistoryProofVote.newBuilder().proof(otherProof).build();
+
+ subject.observeProofVote(OTHER_NODE_ID, vote, false);
+
+ // The vote decision future should NOT be completed since the proofs don't match
+ assertFalse(pendingFuture.isDone());
+ }
+
+ @Test
+ void observeProofVoteDoesNothingWhenVoteHasNoProof() {
+ final var pendingFuture = new CompletableFuture<>();
+ setField("voteDecisionFuture", pendingFuture);
+
+ // Create a vote with congruent_node_id instead of proof
+ final var vote = HistoryProofVote.newBuilder().congruentNodeId(999L).build();
+
+ subject.observeProofVote(OTHER_NODE_ID, vote, false);
+
+ // The vote decision future should NOT be completed
+ assertFalse(pendingFuture.isDone());
+ }
+
private void setField(String name, Object value) {
try {
final var field = WrapsHistoryProver.class.getDeclaredField(name);
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/WrapsMpcStateMachineTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/WrapsMpcStateMachineTest.java
new file mode 100644
index 000000000000..e37af921852b
--- /dev/null
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/history/impl/WrapsMpcStateMachineTest.java
@@ -0,0 +1,379 @@
+// SPDX-License-Identifier: Apache-2.0
+package com.hedera.node.app.history.impl;
+
+import static com.hedera.hapi.node.state.history.WrapsPhase.AGGREGATE;
+import static com.hedera.hapi.node.state.history.WrapsPhase.R1;
+import static com.hedera.hapi.node.state.history.WrapsPhase.R2;
+import static com.hedera.hapi.node.state.history.WrapsPhase.R3;
+import static org.junit.jupiter.api.Assertions.*;
+
+import com.hedera.hapi.node.state.history.WrapsPhase;
+import com.hedera.node.app.history.ReadableHistoryStore.WrapsMessagePublication;
+import com.hedera.node.app.history.impl.WrapsMpcStateMachine.Transition;
+import com.hedera.node.app.service.roster.impl.RosterTransitionWeights;
+import com.hedera.pbj.runtime.io.buffer.Bytes;
+import java.time.Duration;
+import java.time.Instant;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+
+class WrapsMpcStateMachineTest {
+ private static final long NODE_1 = 1L;
+ private static final long NODE_2 = 2L;
+ private static final long NODE_3 = 3L;
+ private static final Bytes MESSAGE = Bytes.wrap("test-message");
+ private static final Duration GRACE_PERIOD = Duration.ofSeconds(10);
+ private static final Instant BASE_TIME = Instant.EPOCH;
+
+ private WrapsMpcStateMachine subject;
+ private SortedMap<Long, Long> sourceWeights;
+ private SortedMap<Long, Long> targetWeights;
+ private RosterTransitionWeights weights;
+ private Map<WrapsPhase, SortedMap<Long, WrapsMessagePublication>> phaseMessages;
+
+ @BeforeEach
+ void setUp() {
+ subject = new WrapsMpcStateMachine();
+ sourceWeights = new TreeMap<>();
+ targetWeights = new TreeMap<>();
+ phaseMessages = new HashMap<>();
+ }
+
+ @Nested
+ class TransitionRecordTests {
+ @Test
+ void rejectedAtCreatesCorrectTransition() {
+ final var transition = Transition.rejectedAt(R1);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R1, transition.newCurrentPhase());
+ assertNull(transition.gracePeriodEndTimeUpdate());
+ }
+
+ @Test
+ void incorporatedInCreatesCorrectTransition() {
+ final var transition = Transition.incorporatedIn(R2);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R2, transition.newCurrentPhase());
+ assertNull(transition.gracePeriodEndTimeUpdate());
+ }
+
+ @Test
+ void advanceToCreatesCorrectTransition() {
+ final var endTime = Instant.now();
+ final var transition = Transition.advanceTo(R3, endTime);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R3, transition.newCurrentPhase());
+ assertEquals(endTime, transition.gracePeriodEndTimeUpdate());
+ }
+
+ @Test
+ void transitionRequiresNonNullPhase() {
+ assertThrows(NullPointerException.class, () -> new Transition(true, null, null));
+ }
+ }
+
+ @Nested
+ class OnNextAggregatePhaseTests {
+ @Test
+ void rejectsPublicationWhenCurrentPhaseIsAggregate() {
+ setupTwoNodeWeights();
+ final var publication = createPublication(NODE_1, AGGREGATE, BASE_TIME);
+
+ final var transition = subject.onNext(publication, AGGREGATE, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(AGGREGATE, transition.newCurrentPhase());
+ assertNull(transition.gracePeriodEndTimeUpdate());
+ }
+ }
+
+ @Nested
+ class OnNextPhaseMismatchTests {
+ @Test
+ void rejectsR2PublicationWhenCurrentPhaseIsR1() {
+ setupTwoNodeWeights();
+ final var publication = createPublication(NODE_1, R2, BASE_TIME);
+
+ final var transition = subject.onNext(publication, R1, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R1, transition.newCurrentPhase());
+ }
+
+ @Test
+ void rejectsR1PublicationWhenCurrentPhaseIsR2() {
+ setupTwoNodeWeights();
+ final var publication = createPublication(NODE_1, R1, BASE_TIME);
+
+ final var transition = subject.onNext(publication, R2, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R2, transition.newCurrentPhase());
+ }
+
+ @Test
+ void rejectsR3PublicationWhenCurrentPhaseIsR2() {
+ setupTwoNodeWeights();
+ final var publication = createPublication(NODE_1, R3, BASE_TIME);
+
+ final var transition = subject.onNext(publication, R2, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R2, transition.newCurrentPhase());
+ }
+ }
+
+ @Nested
+ class OnNextR1PhaseTests {
+ @Test
+ void acceptsFirstR1PublicationAndIncorporates() {
+ setupTwoNodeWeights();
+ final var publication = createPublication(NODE_1, R1, BASE_TIME);
+
+ final var transition = subject.onNext(publication, R1, weights, GRACE_PERIOD, phaseMessages);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R1, transition.newCurrentPhase());
+ assertTrue(phaseMessages.get(R1).containsKey(NODE_1));
+ }
+
+ @Test
+ void rejectsDuplicateR1PublicationFromSameNode() {
+ setupTwoNodeWeights();
+ final var first = createPublication(NODE_1, R1, BASE_TIME);
+ final var duplicate = createPublication(NODE_1, R1, BASE_TIME.plusSeconds(1));
+
+ subject.onNext(first, R1, weights, GRACE_PERIOD, phaseMessages);
+ final var transition = subject.onNext(duplicate, R1, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R1, transition.newCurrentPhase());
+ }
+
+ @Test
+ void advancesToR2WhenMoreThanHalfWeightReached() {
+ setupTwoNodeWeights();
+ final var first = createPublication(NODE_1, R1, BASE_TIME);
+ final var second = createPublication(NODE_2, R1, BASE_TIME.plusSeconds(1));
+
+ subject.onNext(first, R1, weights, GRACE_PERIOD, phaseMessages);
+ final var transition = subject.onNext(second, R1, weights, GRACE_PERIOD, phaseMessages);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R2, transition.newCurrentPhase());
+ assertEquals(BASE_TIME.plusSeconds(1).plus(GRACE_PERIOD), transition.gracePeriodEndTimeUpdate());
+ }
+
+ @Test
+ void staysInR1WhenNotEnoughWeight() {
+ setupThreeNodeWeightsWithMajorityRequired();
+ final var first = createPublication(NODE_1, R1, BASE_TIME);
+
+ final var transition = subject.onNext(first, R1, weights, GRACE_PERIOD, phaseMessages);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R1, transition.newCurrentPhase());
+ }
+
+ @Test
+ void advancesToR2WhenMajorityWeightReachedWithThreeNodes() {
+ setupThreeNodeWeightsWithMajorityRequired();
+ final var first = createPublication(NODE_1, R1, BASE_TIME);
+ final var second = createPublication(NODE_2, R1, BASE_TIME.plusSeconds(1));
+
+ subject.onNext(first, R1, weights, GRACE_PERIOD, phaseMessages);
+ final var transition = subject.onNext(second, R1, weights, GRACE_PERIOD, phaseMessages);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R2, transition.newCurrentPhase());
+ }
+ }
+
+ @Nested
+ class OnNextR2PhaseTests {
+ @BeforeEach
+ void setupR1Participants() {
+ setupTwoNodeWeights();
+ phaseMessages.put(R1, new TreeMap<>());
+ phaseMessages.get(R1).put(NODE_1, createPublication(NODE_1, R1, BASE_TIME));
+ phaseMessages.get(R1).put(NODE_2, createPublication(NODE_2, R1, BASE_TIME));
+ }
+
+ @Test
+ void rejectsR2PublicationFromNodeNotInR1() {
+ final var publication = createPublication(NODE_3, R2, BASE_TIME);
+
+ final var transition = subject.onNext(publication, R2, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R2, transition.newCurrentPhase());
+ }
+
+ @Test
+ void acceptsR2PublicationFromR1Participant() {
+ final var publication = createPublication(NODE_1, R2, BASE_TIME);
+
+ final var transition = subject.onNext(publication, R2, weights, GRACE_PERIOD, phaseMessages);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R2, transition.newCurrentPhase());
+ }
+
+ @Test
+ void rejectsDuplicateR2PublicationFromSameNode() {
+ final var first = createPublication(NODE_1, R2, BASE_TIME);
+ final var duplicate = createPublication(NODE_1, R2, BASE_TIME.plusSeconds(1));
+
+ subject.onNext(first, R2, weights, GRACE_PERIOD, phaseMessages);
+ final var transition = subject.onNext(duplicate, R2, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R2, transition.newCurrentPhase());
+ }
+
+ @Test
+ void advancesToR3WhenAllR1ParticipantsSubmitR2() {
+ final var first = createPublication(NODE_1, R2, BASE_TIME);
+ final var second = createPublication(NODE_2, R2, BASE_TIME.plusSeconds(1));
+
+ subject.onNext(first, R2, weights, GRACE_PERIOD, phaseMessages);
+ final var transition = subject.onNext(second, R2, weights, GRACE_PERIOD, phaseMessages);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R3, transition.newCurrentPhase());
+ assertEquals(BASE_TIME.plusSeconds(1).plus(GRACE_PERIOD), transition.gracePeriodEndTimeUpdate());
+ }
+ }
+
+ @Nested
+ class OnNextR3PhaseTests {
+ @BeforeEach
+ void setupR1AndR2Participants() {
+ setupTwoNodeWeights();
+ phaseMessages.put(R1, new TreeMap<>());
+ phaseMessages.get(R1).put(NODE_1, createPublication(NODE_1, R1, BASE_TIME));
+ phaseMessages.get(R1).put(NODE_2, createPublication(NODE_2, R1, BASE_TIME));
+ phaseMessages.put(R2, new TreeMap<>());
+ phaseMessages.get(R2).put(NODE_1, createPublication(NODE_1, R2, BASE_TIME));
+ phaseMessages.get(R2).put(NODE_2, createPublication(NODE_2, R2, BASE_TIME));
+ }
+
+ @Test
+ void rejectsR3PublicationFromNodeNotInR1() {
+ final var publication = createPublication(NODE_3, R3, BASE_TIME);
+
+ final var transition = subject.onNext(publication, R3, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R3, transition.newCurrentPhase());
+ }
+
+ @Test
+ void acceptsR3PublicationFromR1Participant() {
+ final var publication = createPublication(NODE_1, R3, BASE_TIME);
+
+ final var transition = subject.onNext(publication, R3, weights, GRACE_PERIOD, phaseMessages);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(R3, transition.newCurrentPhase());
+ }
+
+ @Test
+ void rejectsDuplicateR3PublicationFromSameNode() {
+ final var first = createPublication(NODE_1, R3, BASE_TIME);
+ final var duplicate = createPublication(NODE_1, R3, BASE_TIME.plusSeconds(1));
+
+ subject.onNext(first, R3, weights, GRACE_PERIOD, phaseMessages);
+ final var transition = subject.onNext(duplicate, R3, weights, GRACE_PERIOD, phaseMessages);
+
+ assertFalse(transition.publicationAccepted());
+ assertEquals(R3, transition.newCurrentPhase());
+ }
+
+ @Test
+ void advancesToAggregateWhenAllR1ParticipantsSubmitR3() {
+ final var first = createPublication(NODE_1, R3, BASE_TIME);
+ final var second = createPublication(NODE_2, R3, BASE_TIME.plusSeconds(1));
+
+ subject.onNext(first, R3, weights, GRACE_PERIOD, phaseMessages);
+ final var transition = subject.onNext(second, R3, weights, GRACE_PERIOD, phaseMessages);
+
+ assertTrue(transition.publicationAccepted());
+ assertEquals(AGGREGATE, transition.newCurrentPhase());
+ assertNull(transition.gracePeriodEndTimeUpdate());
+ }
+ }
+
+ @Nested
+ class NullParameterTests {
+ @Test
+ void throwsOnNullPublication() {
+ setupTwoNodeWeights();
+ assertThrows(
+ NullPointerException.class, () -> subject.onNext(null, R1, weights, GRACE_PERIOD, phaseMessages));
+ }
+
+ @Test
+ void throwsOnNullCurrentPhase() {
+ setupTwoNodeWeights();
+ final var publication = createPublication(NODE_1, R1, BASE_TIME);
+ assertThrows(
+ NullPointerException.class,
+ () -> subject.onNext(publication, null, weights, GRACE_PERIOD, phaseMessages));
+ }
+
+ @Test
+ void throwsOnNullWeights() {
+ final var publication = createPublication(NODE_1, R1, BASE_TIME);
+ assertThrows(
+ NullPointerException.class,
+ () -> subject.onNext(publication, R1, null, GRACE_PERIOD, phaseMessages));
+ }
+
+ @Test
+ void throwsOnNullGracePeriod() {
+ setupTwoNodeWeights();
+ final var publication = createPublication(NODE_1, R1, BASE_TIME);
+ assertThrows(
+ NullPointerException.class, () -> subject.onNext(publication, R1, weights, null, phaseMessages));
+ }
+
+ @Test
+ void throwsOnNullPhaseMessages() {
+ setupTwoNodeWeights();
+ final var publication = createPublication(NODE_1, R1, BASE_TIME);
+ assertThrows(
+ NullPointerException.class, () -> subject.onNext(publication, R1, weights, GRACE_PERIOD, null));
+ }
+ }
+
+ private void setupTwoNodeWeights() {
+ sourceWeights.put(NODE_1, 1L);
+ sourceWeights.put(NODE_2, 1L);
+ targetWeights.put(NODE_1, 1L);
+ targetWeights.put(NODE_2, 1L);
+ weights = new RosterTransitionWeights(sourceWeights, targetWeights);
+ }
+
+ private void setupThreeNodeWeightsWithMajorityRequired() {
+ sourceWeights.put(NODE_1, 1L);
+ sourceWeights.put(NODE_2, 1L);
+ sourceWeights.put(NODE_3, 1L);
+ targetWeights.put(NODE_1, 1L);
+ targetWeights.put(NODE_2, 1L);
+ targetWeights.put(NODE_3, 1L);
+ weights = new RosterTransitionWeights(sourceWeights, targetWeights);
+ }
+
+ private WrapsMessagePublication createPublication(long nodeId, WrapsPhase phase, Instant receiptTime) {
+ return new WrapsMessagePublication(nodeId, MESSAGE, phase, receiptTime);
+ }
+}
diff --git a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/TransactionCheckerTest.java b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/TransactionCheckerTest.java
index 5a06b661b8e9..b5d3d8ac502a 100644
--- a/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/TransactionCheckerTest.java
+++ b/hedera-node/hedera-app/src/test/java/com/hedera/node/app/workflows/TransactionCheckerTest.java
@@ -195,7 +195,7 @@ class ParseTest {
@SuppressWarnings("ConstantConditions")
@DisplayName("`parseAndCheck` requires Bytes")
void parseAndCheck() {
- assertThatThrownBy(() -> checker.parse(null)).isInstanceOf(NullPointerException.class);
+ assertThatThrownBy(() -> checker.parse(null, 0)).isInstanceOf(NullPointerException.class);
}
@Test
@@ -271,8 +271,8 @@ void parseAndCheckWithTooManyBytesJumboEnabled() {
void parseAndCheckWithNoBytes() throws PreCheckException {
// Given a transaction with no bytes at all
// Then the checker should throw a PreCheckException
- final var transaction = checker.parse(Bytes.EMPTY);
- assertThatThrownBy(() -> checker.check(transaction))
+ final var transaction = checker.parse(Bytes.EMPTY, Integer.MAX_VALUE);
+ assertThatThrownBy(() -> checker.check(transaction, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION_BODY));
}
@@ -287,8 +287,8 @@ void parseAndCheckWithNoBytes() throws PreCheckException {
@DisplayName("A valid transaction passes parse and check")
void happyPath() throws PreCheckException {
// Given a valid serialized transaction, when we parseStrict and check
- final var transaction = checker.parse(inputBuffer);
- final var info = checker.check(transaction);
+ final var transaction = checker.parse(inputBuffer, Integer.MAX_VALUE);
+ final var info = checker.check(transaction, Integer.MAX_VALUE);
// Then the parsed data is as we expected
assertThat(info.signedTx()).isEqualTo(signedTx);
@@ -322,8 +322,8 @@ void happyDeprecatedPath() throws PreCheckException {
inputBuffer = Bytes.wrap(asByteArray(localTx));
// When we parseStrict and check
- final var transaction = checker.parse(inputBuffer);
- final var info = checker.check(transaction);
+ final var transaction = checker.parse(inputBuffer, Integer.MAX_VALUE);
+ final var info = checker.check(transaction, Integer.MAX_VALUE);
// Then everything works because the deprecated fields are supported
assertThat(info.signedTx()).isEqualTo(repackagedSignedTx);
@@ -350,8 +350,8 @@ void parseAndCheckWithSuperDeprecatedFields() throws PreCheckException {
inputBuffer = Bytes.wrap(asByteArray(localTx));
// When we check, then we get a PreCheckException with INVALID_TRANSACTION_BODY
- final var transaction = checker.parse(inputBuffer);
- assertThatThrownBy(() -> checker.check(transaction))
+ final var transaction = checker.parse(inputBuffer, Integer.MAX_VALUE);
+ assertThatThrownBy(() -> checker.check(transaction, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION_BODY));
@@ -368,7 +368,7 @@ void badTransactionProtobuf() {
inputBuffer = Bytes.wrap(invalidProtobuf());
// When we parse and check, then the parsing fails because this is an INVALID_TRANSACTION
- assertThatThrownBy(() -> checker.parse(inputBuffer))
+ assertThatThrownBy(() -> checker.parse(inputBuffer, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION));
}
@@ -380,7 +380,7 @@ void unknownFieldInTransaction() {
inputBuffer = Bytes.wrap(appendUnknownField(asByteArray(tx)));
// When we parse and check, then the parsing fails because has unknown fields
- assertThatThrownBy(() -> checker.parse(inputBuffer))
+ assertThatThrownBy(() -> checker.parse(inputBuffer, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(TRANSACTION_HAS_UNKNOWN_FIELDS));
}
@@ -611,7 +611,7 @@ class CheckTest {
@SuppressWarnings("ConstantConditions")
@DisplayName("`check` requires a transaction")
void checkWithNull() {
- assertThatThrownBy(() -> checker.check(null)).isInstanceOf(NullPointerException.class);
+ assertThatThrownBy(() -> checker.check(null, Integer.MAX_VALUE)).isInstanceOf(NullPointerException.class);
}
@Nested
@@ -627,7 +627,7 @@ class HappyPaths {
@DisplayName("A valid transaction passes parseAndCheck with a BufferedData")
void happyPath() throws PreCheckException {
// Given a valid serialized transaction, when we parse and check
- final var info = checker.check(tx);
+ final var info = checker.check(tx, Integer.MAX_VALUE);
// Then the parsed data is as we expected
assertThat(info.signedTx()).isEqualTo(signedTx);
@@ -659,7 +659,7 @@ void happyWithDeprecatedFields() throws PreCheckException {
.build();
// When we parse and check
- final var info = checker.check(localTx);
+ final var info = checker.check(localTx, Integer.MAX_VALUE);
// Then everything works because the deprecated fields are supported
assertThat(info.signedTx()).isEqualTo(repackagedSignedTx);
@@ -685,7 +685,7 @@ void happyWithSuperDeprecatedFields() {
.build();
// When we check, then we get a PreCheckException with INVALID_TRANSACTION_BODY
- assertThatThrownBy(() -> checker.check(localTx))
+ assertThatThrownBy(() -> checker.check(localTx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION_BODY));
@@ -709,7 +709,7 @@ void checkWithSuperDeprecatedFieldsAndSignedTransactionBytes() throws PreCheckEx
.build();
// When we check
- final var info = checker.check(localTx);
+ final var info = checker.check(localTx, Integer.MAX_VALUE);
// Then the parsed data is as we expected
assertThat(info.signedTx()).isEqualTo(signedTx);
assertThat(info.txBody()).isEqualTo(txBody);
@@ -740,7 +740,7 @@ void checkWithSuperDeprecatedFieldsAndDeprecatedFields() throws PreCheckExceptio
.build();
// When we check
- final var info = checker.check(localTx);
+ final var info = checker.check(localTx, Integer.MAX_VALUE);
// Then the parsed data is as we expected
assertThat(info.signedTx()).isEqualTo(repackagedSignedTx);
assertThat(info.txBody()).isEqualTo(txBody);
@@ -765,7 +765,7 @@ void badTransactionWithSignedBytesAndBodyBytes() {
.build();
// When we check the transaction, then we find it is invalid
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION));
@@ -785,7 +785,7 @@ void badTransactionWithSignedBytesAndSigMap() {
.build();
// Then the checker should throw a PreCheckException
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION));
@@ -838,7 +838,7 @@ void badPrefixes(byte[]... prefixes) {
txBuilder(signedTxBuilder(txBody, localSignatureMap)).build();
// When we check the transaction, we find it is invalid due to duplicate prefixes
- assertThatThrownBy(() -> checker.check(localTx))
+ assertThatThrownBy(() -> checker.check(localTx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(KEY_PREFIX_MISMATCH));
}
@@ -856,7 +856,7 @@ void badSignedTransactionProtobuf() {
.build();
// When we parse and check, then the parsing fails because this is an INVALID_TRANSACTION
- assertThatThrownBy(() -> checker.check(localTx))
+ assertThatThrownBy(() -> checker.check(localTx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION));
}
@@ -871,7 +871,7 @@ void unknownFieldInSignedTransaction() {
.build();
// When we parse and check, then the parsing fails because has unknown fields
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(TRANSACTION_HAS_UNKNOWN_FIELDS));
}
@@ -895,7 +895,7 @@ void badTransactionBodyProtobuf() {
.build();
// When we parse and check, then the parsing fails because has unknown fields
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION_BODY));
}
@@ -916,7 +916,7 @@ void unknownFieldInTransactionBody() {
.build();
// When we parse and check, then the parsing fails because this is an TRANSACTION_HAS_UNKNOWN_FIELDS
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(TRANSACTION_HAS_UNKNOWN_FIELDS));
}
@@ -929,7 +929,7 @@ void testCheckTransactionBodyWithoutTransactionIDFails() {
final var tx = txBuilder(signedTxBuilder(body, sigMapBuilder())).build();
// Then the checker should throw a PreCheckException
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(INVALID_TRANSACTION_ID));
}
@@ -943,7 +943,7 @@ void testCheckTransactionBodyWithAliasAsPayer() throws PreCheckException {
final var body = bodyBuilder(txIdBuilder().accountID(payerId));
final var tx = txBuilder(signedTxBuilder(body, sigMapBuilder())).build();
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(PAYER_ACCOUNT_NOT_FOUND));
}
@@ -958,7 +958,7 @@ void testCheckTransactionBodyWithZeroAccountNumFails(long account) {
final var tx = txBuilder(signedTxBuilder(body, sigMapBuilder())).build();
// Then the checker should throw a PreCheckException
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(PAYER_ACCOUNT_NOT_FOUND));
}
@@ -974,7 +974,7 @@ void testCheckTransactionBodyWithBadShardFails(long shard) {
final var tx = txBuilder(signedTxBuilder(body, sigMapBuilder())).build();
// Then the checker should throw a PreCheckException
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(PAYER_ACCOUNT_NOT_FOUND));
}
@@ -990,7 +990,7 @@ void testCheckTransactionBodyWithBadRealmFails(long realm) {
final var tx = txBuilder(signedTxBuilder(body, sigMapBuilder())).build();
// Then the checker should throw a PreCheckException
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(PAYER_ACCOUNT_NOT_FOUND));
}
@@ -1002,7 +1002,7 @@ void testScheduledTransactionFails() {
final var tx = txBuilder(signedTxBuilder(body, sigMapBuilder())).build();
// Then the checker should throw a PreCheckException
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(TRANSACTION_ID_FIELD_NOT_ALLOWED));
}
@@ -1014,7 +1014,7 @@ void testInternalTransactionFails() {
final var tx = txBuilder(signedTxBuilder(body, sigMapBuilder())).build();
// Then the checker should throw a PreCheckException
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.has(responseCode(TRANSACTION_ID_FIELD_NOT_ALLOWED));
}
@@ -1032,7 +1032,7 @@ void testCheckTransactionBodyWithInvalidFeeFails(final long fee) {
final var tx = txBuilder(signedTxBuilder(body, sigMapBuilder())).build();
// When we check the transaction body
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.hasFieldOrPropertyWithValue("responseCode", INSUFFICIENT_TX_FEE);
}
@@ -1124,7 +1124,7 @@ void unknownFunctionality() {
hapiUtils.when(() -> HapiUtils.functionOf(eq(txBody))).thenThrow(new UnknownHederaFunctionality());
// When we parse and check, then the parsing fails due to the exception
- assertThatThrownBy(() -> checker.check(tx))
+ assertThatThrownBy(() -> checker.check(tx, Integer.MAX_VALUE))
.isInstanceOf(PreCheckException.class)
.hasFieldOrPropertyWithValue("responseCode", INVALID_TRANSACTION_BODY);
}
diff --git a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/TssConfig.java b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/TssConfig.java
index c7c61e515beb..71c57f485352 100644
--- a/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/TssConfig.java
+++ b/hedera-node/hedera-config/src/main/java/com/hedera/node/config/data/TssConfig.java
@@ -30,4 +30,5 @@ public record TssConfig(
// to give express consent for breaking the address book chain of trust
@ConfigProperty(defaultValue = "false") @NetworkProperty boolean forceHandoffs,
// Denominator used to compute signing threshold: totalWeight / signingThresholdDivisor
- @ConfigProperty(defaultValue = "2") @Min(1) @NetworkProperty int signingThresholdDivisor) {}
+ @ConfigProperty(defaultValue = "2") @Min(1) @NetworkProperty int signingThresholdDivisor,
+ @ConfigProperty(defaultValue = "5s") Duration wrapsVoteJitterPerRank) {}
diff --git a/hedera-node/test-clients/build.gradle.kts b/hedera-node/test-clients/build.gradle.kts
index cb4a923fc841..14748279a84b 100644
--- a/hedera-node/test-clients/build.gradle.kts
+++ b/hedera-node/test-clients/build.gradle.kts
@@ -123,10 +123,7 @@ val prCheckStartPorts =
}
val prCheckPropOverrides =
buildMap {
- put(
- "hapiTestAdhoc",
- "tss.hintsEnabled=true,tss.forceHandoffs=false,tss.initialCrsParties=16,blockStream.blockPeriod=2s",
- )
+ put("hapiTestAdhoc", "tss.hintsEnabled=true,tss.historyEnabled=true,tss.wrapsEnabled=true")
put(
"hapiTestCrypto",
"tss.hintsEnabled=true,tss.historyEnabled=true,tss.wrapsEnabled=false,blockStream.blockPeriod=1s",
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/hedera/subprocess/ProcessUtils.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/hedera/subprocess/ProcessUtils.java
index 157a457e8663..4bb758d10c9b 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/hedera/subprocess/ProcessUtils.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/hedera/subprocess/ProcessUtils.java
@@ -141,7 +141,10 @@ public static ProcessHandle startSubProcessNodeFrom(
environment.put("grpc.port", Integer.toString(metadata.grpcPort()));
environment.put("grpc.nodeOperatorPort", Integer.toString(metadata.grpcNodeOperatorPort()));
environment.put("hedera.config.version", Integer.toString(configVersion));
+ environment.put("RUST_BACKTRACE", "full");
environment.put("TSS_LIB_NUM_OF_CORES", Integer.toString(1));
+ // NOTE(review): this comment says a non-blank path enables the WRAPS prover, but the value set below is blank ("") — confirm whether the intended artifacts path was dropped or the comment is stale
+ environment.put("TSS_LIB_WRAPS_ARTIFACTS_PATH", "");
environment.put("hedera.shard", String.valueOf(metadata.accountId().shardNum()));
environment.put("hedera.realm", String.valueOf(metadata.accountId().realmNum()));
// Include an PR check overrides from build.gradle.kts
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/translators/inputs/TransactionParts.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/translators/inputs/TransactionParts.java
index 27e63577f821..ac2ea2d90825 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/translators/inputs/TransactionParts.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/translators/inputs/TransactionParts.java
@@ -2,6 +2,8 @@
package com.hedera.services.bdd.junit.support.translators.inputs;
import static com.hedera.hapi.util.HapiUtils.functionOf;
+import static com.hedera.node.app.hapi.utils.CommonPbjConverters.MAX_PBJ_RECORD_SIZE;
+import static com.hedera.pbj.runtime.Codec.DEFAULT_MAX_DEPTH;
import static java.util.Objects.requireNonNull;
import com.hedera.hapi.node.base.HederaFunctionality;
@@ -41,7 +43,12 @@ public TransactionID transactionIdOrThrow() {
*/
public static TransactionParts from(@NonNull final Bytes serializedSignedTx) {
try {
- final var signedTx = SignedTransaction.PROTOBUF.parse(serializedSignedTx);
+ final var signedTx = SignedTransaction.PROTOBUF.parse(
+ serializedSignedTx.toReadableSequentialData(),
+ false,
+ false,
+ DEFAULT_MAX_DEPTH,
+ MAX_PBJ_RECORD_SIZE);
final Transaction wrapper;
if (signedTx.useSerializedTxMessageHashAlgorithm()) {
wrapper = Transaction.newBuilder()
@@ -53,7 +60,12 @@ public static TransactionParts from(@NonNull final Bytes serializedSignedTx) {
.signedTransactionBytes(serializedSignedTx)
.build();
}
- final var body = TransactionBody.PROTOBUF.parse(signedTx.bodyBytes());
+ final var body = TransactionBody.PROTOBUF.parse(
+ signedTx.bodyBytes().toReadableSequentialData(),
+ false,
+ false,
+ DEFAULT_MAX_DEPTH,
+ MAX_PBJ_RECORD_SIZE);
return new TransactionParts(wrapper, body, functionOf(body));
} catch (ParseException | UnknownHederaFunctionality e) {
// Fail immediately with invalid transactions that should not be in any production record stream
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/validators/block/StateChangesValidator.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/validators/block/StateChangesValidator.java
index 482f0bf00d88..2aff4ad374e3 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/validators/block/StateChangesValidator.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/junit/support/validators/block/StateChangesValidator.java
@@ -701,8 +701,13 @@ private void validateBlockProof(
wrapsMessage,
context.publicKeysFor(nonRecursiveProof.signingNodeIds()),
nonRecursiveProof.aggregatedSignature().toByteArray()),
- "Invalid aggregated signature in proof (start round #" + firstRound + ") - context "
- + context);
+ "Invalid aggregated signature "
+ + nonRecursiveProof.aggregatedSignature()
+ + " in proof (start round #" + firstRound
+ + ", signing node ids " + nonRecursiveProof.signingNodeIds()
+ + ") on WRAPS message " + Bytes.wrap(wrapsMessage)
+ + " with metadata " + vk
+ + " - context " + context);
}
case WRAPS_PROOF -> {
final var compressedProof = chainOfTrustProof.wrapsProofOrThrow();
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoGetInfoRegression.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoGetInfoRegression.java
index c3b0f70249ee..96259317820a 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoGetInfoRegression.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/crypto/CryptoGetInfoRegression.java
@@ -253,7 +253,6 @@ public Stream cryptoGetAccountBalanceQueryAssociationThrottles() {
tokenNames.add("t" + i);
}
final var ops = new ArrayList();
- ops.add(overridingThrottles("testSystemFiles/tiny-get-balance-throttle.json"));
ops.add(overridingAllOf(Map.of("tokens.countingGetBalanceThrottleEnabled", "true")));
ops.add(cryptoCreate(TARGET_ACC).withMatchingEvmAddress());
tokenNames.forEach(t -> {
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/PerpetualTransfers.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/PerpetualTransfers.java
index 30fc7d5b5b0b..a24864048084 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/PerpetualTransfers.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/PerpetualTransfers.java
@@ -1,13 +1,14 @@
// SPDX-License-Identifier: Apache-2.0
package com.hedera.services.bdd.suites.misc;
+import static com.hedera.services.bdd.junit.TestTags.ADHOC;
import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE;
import static com.hedera.services.bdd.spec.HapiSpec.hapiTest;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate;
import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer;
import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo;
import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runWithProvider;
-import static java.util.concurrent.TimeUnit.SECONDS;
+import static java.util.concurrent.TimeUnit.HOURS;
import com.hedera.services.bdd.junit.HapiTest;
import com.hedera.services.bdd.spec.HapiSpec;
@@ -27,10 +28,11 @@
import org.junit.jupiter.api.Tag;
@Tag(NOT_REPEATABLE)
+@Tag(ADHOC)
public class PerpetualTransfers {
- private final AtomicLong duration = new AtomicLong(30);
- private final AtomicReference unit = new AtomicReference<>(SECONDS);
- private final AtomicInteger maxOpsPerSec = new AtomicInteger(500);
+ private final AtomicLong duration = new AtomicLong(24);
+ private final AtomicReference unit = new AtomicReference<>(HOURS);
+ private final AtomicInteger maxOpsPerSec = new AtomicInteger(1);
@HapiTest
final Stream canTransferBackAndForthForever() {
diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/system/JustQuiesceTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/system/JustQuiesceTest.java
index 650cd7227606..edd6ad0d2a72 100644
--- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/system/JustQuiesceTest.java
+++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/system/JustQuiesceTest.java
@@ -1,7 +1,6 @@
// SPDX-License-Identifier: Apache-2.0
package com.hedera.services.bdd.suites.regression.system;
-import static com.hedera.services.bdd.junit.TestTags.ADHOC;
import static com.hedera.services.bdd.spec.HapiSpec.hapiTest;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance;
import static com.hedera.services.bdd.spec.queries.QueryVerbs.getScheduleInfo;
@@ -26,12 +25,10 @@
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;
import org.junit.jupiter.api.DynamicTest;
-import org.junit.jupiter.api.Tag;
/**
* Simple quiescence test to run in HAPI (Misc) check, which includes a {@code streamMode=RECORDS} variant.
*/
-@Tag(ADHOC)
public class JustQuiesceTest {
@HapiTest
final Stream justQuiesce() {
diff --git a/platform-sdk/base-utility/src/main/java/org/hiero/base/io/streams/SerializableDataInputStream.java b/platform-sdk/base-utility/src/main/java/org/hiero/base/io/streams/SerializableDataInputStream.java
index 859fd5dc4a79..ee414018977e 100644
--- a/platform-sdk/base-utility/src/main/java/org/hiero/base/io/streams/SerializableDataInputStream.java
+++ b/platform-sdk/base-utility/src/main/java/org/hiero/base/io/streams/SerializableDataInputStream.java
@@ -1,6 +1,7 @@
// SPDX-License-Identifier: Apache-2.0
package org.hiero.base.io.streams;
+import static com.hedera.pbj.runtime.Codec.DEFAULT_MAX_DEPTH;
import static org.hiero.base.io.streams.SerializableStreamConstants.NULL_CLASS_ID;
import static org.hiero.base.io.streams.SerializableStreamConstants.NULL_LIST_ARRAY_LENGTH;
import static org.hiero.base.io.streams.SerializableStreamConstants.NULL_VERSION;
@@ -38,6 +39,7 @@
* for use with the SerializableDet interface, and its use is described there.
*/
public class SerializableDataInputStream extends AugmentedDataInputStream {
+ private static final int MAX_PBJ_RECORD_SIZE = 33554432;
private static final Set SUPPORTED_PROTOCOL_VERSIONS = Set.of(SERIALIZATION_PROTOCOL_VERSION);
@@ -595,7 +597,7 @@ public T readPbjRecord(@NonNull final Codec codec) throws IOException {
final int size = readInt();
readableSequentialData.limit(readableSequentialData.position() + size);
try {
- final T parsed = codec.parse(readableSequentialData);
+ final T parsed = codec.parse(readableSequentialData, false, false, DEFAULT_MAX_DEPTH, MAX_PBJ_RECORD_SIZE);
if (readableSequentialData.position() != readableSequentialData.limit()) {
throw new EOFException("PBJ record was not fully read");
}
diff --git a/platform-sdk/swirlds-virtualmap/src/main/java/com/swirlds/virtualmap/VirtualMap.java b/platform-sdk/swirlds-virtualmap/src/main/java/com/swirlds/virtualmap/VirtualMap.java
index 6a90cf9c809d..92645dd365c7 100644
--- a/platform-sdk/swirlds-virtualmap/src/main/java/com/swirlds/virtualmap/VirtualMap.java
+++ b/platform-sdk/swirlds-virtualmap/src/main/java/com/swirlds/virtualmap/VirtualMap.java
@@ -1,6 +1,7 @@
// SPDX-License-Identifier: Apache-2.0
package com.swirlds.virtualmap;
+import static com.hedera.pbj.runtime.Codec.DEFAULT_MAX_DEPTH;
import static com.swirlds.common.io.streams.StreamDebugUtils.deserializeAndDebugOnFailure;
import static com.swirlds.common.threading.manager.AdHocThreadManager.getStaticThreadManager;
import static com.swirlds.logging.legacy.LogMarker.EXCEPTION;
@@ -161,6 +162,8 @@
public final class VirtualMap extends PartialBinaryMerkleInternal
implements CustomReconnectRoot, ExternalSelfSerializable, Labeled, MerkleInternal, VirtualRoot {
+ private static final int MAX_PBJ_RECORD_SIZE = 33554432;
+
/**
* Hardcoded virtual map label
*/
@@ -684,7 +687,14 @@ public V remove(@NonNull final Bytes key, @NonNull final Codec valueCodec
requireNonNull(valueCodec);
Bytes removedValueBytes = remove(key);
try {
- return removedValueBytes == null ? null : valueCodec.parse(removedValueBytes);
+ return removedValueBytes == null
+ ? null
+ : valueCodec.parse(
+ removedValueBytes.toReadableSequentialData(),
+ false,
+ false,
+ DEFAULT_MAX_DEPTH,
+ MAX_PBJ_RECORD_SIZE);
} catch (final ParseException e) {
throw new RuntimeException("Failed to deserialize a value from bytes", e);
}
diff --git a/settings.gradle.kts b/settings.gradle.kts
index a88ede875ede..9f52006d528c 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -1,7 +1,7 @@
// SPDX-License-Identifier: Apache-2.0
plugins {
id("org.hiero.gradle.build") version "0.6.2"
- id("com.hedera.pbj.pbj-compiler") version "0.12.8" apply false
+ id("com.hedera.pbj.pbj-compiler") version "0.12.10" apply false
}
javaModules {