Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions packages/ssz/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,12 +31,20 @@ export {CompositeType, CompositeTypeAny, CompositeView, CompositeViewDU, isCompo
export {TreeView} from "./view/abstract.js";
export {ValueOfFields} from "./view/container.js";
export {TreeViewDU} from "./viewDU/abstract.js";
export {ListCompositeTreeViewDU} from "./viewDU/listComposite.js";
export {ListBasicTreeViewDU} from "./viewDU/listBasic.js";
export {ArrayCompositeTreeViewDUCache} from "./viewDU/arrayComposite.js";
export {ContainerNodeStructTreeViewDU} from "./viewDU/containerNodeStruct.js";

// Values
export {BitArray, getUint8ByteToBitBooleanArray} from "./value/bitArray.js";

// Utils
export {fromHexString, toHexString, byteArrayEquals} from "./util/byteArray.js";

export {Snapshot} from "./util/types.js";
export {hash64, symbolCachedPermanentRoot} from "./util/merkleize.js";
export {upgradeToNewType} from "./util/upgrade.js";

// others
export {BranchNodeStruct} from "./branchNodeStruct.js";
19 changes: 19 additions & 0 deletions packages/ssz/test/lodestarTypes/phase0/listValidator.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import {Node} from "@chainsafe/persistent-merkle-tree";
import {ListCompositeType} from "../../../src/type/listComposite.js";
import {ListCompositeTreeViewDU} from "../../../src/viewDU/listComposite.js";
import {ValidatorNodeStructType} from "./validator.js";
import {ListValidatorTreeViewDU} from "./viewDU/listValidator.js";

/**
 * Model ssz type for a list of validators in ethereum consensus layer.
 * This defines ListValidatorTreeViewDU to work with validators in batch.
 */
export class ListValidatorType extends ListCompositeType<ValidatorNodeStructType> {
  constructor(limit: number) {
    super(new ValidatorNodeStructType(), limit);
  }

  /**
   * Override the base factory so list views are the batch-hashing
   * ListValidatorTreeViewDU instead of the generic ListCompositeTreeViewDU.
   *
   * The base signature types `cache` as `unknown`; instead of silencing the
   * checker with `as any`, derive the concrete cache type from the view
   * constructor itself so this stays in sync if its signature changes.
   */
  getViewDU(node: Node, cache?: unknown): ListCompositeTreeViewDU<ValidatorNodeStructType> {
    return new ListValidatorTreeViewDU(this, node, cache as ConstructorParameters<typeof ListValidatorTreeViewDU>[2]);
  }
}
11 changes: 8 additions & 3 deletions packages/ssz/test/lodestarTypes/phase0/sszTypes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ import {
BitListType,
BitVectorType,
ContainerType,
ContainerNodeStructType,
ListBasicType,
ListCompositeType,
VectorBasicType,
Expand All @@ -18,6 +17,10 @@ import {
ATTESTATION_SUBNET_COUNT,
} from "../params.js";
import * as primitiveSsz from "../primitive/sszTypes.js";
import {ListValidatorType} from "./listValidator.js";
import {ValidatorNodeStruct} from "./validator.js";

export {ValidatorNodeStruct};

const {
EPOCHS_PER_ETH1_VOTING_PERIOD,
Expand Down Expand Up @@ -261,12 +264,14 @@ export const ValidatorContainer = new ContainerType(
{typeName: "Validator", jsonCase: "eth2"}
);

export const ValidatorNodeStruct = new ContainerNodeStructType(ValidatorContainer.fields, ValidatorContainer.opts);
// The main Validator type is the 'ContainerNodeStructType' version
export const Validator = ValidatorNodeStruct;

// Export as stand-alone for direct tree optimizations
export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT);
// Since Mar 2025, instead of using ListCompositeType:
//   export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT);
// we use ListValidatorType, which supports batch hashing.
export const Validators = new ListValidatorType(VALIDATOR_REGISTRY_LIMIT);
export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT);
export const RandaoMixes = new VectorCompositeType(Bytes32, EPOCHS_PER_HISTORICAL_VECTOR);
export const Slashings = new VectorBasicType(Gwei, EPOCHS_PER_SLASHINGS_VECTOR);
Expand Down
130 changes: 130 additions & 0 deletions packages/ssz/test/lodestarTypes/phase0/validator.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
import {ByteViews} from "../../../src/type/abstract.js";
import {ContainerNodeStructType} from "../../../src/type/containerNodeStruct.js";
import {ValueOfFields} from "../../../src/view/container.js";
import * as primitiveSsz from "../primitive/sszTypes.js";

const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz;

// 2**32, used to split a JS number into low/high uint32 words when writing uint64 fields,
// see https://github.com/ChainSafe/ssz/blob/ssz-v0.15.1/packages/ssz/src/type/uint.ts
const NUMBER_2_POW_32 = 2 ** 32;

/*
 * Below constants are respective to their ssz type in `ValidatorType`.
 */
const UINT32_SIZE = 4;
const PUBKEY_SIZE = 48;
const WITHDRAWAL_CREDENTIALS_SIZE = 32;
const SLASHED_SIZE = 1;
// size of one merkle chunk (leaf) in bytes
const CHUNK_SIZE = 32;

// Field schema of the phase0 Validator container.
// Field order matters: serialization and merkleization below depend on it.
export const ValidatorType = {
  pubkey: BLSPubkey,
  withdrawalCredentials: Bytes32,
  effectiveBalance: UintNum64,
  slashed: Boolean,
  activationEligibilityEpoch: EpochInf,
  activationEpoch: EpochInf,
  exitEpoch: EpochInf,
  withdrawableEpoch: EpochInf,
};

/**
 * ContainerNodeStructType for the phase0 Validator with a hand-written
 * serializer to improve performance of state.validators.serialize().
 */
export class ValidatorNodeStructType extends ContainerNodeStructType<typeof ValidatorType> {
  constructor() {
    super(ValidatorType, {typeName: "Validator", jsonCase: "eth2"});
  }

  /**
   * Serialize one validator at `offset`, writing each field in schema order.
   * @returns the offset immediately after the last written byte
   */
  value_serializeToBytes(
    views: ByteViews,
    offset: number,
    validator: ValueOfFields<typeof ValidatorType>
  ): number {
    const {uint8Array: out, dataView: dv} = views;
    let cursor = offset;

    // raw byte copies: pubkey (48 bytes) then withdrawalCredentials (32 bytes)
    out.set(validator.pubkey, cursor);
    cursor += PUBKEY_SIZE;
    out.set(validator.withdrawalCredentials, cursor);
    cursor += WITHDRAWAL_CREDENTIALS_SIZE;

    // effectiveBalance is UintNum64: low then high uint32 words, little-endian
    const balance = validator.effectiveBalance;
    dv.setUint32(cursor, balance & 0xffffffff, true);
    cursor += UINT32_SIZE;
    dv.setUint32(cursor, (balance / NUMBER_2_POW_32) & 0xffffffff, true);
    cursor += UINT32_SIZE;

    // slashed boolean as a single byte
    out[cursor] = validator.slashed ? 1 : 0;
    cursor += SLASHED_SIZE;

    // the four epoch fields share the uint64-with-Infinity encoding
    cursor = writeEpochInf(dv, cursor, validator.activationEligibilityEpoch);
    cursor = writeEpochInf(dv, cursor, validator.activationEpoch);
    cursor = writeEpochInf(dv, cursor, validator.exitEpoch);
    cursor = writeEpochInf(dv, cursor, validator.withdrawableEpoch);

    return cursor;
  }
}

export const ValidatorNodeStruct = new ValidatorNodeStructType();

/**
 * Write level3 and level4 chunk bytes used to compute the validator merkle root.
 * Note that this is for merkleization and differs from serialization (which is
 * more compressed): every field here occupies a full 32-byte chunk.
 *
 * pub0 + pub1 sit at level4; their hash becomes the 1st chunk of level3, then
 * the 8 chunks of level3 are hashed to the validator root.
 *          reserved   withdr     eff        sla        actElig    act        exit       with
 * level3  |----------|----------|----------|----------|----------|----------|----------|----------|
 *          pub0       pub1
 * level4  |----------|----------|
 */
export function validatorToChunkBytes(
  level3: ByteViews,
  level4: Uint8Array,
  value: ValueOfFields<typeof ValidatorType>
): void {
  const {uint8Array: out3, dataView: dv3} = level3;

  // pubkey is 48 bytes = 2 * CHUNK_SIZE, staged at level4; chunk 0 of level3
  // is left for the pubkey root that gets hashed in afterwards
  level4.set(value.pubkey, 0);

  // chunk 1: withdrawalCredentials is already exactly one chunk
  out3.set(value.withdrawalCredentials, 1 * CHUNK_SIZE);

  // chunk 2: effectiveBalance (UintNum64) as low/high uint32 words, little-endian
  const balanceOffset = 2 * CHUNK_SIZE;
  dv3.setUint32(balanceOffset, value.effectiveBalance & 0xffffffff, true);
  dv3.setUint32(balanceOffset + 4, (value.effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true);

  // chunk 3: slashed boolean
  dv3.setUint32(3 * CHUNK_SIZE, value.slashed ? 1 : 0, true);

  // chunks 4-7: epoch fields, uint64 encoding with Infinity -> max uint64
  writeEpochInf(dv3, 4 * CHUNK_SIZE, value.activationEligibilityEpoch);
  writeEpochInf(dv3, 5 * CHUNK_SIZE, value.activationEpoch);
  writeEpochInf(dv3, 6 * CHUNK_SIZE, value.exitEpoch);
  writeEpochInf(dv3, 7 * CHUNK_SIZE, value.withdrawableEpoch);
}

/**
 * Write `value` at `offset` as a little-endian uint64 split into two uint32 words.
 * Infinity encodes as max uint64 (all bytes 0xff), matching the EpochInf type.
 * @returns the offset immediately after the 8 written bytes
 */
function writeEpochInf(dataView: DataView, offset: number, value: number): number {
  const isInf = value === Infinity;
  const lo = isInf ? 0xffffffff : value & 0xffffffff;
  const hi = isInf ? 0xffffffff : (value / NUMBER_2_POW_32) & 0xffffffff;
  dataView.setUint32(offset, lo, true);
  dataView.setUint32(offset + UINT32_SIZE, hi, true);
  return offset + 2 * UINT32_SIZE;
}
179 changes: 179 additions & 0 deletions packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,179 @@
import {byteArrayIntoHashObject} from "@chainsafe/as-sha256";
import {HashComputationLevel, Node, digestNLevel, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree";
import {ListCompositeType} from "../../../../src/type/listComposite.js";
import {ArrayCompositeTreeViewDUCache} from "../../../../src/viewDU/arrayComposite.js";
import {ListCompositeTreeViewDU} from "../../../../src/viewDU/listComposite.js";
import {ValidatorNodeStructType, ValidatorType, validatorToChunkBytes} from "../validator.js";
import {ByteViews} from "../../../../src/type/abstract.js";
import {ContainerNodeStructTreeViewDU} from "../../../../src/viewDU/containerNodeStruct.js";
import {ValidatorIndex} from "../../primitive/types.js";

/**
 * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks.
 * Given a level3 of a validator has 8 chunks, we can hash 4 validators at a time.
 */
const PARALLEL_FACTOR = 4;
/**
 * Allocate memory once for batch hashing validators. All buffers below are
 * module-level and reused across calls, so batch hashing is not reentrant.
 * NOTE(review): the root views below assume digestNLevel writes its resulting
 * 32-byte roots back into the start of the input buffer, as consumed in
 * doBatchHashTreeRootValidators — confirm against digestNLevel's contract.
 */
// each level 3 of validator has 8 chunks, each chunk has 32 bytes
const batchLevel3Bytes = new Uint8Array(PARALLEL_FACTOR * 8 * 32);
const level3ByteViewsArr: ByteViews[] = [];
for (let i = 0; i < PARALLEL_FACTOR; i++) {
  // per-validator view over the shared buffer; DataView bound to the same slice
  const uint8Array = batchLevel3Bytes.subarray(i * 8 * 32, (i + 1) * 8 * 32);
  const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength);
  level3ByteViewsArr.push({uint8Array, dataView});
}
// each level 4 of validator has 2 chunks for pubkey, each chunk has 32 bytes
const batchLevel4Bytes = new Uint8Array(PARALLEL_FACTOR * 2 * 32);
const level4BytesArr: Uint8Array[] = [];
for (let i = 0; i < PARALLEL_FACTOR; i++) {
  level4BytesArr.push(batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32));
}
// after digestNLevel(batchLevel4Bytes, 1), the i-th pubkey root lands in the
// first PARALLEL_FACTOR * 32 bytes of the same buffer
const pubkeyRoots: Uint8Array[] = [];
for (let i = 0; i < PARALLEL_FACTOR; i++) {
  pubkeyRoots.push(batchLevel4Bytes.subarray(i * 32, (i + 1) * 32));
}

// likewise, after digestNLevel(batchLevel3Bytes, 3) the i-th validator root
// lands in the first PARALLEL_FACTOR * 32 bytes of batchLevel3Bytes
const validatorRoots: Uint8Array[] = [];
for (let i = 0; i < PARALLEL_FACTOR; i++) {
  validatorRoots.push(batchLevel3Bytes.subarray(i * 32, (i + 1) * 32));
}
// scratch root for the non-batched remainder path
const validatorRoot = new Uint8Array(32);

/**
 * Similar to ListCompositeTreeViewDU with some differences:
 * - if called without params, it's from the hashTreeRoot() api call, no need to compute roots here
 * - otherwise it's from a batchHashTreeRoot() call, compute validator roots in batch
 */
export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU<ValidatorNodeStructType> {
  constructor(
    readonly type: ListCompositeType<ValidatorNodeStructType>,
    protected _rootNode: Node,
    cache?: ArrayCompositeTreeViewDUCache
  ) {
    super(type, _rootNode, cache);
  }

  /**
   * Commit pending validator view changes into the backing tree.
   *
   * @param hcOffset   level offset to record hash computations at (batch path only)
   * @param hcByLevel  accumulator of hash computations per level; null means this
   *                   is a plain hashTreeRoot()/commit() call and the parent flow is used
   */
  commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void {
    if (hcByLevel === null) {
      // this is not from a batchHashTreeRoot() call, go with the regular flow
      return super.commit();
    }

    const isOldRootHashed = this._rootNode.h0 !== null;
    if (this.viewsChanged.size === 0) {
      // NOTE(review): hcByLevel !== null is always true here (early-returned above);
      // kept as written
      if (!isOldRootHashed && hcByLevel !== null) {
        // not possible to get HashComputations due to BranchNodeStruct
        this._rootNode.root;
      }
      return;
    }

    // TODO - batch: remove this type cast
    const viewsChanged = this.viewsChanged as unknown as Map<
      number,
      ContainerNodeStructTreeViewDU<typeof ValidatorType>
    >;

    // Phase 1: commit each changed child view and collect the indices whose
    // nodes still lack a cached root (h0 === null) for batch hashing.
    const indicesChanged: number[] = [];
    for (const [index, viewChanged] of viewsChanged) {
      // should not have any params here in order not to compute root
      viewChanged.commit();
      // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
      this.nodes[index] = viewChanged.node;
      // `validators.get(i)` was called but it may not modify any property, do not need to compute root
      if (viewChanged.node.h0 === null) {
        indicesChanged.push(index);
      }
    }

    // Phase 2: these validators don't have roots yet, compute roots in batch.
    // sort() mutates indicesChanged in place; sortedIndicesChanged aliases it.
    const sortedIndicesChanged = indicesChanged.sort((a, b) => a - b);
    const nodesChanged: {index: ValidatorIndex; node: Node}[] = new Array<{index: ValidatorIndex; node: Node}>(
      sortedIndicesChanged.length
    );
    for (const [i, validatorIndex] of sortedIndicesChanged.entries()) {
      nodesChanged[i] = {index: validatorIndex, node: this.nodes[validatorIndex]};
    }
    doBatchHashTreeRootValidators(sortedIndicesChanged, viewsChanged);

    // Phase 3: do the remaining commit steps the same as the parent (ArrayCompositeTreeViewDU):
    // splice the changed chunk nodes back into the tree and update the length mixin.
    const indexes = nodesChanged.map((entry) => entry.index);
    const nodes = nodesChanged.map((entry) => entry.node);
    const chunksNode = this.type.tree_getChunksNode(this._rootNode);
    const offsetThis = hcOffset + this.type.tree_chunksNodeOffset();
    // only record hash computations when the old root was already hashed,
    // otherwise the recorded levels would be incomplete
    const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null;
    const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis);

    this._rootNode = this.type.tree_setChunksNode(
      this._rootNode,
      newChunksNode,
      this.dirtyLength ? this._length : null,
      hcOffset,
      hcByLevel
    );

    if (!isOldRootHashed && hcByLevel !== null) {
      // should never happen, handle just in case
      // not possible to get HashComputations due to BranchNodeStruct
      this._rootNode.root;
    }

    this.viewsChanged.clear();
    this.dirtyLength = false;
  }
}

/**
 * Compute and cache the merkle roots of the given validator views.
 * Full groups of PARALLEL_FACTOR validators are hashed in batch through the
 * shared module-level buffers; the remainder is hashed one by one.
 *
 * @param indices     sorted validator indices whose roots need computing
 * @param validators  map of index -> committed validator view
 */
export function doBatchHashTreeRootValidators(
  indices: ValidatorIndex[],
  validators: Map<ValidatorIndex, ContainerNodeStructTreeViewDU<typeof ValidatorType>>
): void {
  const endBatch = indices.length - (indices.length % PARALLEL_FACTOR);

  // hash every PARALLEL_FACTOR (= 4) validators in batch
  for (let i = 0; i < endBatch; i++) {
    // reset the shared buffers at the start of each batch
    if (i % PARALLEL_FACTOR === 0) {
      batchLevel3Bytes.fill(0);
      batchLevel4Bytes.fill(0);
    }
    const indexInBatch = i % PARALLEL_FACTOR;
    const viewIndex = indices[i];
    const validator = validators.get(viewIndex);
    if (validator) {
      validatorToChunkBytes(level3ByteViewsArr[indexInBatch], level4BytesArr[indexInBatch], validator.value);
    }

    if (indexInBatch === PARALLEL_FACTOR - 1) {
      // batch is full: hash level 4 in place, this populates pubkeyRoots
      digestNLevel(batchLevel4Bytes, 1);
      // copy each pubkey root into chunk 0 of its validator's level3 slice
      for (let j = 0; j < PARALLEL_FACTOR; j++) {
        level3ByteViewsArr[j].uint8Array.set(pubkeyRoots[j], 0);
      }
      // hash level 3 in place, this populates validatorRoots
      digestNLevel(batchLevel3Bytes, 3);
      // cache the root of every validator in this batch on its BranchNodeStruct
      // (the inner viewIndex/indexInBatch deliberately shadow the outer ones)
      for (let j = PARALLEL_FACTOR - 1; j >= 0; j--) {
        const viewIndex = indices[i - j];
        const indexInBatch = (i - j) % PARALLEL_FACTOR;
        const viewChanged = validators.get(viewIndex);
        if (viewChanged) {
          const branchNodeStruct = viewChanged.node;
          byteArrayIntoHashObject(validatorRoots[indexInBatch], 0, branchNodeStruct);
        }
      }
    }
  }

  // commit the remaining validators, we can do in batch too but don't want to create new Uint8Array views
  // it's not much different to commit one by one
  for (let i = endBatch; i < indices.length; i++) {
    const viewIndex = indices[i];
    const viewChanged = validators.get(viewIndex);
    if (viewChanged) {
      // compute root for each validator individually through the regular type api
      viewChanged.type.hashTreeRootInto(viewChanged.value, validatorRoot, 0);
      byteArrayIntoHashObject(validatorRoot, 0, viewChanged.node);
    }
  }
}
Loading