Commit

fix the types/test
rebase fixes
g11tech committed Aug 9, 2024
1 parent a0c5d27 commit c7f6341
Showing 13 changed files with 80 additions and 58 deletions.
32 changes: 20 additions & 12 deletions packages/beacon-node/src/chain/blocks/importBlock.ts
@@ -464,16 +464,20 @@ export async function importBlock(
blockInput.type === BlockInputType.availableData &&
this.emitter.listenerCount(routes.events.EventType.blobSidecar)
) {
const {blobs} = blockInput.blockData;
for (const blobSidecar of blobs) {
const {index, kzgCommitment} = blobSidecar;
this.emitter.emit(routes.events.EventType.blobSidecar, {
blockRoot: blockRootHex,
slot: blockSlot,
index,
kzgCommitment: toHexString(kzgCommitment),
versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)),
});
if (blockInput.blockData.fork === ForkName.deneb) {
const {blobs} = blockInput.blockData;
for (const blobSidecar of blobs) {
const {index, kzgCommitment} = blobSidecar;
this.emitter.emit(routes.events.EventType.blobSidecar, {
blockRoot: blockRootHex,
slot: blockSlot,
index,
kzgCommitment: toHexString(kzgCommitment),
versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)),
});
}
} else {
// TODO add event for datacolumns
}
}
});
Expand All @@ -494,8 +498,12 @@ export async function importBlock(
// out of data range blocks and import then in forkchoice although one would not be able to
// attest and propose with such head similar to optimistic sync
if (blockInput.type === BlockInputType.availableData) {
const {blobsSource} = blockInput.blockData;
this.metrics?.importBlock.blobsBySource.inc({blobsSource});
if (blockInput.blockData.fork === ForkName.deneb) {
const {blobsSource} = blockInput.blockData;
this.metrics?.importBlock.blobsBySource.inc({blobsSource});
} else {
// TODO add data columns metrics
}
}

const advancedSlot = this.clock.slotWithFutureTolerance(REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC);
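The hunks above gate blob-specific handling on the new `fork` discriminant: only Deneb block data carries `blobs`/`blobsSource`, while post-Deneb (PeerDAS) data will carry columns instead. A minimal sketch of that narrowing pattern, using simplified stand-in types rather than the real Lodestar `BlockInput` definitions:

```ts
// Stand-in types for illustration only — not the actual Lodestar definitions.
enum Fork {
  deneb = "deneb",
  electra = "electra",
}

type BlockDataSketch =
  | {fork: Fork.deneb; blobs: Uint8Array[]; blobsSource: string}
  | {fork: Fork.electra; dataColumns: Uint8Array[]; dataColumnsSource: string};

function countBlobs(blockData: BlockDataSketch): number {
  if (blockData.fork === Fork.deneb) {
    // The discriminant check narrows the union, so blob-only fields are safe to read.
    return blockData.blobs.length;
  }
  // Electra-style data carries columns, not blobs; handled separately (see the TODOs above).
  return 0;
}
```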
16 changes: 8 additions & 8 deletions packages/beacon-node/src/util/sszBytes.ts
@@ -209,16 +209,16 @@ export function getSlotFromBlobSidecarSerialized(data: Uint8Array): Slot | null
}
*/

const SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR = 20;
export function getSlotFromDataColumnSidecarSerialized(data: Uint8Array): Slot | null {
if (data.length < SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR + SLOT_SIZE) {
return null;
}

return getSlotFromOffset(data, SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR);
const SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR = 20;
export function getSlotFromDataColumnSidecarSerialized(data: Uint8Array): Slot | null {
if (data.length < SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR + SLOT_SIZE) {
return null;
}

return getSlotFromOffset(data, SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR);
}

/**
* Read only the first 4 bytes of Slot, max value is 4,294,967,295 will be reached 1634 years after genesis
*
* If the high bytes are not zero, return null
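The restored helper above reads the slot straight out of a serialized `DataColumnSidecar` at a fixed byte offset, mirroring the commented-out blob-sidecar variant. A rough, self-contained sketch of that kind of fixed-offset read, following the constant and the "first 4 bytes of Slot" comment shown in this hunk (the real `getSlotFromOffset` may differ in details):

```ts
// Assumed values, taken from the diff above.
const SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR = 20;
const SLOT_SIZE = 8; // SSZ Slot is a uint64

function readSlotAtFixedOffset(data: Uint8Array, offset: number): number | null {
  if (data.length < offset + SLOT_SIZE) {
    return null;
  }
  const view = new DataView(data.buffer, data.byteOffset + offset, SLOT_SIZE);
  // Read only the low 4 bytes (little-endian); if the high 4 bytes are non-zero,
  // the slot is out of range for this fast path, so bail out.
  const low = view.getUint32(0, true);
  const high = view.getUint32(4, true);
  return high === 0 ? low : null;
}

// Usage sketch:
// const slot = readSlotAtFixedOffset(serializedSidecar, SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR);
```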
2 changes: 2 additions & 0 deletions packages/beacon-node/test/e2e/network/mdns.test.ts
@@ -16,6 +16,7 @@ import {generateState, zeroProtoBlock} from "../../utils/state.js";
import {testLogger} from "../../utils/logger.js";
import {GossipHandlers} from "../../../src/network/gossip/index.js";
import {memoOnce} from "../../utils/cache.js";
import {computeNodeId} from "../../../src/network/subnets/index.js";

let port = 9000;
const mu = "/ip4/127.0.0.1/tcp/0";
@@ -98,6 +99,7 @@ describe.skip("mdns", function () {
const network = await Network.init({
...modules,
...(await createNetworkModules(mu, peerId, {...opts, mdns: true})),
nodeId: computeNodeId(peerId),
logger,
});

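`Network.init` in these tests now also needs a `nodeId` computed from the libp2p peer id. A small sketch of the call shape, with stand-in types and a `computeNodeId` signature assumed from how the tests call it:

```ts
// Stand-in types for illustration; the real ones come from libp2p and Lodestar.
type PeerId = {toString(): string};
type NodeId = Uint8Array;

// Assumed one-argument signature, matching the calls added in this commit.
declare function computeNodeId(peerId: PeerId): NodeId;

// Tests now hand the network both identifiers:
function buildNetworkIds(peerId: PeerId): {peerId: PeerId; nodeId: NodeId} {
  return {peerId, nodeId: computeNodeId(peerId)};
}
```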
@@ -16,7 +16,7 @@ import {generateState} from "../../../utils/state.js";
import {waitForEvent} from "../../../utils/events/resolver.js";
import {testLogger} from "../../../utils/logger.js";
import {getValidPeerId} from "../../../utils/peer.js";
import {IAttnetsService} from "../../../../src/network/subnets/index.js";
import {IAttnetsService, computeNodeId} from "../../../../src/network/subnets/index.js";
import {Clock} from "../../../../src/util/clock.js";
import {LocalStatusCache} from "../../../../src/network/statusCache.js";

@@ -82,6 +82,7 @@ describe("network / peers / PeerManager", function () {
syncnetsService: mockSubnetsService,
gossip: {getScore: () => 0, scoreParams: {decayInterval: 1000}} as unknown as Eth2Gossipsub,
peersData: new PeersData(),
nodeId: computeNodeId(peerId1),
},
{
targetPeers: 30,
@@ -1,6 +1,7 @@
import {rimraf} from "rimraf";
import {describe, it, expect, beforeEach, afterEach, beforeAll} from "vitest";
import {ssz} from "@lodestar/types";
import {ByteVectorType} from "@chainsafe/ssz";
import {ssz, electra} from "@lodestar/types";
import {createChainForkConfig} from "@lodestar/config";
import {LevelDbController} from "@lodestar/db";
import {NUMBER_OF_COLUMNS} from "@lodestar/params";
@@ -68,15 +69,18 @@ describe("block archive repository", function () {
(ssz.electra.Cell.fixedSize + ssz.deneb.KZGCommitment.fixedSize + ssz.deneb.KZGProof.fixedSize);

const numColumns = NUMBER_OF_COLUMNS;
const custodyColumns = new Uint8Array(numColumns);
const blobsLen = (singedBlock.message as electra.BeaconBlock).body.blobKzgCommitments.length;

// const dataColumnsSize = ssz.electra.DataColumnSidecar.minSize + blobsLen * (ssz.electra.Cell.fixedSize + ssz.deneb.KZGCommitment.fixedSize + ssz.deneb.KZGProof.fixedSize);

// const dataColumnsLen = blockInput.blockData;
const writeData = {
blockRoot,
slot,
numColumns,
blobsLen,
columnsSize,
custodyColumns,
dataColumnSidecars,
dataColumnsIndex: new ByteVectorType(NUMBER_OF_COLUMNS),
dataColumnSidecars: ssz.electra.DataColumnSidecars,
};

await dataColumnRepo.add(writeData);
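The record written to the data-column repository changes shape in this test. As a rough guide to what is being stored per block — the field names are taken from the diff, but the interface itself is a hypothetical stand-in, not the actual repository type:

```ts
// Hypothetical sketch of a per-block data-column archive record.
interface DataColumnsArchiveRecordSketch<DataColumnSidecar> {
  blockRoot: Uint8Array;
  slot: number;
  // Number of blob KZG commitments in the (Electra) block body.
  blobsLen: number;
  // Per-column serialized size (cell + commitment + proof terms, per the expression in the test).
  columnsSize: number;
  // The sidecars themselves, one per custodied column.
  dataColumnSidecars: DataColumnSidecar[];
}
```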
@@ -117,7 +117,7 @@ describe("beaconBlocksMaybeBlobsByRange", () => {
sendBlobSidecarsByRange: async () => blobSidecars,
} as Partial<INetwork> as INetwork;

const response = await beaconBlocksMaybeBlobsByRange(config, network, peerId, rangeRequest, 0);
const response = await beaconBlocksMaybeBlobsByRange(config, network, peerId, rangeRequest, 0, null);
expect(response).toEqual(expectedResponse);
});
});
8 changes: 4 additions & 4 deletions packages/beacon-node/test/unit/sync/range/batch.test.ts
@@ -42,7 +42,7 @@ describe("sync / range / batch", () => {
// retry download: AwaitingDownload -> Downloading
// downloadingSuccess: Downloading -> AwaitingProcessing
batch.startDownloading(peer);
batch.downloadingSuccess(blocksDownloaded);
batch.downloadingSuccess({blocks: blocksDownloaded, pendingDataColumns: null});
expect(batch.state.status).toBe(BatchStatus.AwaitingProcessing);

// startProcessing: AwaitingProcessing -> Processing
@@ -57,7 +57,7 @@
// retry download + processing: AwaitingDownload -> Downloading -> AwaitingProcessing -> Processing
// processingSuccess: Processing -> AwaitingValidation
batch.startDownloading(peer);
batch.downloadingSuccess(blocksDownloaded);
batch.downloadingSuccess({blocks: blocksDownloaded, pendingDataColumns: null});
batch.startProcessing();
batch.processingSuccess();
expect(batch.state.status).toBe(BatchStatus.AwaitingValidation);
@@ -68,7 +68,7 @@

// retry download + processing + validation: AwaitingDownload -> Downloading -> AwaitingProcessing -> Processing -> AwaitingValidation
batch.startDownloading(peer);
batch.downloadingSuccess(blocksDownloaded);
batch.downloadingSuccess({blocks: blocksDownloaded, pendingDataColumns: null});
batch.startProcessing();
batch.processingSuccess();
expect(batch.state.status).toBe(BatchStatus.AwaitingValidation);
@@ -79,7 +79,7 @@
const batch = new Batch(startEpoch, config);

expectThrowsLodestarError(
() => batch.downloadingSuccess(blocksDownloaded),
() => batch.downloadingSuccess({blocks: blocksDownloaded, pendingDataColumns: []}),
new BatchError({
code: BatchErrorCode.WRONG_STATUS,
startEpoch,
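`downloadingSuccess` no longer takes a bare block array: it now records which data columns are still pending from other custody peers alongside the downloaded blocks. A minimal model of that result shape as the tests use it — the names come from the diff, the concrete types are assumptions:

```ts
// Minimal sketch; the real Batch class lives in src/sync/range/batch.ts.
type DownloadSuccessSketch<Block> = {
  blocks: Block[];
  // Column indices still owed by other custody peers, or null when there is no
  // data-column work for this batch (e.g. pre-PeerDAS forks).
  pendingDataColumns: number[] | null;
};

function isFullyDownloaded<Block>(result: DownloadSuccessSketch<Block>): boolean {
  return result.pendingDataColumns === null || result.pendingDataColumns.length === 0;
}
```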
24 changes: 16 additions & 8 deletions packages/beacon-node/test/unit/sync/range/chain.test.ts
@@ -72,7 +72,11 @@ describe("sync / range / chain", () => {
}
};

const downloadBeaconBlocksByRange: SyncChainFns["downloadBeaconBlocksByRange"] = async (peerId, request) => {
const downloadBeaconBlocksByRange: SyncChainFns["downloadBeaconBlocksByRange"] = async (
peer,
request,
partialDownload
) => {
const blocks: BlockInput[] = [];
for (let i = request.startSlot; i < request.startSlot + request.count; i += request.step) {
if (skippedSlots?.has(i)) {
@@ -94,7 +98,7 @@
)
);
}
return blocks;
return {blocks, pendingDataColumns: null};
};

const target: ChainTarget = {slot: computeStartSlotAtEpoch(targetEpoch), root: ZERO_HASH};
@@ -111,7 +115,7 @@
);

const peers = [peer];
for (const peer of peers) initialSync.addPeer(peer, target);
for (const peer of peers) initialSync.addPeer(peer, target, []);

initialSync.startSyncing(startEpoch);
});
@@ -124,7 +128,11 @@
const peers = [peer];

const processChainSegment: SyncChainFns["processChainSegment"] = async () => {};
const downloadBeaconBlocksByRange: SyncChainFns["downloadBeaconBlocksByRange"] = async (peer, request) => {
const downloadBeaconBlocksByRange: SyncChainFns["downloadBeaconBlocksByRange"] = async (
peer,
request,
partialDownload
) => {
const blocks: BlockInput[] = [];
for (let i = request.startSlot; i < request.startSlot + request.count; i += request.step) {
blocks.push(
@@ -139,7 +147,7 @@
)
);
}
return blocks;
return {blocks, pendingDataColumns: null};
};

const target: ChainTarget = {slot: computeStartSlotAtEpoch(targetEpoch), root: ZERO_HASH};
@@ -157,7 +165,7 @@

// Add peers after some time
setTimeout(() => {
for (const peer of peers) initialSync.addPeer(peer, target);
for (const peer of peers) initialSync.addPeer(peer, target, []);
}, 20);

initialSync.startSyncing(startEpoch);
@@ -181,9 +189,9 @@ function logSyncChainFns(logger: Logger, fns: SyncChainFns): SyncChainFns {
logger.debug("mock processChainSegment", {blocks: blocks.map((b) => b.block.message.slot).join(",")});
return fns.processChainSegment(blocks, syncType);
},
downloadBeaconBlocksByRange(peer, request) {
downloadBeaconBlocksByRange(peer, request, partialDownload) {
logger.debug("mock downloadBeaconBlocksByRange", request);
return fns.downloadBeaconBlocksByRange(peer, request);
return fns.downloadBeaconBlocksByRange(peer, request, null);
},
reportPeer(peer, action, actionName) {
logger.debug("mock reportPeer", {peer: peer.toString(), action, actionName});
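The same shape flows through the sync chain: `downloadBeaconBlocksByRange` now takes a `partialDownload` argument and resolves to `{blocks, pendingDataColumns}`, and `addPeer` takes the peer's custody columns as a third argument. A hedged sketch of the updated callback contract from these tests' point of view (simplified stand-in types, not the real `SyncChainFns`):

```ts
// Simplified stand-ins for illustration only.
type PeerIdStr = string;
type ChainTargetSketch = {slot: number; root: Uint8Array};
type RangeRequest = {startSlot: number; count: number; step: number};
type PartialDownload = {blocks: unknown[]; pendingDataColumns: number[]} | null;

interface SyncChainFnsSketch {
  downloadBeaconBlocksByRange(
    peer: PeerIdStr,
    request: RangeRequest,
    partialDownload: PartialDownload
  ): Promise<{blocks: unknown[]; pendingDataColumns: number[] | null}>;
  // Peers are now registered together with the columns they custody.
  addPeer(peer: PeerIdStr, target: ChainTargetSketch, custodyColumns: number[]): void;
}
```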
@@ -227,7 +227,7 @@ describe("sync / range / batches", () => {
batch.startDownloading(peer);
if (status === BatchStatus.Downloading) return batch;

batch.downloadingSuccess([]);
batch.downloadingSuccess({blocks: [], pendingDataColumns: []});
if (status === BatchStatus.AwaitingProcessing) return batch;

batch.startProcessing();
@@ -3,6 +3,7 @@ import {config} from "@lodestar/config/default";
import {Batch} from "../../../../../src/sync/range/batch.js";
import {ChainPeersBalancer} from "../../../../../src/sync/range/utils/peerBalancer.js";
import {getRandPeerIdStr} from "../../../../utils/peer.js";
import {PeerIdStr} from "../../../../../src/util/peerId.js";

describe("sync / range / peerBalancer", () => {
it("bestPeerToRetryBatch", async () => {
@@ -14,14 +15,16 @@
const batch0 = new Batch(0, config);
const batch1 = new Batch(1, config);

const custodyColumn = new Map<PeerIdStr, {custodyColumns: number[]}>();

// Batch zero has a failedDownloadAttempt with peer0
batch0.startDownloading(peer1);
batch0.downloadingError();

// peer2 is busy downloading batch1
batch1.startDownloading(peer2);

const peerBalancer = new ChainPeersBalancer([peer1, peer2, peer3], [batch0, batch1]);
const peerBalancer = new ChainPeersBalancer([peer1, peer2, peer3], custodyColumn, [batch0, batch1]);

expect(peerBalancer.bestPeerToRetryBatch(batch0)).toBe(peer3);

@@ -41,11 +44,13 @@
const batch0 = new Batch(0, config);
const batch1 = new Batch(1, config);

const custodyColumn = new Map<PeerIdStr, {custodyColumns: number[]}>();

// peer1 and peer2 are busy downloading
batch0.startDownloading(peer1);
batch1.startDownloading(peer2);

const peerBalancer = new ChainPeersBalancer([peer1, peer2, peer3, peer4], [batch0, batch1]);
const peerBalancer = new ChainPeersBalancer([peer1, peer2, peer3, peer4], custodyColumn, [batch0, batch1]);

const idlePeers = peerBalancer.idlePeers();

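`ChainPeersBalancer` now also receives a map from peer id to the columns that peer custodies, so retries can prefer peers able to serve the outstanding columns. The map type below matches what the test builds; the filtering helper is an assumption meant only to illustrate the idea, not the real balancer logic:

```ts
type PeerIdStr = string;

// Same shape the test constructs: peer id -> columns that peer custodies.
type CustodyColumnsMap = Map<PeerIdStr, {custodyColumns: number[]}>;

// Hypothetical helper: keep only peers that custody every column still needed.
function peersServingColumns(
  peers: PeerIdStr[],
  custody: CustodyColumnsMap,
  neededColumns: number[]
): PeerIdStr[] {
  return peers.filter((peer) => {
    const entry = custody.get(peer);
    if (!entry) return false;
    const owned = new Set(entry.custodyColumns);
    return neededColumns.every((column) => owned.has(column));
  });
}
```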
7 changes: 5 additions & 2 deletions packages/beacon-node/test/utils/networkWithMockDb.ts
@@ -9,6 +9,7 @@ import {GossipHandlers, Network, NetworkInitModules, getReqRespHandlers} from ".
import {NetworkOptions, defaultNetworkOptions} from "../../src/network/options.js";
import {GetReqRespHandlerFn} from "../../src/network/reqresp/types.js";
import {getMockedBeaconDb} from "../mocks/mockedBeaconDb.js";
import {computeNodeId} from "../../src/network/subnets/index.js";
import {createCachedBeaconStateTest} from "./cachedBeaconState.js";
import {ClockStatic} from "./clock.js";
import {testLogger} from "./logger.js";
@@ -72,7 +73,7 @@ export async function getNetworkForTest(
}
);

const modules: Omit<NetworkInitModules, "opts" | "peerId" | "logger"> = {
const modules: Omit<NetworkInitModules, "opts" | "peerId" | "logger" | "nodeId"> = {
config: beaconConfig,
chain,
db,
@@ -81,9 +82,11 @@
metrics: null,
};

const peerId = await createSecp256k1PeerId();
const network = await Network.init({
...modules,
peerId: await createSecp256k1PeerId(),
peerId,
nodeId: computeNodeId(peerId),
opts: {
...defaultNetworkOptions,
maxPeers: 1,
2 changes: 1 addition & 1 deletion packages/types/src/sszTypes.ts
@@ -20,7 +20,7 @@ const typesByFork = {
[ForkName.bellatrix]: {...phase0, ...altair, ...bellatrix},
[ForkName.capella]: {...phase0, ...altair, ...bellatrix, ...capella},
[ForkName.deneb]: {...phase0, ...altair, ...bellatrix, ...capella, ...deneb},
[ForkName.deneb]: {...phase0, ...altair, ...bellatrix, ...capella, ...deneb, ...electra},
[ForkName.electra]: {...phase0, ...altair, ...bellatrix, ...capella, ...deneb, ...electra},
};

/**
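The sszTypes fix is a duplicate-key bug, presumably from the rebase: both rows were keyed `[ForkName.deneb]`, so the second computed key silently overwrote the first and no `electra` entry existed at all, which is what broke the types/tests. A tiny generic illustration of the overwrite behaviour (not the Lodestar map itself):

```ts
enum ForkNameSketch {
  deneb = "deneb",
  electra = "electra",
}

// Before the fix, the electra row was accidentally keyed with the deneb fork as well.
const secondKey: ForkNameSketch = ForkNameSketch.deneb; // should have been .electra
const buggy = {
  [ForkNameSketch.deneb]: "deneb types",
  [secondKey]: "deneb + electra types", // same key at runtime -> silently overwrites
};
console.log(Object.keys(buggy)); // ["deneb"] — no electra entry, so electra lookups break

// After the fix: distinct keys, one entry per fork.
const fixed = {
  [ForkNameSketch.deneb]: "deneb types",
  [ForkNameSketch.electra]: "deneb + electra types",
};
console.log(Object.keys(fixed)); // ["deneb", "electra"]
```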
17 changes: 4 additions & 13 deletions yarn.lock
@@ -4431,7 +4431,7 @@ binary@~0.3.0:
buffers "~0.1.1"
chainsaw "~0.1.0"

bindings@^1.3.0, bindings@^1.5.0:
bindings@^1.3.0:
version "1.5.0"
resolved "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz"
integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==
@@ -4693,13 +4693,9 @@ [email protected]:
resolved "https://registry.yarnpkg.com/byte-size/-/byte-size-8.1.1.tgz#3424608c62d59de5bfda05d31e0313c6174842ae"
integrity sha512-tUkzZWK0M/qdoLEqikxBWe4kumyuwjl3HO6zHTr4yEI23EojPtLYXdG1+AQY7MN0cGyNDvEaJ8wiYQm6P2bPxg==

c-kzg@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/c-kzg/-/c-kzg-2.1.2.tgz#355eca750c1b70398f533be5dd33e7ffbde005d8"
integrity sha512-QmP4vTp5H2hN4mkKXF9VXtZ7k7oH1kmBxLiFT/7LcyE78RgX32zvPGfwtRAsrq/b3T6RZFIsHZvQEN7JdzoNOg==
dependencies:
bindings "^1.5.0"
node-addon-api "^5.0.0"
c-kzg@matthewkeil/c-kzg-4844#13aa01464479aa7c1ccafa64d52cbc17699ffa07:
version "4.0.0-alpha.0"
resolved "https://codeload.github.com/matthewkeil/c-kzg-4844/tar.gz/13aa01464479aa7c1ccafa64d52cbc17699ffa07"

cac@^6.7.14:
version "6.7.14"
@@ -9757,11 +9753,6 @@ node-addon-api@^3.2.1:
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161"
integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==

node-addon-api@^5.0.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-5.1.0.tgz#49da1ca055e109a23d537e9de43c09cca21eb762"
integrity sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==

node-domexception@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
