refactor: move to new sdks

This commit is contained in:
Derrick Hammer 2023-07-05 22:33:44 -04:00
parent e62eadd436
commit 78d192f53f
Signed by: pcfreak30
GPG Key ID: C997C339BE476FF2
21 changed files with 272 additions and 439 deletions

9
.presetterrc.json Normal file
View File

@ -0,0 +1,9 @@
{
"preset": [
"@lumeweb/presetter-kernel-module-preset"
],
"config": {
"official": true,
"browser": true
}
}

View File

@ -1,50 +0,0 @@
import esbuild from "esbuild";
import { readFile } from "fs/promises";
import path from "path";

// Matches import specifiers ending in "?base64"; the suffix is stripped
// before the underlying file is read from disk.
const base64Filter = /\?base64$/;

// esbuild plugin: any import suffixed with "?base64" resolves to the
// base64-encoded contents of the referenced file, loaded as text.
const base64Plugin = {
  name: "base64",
  setup(build) {
    build.onResolve({ filter: base64Filter }, (args) => ({
      path: args.path,
      pluginData: {
        isAbsolute: path.isAbsolute(args.path),
        resolveDir: args.resolveDir,
      },
      namespace: "base64-loader",
    }));
    build.onLoad(
      { filter: base64Filter, namespace: "base64-loader" },
      async (args) => {
        // Re-anchor relative specifiers against the importer's directory.
        const realPath = args.pluginData.isAbsolute
          ? args.path
          : path.join(args.pluginData.resolveDir, args.path);
        const bytes = await readFile(realPath.replace(base64Filter, ""));
        return {
          contents: Buffer.from(bytes).toString("base64"),
          loader: "text",
        };
      }
    );
  },
};

// Bundle the kernel module into a single self-contained IIFE for the browser.
await esbuild.build({
  entryPoints: ["src/index.ts"],
  outfile: "dist/index.js",
  format: "iife",
  bundle: true,
  legalComments: "external",
  define: {
    global: "self",
    "import.meta": "true",
  },
  plugins: [base64Plugin],
  external: ["fs"],
  inject: ["./polyfill.js"],
});
export {};

View File

@ -3,15 +3,27 @@
"version": "0.1.0", "version": "0.1.0",
"license": "MIT", "license": "MIT",
"type": "module", "type": "module",
"readme": "ERROR: No README data found!",
"_id": "@lumeweb/kernel-eth@0.1.0",
"repository": {
"type": "git",
"url": "gitea@git.lumeweb.com:LumeWeb/kernel-eth.git"
},
"scripts": { "scripts": {
"build-script": "tsc --project tsconfig.build.json && mv dist-build/build.js dist-build/build.mjs", "prepare": "presetter bootstrap",
"compile": "npm run build-script && rimraf node_modules/@lumeweb/dht-rpc-client/node_modules node_modules/@lumeweb/kernel-dht-client/node_modules/libkmodule && node build.js", "build": "shx touch nop.js;run build",
"build": "npm run compile && node ./dist-build/build.mjs dev" "semantic-release": "semantic-release"
},
"devDependencies": {
"@lumeweb/presetter-kernel-module-preset": "^0.1.0-develop.30",
"@rollup/plugin-alias": "^5.0.0",
"patch-package": "^7.0.0",
"rollup-plugin-ignore-import": "^1.3.2"
}, },
"dependencies": { "dependencies": {
"@chainsafe/as-sha256": "0.4.1", "@chainsafe/as-sha256": "0.4.1",
"@chainsafe/bls": "git+https://git.lumeweb.com/LumeWeb/chainsafe-bls.git", "@chainsafe/bls": "^7.1.1",
"@chainsafe/blst": "^0.2.8", "@chainsafe/blst": "0.2.9",
"@chainsafe/ssz": "0.11.1", "@chainsafe/ssz": "0.11.1",
"@ethereumjs/block": "^4.2.1", "@ethereumjs/block": "^4.2.1",
"@ethereumjs/blockchain": "^6.2.1", "@ethereumjs/blockchain": "^6.2.1",
@ -25,40 +37,17 @@
"@lodestar/light-client": "^1.7.2", "@lodestar/light-client": "^1.7.2",
"@lodestar/params": "^1.8.0", "@lodestar/params": "^1.8.0",
"@lodestar/types": "^1.7.2", "@lodestar/types": "^1.7.2",
"@lumeweb/kernel-rpc-client": "git+https://git.lumeweb.com/LumeWeb/kernel-rpc-client.git", "@lumeweb/kernel-rpc-client": "^0.0.2-develop.1",
"@lumeweb/libkernel": "^0.1.0-develop.14",
"@lumeweb/libweb": "^0.2.0-develop.21",
"decimal.js": "^10.4.3", "decimal.js": "^10.4.3",
"ethers": "^6.3.0", "ethers": "^6.3.0",
"json-rpc-2.0": "^1.5.1", "json-rpc-2.0": "^1.5.1",
"libkmodule": "^0.2.53",
"lodash": "^4.17.21", "lodash": "^4.17.21",
"path-browserify": "^1.0.1",
"rlp": "^3.0.0", "rlp": "^3.0.0",
"stream-browserify": "^3.0.0",
"web3-core": "^1.9.0", "web3-core": "^1.9.0",
"web3-core-method": "^1.9.0", "web3-core-method": "^1.9.0",
"web3-eth": "^1.9.0", "web3-eth": "^1.9.0",
"yaml": "^2.2.1" "yaml": "^2.2.1"
},
"devDependencies": {
"@lumeweb/interface-relay": "git+https://git.lumeweb.com/LumeWeb/interface-relay.git",
"@scure/bip39": "^1.2.0",
"@skynetlabs/skynet-nodejs": "^2.9.0",
"@types/lodash": "^4.14.194",
"@types/node": "^18.15.11",
"@types/read": "^0.0.29",
"buffer": "^6.0.3",
"cli-progress": "^3.12.0",
"crypto-browserify": "^3.12.0",
"esbuild": "^0.17.17",
"esbuild-plugin-wasm": "^1.0.0",
"prettier": "^2.8.7",
"process": "^0.11.10",
"read": "^2.1.0",
"typescript": "^5.0.4"
},
"browser": {
"crypto": "crypto-browserify",
"stream": "stream-browserify",
"path": "path-browserify"
} }
} }

View File

@ -0,0 +1,76 @@
diff --git a/node_modules/@chainsafe/bls/lib/herumi/index.js b/node_modules/@chainsafe/bls/lib/herumi/index.js
index 4ed8fdd..f979984 100644
--- a/node_modules/@chainsafe/bls/lib/herumi/index.js
+++ b/node_modules/@chainsafe/bls/lib/herumi/index.js
@@ -1,16 +1,20 @@
-import { SecretKey } from "./secretKey.js";
-import { PublicKey } from "./publicKey.js";
-import { Signature } from "./signature.js";
-import { init, destroy } from "./context.js";
-import { functionalInterfaceFactory } from "../functional.js";
-await init();
-export * from "../constants.js";
-export { SecretKey, PublicKey, Signature, init, destroy };
-export const bls = {
- implementation: "herumi",
- SecretKey,
- PublicKey,
- Signature,
- ...functionalInterfaceFactory({ SecretKey, PublicKey, Signature }),
-};
-export default bls;
+import { SecretKey } from './secretKey.js'
+import { PublicKey } from './publicKey.js'
+import { Signature } from './signature.js'
+import { init, destroy } from './context.js'
+import { functionalInterfaceFactory } from '../functional.js'
+
+export * from '../constants.js'
+export { SecretKey, PublicKey, Signature, init, destroy }
+
+export const bls = async () => {
+ await init()
+ return {
+ implementation: 'herumi',
+ SecretKey,
+ PublicKey,
+ Signature,
+ ...functionalInterfaceFactory({ SecretKey, PublicKey, Signature }),
+ }
+}
+export default bls
diff --git a/node_modules/@chainsafe/bls/lib/index.d.ts b/node_modules/@chainsafe/bls/lib/index.d.ts
index 35a9432..097a938 100644
--- a/node_modules/@chainsafe/bls/lib/index.d.ts
+++ b/node_modules/@chainsafe/bls/lib/index.d.ts
@@ -1,3 +1,3 @@
import type { IBls } from "./types.js";
-declare let bls: IBls;
+export declare const bls: () => Promise<IBls>;
export default bls;
diff --git a/node_modules/@chainsafe/bls/lib/index.js b/node_modules/@chainsafe/bls/lib/index.js
index c2a5bdf..9572018 100644
--- a/node_modules/@chainsafe/bls/lib/index.js
+++ b/node_modules/@chainsafe/bls/lib/index.js
@@ -1,11 +1,14 @@
import { getImplementation } from "./getImplementation.js";
// Thanks https://github.com/iliakan/detect-node/blob/master/index.esm.js
const isNode = Object.prototype.toString.call(typeof process !== "undefined" ? process : 0) === "[object process]";
-let bls;
-try {
- bls = await getImplementation(isNode ? "blst-native" : "herumi");
-}
-catch (e) {
- bls = await getImplementation("herumi");
-}
+export const bls = async () => {
+ let bls;
+ try {
+ bls = await getImplementation(isNode ? "blst-native" : "herumi");
+ }
+ catch (e) {
+ bls = await getImplementation("herumi");
+ }
+ return bls;
+};
export default bls;

View File

@ -1,5 +0,0 @@
// Browser/worker polyfill: expose Node's `process` and `Buffer` on the
// global `self` so bundled dependencies written for a Node environment
// keep working inside the bundle (injected by the esbuild build).
import * as process from "process";
import { Buffer } from "buffer";
self.process = process;
self.Buffer = Buffer;

View File

@ -1,218 +0,0 @@
// This is the standard build script for a kernel module.
import * as fs from "fs";
import read from "read";
import * as bip39 from "@scure/bip39";
import { wordlist } from "@scure/bip39/wordlists/english.js";
//@ts-ignore
import { SkynetClient } from "@skynetlabs/skynet-nodejs";
// Helper variables to make it easier to return empty values alongside errors.
// Shared zero-length byte array used as the "empty" error-path value.
const nu8 = new Uint8Array(0);
// Empty keypair placeholder. NOTE(review): appears unused in this script —
// candidate for removal; confirm nothing else references it.
const nkp = {
  publicKey: nu8,
  secretKey: nu8,
};
// readFile is a wrapper for fs.readFileSync that handles the try-catch for the
// caller. Returns [contents, null] on success, or ["", message] on failure.
function readFile(fileName: string): [string, string | null] {
  try {
    const data = fs.readFileSync(fileName, "utf8");
    return [data, null];
  } catch (err) {
    // String(err) preserves the error message; JSON.stringify(Error) yields
    // "{}" because Error's properties are non-enumerable.
    return ["", "unable to read file: " + String(err)];
  }
}
// readFileBinary is a wrapper for fs.readFileSync that handles the try-catch
// for the caller. Returns [bytes, null] on success, or an empty byte array
// plus an error message on failure.
function readFileBinary(fileName: string): [Uint8Array, string | null] {
  try {
    const data = fs.readFileSync(fileName, null);
    return [data, null];
  } catch (err) {
    // String(err) preserves the error message; JSON.stringify(Error) yields
    // "{}" because Error's properties are non-enumerable.
    return [new Uint8Array(0), "unable to read file: " + String(err)];
  }
}
// writeFile is a wrapper for fs.writeFileSync which handles the try-catch in a
// non-exception way. Returns null on success, or an error message on failure.
function writeFile(fileName: string, fileData: string): string | null {
  try {
    fs.writeFileSync(fileName, fileData);
    return null;
  } catch (err) {
    // String(err) preserves the error message; JSON.stringify(Error) yields
    // "{}" because Error's properties are non-enumerable.
    return "unable to write file: " + String(err);
  }
}
// handlePass handles all portions of the script that occur after the password
// has been requested. If no password needs to be requested, handlePass will be
// called with a null input. We need to structure the code this way because the
// password reader is async and we can only access the password when using a
// callback.
//
// Reads the module-level `seedFile` and `moduleSalt`, and hands control to
// handlePassConfirm either directly or from inside the confirmation callback.
function handlePass(password: string) {
  try {
    // If we are running prod and the seed file does not exist, we
    // need to confirm the password and also warn the user to use a
    // secure password.
    if (!fs.existsSync(seedFile) && process.argv[2] === "prod") {
      // The file does not exist, we need to confirm the
      // password.
      console.log();
      console.log(
        "No production entry found for module. Creating new production module..."
      );
      console.log(
        "If someone can guess the password, they can push arbitrary changes to your module."
      );
      console.log("Please use a secure password.");
      console.log();
      read(
        { prompt: "Confirm Password: ", silent: true },
        function (err: any, confirmPassword: string) {
          if (err) {
            console.error("unable to fetch password:", err);
            process.exit(1);
          }
          // Abort on mismatch rather than risk deploying under a mistyped
          // password the developer cannot reproduce later.
          if (password !== confirmPassword) {
            console.error("passwords do not match");
            process.exit(1);
          }
          handlePassConfirm(moduleSalt, password);
        }
      );
    } else {
      // If the seed file does exist, or if we are using dev,
      // there's no need to confirm the password but we do
      // need to pass the logic off to the handlePassConfirm
      // callback.
      handlePassConfirm(moduleSalt, password);
    }
  } catch (err) {
    console.error("Unable to read seedFile:", err);
    process.exit(1);
  }
}
// handlePassConfirm handles the full script after the confirmation password
// has been provided. If not confirmation password is needed, this function
// will be called anyway using the unconfirmed password as input.
//
// NOTE(review): the first parameter is named `seed` but the only caller
// (handlePass) passes `moduleSalt` — confirm intended meaning before reuse.
function handlePassConfirm(seed: string, password: string) {
  // Create the seedFile if it does not exist. For dev we just save the
  // seed to disk outright, because this is a dev build and therefore not
  // security sensitive. Also the dev seed does not get pushed to the
  // github repo.
  //
  // For prod, we use the seed to create a new seed (called the shield)
  // which allows us to verify that the developer has provided the right
  // password when deploying the module. The shield does get pushed to
  // the github repo so that the production module is the same on all
  // devices.
  if (!fs.existsSync(seedFile) && process.argv[2] !== "prod") {
    // Generate the seed phrase and write it to the file.
    let seedPhrase = bip39.generateMnemonic(wordlist);
    let errWF = writeFile(seedFile, seedPhrase);
    if (errWF !== null) {
      console.error("unable to write file:", errWF);
      process.exit(1);
    }
  } else if (!fs.existsSync(seedFile) && process.argv[2] === "prod") {
    // Generate the seed phrase.
    // NOTE(review): this branch generates a phrase and then discards it —
    // the "write the registry link" step below was never implemented.
    let seedPhrase = bip39.generateMnemonic(wordlist);
    // Write the registry link to the file.
  }
  // Load or verify the seed. If this is prod, the password is used to
  // create and verify the seed. If this is dev, we just load the seed
  // with no password.
  let seedPhrase: string;
  // NOTE(review): registryLink is declared but never assigned or used.
  let registryLink: string;
  if (process.argv[2] === "prod") {
    // Generate the seed phrase from the password.
    // NOTE(review): despite the comment, this does not derive from the
    // password — it generates a fresh random mnemonic each run.
    seedPhrase = bip39.generateMnemonic(wordlist);
  } else {
    let [sp, errRF] = readFile(seedFile);
    if (errRF !== null) {
      console.error("unable to read seed phrase for dev command from disk");
      process.exit(1);
    }
    seedPhrase = sp;
  }
  // NOTE(review): metadata is built but never passed to the upload call.
  let metadata = {
    Filename: "index.js",
  };
  // Upload the bundled module to the portal; the resulting immutable link
  // is printed for the developer. Fire-and-forget: errors exit the process.
  const client = new SkynetClient("https://web3portal.com");
  client
    .uploadFile("dist/index.js")
    .then((result: any) => {
      console.log("Immutable Link for module:", result);
    })
    .catch((err: any) => {
      console.error("unable to upload file", err);
      process.exit(1);
    });
}
// --- Script entry point: validate args, prepare state, then deploy. ---
// Add a newline for readability.
console.log();
// Check for a 'dev' or 'prod' input to the script.
if (process.argv.length !== 3) {
  console.error("need to provide either 'dev' or 'prod' as an input");
  process.exit(1);
}
// Create the build folder if it does not exist.
if (!fs.existsSync("build")) {
  fs.mkdirSync("build");
}
// Determine the seed file: prod reads the committed module-skylink entry,
// dev uses a local (gitignored) seed under build/.
let seedFile: string;
if (process.argv[2] === "prod") {
  seedFile = "module-skylink";
} else if (process.argv[2] === "dev") {
  seedFile = "build/dev-seed";
} else {
  console.error("need to provide either 'dev' or 'prod' as an input");
  process.exit(1);
}
// If doing a prod deployment, check whether the salt file exists. If it does
// not, create it.
let moduleSalt: string;
if (!fs.existsSync(".module-salt")) {
  moduleSalt = bip39.generateMnemonic(wordlist);
  let errWF = writeFile(".module-salt", moduleSalt);
  if (errWF !== null) {
    console.error("unable to write module salt file:", errWF);
    process.exit(1);
  }
} else {
  let [ms, errRF] = readFile(".module-salt");
  if (errRF !== null) {
    console.error("unable to read moduleSalt");
    process.exit(1);
  }
  // Strip a single trailing newline left by editors.
  ms = ms.replace(/\n$/, "");
  moduleSalt = ms;
}
// Need to get a password if this is a prod build; dev proceeds with an
// empty password via the same handlePass path.
if (process.argv[2] === "prod") {
  read(
    { prompt: "Password: ", silent: true },
    function (err: any, password: string) {
      if (err) {
        console.error("unable to fetch password:", err);
        process.exit(1);
      }
      handlePass(password);
    }
  );
} else {
  handlePass("");
}

View File

@ -23,7 +23,8 @@ import {
import { assertValidSignedHeader } from "@lodestar/light-client/validation"; import { assertValidSignedHeader } from "@lodestar/light-client/validation";
import { SyncCommitteeFast } from "@lodestar/light-client"; import { SyncCommitteeFast } from "@lodestar/light-client";
import bls, { init } from "@chainsafe/bls/switchable"; import bls, { init } from "@chainsafe/bls/switchable";
import { PublicKey } from "@chainsafe/bls/types.js"; // @ts-ignore
import type { PublicKey } from "@chainsafe/bls/lib/types.js";
import { fromHexString } from "@chainsafe/ssz"; import { fromHexString } from "@chainsafe/ssz";
import * as capella from "@lodestar/types/capella"; import * as capella from "@lodestar/types/capella";
import * as phase0 from "@lodestar/types/phase0"; import * as phase0 from "@lodestar/types/phase0";
@ -42,7 +43,7 @@ export default class Client {
latestBlockHash?: string; latestBlockHash?: string;
private config: ClientConfig = getDefaultClientConfig(); private config: ClientConfig = getDefaultClientConfig();
private genesisCommittee: Uint8Array[] = this.config.genesis.committee.map( private genesisCommittee: Uint8Array[] = this.config.genesis.committee.map(
(pk) => fromHexString(pk) (pk) => fromHexString(pk),
); );
private genesisPeriod = computeSyncPeriodAtSlot(this.config.genesis.slot); private genesisPeriod = computeSyncPeriodAtSlot(this.config.genesis.slot);
private genesisTime = this.config.genesis.time; private genesisTime = this.config.genesis.time;
@ -76,11 +77,11 @@ export default class Client {
const provider = new VerifyingProvider( const provider = new VerifyingProvider(
this.rpcCallback, this.rpcCallback,
blockNumber, blockNumber,
blockhash blockhash,
); );
this.subscribe((ei) => { this.subscribe((ei) => {
console.log( console.log(
`Recieved a new blockheader: ${ei.blockNumber} ${ei.blockhash}` `Recieved a new blockheader: ${ei.blockNumber} ${ei.blockhash}`,
); );
provider.update(ei.blockhash, ei.blockNumber); provider.update(ei.blockhash, ei.blockNumber);
}); });
@ -94,7 +95,7 @@ export default class Client {
public getCurrentPeriod(): number { public getCurrentPeriod(): number {
return computeSyncPeriodAtSlot( return computeSyncPeriodAtSlot(
getCurrentSlot(this.config.chainConfig, this.genesisTime) getCurrentSlot(this.config.chainConfig, this.genesisTime),
); );
} }
@ -115,12 +116,12 @@ export default class Client {
async optimisticUpdateVerify( async optimisticUpdateVerify(
committee: Uint8Array[], committee: Uint8Array[],
update: OptimisticUpdate update: OptimisticUpdate,
): Promise<VerifyWithReason> { ): Promise<VerifyWithReason> {
try { try {
const { attestedHeader: header, syncAggregate } = update; const { attestedHeader: header, syncAggregate } = update;
const headerBlockRoot = phase0.ssz.BeaconBlockHeader.hashTreeRoot( const headerBlockRoot = phase0.ssz.BeaconBlockHeader.hashTreeRoot(
header.beacon header.beacon,
); );
const committeeFast = this.deserializeSyncCommittee(committee); const committeeFast = this.deserializeSyncCommittee(committee);
try { try {
@ -129,7 +130,7 @@ export default class Client {
committeeFast, committeeFast,
syncAggregate, syncAggregate,
headerBlockRoot, headerBlockRoot,
header.beacon.slot header.beacon.slot,
); );
} catch (e) { } catch (e) {
return { correct: false, reason: "invalid signatures" }; return { correct: false, reason: "invalid signatures" };
@ -154,27 +155,27 @@ export default class Client {
private isValidLightClientHeader( private isValidLightClientHeader(
config: ChainForkConfig, config: ChainForkConfig,
header: allForks.LightClientHeader header: allForks.LightClientHeader,
): boolean { ): boolean {
return isValidMerkleBranch( return isValidMerkleBranch(
config config
.getExecutionForkTypes(header.beacon.slot) .getExecutionForkTypes(header.beacon.slot)
.ExecutionPayloadHeader.hashTreeRoot( .ExecutionPayloadHeader.hashTreeRoot(
(header as capella.LightClientHeader).execution (header as capella.LightClientHeader).execution,
), ),
(header as capella.LightClientHeader).executionBranch, (header as capella.LightClientHeader).executionBranch,
EXECUTION_PAYLOAD_DEPTH, EXECUTION_PAYLOAD_DEPTH,
EXECUTION_PAYLOAD_INDEX, EXECUTION_PAYLOAD_INDEX,
header.beacon.bodyRoot header.beacon.bodyRoot,
); );
} }
public async getNextValidExecutionInfo( public async getNextValidExecutionInfo(
retry: number = 10 retry: number = 10,
): Promise<ExecutionInfo> { ): Promise<ExecutionInfo> {
if (retry === 0) if (retry === 0)
throw new Error( throw new Error(
"no valid execution payload found in the given retry limit" "no valid execution payload found in the given retry limit",
); );
const ei = await this.getLatestExecution(); const ei = await this.getLatestExecution();
if (ei) return ei; if (ei) return ei;
@ -201,7 +202,7 @@ export default class Client {
let startPeriod = this.genesisPeriod; let startPeriod = this.genesisPeriod;
let lastCommitteeHash: Uint8Array = this.getCommitteeHash( let lastCommitteeHash: Uint8Array = this.getCommitteeHash(
this.genesisCommittee this.genesisCommittee,
); );
for (let period = startPeriod + 1; period <= currentPeriod; period++) { for (let period = startPeriod + 1; period <= currentPeriod; period++) {
@ -209,11 +210,11 @@ export default class Client {
lastCommitteeHash = await this.prover.getCommitteeHash( lastCommitteeHash = await this.prover.getCommitteeHash(
period, period,
currentPeriod, currentPeriod,
DEFAULT_BATCH_SIZE DEFAULT_BATCH_SIZE,
); );
} catch (e: any) { } catch (e: any) {
throw new Error( throw new Error(
`failed to fetch committee hash for prover at period(${period}): ${e.meessage}` `failed to fetch committee hash for prover at period(${period}): ${e.meessage}`,
); );
} }
} }
@ -225,7 +226,7 @@ export default class Client {
let startPeriod = this.latestPeriod; let startPeriod = this.latestPeriod;
let lastCommitteeHash: Uint8Array = this.getCommitteeHash( let lastCommitteeHash: Uint8Array = this.getCommitteeHash(
this.latestCommittee as Uint8Array[] this.latestCommittee as Uint8Array[],
); );
for (let period = startPeriod + 1; period <= currentPeriod; period++) { for (let period = startPeriod + 1; period <= currentPeriod; period++) {
@ -233,11 +234,11 @@ export default class Client {
lastCommitteeHash = await this.prover.getCommitteeHash( lastCommitteeHash = await this.prover.getCommitteeHash(
period, period,
currentPeriod, currentPeriod,
DEFAULT_BATCH_SIZE DEFAULT_BATCH_SIZE,
); );
} catch (e: any) { } catch (e: any) {
throw new Error( throw new Error(
`failed to fetch committee hash for prover at period(${period}): ${e.meessage}` `failed to fetch committee hash for prover at period(${period}): ${e.meessage}`,
); );
} }
} }
@ -246,7 +247,7 @@ export default class Client {
async getCommittee( async getCommittee(
period: number, period: number,
expectedCommitteeHash: Uint8Array | null expectedCommitteeHash: Uint8Array | null,
): Promise<Uint8Array[]> { ): Promise<Uint8Array[]> {
if (period === this.genesisPeriod) return this.genesisCommittee; if (period === this.genesisPeriod) return this.genesisCommittee;
if (!expectedCommitteeHash) if (!expectedCommitteeHash)
@ -260,19 +261,19 @@ export default class Client {
private async getLatestExecution(): Promise<ExecutionInfo | null> { private async getLatestExecution(): Promise<ExecutionInfo | null> {
const updateJSON = await this.prover.callback( const updateJSON = await this.prover.callback(
"consensus_optimistic_update" "consensus_optimistic_update",
); );
const update = this.optimisticUpdateFromJSON(updateJSON); const update = this.optimisticUpdateFromJSON(updateJSON);
const verify = await this.optimisticUpdateVerify( const verify = await this.optimisticUpdateVerify(
this.latestCommittee as Uint8Array[], this.latestCommittee as Uint8Array[],
update update,
); );
if (!verify.correct) { if (!verify.correct) {
console.error(`Invalid Optimistic Update: ${verify.reason}`); console.error(`Invalid Optimistic Update: ${verify.reason}`);
return null; return null;
} }
console.log( console.log(
`Optimistic update verified for slot ${updateJSON.attested_header.beacon.slot}` `Optimistic update verified for slot ${updateJSON.attested_header.beacon.slot}`,
); );
return { return {
blockhash: updateJSON.attested_header.execution.block_hash, blockhash: updateJSON.attested_header.execution.block_hash,
@ -281,7 +282,7 @@ export default class Client {
} }
private deserializeSyncCommittee( private deserializeSyncCommittee(
syncCommittee: Uint8Array[] syncCommittee: Uint8Array[],
): SyncCommitteeFast { ): SyncCommitteeFast {
const pubkeys = this.deserializePubkeys(syncCommittee); const pubkeys = this.deserializePubkeys(syncCommittee);
return { return {

View File

@ -522,7 +522,7 @@ export const mainnetConfig = {
}; };
export const BEACON_SYNC_SUPER_MAJORITY = Math.ceil( export const BEACON_SYNC_SUPER_MAJORITY = Math.ceil(
(BEACON_SYNC_COMMITTEE_SIZE * 2) / 3 (BEACON_SYNC_COMMITTEE_SIZE * 2) / 3,
); );
// These are the rough numbers from benchmark experiments // These are the rough numbers from benchmark experiments

View File

@ -8,7 +8,7 @@ export interface IProver {
getCommitteeHash( getCommitteeHash(
period: number, period: number,
currentPeriod: number, currentPeriod: number,
count: number count: number,
): Promise<Uint8Array>; ): Promise<Uint8Array>;
getSyncUpdate(period: number): Promise<LightClientUpdate>; getSyncUpdate(period: number): Promise<LightClientUpdate>;

View File

@ -24,7 +24,7 @@ export default class Prover implements IProver {
async getSyncUpdate(period: number): Promise<LightClientUpdate> { async getSyncUpdate(period: number): Promise<LightClientUpdate> {
const res = await this.callback("consensus_committee_period", { period }); const res = await this.callback("consensus_committee_period", { period });
return LightClientUpdateSSZ.deserialize( return LightClientUpdateSSZ.deserialize(
Uint8Array.from(Object.values(res)) Uint8Array.from(Object.values(res)),
); );
} }
@ -39,7 +39,7 @@ export default class Prover implements IProver {
async getCommitteeHash( async getCommitteeHash(
period: number, period: number,
currentPeriod: number, currentPeriod: number,
cacheCount: number cacheCount: number,
): Promise<Uint8Array> { ): Promise<Uint8Array> {
const _count = Math.min(currentPeriod - period + 1, cacheCount); const _count = Math.min(currentPeriod - period + 1, cacheCount);
if (!this.cachedHashes.has(period)) { if (!this.cachedHashes.has(period)) {

View File

@ -1,10 +1,10 @@
export const ZERO_ADDR = '0x0000000000000000000000000000000000000000'; export const ZERO_ADDR = "0x0000000000000000000000000000000000000000";
// TODO: set the correct gas limit! // TODO: set the correct gas limit!
export const GAS_LIMIT = '0x1c9c380'; export const GAS_LIMIT = "0x1c9c380";
export const REQUEST_BATCH_SIZE = 10; export const REQUEST_BATCH_SIZE = 10;
export const MAX_SOCKET = 10; export const MAX_SOCKET = 10;
export const EMPTY_ACCOUNT_EXTCODEHASH = export const EMPTY_ACCOUNT_EXTCODEHASH =
'0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'; "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470";
export const MAX_BLOCK_HISTORY = BigInt(256); export const MAX_BLOCK_HISTORY = BigInt(256);
export const MAX_BLOCK_FUTURE = BigInt(3); export const MAX_BLOCK_FUTURE = BigInt(3);
export const DEFAULT_BLOCK_PARAMETER = 'latest'; export const DEFAULT_BLOCK_PARAMETER = "latest";

View File

@ -1,4 +1,4 @@
import { JSONRPCErrorCode, JSONRPCErrorException } from 'json-rpc-2.0'; import { JSONRPCErrorCode, JSONRPCErrorException } from "json-rpc-2.0";
export class InternalError extends JSONRPCErrorException { export class InternalError extends JSONRPCErrorException {
constructor(message: string) { constructor(message: string) {

View File

@ -35,7 +35,10 @@ import {
MAX_BLOCK_FUTURE, MAX_BLOCK_FUTURE,
DEFAULT_BLOCK_PARAMETER, DEFAULT_BLOCK_PARAMETER,
} from "./constants.js"; } from "./constants.js";
import { headerDataFromWeb3Response, blockDataFromWeb3Response } from "./utils"; import {
headerDataFromWeb3Response,
blockDataFromWeb3Response,
} from "./utils.js";
import { keccak256 } from "ethers"; import { keccak256 } from "ethers";
import { InternalError, InvalidParamsError } from "./errors.js"; import { InternalError, InvalidParamsError } from "./errors.js";
@ -68,14 +71,14 @@ export class VerifyingProvider {
eth_estimateGas: this.estimateGas, eth_estimateGas: this.estimateGas,
eth_sendRawTransaction: this.sendRawTransaction, eth_sendRawTransaction: this.sendRawTransaction,
eth_getTransactionReceipt: this.getTransactionReceipt, eth_getTransactionReceipt: this.getTransactionReceipt,
}) }),
); );
constructor( constructor(
rpcCallback: Function, rpcCallback: Function,
blockNumber: bigint | number, blockNumber: bigint | number,
blockHash: Bytes32, blockHash: Bytes32,
chain: bigint | Chain = Chain.Mainnet chain: bigint | Chain = Chain.Mainnet,
) { ) {
this.rpc = new RPC(rpcCallback); this.rpc = new RPC(rpcCallback);
this.common = new Common({ this.common = new Common({
@ -93,7 +96,7 @@ export class VerifyingProvider {
this.blockHashes[blockNumberHex] !== blockHash this.blockHashes[blockNumberHex] !== blockHash
) { ) {
console.log( console.log(
"Overriding an existing verified blockhash. Possibly the chain had a reorg" "Overriding an existing verified blockhash. Possibly the chain had a reorg",
); );
} }
const latestBlockNumber = this.latestBlockNumber; const latestBlockNumber = this.latestBlockNumber;
@ -119,7 +122,7 @@ export class VerifyingProvider {
private async getBalance( private async getBalance(
addressHex: AddressHex, addressHex: AddressHex,
blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER,
) { ) {
const header = await this.getBlockHeader(blockOpt); const header = await this.getBlockHeader(blockOpt);
const address = Address.fromString(addressHex); const address = Address.fromString(addressHex);
@ -134,7 +137,7 @@ export class VerifyingProvider {
address, address,
[], [],
header.stateRoot, header.stateRoot,
proof proof,
); );
if (!isAccountCorrect) { if (!isAccountCorrect) {
throw new InternalError("Invalid account proof provided by the RPC"); throw new InternalError("Invalid account proof provided by the RPC");
@ -153,7 +156,7 @@ export class VerifyingProvider {
private async getCode( private async getCode(
addressHex: AddressHex, addressHex: AddressHex,
blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER,
): Promise<HexString> { ): Promise<HexString> {
const header = await this.getBlockHeader(blockOpt); const header = await this.getBlockHeader(blockOpt);
const res = await this.rpc.requestBatch([ const res = await this.rpc.requestBatch([
@ -177,7 +180,7 @@ export class VerifyingProvider {
address, address,
[], [],
header.stateRoot, header.stateRoot,
accountProof accountProof,
); );
if (!isAccountCorrect) { if (!isAccountCorrect) {
throw new InternalError(`invalid account proof provided by the RPC`); throw new InternalError(`invalid account proof provided by the RPC`);
@ -185,11 +188,11 @@ export class VerifyingProvider {
const isCodeCorrect = await this.verifyCodeHash( const isCodeCorrect = await this.verifyCodeHash(
code, code,
accountProof.codeHash accountProof.codeHash,
); );
if (!isCodeCorrect) { if (!isCodeCorrect) {
throw new InternalError( throw new InternalError(
`code provided by the RPC doesn't match the account's codeHash` `code provided by the RPC doesn't match the account's codeHash`,
); );
} }
@ -198,7 +201,7 @@ export class VerifyingProvider {
private async getTransactionCount( private async getTransactionCount(
addressHex: AddressHex, addressHex: AddressHex,
blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER,
): Promise<HexString> { ): Promise<HexString> {
const header = await this.getBlockHeader(blockOpt); const header = await this.getBlockHeader(blockOpt);
const address = Address.fromString(addressHex); const address = Address.fromString(addressHex);
@ -214,7 +217,7 @@ export class VerifyingProvider {
address, address,
[], [],
header.stateRoot, header.stateRoot,
proof proof,
); );
if (!isAccountCorrect) { if (!isAccountCorrect) {
throw new InternalError(`invalid account proof provided by the RPC`); throw new InternalError(`invalid account proof provided by the RPC`);
@ -225,7 +228,7 @@ export class VerifyingProvider {
private async call( private async call(
transaction: RPCTx, transaction: RPCTx,
blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER,
) { ) {
try { try {
this.validateTx(transaction); this.validateTx(transaction);
@ -265,7 +268,7 @@ export class VerifyingProvider {
private async estimateGas( private async estimateGas(
transaction: RPCTx, transaction: RPCTx,
blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER blockOpt: BlockOpt = DEFAULT_BLOCK_PARAMETER,
) { ) {
try { try {
this.validateTx(transaction); this.validateTx(transaction);
@ -284,7 +287,7 @@ export class VerifyingProvider {
? 2 ? 2
: transaction.accessList : transaction.accessList
? 1 ? 1
: 0 : 0,
); );
if (txType == BigInt(2)) { if (txType == BigInt(2)) {
transaction.maxFeePerGas = transaction.maxFeePerGas =
@ -350,7 +353,7 @@ export class VerifyingProvider {
} }
private async getTransactionReceipt( private async getTransactionReceipt(
txHash: Bytes32 txHash: Bytes32,
): Promise<JSONRPCReceipt | null> { ): Promise<JSONRPCReceipt | null> {
const { result: receipt, success } = await this.rpc.request({ const { result: receipt, success } = await this.rpc.request({
method: "eth_getTransactionReceipt", method: "eth_getTransactionReceipt",
@ -362,7 +365,7 @@ export class VerifyingProvider {
const header = await this.getBlockHeader(receipt.blockNumber); const header = await this.getBlockHeader(receipt.blockNumber);
const block = await this.getBlock(header); const block = await this.getBlock(header);
const index = block.transactions.findIndex( const index = block.transactions.findIndex(
(tx) => bufferToHex(tx.hash()) === txHash.toLowerCase() (tx) => bufferToHex(tx.hash()) === txHash.toLowerCase(),
); );
if (index === -1) { if (index === -1) {
throw new InternalError("the recipt provided by the RPC is invalid"); throw new InternalError("the recipt provided by the RPC is invalid");
@ -453,7 +456,7 @@ export class VerifyingProvider {
} }
const responses = _.chunk( const responses = _.chunk(
rawResponse.map((r: any) => r.result), rawResponse.map((r: any) => r.result),
2 2,
) as [AccountResponse, CodeResponse][]; ) as [AccountResponse, CodeResponse][];
for (let i = 0; i < accessList.length; i++) { for (let i = 0; i < accessList.length; i++) {
@ -471,7 +474,7 @@ export class VerifyingProvider {
address, address,
storageKeys, storageKeys,
header.stateRoot, header.stateRoot,
accountProof accountProof,
); );
if (!isAccountCorrect) { if (!isAccountCorrect) {
throw new InternalError(`invalid account proof provided by the RPC`); throw new InternalError(`invalid account proof provided by the RPC`);
@ -480,7 +483,7 @@ export class VerifyingProvider {
const isCodeCorrect = await this.verifyCodeHash(code, codeHash); const isCodeCorrect = await this.verifyCodeHash(code, codeHash);
if (!isCodeCorrect) { if (!isCodeCorrect) {
throw new InternalError( throw new InternalError(
`code provided by the RPC doesn't match the account's codeHash` `code provided by the RPC doesn't match the account's codeHash`,
); );
} }
@ -496,7 +499,7 @@ export class VerifyingProvider {
await vm.stateManager.putContractStorage( await vm.stateManager.putContractStorage(
address, address,
setLengthLeft(toBuffer(storageAccess.key), 32), setLengthLeft(toBuffer(storageAccess.key), 32),
setLengthLeft(toBuffer(storageAccess.value), 32) setLengthLeft(toBuffer(storageAccess.value), 32),
); );
} }
@ -527,7 +530,7 @@ export class VerifyingProvider {
throw new InvalidParamsError("specified block is too far in future"); throw new InvalidParamsError("specified block is too far in future");
} else if (blockNumber + MAX_BLOCK_HISTORY < this.latestBlockNumber) { } else if (blockNumber + MAX_BLOCK_HISTORY < this.latestBlockNumber) {
throw new InvalidParamsError( throw new InvalidParamsError(
`specified block cannot older that ${MAX_BLOCK_HISTORY}` `specified block cannot older that ${MAX_BLOCK_HISTORY}`,
); );
} }
return blockNumber; return blockNumber;
@ -566,7 +569,7 @@ export class VerifyingProvider {
if (!header.hash().equals(toBuffer(blockHash))) { if (!header.hash().equals(toBuffer(blockHash))) {
throw new InternalError( throw new InternalError(
`blockhash doesn't match the blockInfo provided by the RPC` `blockhash doesn't match the blockInfo provided by the RPC`,
); );
} }
this.blockHeaders[blockHash] = header; this.blockHeaders[blockHash] = header;
@ -578,14 +581,14 @@ export class VerifyingProvider {
address: Address, address: Address,
storageKeys: Bytes32[], storageKeys: Bytes32[],
stateRoot: Buffer, stateRoot: Buffer,
proof: GetProof proof: GetProof,
): Promise<boolean> { ): Promise<boolean> {
const trie = new Trie(); const trie = new Trie();
const key = keccak256(address.toString()); const key = keccak256(address.toString());
const expectedAccountRLP = await trie.verifyProof( const expectedAccountRLP = await trie.verifyProof(
stateRoot, stateRoot,
toBuffer(key), toBuffer(key),
proof.accountProof.map((a) => toBuffer(a)) proof.accountProof.map((a) => toBuffer(a)),
); );
const account = Account.fromAccountData({ const account = Account.fromAccountData({
nonce: BigInt(proof.nonce), nonce: BigInt(proof.nonce),
@ -607,12 +610,12 @@ export class VerifyingProvider {
for (let i = 0; i < storageKeys.length; i++) { for (let i = 0; i < storageKeys.length; i++) {
const sp = proof.storageProof[i]; const sp = proof.storageProof[i];
const key = keccak256( const key = keccak256(
bufferToHex(setLengthLeft(toBuffer(storageKeys[i]), 32)) bufferToHex(setLengthLeft(toBuffer(storageKeys[i]), 32)),
); );
const expectedStorageRLP = await trie.verifyProof( const expectedStorageRLP = await trie.verifyProof(
toBuffer(proof.storageHash), toBuffer(proof.storageHash),
toBuffer(key), toBuffer(key),
sp.proof.map((a) => toBuffer(a)) sp.proof.map((a) => toBuffer(a)),
); );
const isStorageValid = const isStorageValid =
(!expectedStorageRLP && sp.value === "0x0") || (!expectedStorageRLP && sp.value === "0x0") ||
@ -647,7 +650,7 @@ export class VerifyingProvider {
BigInt(tx.maxPriorityFeePerGas) > BigInt(tx.maxFeePerGas) BigInt(tx.maxPriorityFeePerGas) > BigInt(tx.maxFeePerGas)
) { ) {
throw new Error( throw new Error(
`maxPriorityFeePerGas (${tx.maxPriorityFeePerGas.toString()}) is bigger than maxFeePerGas (${tx.maxFeePerGas.toString()})` `maxPriorityFeePerGas (${tx.maxPriorityFeePerGas.toString()}) is bigger than maxFeePerGas (${tx.maxFeePerGas.toString()})`,
); );
} }
} }
@ -667,13 +670,13 @@ export class VerifyingProvider {
if (!block.header.hash().equals(header.hash())) { if (!block.header.hash().equals(header.hash())) {
throw new InternalError( throw new InternalError(
`BN(${header.number}): blockhash doest match the blockData provided by the RPC` `BN(${header.number}): blockhash doest match the blockData provided by the RPC`,
); );
} }
if (!(await block.validateTransactionsTrie())) { if (!(await block.validateTransactionsTrie())) {
throw new InternalError( throw new InternalError(
`transactionTree doesn't match the transactions provided by the RPC` `transactionTree doesn't match the transactions provided by the RPC`,
); );
} }
@ -696,7 +699,7 @@ export class VerifyingProvider {
this.blockHashes[parentBlockNumberHex] !== parentBlockHash this.blockHashes[parentBlockNumberHex] !== parentBlockHash
) { ) {
console.log( console.log(
"Overriding an existing verified blockhash. Possibly the chain had a reorg" "Overriding an existing verified blockhash. Possibly the chain had a reorg",
); );
} }
this.blockHashes[parentBlockNumberHex] = parentBlockHash; this.blockHashes[parentBlockNumberHex] = parentBlockHash;

View File

@ -25,7 +25,7 @@ export class RPC {
} }
async requestBatch(requests: RPCRequest[]) { async requestBatch(requests: RPCRequest[]) {
const res = []; const res: RPCResponse[] = [];
for (const request of requests) { for (const request of requests) {
const r = await this._retryRequest(request); const r = await this._retryRequest(request);
res.push(r); res.push(r);
@ -35,7 +35,7 @@ export class RPC {
private async _retryRequest( private async _retryRequest(
_request: RPCRequest, _request: RPCRequest,
retry = 5 retry = 5,
): Promise<RPCResponse> { ): Promise<RPCResponse> {
const request = { const request = {
..._request, ..._request,
@ -52,8 +52,8 @@ export class RPC {
`RPC batch request failed after maximum retries: ${JSON.stringify( `RPC batch request failed after maximum retries: ${JSON.stringify(
request, request,
null, null,
2 2,
)} ${JSON.stringify(res, null, 2)}` )} ${JSON.stringify(res, null, 2)}`,
); );
} }
} }

View File

@ -40,7 +40,7 @@ export function headerDataFromWeb3Response(blockInfo: any): HeaderData {
} }
export function txDataFromWeb3Response( export function txDataFromWeb3Response(
txInfo: any txInfo: any,
): TxData | AccessListEIP2930TxData | FeeMarketEIP1559TxData { ): TxData | AccessListEIP2930TxData | FeeMarketEIP1559TxData {
return { return {
...txInfo, ...txInfo,

View File

@ -1,4 +1,4 @@
import { InvalidParamsError } from './errors'; import { InvalidParamsError } from "./errors.js";
// Most of the validations are taken from: // Most of the validations are taken from:
// https://github.com/ethereumjs/ethereumjs-monorepo/blob/master/packages/client/lib/rpc/validation.ts // https://github.com/ethereumjs/ethereumjs-monorepo/blob/master/packages/client/lib/rpc/validation.ts
@ -30,13 +30,13 @@ export const validators = {
* @param index index of parameter * @param index index of parameter
*/ */
hex(params: any[], index: number) { hex(params: any[], index: number) {
if (typeof params[index] !== 'string') { if (typeof params[index] !== "string") {
throw new InvalidParamsError( throw new InvalidParamsError(
`invalid argument ${index}: argument must be a hex string`, `invalid argument ${index}: argument must be a hex string`,
); );
} }
if (params[index].substr(0, 2) !== '0x') { if (params[index].substr(0, 2) !== "0x") {
throw new InvalidParamsError( throw new InvalidParamsError(
`invalid argument ${index}: hex string without 0x prefix`, `invalid argument ${index}: hex string without 0x prefix`,
); );
@ -67,14 +67,14 @@ export const validators = {
blockOption(params: any[], index: number) { blockOption(params: any[], index: number) {
const blockOption = params[index]; const blockOption = params[index];
if (typeof blockOption !== 'string') { if (typeof blockOption !== "string") {
throw new InvalidParamsError( throw new InvalidParamsError(
`invalid argument ${index}: argument must be a string`, `invalid argument ${index}: argument must be a string`,
); );
} }
try { try {
if (['latest', 'earliest', 'pending'].includes(blockOption)) { if (["latest", "earliest", "pending"].includes(blockOption)) {
return; return;
} }
return this.hex([blockOption], 0); return this.hex([blockOption], 0);
@ -91,7 +91,7 @@ export const validators = {
* @param index index of parameter * @param index index of parameter
*/ */
bool(params: any[], index: number) { bool(params: any[], index: number) {
if (typeof params[index] !== 'boolean') { if (typeof params[index] !== "boolean") {
throw new InvalidParamsError( throw new InvalidParamsError(
`invalid argument ${index}: argument is not boolean`, `invalid argument ${index}: argument is not boolean`,
); );
@ -118,7 +118,7 @@ export const validators = {
transaction(params: any[], index: number) { transaction(params: any[], index: number) {
const tx = params[index]; const tx = params[index];
if (typeof tx !== 'object') { if (typeof tx !== "object") {
throw new InvalidParamsError( throw new InvalidParamsError(
`invalid argument ${index}: argument must be an object`, `invalid argument ${index}: argument must be an object`,
); );

View File

@ -1,5 +1,6 @@
import { import {
ByteVectorType, ByteVectorType,
ContainerType,
ListCompositeType, ListCompositeType,
VectorCompositeType, VectorCompositeType,
} from "@chainsafe/ssz"; } from "@chainsafe/ssz";
@ -8,15 +9,88 @@ import { BEACON_SYNC_COMMITTEE_SIZE } from "./constants.js";
const MAX_BATCHSIZE = 10000; const MAX_BATCHSIZE = 10000;
export const LightClientUpdateSSZ = capella.ssz.LightClientUpdate; export const LightClientUpdateSSZ = capella.ssz
.LightClientUpdate as unknown as ContainerType<{
attestedHeader: ContainerType<{
beacon: ContainerType<{
slot: import("@chainsafe/ssz").UintNumberType;
proposerIndex: import("@chainsafe/ssz").UintNumberType;
parentRoot: import("@chainsafe/ssz").ByteVectorType;
stateRoot: import("@chainsafe/ssz").ByteVectorType;
bodyRoot: import("@chainsafe/ssz").ByteVectorType;
}>;
execution: ContainerType<{
withdrawalsRoot: import("@chainsafe/ssz").ByteVectorType;
transactionsRoot: import("@chainsafe/ssz").ByteVectorType;
blockHash: import("@chainsafe/ssz").ByteVectorType;
parentHash: import("@chainsafe/ssz").ByteVectorType;
feeRecipient: import("@chainsafe/ssz").ByteVectorType;
stateRoot: import("@chainsafe/ssz").ByteVectorType;
receiptsRoot: import("@chainsafe/ssz").ByteVectorType;
logsBloom: import("@chainsafe/ssz").ByteVectorType;
prevRandao: import("@chainsafe/ssz").ByteVectorType;
blockNumber: import("@chainsafe/ssz").UintNumberType;
gasLimit: import("@chainsafe/ssz").UintNumberType;
gasUsed: import("@chainsafe/ssz").UintNumberType;
timestamp: import("@chainsafe/ssz").UintNumberType;
extraData: import("@chainsafe/ssz").ByteListType;
baseFeePerGas: import("@chainsafe/ssz").UintBigintType;
}>;
executionBranch: VectorCompositeType<
import("@chainsafe/ssz").ByteVectorType
>;
}>;
nextSyncCommittee: ContainerType<{
pubkeys: VectorCompositeType<import("@chainsafe/ssz").ByteVectorType>;
aggregatePubkey: import("@chainsafe/ssz").ByteVectorType;
}>;
nextSyncCommitteeBranch: VectorCompositeType<
import("@chainsafe/ssz").ByteVectorType
>;
finalizedHeader: ContainerType<{
beacon: ContainerType<{
slot: import("@chainsafe/ssz").UintNumberType;
proposerIndex: import("@chainsafe/ssz").UintNumberType;
parentRoot: import("@chainsafe/ssz").ByteVectorType;
stateRoot: import("@chainsafe/ssz").ByteVectorType;
bodyRoot: import("@chainsafe/ssz").ByteVectorType;
}>;
execution: ContainerType<{
withdrawalsRoot: import("@chainsafe/ssz").ByteVectorType;
transactionsRoot: import("@chainsafe/ssz").ByteVectorType;
blockHash: import("@chainsafe/ssz").ByteVectorType;
parentHash: import("@chainsafe/ssz").ByteVectorType;
feeRecipient: import("@chainsafe/ssz").ByteVectorType;
stateRoot: import("@chainsafe/ssz").ByteVectorType;
receiptsRoot: import("@chainsafe/ssz").ByteVectorType;
logsBloom: import("@chainsafe/ssz").ByteVectorType;
prevRandao: import("@chainsafe/ssz").ByteVectorType;
blockNumber: import("@chainsafe/ssz").UintNumberType;
gasLimit: import("@chainsafe/ssz").UintNumberType;
gasUsed: import("@chainsafe/ssz").UintNumberType;
timestamp: import("@chainsafe/ssz").UintNumberType;
extraData: import("@chainsafe/ssz").ByteListType;
baseFeePerGas: import("@chainsafe/ssz").UintBigintType;
}>;
executionBranch: VectorCompositeType<
import("@chainsafe/ssz").ByteVectorType
>;
}>;
finalityBranch: VectorCompositeType<import("@chainsafe/ssz").ByteVectorType>;
syncAggregate: ContainerType<{
syncCommitteeBits: import("@chainsafe/ssz").BitVectorType;
syncCommitteeSignature: import("@chainsafe/ssz").ByteVectorType;
}>;
signatureSlot: import("@chainsafe/ssz").UintNumberType;
}>;
export const LightClientUpdatesSSZ = new ListCompositeType( export const LightClientUpdatesSSZ = new ListCompositeType(
LightClientUpdateSSZ as any, LightClientUpdateSSZ as any,
MAX_BATCHSIZE MAX_BATCHSIZE,
); );
export const CommitteeSSZ = new VectorCompositeType( export const CommitteeSSZ = new VectorCompositeType(
new ByteVectorType(48), new ByteVectorType(48),
BEACON_SYNC_COMMITTEE_SIZE BEACON_SYNC_COMMITTEE_SIZE,
); );
const HashSSZ = new ByteVectorType(32); const HashSSZ = new ByteVectorType(32);

View File

@ -1,16 +1,9 @@
import Decimal from "decimal.js";
import { fromHexString, toHexString } from "@chainsafe/ssz"; import { fromHexString, toHexString } from "@chainsafe/ssz";
import bls from "@chainsafe/bls/switchable"; import bls from "@chainsafe/bls/switchable";
import { createBeaconConfig } from "@lodestar/config"; import { createBeaconConfig } from "@lodestar/config";
import { mainnetConfig } from "./constants.js"; import { mainnetConfig } from "./constants.js";
import { networksChainConfig } from "@lodestar/config/networks"; import { networksChainConfig } from "@lodestar/config/networks";
//import _ from "lodash";
export function logFloor(x: number, base: number = 2) {
return Decimal.log(x, base).floor().toNumber();
}
export function concatUint8Array(data: Uint8Array[]) { export function concatUint8Array(data: Uint8Array[]) {
const l = data.reduce((l, d) => l + d.length, 0); const l = data.reduce((l, d) => l + d.length, 0);
let result = new Uint8Array(l); let result = new Uint8Array(l);
@ -32,7 +25,7 @@ export function isCommitteeSame(a: Uint8Array[], b: Uint8Array[]): boolean {
} }
export function generateRandomSyncCommittee(): Uint8Array[] { export function generateRandomSyncCommittee(): Uint8Array[] {
let res = []; let res: Uint8Array[] = [];
// TODO: change 512 to constant // TODO: change 512 to constant
for (let i = 0; i < 512; i++) { for (let i = 0; i < 512; i++) {
res.push(bls.SecretKey.fromKeygen().toPublicKey().toBytes()); res.push(bls.SecretKey.fromKeygen().toPublicKey().toBytes());
@ -79,26 +72,10 @@ export async function wait(ms: number) {
}); });
} }
/*
export function deepTypecast<T>(
obj: any,
checker: (val: any) => boolean,
caster: (val: T) => any
): any {
return _.forEach(obj, (val: any, key: any, obj: any) => {
obj[key] = checker(val)
? caster(val)
: _.isObject(val)
? deepTypecast(val, checker, caster)
: val;
});
}
*/
export function getDefaultClientConfig() { export function getDefaultClientConfig() {
const chainConfig = createBeaconConfig( const chainConfig = createBeaconConfig(
networksChainConfig.mainnet, networksChainConfig.mainnet,
fromHexString(mainnetConfig.genesis_validator_root) fromHexString(mainnetConfig.genesis_validator_root),
); );
return { return {
genesis: { genesis: {

View File

@ -1,4 +1,8 @@
import { ActiveQuery, addHandler, handleMessage } from "libkmodule"; import {
ActiveQuery,
addHandler,
handleMessage,
} from "@lumeweb/libkernel/module";
import { createClient, RpcNetwork } from "@lumeweb/kernel-rpc-client"; import { createClient, RpcNetwork } from "@lumeweb/kernel-rpc-client";
import Client from "./client/client.js"; import Client from "./client/client.js";
import { Prover } from "./client/index.js"; import { Prover } from "./client/index.js";
@ -13,7 +17,7 @@ let moduleReady: Promise<void> = new Promise((resolve) => {
let client: Client; let client: Client;
let rpc: RpcNetwork; let rpc: RpcNetwork;
addHandler("presentSeed", handlePresentSeed); addHandler("presentKey", handlePresentKey);
addHandler("ready", handleReady); addHandler("ready", handleReady);
[ [
@ -50,7 +54,7 @@ addHandler("ready", handleReady);
}); });
}); });
async function handlePresentSeed() { async function handlePresentKey() {
await setup(); await setup();
moduleReadyResolve(); moduleReadyResolve();
} }
@ -60,7 +64,7 @@ async function handleRpcMethod(aq: ActiveQuery) {
return client.provider.rpcMethod( return client.provider.rpcMethod(
aq.callerInput?.method, aq.callerInput?.method,
// @ts-ignore // @ts-ignore
aq.callerInput?.params as any[] aq.callerInput?.params as any[],
); );
} }

View File

@ -1,14 +0,0 @@
{
"compilerOptions": {
"target": "esnext",
"module": "esnext",
"moduleResolution": "node",
"allowSyntheticDefaultImports": true,
"declaration": true,
"outDir": "./dist-build",
"strict": true,
"esModuleInterop": true
},
"include": ["src-build"],
"exclude": ["node_modules", "**/__tests__/*"]
}

View File

@ -1,13 +0,0 @@
{
"compilerOptions": {
"target": "esnext",
"declaration": true,
"moduleResolution": "node",
"outDir": "./build",
"strict": true,
"allowSyntheticDefaultImports": true,
"esModuleInterop": true
},
"include": ["src"],
"exclude": ["node_modules", "**/__tests__/*"]
}