Compare commits


No commits in common. "v0.1.0-develop.3" and "master" have entirely different histories.

12 changed files with 496 additions and 25002 deletions


@@ -1,13 +0,0 @@
name: Build/Publish
on:
push:
branches:
- master
- develop
- develop-*
jobs:
main:
uses: lumeweb/github-node-deploy-workflow/.github/workflows/main.yml@master
secrets: inherit


@@ -1,28 +0,0 @@
{
"preset": [
"@lumeweb/presetter-kernel-module-preset"
],
"config": {
"tsconfig": {
"compilerOptions": {
"module": "nodenext"
}
},
"vite": {
"resolve": {
"alias": {
"ethereum-cryptography/secp256k1": "ethereum-cryptography/secp256k1.js",
"ethereum-cryptography/keccak": "ethereum-cryptography/keccak.js",
"ethereum-cryptography/utils": "ethereum-cryptography/utils.js",
"ethereum-cryptography/sha256": "ethereum-cryptography/sha256.js",
"ethereum-cryptography/ripemd160": "ethereum-cryptography/ripemd160.js",
"bigint-mod-arith": "@lumeweb/bigint-mod-arith",
"bigint-crypto-utils": "@lumeweb/bigint-crypto-utils"
}
},
"optimize": {
"./blst-native/index.js": "export default undefined;"
}
}
}
}
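
The vite.resolve.alias block above rewrites bare module specifiers at bundle time, so application code keeps importing the upstream package names while the bundle resolves them to the Lume forks and the .js entry points. A minimal consumer sketch, assuming the preset forwards these entries to Vite's resolve.alias and that @lumeweb/bigint-mod-arith keeps the upstream API (illustrative only, not part of this diff):

// Illustrative: with the alias above, this import is served by
// @lumeweb/bigint-mod-arith in the bundled output; no source change is needed.
import { modPow } from "bigint-mod-arith";

const r = modPow(4n, 13n, 497n); // 445n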


@@ -1,20 +0,0 @@
# [0.1.0-develop.3](https://git.lumeweb.com/LumeWeb/kernel-eth/compare/v0.1.0-develop.2...v0.1.0-develop.3) (2023-11-17)
# [0.1.0-develop.2](https://git.lumeweb.com/LumeWeb/kernel-eth/compare/v0.1.0-develop.1...v0.1.0-develop.2) (2023-10-13)
# [0.1.0-develop.1](https://git.lumeweb.com/LumeWeb/kernel-eth/compare/v0.0.1...v0.1.0-develop.1) (2023-10-12)
### Bug Fixes
* add synced listener on eth client to flag the synced status ([5f64664](https://git.lumeweb.com/LumeWeb/kernel-eth/commit/5f64664676babae7065dc7560a5db780799c2c9e))
* createEthClient needs logging functions and sync delay passed ([0324700](https://git.lumeweb.com/LumeWeb/kernel-eth/commit/032470029cec8266b925ddb689512e18a580183b))
### Features
* add caching of the last consensus update via leveldb ([9f35ea7](https://git.lumeweb.com/LumeWeb/kernel-eth/commit/9f35ea7f9b31430a5840dc431fadd4cfd560759a))
* add name api ([9370003](https://git.lumeweb.com/LumeWeb/kernel-eth/commit/9370003a59d1dd0c343f6817c2940f7309703854))
* add register method for network registry ([e7d85a2](https://git.lumeweb.com/LumeWeb/kernel-eth/commit/e7d85a2af4c2325aae8b0a0e4c327a8f121fae6a))
* add status api ([2a079f3](https://git.lumeweb.com/LumeWeb/kernel-eth/commit/2a079f340f5f1a61809aebf92ea038a97665684a))
* Update to capella fork ([e62eadd](https://git.lumeweb.com/LumeWeb/kernel-eth/commit/e62eadd43617d4da0903d2d60d7c057966c5c71e))

build.js (new file, 48 lines)

@@ -0,0 +1,48 @@
import esbuild from "esbuild";
import { readFile } from "fs/promises";
import path from "path";
await esbuild.build({
entryPoints: ["src/index.ts"],
outfile: "dist/index.js",
format: "esm",
bundle: true,
legalComments: "external",
define: {
global: "self",
"import.meta": "true",
},
plugins: [
{
name: "base64",
setup(build) {
build.onResolve({ filter: /\?base64$/ }, (args) => {
return {
path: args.path,
pluginData: {
isAbsolute: path.isAbsolute(args.path),
resolveDir: args.resolveDir,
},
namespace: "base64-loader",
};
});
build.onLoad(
{ filter: /\?base64$/, namespace: "base64-loader" },
async (args) => {
const fullPath = args.pluginData.isAbsolute
? args.path
: path.join(args.pluginData.resolveDir, args.path);
return {
contents: Buffer.from(
await readFile(fullPath.replace(/\?base64$/, ""))
).toString("base64"),
loader: "text",
};
}
);
},
},
],
});
export {};
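
The base64 plugin above lets any source file import an asset with a "?base64" suffix and receive its contents as a base64-encoded string via esbuild's "text" loader; this is how the wasm binary is inlined in src/index.ts further down. A minimal usage sketch with a hypothetical asset path (not part of this diff):

// Illustrative only: "./example.wasm" is a hypothetical asset next to the importer.
// @ts-ignore - the "?base64" specifier is resolved by the build plugin, not TypeScript.
import blob from "./example.wasm?base64";
import { Buffer } from "buffer";

const bytes = Buffer.from(blob, "base64"); // decode back to raw bytes at runtime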

npm-shrinkwrap.json (generated, 24645 lines): diff suppressed because it is too large.


@@ -1,33 +1,30 @@
{
"name": "@lumeweb/kernel-eth",
"version": "0.1.0-develop.3",
"license": "MIT",
"type": "module",
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "gitea@git.lumeweb.com:LumeWeb/kernel-eth.git"
},
"scripts": {
"prepare": "presetter bootstrap",
"build": "run build",
"semantic-release": "semantic-release",
"postinstall": "patch-package"
},
"devDependencies": {
"@lumeweb/presetter-kernel-module-preset": "0.1.0-develop.42",
"@rollup/plugin-alias": "^5.0.0",
"patch-package": "^7.0.0",
"rollup-plugin-ignore-import": "^1.3.2"
},
"dependencies": {
"@lumeweb/bigint-crypto-utils": "1.0.0",
"@lumeweb/kernel-lavanet-client": "^0.1.0-develop.6",
"@lumeweb/kernel-network-registry-client": "^0.1.0-develop.10",
"@lumeweb/kernel-rpc-client": "0.0.2-develop.18",
"@lumeweb/libethsync": "0.1.0-develop.63",
"@lumeweb/libkernel": "0.1.0-develop.66",
"level": "^8.0.0",
"p-defer": "^4.0.0"
}
"name": "@lumeweb/kernel-eth",
"version": "0.1.0",
"license": "MIT",
"type": "module",
"scripts": {
"build-script": "tsc --project tsconfig.build.json && mv dist-build/build.js dist-build/build.mjs",
"compile": "npm run build-script && rimraf node_modules/@lumeweb/dht-rpc-client/node_modules node_modules/@lumeweb/kernel-dht-client/node_modules/libkmodule && node build.js",
"build": "npm run compile && node ./dist-build/build.mjs dev"
},
"dependencies": {
"@lumeweb/kernel-rpc-client": "git+https://git.lumeweb.com/LumeWeb/kernel-rpc-client.git",
"libkmodule": "^0.2.53",
"yaml": "^2.2.1"
},
"devDependencies": {
"@lumeweb/interface-relay": "git+https://git.lumeweb.com/LumeWeb/interface-relay.git",
"@scure/bip39": "^1.2.0",
"@skynetlabs/skynet-nodejs": "^2.9.0",
"@types/node": "^18.15.9",
"@types/read": "^0.0.29",
"buffer": "^6.0.3",
"cli-progress": "^3.12.0",
"esbuild": "^0.17.13",
"esbuild-plugin-wasm": "^1.0.0",
"prettier": "^2.8.7",
"read": "^2.0.0",
"typescript": "^5.0.2"
}
}


@@ -1,89 +0,0 @@
diff --git a/node_modules/@chainsafe/bls/lib/getImplementation.js b/node_modules/@chainsafe/bls/lib/getImplementation.js
index 9137cd4..ba8de8a 100644
--- a/node_modules/@chainsafe/bls/lib/getImplementation.js
+++ b/node_modules/@chainsafe/bls/lib/getImplementation.js
@@ -3,7 +3,7 @@ const isNode = Object.prototype.toString.call(typeof process !== "undefined" ? p
export async function getImplementation(impl = "herumi") {
switch (impl) {
case "herumi": {
- return (await import("./herumi/index.js")).bls;
+ return (await import("./herumi/index.js")).bls();
}
case "blst-native":
// Lazy import native bindings to prevent automatically importing binding.node files
diff --git a/node_modules/@chainsafe/bls/lib/herumi/index.js b/node_modules/@chainsafe/bls/lib/herumi/index.js
index 4ed8fdd..f979984 100644
--- a/node_modules/@chainsafe/bls/lib/herumi/index.js
+++ b/node_modules/@chainsafe/bls/lib/herumi/index.js
@@ -1,16 +1,20 @@
-import { SecretKey } from "./secretKey.js";
-import { PublicKey } from "./publicKey.js";
-import { Signature } from "./signature.js";
-import { init, destroy } from "./context.js";
-import { functionalInterfaceFactory } from "../functional.js";
-await init();
-export * from "../constants.js";
-export { SecretKey, PublicKey, Signature, init, destroy };
-export const bls = {
- implementation: "herumi",
- SecretKey,
- PublicKey,
- Signature,
- ...functionalInterfaceFactory({ SecretKey, PublicKey, Signature }),
-};
-export default bls;
+import { SecretKey } from './secretKey.js'
+import { PublicKey } from './publicKey.js'
+import { Signature } from './signature.js'
+import { init, destroy } from './context.js'
+import { functionalInterfaceFactory } from '../functional.js'
+
+export * from '../constants.js'
+export { SecretKey, PublicKey, Signature, init, destroy }
+
+export const bls = async () => {
+ await init()
+ return {
+ implementation: 'herumi',
+ SecretKey,
+ PublicKey,
+ Signature,
+ ...functionalInterfaceFactory({ SecretKey, PublicKey, Signature }),
+ }
+}
+export default bls
diff --git a/node_modules/@chainsafe/bls/lib/index.d.ts b/node_modules/@chainsafe/bls/lib/index.d.ts
index 35a9432..097a938 100644
--- a/node_modules/@chainsafe/bls/lib/index.d.ts
+++ b/node_modules/@chainsafe/bls/lib/index.d.ts
@@ -1,3 +1,3 @@
import type { IBls } from "./types.js";
-declare let bls: IBls;
+export declare const bls: () => Promise<IBls>;
export default bls;
diff --git a/node_modules/@chainsafe/bls/lib/index.js b/node_modules/@chainsafe/bls/lib/index.js
index c2a5bdf..9572018 100644
--- a/node_modules/@chainsafe/bls/lib/index.js
+++ b/node_modules/@chainsafe/bls/lib/index.js
@@ -1,11 +1,14 @@
import { getImplementation } from "./getImplementation.js";
// Thanks https://github.com/iliakan/detect-node/blob/master/index.esm.js
const isNode = Object.prototype.toString.call(typeof process !== "undefined" ? process : 0) === "[object process]";
-let bls;
-try {
- bls = await getImplementation(isNode ? "blst-native" : "herumi");
-}
-catch (e) {
- bls = await getImplementation("herumi");
-}
+export const bls = async () => {
+ let bls;
+ try {
+ bls = await getImplementation(isNode ? "blst-native" : "herumi");
+ }
+ catch (e) {
+ bls = await getImplementation("herumi");
+ }
+ return bls;
+};
export default bls;
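
The patch above turns the eagerly-initialized bls export of @chainsafe/bls into an async factory, so the herumi WASM init no longer happens via top-level await at import time. A sketch of how a consumer uses the patched module (illustrative, not part of this diff):

// After the patch, the default export is an async factory rather than a ready object.
import bls from "@chainsafe/bls";

const impl = await bls();         // runs init() and returns the implementation
console.log(impl.implementation); // "herumi" when the native blst bindings are unavailable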

src-build/build.ts (new file, 218 lines)

@@ -0,0 +1,218 @@
// This is the standard build script for a kernel module.
import * as fs from "fs";
import read from "read";
import * as bip39 from "@scure/bip39";
import { wordlist } from "@scure/bip39/wordlists/english.js";
//@ts-ignore
import { SkynetClient } from "@skynetlabs/skynet-nodejs";
// Helper variables to make it easier to return empty values alongside errors.
const nu8 = new Uint8Array(0);
const nkp = {
publicKey: nu8,
secretKey: nu8,
};
// readFile is a wrapper for fs.readFileSync that handles the try-catch for the
// caller.
function readFile(fileName: string): [string, string | null] {
try {
let data = fs.readFileSync(fileName, "utf8");
return [data, null];
} catch (err) {
return ["", "unable to read file: " + JSON.stringify(err)];
}
}
// readFileBinary is a wrapper for fs.readFileSync that handles the try-catch
// for the caller.
function readFileBinary(fileName: string): [Uint8Array, string | null] {
try {
let data = fs.readFileSync(fileName, null);
return [data, null];
} catch (err) {
return [nu8, "unable to read file: " + JSON.stringify(err)];
}
}
// writeFile is a wrapper for fs.writeFileSync which handles the try-catch in a
// non-exception way.
function writeFile(fileName: string, fileData: string): string | null {
try {
fs.writeFileSync(fileName, fileData);
return null;
} catch (err) {
return "unable to write file: " + JSON.stringify(err);
}
}
// handlePass handles all portions of the script that occur after the password
// has been requested. If no password needs to be requested, handlePass will be
// called with a null input. We need to structure the code this way because the
// password reader is async and we can only access the password when using a
// callback.
function handlePass(password: string) {
try {
// If we are running prod and the seed file does not exist, we
// need to confirm the password and also warn the user to use a
// secure password.
if (!fs.existsSync(seedFile) && process.argv[2] === "prod") {
// The file does not exist, we need to confirm the
// password.
console.log();
console.log(
"No production entry found for module. Creating new production module..."
);
console.log(
"If someone can guess the password, they can push arbitrary changes to your module."
);
console.log("Please use a secure password.");
console.log();
read(
{ prompt: "Confirm Password: ", silent: true },
function (err: any, confirmPassword: string) {
if (err) {
console.error("unable to fetch password:", err);
process.exit(1);
}
if (password !== confirmPassword) {
console.error("passwords do not match");
process.exit(1);
}
handlePassConfirm(moduleSalt, password);
}
);
} else {
// If the seed file does exist, or if we are using dev,
// there's no need to confirm the password but we do
// need to pass the logic off to the handlePassConfirm
// callback.
handlePassConfirm(moduleSalt, password);
}
} catch (err) {
console.error("Unable to read seedFile:", err);
process.exit(1);
}
}
// handlePassConfirm handles the full script after the confirmation password
// has been provided. If no confirmation password is needed, this function
// will be called anyway using the unconfirmed password as input.
function handlePassConfirm(seed: string, password: string) {
// Create the seedFile if it does not exist. For dev we just save the
// seed to disk outright, because this is a dev build and therefore not
// security sensitive. Also the dev seed does not get pushed to the
// github repo.
//
// For prod, we use the seed to create a new seed (called the shield)
// which allows us to verify that the developer has provided the right
// password when deploying the module. The shield does get pushed to
// the github repo so that the production module is the same on all
// devices.
if (!fs.existsSync(seedFile) && process.argv[2] !== "prod") {
// Generate the seed phrase and write it to the file.
let seedPhrase = bip39.generateMnemonic(wordlist);
let errWF = writeFile(seedFile, seedPhrase);
if (errWF !== null) {
console.error("unable to write file:", errWF);
process.exit(1);
}
} else if (!fs.existsSync(seedFile) && process.argv[2] === "prod") {
// Generate the seed phrase.
let seedPhrase = bip39.generateMnemonic(wordlist);
// Write the registry link to the file.
}
// Load or verify the seed. If this is prod, the password is used to
// create and verify the seed. If this is dev, we just load the seed
// with no password.
let seedPhrase: string;
let registryLink: string;
if (process.argv[2] === "prod") {
// Generate the seed phrase from the password.
seedPhrase = bip39.generateMnemonic(wordlist);
} else {
let [sp, errRF] = readFile(seedFile);
if (errRF !== null) {
console.error("unable to read seed phrase for dev command from disk");
process.exit(1);
}
seedPhrase = sp;
}
let metadata = {
Filename: "index.js",
};
const client = new SkynetClient("https://web3portal.com");
client
.uploadFile("dist/index.js")
.then((result: any) => {
console.log("Immutable Link for module:", result);
})
.catch((err: any) => {
console.error("unable to upload file", err);
process.exit(1);
});
}
// Add a newline for readability.
console.log();
// Check for a 'dev' or 'prod' input to the script.
if (process.argv.length !== 3) {
console.error("need to provide either 'dev' or 'prod' as an input");
process.exit(1);
}
// Create the build folder if it does not exist.
if (!fs.existsSync("build")) {
fs.mkdirSync("build");
}
// Determine the seed file.
let seedFile: string;
if (process.argv[2] === "prod") {
seedFile = "module-skylink";
} else if (process.argv[2] === "dev") {
seedFile = "build/dev-seed";
} else {
console.error("need to provide either 'dev' or 'prod' as an input");
process.exit(1);
}
// If doing a prod deployment, check whether the salt file exists. If it does
// not, create it.
let moduleSalt: string;
if (!fs.existsSync(".module-salt")) {
moduleSalt = bip39.generateMnemonic(wordlist);
let errWF = writeFile(".module-salt", moduleSalt);
if (errWF !== null) {
console.error("unable to write module salt file:", errWF);
process.exit(1);
}
} else {
let [ms, errRF] = readFile(".module-salt");
if (errRF !== null) {
console.error("unable to read moduleSalt");
process.exit(1);
}
ms = ms.replace(/\n$/, "");
moduleSalt = ms;
}
// Need to get a password if this is a prod build.
if (process.argv[2] === "prod") {
read(
{ prompt: "Password: ", silent: true },
function (err: any, password: string) {
if (err) {
console.error("unable to fetch password:", err);
process.exit(1);
}
handlePass(password);
}
);
} else {
handlePass("");
}
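
readFile and writeFile in the script above return Go-style [value, error] tuples instead of throwing. A short illustrative sketch of that convention, reusing the script's own readFile and dev seed path (not part of this diff):

// Illustrative only: consume the [value, error] tuple returned by readFile above.
const [devSeed, errRead] = readFile("build/dev-seed");
if (errRead !== null) {
  console.error("unable to read dev seed:", errRead);
  process.exit(1);
}
console.log("dev seed phrase has", devSeed.split(" ").length, "words");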


@@ -1,47 +1,26 @@
import {
ActiveQuery,
addHandler,
handleMessage,
log,
logErr,
} from "@lumeweb/libkernel/module";
import {
createClient as createRpcClient,
RpcNetwork,
} from "@lumeweb/kernel-rpc-client";
import { createClient as createNetworkRegistryClient } from "@lumeweb/kernel-network-registry-client";
import {
Client as EthClient,
ConsensusCommitteeUpdateRequest,
createDefaultClient as createEthClient,
} from "@lumeweb/libethsync/client";
import * as capella from "@lodestar/types/capella";
import defer from "p-defer";
import { Level } from "level";
import {
type LavaNetClient,
createClient as createLavanetClient,
} from "@lumeweb/kernel-lavanet-client";
import { ActiveQuery, addHandler, handleMessage } from "libkmodule";
import { createClient, RpcNetwork } from "@lumeweb/kernel-rpc-client";
import init, { Client } from "../wasm/helios_ts.js";
// @ts-ignore
import wasm from "../wasm/helios_ts_bg.wasm?base64";
import { Buffer } from "buffer";
import { RPCResponse } from "@lumeweb/interface-relay";
import { ConsensusRequest, ExecutionRequest } from "./types.js";
const CHECKPOINT =
"0x694433ba78dd08280df68d3713c0f79d668dbee9e0922ec2346fcceb1dc3daa9";
onmessage = handleMessage;
const TYPES = ["blockchain"];
const networkRegistry = createNetworkRegistryClient();
const moduleReadyDefer = defer();
const clientInitDefer = defer();
let client: EthClient;
let lavanet: LavaNetClient;
let rpc: RpcNetwork;
const db = new Level<number | string, Uint8Array>("consensus", {
valueEncoding: "buffer",
let moduleReadyResolve: Function;
let moduleReady: Promise<void> = new Promise((resolve) => {
moduleReadyResolve = resolve;
});
addHandler("presentKey", handlePresentKey);
addHandler("register", handleRegister);
addHandler("status", handleStatus, { receiveUpdates: true });
addHandler("name", handleName);
let client: Client;
let rpc: RpcNetwork;
addHandler("presentSeed", handlePresentSeed);
addHandler("ready", handleReady);
[
@@ -69,169 +48,181 @@ addHandler("ready", handleReady);
params: aq.callerInput || {},
method: rpcMethod,
};
try {
const ret = await handleRpcMethod(aq);
aq.respond(ret);
} catch (e: any) {
aq.reject((e as Error).message);
}
aq.respond(await handleRpcMethod(aq));
});
});
async function handlePresentKey() {
async function handlePresentSeed() {
await setup();
moduleReadyDefer.resolve();
moduleReadyResolve();
}
async function handleRpcMethod(aq: ActiveQuery) {
await moduleReadyDefer.promise;
if (!client.isSynced) {
await client.sync();
}
return client.rpcCall(aq.callerInput?.method, aq.callerInput?.params);
}
async function consensusHandler(method: string, data: any) {
await rpc.ready;
while (true) {
let query = await rpc.simpleQuery({
query: {
module: "eth",
method,
data,
},
options: {
relayTimeout: 30,
queryTimeout: 30,
},
});
const ret = await query.result;
if (ret.data) {
return ret.data;
await moduleReady;
switch (aq.callerInput?.method) {
case "eth_accounts":
case "eth_requestAccounts": {
return [];
}
case "eth_getBalance": {
return client.get_balance(
aq.callerInput?.params[0],
aq.callerInput?.params[1]
);
}
case "eth_chainId": {
return client.chain_id();
}
case "eth_blockNumber": {
return client.get_block_number();
}
case "eth_getTransactionByHash": {
let tx = await client.get_transaction_by_hash(aq.callerInput?.params[0]);
return mapToObj(tx);
}
case "eth_getTransactionCount": {
return client.get_transaction_count(
aq.callerInput?.params[0],
aq.callerInput?.params[1]
);
}
case "eth_getBlockTransactionCountByHash": {
return client.get_block_transaction_count_by_hash(
aq.callerInput?.params[0]
);
}
case "eth_getBlockTransactionCountByNumber": {
return client.get_block_transaction_count_by_number(
aq.callerInput?.params[0]
);
}
case "eth_getCode": {
return client.get_code(
aq.callerInput?.params[0],
aq.callerInput?.params[1]
);
}
case "eth_call": {
return client.call(aq.callerInput?.params[0], aq.callerInput?.params[1]);
}
case "eth_estimateGas": {
return client.estimate_gas(aq.callerInput?.params[0]);
}
case "eth_gasPrice": {
return client.gas_price();
}
case "eth_maxPriorityFeePerGas": {
return client.max_priority_fee_per_gas();
}
case "eth_sendRawTransaction": {
return client.send_raw_transaction(aq.callerInput?.params[0]);
}
case "eth_getTransactionReceipt": {
return client.get_transaction_receipt(aq.callerInput?.params[0]);
}
case "eth_getLogs": {
return client.get_logs(aq.callerInput?.params[0]);
}
case "net_version": {
return client.chain_id();
}
}
}
async function executionHandler(data: Map<string, string | any>) {
await rpc.ready;
return await lavanet.query("ETH1", data);
}
async function setup() {
rpc = createRpcClient();
lavanet = createLavanetClient();
await db.open();
await rpc.ready;
rpc = createClient();
client = createEthClient(
async (args: ConsensusCommitteeUpdateRequest) => {
const updates = await consensusHandler("consensus_updates", args);
// @ts-ignore
await (
await rpc.ready
)();
return updates
.map((u) => new Uint8Array(Object.values(u)))
.map((u) => capella.ssz.LightClientUpdate.deserialize(u))
.map((u) => capella.ssz.LightClientUpdate.toJson(u));
},
executionHandler,
async () => {
const update = await consensusHandler("consensus_optimistic_update", {});
await init(URL.createObjectURL(new Blob([Buffer.from(wasm, "base64")])));
return capella.ssz.LightClientOptimisticUpdate.deserialize(
new Uint8Array(Object.values(update)),
);
},
log,
logErr,
500,
);
(self as any).consensus_rpc_handler = async (
data: Map<string, string | any>
) => {
const method = data.get("method");
const path = data.get("path");
let lastUpdate = 0;
let query;
let ret: RPCResponse;
client.store.on("set", async (period: number, update: Uint8Array) => {
if (period < lastUpdate) {
return;
}
await db.put("latest", update);
lastUpdate = period;
});
while (true) {
query = await rpc.simpleQuery({
query: {
module: "eth",
method: "consensus_request",
data: {
method,
path,
} as ConsensusRequest,
},
options: {
relayTimeout: 10,
queryTimeout: 10,
},
});
clientInitDefer.resolve();
let synced = false;
while (!synced) {
try {
let consensus;
try {
consensus = await db.get("latest");
} catch {}
if (consensus) {
try {
await client.syncFromCheckpoint(
capella.ssz.LightClientUpdate.deserialize(consensus),
);
} catch {
await client.sync();
}
} else {
await client.sync();
ret = await query.result;
if (ret?.data) {
break;
}
synced = true;
} catch (e) {
logErr(e.message);
}
}
if (path.startsWith("/eth/v1/beacon/light_client/updates")) {
return JSON.stringify(ret.data);
}
return JSON.stringify({ data: ret.data });
};
(self as any).execution_rpc_handler = async (
data: Map<string, string | any>
) => {
const method = data.get("method");
let params = data.get("params");
params = JSON.parse(params);
let query;
let ret: RPCResponse;
while (true) {
query = await rpc.simpleQuery({
query: {
module: "eth",
method: "execution_request",
data: {
method,
params,
} as ExecutionRequest,
},
});
ret = await query.result;
if (ret?.data) {
break;
}
}
return JSON.stringify(ret.data);
};
client = new Client(CHECKPOINT);
await client.sync();
}
async function handleReady(aq: ActiveQuery) {
await moduleReadyDefer.promise;
await moduleReady;
aq.respond();
}
async function handleRegister(aq: ActiveQuery) {
await networkRegistry.registerNetwork(TYPES);
function mapToObj(map: Map<any, any> | undefined): Object | undefined {
if (!map) return undefined;
aq.respond();
}
async function handleStatus(aq: ActiveQuery) {
await clientInitDefer.promise;
let chainProgress = 0;
const chainProgressListener = (currentUpdate, totalUpdates) => {
chainProgress = Math.round((currentUpdate / totalUpdates) * 100) / 100;
sendUpdate();
};
const chainSyncedListener = () => {
sendUpdate();
};
client.on("update", chainProgressListener);
client.on("synced", chainSyncedListener);
function sendUpdate() {
aq.sendUpdate({
sync: Math.floor(chainProgress * 100),
peers: 1,
ready: client.isSynced,
});
}
aq.setReceiveUpdate?.(() => {
client.off("update", chainProgressListener);
client.off("synced", chainSyncedListener);
aq.respond();
});
sendUpdate();
}
function handleName(aq: ActiveQuery) {
aq.respond("Ethereum");
return Array.from(map).reduce((obj: any, [key, value]) => {
obj[key] = value;
return obj;
}, {});
}
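
mapToObj above flattens the Map values returned by the wasm client into plain objects so they can be serialized in an RPC response. A quick illustrative example with hypothetical values (not part of this diff):

// Illustrative only: hypothetical transaction fields.
const tx = new Map<string, any>([
  ["hash", "0xabc123"],
  ["nonce", 1],
]);
mapToObj(tx);        // => { hash: "0xabc123", nonce: 1 }
mapToObj(undefined); // => undefined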

src/types.ts (new file, 8 lines)

@@ -0,0 +1,8 @@
export interface ConsensusRequest extends RequestInit {
path: string;
}
export interface ExecutionRequest {
method: string;
params: string;
}
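
These two shapes mirror the payloads the module sends in its consensus_request and execution_request RPC queries (see the handlers in src/index.ts above). An illustrative sketch, not part of this diff:

// Illustrative only: payloads matching the interfaces above.
import { ConsensusRequest, ExecutionRequest } from "./types.js";

const consensusReq: ConsensusRequest = {
  method: "GET", // RequestInit fields are available because ConsensusRequest extends it
  path: "/eth/v1/beacon/light_client/updates",
};

const executionReq: ExecutionRequest = {
  method: "eth_blockNumber",
  params: "[]", // JSON-encoded RPC params
};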

tsconfig.build.json (new file, 14 lines)

@@ -0,0 +1,14 @@
{
"compilerOptions": {
"target": "esnext",
"module": "esnext",
"moduleResolution": "node",
"allowSyntheticDefaultImports": true,
"declaration": true,
"outDir": "./dist-build",
"strict": true,
"esModuleInterop": true
},
"include": ["src-build"],
"exclude": ["node_modules", "**/__tests__/*"]
}

tsconfig.json (new file, 13 lines)

@@ -0,0 +1,13 @@
{
"compilerOptions": {
"target": "esnext",
"declaration": true,
"moduleResolution": "node",
"outDir": "./build",
"strict": true,
"allowSyntheticDefaultImports": true,
"esModuleInterop": true
},
"include": ["src"],
"exclude": ["node_modules", "**/__tests__/*"]
}