Compare commits

..

No commits in common. "v0.1.0-develop.36" and "v0.1.0-develop.35" have entirely different histories.

6 changed files with 20 additions and 25 deletions

View File

@@ -1,13 +1,3 @@
# [0.1.0-develop.36](https://git.lumeweb.com/LumeWeb/libethsync/compare/v0.1.0-develop.35...v0.1.0-develop.36) (2023-07-13)
### Bug Fixes
* need to use concat not push ([b87017e](https://git.lumeweb.com/LumeWeb/libethsync/commit/b87017eb678282bf47758ca834e2acbc1cf6e516))
* Revert "fix: create fixSerializedUint8Array helper method to deal with weird quirk of ssz serialize" ([6ef18db](https://git.lumeweb.com/LumeWeb/libethsync/commit/6ef18dbc05b5b5801a6b05cea5056d631e8a094d))
* temporarily disable block hash check as it is bugged ([91144cb](https://git.lumeweb.com/LumeWeb/libethsync/commit/91144cb5a2b5d05fd301b11501861aadd10a69b5))
* use byteArrayEquals ([157811b](https://git.lumeweb.com/LumeWeb/libethsync/commit/157811b2348fa94d5d6b076219f34b3b340a50ac))
# [0.1.0-develop.35](https://git.lumeweb.com/LumeWeb/libethsync/compare/v0.1.0-develop.34...v0.1.0-develop.35) (2023-07-13)

4
npm-shrinkwrap.json generated
View File

@@ -1,12 +1,12 @@
{ {
"name": "@lumeweb/libethclient", "name": "@lumeweb/libethclient",
"version": "0.1.0-develop.36", "version": "0.1.0-develop.35",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "@lumeweb/libethclient", "name": "@lumeweb/libethclient",
"version": "0.1.0-develop.36", "version": "0.1.0-develop.35",
"dependencies": { "dependencies": {
"@chainsafe/as-sha256": "^0.3.1", "@chainsafe/as-sha256": "^0.3.1",
"@chainsafe/bls": "7.1.1", "@chainsafe/bls": "7.1.1",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@lumeweb/libethsync", "name": "@lumeweb/libethsync",
"version": "0.1.0-develop.36", "version": "0.1.0-develop.35",
"type": "module", "type": "module",
"repository": { "repository": {
"type": "git", "type": "git",

View File

@@ -45,7 +45,7 @@ import {
RPCTx, RPCTx,
} from "./types.js"; } from "./types.js";
import { keccak256 } from "ethereum-cryptography/keccak"; import { keccak256 } from "ethereum-cryptography/keccak";
import { byteArrayEquals, fromHexString } from "@chainsafe/ssz"; import { fromHexString } from "@chainsafe/ssz";
import { RPC } from "#client/rpc.js"; import { RPC } from "#client/rpc.js";
export interface IClientVerifyingProvider extends IVerifyingProvider { export interface IClientVerifyingProvider extends IVerifyingProvider {
@@ -464,7 +464,7 @@ export default class VerifyingProvider implements IClientVerifyingProvider {
.map((r: any) => r.result) .map((r: any) => r.result)
.reduce( .reduce(
(acc, curr, idx, arr) => (acc, curr, idx, arr) =>
idx % 2 === 0 ? acc.concat([[curr, arr[idx + 1]]]) : acc, idx % 2 === 0 ? acc.push([curr, arr[idx + 1]]) : acc,
[], [],
) as [AccountResponse, CodeResponse][]; ) as [AccountResponse, CodeResponse][];
@@ -579,9 +579,9 @@ export default class VerifyingProvider implements IClientVerifyingProvider {
const header = BlockHeader.fromHeaderData(headerData); const header = BlockHeader.fromHeaderData(headerData);
if (!header.hash().equals(toBuffer(blockHash))) { if (!header.hash().equals(toBuffer(blockHash))) {
/* throw new Error( throw new Error(
`blockhash doesn't match the blockInfo provided by the RPC`, `blockhash doesn't match the blockInfo provided by the RPC`,
);*/ );
} }
this.blockHeaders[blockHash] = header; this.blockHeaders[blockHash] = header;
} }
@@ -640,10 +640,7 @@ export default class VerifyingProvider implements IClientVerifyingProvider {
private verifyCodeHash(code: Bytes, codeHash: Bytes32): boolean { private verifyCodeHash(code: Bytes, codeHash: Bytes32): boolean {
return ( return (
(code === "0x" && codeHash === "0x" + KECCAK256_NULL_S) || (code === "0x" && codeHash === "0x" + KECCAK256_NULL_S) ||
byteArrayEquals( keccak256(fromHexString(codeHash)) === fromHexString(codeHash)
keccak256(fromHexString(codeHash)),
fromHexString(codeHash),
)
); );
} }

View File

@@ -5,6 +5,7 @@ import { concatBytes } from "@noble/hashes/utils";
import { LightClientUpdate } from "#types.js"; import { LightClientUpdate } from "#types.js";
import * as capella from "@lodestar/types/capella"; import * as capella from "@lodestar/types/capella";
import NodeCache from "node-cache"; import NodeCache from "node-cache";
import { fixSerializedUint8Array } from "#util.js";
export interface StoreItem { export interface StoreItem {
update: Uint8Array; update: Uint8Array;
@@ -22,10 +23,14 @@ export default class Store implements IStore {
addUpdate(period: number, update: LightClientUpdate) { addUpdate(period: number, update: LightClientUpdate) {
try { try {
this.store.set(period, { this.store.set(period, {
update: capella.ssz.LightClientUpdate.serialize(update), update: fixSerializedUint8Array(
nextCommittee: CommitteeSSZ.serialize(update.nextSyncCommittee.pubkeys), capella.ssz.LightClientUpdate.serialize(update),
nextCommitteeHash: digest( ),
concatBytes(...update.nextSyncCommittee.pubkeys), nextCommittee: fixSerializedUint8Array(
CommitteeSSZ.serialize(update.nextSyncCommittee.pubkeys),
),
nextCommitteeHash: fixSerializedUint8Array(
digest(concatBytes(...update.nextSyncCommittee.pubkeys)),
), ),
}); });
} catch (e) { } catch (e) {

View File

@@ -131,3 +131,6 @@ function isValidLightClientHeader(
header.beacon.bodyRoot, header.beacon.bodyRoot,
); );
} }
export function fixSerializedUint8Array(arr: Uint8Array) {
return new Uint8Array(Object.values(arr));
}