Compare commits

...

12 Commits

Author SHA1 Message Date
semantic-release-bot c27011cacf chore(release): 0.1.0-develop.61 [skip ci]
# [0.1.0-develop.61](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.60...v0.1.0-develop.61) (2023-11-17)

### Bug Fixes

* bad import ([23b8c01](23b8c01d1c))
* export StorageLocationProvider ([828741f](828741f569))
* need to store NodeId not the string form of it ([3ad41c7](3ad41c75c1))

### Features

* add encrypted_cid.ts ([6ff8a84](6ff8a84ed8))
* add getCachedStorageLocations method ([86522e1](86522e1ffe))
* add getMetadataByCID and downloadBytesByHash ([ec81583](ec815833ef))
* add metadata structures and ser/der functions ([4693117](4693117c76))
* add StorageLocationProvider ([6d19fdb](6d19fdb66e))
2023-11-17 13:32:36 +00:00
Derrick Hammer 23b8c01d1c
fix: bad import 2023-11-17 08:31:56 -05:00
Derrick Hammer 51a6747f42
Merge remote-tracking branch 'origin/develop' into develop 2023-11-17 08:13:40 -05:00
Derrick Hammer ef7c27dd4f
dep: add axios 2023-11-17 08:05:40 -05:00
Derrick Hammer ec815833ef
feat: add getMetadataByCID and downloadBytesByHash 2023-11-17 08:05:31 -05:00
Derrick Hammer 4693117c76
feat: add metadata structures and ser/der functions 2023-11-17 08:05:00 -05:00
Derrick Hammer 892dd6ccd4
refactor: update constants.ts 2023-11-17 08:04:19 -05:00
Derrick Hammer 6ff8a84ed8
feat: add encrypted_cid.ts 2023-11-17 08:04:01 -05:00
Derrick Hammer 828741f569
fix: export StorageLocationProvider 2023-11-17 05:03:18 -05:00
Derrick Hammer 6d19fdb66e
feat: add StorageLocationProvider 2023-11-17 04:57:15 -05:00
Derrick Hammer 86522e1ffe
feat: add getCachedStorageLocations method 2023-11-17 04:56:57 -05:00
Derrick Hammer 3ad41c75c1
fix: need to store NodeId not the string form of it 2023-11-17 04:56:19 -05:00
14 changed files with 1659 additions and 15 deletions

View File

@ -1,3 +1,21 @@
# [0.1.0-develop.61](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.60...v0.1.0-develop.61) (2023-11-17)
### Bug Fixes
* bad import ([23b8c01](https://git.lumeweb.com/LumeWeb/libs5/commit/23b8c01d1c3ade99fe4a1ff7c8a781f3fe4c3bb8))
* export StorageLocationProvider ([828741f](https://git.lumeweb.com/LumeWeb/libs5/commit/828741f569c3ce5dbf2932651c4ee7435adcc4bf))
* need to store NodeId not the string form of it ([3ad41c7](https://git.lumeweb.com/LumeWeb/libs5/commit/3ad41c75c174f80b0f18bf527959110f1af03448))
### Features
* add encrypted_cid.ts ([6ff8a84](https://git.lumeweb.com/LumeWeb/libs5/commit/6ff8a84ed8fb062dd7e08149d7b5a2a7d5cd7e36))
* add getCachedStorageLocations method ([86522e1](https://git.lumeweb.com/LumeWeb/libs5/commit/86522e1ffea743afe3c336d41eb5e633f2b5d809))
* add getMetadataByCID and downloadBytesByHash ([ec81583](https://git.lumeweb.com/LumeWeb/libs5/commit/ec815833ef9c3f703e03b731afdc67f3f4e8cc7c))
* add metadata structures and ser/der functions ([4693117](https://git.lumeweb.com/LumeWeb/libs5/commit/4693117c76f3a8f2ace49dd8ba987169e7145e62))
* add StorageLocationProvider ([6d19fdb](https://git.lumeweb.com/LumeWeb/libs5/commit/6d19fdb66e782b2a18edfb94541e08dd5ce6158f))
# [0.1.0-develop.60](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.59...v0.1.0-develop.60) (2023-09-19)

95
npm-shrinkwrap.json generated
View File

@ -1,15 +1,16 @@
{ {
"name": "@lumeweb/libs5", "name": "@lumeweb/libs5",
"version": "0.1.0-develop.60", "version": "0.1.0-develop.61",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "@lumeweb/libs5", "name": "@lumeweb/libs5",
"version": "0.1.0-develop.60", "version": "0.1.0-develop.61",
"dependencies": { "dependencies": {
"@noble/curves": "^1.1.0", "@noble/curves": "^1.1.0",
"@noble/hashes": "^1.3.1", "@noble/hashes": "^1.3.1",
"axios": "^1.6.2",
"detect-node": "^2.1.0", "detect-node": "^2.1.0",
"level": "^8.0.0", "level": "^8.0.0",
"multiformats": "^12.0.1", "multiformats": "^12.0.1",
@ -4037,6 +4038,11 @@
"dev": true, "dev": true,
"peer": true "peer": true
}, },
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/available-typed-arrays": { "node_modules/available-typed-arrays": {
"version": "1.0.5", "version": "1.0.5",
"resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz",
@ -4050,6 +4056,16 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/axios": {
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
"integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
"dependencies": {
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/babel-jest": { "node_modules/babel-jest": {
"version": "29.6.3", "version": "29.6.3",
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.6.3.tgz", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.6.3.tgz",
@ -4983,6 +4999,17 @@
"integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==",
"dev": true "dev": true
}, },
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/commander": { "node_modules/commander": {
"version": "9.5.0", "version": "9.5.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz",
@ -5940,6 +5967,14 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/delegates": { "node_modules/delegates": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
@ -7281,6 +7316,25 @@
"dev": true, "dev": true,
"peer": true "peer": true
}, },
"node_modules/follow-redirects": {
"version": "1.15.3",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz",
"integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/for-each": { "node_modules/for-each": {
"version": "0.3.3", "version": "0.3.3",
"resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
@ -7319,6 +7373,19 @@
"url": "https://github.com/sponsors/isaacs" "url": "https://github.com/sponsors/isaacs"
} }
}, },
"node_modules/form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/from2": { "node_modules/from2": {
"version": "2.3.0", "version": "2.3.0",
"resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz",
@ -10840,6 +10907,25 @@
"node": ">=10.0.0" "node": ">=10.0.0"
} }
}, },
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mimic-fn": { "node_modules/mimic-fn": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
@ -15733,6 +15819,11 @@
"integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==",
"dev": true "dev": true
}, },
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
},
"node_modules/punycode": { "node_modules/punycode": {
"version": "2.3.0", "version": "2.3.0",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",

View File

@ -1,6 +1,6 @@
{ {
"name": "@lumeweb/libs5", "name": "@lumeweb/libs5",
"version": "0.1.0-develop.60", "version": "0.1.0-develop.61",
"type": "module", "type": "module",
"main": "lib/index.js", "main": "lib/index.js",
"repository": { "repository": {
@ -20,6 +20,7 @@
"dependencies": { "dependencies": {
"@noble/curves": "^1.1.0", "@noble/curves": "^1.1.0",
"@noble/hashes": "^1.3.1", "@noble/hashes": "^1.3.1",
"axios": "^1.6.2",
"detect-node": "^2.1.0", "detect-node": "^2.1.0",
"level": "^8.0.0", "level": "^8.0.0",
"multiformats": "^12.0.1", "multiformats": "^12.0.1",

View File

@ -63,6 +63,10 @@ Object.freeze(PARENT_LINK_TYPES);
export const registryMaxDataSize = 64; export const registryMaxDataSize = 64;
export const parentLinkTypeUserIdentity = 1;
export const parentLinkTypeBoard = 5;
export const parentLinkTypeBridgeUser = 10;
// ! user identity // ! user identity
export const authPayloadVersion1 = 0x01; export const authPayloadVersion1 = 0x01;

67
src/encrypted_cid.ts Normal file
View File

@ -0,0 +1,67 @@
import Multibase from "#multibase.js";
import { Multihash } from "#multihash.js";
import CID from "#cid.js";
import { CID_TYPES } from "#constants.js";
import { decodeEndian, encodeEndian } from "#util.js";
/**
 * CID wrapper for an encrypted blob. Bundles the hash of the encrypted
 * blob, the encryption parameters (algorithm, key, chunk size, padding)
 * and the original plaintext CID into a single multibase-encodable
 * byte string.
 *
 * Byte layout (see fromBytes/toBytes):
 *   [0]      CID type byte
 *   [1]      encryption algorithm id
 *   [2]      chunk size as a power of two
 *   [3..36)  encrypted blob multihash (33 bytes)
 *   [36..68) encryption key (32 bytes)
 *   [68..72) padding (4 bytes, decoded via decodeEndian)
 *   [72..]   original CID bytes
 */
export default class EncryptedCID extends Multibase {
  encryptedBlobHash: Multihash;
  originalCID: CID;
  encryptionAlgorithm: number;
  padding: number;
  chunkSizeAsPowerOf2: number;
  encryptionKey: Uint8Array;

  constructor(
    encryptedBlobHash: Multihash,
    originalCID: CID,
    encryptionKey: Uint8Array,
    padding: number,
    chunkSizeAsPowerOf2: number,
    encryptionAlgorithm: number,
  ) {
    super();
    this.encryptedBlobHash = encryptedBlobHash;
    this.originalCID = originalCID;
    this.encryptionKey = encryptionKey;
    this.padding = padding;
    this.chunkSizeAsPowerOf2 = chunkSizeAsPowerOf2;
    this.encryptionAlgorithm = encryptionAlgorithm;
  }

  /** Decode a multibase string into an EncryptedCID. */
  static decode(cid: string): EncryptedCID {
    return EncryptedCID.fromBytes(Multibase.decodeString(cid));
  }

  /**
   * Parse raw bytes into an EncryptedCID.
   *
   * Accepts both encrypted CID type bytes: toBytes() emits
   * CID_TYPES.ENCRYPTED_STATIC, so rejecting it here broke the
   * fromBytes(toBytes()) round trip; ENCRYPTED_DYNAMIC remains accepted
   * for backward compatibility with previously produced CIDs.
   *
   * @throws Error when the leading type byte is not an encrypted CID type.
   */
  static fromBytes(bytes: Uint8Array): EncryptedCID {
    if (
      bytes[0] !== CID_TYPES.ENCRYPTED_STATIC &&
      bytes[0] !== CID_TYPES.ENCRYPTED_DYNAMIC
    ) {
      throw new Error(`Invalid CID type (${bytes[0]})`);
    }
    return new EncryptedCID(
      new Multihash(bytes.slice(3, 36)),
      CID.fromBytes(bytes.slice(72)),
      bytes.slice(36, 68),
      decodeEndian(bytes.slice(68, 72)),
      bytes[2],
      bytes[1],
    );
  }

  /** Plaintext chunk size in bytes, derived from the stored exponent. */
  get chunkSize(): number {
    return Math.pow(2, this.chunkSizeAsPowerOf2);
  }

  /** Serialize to raw bytes (inverse of fromBytes, see class layout). */
  toBytes(): Uint8Array {
    const data = [
      CID_TYPES.ENCRYPTED_STATIC,
      this.encryptionAlgorithm,
      this.chunkSizeAsPowerOf2,
      ...this.encryptedBlobHash.fullBytes,
      ...this.encryptionKey,
      ...encodeEndian(this.padding, 4),
      ...this.originalCID.toBytes(),
    ];
    return new Uint8Array(data);
  }
}

View File

@ -3,12 +3,22 @@ import NodeId from "./nodeId.js";
import { Logger, S5Config, S5NodeConfig, S5Services } from "./types.js"; import { Logger, S5Config, S5NodeConfig, S5Services } from "./types.js";
import Unpacker from "./serialization/unpack.js"; import Unpacker from "./serialization/unpack.js";
import Packer from "./serialization/pack.js"; import Packer from "./serialization/pack.js";
import StorageLocation from "./storage.js"; import StorageLocation, { StorageLocationProvider } from "./storage.js";
import KeyPairEd25519 from "#ed25519.js";
import { AbstractLevel } from "abstract-level"; import { AbstractLevel } from "abstract-level";
import { P2PService } from "#service/p2p.js"; import { P2PService } from "#service/p2p.js";
import { RegistryService } from "#service/registry.js"; import { RegistryService } from "#service/registry.js";
import { hash } from "@noble/hashes/_assert"; import {
CID_TYPES,
storageLocationTypeFile,
storageLocationTypeFull,
} from "#constants.js";
import axios from "axios";
import { equalBytes } from "@noble/curves/abstract/utils";
import { blake3 } from "@noble/hashes/blake3";
import CID from "#cid.js";
import type Metadata from "#serialization/metadata/base.js";
import { deserialize as deserializeMediaMetadata } from "#serialization/metadata/media.js";
import { deserialize as deserializeWebAppMetadata } from "#serialization/metadata/webapp.js";
const DEFAULT_LOGGER = { const DEFAULT_LOGGER = {
info(s: any) { info(s: any) {
console.info(s); console.info(s);
@ -29,6 +39,10 @@ const DEFAULT_LOGGER = {
export class S5Node { export class S5Node {
private _nodeConfig: S5NodeConfig; private _nodeConfig: S5NodeConfig;
private metadataCache: Map<Multihash, Metadata> = new Map<
Multihash,
Metadata
>();
constructor(config: S5NodeConfig) { constructor(config: S5NodeConfig) {
this._nodeConfig = config; this._nodeConfig = config;
@ -85,10 +99,44 @@ export class S5Node {
await this.services.p2p.stop(); await this.services.p2p.stop();
} }
async getCachedStorageLocations(
hash: Multihash,
types: number[],
): Promise<Map<NodeId, StorageLocation>> {
const locations = new Map<NodeId, StorageLocation>();
const map = await this.readStorageLocationsFromDB(hash); // Assuming this method exists and returns a Map or similar structure
if (map.size === 0) {
return new Map();
}
const ts = Math.floor(Date.now() / 1000);
types.forEach((type) => {
if (!map.has(type)) return;
map.get(type)!.forEach((value, key) => {
if (value[3] >= ts) {
const storageLocation = new StorageLocation(
type,
value[1].map((v: string) => v), // Assuming value[1] is an array of strings
value[3],
);
// Assuming providerMessage is a property of StorageLocation
storageLocation.providerMessage = value[4];
locations.set(key, storageLocation);
}
});
});
return locations;
}
async readStorageLocationsFromDB( async readStorageLocationsFromDB(
hash: Multihash, hash: Multihash,
): Promise<Map<number, Map<string, Map<number, any>>>> { ): Promise<Map<number, Map<NodeId, Map<number, any>>>> {
const map = new Map<number, Map<string, Map<number, any>>>(); const map = new Map<number, Map<NodeId, Map<number, any>>>();
const bytes = await this.db.get(stringifyHash(hash)); const bytes = await this.db.get(stringifyHash(hash));
if (bytes === null) { if (bytes === null) {
return map; return map;
@ -97,15 +145,12 @@ export class S5Node {
const mapLength = unpacker.unpackMapLength(); const mapLength = unpacker.unpackMapLength();
for (let i = 0; i < mapLength; i++) { for (let i = 0; i < mapLength; i++) {
const type = unpacker.unpackInt() as number; const type = unpacker.unpackInt() as number;
const innerMap = new Map<string, Map<number, any>>(); const innerMap = new Map<NodeId, Map<number, any>>();
map.set(type, innerMap); map.set(type, innerMap);
const innerMapLength = unpacker.unpackMapLength(); const innerMapLength = unpacker.unpackMapLength();
for (let j = 0; j < innerMapLength; j++) { for (let j = 0; j < innerMapLength; j++) {
const nodeId = new NodeId(unpacker.unpackBinary()); const nodeId = new NodeId(unpacker.unpackBinary());
innerMap.set( innerMap.set(nodeId, new Map(unpacker.unpackMap() as [number, any][]));
nodeId.toString(),
new Map(unpacker.unpackMap() as [number, any][]),
);
} }
} }
return map; return map;
@ -126,7 +171,7 @@ export class S5Node {
}) { }) {
const map = await this.readStorageLocationsFromDB(hash); const map = await this.readStorageLocationsFromDB(hash);
const innerMap = const innerMap =
map.get(location.type) || new Map<string, Map<number, any>>(); map.get(location.type) || new Map<NodeId, Map<number, any>>();
map.set(location.type, innerMap); map.set(location.type, innerMap);
const locationMap = new Map<number, any>([ const locationMap = new Map<number, any>([
@ -136,12 +181,81 @@ export class S5Node {
[4, message], [4, message],
]); ]);
innerMap.set(nodeId.toString(), locationMap); innerMap.set(nodeId, locationMap);
await config.cacheDb.put( await config.cacheDb.put(
stringifyHash(hash), stringifyHash(hash),
new Packer().pack(map).takeBytes(), new Packer().pack(map).takeBytes(),
); );
} }
async downloadBytesByHash(hash: Multihash): Promise<Uint8Array> {
const dlUriProvider = new StorageLocationProvider(this, hash, [
storageLocationTypeFull,
storageLocationTypeFile,
]);
dlUriProvider.start();
let retryCount = 0;
while (true) {
const dlUri = await dlUriProvider.next();
this.logger.verbose(`[try] ${dlUri.location.bytesUrl}`);
try {
const res = await axios.get(dlUri.location.bytesUrl, {
timeout: 30000, // Adjust timeout as needed
});
// Assuming rust.hashBlake3 and areBytesEqual are available functions
const resHash = blake3(res.data);
if (!equalBytes(hash.hashBytes, resHash)) {
throw new Error("Integrity verification failed");
}
dlUriProvider.upvote(dlUri);
return res.data;
} catch (error) {
this.logger.catched(error);
dlUriProvider.downvote(dlUri);
}
retryCount++;
if (retryCount > 32) {
throw new Error("Too many retries");
}
}
}
async getMetadataByCID(cid: CID): Promise<Metadata> {
const hash = cid.hash;
let metadata: Metadata;
if (this.metadataCache.has(hash)) {
metadata = this.metadataCache.get(hash)!;
} else {
const bytes = await this.downloadBytesByHash(hash);
switch (cid.type) {
case CID_TYPES.METADATA_MEDIA:
metadata = await deserializeMediaMetadata(bytes);
break;
case CID_TYPES.METADATA_WEBAPP:
metadata = await deserializeWebAppMetadata(bytes);
break;
case CID_TYPES.BRIDGE:
metadata = await deserializeMediaMetadata(bytes);
break;
default:
throw new Error("Unsupported metadata format");
}
this.metadataCache.set(hash, metadata);
}
return metadata;
}
} }
export function stringifyBytes(data: Uint8Array) { export function stringifyBytes(data: Uint8Array) {

View File

@ -0,0 +1,3 @@
/**
 * Base class for all metadata formats (media, web app, directory, …).
 * Concrete subclasses implement `toJson` to expose a plain-object view
 * of their contents suitable for JSON serialization.
 */
export default abstract class Metadata {
  /** Plain-object representation of this metadata for JSON output. */
  abstract toJson(): { [key: string]: any };
}

View File

@ -0,0 +1,436 @@
import Metadata from "#serialization/metadata/base.js";
import Packer from "#serialization/pack.js";
import { METADATA_TYPES, metadataMagicByte } from "#constants.js";
import Unpacker from "#serialization/unpack.js";
import ExtraMetadata from "#serialization/metadata/extra.js";
import { Buffer } from "buffer";
import CID from "#cid.js";
import { Multihash } from "#multihash.js";
import { base64url } from "multiformats/bases/base64";
import EncryptedCID from "#encrypted_cid.js";
export default class DirectoryMetadata extends Metadata {
details: DirectoryMetadataDetails;
directories: { [key: string]: DirectoryReference };
files: { [key: string]: FileReference };
extraMetadata: ExtraMetadata;
constructor(
details: DirectoryMetadataDetails,
directories: { [key: string]: DirectoryReference },
files: { [key: string]: FileReference },
extraMetadata: ExtraMetadata,
) {
super();
this.details = details;
this.directories = directories;
this.files = files;
this.extraMetadata = extraMetadata;
}
serialize(): Uint8Array {
const p = new Packer();
p.packInt(metadataMagicByte);
p.packInt(METADATA_TYPES.DIRECTORY);
p.packListLength(4);
p.pack(this.details.data);
p.packMapLength(Object.keys(this.directories).length);
Object.entries(this.directories).forEach(([key, value]) => {
p.packString(key);
p.pack(value.encode());
});
p.packMapLength(Object.keys(this.files).length);
Object.entries(this.files).forEach(([key, value]) => {
p.packString(key);
p.pack(value.encode());
});
p.pack(this.extraMetadata.data);
return p.takeBytes();
}
toJson(): { [key: string]: any } {
return {
type: "directory",
details: this.details,
directories: this.directories,
files: this.files,
extraMetadata: this.extraMetadata,
};
}
static deserialize(bytes: Uint8Array): DirectoryMetadata {
const u = new Unpacker(Buffer.from(bytes));
const magicByte = u.unpackInt();
if (magicByte !== metadataMagicByte) {
throw new Error("Invalid metadata: Unsupported magic byte");
}
const typeAndVersion = u.unpackInt();
if (typeAndVersion !== METADATA_TYPES.DIRECTORY) {
throw new Error("Invalid metadata: Wrong metadata type");
}
u.unpackListLength();
const dir = new DirectoryMetadata(
new DirectoryMetadataDetails(u.unpackMap()),
{},
{},
new ExtraMetadata({}),
);
const dirCount = u.unpackMapLength();
for (let i = 0; i < dirCount; i++) {
const key = u.unpackString();
dir.directories[key as string] = DirectoryReference.decode(u.unpackMap());
}
const fileCount = u.unpackMapLength();
for (let i = 0; i < fileCount; i++) {
const key = u.unpackString();
dir.files[key as string] = FileReference.decode(u.unpackMap());
}
Object.assign(dir.extraMetadata.data, u.unpackMap());
return dir;
}
}
/**
 * Detail fields of a directory, stored as a Map keyed by numeric field
 * ids. Key 3 holds the sharing state: an object whose key 1 is
 * read-only sharing and key 2 is read-write sharing.
 */
class DirectoryMetadataDetails {
  data: Map<number, any>;

  constructor(data: Map<number, any> | object) {
    // Normalize plain objects into a Map keyed by numeric ids. The old
    // check (`data instanceof Map && typeof data == "object"`) matched
    // Maps instead of plain objects (a Map IS an object) and produced
    // an array of pairs rather than a Map, so `this.data` never
    // supported has/get/set for either input kind.
    if (!(data instanceof Map)) {
      data = new Map(
        Object.entries(data).map(
          ([key, value]) => [Number(key), value] as [number, any],
        ),
      );
    }
    this.data = data as Map<number, any>;
  }

  /** True when any sharing state has been recorded (field 3 present). */
  get isShared(): boolean {
    return this.data.has(3);
  }

  /** True when shared read-only (field 3, sub-key 1). */
  get isSharedReadOnly(): boolean {
    return this.data.get(3)?.[1] ?? false;
  }

  /** True when shared read-write (field 3, sub-key 2). */
  get isSharedReadWrite(): boolean {
    return this.data.get(3)?.[2] ?? false;
  }

  /**
   * Record sharing state.
   * @param value new flag value
   * @param write true targets the read-write flag, false the read-only flag
   */
  setShared(value: boolean, write: boolean): void {
    if (!this.data.has(3)) {
      this.data.set(3, {});
    }
    this.data.get(3)[write ? 2 : 1] = value;
  }

  /** Convert the numeric-keyed Map to a JSON-friendly plain object. */
  toJson(): { [key: string]: any } {
    const jsonObject: { [key: string]: any } = {};
    this.data.forEach((value, key) => {
      jsonObject[key.toString()] = value;
    });
    return jsonObject;
  }
}
/**
 * Reference to a child directory inside a DirectoryMetadata. Persisted
 * fields are (de)serialized via encode/decode with numeric keys; uri,
 * key and size are transient, for internal operations only.
 */
class DirectoryReference {
  created: number;
  name: string;
  encryptedWriteKey: Uint8Array;
  publicKey: Uint8Array;
  encryptionKey: Uint8Array | null;
  ext: { [key: string]: any } | null;
  uri: string | null; // For internal operations
  key: string | null; // For internal operations
  size: number | null; // For internal operations

  constructor(
    created: number,
    name: string,
    encryptedWriteKey: Uint8Array,
    publicKey: Uint8Array,
    encryptionKey: Uint8Array | null,
    ext: { [key: string]: any } | null,
  ) {
    this.created = created;
    this.name = name;
    this.encryptedWriteKey = encryptedWriteKey;
    this.publicKey = publicKey;
    this.encryptionKey = encryptionKey;
    this.ext = ext;
    this.uri = null;
    this.key = null;
    this.size = null;
  }

  /** JSON view; binary fields are base64url-encoded. */
  toJson(): { [key: string]: any } {
    const encodedEncryptionKey =
      this.encryptionKey === null
        ? null
        : base64url.encode(this.encryptionKey);
    return {
      name: this.name,
      created: this.created,
      publicKey: base64url.encode(this.publicKey),
      encryptedWriteKey: base64url.encode(this.encryptedWriteKey),
      encryptionKey: encodedEncryptionKey,
      ext: this.ext,
    };
  }

  /** Rebuild from the numeric-keyed form produced by encode(). */
  static decode(data: { [key: number]: any }): DirectoryReference {
    const ext = data[6] ? (data[6] as { [key: string]: any }) : null;
    return new DirectoryReference(
      data[2], // created
      data[1], // name
      data[4], // encryptedWriteKey
      data[3], // publicKey
      data[5], // encryptionKey (may be absent)
      ext,
    );
  }

  /** Numeric-keyed form for serialization; optional fields omitted when null. */
  encode(): { [key: number]: any } {
    const encoded: { [key: number]: any } = {
      1: this.name,
      2: this.created,
      3: this.publicKey,
      4: this.encryptedWriteKey,
    };
    if (this.encryptionKey !== null) {
      encoded[5] = this.encryptionKey;
    }
    if (this.ext !== null) {
      encoded[6] = this.ext;
    }
    return encoded;
  }
}
/**
 * Reference to a file inside a DirectoryMetadata: current FileVersion,
 * optional per-version history, and bookkeeping fields. uri and key are
 * transient, for internal operations only.
 */
class FileReference {
  created: number;
  file: FileVersion;
  history: Map<number, FileVersion> | null;
  mimeType: string | null;
  name: string;
  version: number;
  ext: { [key: string]: any } | null;
  uri: string | null; // For internal operations
  key: string | null; // For internal operations

  constructor(
    name: string,
    created: number,
    version: number,
    file: FileVersion,
    ext: { [key: string]: any } | null = null,
    history: Map<number, FileVersion> | null = null,
    mimeType: string | null = null,
  ) {
    this.name = name;
    this.created = created;
    this.version = version;
    this.file = file;
    this.ext = ext;
    this.history = history;
    this.mimeType = mimeType;
    this.uri = null;
    this.key = null;
  }

  /** Modification time, taken from the current version's timestamp. */
  get modified(): number {
    return this.file.ts;
  }

  /** JSON view; history versions are flattened to their JSON forms. */
  toJson(): { [key: string]: any } {
    const historyJson = this.history
      ? Array.from(this.history.values()).map((fv) => fv.toJson())
      : null;
    return {
      name: this.name,
      created: this.created,
      modified: this.modified,
      version: this.version,
      mimeType: this.mimeType,
      file: this.file.toJson(),
      ext: this.ext,
      history: historyJson,
    };
  }

  /** Rebuild from the numeric-keyed form produced by encode(). */
  static decode(data: { [key: number]: any }): FileReference {
    let history: Map<number, FileVersion> | null = null;
    const rawHistory = data[8] as { [key: number]: any } | undefined;
    if (rawHistory) {
      history = new Map<number, FileVersion>();
      for (const [versionKey, versionData] of Object.entries(rawHistory)) {
        history.set(Number(versionKey), FileVersion.decode(versionData));
      }
    }
    return new FileReference(
      data[1], // name
      data[2], // created
      data[5], // version
      FileVersion.decode(data[4]),
      data[7] ? (data[7] as { [key: string]: any }) : null,
      history,
      data[6], // mimeType
    );
  }

  /** Numeric-keyed form for serialization; optional fields omitted when null. */
  encode(): { [key: number]: any } {
    const encoded: { [key: number]: any } = {
      1: this.name,
      2: this.created,
      4: this.file.encode(),
      5: this.version,
    };
    if (this.mimeType !== null) {
      encoded[6] = this.mimeType;
    }
    if (this.ext !== null) {
      encoded[7] = this.ext;
    }
    if (this.history !== null) {
      const historyObj: { [key: number]: any } = {};
      for (const [version, fv] of this.history.entries()) {
        historyObj[version] = fv.encode();
      }
      encoded[8] = historyObj;
    }
    return encoded;
  }
}
/**
 * One immutable version of a file: a timestamp plus a plaintext CID,
 * an encrypted CID, or both, with an optional thumbnail and extra
 * hashes.
 *
 * NOTE(review): `ext` is stored but never written by encode() nor read
 * by decode() — presumably intentional, but worth confirming.
 */
class FileVersion {
  ts: number;
  encryptedCID?: EncryptedCID;
  plaintextCID?: CID;
  thumbnail?: FileVersionThumbnail;
  hashes?: Multihash[];
  ext?: { [key: string]: any };

  constructor(
    ts: number,
    encryptedCID?: EncryptedCID,
    plaintextCID?: CID,
    thumbnail?: FileVersionThumbnail,
    hashes?: Multihash[],
    ext?: { [key: string]: any },
  ) {
    this.ts = ts;
    this.encryptedCID = encryptedCID;
    this.plaintextCID = plaintextCID;
    this.thumbnail = thumbnail;
    this.hashes = hashes;
    this.ext = ext;
  }

  /**
   * Preferred CID of this version: the plaintext CID when present,
   * otherwise the original CID inside the encrypted CID.
   * NOTE(review): throws if neither CID is set — callers must ensure
   * at least one exists.
   */
  get cid(): CID {
    return this.plaintextCID ?? this.encryptedCID!.originalCID;
  }

  /** Rebuild from the numeric-keyed form produced by encode(). */
  static decode(data: { [key: number]: any }): FileVersion {
    return new FileVersion(
      data[8],
      data[1] == null ? undefined : EncryptedCID.fromBytes(data[1]),
      data[2] == null ? undefined : CID.fromBytes(data[2]),
      data[10] == null ? undefined : FileVersionThumbnail.decode(data[10]),
      // Was `: null`, which is not assignable to `Multihash[] | undefined`
      // under strict null checks; `undefined` preserves runtime behavior
      // (both are falsy for the `!!this.hashes` / `?.` uses below).
      data[9] ? data[9].map((e: any) => new Multihash(e)) : undefined,
    );
  }

  /** Numeric-keyed form for serialization; unset fields omitted. */
  encode(): { [key: number]: any } {
    const data: { [key: number]: any } = { 8: this.ts };
    if (!!this.encryptedCID) {
      data[1] = this.encryptedCID.toBytes();
    }
    if (!!this.plaintextCID) {
      data[2] = this.plaintextCID.toBytes();
    }
    if (!!this.hashes) {
      data[9] = this.hashes.map((e) => e.fullBytes);
    }
    if (!!this.thumbnail) {
      data[10] = this.thumbnail.encode();
    }
    return data;
  }

  /** JSON view; CIDs rendered as base58, hashes as base64url. */
  toJson(): { [key: string]: any } {
    return {
      ts: this.ts,
      encryptedCID: this.encryptedCID?.toBase58(),
      cid: this.cid.toBase58(),
      hashes: this.hashes?.map((e) => e.toBase64Url()),
      thumbnail: this.thumbnail?.toJson(),
    };
  }
}
/**
 * Thumbnail attached to a FileVersion: encrypted thumbnail CID, aspect
 * ratio, optional image type (defaults to "webp") and optional
 * thumbhash bytes.
 */
class FileVersionThumbnail {
  imageType: string | null;
  aspectRatio: number;
  cid: EncryptedCID;
  thumbhash: Uint8Array | null;

  constructor(
    imageType: string | null,
    aspectRatio: number,
    cid: EncryptedCID,
    thumbhash: Uint8Array | null,
  ) {
    this.imageType = imageType || "webp"; // Default to 'webp' if not provided
    this.aspectRatio = aspectRatio;
    this.cid = cid;
    this.thumbhash = thumbhash;
  }

  /** JSON view; cid rendered as base58, thumbhash as base64url. */
  toJson(): { [key: string]: any } {
    const encodedThumbhash =
      this.thumbhash === null ? null : base64url.encode(this.thumbhash);
    return {
      imageType: this.imageType,
      aspectRatio: this.aspectRatio,
      cid: this.cid.toBase58(),
      thumbhash: encodedThumbhash,
    };
  }

  /** Rebuild from the numeric-keyed form produced by encode(). */
  static decode(data: { [key: number]: any }): FileVersionThumbnail {
    return new FileVersionThumbnail(
      data[1], // imageType
      data[2], // aspectRatio
      EncryptedCID.fromBytes(data[3]),
      data[4], // thumbhash
    );
  }

  /** Numeric-keyed form for serialization; null fields omitted. */
  encode(): { [key: number]: any } {
    const encoded: { [key: number]: any } = {
      2: this.aspectRatio,
      3: this.cid.toBytes(),
    };
    if (this.imageType !== null) {
      encoded[1] = this.imageType;
    }
    if (this.thumbhash !== null) {
      encoded[4] = this.thumbhash;
    }
    return encoded;
  }
}

View File

@ -0,0 +1,60 @@
import {
metadataExtensionBasicMediaMetadata,
metadataExtensionBridge,
metadataExtensionCategories,
metadataExtensionDonationKeys,
metadataExtensionLanguages,
metadataExtensionLicenses,
metadataExtensionOriginalTimestamp,
metadataExtensionPreviousVersions,
metadataExtensionRoutingHints,
metadataExtensionSourceUris,
metadataExtensionTags,
metadataExtensionTimestamp,
metadataExtensionUpdateCID,
metadataExtensionViewTypes,
metadataExtensionWikidataClaims,
} from "#constants.js";
import CID from "#cid.js";
/**
 * Free-form metadata extensions, stored as a Map from extension id
 * (the metadataExtension* constants) to extension value.
 */
export default class ExtraMetadata {
  data: Map<number, any>;

  constructor(data: Map<number, any> | object) {
    // Normalize plain objects into a Map keyed by numeric extension
    // ids. The old check (`data instanceof Map && typeof data ==
    // "object"`) matched Maps instead of plain objects (a Map IS an
    // object) and produced an array of pairs rather than a Map, so
    // `this.data` never behaved like a Map for either input kind.
    if (!(data instanceof Map)) {
      data = new Map(
        Object.entries(data).map(
          ([key, value]) => [Number(key), value] as [number, any],
        ),
      );
    }
    this.data = data as Map<number, any>;
  }

  /**
   * JSON view keyed by human-readable extension names; updateCID is
   * decoded from bytes to its string form.
   * NOTE(review): an extension id missing from `names` maps to the key
   * "undefined" — confirm whether unknown ids should be skipped instead.
   */
  toJson(): { [key: string]: any } {
    const map: { [key: string]: any } = {};
    const names: { [key: number]: string } = {
      [metadataExtensionLicenses]: "licenses",
      [metadataExtensionDonationKeys]: "donationKeys",
      [metadataExtensionWikidataClaims]: "wikidataClaims",
      [metadataExtensionLanguages]: "languages",
      [metadataExtensionSourceUris]: "sourceUris",
      // metadataExtensionUpdateCID: 'updateCID',
      [metadataExtensionPreviousVersions]: "previousVersions",
      [metadataExtensionTimestamp]: "timestamp",
      [metadataExtensionOriginalTimestamp]: "originalTimestamp",
      [metadataExtensionTags]: "tags",
      [metadataExtensionCategories]: "categories",
      [metadataExtensionBasicMediaMetadata]: "basicMediaMetadata",
      [metadataExtensionViewTypes]: "viewTypes",
      [metadataExtensionBridge]: "bridge",
      [metadataExtensionRoutingHints]: "routingHints",
    };
    this.data.forEach((value, key) => {
      if (key === metadataExtensionUpdateCID) {
        map["updateCID"] = CID.fromBytes(value).toString();
      } else {
        map[names[key]] = value;
      }
    });
    return map;
  }
}

View File

@ -0,0 +1,525 @@
import Metadata from "#serialization/metadata/base.js";
import CID from "#cid.js";
import {
CID_HASH_TYPES,
METADATA_TYPES,
metadataMagicByte,
metadataMediaDetailsDuration,
metadataMediaDetailsIsLive,
metadataProofTypeSignature,
parentLinkTypeUserIdentity,
} from "#constants.js";
import ExtraMetadata from "#serialization/metadata/extra.js";
import { MetadataParentLink } from "#serialization/metadata/parent.js";
import { Multihash } from "#multihash.js";
import { decodeEndian, encodeEndian } from "#util.js";
import Unpacker from "#serialization/unpack.js";
import { Buffer } from "buffer";
import { blake3 } from "@noble/hashes/blake3";
import { ed25519 } from "@noble/curves/ed25519";
import KeyPairEd25519 from "#ed25519.js";
import Packer from "#serialization/pack.js";
/**
 * Metadata for media content: display name, per-type media formats,
 * parent links, detail fields, optional links and extra metadata.
 */
export default class MediaMetadata extends Metadata {
  name: string;
  mediaTypes: { [key: string]: MediaFormat[] };
  parents: MetadataParentLink[];
  details: MediaMetadataDetails;
  links: MediaMetadataLinks | null;
  extraMetadata: ExtraMetadata;

  constructor(
    name: string,
    details: MediaMetadataDetails,
    parents: MetadataParentLink[],
    mediaTypes: { [key: string]: MediaFormat[] },
    links: MediaMetadataLinks | null,
    extraMetadata: ExtraMetadata,
  ) {
    super();
    this.name = name;
    this.details = details;
    this.parents = parents;
    this.mediaTypes = mediaTypes;
    this.links = links;
    this.extraMetadata = extraMetadata;
  }

  /** Plain-object view for JSON output, tagged with type "media". */
  toJson(): { [key: string]: any } {
    const json: { [key: string]: any } = { type: "media" };
    json["name"] = this.name;
    json["details"] = this.details;
    json["parents"] = this.parents;
    json["mediaTypes"] = this.mediaTypes;
    json["links"] = this.links;
    json["extraMetadata"] = this.extraMetadata;
    return json;
  }
}
/**
 * Pagination/link structure for media metadata: a `head` list of CIDs,
 * plus optional `collapsed` and `tail` lists. Serialized as a map with
 * numeric keys 1 (count), 2 (head), 3 (collapsed), 4 (tail), where the
 * CID lists are stored as raw byte arrays.
 */
class MediaMetadataLinks {
  count: number;
  head: CID[];
  collapsed: CID[] | null;
  tail: CID[] | null;
  constructor(head: CID[]) {
    this.head = head;
    this.count = head.length;
    this.collapsed = null;
    this.tail = null;
  }
  /** JSON view with CIDs rendered as strings; omits absent lists. */
  toJson(): { [key: string]: any } {
    const map: { [key: string]: any } = {
      count: this.count,
      head: this.head.map((e) => e.toString()),
    };
    if (this.collapsed != null) {
      map["collapsed"] = this.collapsed.map((e) => e.toString());
    }
    if (this.tail != null) {
      map["tail"] = this.tail.map((e) => e.toString());
    }
    return map;
  }
  /** Builds an instance from the numeric-keyed wire map (inverse of encode). */
  static decode(links: { [key: number]: any }): MediaMetadataLinks {
    const count = links[1] as number;
    const head = (links[2] as Uint8Array[]).map((bytes) =>
      CID.fromBytes(bytes),
    );
    const collapsed = links[3]
      ? (links[3] as Uint8Array[]).map((bytes) => CID.fromBytes(bytes))
      : null;
    const tail = links[4]
      ? (links[4] as Uint8Array[]).map((bytes) => CID.fromBytes(bytes))
      : null;
    const instance = new MediaMetadataLinks(head);
    instance.count = count;
    instance.collapsed = collapsed;
    instance.tail = tail;
    return instance;
  }
  /**
   * Encodes this structure as the numeric-keyed wire map.
   * decode() reads keys 2-4 as Uint8Array[] and calls CID.fromBytes, so the
   * CID lists must be serialized to bytes here (previously the CID objects
   * were stored directly, breaking the encode/decode round trip).
   */
  encode(): { [key: number]: any } {
    const data: { [key: number]: any } = {
      1: this.count,
      2: this.head.map((e) => e.toBytes()),
    };
    const addNotNull = (key: number, value: any) => {
      if (value !== null && value !== undefined) {
        data[key] = value;
      }
    };
    addNotNull(3, this.collapsed?.map((e) => e.toBytes()) ?? null);
    addNotNull(4, this.tail?.map((e) => e.toBytes()) ?? null);
    return data;
  }
}
/**
 * Thin wrapper around the raw numeric-keyed details map of media
 * metadata, with convenience accessors for known keys.
 */
class MediaMetadataDetails {
  data: { [key: number]: any };

  /** @param data raw numeric-keyed details map as produced by the unpacker */
  constructor(data: { [key: number]: any }) {
    this.data = data;
  }

  /** JSON view with known numeric keys translated to readable names. */
  toJson(): { [key: string]: any } {
    const fieldNames: { [key: number]: string } = {
      [metadataMediaDetailsDuration]: "duration",
      [metadataMediaDetailsIsLive]: "live",
    };
    const result: { [key: string]: any } = {};
    for (const [rawKey, value] of Object.entries(this.data)) {
      result[fieldNames[Number(rawKey)]] = value;
    }
    return result;
  }

  /** Media duration, or null/undefined when not present in the map. */
  get duration(): number | null {
    return this.data[metadataMediaDetailsDuration];
  }

  /** True when the media is flagged as a live stream. */
  get isLive(): boolean {
    return Boolean(this.data[metadataMediaDetailsIsLive]);
  }
}
/**
 * One concrete representation of a media object (for example a single
 * video resolution, an audio track, a thumbnail or a subtitle file).
 * Serialized as a map with fixed numeric keys — see decode()/encode();
 * keys 16 and 17 (abr/vbr) are reserved but currently unused.
 */
class MediaFormat {
  // Format identity and container info.
  subtype: string;
  role: string | null;
  ext: string | null;
  // Content reference; null for inline values stored in `value`.
  cid: CID | null;
  // Video properties.
  height: number | null;
  width: number | null;
  languages: string[] | null;
  asr: number | null;
  fps: number | null;
  bitrate: number | null;
  audioChannels: number | null;
  vcodec: string | null;
  acodec: string | null;
  container: string | null;
  dynamicRange: string | null;
  // Inline payload (e.g. small text content) and its charset.
  charset: string | null;
  value: Uint8Array | null;
  duration: number | null;
  // Grid/segment layout fields (presumably for thumbnails/storyboards —
  // TODO confirm against the format spec).
  rows: number | null;
  columns: number | null;
  index: number | null;
  initRange: string | null;
  indexRange: string | null;
  caption: string | null;
  constructor(
    subtype: string,
    role: string | null,
    ext: string | null,
    cid: CID | null,
    height: number | null,
    width: number | null,
    languages: string[] | null,
    asr: number | null,
    fps: number | null,
    bitrate: number | null,
    audioChannels: number | null,
    vcodec: string | null,
    acodec: string | null,
    container: string | null,
    dynamicRange: string | null,
    charset: string | null,
    value: Uint8Array | null,
    duration: number | null,
    rows: number | null,
    columns: number | null,
    index: number | null,
    initRange: string | null,
    indexRange: string | null,
    caption: string | null,
  ) {
    this.subtype = subtype;
    this.role = role;
    this.ext = ext;
    this.cid = cid;
    this.height = height;
    this.width = width;
    this.languages = languages;
    this.asr = asr;
    this.fps = fps;
    this.bitrate = bitrate;
    this.audioChannels = audioChannels;
    this.vcodec = vcodec;
    this.acodec = acodec;
    this.container = container;
    this.dynamicRange = dynamicRange;
    this.charset = charset;
    this.value = value;
    this.duration = duration;
    this.rows = rows;
    this.columns = columns;
    this.index = index;
    this.initRange = initRange;
    this.indexRange = indexRange;
    this.caption = caption;
  }
  /** Inline `value` payload decoded as UTF-8 text, or null when unset. */
  get valueAsString(): string | null {
    if (this.value === null) {
      return null;
    }
    return new TextDecoder().decode(this.value);
  }
  /**
   * Builds a MediaFormat from its numeric-keyed wire map
   * (inverse of encode()). Missing keys become undefined/null fields.
   */
  static decode(data: { [key: number]: any }): MediaFormat {
    return new MediaFormat(
      data[2], // subtype
      data[3], // role
      data[4], // ext
      data[1] == null ? null : CID.fromBytes(Uint8Array.from(data[1])),
      data[10], // height
      data[11], // width
      data[12] ? (data[12] as string[]) : null, // languages
      data[13], // asr
      data[14], // fps
      data[15], // bitrate
      data[18], // audioChannels
      data[19], // vcodec
      data[20], // acodec
      data[21], // container
      data[22], // dynamicRange
      data[23], // charset
      data[24] == null ? null : Uint8Array.from(data[24]), // value
      data[25], // duration
      data[26], // rows
      data[27], // columns
      data[28], // index
      data[29], // initRange
      data[30], // indexRange
      data[31], // caption
    );
  }
  /**
   * Encodes this format as its numeric-keyed wire map, omitting
   * null/undefined fields (inverse of decode()).
   */
  encode(): { [key: number]: any } {
    const data: { [key: number]: any } = {};
    // Only emit keys that actually carry a value.
    const addNotNull = (key: number, value: any) => {
      if (value !== null && value !== undefined) {
        data[key] = value;
      }
    };
    addNotNull(1, this.cid?.toBytes());
    addNotNull(2, this.subtype);
    addNotNull(3, this.role);
    addNotNull(4, this.ext);
    addNotNull(10, this.height);
    addNotNull(11, this.width);
    addNotNull(12, this.languages);
    addNotNull(13, this.asr);
    addNotNull(14, this.fps);
    addNotNull(15, this.bitrate);
    // Keys 16/17 (abr/vbr) are reserved and intentionally not emitted.
    // addNotNull(16, this.abr);
    // addNotNull(17, this.vbr);
    addNotNull(18, this.audioChannels);
    addNotNull(19, this.vcodec);
    addNotNull(20, this.acodec);
    addNotNull(21, this.container);
    addNotNull(22, this.dynamicRange);
    addNotNull(23, this.charset);
    addNotNull(24, this.value);
    addNotNull(25, this.duration);
    addNotNull(26, this.rows);
    addNotNull(27, this.columns);
    addNotNull(28, this.index);
    addNotNull(29, this.initRange);
    addNotNull(30, this.indexRange);
    addNotNull(31, this.caption);
    return data;
  }
  /** JSON view with string keys; null/undefined fields are omitted. */
  toJson(): { [key: string]: any } {
    const data: { [key: string]: any } = {};
    const addNotNull = (key: string, value: any) => {
      if (value !== null && value !== undefined) {
        data[key] = value;
      }
    };
    addNotNull("cid", this.cid?.toBase64Url());
    addNotNull("subtype", this.subtype);
    addNotNull("role", this.role);
    addNotNull("ext", this.ext);
    addNotNull("height", this.height);
    addNotNull("width", this.width);
    addNotNull("languages", this.languages);
    addNotNull("asr", this.asr);
    addNotNull("fps", this.fps);
    addNotNull("bitrate", this.bitrate);
    // addNotNull('abr', this.abr);
    // addNotNull('vbr', this.vbr);
    addNotNull("audioChannels", this.audioChannels);
    addNotNull("vcodec", this.vcodec);
    addNotNull("acodec", this.acodec);
    addNotNull("container", this.container);
    addNotNull("dynamicRange", this.dynamicRange);
    addNotNull("charset", this.charset);
    addNotNull("value", this.value ? this.valueAsString : null); // render value bytes as UTF-8 via the valueAsString getter
    addNotNull("duration", this.duration);
    addNotNull("rows", this.rows);
    addNotNull("columns", this.columns);
    addNotNull("index", this.index);
    addNotNull("initRange", this.initRange);
    addNotNull("indexRange", this.indexRange);
    addNotNull("caption", this.caption);
    return data;
  }
}
/**
 * Deserializes media metadata bytes into a MediaMetadata instance.
 *
 * Two layouts are supported:
 *  - bare body:     [magic byte, METADATA_TYPES.MEDIA, ...msgpack body]
 *  - proof-wrapped: [magic byte, METADATA_TYPES.PROOF, 2-byte proof-section
 *    length, proofs..., body]; each ed25519 signature proof is verified
 *    against the blake3 hash of the body, and verified public keys are
 *    recorded so matching parent links can be marked as signed.
 *
 * @param bytes raw serialized metadata
 * @returns the parsed MediaMetadata
 * @throws on an unsupported magic byte/type, unsupported hash or key
 *         types, a malformed public key, or an invalid signature
 */
export async function deserialize(bytes: Uint8Array): Promise<MediaMetadata> {
  const magicByte = bytes[0];
  if (magicByte !== metadataMagicByte) {
    throw new Error("Invalid metadata: Unsupported magic byte");
  }
  const typeAndVersion = bytes[1];
  let bodyBytes: Uint8Array;
  // Public keys whose signatures over the body verified successfully.
  const provenPubKeys: Multihash[] = [];
  if (typeAndVersion === METADATA_TYPES.PROOF) {
    const proofSectionLength = decodeEndian(bytes.subarray(2, 4));
    // Body (including the msgpack'd type int) starts after the proofs.
    bodyBytes = bytes.subarray(4 + proofSectionLength);
    if (proofSectionLength > 0) {
      const proofUnpacker = new Unpacker(
        Buffer.from(bytes.subarray(4, 4 + proofSectionLength)),
      );
      // Multihash-style digest of the body: BLAKE3 type byte + hash bytes.
      const b3hash = Uint8Array.from([
        CID_HASH_TYPES.BLAKE3,
        ...blake3(bodyBytes),
      ]);
      const proofCount = proofUnpacker.unpackListLength();
      for (let i = 0; i < proofCount; i++) {
        const parts = proofUnpacker.unpackList();
        const proofType = parts[0] as number;
        if (proofType === metadataProofTypeSignature) {
          const mhashType = parts[1] as number;
          const pubkey = parts[2] as Uint8Array;
          const signature = parts[3] as Uint8Array;
          if (mhashType !== CID_HASH_TYPES.BLAKE3) {
            throw new Error(`Hash type ${mhashType} not supported`);
          }
          // Public key is a 1-byte type prefix + 32-byte ed25519 key.
          if (pubkey[0] !== CID_HASH_TYPES.ED25519) {
            throw new Error("Only ed25519 keys are supported");
          }
          if (pubkey.length !== 33) {
            throw new Error("Invalid userId");
          }
          const isValid = await ed25519.verify(
            signature,
            b3hash,
            pubkey.subarray(1),
          );
          if (!isValid) {
            throw new Error("Invalid signature found");
          }
          provenPubKeys.push(new Multihash(pubkey));
        } else {
          // Unsupported proof type: skipped rather than treated as an error.
        }
      }
    }
  } else if (typeAndVersion === METADATA_TYPES.MEDIA) {
    bodyBytes = bytes.subarray(1);
  } else {
    throw new Error(`Invalid metadata: Unsupported type ${typeAndVersion}`);
  }
  // Start of body section
  const u = new Unpacker(Buffer.from(bodyBytes));
  const type = u.unpackInt();
  if (type !== METADATA_TYPES.MEDIA) {
    throw new Error(`Invalid metadata: Unsupported type ${type}`);
  }
  u.unpackListLength();
  const name = u.unpackString();
  const details = new MediaMetadataDetails(u.unpackMap());
  const parents: MetadataParentLink[] = [];
  const userCount = u.unpackListLength();
  for (let i = 0; i < userCount; i++) {
    const m = u.unpackMap();
    const cid = CID.fromBytes(m[1] as Uint8Array);
    parents.push(
      new MetadataParentLink(
        cid,
        (m[0] ?? parentLinkTypeUserIdentity) as number,
        m[2],
        // Marked signed when a verified proof key matches the parent's hash.
        provenPubKeys.some((pk) => pk.equals(cid.hash)),
      ),
    );
  }
  const mediaTypesMap = u.unpackMap() as Record<string, any>;
  const mediaTypes: Record<string, MediaFormat[]> = {};
  Object.entries(mediaTypesMap).forEach(([type, formats]) => {
    mediaTypes[type] = formats.map((e: any) =>
      MediaFormat.decode(e as Record<number, any>),
    );
  });
  const links = u.unpackMap();
  const extraMetadata = u.unpackMap();
  return new MediaMetadata(
    name || "",
    details,
    parents,
    mediaTypes,
    // unpackMap returns a plain object (it is indexed as m[1] and iterated
    // with Object.entries above), so the previous Map-style `links.size`
    // check was always undefined and silently dropped all links; count the
    // object's keys instead.
    Object.keys(links).length > 0 ? MediaMetadataLinks.decode(links) : null,
    new ExtraMetadata(extraMetadata),
  );
}
/**
 * Serializes media metadata, optionally wrapping it in a proof section
 * signed by the given ed25519 key pairs (inverse of deserialize above).
 *
 * @param m        the media metadata to serialize
 * @param keyPairs keys to sign the body with; when empty, a bare
 *                 (unsigned) layout is produced
 * @returns the serialized metadata bytes
 */
export async function serialize(
  m: MediaMetadata,
  keyPairs: KeyPairEd25519[] = [],
): Promise<Uint8Array> {
  const c = new Packer();
  c.packInt(METADATA_TYPES.MEDIA);
  // Body is a 6-element list: name, details, parents, mediaTypes, links,
  // extraMetadata — matching the read order in deserialize.
  c.packListLength(6);
  c.packString(m.name);
  c.pack(m.details.data);
  c.packListLength(m.parents.length);
  for (const parent of m.parents) {
    // NOTE(review): parent.role (read back as m[2] in deserialize) is never
    // written here — confirm whether roles are meant to survive a round trip.
    c.pack({ 0: parent.type, 1: parent.cid.toBytes() });
  }
  c.packMapLength(Object.keys(m.mediaTypes).length);
  for (const [key, value] of Object.entries(m.mediaTypes)) {
    c.packString(key);
    // NOTE(review): this packs MediaFormat class instances directly, but the
    // decoder expects the numeric-keyed maps produced by MediaFormat.encode()
    // — unless Packer special-cases these objects, this likely should be
    // c.pack(value.map((e) => e.encode())). TODO confirm Packer semantics.
    c.pack(value);
  }
  if (m.links === null) {
    c.packMapLength(0);
  } else {
    c.pack(m.links.encode());
  }
  c.pack(m.extraMetadata.data);
  const bodyBytes = c.takeBytes();
  // Unsigned layout: just the magic byte followed by the body.
  if (keyPairs.length === 0) {
    return Uint8Array.from([metadataMagicByte, ...bodyBytes]);
  }
  // Multihash-style digest of the body: BLAKE3 type byte + hash bytes.
  const b3hash = Uint8Array.from([CID_HASH_TYPES.BLAKE3, ...blake3(bodyBytes)]);
  const proofPacker = new Packer();
  proofPacker.packListLength(keyPairs.length);
  for (const kp of keyPairs) {
    const signature = await ed25519.sign(b3hash, kp.extractBytes());
    proofPacker.pack([
      metadataProofTypeSignature,
      CID_HASH_TYPES.BLAKE3,
      kp.publicKey,
      signature,
    ]);
  }
  const proofBytes = proofPacker.takeBytes();
  // Signed layout: magic byte, PROOF type, 2-byte proof length, proofs, body.
  const header = [
    metadataMagicByte,
    METADATA_TYPES.PROOF,
    ...encodeEndian(proofBytes.length, 2),
  ];
  return Uint8Array.from([...header, ...proofBytes, ...bodyBytes]);
}

View File

@ -0,0 +1,34 @@
import CID from "#cid.js";
import { parentLinkTypeUserIdentity } from "#constants.js";
/**
 * A link from a metadata object to one of its parents (by default a
 * user identity), with an optional role and a flag recording whether a
 * verified signature proof covered this parent.
 */
export class MetadataParentLink {
  /**
   * @param cid    CID of the parent object
   * @param type   link type constant (defaults to user identity)
   * @param role   optional role string, or null
   * @param signed whether a verified proof matched this parent
   */
  constructor(
    public cid: CID,
    public type: number = parentLinkTypeUserIdentity,
    public role: string | null = null,
    public signed: boolean = false,
  ) {}

  /** Plain-object view; `role` is omitted when null. */
  toJson(): { [key: string]: any } {
    const json: { [key: string]: any } = {
      cid: this.cid.toString(),
      type: this.type,
    };
    if (this.role !== null) {
      json["role"] = this.role;
    }
    json["signed"] = this.signed;
    return json;
  }
}

View File

@ -0,0 +1,39 @@
import CID from "#cid.js";
/**
 * Metadata describing a user identity: creation details, the user's
 * public signing and encryption keys, and numeric-keyed links to other
 * objects.
 */
export default class UserIdentityMetadata {
  // Optional identity CID; not set by the constructor.
  userID?: CID;

  /**
   * @param details        creation details of the identity
   * @param signingKeys    public keys used for signing
   * @param encryptionKeys public keys used for encryption
   * @param links          numeric-keyed links to related CIDs
   */
  constructor(
    public details: UserIdentityMetadataDetails,
    public signingKeys: UserIdentityPublicKey[],
    public encryptionKeys: UserIdentityPublicKey[],
    public links: Map<number, CID>,
  ) {}
}
/** Creation details of a user identity: timestamp and creating agent. */
class UserIdentityMetadataDetails {
  /**
   * @param created   creation timestamp
   * @param createdBy identifier of the creating application/agent
   */
  constructor(
    public created: number,
    public createdBy: string,
  ) {}
}
/** Wrapper for a user identity public key's raw bytes. */
class UserIdentityPublicKey {
  /** @param key raw public key bytes */
  constructor(public key: Uint8Array) {}
}

View File

@ -0,0 +1,114 @@
import Metadata from "#serialization/metadata/base.js";
import ExtraMetadata from "#serialization/metadata/extra.js";
import CID from "#cid.js";
import Unpacker from "#serialization/unpack.js";
import { METADATA_TYPES, metadataMagicByte } from "#constants.js";
import { Buffer } from "buffer";
/**
 * Metadata describing a web app directory: optional name, fallback files
 * to try for unresolved paths, per-status-code error pages, and a map of
 * paths to file references.
 */
export default class WebAppMetadata extends Metadata {
  /**
   * @param name          optional display name
   * @param tryFiles      fallback file names tried for unresolved paths
   * @param errorPages    HTTP status code -> error page path
   * @param paths         path -> file reference for every file in the app
   * @param extraMetadata free-form additional metadata
   */
  constructor(
    public name: string | null,
    public tryFiles: string[],
    public errorPages: Map<number, string>,
    public paths: { [key: string]: WebAppMetadataFileReference },
    public extraMetadata: ExtraMetadata,
  ) {
    super();
  }

  /**
   * Plain-object view tagged with type "web_app"; the errorPages Map is
   * flattened into an object keyed by the status code as a string.
   */
  toJson(): { [key: string]: any } {
    const errorPages: { [key: string]: string } = {};
    for (const [code, page] of this.errorPages) {
      errorPages[code.toString()] = page;
    }
    return {
      type: "web_app",
      name: this.name,
      tryFiles: this.tryFiles,
      errorPages,
      paths: this.paths,
      extraMetadata: this.extraMetadata,
    };
  }
}
/** Reference to one file inside a web app: its CID and content type. */
class WebAppMetadataFileReference {
  /**
   * @param cid         CID of the file content
   * @param contentType MIME type, or null when unknown
   */
  constructor(
    public cid: CID,
    public contentType: string | null,
  ) {}

  /** File size taken from the CID, defaulting to 0 when absent. */
  get size(): number {
    return this.cid.size ?? 0;
  }

  /** Plain-object view with the CID rendered as base64url. */
  toJson(): { [key: string]: any } {
    return {
      cid: this.cid.toBase64Url(),
      contentType: this.contentType,
    };
  }
}
export async function deserialize(bytes: Uint8Array): Promise<WebAppMetadata> {
const u = new Unpacker(Buffer.from(bytes));
const magicByte = u.unpackInt();
if (magicByte !== metadataMagicByte) {
throw new Error("Invalid metadata: Unsupported magic byte");
}
const typeAndVersion = u.unpackInt();
if (typeAndVersion !== METADATA_TYPES.WEBAPP) {
throw new Error("Invalid metadata: Wrong metadata type");
}
u.unpackListLength();
const name = u.unpackString();
const tryFiles = u.unpackList() as string[];
const errorPages = u.unpackMap() as Record<number, string>;
const length = u.unpackListLength();
const paths: Record<string, WebAppMetadataFileReference> = {};
for (let i = 0; i < length; i++) {
u.unpackListLength();
const path = u.unpackString();
const cid = CID.fromBytes(u.unpackBinary());
paths[path as string] = new WebAppMetadataFileReference(
cid,
u.unpackString(),
);
}
const extraMetadata = u.unpackMap() as Record<number, any>;
return new WebAppMetadata(
name,
tryFiles,
new Map<number, string>(
Object.entries(errorPages).map(([key, value]) => [Number(key), value]),
),
paths,
new ExtraMetadata(extraMetadata),
);
}

View File

@ -1,3 +1,7 @@
import { S5Node } from "#node.js";
import { Multihash } from "#multihash.js";
import NodeId from "#nodeId.js";
export default class StorageLocation { export default class StorageLocation {
type: number; type: number;
parts: string[]; parts: string[];
@ -29,3 +33,137 @@ export default class StorageLocation {
}, expiry: ${expiryDate.toISOString()})`; }, expiry: ${expiryDate.toISOString()})`;
} }
} }
/**
 * Resolves storage locations (download sources) for a content hash.
 *
 * start() seeds from the node's cached locations, optionally broadcasts a
 * hash request to the network, then keeps polling the cache and merging
 * newly discovered locations until the timeout elapses. next() hands out
 * the best-scored node's location, waiting for one to become available.
 * start() and next() are expected to run concurrently.
 */
export class StorageLocationProvider {
  private node: S5Node;
  // Content hash whose storage locations are being resolved.
  private hash: Multihash;
  // Accepted storage location types.
  private types: number[];
  private static readonly storageLocationTypeFull: number = 0; // Example value, adjust as necessary
  private readonly timeoutDuration: number = 60000; // Duration in milliseconds
  // Candidate nodes not yet handed out, kept sorted by p2p score.
  private availableNodes: NodeId[] = [];
  // Latest known location per node.
  private uris: Map<NodeId, StorageLocation> = new Map<
    NodeId,
    StorageLocation
  >();
  // Deadline for the current wait; set by start() and refreshed by next().
  private timeout?: Date;
  private isTimedOut: boolean = false;
  /**
   * @param node  the S5 node used for cache lookups and p2p requests
   * @param hash  content hash to resolve
   * @param types accepted location types (defaults to full copies)
   */
  constructor(
    node: S5Node,
    hash: Multihash,
    types: number[] = [StorageLocationProvider.storageLocationTypeFull],
  ) {
    this.node = node;
    this.hash = hash;
    this.types = types;
  }
  /**
   * Runs the discovery loop; resolves only once the timeout elapses
   * (after which next() will throw when it runs out of candidates).
   */
  async start(): Promise<void> {
    this.uris = new Map(
      await this.node.getCachedStorageLocations(this.hash, this.types),
    );
    this.availableNodes = Array.from(this.uris.keys());
    this.node.services.p2p.sortNodesByScore(this.availableNodes);
    this.timeout = new Date(Date.now() + this.timeoutDuration);
    let requestSent = false;
    while (true) {
      const newUris = new Map(
        await this.node.getCachedStorageLocations(this.hash, this.types),
      );
      // Ask the network at most once, and only when candidates have run
      // out and the cache is nearly empty.
      if (
        this.availableNodes.length === 0 &&
        newUris.size < 2 &&
        !requestSent
      ) {
        this.node.services.p2p.sendHashRequest(this.hash, this.types);
        requestSent = true;
      }
      // Merge newly discovered or changed locations into the candidate set.
      let hasNewNode = false;
      for (const [key, value] of newUris) {
        // NOTE(review): `!==` is reference inequality — if the cache builds
        // fresh StorageLocation objects per call, unchanged entries are
        // re-stored (harmless) but confirm instances are reused as intended.
        if (!this.uris.has(key) || this.uris.get(key) !== value) {
          this.uris.set(key, value);
          if (!this.availableNodes.includes(key)) {
            this.availableNodes.push(key);
            hasNewNode = true;
          }
        }
      }
      if (hasNewNode) {
        this.node.services.p2p.sortNodesByScore(this.availableNodes);
      }
      await new Promise((resolve) => setTimeout(resolve, 10));
      if (new Date() > this.timeout) {
        this.isTimedOut = true;
        return;
      }
      // NOTE(review): this inner loop idles while nodes ARE available (or no
      // consumer is waiting), which pauses cache refreshes during that time —
      // the condition reads inverted for a "wait until a node appears" loop;
      // confirm against the reference implementation before changing.
      while (this.availableNodes.length > 0 || !this.isWaitingForUri) {
        await new Promise((resolve) => setTimeout(resolve, 10));
        if (new Date() > this.timeout) {
          this.isTimedOut = true;
          return;
        }
      }
    }
  }
  // True while next() is blocked waiting for a candidate node.
  private isWaitingForUri: boolean = false;
  /**
   * Returns the next best-scored storage location, waiting until one is
   * discovered. Extends the shared deadline used by start().
   * @throws when the provider timed out before a location became available
   */
  async next(): Promise<SignedStorageLocation> {
    this.timeout = new Date(Date.now() + this.timeoutDuration);
    while (true) {
      if (this.availableNodes.length > 0) {
        this.isWaitingForUri = false;
        const nodeId = this.availableNodes.shift()!;
        // Non-null: every availableNodes entry was added with a uris entry.
        return new SignedStorageLocation(nodeId, this.uris.get(nodeId)!);
      }
      this.isWaitingForUri = true;
      if (this.isTimedOut) {
        throw new Error(
          `Could not download raw file: Timed out after ${this.timeoutDuration}ms ${this.hash}`,
        );
      }
      await new Promise((resolve) => setTimeout(resolve, 10));
    }
  }
  /** Reports a successful download from this location to the p2p scorer. */
  upvote(uri: SignedStorageLocation): void {
    this.node.services.p2p.upvote(uri.nodeId);
  }
  /** Reports a failed/bad download from this location to the p2p scorer. */
  downvote(uri: SignedStorageLocation): void {
    this.node.services.p2p.downvote(uri.nodeId);
  }
}
/** Pairs a storage location with the node that provided it. */
class SignedStorageLocation {
  /**
   * @param nodeId   the node that supplied this location
   * @param location the storage location itself
   */
  constructor(
    public nodeId: NodeId,
    public location: StorageLocation,
  ) {}

  /** Human-readable description, e.g. for logging. */
  toString(): string {
    return `SignedStorageLocation(${this.location}, ${this.nodeId})`;
  }
}