Compare commits
301 Commits
Author | SHA1 | Date |
---|---|---|
Derrick Hammer | 3065a13f75 | |
Derrick Hammer | 7c9fc160c7 | |
Derrick Hammer | 3b0cd655de | |
Derrick Hammer | 4f8c80fb62 | |
Derrick Hammer | 4830de48b4 | |
semantic-release-bot | 0c47ef6e53 | |
Derrick Hammer | 54cad2d482 | |
Derrick Hammer | 64d95353aa | |
semantic-release-bot | 1697c090d3 | |
Derrick Hammer | 5dc288b395 | |
Derrick Hammer | 8e786ec164 | |
semantic-release-bot | e5b4811c2e | |
Derrick Hammer | 067dd9cd22 | |
Derrick Hammer | 393c69f10b | |
Derrick Hammer | e8f2e23065 | |
semantic-release-bot | aa825747a9 | |
Derrick Hammer | f3432de638 | |
Derrick Hammer | 23b7b189b2 | |
semantic-release-bot | fee9a2116c | |
Derrick Hammer | 9d3ba32749 | |
Derrick Hammer | 0863f450b7 | |
semantic-release-bot | 1c7aaf84d4 | |
Derrick Hammer | ea8c9841a0 | |
Derrick Hammer | 01d29f0b59 | |
semantic-release-bot | afaa6995dc | |
Derrick Hammer | f14050a83d | |
Derrick Hammer | 0f4b978940 | |
semantic-release-bot | 3923cfcd87 | |
Derrick Hammer | 7f5cb50ece | |
Derrick Hammer | e760149699 | |
semantic-release-bot | eb33beddaf | |
Derrick Hammer | d379801e4d | |
Derrick Hammer | 19426b7fdb | |
semantic-release-bot | 74f23ae358 | |
Derrick Hammer | 1adac41c74 | |
Derrick Hammer | b3174cacf7 | |
semantic-release-bot | a2b5f3e539 | |
Derrick Hammer | 968459d1c0 | |
Derrick Hammer | 518bbcdd94 | |
semantic-release-bot | 04bf4d3f7c | |
Derrick Hammer | f067a8583d | |
Derrick Hammer | 9c5d417857 | |
semantic-release-bot | 52556bf703 | |
Derrick Hammer | 443fdcb14d | |
Derrick Hammer | c1f471e921 | |
semantic-release-bot | ac1421dae2 | |
Derrick Hammer | 6e589cbe64 | |
Derrick Hammer | cac24c9caa | |
semantic-release-bot | 5ace8cf52f | |
Derrick Hammer | f3c81e0375 | |
Derrick Hammer | 3fa96aa05a | |
semantic-release-bot | ed42b37777 | |
Derrick Hammer | 3cfb4b0464 | |
Derrick Hammer | a418410bd9 | |
semantic-release-bot | 6f0937fa6f | |
Derrick Hammer | 23e06e9cc4 | |
Derrick Hammer | 4848236882 | |
semantic-release-bot | e6aa5039e2 | |
Derrick Hammer | 479e6b8bd5 | |
Derrick Hammer | 5391666388 | |
Derrick Hammer | 8091e7ab36 | |
semantic-release-bot | 811d11ed3e | |
Derrick Hammer | d07351a928 | |
Derrick Hammer | 88f748c817 | |
semantic-release-bot | adf01350dd | |
Derrick Hammer | 0dbe18c75b | |
Derrick Hammer | b578a32a44 | |
semantic-release-bot | 5478a6830a | |
Derrick Hammer | 0a66d78298 | |
Derrick Hammer | 1c63497d2e | |
semantic-release-bot | c27011cacf | |
Derrick Hammer | 23b8c01d1c | |
Derrick Hammer | 51a6747f42 | |
Derrick Hammer | ef7c27dd4f | |
Derrick Hammer | ec815833ef | |
Derrick Hammer | 4693117c76 | |
Derrick Hammer | 892dd6ccd4 | |
Derrick Hammer | 6ff8a84ed8 | |
Derrick Hammer | 828741f569 | |
Derrick Hammer | 6d19fdb66e | |
Derrick Hammer | 86522e1ffe | |
Derrick Hammer | 3ad41c75c1 | |
semantic-release-bot | d7100dc449 | |
Derrick Hammer | 2c39f9fd76 | |
Derrick Hammer | 155e0b4c0c | |
semantic-release-bot | 4fe84d68fb | |
Derrick Hammer | 6cf2481164 | |
Derrick Hammer | e25087890f | |
semantic-release-bot | b4fa7ad645 | |
Derrick Hammer | 07893b1538 | |
Derrick Hammer | e03eb41e96 | |
semantic-release-bot | a46f0bf255 | |
Derrick Hammer | e2e269483d | |
Derrick Hammer | 517446310d | |
semantic-release-bot | 54c2b3d63b | |
Derrick Hammer | bef8f1fd8c | |
Derrick Hammer | 17151d25d0 | |
semantic-release-bot | 716c89227f | |
Derrick Hammer | c654a18159 | |
Derrick Hammer | 0ad6d611a6 | |
semantic-release-bot | 6c63da7354 | |
Derrick Hammer | 4d721ef4ab | |
Derrick Hammer | 565bed46d5 | |
semantic-release-bot | 6c4e010f56 | |
Derrick Hammer | c24f062f80 | |
Derrick Hammer | bf521425b3 | |
semantic-release-bot | e80fbb34b7 | |
Derrick Hammer | 8599ee3813 | |
Derrick Hammer | 0c060fde55 | |
semantic-release-bot | 68406eebb0 | |
Derrick Hammer | 2f690c5f80 | |
Derrick Hammer | b74fa99783 | |
semantic-release-bot | 2148ffaf58 | |
Derrick Hammer | a1a5912166 | |
Derrick Hammer | 4a157d9eca | |
semantic-release-bot | 07600cea13 | |
Derrick Hammer | 764685a111 | |
Derrick Hammer | cc5cbdb860 | |
semantic-release-bot | 18d54a08b2 | |
Derrick Hammer | bfbb9c9e0c | |
Derrick Hammer | 3c789459ec | |
semantic-release-bot | 3402d0f681 | |
Derrick Hammer | 085c97354e | |
Derrick Hammer | 8b87a61925 | |
semantic-release-bot | 3a3ad3ff73 | |
Derrick Hammer | c286fb1d9c | |
Derrick Hammer | 66612e9afc | |
semantic-release-bot | 10d898cfb9 | |
Derrick Hammer | cb4f23ed7e | |
Derrick Hammer | 9c471e6949 | |
semantic-release-bot | 4b173c9699 | |
Derrick Hammer | 614a699cc3 | |
Derrick Hammer | b91f33fb65 | |
semantic-release-bot | 9ca559e49f | |
Derrick Hammer | 437c8c0dcb | |
Derrick Hammer | a34bd1b813 | |
semantic-release-bot | 5da11dcb8e | |
Derrick Hammer | 7f8f35cb35 | |
Derrick Hammer | 6d575a1cec | |
semantic-release-bot | 5cd7ad7c10 | |
Derrick Hammer | 2f873efc43 | |
Derrick Hammer | d05d011fcd | |
semantic-release-bot | 065ca2a989 | |
Derrick Hammer | d03fae0356 | |
Derrick Hammer | cc66a0d839 | |
semantic-release-bot | 8f9067a4d9 | |
Derrick Hammer | 2c4905c266 | |
Derrick Hammer | 6cc0bd6500 | |
semantic-release-bot | 4f2a28bcad | |
Derrick Hammer | b3e690b0d4 | |
Derrick Hammer | 8373114d5c | |
semantic-release-bot | 589ccbe37c | |
Derrick Hammer | ef63d8d250 | |
Derrick Hammer | 384e941b09 | |
semantic-release-bot | 3f6a280d48 | |
Derrick Hammer | e53fd821fe | |
Derrick Hammer | a6c02bc772 | |
semantic-release-bot | 992f026c8b | |
Derrick Hammer | d461b917e6 | |
Derrick Hammer | 419c7f85a8 | |
semantic-release-bot | aa188e107b | |
Derrick Hammer | 01bf7fe1fa | |
Derrick Hammer | deebe15c6f | |
semantic-release-bot | 0b68d95e21 | |
Derrick Hammer | 8cc499c320 | |
Derrick Hammer | e3b7760050 | |
semantic-release-bot | 07459357fc | |
Derrick Hammer | 905c35aed1 | |
Derrick Hammer | dfe9e3f082 | |
Derrick Hammer | 90c0614773 | |
semantic-release-bot | 31465e3a63 | |
Derrick Hammer | e821da5a27 | |
Derrick Hammer | fabda024ad | |
semantic-release-bot | 58600e7163 | |
Derrick Hammer | 006c49409c | |
Derrick Hammer | daf357cd63 | |
semantic-release-bot | 283b16b3ee | |
Derrick Hammer | 0ede0480d4 | |
Derrick Hammer | 1a3241688c | |
semantic-release-bot | befa286bb3 | |
Derrick Hammer | d216f2b43e | |
Derrick Hammer | 4e6f1b3ad3 | |
Derrick Hammer | d9af5bd015 | |
semantic-release-bot | 29a12e7baf | |
Derrick Hammer | c360b8b1ce | |
Derrick Hammer | b34b7a3a01 | |
semantic-release-bot | 77fea7bab7 | |
Derrick Hammer | 3cc4c27cf2 | |
Derrick Hammer | 2061f6330d | |
semantic-release-bot | 3de242d824 | |
Derrick Hammer | ca1a3dd5e7 | |
Derrick Hammer | c641d03a1b | |
Derrick Hammer | 5d26f1ec61 | |
semantic-release-bot | 58240f032e | |
Derrick Hammer | af36a49257 | |
Derrick Hammer | 1b04799943 | |
semantic-release-bot | 8623669898 | |
Derrick Hammer | 427c423b2c | |
Derrick Hammer | 8b1325f631 | |
semantic-release-bot | 8e1dee22b4 | |
Derrick Hammer | 04c388248c | |
Derrick Hammer | a75c57b1e8 | |
semantic-release-bot | 6036265794 | |
Derrick Hammer | 174e5a0181 | |
Derrick Hammer | 9645105ec3 | |
semantic-release-bot | 22d5c8be10 | |
Derrick Hammer | 9119043051 | |
Derrick Hammer | ade87a9934 | |
semantic-release-bot | cce0afe694 | |
Derrick Hammer | bda8007cdf | |
Derrick Hammer | 9e03b62a59 | |
Derrick Hammer | f306ee2166 | |
semantic-release-bot | 97358b0419 | |
Derrick Hammer | 9f8f680948 | |
Derrick Hammer | 1c1ec9703a | |
semantic-release-bot | 69ad85c715 | |
Derrick Hammer | e4bcd774ab | |
Derrick Hammer | 80dd92ddb1 | |
Derrick Hammer | 8b7ffa8821 | |
semantic-release-bot | eeb8a816a0 | |
Derrick Hammer | f81f170ee9 | |
Derrick Hammer | 0a153379c8 | |
semantic-release-bot | ba82769d03 | |
Derrick Hammer | 76879734ea | |
Derrick Hammer | 46098bd07f | |
semantic-release-bot | c2bf1e70e2 | |
Derrick Hammer | 92bd8e2cd1 | |
Derrick Hammer | a4c5dd5b5b | |
semantic-release-bot | a9ce2f7993 | |
Derrick Hammer | 2d8222b7a0 | |
Derrick Hammer | a7b3129426 | |
semantic-release-bot | e5faf413b1 | |
Derrick Hammer | 4f3af7fdb4 | |
Derrick Hammer | d84aa18174 | |
semantic-release-bot | 3bd1adf4dd | |
Derrick Hammer | 1f391c15b1 | |
Derrick Hammer | 57e2c56d24 | |
Derrick Hammer | 3accd69ab0 | |
Derrick Hammer | a84c02c36f | |
semantic-release-bot | a0e9d82ea2 | |
Derrick Hammer | 215a4ce4fe | |
Derrick Hammer | bc608d00e3 | |
semantic-release-bot | ee715f2787 | |
Derrick Hammer | 4544e4b5c1 | |
Derrick Hammer | 16d0c758f3 | |
semantic-release-bot | f2d1019ecb | |
Derrick Hammer | 9305f3d42c | |
Derrick Hammer | bfa7bd4139 | |
Derrick Hammer | 6e74ef6d8f | |
semantic-release-bot | 7f8f04d489 | |
Derrick Hammer | bbda50ddfb | |
Derrick Hammer | 0af8bb3469 | |
semantic-release-bot | 74c7e2ba6f | |
Derrick Hammer | c7f271b8c6 | |
Derrick Hammer | 42ef635fb5 | |
semantic-release-bot | 3c806abac0 | |
Derrick Hammer | 149c725624 | |
Derrick Hammer | e110f8f197 | |
semantic-release-bot | 7dc4325056 | |
Derrick Hammer | 187367139d | |
Derrick Hammer | 87d1e6b0b4 | |
Derrick Hammer | 35ebf74f4c | |
Derrick Hammer | e342982163 | |
Derrick Hammer | 7afc759ece | |
Derrick Hammer | 5585907591 | |
Derrick Hammer | 6ebc477449 | |
Derrick Hammer | b5e491b01a | |
semantic-release-bot | 761d33f04e | |
Derrick Hammer | 10e6020f52 | |
Derrick Hammer | aed4865b73 | |
Derrick Hammer | db72e1eefd | |
Derrick Hammer | 08123762ce | |
semantic-release-bot | b10a1e1141 | |
Derrick Hammer | f199bcc665 | |
Derrick Hammer | a1549523fb | |
Derrick Hammer | a189fab1be | |
Derrick Hammer | 46721129f3 | |
Derrick Hammer | 84eb06f648 | |
semantic-release-bot | 3be3ae4de5 | |
Derrick Hammer | 03e7d9ba04 | |
Derrick Hammer | 5a1dca9775 | |
Derrick Hammer | 05cbd60373 | |
Derrick Hammer | 1221d7de63 | |
Derrick Hammer | a021243c89 | |
Derrick Hammer | a4b692b28f | |
Derrick Hammer | 17ff5fd96b | |
Derrick Hammer | 2ef91a4d9c | |
Derrick Hammer | 91034708bc | |
Derrick Hammer | 56bb5007f6 | |
Derrick Hammer | 68b7ffa855 | |
Derrick Hammer | 68e6c3a682 | |
Derrick Hammer | 91a15bd428 | |
Derrick Hammer | ae40d52f9e | |
Derrick Hammer | 22e486ea18 | |
Derrick Hammer | eaf35bcd2e | |
Derrick Hammer | 42cd101fb2 | |
Derrick Hammer | 132f43c34b | |
Derrick Hammer | fbffb1da72 | |
Derrick Hammer | 31e63f6c63 | |
Derrick Hammer | b1f4ab93d8 | |
Derrick Hammer | 2f2ae2f4fc |
@@ -0,0 +1,11 @@
```json
{
  "$schema": "https://unpkg.com/@changesets/config@3.0.0/schema.json",
  "changelog": "@changesets/cli/changelog",
  "commit": false,
  "fixed": [],
  "linked": [],
  "access": "public",
  "baseBranch": "master",
  "updateInternalDependencies": "patch",
  "ignore": []
}
```
@@ -0,0 +1,5 @@
```json
{
  "preset": [
    "@lumeweb/node-library-preset"
  ]
}
```
@@ -0,0 +1,504 @@
```markdown
# [0.1.0-develop.82](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.81...v0.1.0-develop.82) (2023-12-28)

### Bug Fixes

* don't force override cid type ([64d9535](https://git.lumeweb.com/LumeWeb/libs5/commit/64d95353aa53ba09f822e88c7e4e3475728d5bf5))

# [0.1.0-develop.81](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.80...v0.1.0-develop.81) (2023-12-12)

### Features

* add fromSignedRegistryEntry ([8e786ec](https://git.lumeweb.com/LumeWeb/libs5/commit/8e786ec164cfd20eba5d89834579fbe07ac96480))

# [0.1.0-develop.80](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.79...v0.1.0-develop.80) (2023-12-12)

# [0.1.0-develop.79](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.78...v0.1.0-develop.79) (2023-12-11)

# [0.1.0-develop.78](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.77...v0.1.0-develop.78) (2023-12-11)

# [0.1.0-develop.77](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.76...v0.1.0-develop.77) (2023-11-18)

### Bug Fixes

* replace u with U in decodeString, and strip U in toBase64Url ([01d29f0](https://git.lumeweb.com/LumeWeb/libs5/commit/01d29f0b59cdadfb23c826d9649ed635745fa45e))

# [0.1.0-develop.76](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.75...v0.1.0-develop.76) (2023-11-18)

### Bug Fixes

* more base64urlpad fixes ([0f4b978](https://git.lumeweb.com/LumeWeb/libs5/commit/0f4b9789401075ac783edd9bd91e4e3f417e4329))

# [0.1.0-develop.75](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.74...v0.1.0-develop.75) (2023-11-18)

### Bug Fixes

* base64urlpad requires a "U" prefix ([e760149](https://git.lumeweb.com/LumeWeb/libs5/commit/e76014969923fdbceb2c63a90351d3e8d2521d1c))

# [0.1.0-develop.74](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.73...v0.1.0-develop.74) (2023-11-18)

### Bug Fixes

* use base64urlpad ([19426b7](https://git.lumeweb.com/LumeWeb/libs5/commit/19426b7fdb05517c437f7bfb7aa78876647fa470))

# [0.1.0-develop.73](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.72...v0.1.0-develop.73) (2023-11-18)

### Bug Fixes

* don't strip u ([b3174ca](https://git.lumeweb.com/LumeWeb/libs5/commit/b3174cacf725fb24a73d5188bc123a6d88bff055))

# [0.1.0-develop.72](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.71...v0.1.0-develop.72) (2023-11-18)

### Bug Fixes

* ensure axios returns binary data ([518bbcd](https://git.lumeweb.com/LumeWeb/libs5/commit/518bbcdd946ef92f8a5195f6684d11fdce7bbd50))

# [0.1.0-develop.71](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.70...v0.1.0-develop.71) (2023-11-18)

### Bug Fixes

* value is a map ([9c5d417](https://git.lumeweb.com/LumeWeb/libs5/commit/9c5d417857376eef0fc7655c9abfc94485b3c6b3))

# [0.1.0-develop.70](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.69...v0.1.0-develop.70) (2023-11-18)

### Bug Fixes

* need to use Object.entries ([c1f471e](https://git.lumeweb.com/LumeWeb/libs5/commit/c1f471e921930e5f1a1ab9a087b8ea894f985345))

# [0.1.0-develop.69](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.68...v0.1.0-develop.69) (2023-11-18)

### Bug Fixes

* unpack int checks missing possible types ([cac24c9](https://git.lumeweb.com/LumeWeb/libs5/commit/cac24c9caa59896a9731dd1ec0af2ab93ad124da))

# [0.1.0-develop.68](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.67...v0.1.0-develop.68) (2023-11-18)

### Bug Fixes

* update how maps are packed and add missing serialization for media, directory, and files ([3fa96aa](https://git.lumeweb.com/LumeWeb/libs5/commit/3fa96aa05acf292311faa5fbbc83f1f1bd120f68))

# [0.1.0-develop.67](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.66...v0.1.0-develop.67) (2023-11-18)

### Bug Fixes

* storage locations are in cacheDb ([a418410](https://git.lumeweb.com/LumeWeb/libs5/commit/a418410bd99ee61cddb4fbebe36d54c9cb4ce2d3))

# [0.1.0-develop.66](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.65...v0.1.0-develop.66) (2023-11-18)

### Bug Fixes

* hashQueryRoutingTable is a map not object ([4848236](https://git.lumeweb.com/LumeWeb/libs5/commit/4848236882cf643b05172b1d49893a18f3649e50))

# [0.1.0-develop.65](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.64...v0.1.0-develop.65) (2023-11-18)

### Features

* add hashQuery protocol method ([5391666](https://git.lumeweb.com/LumeWeb/libs5/commit/53916663886b54f4adf5192f17f79e72c457d4be))

# [0.1.0-develop.64](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.63...v0.1.0-develop.64) (2023-11-18)

### Bug Fixes

* hashQueryRoutingTable is on the p2p service ([88f748c](https://git.lumeweb.com/LumeWeb/libs5/commit/88f748c817e3cefa8eb8f0500601636fda364001))

# [0.1.0-develop.63](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.62...v0.1.0-develop.63) (2023-11-18)

### Bug Fixes

* wrap db.get in a try/catch ([b578a32](https://git.lumeweb.com/LumeWeb/libs5/commit/b578a32a4460c0f26e3d027df038936b4ca818a4))

# [0.1.0-develop.62](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.61...v0.1.0-develop.62) (2023-11-17)

### Bug Fixes

* convert paths to an object ([1c63497](https://git.lumeweb.com/LumeWeb/libs5/commit/1c63497d2e9e5d7a72969df83876991074b26673))

# [0.1.0-develop.61](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.60...v0.1.0-develop.61) (2023-11-17)

### Bug Fixes

* bad import ([23b8c01](https://git.lumeweb.com/LumeWeb/libs5/commit/23b8c01d1c3ade99fe4a1ff7c8a781f3fe4c3bb8))
* export StorageLocationProvider ([828741f](https://git.lumeweb.com/LumeWeb/libs5/commit/828741f569c3ce5dbf2932651c4ee7435adcc4bf))
* need to store NodeId not the string form of it ([3ad41c7](https://git.lumeweb.com/LumeWeb/libs5/commit/3ad41c75c174f80b0f18bf527959110f1af03448))

### Features

* add encrypted_cid.ts ([6ff8a84](https://git.lumeweb.com/LumeWeb/libs5/commit/6ff8a84ed8fb062dd7e08149d7b5a2a7d5cd7e36))
* add getCachedStorageLocations method ([86522e1](https://git.lumeweb.com/LumeWeb/libs5/commit/86522e1ffea743afe3c336d41eb5e633f2b5d809))
* add getMetadataByCID and downloadBytesByHash ([ec81583](https://git.lumeweb.com/LumeWeb/libs5/commit/ec815833ef9c3f703e03b731afdc67f3f4e8cc7c))
* add metadata structures and ser/der functions ([4693117](https://git.lumeweb.com/LumeWeb/libs5/commit/4693117c76f3a8f2ace49dd8ba987169e7145e62))
* add StorageLocationProvider ([6d19fdb](https://git.lumeweb.com/LumeWeb/libs5/commit/6d19fdb66e782b2a18edfb94541e08dd5ce6158f))

# [0.1.0-develop.60](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.59...v0.1.0-develop.60) (2023-09-19)

### Bug Fixes

* bad base32 encoding, multiformats handles the prefix ([155e0b4](https://git.lumeweb.com/LumeWeb/libs5/commit/155e0b4c0c9e04a97ca88e9b1cbec72ade0225bf))

# [0.1.0-develop.59](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.58...v0.1.0-develop.59) (2023-09-11)

### Features

* add fromRegistryPublicKey helper ([e250878](https://git.lumeweb.com/LumeWeb/libs5/commit/e25087890f1d19139da5152e0d18b6001911baff))

# [0.1.0-develop.58](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.57...v0.1.0-develop.58) (2023-09-11)

### Bug Fixes

* signRegistryEntry accidentally recursive ([e03eb41](https://git.lumeweb.com/LumeWeb/libs5/commit/e03eb41e967ff95d27f41572e4e905f08ce2ba07))

# [0.1.0-develop.57](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.56...v0.1.0-develop.57) (2023-09-09)

### Bug Fixes

* if the registry entry hash type is not ed25519, return a new cid with the type set to raw ([5174463](https://git.lumeweb.com/LumeWeb/libs5/commit/517446310dea876003246b15255659bddcccb0be))

# [0.1.0-develop.56](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.55...v0.1.0-develop.56) (2023-09-09)

### Bug Fixes

* don't prefix base58 since encoder already does it ([17151d2](https://git.lumeweb.com/LumeWeb/libs5/commit/17151d25d04d9de83ecf1d58dd78adadbd05bf07))

# [0.1.0-develop.55](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.54...v0.1.0-develop.55) (2023-09-08)

# [0.1.0-develop.54](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.53...v0.1.0-develop.54) (2023-09-08)

### Features

* add valid to CID ([565bed4](https://git.lumeweb.com/LumeWeb/libs5/commit/565bed46d511340854c443013c2be000c1e9302d))

# [0.1.0-develop.53](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.52...v0.1.0-develop.53) (2023-09-08)

### Features

* add fromRegistry to CID ([bf52142](https://git.lumeweb.com/LumeWeb/libs5/commit/bf521425b3b2f161cf073db9caa4c3653587b7f9))

# [0.1.0-develop.52](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.51...v0.1.0-develop.52) (2023-09-08)

### Bug Fixes

* export KeyPairEd25519 as a type ([0c060fd](https://git.lumeweb.com/LumeWeb/libs5/commit/0c060fde558c4755028b53360d9c805960c22b12))

# [0.1.0-develop.51](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.50...v0.1.0-develop.51) (2023-09-08)

### Features

* add more s5 constants ([b74fa99](https://git.lumeweb.com/LumeWeb/libs5/commit/b74fa99783c7ef13e93f5efa0498a76da0d7ab2f))

# [0.1.0-develop.50](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.49...v0.1.0-develop.50) (2023-09-07)

### Features

* add CID.fromHash ([4a157d9](https://git.lumeweb.com/LumeWeb/libs5/commit/4a157d9ecaa74eb9ff17d33398aa06ecf2b07607))

# [0.1.0-develop.49](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.48...v0.1.0-develop.49) (2023-09-07)

### Features

* add CID class ([cc5cbdb](https://git.lumeweb.com/LumeWeb/libs5/commit/cc5cbdb8605fa3170eea964b2ad8765fc73f1caa))

# [0.1.0-develop.48](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.47...v0.1.0-develop.48) (2023-09-07)

# [0.1.0-develop.47](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.46...v0.1.0-develop.47) (2023-09-04)

### Bug Fixes

* export util functions ([8b87a61](https://git.lumeweb.com/LumeWeb/libs5/commit/8b87a619258a00a45e2eac81e5846c5f9ab2551d))

# [0.1.0-develop.46](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.45...v0.1.0-develop.46) (2023-09-04)

# [0.1.0-develop.45](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.44...v0.1.0-develop.45) (2023-09-04)

### Bug Fixes

* fix outdated reads. port of s5 08c4bda5c1109673fba907ba119c9855fc3fcf68 0d2bf39845b37dd0b2ebe06be34c5d51e7060280 ([9c471e6](https://git.lumeweb.com/LumeWeb/libs5/commit/9c471e694913771a06e05088041a9893dda3aed7))

# [0.1.0-develop.44](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.43...v0.1.0-develop.44) (2023-09-03)

# [0.1.0-develop.43](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.42...v0.1.0-develop.43) (2023-09-02)

# [0.1.0-develop.42](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.41...v0.1.0-develop.42) (2023-09-02)

### Bug Fixes

* freeze REGISTRY_TYPES ([6d575a1](https://git.lumeweb.com/LumeWeb/libs5/commit/6d575a1cecad580360c7af3ea28611977c520619))

# [0.1.0-develop.41](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.40...v0.1.0-develop.41) (2023-09-02)

# [0.1.0-develop.40](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.39...v0.1.0-develop.40) (2023-09-02)

### Features

* add const groups of the different set of types ([cc66a0d](https://git.lumeweb.com/LumeWeb/libs5/commit/cc66a0d839fe14a89f13256c0763cffdf2bdc29c))

# [0.1.0-develop.39](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.38...v0.1.0-develop.39) (2023-09-02)

### Bug Fixes

* if a peer completed the handshake, but our node is stopped, end the peer and abort ([6cc0bd6](https://git.lumeweb.com/LumeWeb/libs5/commit/6cc0bd650016e9c5f7a7a4aba720696558f47e79))

# [0.1.0-develop.38](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.37...v0.1.0-develop.38) (2023-09-02)

### Bug Fixes

* utf8ToBytes no longer needed ([8373114](https://git.lumeweb.com/LumeWeb/libs5/commit/8373114d5cf98452024b55220228ddbd47756954))

# [0.1.0-develop.37](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.36...v0.1.0-develop.37) (2023-09-02)

### Bug Fixes

* set sublevel valueEncoding to buffer ([384e941](https://git.lumeweb.com/LumeWeb/libs5/commit/384e941b095b6b5c412f502cd8579758fe90ce17))

# [0.1.0-develop.36](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.35...v0.1.0-develop.36) (2023-09-02)

### Bug Fixes

* set sublevel valueEncoding to buffer ([a6c02bc](https://git.lumeweb.com/LumeWeb/libs5/commit/a6c02bc772d84068b2529a656cd2490d3da66ea1))

# [0.1.0-develop.35](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.34...v0.1.0-develop.35) (2023-09-02)

### Bug Fixes

* convert binary string to uint8array ([419c7f8](https://git.lumeweb.com/LumeWeb/libs5/commit/419c7f85a8bd22e8ee10839bc30ea1248e2b6f3a))

# [0.1.0-develop.34](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.33...v0.1.0-develop.34) (2023-09-02)

### Bug Fixes

* add started state on node and check it on connectToNode ([deebe15](https://git.lumeweb.com/LumeWeb/libs5/commit/deebe15c6fcb514a58a5dbaec04d61dca8f99c14))

# [0.1.0-develop.33](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.32...v0.1.0-develop.33) (2023-09-02)

### Bug Fixes

* need to use close not end on WS transport ([e3b7760](https://git.lumeweb.com/LumeWeb/libs5/commit/e3b77600500289acca4aca96d418f25d1d1780b8))

# [0.1.0-develop.32](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.31...v0.1.0-develop.32) (2023-09-02)

# [0.1.0-develop.31](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.30...v0.1.0-develop.31) (2023-09-02)

### Bug Fixes

* fix iteration of peers ([fabda02](https://git.lumeweb.com/LumeWeb/libs5/commit/fabda024ad788fcb00e398e73f75afb3e944db30))

# [0.1.0-develop.30](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.29...v0.1.0-develop.30) (2023-09-02)

### Bug Fixes

* correct argument order on ed25519.verify ([daf357c](https://git.lumeweb.com/LumeWeb/libs5/commit/daf357cd639cb69ee5957e6628aad1b62a6bfc34))

# [0.1.0-develop.29](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.28...v0.1.0-develop.29) (2023-09-02)

### Bug Fixes

* add try/catch on db.get ([1a32416](https://git.lumeweb.com/LumeWeb/libs5/commit/1a3241688c2b92a8c685a6da15ead30c117582ff))

# [0.1.0-develop.28](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.27...v0.1.0-develop.28) (2023-09-02)

### Bug Fixes

* set node registry service in service constructor ([d9af5bd](https://git.lumeweb.com/LumeWeb/libs5/commit/d9af5bd0151c40fab1385e966f87e59bef6fd5a3))

# [0.1.0-develop.27](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.26...v0.1.0-develop.27) (2023-09-01)

### Features

* add eventemitter support on p2p service and have the handshakedone message emit peerConnected ([b34b7a3](https://git.lumeweb.com/LumeWeb/libs5/commit/b34b7a3a0172adf0b0ce1476f6c68912dcd05576))

# [0.1.0-develop.26](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.25...v0.1.0-develop.26) (2023-09-01)

### Features

* add BOOTSTRAP_NODES constant ([2061f63](https://git.lumeweb.com/LumeWeb/libs5/commit/2061f6330d1da40df84c649c3e492b77bd10e2ae))

# [0.1.0-develop.25](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.24...v0.1.0-develop.25) (2023-09-01)

### Bug Fixes

* fix import ([ca1a3dd](https://git.lumeweb.com/LumeWeb/libs5/commit/ca1a3dd5e7b42cad30bea1b03be17a10182cb8d2))

# [0.1.0-develop.24](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.23...v0.1.0-develop.24) (2023-09-01)

# [0.1.0-develop.23](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.22...v0.1.0-develop.23) (2023-09-01)

# [0.1.0-develop.22](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.21...v0.1.0-develop.22) (2023-09-01)

### Features

* port listen method ([a75c57b](https://git.lumeweb.com/LumeWeb/libs5/commit/a75c57b1e826a9f26fe25ec10873c0be81f7be5a))

# [0.1.0-develop.21](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.20...v0.1.0-develop.21) (2023-09-01)

### Bug Fixes

* await not needed ([9645105](https://git.lumeweb.com/LumeWeb/libs5/commit/9645105ec322e7e7cf24b809829eb4a0d622578f))

# [0.1.0-develop.20](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.19...v0.1.0-develop.20) (2023-09-01)

# [0.1.0-develop.19](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.18...v0.1.0-develop.19) (2023-09-01)

# [0.1.0-develop.18](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.17...v0.1.0-develop.18) (2023-09-01)

### Bug Fixes

* export S5Node type ([1c1ec97](https://git.lumeweb.com/LumeWeb/libs5/commit/1c1ec9703a1694f59708c548c023f71f31f588b6))

# [0.1.0-develop.17](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.16...v0.1.0-develop.17) (2023-09-01)

### Bug Fixes

* browser returns a Blob, so convert to a buffer if we have a blob ([80dd92d](https://git.lumeweb.com/LumeWeb/libs5/commit/80dd92ddb13da043f6f41d34941c81eaf2608e33))
* missing uri argument for node WS ([8b7ffa8](https://git.lumeweb.com/LumeWeb/libs5/commit/8b7ffa8821779c769ed2f5772854537675ab5da6))

# [0.1.0-develop.16](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.15...v0.1.0-develop.16) (2023-09-01)

### Bug Fixes

* add support for browser WS by checking if we are in node ([0a15337](https://git.lumeweb.com/LumeWeb/libs5/commit/0a153379c87ebbee8c7d7b3015fe4c1b77f379ca))

# [0.1.0-develop.15](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.14...v0.1.0-develop.15) (2023-09-01)

# [0.1.0-develop.14](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.13...v0.1.0-develop.14) (2023-08-31)

### Bug Fixes

* make uris optional ([a4c5dd5](https://git.lumeweb.com/LumeWeb/libs5/commit/a4c5dd5b5b1c7bef14eccafbb2e3f4c93290726c))

# [0.1.0-develop.13](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.12...v0.1.0-develop.13) (2023-08-31)

### Bug Fixes

* only set socket if passed ([a7b3129](https://git.lumeweb.com/LumeWeb/libs5/commit/a7b31294263cb2631824fd3b643e8b9e5e75bac3))

# [0.1.0-develop.12](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.11...v0.1.0-develop.12) (2023-08-31)

# [0.1.0-develop.11](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.10...v0.1.0-develop.11) (2023-08-31)

# [0.1.0-develop.10](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.9...v0.1.0-develop.10) (2023-08-31)

# [0.1.0-develop.9](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.8...v0.1.0-develop.9) (2023-08-31)

# [0.1.0-develop.8](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.7...v0.1.0-develop.8) (2023-08-31)

### Bug Fixes

* fix imports ([9305f3d](https://git.lumeweb.com/LumeWeb/libs5/commit/9305f3d42c82d4c6e3c1519793b3750b1b74a0dc))

# [0.1.0-develop.7](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.6...v0.1.0-develop.7) (2023-08-31)

### Bug Fixes

* use URL type from url package not global URL ([0af8bb3](https://git.lumeweb.com/LumeWeb/libs5/commit/0af8bb3469b363caa85a62d3ec6e27f23f261fab))

# [0.1.0-develop.6](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.5...v0.1.0-develop.6) (2023-08-31)

### Bug Fixes

* add createKeyPair factory that defaults to a random key ([42ef635](https://git.lumeweb.com/LumeWeb/libs5/commit/42ef635fb590e554e2c11565ce3255ed7ce3e2a6))

# [0.1.0-develop.5](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.4...v0.1.0-develop.5) (2023-08-31)

# [0.1.0-develop.4](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.3...v0.1.0-develop.4) (2023-08-31)

### Bug Fixes

* add getter for networkId and hashQueryRoutingTable ([6ebc477](https://git.lumeweb.com/LumeWeb/libs5/commit/6ebc477449ca8893a2e5e37310575e0c5e7dc444))
* ed25519.sign arguments are wrong order ([5585907](https://git.lumeweb.com/LumeWeb/libs5/commit/5585907591c0275de5a229db99584c6ea3eb12d2))
* fix object references ([e342982](https://git.lumeweb.com/LumeWeb/libs5/commit/e342982163fe957be516f9fc07afdedd440a76f9))
* missing p2p object on S5Config ([b5e491b](https://git.lumeweb.com/LumeWeb/libs5/commit/b5e491b01a7a91fa2dd518e0ea260ac0b50c3a60))
* need to store peer, set the id the pass it to onNewPeer ([7afc759](https://git.lumeweb.com/LumeWeb/libs5/commit/7afc759ece228b148f56d0e3203f0406f4820ffa))
* need to use unpacked data from signed message ([35ebf74](https://git.lumeweb.com/LumeWeb/libs5/commit/35ebf74f4ca55281c1a402d058aad79b7ca86199))
* update registered messages ([87d1e6b](https://git.lumeweb.com/LumeWeb/libs5/commit/87d1e6b0b4f4b7ec7ae9af85b9afae5c4eaa8630))

# [0.1.0-develop.3](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.2...v0.1.0-develop.3) (2023-08-31)

# [0.1.0-develop.2](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.1.0-develop.1...v0.1.0-develop.2) (2023-08-31)

### Bug Fixes

* add main to package.json ([4672112](https://git.lumeweb.com/LumeWeb/libs5/commit/46721129f3a9b6c03511e4c4e5404ff9da7d2b77))

# [0.1.0-develop.1](https://git.lumeweb.com/LumeWeb/libs5/compare/v0.0.1...v0.1.0-develop.1) (2023-08-31)

### Bug Fixes

* _newBuf needs to reset offset to 0 ([31e63f6](https://git.lumeweb.com/LumeWeb/libs5/commit/31e63f6c63d785ba3456709bcea91760e8a6c00b))
* add registry to services object in interface ([22e486e](https://git.lumeweb.com/LumeWeb/libs5/commit/22e486ea18bed2255fc3a32eb062a8eb07fef3fc))
* add some exports ([5a1dca9](https://git.lumeweb.com/LumeWeb/libs5/commit/5a1dca97756d57a469184ca54ca3d4617e3369fc))
* check protocol with colon ([a4b692b](https://git.lumeweb.com/LumeWeb/libs5/commit/a4b692b28f2bd62d3546d666c4318e20a89e049a))
* do a truthy check on networkId ([68b7ffa](https://git.lumeweb.com/LumeWeb/libs5/commit/68b7ffa855dd4ae3178b4472837d533b5145b5bb))
* fix retry logic ([1221d7d](https://git.lumeweb.com/LumeWeb/libs5/commit/1221d7de63633f05e10eab5ca092134549199964))
* import websocket ([132f43c](https://git.lumeweb.com/LumeWeb/libs5/commit/132f43c34ba85b82db47f42e7049bfa1938055a1))
* need a getter for id ([fbffb1d](https://git.lumeweb.com/LumeWeb/libs5/commit/fbffb1da72f55567af45420dc54abe230ff8b062))
* pass the event data, not the event itself ([42cd101](https://git.lumeweb.com/LumeWeb/libs5/commit/42cd101fb29a4a1f7451ab6251e9bd89e7a51145))
* remove unneeded getPublicKey call ([ae40d52](https://git.lumeweb.com/LumeWeb/libs5/commit/ae40d52f9e6d44e9dce2ead1c97a0d7b4f772e50))
* unsupported url needs to be a real but dummy one ([17ff5fd](https://git.lumeweb.com/LumeWeb/libs5/commit/17ff5fd96b1a1e773e4b09254181dfe7180e0a7f))
* update import ([91a15bd](https://git.lumeweb.com/LumeWeb/libs5/commit/91a15bd42849e540c56edef20e1508acd8fd32c6))
* update level imports ([68e6c3a](https://git.lumeweb.com/LumeWeb/libs5/commit/68e6c3a682acc6b6541bfc871f80150c11a495b4))
* we dont need to strip out auth ([a021243](https://git.lumeweb.com/LumeWeb/libs5/commit/a021243c8954ddedcd99c8bd32901055dbba49a7))

### Features

* initial version ([2f2ae2f](https://git.lumeweb.com/LumeWeb/libs5/commit/2f2ae2f4fca7174b289d658b387a941e0d6fc120))
```
LICENSE

@@ -1,6 +1,6 @@
```diff
 MIT License
 
-Copyright (c) 2023 LumeWeb
+Copyright (c) 2023 Hammer Technologies LLC
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 
```
@@ -1,2 +1,5 @@
```markdown
# libs5

`src/serialization` is licensed under BSD 3-Clause and original author is Nail Gilaziev. It is a port of `messagepack` from dart. See LICENSE at https://github.com/nailgilaziev/messagepack/blob/9fc7d685ac8519c2c02feed604b4de342ef96764/LICENSE.

This library is a port of libs5 and s5 combined from `https://github.com/s5-dev` under MIT license by redsolver. License at https://github.com/s5-dev/lib5/blob/1a4fafbac105a948e5c4ca2befacd2f731d72672/LICENSE
```
File diff suppressed because it is too large.
@@ -0,0 +1,37 @@
```json
{
  "name": "@lumeweb/libs5",
  "version": "0.1.0-develop.82",
  "type": "module",
  "main": "lib/index.js",
  "repository": {
    "type": "git",
    "url": "gitea@git.lumeweb.com:LumeWeb/libs5.git"
  },
  "devDependencies": {
    "@changesets/cli": "^2.27.1",
    "@lumeweb/node-library-preset": "^0.2.7",
    "presetter": "*"
  },
  "readme": "ERROR: No README data found!",
  "scripts": {
    "prepare": "presetter bootstrap",
    "build": "run build",
    "semantic-release": "semantic-release"
  },
  "dependencies": {
    "@noble/curves": "^1.1.0",
    "@noble/hashes": "^1.3.1",
    "axios": "^1.6.2",
    "detect-node": "^2.1.0",
    "level": "^8.0.0",
    "multiformats": "^12.0.1",
    "p-defer": "^4.0.0",
    "ws": "^8.13.0"
  },
  "publishConfig": {
    "access": "public"
  },
  "files": [
    "lib"
  ]
}
```
@@ -0,0 +1,162 @@
```ts
import Multibase from "#multibase.js";
import { Multihash } from "#multihash.js";
import { CID_TYPES, REGISTRY_TYPES } from "#constants.js";
import { decodeEndian, encodeEndian } from "#util.js";
import { concatBytes, equalBytes } from "@noble/curves/abstract/utils";
import { hexToBytes } from "@noble/hashes/utils";
import { SignedRegistryEntry } from "#types.js";

export default class CID extends Multibase {
  type: number;
  hash: Multihash;
  size?: number;

  constructor(type: number, hash: Multihash, size?: number) {
    super();
    this.type = type;
    this.hash = hash;
    this.size = size;
  }

  static decode(cid: string): CID {
    const decodedBytes = Multibase.decodeString(cid);
    return CID._init(decodedBytes);
  }

  static fromRegistry(bytes: Uint8Array): CID {
    if (!Object.values(REGISTRY_TYPES).includes(bytes[0])) {
      throw new Error(`invalid registry type ${bytes[0]}`);
    }

    bytes = bytes.slice(1);

    return CID._init(bytes);
  }

  static fromBytes(bytes: Uint8Array): CID {
    return CID._init(bytes);
  }

  static fromSignedRegistryEntry(sre: SignedRegistryEntry): CID {
    return CID.fromRegistryPublicKey(sre.pk);
  }

  static fromRegistryPublicKey(pubkey: string | Uint8Array): CID {
    return CID.fromHash(pubkey, 0, CID_TYPES.RESOLVER);
  }

  static fromHash(
    bytes: string | Uint8Array,
    size: number,
    type = CID_TYPES.RAW,
  ): CID {
    if (typeof bytes === "string") {
      bytes = hexToBytes(bytes);
    }

    if (!Object.values(CID_TYPES).includes(type)) {
      throw new Error(`invalid cid type ${type}`);
    }

    return new CID(type, new Multihash(bytes), size);
  }

  static verify(bytes: string | Uint8Array): boolean {
    if (typeof bytes === "string") {
      bytes = Multibase.decodeString(bytes);
    }

    try {
      CID._init(bytes);
    } catch {
      return false;
    }

    return true;
  }

  private static _init(bytes: Uint8Array): CID {
    const type = bytes[0];
    if (type === CID_TYPES.BRIDGE) {
      return new CID(type, new Multihash(bytes));
    }

    const hash = new Multihash(bytes.subarray(1, 34));
    let size: number | undefined = undefined;
    const sizeBytes = bytes.subarray(34);
    if (sizeBytes.length > 0) {
      size = decodeEndian(sizeBytes);
    }

    if (!Object.values(CID_TYPES).includes(type)) {
      throw new Error(`invalid cid type ${type}`);
    }

    return new CID(type, hash, size);
  }

  copyWith({ size, type }: { type?: number; size?: number }): CID {
    type = type || this.type;

    if (!Object.values(CID_TYPES).includes(type)) {
      throw new Error(`invalid cid type ${type}`);
    }

    return new CID(type, this.hash, size || this.size);
  }

  toBytes(): Uint8Array {
    if (this.type === CID_TYPES.BRIDGE) {
      return this.hash.fullBytes;
    } else if (this.type === CID_TYPES.RAW) {
      let sizeBytes = encodeEndian(this.size as number, 8);

      while (sizeBytes.length > 0 && sizeBytes[sizeBytes.length - 1] === 0) {
        sizeBytes = sizeBytes.slice(0, -1);
      }
      if (sizeBytes.length === 0) {
        sizeBytes = new Uint8Array(1);
      }

      return concatBytes(
        this._getPrefixBytes(),
        this.hash.fullBytes,
        sizeBytes,
      );
    }

    return concatBytes(this._getPrefixBytes(), this.hash.fullBytes);
  }

  private _getPrefixBytes(): Uint8Array {
    return Uint8Array.from([this.type]);
  }

  toRegistryEntry(): Uint8Array {
    return concatBytes(Uint8Array.from([REGISTRY_TYPES.CID]), this.toBytes());
  }

  toRegistryCID(): Uint8Array {
    return this.copyWith({ type: CID_TYPES.RESOLVER }).toBytes();
  }

  toString(): string {
    return this.type === CID_TYPES.BRIDGE
      ? Buffer.from(this.hash.fullBytes).toString("utf8")
      : this.toBase58();
  }

  equals(other: CID): boolean {
    return equalBytes(this.toBytes(), other.toBytes());
  }

  hashCode(): number {
    const fullBytes = this.toBytes();
    return (
      fullBytes[0] +
      fullBytes[1] * 256 +
      fullBytes[2] * 256 * 256 +
      fullBytes[3] * 256 * 256 * 256
    );
  }
}
```
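To show how this class is typically exercised, here is a minimal, hedged sketch using only the methods in the file above. `CID` and the constant objects are re-exported from the package index shown later in this compare; the 33-byte digest and the 1024-byte size are placeholder values, not real data.

```ts
import { CID, CID_TYPES, CID_HASH_TYPES } from "@lumeweb/libs5";

// Placeholder multihash: 0x1f (BLAKE3) prefix byte followed by a zeroed 32-byte digest.
const hashBytes = new Uint8Array(33);
hashBytes[0] = CID_HASH_TYPES.BLAKE3;

// Build a raw-file CID from the multihash and a known size, then round-trip it
// through its multibase string form.
const cid = CID.fromHash(hashBytes, 1024, CID_TYPES.RAW);
const encoded = cid.toString();       // non-bridge CIDs stringify via toBase58()
const decoded = CID.decode(encoded);

console.log(decoded.equals(cid));     // should print true: type, hash and size survive the round trip
```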
@@ -0,0 +1,164 @@
```ts
export const CID_TYPES = {
  RAW: 0x26,
  METADATA_MEDIA: 0xc5,
  METADATA_WEBAPP: 0x59,
  RESOLVER: 0x25,
  USER_IDENTITY: 0x77,
  BRIDGE: 0x3a,
  DIRECTORY: 0x5d,
  // format for dynamic encrypted CID
  // type algo key resolver_type mkey_ed255 pubkey
  // in entry: encrypt(RAW CID or MEDIA or SOMETHING)

  /// Used for immutable encrypted files and metadata formats, key can never be re-used
  ///
  /// Used for file versions in Vup
  ENCRYPTED_STATIC: 0xae,
  ENCRYPTED_DYNAMIC: 0xad,
};
Object.freeze(CID_TYPES);

export const REGISTRY_TYPES = {
  CID: 0x5a,
  /// Used for encrypted files with update support
  ///
  /// can point to resolver CID, Stream CID, Directory Metadata or Media Metadata object
  ENCRYPTED_CID: 0x5e,
};
Object.freeze(REGISTRY_TYPES);

// ! some multicodec bytes
// BLAKE3 with default output size of 256 bits

export const CID_HASH_TYPES = {
  BLAKE3: 0x1f,
  ED25519: 0xed,
};
Object.freeze(CID_HASH_TYPES);

export const encryptionAlgorithmXChaCha20Poly1305 = 0xa6;
export const encryptionAlgorithmXChaCha20Poly1305NonceSize = 24;

export const contentPackFileHeader = Uint8Array.from([0x5f, 0x26, 0x73, 0x35]);

// ! metadata files

// used as the first byte of metadata files
export const metadataMagicByte = 0x5f;

export const METADATA_TYPES = {
  MEDIA: 0x02,
  WEBAPP: 0x03,
  DIRECTORY: 0x04,
  PROOF: 0x05,
  USER_IDENTITY: 0x07,
};
Object.freeze(METADATA_TYPES);

export const PARENT_LINK_TYPES = {
  USER_IDENTITY: 1,
  BOARD: 5,
  BRIDGE_USER: 10,
};
Object.freeze(PARENT_LINK_TYPES);

export const registryMaxDataSize = 64;

export const parentLinkTypeUserIdentity = 1;
export const parentLinkTypeBoard = 5;
export const parentLinkTypeBridgeUser = 10;

// ! user identity

export const authPayloadVersion1 = 0x01;

export const userIdentityLinkProfile = 0x00;
export const userIdentityLinkPublicFileSystem = 0x01;

// ! p2p protocol message types

export const protocolMethodHandshakeOpen = 1;
export const protocolMethodHandshakeDone = 2;

export const protocolMethodSignedMessage = 10;

export const protocolMethodHashQuery = 4;
export const protocolMethodAnnouncePeers = 8;
export const protocolMethodRegistryQuery = 13;

export const recordTypeStorageLocation = 0x05; // cache
export const recordTypeRegistryEntry = 0x07; // permanent
export const recordTypeStreamEvent = 0x09; // temporary, delete after time X (like storage locations)

// ! Some optional metadata extensions (same for files, media files and directories)

// List<String>, license identifier from https://spdx.org/licenses/
export const metadataExtensionLicenses = 11;

// List<Uint8List>, multicoded pubkey that references a registry entry that contains donation links and addresses
export const metadataExtensionDonationKeys = 12;

// map string->map, external ids of this object by their wikidata property id.
export const metadataExtensionWikidataClaims = 13;

// List<String>, for example [en, de, de-DE]
export const metadataExtensionLanguages = 14;

// List<String>,
export const metadataExtensionSourceUris = 15;

// Resolver CID, can be used to update this post. can also be used to "delete" a post.
export const metadataExtensionUpdateCID = 16;

// List<CID>, lists previous versions of this post
export const metadataExtensionPreviousVersions = 17;

// unix timestamp in milliseconds
export const metadataExtensionTimestamp = 18;

export const metadataExtensionTags = 19;
export const metadataExtensionCategories = 20;

// video, podcast, book, audio, music, ...
export const metadataExtensionViewTypes = 21;

export const metadataExtensionBasicMediaMetadata = 22;

export const metadataExtensionBridge = 23;

export const metadataExtensionOriginalTimestamp = 24;

// List<Uint8List>
export const metadataExtensionRoutingHints = 25;

// TODO comment to / reply to (use parents)
// TODO mentions (use new extension field)
// TODO Reposts (just link the original item)

// ! media details
export const metadataMediaDetailsDuration = 10;
export const metadataMediaDetailsIsLive = 11;

// ! metadata proofs
export const metadataProofTypeSignature = 1;
export const metadataProofTypeTimestamp = 2;

// ! storage locations
export const storageLocationTypeArchive = 0;
export const storageLocationTypeFile = 3;
export const storageLocationTypeFull = 5;
export const storageLocationTypeBridge = 7;
export const supportedFeatures = 3;

export const hiddenDBTweak = 66;

export const pathKeyDerivationTweak = 1;
export const writeKeyDerivationTweak = 2;
export const encryptionKeyDerivationTweak = 3;
export const encryptionKeyLength = 32;
export const encryptionNonceLength = 24;
export const encryptionOverheadLength = 16;

export const BOOTSTRAP_NODES: string[] = [
  "wss://z2DWuWNZcdSyZLpXFK2uCU3haaWMXrDAgxzv17sDEMHstZb@s5.garden/s5/p2p",
  "wss://z2DWuPbL5pweybXnEB618pMnV58ECj2VPDNfVGm3tFqBvjF@s5.ninja/s5/p2p",
];
```
@@ -0,0 +1,26 @@
```ts
import { ed25519 } from "@noble/curves/ed25519";
import { concatBytes } from "@noble/curves/abstract/utils";
import { CID_HASH_TYPES } from "./constants.js";

export default class KeyPairEd25519 {
  private _bytes: Uint8Array;

  constructor(bytes: Uint8Array) {
    this._bytes = bytes;
  }

  public get publicKey(): Uint8Array {
    return concatBytes(
      Uint8Array.from([CID_HASH_TYPES.ED25519]),
      this.publicKeyRaw,
    );
  }

  public get publicKeyRaw(): Uint8Array {
    return ed25519.getPublicKey(this._bytes);
  }

  public extractBytes(): Uint8Array {
    return this._bytes;
  }
}
```
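A short sketch of how the wrapper above behaves, written from inside the library's own module layout (the relative import path is the file shown above; the private key is freshly generated, not a real identity):

```ts
import { ed25519 } from "@noble/curves/ed25519";
import KeyPairEd25519 from "./ed25519.js";

// Wrap a randomly generated ed25519 private key.
const keyPair = new KeyPairEd25519(ed25519.utils.randomPrivateKey());

// publicKeyRaw is the plain 32-byte ed25519 public key;
// publicKey prepends the 0xed multicodec byte (CID_HASH_TYPES.ED25519), giving 33 bytes.
console.log(keyPair.publicKeyRaw.length);        // 32
console.log(keyPair.publicKey.length);           // 33
console.log(keyPair.publicKey[0].toString(16));  // "ed"
```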
@@ -0,0 +1,67 @@
```ts
import Multibase from "#multibase.js";
import { Multihash } from "#multihash.js";
import CID from "#cid.js";
import { CID_TYPES } from "#constants.js";
import { decodeEndian, encodeEndian } from "#util.js";

export default class EncryptedCID extends Multibase {
  encryptedBlobHash: Multihash;
  originalCID: CID;
  encryptionAlgorithm: number;
  padding: number;
  chunkSizeAsPowerOf2: number;
  encryptionKey: Uint8Array;

  constructor(
    encryptedBlobHash: Multihash,
    originalCID: CID,
    encryptionKey: Uint8Array,
    padding: number,
    chunkSizeAsPowerOf2: number,
    encryptionAlgorithm: number,
  ) {
    super();
    this.encryptedBlobHash = encryptedBlobHash;
    this.originalCID = originalCID;
    this.encryptionKey = encryptionKey;
    this.padding = padding;
    this.chunkSizeAsPowerOf2 = chunkSizeAsPowerOf2;
    this.encryptionAlgorithm = encryptionAlgorithm;
  }

  static decode(cid: string): EncryptedCID {
    return EncryptedCID.fromBytes(Multibase.decodeString(cid));
  }

  static fromBytes(bytes: Uint8Array): EncryptedCID {
    if (bytes[0] !== CID_TYPES.ENCRYPTED_DYNAMIC) {
      throw new Error(`Invalid CID type (${bytes[0]})`);
    }

    return new EncryptedCID(
      new Multihash(bytes.slice(3, 36)),
      CID.fromBytes(bytes.slice(72)),
      bytes.slice(36, 68),
      decodeEndian(bytes.slice(68, 72)),
      bytes[2],
      bytes[1],
    );
  }

  get chunkSize(): number {
    return Math.pow(2, this.chunkSizeAsPowerOf2);
  }

  toBytes(): Uint8Array {
    const data = [
      CID_TYPES.ENCRYPTED_STATIC,
      this.encryptionAlgorithm,
      this.chunkSizeAsPowerOf2,
      ...this.encryptedBlobHash.fullBytes,
      ...this.encryptionKey,
      ...encodeEndian(this.padding, 4),
      ...this.originalCID.toBytes(),
    ];
    return new Uint8Array(data);
  }
}
```
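A hedged construction sketch for the class above, using only its constructor and the constants from this diff; the digests and key are zeroed placeholders, and the `#encrypted_cid.js` import specifier is assumed rather than taken from the package's export map.

```ts
import { Multihash } from "#multihash.js";
import CID from "#cid.js";
import EncryptedCID from "#encrypted_cid.js"; // assumed specifier for the class defined above
import {
  CID_HASH_TYPES,
  CID_TYPES,
  encryptionAlgorithmXChaCha20Poly1305,
  encryptionKeyLength,
} from "#constants.js";

// Placeholder 33-byte multihash: 0x1f BLAKE3 prefix + zeroed digest (illustration only).
const digest = (): Uint8Array => {
  const b = new Uint8Array(33);
  b[0] = CID_HASH_TYPES.BLAKE3;
  return b;
};

const plaintextCid = CID.fromHash(digest(), 4096, CID_TYPES.RAW);

const encrypted = new EncryptedCID(
  new Multihash(digest()),              // hash of the encrypted blob
  plaintextCid,                         // CID of the original, unencrypted data
  new Uint8Array(encryptionKeyLength),  // placeholder key; use a real random key in practice
  0,                                    // padding bytes added before encryption
  18,                                   // chunkSizeAsPowerOf2 -> 2^18-byte chunks
  encryptionAlgorithmXChaCha20Poly1305,
);

console.log(encrypted.chunkSize);       // 262144
// toBytes() lays out: type byte, algorithm, chunk-size exponent, blob hash, key, padding, inner CID.
console.log(encrypted.toBytes().length);
```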
@@ -0,0 +1,42 @@
```ts
import { S5Node } from "#node.js";
import type { SignedRegistryEntry } from "./types.js";
import { ed25519 } from "@noble/curves/ed25519";
import KeyPairEd25519 from "./ed25519.js";
import { S5NodeConfig } from "./types.js";
import CID from "./cid.js";

export * from "./types.js";
export * from "./constants.js";
export * from "./util.js";
import WebAppMetadata, {
  WebAppMetadataFileReference,
  deserialize as deserializeWebAppMetadata,
} from "./serialization/metadata/webapp.js";
export type { S5Node } from "./node.js";
export {
  createTransportSocket,
  isTransport,
  createTransportPeer,
  BasePeer,
} from "./transports/index.js";
export type { SignedRegistryEntry, KeyPairEd25519 };

import Packer from "./serialization/pack.js";
import Unpacker from "./serialization/unpack.js";

export {
  Packer,
  Unpacker,
  CID,
  WebAppMetadata,
  WebAppMetadataFileReference,
  deserializeWebAppMetadata,
};

export function createNode(config: S5NodeConfig) {
  return new S5Node(config);
}

export function createKeyPair(privateKey?: Uint8Array) {
  return new KeyPairEd25519(privateKey ?? ed25519.utils.randomPrivateKey());
}
```
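A minimal consumer-side sketch of the two factory functions exported above. Only behavior visible in this diff is assumed; the shape of `S5NodeConfig` lives in `types.ts`, which is not part of this compare, so node construction is only indicated in a comment.

```ts
import { createKeyPair, CID_HASH_TYPES } from "@lumeweb/libs5";

// With no argument the factory generates a random ed25519 private key;
// passing your own 32-byte seed makes the key pair deterministic.
const keyPair = createKeyPair();

console.log(keyPair.publicKey.length);                        // 33
console.log(keyPair.publicKey[0] === CID_HASH_TYPES.ED25519); // true: multicodec-prefixed public key

// createNode(config) simply wraps `new S5Node(config)`; see types.ts for S5NodeConfig.
// const node = createNode(config);
```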
@@ -0,0 +1,34 @@
```ts
import Packer from "#serialization/pack.js";
import { protocolMethodHandshakeDone, supportedFeatures } from "#constants.js";
import { S5Node } from "#node.js";
import { Peer } from "#types.js";
import Unpacker from "#serialization/unpack.js";

export default async function (
  node: S5Node,
  peer: Peer,
  data: Unpacker,
  rawData: Uint8Array,
) {
  const p2p = node.services.p2p;
  const p = new Packer();
  p.packInt(protocolMethodHandshakeDone);
  p.packBinary(data.unpackBinary());
  let peerNetworkId: string | null = null;
  try {
    peerNetworkId = data.unpackString();
  } catch {}

  if (node.services.p2p.networkId && peerNetworkId !== p2p.networkId) {
    throw `Peer is in different network: ${peerNetworkId}`;
  }

  p.packInt(supportedFeatures);
  p.packInt(p2p.selfConnectionUris.length);
  for (const uri of p2p.selfConnectionUris) {
    p.packString(uri.toString());
  }
  // TODO Protocol version
  // p.packInt(protocolVersion);
  peer.sendMessage(await p2p.signMessageSimple(p.takeBytes()));
}
```
@@ -0,0 +1,48 @@
```ts
import { S5Node } from "#node.js";
import { Peer } from "#types.js";
import Unpacker from "#serialization/unpack.js";
import { Multihash } from "#multihash.js";

export default async function (
  node: S5Node,
  peer: Peer,
  data: Unpacker,
  rawData: Uint8Array,
) {
  const hash = new Multihash(data.unpackBinary());
  const types = data.unpackList().map((item) => Number(item));

  try {
    const map = await node.getCachedStorageLocations(hash, types);

    if (Object.keys(map).length > 0) {
      const availableNodes = [...map.keys()];
      node.services.p2p.sortNodesByScore(availableNodes);

      const entry = map.get(availableNodes[0]);

      peer.sendMessage(entry?.providerMessage as Uint8Array);
      return;
    }
  } catch (e) {
    node.logger.catched(e);
  }

  const hashCode = hash.hashCode;

  if (node.hashQueryRoutingTable.has(hashCode)) {
    if (!node.hashQueryRoutingTable.get(hashCode)?.has(peer.id)) {
      node.hashQueryRoutingTable.get(hashCode)?.add(peer.id);
    }
    return;
  }

  node.hashQueryRoutingTable.set(hashCode, new Set([peer.id]));
  for (const p of node.services.p2p.peers.values()) {
    if (p.id !== peer.id) {
      p.sendMessage(rawData);
    }
  }

  return;
}
```
@@ -0,0 +1,30 @@
```ts
import { P2PMessageHandler } from "#types.js";
import handshakeOpen from "#messages/handshakeOpen.js";
import {
  protocolMethodHandshakeOpen,
  protocolMethodHashQuery,
  protocolMethodRegistryQuery,
  protocolMethodSignedMessage,
  recordTypeRegistryEntry,
  recordTypeStorageLocation,
} from "#constants.js";
import registryQuery from "#messages/registryQuery.js";
import registryEntry from "#messages/registryEntry.js";
import storageLocation from "#messages/storageLocation.js";
import signedMessage from "#messages/signedMessage.js";
import hashQuery from "#messages/hashQuery.js";

const messages = new Map<number, P2PMessageHandler>(
  Object.entries({
    [protocolMethodHandshakeOpen]: handshakeOpen,
    [protocolMethodRegistryQuery]: registryQuery,
    [recordTypeRegistryEntry]: registryEntry,
    [recordTypeStorageLocation]: storageLocation,
    [protocolMethodSignedMessage]: signedMessage,
    [protocolMethodHashQuery]: hashQuery,
  }).map(([key, value]) => [Number(key), value]),
);

Object.freeze(messages);

export default messages;
```
@ -0,0 +1,13 @@
|
|||
import { S5Node } from "#node.js";
|
||||
import { Peer } from "#types.js";
|
||||
import Unpacker from "#serialization/unpack.js";
|
||||
|
||||
export default async function (
|
||||
node: S5Node,
|
||||
peer: Peer,
|
||||
data: Unpacker,
|
||||
rawData: Uint8Array,
|
||||
) {
|
||||
const sre = node.services.registry.deserializeRegistryEntry(rawData);
|
||||
await node.services.registry.set(sre, false, peer);
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
import { S5Node } from "#node.js";
|
||||
import { Peer } from "#types.js";
|
||||
import Unpacker from "#serialization/unpack.js";
|
||||
|
||||
export default async function (
|
||||
node: S5Node,
|
||||
peer: Peer,
|
||||
data: Unpacker,
|
||||
rawData: Uint8Array,
|
||||
) {
|
||||
const pk = data.unpackBinary();
|
||||
const sre = await node.services.registry.getFromDB(pk);
|
||||
if (sre !== null) {
|
||||
peer.sendMessage(node.services.registry.serializeRegistryEntry(sre));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
import { S5Node } from "#node.js";
|
||||
import { Peer } from "#types.js";
|
||||
import Unpacker from "#serialization/unpack.js";
|
||||
import messages from "#messages/signedMessages/index.js";
|
||||
|
||||
export default async function (
|
||||
node: S5Node,
|
||||
peer: Peer,
|
||||
data: Unpacker,
|
||||
rawData: Uint8Array,
|
||||
verifyId = true,
|
||||
) {
|
||||
const sm = await node.services.p2p.unpackAndVerifySignature(data);
|
||||
const u = Unpacker.fromPacked(sm.message);
|
||||
const method = u.unpackInt();
|
||||
|
||||
if (method !== null && messages.has(method)) {
|
||||
await messages.get(method)?.(node, peer, u, sm, verifyId);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
import { S5Node } from "#node.js";
|
||||
import { Peer, SignedMessage } from "#types.js";
|
||||
import Unpacker from "#serialization/unpack.js";
|
||||
import { equalBytes } from "@noble/curves/abstract/utils";
|
||||
import { URL } from "url";
|
||||
import NodeId from "#nodeId.js";
|
||||
|
||||
export default async function (
|
||||
node: S5Node,
|
||||
peer: Peer,
|
||||
data: Unpacker,
|
||||
message: SignedMessage,
|
||||
verifyId: boolean,
|
||||
) {
|
||||
const length = data.unpackInt() as number;
|
||||
for (let i = 0; i < length; i++) {
|
||||
const p2p = node.services.p2p;
|
||||
const peerIdBinary = data.unpackBinary();
|
||||
const id = new NodeId(peerIdBinary);
|
||||
|
||||
const isConnected = data.unpackBool() as boolean;
|
||||
|
||||
const connectionUrisCount = data.unpackInt() as number;
|
||||
|
||||
const connectionUris: URL[] = [];
|
||||
|
||||
for (let i = 0; i < connectionUrisCount; i++) {
|
||||
connectionUris.push(new URL(data.unpackString() as string));
|
||||
}
|
||||
|
||||
if (connectionUris.length > 0) {
|
||||
// TODO Fully support multiple connection uris
|
||||
const uri = new URL(connectionUris[0].toString());
|
||||
uri.username = id.toBase58();
|
||||
if (!p2p.reconnectDelay.has(NodeId.decode(uri.username).toString())) {
|
||||
p2p.connectToNode([uri]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,68 @@
|
|||
import { S5Node } from "#node.js";
|
||||
import { Peer, SignedMessage } from "#types.js";
|
||||
import Unpacker from "#serialization/unpack.js";
|
||||
import { equalBytes } from "@noble/curves/abstract/utils";
|
||||
import { URL } from "url";
|
||||
|
||||
export default async function (
|
||||
node: S5Node,
|
||||
peer: Peer,
|
||||
data: Unpacker,
|
||||
message: SignedMessage,
|
||||
verifyId: boolean,
|
||||
) {
|
||||
if (!node.started) {
|
||||
peer.end();
|
||||
return;
|
||||
}
|
||||
const p2p = node.services.p2p;
|
||||
const challenge = data.unpackBinary();
|
||||
|
||||
if (!equalBytes(peer.challenge, challenge)) {
|
||||
throw "Invalid challenge";
|
||||
}
|
||||
|
||||
const pId = message.nodeId;
|
||||
|
||||
if (!verifyId) {
|
||||
peer.id = pId;
|
||||
} else {
|
||||
if (!peer.id.equals(pId)) {
|
||||
throw "Invalid transports id on initial list";
|
||||
}
|
||||
}
|
||||
|
||||
peer.isConnected = true;
|
||||
|
||||
const supportedFeatures = data.unpackInt();
|
||||
|
||||
if (supportedFeatures !== 3) {
|
||||
throw "Remote node does not support required features";
|
||||
}
|
||||
|
||||
p2p.peers.set(peer.id.toString(), peer);
|
||||
p2p.reconnectDelay.set(peer.id.toString(), 1);
|
||||
|
||||
const connectionUrisCount = data.unpackInt() as number;
|
||||
|
||||
peer.connectionUris = [];
|
||||
for (let i = 0; i < connectionUrisCount; i++) {
|
||||
peer.connectionUris.push(new URL(data.unpackString() as string));
|
||||
}
|
||||
|
||||
node.logger.info(
|
||||
`[+] ${peer.id.toString()} (${peer.renderLocationUri().toString()})`,
|
||||
);
|
||||
|
||||
p2p.sendPublicPeersToPeer(peer, Array.from(p2p.peers.values()));
|
||||
for (const p of p2p.peers.values()) {
|
||||
if (p.id.equals(peer.id)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (p.isConnected) {
|
||||
p2p.sendPublicPeersToPeer(p, [peer]);
|
||||
}
|
||||
}
|
||||
p2p.emit("peerConnected", peer);
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
import { P2PSignedMessageHandler } from "#types.js";
|
||||
import handshakeDone from "#messages/signedMessages/handshakeDone.js";
|
||||
import {
|
||||
protocolMethodAnnouncePeers,
|
||||
protocolMethodHandshakeDone,
|
||||
} from "#constants.js";
|
||||
import announcePeers from "#messages/signedMessages/announcePeers.js";
|
||||
|
||||
const messages = new Map<number, P2PSignedMessageHandler>(
|
||||
Object.entries({
|
||||
[protocolMethodHandshakeDone]: handshakeDone,
|
||||
[protocolMethodAnnouncePeers]: announcePeers,
|
||||
}).map(([key, value]) => [Number(key), value]),
|
||||
);
|
||||
|
||||
Object.freeze(messages);
|
||||
|
||||
export default messages;
|
|
@ -0,0 +1,79 @@
|
|||
import { S5Node } from "#node.js";
|
||||
import { Peer } from "#types.js";
|
||||
import Unpacker from "#serialization/unpack.js";
|
||||
import { Multihash } from "#multihash.js";
|
||||
import { decodeEndian } from "#util.js";
|
||||
import { ed25519 } from "@noble/curves/ed25519";
|
||||
import NodeId from "#nodeId.js";
|
||||
import StorageLocation from "#storage.js";
|
||||
import { CID_HASH_TYPES } from "#constants.js";
|
||||
|
||||
export default async function (
|
||||
node: S5Node,
|
||||
peer: Peer,
|
||||
data: Unpacker,
|
||||
rawData: Uint8Array,
|
||||
) {
|
||||
const p2p = node.services.p2p;
|
||||
const hash = new Multihash(rawData.subarray(1, 34));
|
||||
const type = rawData[34];
|
||||
const expiry = decodeEndian(rawData.subarray(35, 39));
|
||||
const partCount = rawData[39];
|
||||
const parts: string[] = [];
|
||||
let cursor = 40;
|
||||
for (let i = 0; i < partCount; i++) {
|
||||
const length = decodeEndian(rawData.subarray(cursor, cursor + 2));
|
||||
cursor += 2;
|
||||
parts.push(
|
||||
new TextDecoder().decode(rawData.subarray(cursor, cursor + length)),
|
||||
);
|
||||
cursor += length;
|
||||
}
|
||||
cursor++;
|
||||
|
||||
const publicKey = rawData.subarray(cursor, cursor + 33);
|
||||
const signature = rawData.subarray(cursor + 33);
|
||||
|
||||
if (publicKey[0] !== CID_HASH_TYPES.ED25519) {
|
||||
throw `Unsupported public key type ${publicKey[0]}`;
|
||||
}
|
||||
|
||||
if (
|
||||
!ed25519.verify(
|
||||
signature,
|
||||
rawData.subarray(0, cursor),
|
||||
publicKey.subarray(1),
|
||||
)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const nodeId = new NodeId(publicKey);
|
||||
await node.addStorageLocation({
|
||||
hash,
|
||||
nodeId,
|
||||
location: new StorageLocation(type, parts, expiry),
|
||||
message: rawData,
|
||||
config: node.config,
|
||||
});
|
||||
|
||||
const list =
|
||||
node.hashQueryRoutingTable.get(hash.hashCode) || new Set<NodeId>();
|
||||
for (const peerId of list) {
|
||||
if (peerId.equals(nodeId)) {
|
||||
continue;
|
||||
}
|
||||
if (peerId.equals(peer.id)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (p2p.peers.has(peerId.toString())) {
|
||||
try {
|
||||
p2p.peers.get(peerId.toString())?.sendMessage(rawData);
|
||||
} catch (e) {
|
||||
node.logger.catched(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
node.hashQueryRoutingTable.delete(hash.hashCode);
|
||||
}
|
|
@ -0,0 +1,51 @@
|
|||
import { base58btc } from "multiformats/bases/base58";
|
||||
import { bytesToHex, hexToBytes, utf8ToBytes } from "@noble/hashes/utils";
|
||||
import { base32 } from "multiformats/bases/base32";
|
||||
import { base64, base64urlpad } from "multiformats/bases/base64";
|
||||
|
||||
export default abstract class Multibase {
|
||||
abstract toBytes(): Uint8Array;
|
||||
|
||||
static decodeString(data: string): Uint8Array {
|
||||
let bytes: Uint8Array;
|
||||
if (data[0] === "z") {
|
||||
bytes = base58btc.decode(data);
|
||||
} else if (data[0] === "f") {
|
||||
bytes = Uint8Array.from(hexToBytes(data.substring(1)));
|
||||
} else if (data[0] === "b") {
|
||||
let str = data;
|
||||
while (str.length % 4 !== 0) {
|
||||
str += "=";
|
||||
}
|
||||
bytes = base32.decode(str);
|
||||
} else if (data[0] === "u") {
|
||||
bytes = base64urlpad.decode(data[0].toUpperCase() + data.substring(1));
|
||||
} else if (data[0] === ":") {
|
||||
bytes = utf8ToBytes(data);
|
||||
} else {
|
||||
throw new Error(`Multibase encoding ${data[0]} not supported`);
|
||||
}
|
||||
|
||||
return bytes;
|
||||
}
|
||||
|
||||
toHex(): string {
|
||||
return `f${bytesToHex(this.toBytes())}`;
|
||||
}
|
||||
|
||||
toBase32(): string {
|
||||
return `${base32.encode(this.toBytes()).replace(/=/g, "").toLowerCase()}`;
|
||||
}
|
||||
|
||||
toBase64Url(): string {
|
||||
return `u${base64urlpad.encode(this.toBytes()).substring(1)}`;
|
||||
}
|
||||
|
||||
toBase58(): string {
|
||||
return base58btc.encode(this.toBytes());
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.toBase58();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,65 @@
|
|||
import { base64url } from "multiformats/bases/base64";
|
||||
import { base32 } from "multiformats/bases/base32";
|
||||
import { equalBytes } from "@noble/curves/abstract/utils";
|
||||
import { CID_TYPES } from "#constants.js";
|
||||
|
||||
export class Multihash {
|
||||
fullBytes: Uint8Array;
|
||||
|
||||
constructor(fullBytes: Uint8Array) {
|
||||
this.fullBytes = fullBytes;
|
||||
}
|
||||
|
||||
get functionType(): number {
|
||||
return this.fullBytes[0];
|
||||
}
|
||||
|
||||
get hashBytes(): Uint8Array {
|
||||
return this.fullBytes.subarray(1);
|
||||
}
|
||||
|
||||
static fromBase64Url(hash: string): Multihash {
|
||||
while (hash.length % 4 !== 0) {
|
||||
hash += "=";
|
||||
}
|
||||
if (hash[0] !== "u") {
|
||||
hash = "u" + hash;
|
||||
}
|
||||
const bytes = base64url.decode(hash);
|
||||
return new Multihash(new Uint8Array(bytes));
|
||||
}
|
||||
|
||||
toBase64Url(): string {
|
||||
return base64url.encode(this.fullBytes).substring(1);
|
||||
}
|
||||
|
||||
toBase32(): string {
|
||||
return base32
|
||||
.encode(this.fullBytes)
|
||||
.replace(/=/g, "")
|
||||
.toLowerCase()
|
||||
.substring(1);
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.functionType === CID_TYPES.BRIDGE
|
||||
? new TextDecoder().decode(this.fullBytes)
|
||||
: this.toBase64Url();
|
||||
}
|
||||
|
||||
equals(other: any): boolean {
|
||||
if (!(other instanceof Multihash)) {
|
||||
return false;
|
||||
}
|
||||
return equalBytes(this.fullBytes, other.fullBytes);
|
||||
}
|
||||
|
||||
get hashCode(): number {
|
||||
return (
|
||||
this.fullBytes[0] +
|
||||
this.fullBytes[1] * 256 +
|
||||
this.fullBytes[2] * 256 * 256 +
|
||||
this.fullBytes[3] * 256 * 256 * 256
|
||||
);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,290 @@
|
|||
import { Multihash } from "./multihash.js";
|
||||
import NodeId from "./nodeId.js";
|
||||
import { Logger, S5Config, S5NodeConfig, S5Services } from "./types.js";
|
||||
import Unpacker from "./serialization/unpack.js";
|
||||
import Packer from "./serialization/pack.js";
|
||||
import StorageLocation, { StorageLocationProvider } from "./storage.js";
|
||||
import { AbstractLevel } from "abstract-level";
|
||||
import { P2PService } from "#service/p2p.js";
|
||||
import { RegistryService } from "#service/registry.js";
|
||||
import {
|
||||
CID_TYPES,
|
||||
storageLocationTypeFile,
|
||||
storageLocationTypeFull,
|
||||
} from "#constants.js";
|
||||
import axios from "axios";
|
||||
import { equalBytes } from "@noble/curves/abstract/utils";
|
||||
import { blake3 } from "@noble/hashes/blake3";
|
||||
import CID from "#cid.js";
|
||||
import type Metadata from "#serialization/metadata/base.js";
|
||||
import { deserialize as deserializeMediaMetadata } from "#serialization/metadata/media.js";
|
||||
import { deserialize as deserializeWebAppMetadata } from "#serialization/metadata/webapp.js";
|
||||
const DEFAULT_LOGGER = {
|
||||
info(s: any) {
|
||||
console.info(s);
|
||||
},
|
||||
verbose(s: any) {
|
||||
console.log(s);
|
||||
},
|
||||
warn(s: any) {
|
||||
console.warn(s);
|
||||
},
|
||||
error(s: any) {
|
||||
console.error(s);
|
||||
},
|
||||
catched(e: any, context?: string | null) {
|
||||
console.error(e, context);
|
||||
},
|
||||
};
|
||||
|
||||
export class S5Node {
|
||||
private _nodeConfig: S5NodeConfig;
|
||||
private metadataCache: Map<Multihash, Metadata> = new Map<
|
||||
Multihash,
|
||||
Metadata
|
||||
>();
|
||||
|
||||
constructor(config: S5NodeConfig) {
|
||||
this._nodeConfig = config;
|
||||
}
|
||||
|
||||
private _started = false;
|
||||
|
||||
private _hashQueryRoutingTable: Map<number, Set<NodeId>> = new Map();
|
||||
|
||||
get hashQueryRoutingTable(): Map<number, Set<NodeId>> {
|
||||
return this._hashQueryRoutingTable;
|
||||
}
|
||||
|
||||
get started(): boolean {
|
||||
return this._started;
|
||||
}
|
||||
|
||||
private _config?: S5Config;
|
||||
|
||||
get config() {
|
||||
return this._config as S5Config;
|
||||
}
|
||||
|
||||
get services() {
|
||||
return this._config?.services as S5Services;
|
||||
}
|
||||
|
||||
get db() {
|
||||
return this._config?.db as AbstractLevel<Uint8Array, string, Uint8Array>;
|
||||
}
|
||||
|
||||
get logger() {
|
||||
return this._config?.logger as Logger;
|
||||
}
|
||||
|
||||
public async start() {
|
||||
this._config = {
|
||||
keyPair: this._nodeConfig.keyPair,
|
||||
db: this._nodeConfig.db,
|
||||
logger: this._nodeConfig.logger ?? DEFAULT_LOGGER,
|
||||
cacheDb: this._nodeConfig.db.sublevel("s5-object-cache", {
|
||||
valueEncoding: "buffer",
|
||||
}),
|
||||
services: {} as any,
|
||||
p2p: this._nodeConfig.p2p,
|
||||
};
|
||||
|
||||
this._started = true;
|
||||
|
||||
const p2p = new P2PService(this);
|
||||
const registry = new RegistryService(this);
|
||||
|
||||
await p2p.init();
|
||||
await registry.init();
|
||||
await p2p.start();
|
||||
}
|
||||
|
||||
public async stop() {
|
||||
this._started = false;
|
||||
await this.services.p2p.stop();
|
||||
}
|
||||
|
||||
async getCachedStorageLocations(
|
||||
hash: Multihash,
|
||||
types: number[],
|
||||
): Promise<Map<NodeId, StorageLocation>> {
|
||||
const locations = new Map<NodeId, StorageLocation>();
|
||||
|
||||
const map = await this.readStorageLocationsFromDB(hash); // Assuming this method exists and returns a Map or similar structure
|
||||
if (map.size === 0) {
|
||||
return new Map();
|
||||
}
|
||||
|
||||
const ts = Math.floor(Date.now() / 1000);
|
||||
|
||||
types.forEach((type) => {
|
||||
if (!map.has(type)) return;
|
||||
|
||||
map.get(type)!.forEach((value, key) => {
|
||||
if (value.get(3) >= ts) {
|
||||
const storageLocation = new StorageLocation(
|
||||
type,
|
||||
value.get(1).map((v: string) => v), // Assuming value[1] is an array of strings
|
||||
value.get(3),
|
||||
);
|
||||
|
||||
// Assuming providerMessage is a property of StorageLocation
|
||||
storageLocation.providerMessage = value[4];
|
||||
locations.set(key, storageLocation);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return locations;
|
||||
}
|
||||
|
||||
async readStorageLocationsFromDB(
|
||||
hash: Multihash,
|
||||
): Promise<Map<number, Map<NodeId, Map<number, any>>>> {
|
||||
const map = new Map<number, Map<NodeId, Map<number, any>>>();
|
||||
let bytes;
|
||||
try {
|
||||
bytes = await this.config.cacheDb.get(stringifyHash(hash));
|
||||
} catch {
|
||||
return map;
|
||||
}
|
||||
if (bytes === null) {
|
||||
return map;
|
||||
}
|
||||
const unpacker = Unpacker.fromPacked(bytes);
|
||||
const mapLength = unpacker.unpackMapLength();
|
||||
for (let i = 0; i < mapLength; i++) {
|
||||
const type = unpacker.unpackInt() as number;
|
||||
const innerMap = new Map<NodeId, Map<number, any>>();
|
||||
map.set(type, innerMap);
|
||||
const innerMapLength = unpacker.unpackMapLength();
|
||||
for (let j = 0; j < innerMapLength; j++) {
|
||||
const nodeId = new NodeId(unpacker.unpackBinary());
|
||||
innerMap.set(
|
||||
nodeId,
|
||||
new Map(
|
||||
Object.entries(unpacker.unpackMap()).map(([key, value]) => [
|
||||
Number(key),
|
||||
value,
|
||||
]),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
async addStorageLocation({
|
||||
hash,
|
||||
nodeId,
|
||||
location,
|
||||
message,
|
||||
config,
|
||||
}: {
|
||||
hash: Multihash;
|
||||
nodeId: NodeId;
|
||||
location: StorageLocation;
|
||||
message?: Uint8Array;
|
||||
config: S5Config;
|
||||
}) {
|
||||
const map = await this.readStorageLocationsFromDB(hash);
|
||||
const innerMap =
|
||||
map.get(location.type) || new Map<NodeId, Map<number, any>>();
|
||||
map.set(location.type, innerMap);
|
||||
|
||||
const locationMap = new Map<number, any>([
|
||||
[1, location.parts],
|
||||
// [2, location.binaryParts],
|
||||
[3, location.expiry],
|
||||
[4, message],
|
||||
]);
|
||||
|
||||
innerMap.set(nodeId, locationMap);
|
||||
await config.cacheDb.put(
|
||||
stringifyHash(hash),
|
||||
new Packer().pack(map).takeBytes(),
|
||||
);
|
||||
}
|
||||
|
||||
async downloadBytesByHash(hash: Multihash): Promise<Uint8Array> {
|
||||
const dlUriProvider = new StorageLocationProvider(this, hash, [
|
||||
storageLocationTypeFull,
|
||||
storageLocationTypeFile,
|
||||
]);
|
||||
|
||||
dlUriProvider.start();
|
||||
|
||||
let retryCount = 0;
|
||||
while (true) {
|
||||
const dlUri = await dlUriProvider.next();
|
||||
|
||||
this.logger.verbose(`[try] ${dlUri.location.bytesUrl}`);
|
||||
|
||||
try {
|
||||
const res = await axios.get(dlUri.location.bytesUrl, {
|
||||
timeout: 30000, // Adjust timeout as needed
|
||||
responseType: "arraybuffer",
|
||||
});
|
||||
|
||||
// Assuming rust.hashBlake3 and areBytesEqual are available functions
|
||||
const resHash = blake3(Buffer.from(res.data));
|
||||
|
||||
if (!equalBytes(hash.hashBytes, resHash)) {
|
||||
throw new Error("Integrity verification failed");
|
||||
}
|
||||
|
||||
dlUriProvider.upvote(dlUri);
|
||||
return res.data;
|
||||
} catch (error) {
|
||||
this.logger.catched(error);
|
||||
|
||||
dlUriProvider.downvote(dlUri);
|
||||
}
|
||||
|
||||
retryCount++;
|
||||
if (retryCount > 32) {
|
||||
throw new Error("Too many retries");
|
||||
}
|
||||
}
|
||||
}
|
||||
async getMetadataByCID(cid: CID): Promise<Metadata> {
|
||||
const hash = cid.hash;
|
||||
|
||||
let metadata: Metadata;
|
||||
|
||||
if (this.metadataCache.has(hash)) {
|
||||
metadata = this.metadataCache.get(hash)!;
|
||||
} else {
|
||||
const bytes = await this.downloadBytesByHash(hash);
|
||||
|
||||
switch (cid.type) {
|
||||
case CID_TYPES.METADATA_MEDIA:
|
||||
metadata = await deserializeMediaMetadata(bytes);
|
||||
break;
|
||||
case CID_TYPES.METADATA_WEBAPP:
|
||||
metadata = await deserializeWebAppMetadata(bytes);
|
||||
break;
|
||||
case CID_TYPES.BRIDGE:
|
||||
metadata = await deserializeMediaMetadata(bytes);
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unsupported metadata format");
|
||||
}
|
||||
|
||||
this.metadataCache.set(hash, metadata);
|
||||
}
|
||||
return metadata;
|
||||
}
|
||||
}
|
||||
|
||||
export function stringifyBytes(data: Uint8Array) {
|
||||
return String.fromCharCode(...data);
|
||||
}
|
||||
|
||||
function stringifyHash(hash: Multihash) {
|
||||
return stringifyBytes(hash.fullBytes);
|
||||
}
|
||||
export function stringifyNode(node: NodeId) {
|
||||
return stringifyBytes(node.bytes);
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
import { base58btc } from "multiformats/bases/base58";
|
||||
import { equalBytes } from "@noble/curves/abstract/utils";
|
||||
|
||||
export default class NodeId {
|
||||
bytes: Uint8Array;
|
||||
|
||||
constructor(bytes: Uint8Array) {
|
||||
this.bytes = bytes;
|
||||
}
|
||||
|
||||
static decode(nodeId: string): NodeId {
|
||||
return new NodeId(base58btc.decode(nodeId));
|
||||
}
|
||||
|
||||
equals(other: any): boolean {
|
||||
if (!(other instanceof NodeId)) {
|
||||
return false;
|
||||
}
|
||||
return equalBytes(this.bytes, other.bytes);
|
||||
}
|
||||
|
||||
get hashCode(): number {
|
||||
return (
|
||||
this.bytes[0] +
|
||||
this.bytes[1] * 256 +
|
||||
this.bytes[2] * 256 * 256 +
|
||||
this.bytes[3] * 256 * 256 * 256
|
||||
);
|
||||
}
|
||||
|
||||
toBase58(): string {
|
||||
return base58btc.encode(this.bytes);
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.toBase58();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,3 @@
|
|||
export default abstract class Metadata {
|
||||
abstract toJson(): { [key: string]: any };
|
||||
}
|
|
@ -0,0 +1,436 @@
|
|||
import Metadata from "#serialization/metadata/base.js";
|
||||
import Packer from "#serialization/pack.js";
|
||||
import { METADATA_TYPES, metadataMagicByte } from "#constants.js";
|
||||
import Unpacker from "#serialization/unpack.js";
|
||||
import ExtraMetadata from "#serialization/metadata/extra.js";
|
||||
import { Buffer } from "buffer";
|
||||
import CID from "#cid.js";
|
||||
import { Multihash } from "#multihash.js";
|
||||
import { base64url } from "multiformats/bases/base64";
|
||||
import EncryptedCID from "#encrypted_cid.js";
|
||||
|
||||
export default class DirectoryMetadata extends Metadata {
|
||||
details: DirectoryMetadataDetails;
|
||||
directories: { [key: string]: DirectoryReference };
|
||||
files: { [key: string]: FileReference };
|
||||
extraMetadata: ExtraMetadata;
|
||||
|
||||
constructor(
|
||||
details: DirectoryMetadataDetails,
|
||||
directories: { [key: string]: DirectoryReference },
|
||||
files: { [key: string]: FileReference },
|
||||
extraMetadata: ExtraMetadata,
|
||||
) {
|
||||
super();
|
||||
this.details = details;
|
||||
this.directories = directories;
|
||||
this.files = files;
|
||||
this.extraMetadata = extraMetadata;
|
||||
}
|
||||
|
||||
serialize(): Uint8Array {
|
||||
const p = new Packer();
|
||||
p.packInt(metadataMagicByte);
|
||||
p.packInt(METADATA_TYPES.DIRECTORY);
|
||||
|
||||
p.packListLength(4);
|
||||
|
||||
p.pack(this.details.data);
|
||||
|
||||
p.packMapLength(Object.keys(this.directories).length);
|
||||
Object.entries(this.directories).forEach(([key, value]) => {
|
||||
p.packString(key);
|
||||
p.pack(value.encode());
|
||||
});
|
||||
|
||||
p.packMapLength(Object.keys(this.files).length);
|
||||
Object.entries(this.files).forEach(([key, value]) => {
|
||||
p.packString(key);
|
||||
p.pack(value.encode());
|
||||
});
|
||||
|
||||
p.pack(this.extraMetadata.data);
|
||||
|
||||
return p.takeBytes();
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
return {
|
||||
type: "directory",
|
||||
details: this.details,
|
||||
directories: this.directories,
|
||||
files: this.files,
|
||||
extraMetadata: this.extraMetadata,
|
||||
};
|
||||
}
|
||||
|
||||
static deserialize(bytes: Uint8Array): DirectoryMetadata {
|
||||
const u = new Unpacker(Buffer.from(bytes));
|
||||
|
||||
const magicByte = u.unpackInt();
|
||||
if (magicByte !== metadataMagicByte) {
|
||||
throw new Error("Invalid metadata: Unsupported magic byte");
|
||||
}
|
||||
const typeAndVersion = u.unpackInt();
|
||||
if (typeAndVersion !== METADATA_TYPES.DIRECTORY) {
|
||||
throw new Error("Invalid metadata: Wrong metadata type");
|
||||
}
|
||||
|
||||
u.unpackListLength();
|
||||
|
||||
const dir = new DirectoryMetadata(
|
||||
new DirectoryMetadataDetails(u.unpackMap()),
|
||||
{},
|
||||
{},
|
||||
new ExtraMetadata({}),
|
||||
);
|
||||
|
||||
const dirCount = u.unpackMapLength();
|
||||
for (let i = 0; i < dirCount; i++) {
|
||||
const key = u.unpackString();
|
||||
dir.directories[key as string] = DirectoryReference.decode(u.unpackMap());
|
||||
}
|
||||
|
||||
const fileCount = u.unpackMapLength();
|
||||
for (let i = 0; i < fileCount; i++) {
|
||||
const key = u.unpackString();
|
||||
dir.files[key as string] = FileReference.decode(u.unpackMap());
|
||||
}
|
||||
|
||||
Object.assign(dir.extraMetadata.data, u.unpackMap());
|
||||
return dir;
|
||||
}
|
||||
}
|
||||
|
||||
class DirectoryMetadataDetails {
|
||||
data: Map<number, any>;
|
||||
|
||||
constructor(data: Map<number, any> | object) {
|
||||
if (data instanceof Map && typeof data == "object") {
|
||||
data = Object.entries(data).map(([key, value]) => [Number(key), value]);
|
||||
}
|
||||
this.data = data as Map<number, any>;
|
||||
}
|
||||
|
||||
get isShared(): boolean {
|
||||
return this.data.has(3);
|
||||
}
|
||||
|
||||
get isSharedReadOnly(): boolean {
|
||||
return this.data.get(3)?.[1] ?? false;
|
||||
}
|
||||
|
||||
get isSharedReadWrite(): boolean {
|
||||
return this.data.get(3)?.[2] ?? false;
|
||||
}
|
||||
|
||||
setShared(value: boolean, write: boolean): void {
|
||||
if (!this.data.has(3)) {
|
||||
this.data.set(3, {});
|
||||
}
|
||||
this.data.get(3)[write ? 2 : 1] = value;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
// Convert the data Map to a JSON object
|
||||
const jsonObject: { [key: string]: any } = {};
|
||||
this.data.forEach((value, key) => {
|
||||
jsonObject[key.toString()] = value;
|
||||
});
|
||||
return jsonObject;
|
||||
}
|
||||
}
|
||||
|
||||
export class DirectoryReference {
|
||||
created: number;
|
||||
name: string;
|
||||
encryptedWriteKey: Uint8Array;
|
||||
publicKey: Uint8Array;
|
||||
encryptionKey: Uint8Array | null;
|
||||
ext: { [key: string]: any } | null;
|
||||
|
||||
uri: string | null; // For internal operations
|
||||
key: string | null; // For internal operations
|
||||
size: number | null; // For internal operations
|
||||
|
||||
constructor(
|
||||
created: number,
|
||||
name: string,
|
||||
encryptedWriteKey: Uint8Array,
|
||||
publicKey: Uint8Array,
|
||||
encryptionKey: Uint8Array | null,
|
||||
ext: { [key: string]: any } | null,
|
||||
) {
|
||||
this.created = created;
|
||||
this.name = name;
|
||||
this.encryptedWriteKey = encryptedWriteKey;
|
||||
this.publicKey = publicKey;
|
||||
this.encryptionKey = encryptionKey;
|
||||
this.ext = ext;
|
||||
this.uri = null;
|
||||
this.key = null;
|
||||
this.size = null;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
return {
|
||||
name: this.name,
|
||||
created: this.created,
|
||||
publicKey: base64url.encode(this.publicKey),
|
||||
encryptedWriteKey: base64url.encode(this.encryptedWriteKey),
|
||||
encryptionKey: this.encryptionKey
|
||||
? base64url.encode(this.encryptionKey)
|
||||
: null,
|
||||
ext: this.ext,
|
||||
};
|
||||
}
|
||||
|
||||
static decode(data: { [key: number]: any }): DirectoryReference {
|
||||
return new DirectoryReference(
|
||||
data[2],
|
||||
data[1],
|
||||
data[4],
|
||||
data[3],
|
||||
data[5],
|
||||
data[6] ? (data[6] as { [key: string]: any }) : null,
|
||||
);
|
||||
}
|
||||
|
||||
encode(): { [key: number]: any } {
|
||||
const map: { [key: number]: any } = {
|
||||
1: this.name,
|
||||
2: this.created,
|
||||
3: this.publicKey,
|
||||
4: this.encryptedWriteKey,
|
||||
};
|
||||
|
||||
if (this.encryptionKey !== null) {
|
||||
map[5] = this.encryptionKey;
|
||||
}
|
||||
if (this.ext !== null) {
|
||||
map[6] = this.ext;
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
}
|
||||
export class FileReference {
|
||||
created: number;
|
||||
file: FileVersion;
|
||||
history: Map<number, FileVersion> | null;
|
||||
mimeType: string | null;
|
||||
name: string;
|
||||
version: number;
|
||||
ext: { [key: string]: any } | null;
|
||||
|
||||
uri: string | null; // For internal operations
|
||||
key: string | null; // For internal operations
|
||||
|
||||
constructor(
|
||||
name: string,
|
||||
created: number,
|
||||
version: number,
|
||||
file: FileVersion,
|
||||
ext: { [key: string]: any } | null = null,
|
||||
history: Map<number, FileVersion> | null = null,
|
||||
mimeType: string | null = null,
|
||||
) {
|
||||
this.name = name;
|
||||
this.created = created;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.ext = ext;
|
||||
this.history = history;
|
||||
this.mimeType = mimeType;
|
||||
this.uri = null;
|
||||
this.key = null;
|
||||
}
|
||||
|
||||
get modified(): number {
|
||||
return this.file.ts;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
return {
|
||||
name: this.name,
|
||||
created: this.created,
|
||||
modified: this.modified,
|
||||
version: this.version,
|
||||
mimeType: this.mimeType,
|
||||
file: this.file.toJson(),
|
||||
ext: this.ext,
|
||||
history: this.history
|
||||
? Array.from(this.history.values()).map((fv) => fv.toJson())
|
||||
: null,
|
||||
};
|
||||
}
|
||||
|
||||
static decode(data: { [key: number]: any }): FileReference {
|
||||
const historyData = data[8] as { [key: number]: any } | undefined;
|
||||
const history = historyData
|
||||
? new Map(
|
||||
Object.entries(historyData).map(([k, v]) => [
|
||||
Number(k),
|
||||
FileVersion.decode(v),
|
||||
]),
|
||||
)
|
||||
: null;
|
||||
|
||||
return new FileReference(
|
||||
data[1],
|
||||
data[2],
|
||||
data[5],
|
||||
FileVersion.decode(data[4]),
|
||||
data[7] ? (data[7] as { [key: string]: any }) : null,
|
||||
history,
|
||||
data[6],
|
||||
);
|
||||
}
|
||||
|
||||
encode(): { [key: number]: any } {
|
||||
const data: { [key: number]: any } = {
|
||||
1: this.name,
|
||||
2: this.created,
|
||||
4: this.file.encode(),
|
||||
5: this.version,
|
||||
};
|
||||
|
||||
if (this.mimeType !== null) {
|
||||
data[6] = this.mimeType;
|
||||
}
|
||||
if (this.ext !== null) {
|
||||
data[7] = this.ext;
|
||||
}
|
||||
if (this.history !== null) {
|
||||
data[8] = Array.from(this.history.entries()).reduce(
|
||||
(obj, [key, value]) => {
|
||||
obj[key] = value.encode();
|
||||
return obj;
|
||||
},
|
||||
{} as { [key: number]: any },
|
||||
);
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
}
|
||||
|
||||
export class FileVersion {
|
||||
ts: number;
|
||||
encryptedCID?: EncryptedCID;
|
||||
plaintextCID?: CID;
|
||||
thumbnail?: FileVersionThumbnail;
|
||||
hashes?: Multihash[];
|
||||
ext?: { [key: string]: any };
|
||||
|
||||
constructor(
|
||||
ts: number,
|
||||
encryptedCID?: EncryptedCID,
|
||||
plaintextCID?: CID,
|
||||
thumbnail?: FileVersionThumbnail,
|
||||
hashes?: Multihash[],
|
||||
ext?: { [key: string]: any },
|
||||
) {
|
||||
this.ts = ts;
|
||||
this.encryptedCID = encryptedCID;
|
||||
this.plaintextCID = plaintextCID;
|
||||
this.thumbnail = thumbnail;
|
||||
this.hashes = hashes;
|
||||
this.ext = ext;
|
||||
}
|
||||
|
||||
get cid(): CID {
|
||||
return this.plaintextCID ?? this.encryptedCID!.originalCID;
|
||||
}
|
||||
|
||||
static decode(data: { [key: number]: any }): FileVersion {
|
||||
return new FileVersion(
|
||||
data[8],
|
||||
data[1] == null ? undefined : EncryptedCID.fromBytes(data[1]),
|
||||
data[2] == null ? undefined : CID.fromBytes(data[2]),
|
||||
data[10] == null ? undefined : FileVersionThumbnail.decode(data[10]),
|
||||
data[9] ? data[9].map((e: any) => new Multihash(e)) : null,
|
||||
);
|
||||
}
|
||||
|
||||
encode(): { [key: number]: any } {
|
||||
const data: { [key: number]: any } = { 8: this.ts };
|
||||
|
||||
if (!!this.encryptedCID) {
|
||||
data[1] = this.encryptedCID.toBytes();
|
||||
}
|
||||
if (!!this.plaintextCID) {
|
||||
data[2] = this.plaintextCID.toBytes();
|
||||
}
|
||||
if (!!this.hashes) {
|
||||
data[9] = this.hashes.map((e) => e.fullBytes);
|
||||
}
|
||||
if (!!this.thumbnail) {
|
||||
data[10] = this.thumbnail.encode();
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
return {
|
||||
ts: this.ts,
|
||||
encryptedCID: this.encryptedCID?.toBase58(),
|
||||
cid: this.cid.toBase58(),
|
||||
hashes: this.hashes?.map((e) => e.toBase64Url()),
|
||||
thumbnail: this.thumbnail?.toJson(),
|
||||
};
|
||||
}
|
||||
}
|
||||
export class FileVersionThumbnail {
|
||||
imageType: string | null;
|
||||
aspectRatio: number;
|
||||
cid: EncryptedCID;
|
||||
thumbhash: Uint8Array | null;
|
||||
|
||||
constructor(
|
||||
imageType: string | null,
|
||||
aspectRatio: number,
|
||||
cid: EncryptedCID,
|
||||
thumbhash: Uint8Array | null,
|
||||
) {
|
||||
this.imageType = imageType || "webp"; // Default to 'webp' if not provided
|
||||
this.aspectRatio = aspectRatio;
|
||||
this.cid = cid;
|
||||
this.thumbhash = thumbhash;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
return {
|
||||
imageType: this.imageType,
|
||||
aspectRatio: this.aspectRatio,
|
||||
cid: this.cid.toBase58(),
|
||||
thumbhash: this.thumbhash ? base64url.encode(this.thumbhash) : null,
|
||||
};
|
||||
}
|
||||
|
||||
static decode(data: { [key: number]: any }): FileVersionThumbnail {
|
||||
return new FileVersionThumbnail(
|
||||
data[1],
|
||||
data[2],
|
||||
EncryptedCID.fromBytes(data[3]),
|
||||
data[4],
|
||||
);
|
||||
}
|
||||
|
||||
encode(): { [key: number]: any } {
|
||||
const data: { [key: number]: any } = {
|
||||
2: this.aspectRatio,
|
||||
3: this.cid.toBytes(),
|
||||
};
|
||||
|
||||
if (this.imageType !== null) {
|
||||
data[1] = this.imageType;
|
||||
}
|
||||
if (this.thumbhash !== null) {
|
||||
data[4] = this.thumbhash;
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,60 @@
|
|||
import {
|
||||
metadataExtensionBasicMediaMetadata,
|
||||
metadataExtensionBridge,
|
||||
metadataExtensionCategories,
|
||||
metadataExtensionDonationKeys,
|
||||
metadataExtensionLanguages,
|
||||
metadataExtensionLicenses,
|
||||
metadataExtensionOriginalTimestamp,
|
||||
metadataExtensionPreviousVersions,
|
||||
metadataExtensionRoutingHints,
|
||||
metadataExtensionSourceUris,
|
||||
metadataExtensionTags,
|
||||
metadataExtensionTimestamp,
|
||||
metadataExtensionUpdateCID,
|
||||
metadataExtensionViewTypes,
|
||||
metadataExtensionWikidataClaims,
|
||||
} from "#constants.js";
|
||||
import CID from "#cid.js";
|
||||
|
||||
export default class ExtraMetadata {
|
||||
data: Map<number, any>;
|
||||
|
||||
constructor(data: Map<number, any> | object) {
|
||||
if (data instanceof Map && typeof data == "object") {
|
||||
data = Object.entries(data).map(([key, value]) => [Number(key), value]);
|
||||
}
|
||||
this.data = data as Map<number, any>;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
const map: { [key: string]: any } = {};
|
||||
const names: { [key: number]: string } = {
|
||||
[metadataExtensionLicenses]: "licenses",
|
||||
[metadataExtensionDonationKeys]: "donationKeys",
|
||||
[metadataExtensionWikidataClaims]: "wikidataClaims",
|
||||
[metadataExtensionLanguages]: "languages",
|
||||
[metadataExtensionSourceUris]: "sourceUris",
|
||||
// metadataExtensionUpdateCID: 'updateCID',
|
||||
[metadataExtensionPreviousVersions]: "previousVersions",
|
||||
[metadataExtensionTimestamp]: "timestamp",
|
||||
[metadataExtensionOriginalTimestamp]: "originalTimestamp",
|
||||
[metadataExtensionTags]: "tags",
|
||||
[metadataExtensionCategories]: "categories",
|
||||
[metadataExtensionBasicMediaMetadata]: "basicMediaMetadata",
|
||||
[metadataExtensionViewTypes]: "viewTypes",
|
||||
[metadataExtensionBridge]: "bridge",
|
||||
[metadataExtensionRoutingHints]: "routingHints",
|
||||
};
|
||||
|
||||
this.data.forEach((value, key) => {
|
||||
if (key === metadataExtensionUpdateCID) {
|
||||
map["updateCID"] = CID.fromBytes(value).toString();
|
||||
} else {
|
||||
map[names[key]] = value;
|
||||
}
|
||||
});
|
||||
|
||||
return map;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,525 @@
|
|||
import Metadata from "#serialization/metadata/base.js";
|
||||
import CID from "#cid.js";
|
||||
import {
|
||||
CID_HASH_TYPES,
|
||||
METADATA_TYPES,
|
||||
metadataMagicByte,
|
||||
metadataMediaDetailsDuration,
|
||||
metadataMediaDetailsIsLive,
|
||||
metadataProofTypeSignature,
|
||||
parentLinkTypeUserIdentity,
|
||||
} from "#constants.js";
|
||||
import ExtraMetadata from "#serialization/metadata/extra.js";
|
||||
import { MetadataParentLink } from "#serialization/metadata/parent.js";
|
||||
import { Multihash } from "#multihash.js";
|
||||
import { decodeEndian, encodeEndian } from "#util.js";
|
||||
import Unpacker from "#serialization/unpack.js";
|
||||
import { Buffer } from "buffer";
|
||||
import { blake3 } from "@noble/hashes/blake3";
|
||||
import { ed25519 } from "@noble/curves/ed25519";
|
||||
import KeyPairEd25519 from "#ed25519.js";
|
||||
import Packer from "#serialization/pack.js";
|
||||
|
||||
export default class MediaMetadata extends Metadata {
|
||||
name: string;
|
||||
mediaTypes: { [key: string]: MediaFormat[] };
|
||||
parents: MetadataParentLink[];
|
||||
details: MediaMetadataDetails;
|
||||
links: MediaMetadataLinks | null;
|
||||
extraMetadata: ExtraMetadata;
|
||||
|
||||
constructor(
|
||||
name: string,
|
||||
details: MediaMetadataDetails,
|
||||
parents: MetadataParentLink[],
|
||||
mediaTypes: { [key: string]: MediaFormat[] },
|
||||
links: MediaMetadataLinks | null,
|
||||
extraMetadata: ExtraMetadata,
|
||||
) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.details = details;
|
||||
this.parents = parents;
|
||||
this.mediaTypes = mediaTypes;
|
||||
this.links = links;
|
||||
this.extraMetadata = extraMetadata;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
return {
|
||||
type: "media",
|
||||
name: this.name,
|
||||
details: this.details,
|
||||
parents: this.parents,
|
||||
mediaTypes: this.mediaTypes,
|
||||
links: this.links,
|
||||
extraMetadata: this.extraMetadata,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
class MediaMetadataLinks {
|
||||
count: number;
|
||||
head: CID[];
|
||||
collapsed: CID[] | null;
|
||||
tail: CID[] | null;
|
||||
|
||||
constructor(head: CID[]) {
|
||||
this.head = head;
|
||||
this.count = head.length;
|
||||
this.collapsed = null;
|
||||
this.tail = null;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
const map: { [key: string]: any } = {
|
||||
count: this.count,
|
||||
head: this.head.map((e) => e.toString()),
|
||||
};
|
||||
if (this.collapsed != null) {
|
||||
map["collapsed"] = this.collapsed.map((e) => e.toString());
|
||||
}
|
||||
if (this.tail != null) {
|
||||
map["tail"] = this.tail.map((e) => e.toString());
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
static decode(links: { [key: number]: any }): MediaMetadataLinks {
|
||||
const count = links[1] as number;
|
||||
const head = (links[2] as Uint8Array[]).map((bytes) =>
|
||||
CID.fromBytes(bytes),
|
||||
);
|
||||
const collapsed = links[3]
|
||||
? (links[3] as Uint8Array[]).map((bytes) => CID.fromBytes(bytes))
|
||||
: null;
|
||||
const tail = links[4]
|
||||
? (links[4] as Uint8Array[]).map((bytes) => CID.fromBytes(bytes))
|
||||
: null;
|
||||
|
||||
const instance = new MediaMetadataLinks(head);
|
||||
instance.count = count;
|
||||
instance.collapsed = collapsed;
|
||||
instance.tail = tail;
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
encode(): { [key: number]: any } {
|
||||
const data: { [key: number]: any } = {
|
||||
1: this.count,
|
||||
2: this.head,
|
||||
};
|
||||
|
||||
const addNotNull = (key: number, value: any) => {
|
||||
if (value !== null && value !== undefined) {
|
||||
data[key] = value;
|
||||
}
|
||||
};
|
||||
|
||||
addNotNull(3, this.collapsed);
|
||||
addNotNull(4, this.tail);
|
||||
|
||||
return data;
|
||||
}
|
||||
}
|
||||
|
||||
class MediaMetadataDetails {
|
||||
data: { [key: number]: any };
|
||||
|
||||
constructor(data: { [key: number]: any }) {
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
const map: { [key: string]: any } = {};
|
||||
const names: { [key: number]: string } = {
|
||||
[metadataMediaDetailsDuration]: "duration",
|
||||
[metadataMediaDetailsIsLive]: "live",
|
||||
};
|
||||
Object.entries(this.data).forEach(([key, value]) => {
|
||||
map[names[+key]] = value;
|
||||
});
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
get duration(): number | null {
|
||||
return this.data[metadataMediaDetailsDuration];
|
||||
}
|
||||
|
||||
get isLive(): boolean {
|
||||
return !!this.data[metadataMediaDetailsIsLive];
|
||||
}
|
||||
}
|
||||
|
||||
export class MediaFormat {
|
||||
subtype: string;
|
||||
role: string | null;
|
||||
ext: string | null;
|
||||
cid: CID | null;
|
||||
height: number | null;
|
||||
width: number | null;
|
||||
languages: string[] | null;
|
||||
asr: number | null;
|
||||
fps: number | null;
|
||||
bitrate: number | null;
|
||||
audioChannels: number | null;
|
||||
vcodec: string | null;
|
||||
acodec: string | null;
|
||||
container: string | null;
|
||||
dynamicRange: string | null;
|
||||
charset: string | null;
|
||||
value: Uint8Array | null;
|
||||
duration: number | null;
|
||||
rows: number | null;
|
||||
columns: number | null;
|
||||
index: number | null;
|
||||
initRange: string | null;
|
||||
indexRange: string | null;
|
||||
caption: string | null;
|
||||
|
||||
constructor(
|
||||
subtype: string,
|
||||
role: string | null,
|
||||
ext: string | null,
|
||||
cid: CID | null,
|
||||
height: number | null,
|
||||
width: number | null,
|
||||
languages: string[] | null,
|
||||
asr: number | null,
|
||||
fps: number | null,
|
||||
bitrate: number | null,
|
||||
audioChannels: number | null,
|
||||
vcodec: string | null,
|
||||
acodec: string | null,
|
||||
container: string | null,
|
||||
dynamicRange: string | null,
|
||||
charset: string | null,
|
||||
value: Uint8Array | null,
|
||||
duration: number | null,
|
||||
rows: number | null,
|
||||
columns: number | null,
|
||||
index: number | null,
|
||||
initRange: string | null,
|
||||
indexRange: string | null,
|
||||
caption: string | null,
|
||||
) {
|
||||
this.subtype = subtype;
|
||||
this.role = role;
|
||||
this.ext = ext;
|
||||
this.cid = cid;
|
||||
this.height = height;
|
||||
this.width = width;
|
||||
this.languages = languages;
|
||||
this.asr = asr;
|
||||
this.fps = fps;
|
||||
this.bitrate = bitrate;
|
||||
this.audioChannels = audioChannels;
|
||||
this.vcodec = vcodec;
|
||||
this.acodec = acodec;
|
||||
this.container = container;
|
||||
this.dynamicRange = dynamicRange;
|
||||
this.charset = charset;
|
||||
this.value = value;
|
||||
this.duration = duration;
|
||||
this.rows = rows;
|
||||
this.columns = columns;
|
||||
this.index = index;
|
||||
this.initRange = initRange;
|
||||
this.indexRange = indexRange;
|
||||
this.caption = caption;
|
||||
}
|
||||
|
||||
get valueAsString(): string | null {
|
||||
if (this.value === null) {
|
||||
return null;
|
||||
}
|
||||
return new TextDecoder().decode(this.value);
|
||||
}
|
||||
|
||||
static decode(data: { [key: number]: any }): MediaFormat {
|
||||
return new MediaFormat(
|
||||
data[2], // subtype
|
||||
data[3], // role
|
||||
data[4], // ext
|
||||
data[1] == null ? null : CID.fromBytes(Uint8Array.from(data[1])),
|
||||
data[10], // height
|
||||
data[11], // width
|
||||
data[12] ? (data[12] as string[]) : null, // languages
|
||||
data[13], // asr
|
||||
data[14], // fps
|
||||
data[15], // bitrate
|
||||
data[18], // audioChannels
|
||||
data[19], // vcodec
|
||||
data[20], // acodec
|
||||
data[21], // container
|
||||
data[22], // dynamicRange
|
||||
data[23], // charset
|
||||
data[24] == null ? null : Uint8Array.from(data[24]), // value
|
||||
data[25], // duration
|
||||
data[26], // rows
|
||||
data[27], // columns
|
||||
data[28], // index
|
||||
data[29], // initRange
|
||||
data[30], // indexRange
|
||||
data[31], // caption
|
||||
);
|
||||
}
|
||||
|
||||
encode(): { [key: number]: any } {
|
||||
const data: { [key: number]: any } = {};
|
||||
|
||||
const addNotNull = (key: number, value: any) => {
|
||||
if (value !== null && value !== undefined) {
|
||||
data[key] = value;
|
||||
}
|
||||
};
|
||||
|
||||
addNotNull(1, this.cid?.toBytes());
|
||||
addNotNull(2, this.subtype);
|
||||
addNotNull(3, this.role);
|
||||
addNotNull(4, this.ext);
|
||||
addNotNull(10, this.height);
|
||||
addNotNull(11, this.width);
|
||||
addNotNull(12, this.languages);
|
||||
addNotNull(13, this.asr);
|
||||
addNotNull(14, this.fps);
|
||||
addNotNull(15, this.bitrate);
|
||||
// addNotNull(16, this.abr);
|
||||
// addNotNull(17, this.vbr);
|
||||
addNotNull(18, this.audioChannels);
|
||||
addNotNull(19, this.vcodec);
|
||||
addNotNull(20, this.acodec);
|
||||
addNotNull(21, this.container);
|
||||
addNotNull(22, this.dynamicRange);
|
||||
addNotNull(23, this.charset);
|
||||
addNotNull(24, this.value);
|
||||
addNotNull(25, this.duration);
|
||||
addNotNull(26, this.rows);
|
||||
addNotNull(27, this.columns);
|
||||
addNotNull(28, this.index);
|
||||
addNotNull(29, this.initRange);
|
||||
addNotNull(30, this.indexRange);
|
||||
addNotNull(31, this.caption);
|
||||
|
||||
return data;
|
||||
}
|
||||
toJson(): { [key: string]: any } {
|
||||
const data: { [key: string]: any } = {};
|
||||
|
||||
const addNotNull = (key: string, value: any) => {
|
||||
if (value !== null && value !== undefined) {
|
||||
data[key] = value;
|
||||
}
|
||||
};
|
||||
|
||||
addNotNull("cid", this.cid?.toBase64Url());
|
||||
addNotNull("subtype", this.subtype);
|
||||
addNotNull("role", this.role);
|
||||
addNotNull("ext", this.ext);
|
||||
addNotNull("height", this.height);
|
||||
addNotNull("width", this.width);
|
||||
addNotNull("languages", this.languages);
|
||||
addNotNull("asr", this.asr);
|
||||
addNotNull("fps", this.fps);
|
||||
addNotNull("bitrate", this.bitrate);
|
||||
// addNotNull('abr', this.abr);
|
||||
// addNotNull('vbr', this.vbr);
|
||||
addNotNull("audioChannels", this.audioChannels);
|
||||
addNotNull("vcodec", this.vcodec);
|
||||
addNotNull("acodec", this.acodec);
|
||||
addNotNull("container", this.container);
|
||||
addNotNull("dynamicRange", this.dynamicRange);
|
||||
addNotNull("charset", this.charset);
|
||||
addNotNull("value", this.value ? this.valueAsString : null); // Assuming valueAsString() is a method to convert value to string
|
||||
addNotNull("duration", this.duration);
|
||||
addNotNull("rows", this.rows);
|
||||
addNotNull("columns", this.columns);
|
||||
addNotNull("index", this.index);
|
||||
addNotNull("initRange", this.initRange);
|
||||
addNotNull("indexRange", this.indexRange);
|
||||
addNotNull("caption", this.caption);
|
||||
|
||||
return data;
|
||||
}
|
||||
}
|
||||
|
||||
export async function deserialize(bytes: Uint8Array): Promise<MediaMetadata> {
|
||||
const magicByte = bytes[0];
|
||||
if (magicByte !== metadataMagicByte) {
|
||||
throw new Error("Invalid metadata: Unsupported magic byte");
|
||||
}
|
||||
const typeAndVersion = bytes[1];
|
||||
let bodyBytes: Uint8Array;
|
||||
|
||||
const provenPubKeys: Multihash[] = [];
|
||||
|
||||
if (typeAndVersion === METADATA_TYPES.PROOF) {
|
||||
const proofSectionLength = decodeEndian(bytes.subarray(2, 4));
|
||||
|
||||
bodyBytes = bytes.subarray(4 + proofSectionLength);
|
||||
|
||||
if (proofSectionLength > 0) {
|
||||
const proofUnpacker = new Unpacker(
|
||||
Buffer.from(bytes.subarray(4, 4 + proofSectionLength)),
|
||||
);
|
||||
|
||||
const b3hash = Uint8Array.from([
|
||||
CID_HASH_TYPES.BLAKE3,
|
||||
...blake3(bodyBytes),
|
||||
]);
|
||||
|
||||
const proofCount = proofUnpacker.unpackListLength();
|
||||
|
||||
for (let i = 0; i < proofCount; i++) {
|
||||
const parts = proofUnpacker.unpackList();
|
||||
const proofType = parts[0] as number;
|
||||
|
||||
if (proofType === metadataProofTypeSignature) {
|
||||
const mhashType = parts[1] as number;
|
||||
const pubkey = parts[2] as Uint8Array;
|
||||
const signature = parts[3] as Uint8Array;
|
||||
|
||||
if (mhashType !== CID_HASH_TYPES.BLAKE3) {
|
||||
throw new Error(`Hash type ${mhashType} not supported`);
|
||||
}
|
||||
|
||||
if (pubkey[0] !== CID_HASH_TYPES.ED25519) {
|
||||
throw new Error("Only ed25519 keys are supported");
|
||||
}
|
||||
if (pubkey.length !== 33) {
|
||||
throw new Error("Invalid userId");
|
||||
}
|
||||
|
||||
const isValid = await ed25519.verify(
|
||||
signature,
|
||||
b3hash,
|
||||
pubkey.subarray(1),
|
||||
);
|
||||
|
||||
if (!isValid) {
|
||||
throw new Error("Invalid signature found");
|
||||
}
|
||||
provenPubKeys.push(new Multihash(pubkey));
|
||||
} else {
|
||||
// Unsupported proof type
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (typeAndVersion === METADATA_TYPES.MEDIA) {
|
||||
bodyBytes = bytes.subarray(1);
|
||||
} else {
|
||||
throw new Error(`Invalid metadata: Unsupported type ${typeAndVersion}`);
|
||||
}
|
||||
|
||||
// Start of body section
|
||||
const u = new Unpacker(Buffer.from(bodyBytes));
|
||||
const type = u.unpackInt();
|
||||
|
||||
if (type !== METADATA_TYPES.MEDIA) {
|
||||
throw new Error(`Invalid metadata: Unsupported type ${type}`);
|
||||
}
|
||||
|
||||
u.unpackListLength();
|
||||
const name = u.unpackString();
|
||||
const details = new MediaMetadataDetails(u.unpackMap());
|
||||
|
||||
const parents: MetadataParentLink[] = [];
|
||||
const userCount = u.unpackListLength();
|
||||
for (let i = 0; i < userCount; i++) {
|
||||
const m = u.unpackMap();
|
||||
const cid = CID.fromBytes(m[1] as Uint8Array);
|
||||
|
||||
parents.push(
|
||||
new MetadataParentLink(
|
||||
cid,
|
||||
(m[0] ?? parentLinkTypeUserIdentity) as number,
|
||||
m[2],
|
||||
provenPubKeys.some((pk) => pk.equals(cid.hash)), // Assuming Multihash class has an equals method
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
const mediaTypesMap = u.unpackMap() as Record<string, any>;
|
||||
const mediaTypes: Record<string, MediaFormat[]> = {};
|
||||
|
||||
Object.entries(mediaTypesMap).forEach(([type, formats]) => {
|
||||
mediaTypes[type] = formats.map((e: any) =>
|
||||
MediaFormat.decode(e as Record<number, any>),
|
||||
);
|
||||
});
|
||||
|
||||
const links = u.unpackMap();
|
||||
const extraMetadata = u.unpackMap();
|
||||
|
||||
return new MediaMetadata(
|
||||
name || "",
|
||||
details,
|
||||
parents,
|
||||
mediaTypes,
|
||||
links.size > 0 ? MediaMetadataLinks.decode(links) : null,
|
||||
new ExtraMetadata(extraMetadata),
|
||||
);
|
||||
}
|
||||
|
||||
export async function serialize(
|
||||
m: MediaMetadata,
|
||||
keyPairs: KeyPairEd25519[] = [],
|
||||
): Promise<Uint8Array> {
|
||||
const c = new Packer();
|
||||
c.packInt(METADATA_TYPES.MEDIA);
|
||||
c.packListLength(6);
|
||||
|
||||
c.packString(m.name);
|
||||
c.pack(m.details.data);
|
||||
|
||||
c.packListLength(m.parents.length);
|
||||
for (const parent of m.parents) {
|
||||
c.pack({ 0: parent.type, 1: parent.cid.toBytes() });
|
||||
}
|
||||
|
||||
c.packMapLength(Object.keys(m.mediaTypes).length);
|
||||
for (const [key, value] of Object.entries(m.mediaTypes)) {
|
||||
c.packString(key);
|
||||
c.pack(value);
|
||||
}
|
||||
|
||||
if (m.links === null) {
|
||||
c.packMapLength(0);
|
||||
} else {
|
||||
c.pack(m.links.encode());
|
||||
}
|
||||
|
||||
c.pack(m.extraMetadata.data);
|
||||
|
||||
const bodyBytes = c.takeBytes();
|
||||
|
||||
if (keyPairs.length === 0) {
|
||||
return Uint8Array.from([metadataMagicByte, ...bodyBytes]);
|
||||
}
|
||||
|
||||
const b3hash = Uint8Array.from([CID_HASH_TYPES.BLAKE3, ...blake3(bodyBytes)]);
|
||||
|
||||
const proofPacker = new Packer();
|
||||
proofPacker.packListLength(keyPairs.length);
|
||||
|
||||
for (const kp of keyPairs) {
|
||||
const signature = await ed25519.sign(b3hash, kp.extractBytes());
|
||||
proofPacker.pack([
|
||||
metadataProofTypeSignature,
|
||||
CID_HASH_TYPES.BLAKE3,
|
||||
kp.publicKey,
|
||||
signature,
|
||||
]);
|
||||
}
|
||||
const proofBytes = proofPacker.takeBytes();
|
||||
|
||||
const header = [
|
||||
metadataMagicByte,
|
||||
METADATA_TYPES.PROOF,
|
||||
...encodeEndian(proofBytes.length, 2),
|
||||
];
|
||||
|
||||
return Uint8Array.from([...header, ...proofBytes, ...bodyBytes]);
|
||||
}
|
|
@ -0,0 +1,34 @@
|
|||
import CID from "#cid.js";
|
||||
import { parentLinkTypeUserIdentity } from "#constants.js";
|
||||
|
||||
export class MetadataParentLink {
|
||||
cid: CID;
|
||||
type: number;
|
||||
role: string | null;
|
||||
signed: boolean;
|
||||
|
||||
constructor(
|
||||
cid: CID,
|
||||
type: number = parentLinkTypeUserIdentity,
|
||||
role: string | null = null,
|
||||
signed: boolean = false,
|
||||
) {
|
||||
this.cid = cid;
|
||||
this.type = type;
|
||||
this.role = role;
|
||||
this.signed = signed;
|
||||
}
|
||||
|
||||
toJson(): { [key: string]: any } {
|
||||
const map: { [key: string]: any } = {};
|
||||
|
||||
map["cid"] = this.cid.toString();
|
||||
map["type"] = this.type;
|
||||
if (this.role !== null) {
|
||||
map["role"] = this.role;
|
||||
}
|
||||
map["signed"] = this.signed;
|
||||
|
||||
return map;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
import CID from "#cid.js";
|
||||
|
||||
export default class UserIdentityMetadata {
|
||||
userID?: CID;
|
||||
details: UserIdentityMetadataDetails;
|
||||
signingKeys: UserIdentityPublicKey[];
|
||||
encryptionKeys: UserIdentityPublicKey[];
|
||||
links: Map<number, CID>;
|
||||
|
||||
constructor(
|
||||
details: UserIdentityMetadataDetails,
|
||||
signingKeys: UserIdentityPublicKey[],
|
||||
encryptionKeys: UserIdentityPublicKey[],
|
||||
links: Map<number, CID>,
|
||||
) {
|
||||
this.details = details;
|
||||
this.signingKeys = signingKeys;
|
||||
this.encryptionKeys = encryptionKeys;
|
||||
this.links = links;
|
||||
}
|
||||
}
|
||||
|
||||
class UserIdentityMetadataDetails {
|
||||
created: number;
|
||||
createdBy: string;
|
||||
|
||||
constructor(created: number, createdBy: string) {
|
||||
this.created = created;
|
||||
this.createdBy = createdBy;
|
||||
}
|
||||
}
|
||||
|
||||
class UserIdentityPublicKey {
|
||||
key: Uint8Array;
|
||||
|
||||
constructor(key: Uint8Array) {
|
||||
this.key = key;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,116 @@
import Metadata from "#serialization/metadata/base.js";
import ExtraMetadata from "#serialization/metadata/extra.js";
import CID from "#cid.js";
import Unpacker from "#serialization/unpack.js";
import { METADATA_TYPES, metadataMagicByte } from "#constants.js";
import { Buffer } from "buffer";

export default class WebAppMetadata extends Metadata {
  name: string | null;
  tryFiles: string[];
  errorPages: Map<number, string>;
  extraMetadata: ExtraMetadata;
  paths: { [key: string]: WebAppMetadataFileReference };

  constructor(
    name: string | null,
    tryFiles: string[],
    errorPages: Map<number, string>,
    paths: { [key: string]: WebAppMetadataFileReference },
    extraMetadata: ExtraMetadata,
  ) {
    super();
    this.name = name;
    this.tryFiles = tryFiles;
    this.errorPages = errorPages;
    this.paths = paths;
    this.extraMetadata = extraMetadata;
  }

  toJson(): { [key: string]: any } {
    return {
      type: "web_app",
      name: this.name,
      tryFiles: this.tryFiles,
      errorPages: Array.from(this.errorPages.entries()).reduce(
        (obj, [key, value]) => {
          obj[key.toString()] = value;
          return obj;
        },
        {} as { [key: string]: string },
      ),
      paths: Object.fromEntries(
        Object.entries(this.paths).map(([key, ref]) => [key, ref.toJson()]),
      ),
      extraMetadata: this.extraMetadata,
    };
  }
}

export class WebAppMetadataFileReference {
  contentType: string | null;
  cid: CID;

  constructor(cid: CID, contentType: string | null) {
    this.cid = cid;
    this.contentType = contentType;
  }

  get size(): number {
    return this.cid.size ?? 0;
  }

  toJson(): { [key: string]: any } {
    return {
      cid: this.cid.toBase64Url(),
      contentType: this.contentType,
    };
  }
}

export async function deserialize(bytes: Uint8Array): Promise<WebAppMetadata> {
  const u = new Unpacker(Buffer.from(bytes));

  const magicByte = u.unpackInt();
  if (magicByte !== metadataMagicByte) {
    throw new Error("Invalid metadata: Unsupported magic byte");
  }
  const typeAndVersion = u.unpackInt();
  if (typeAndVersion !== METADATA_TYPES.WEBAPP) {
    throw new Error("Invalid metadata: Wrong metadata type");
  }

  u.unpackListLength();

  const name = u.unpackString();

  const tryFiles = u.unpackList() as string[];

  const errorPages = u.unpackMap() as Record<number, string>;

  const length = u.unpackListLength();

  const paths: Record<string, WebAppMetadataFileReference> = {};

  for (let i = 0; i < length; i++) {
    u.unpackListLength();
    const path = u.unpackString();
    const cid = CID.fromBytes(u.unpackBinary());
    paths[path as string] = new WebAppMetadataFileReference(
      cid,
      u.unpackString(),
    );
  }

  const extraMetadata = u.unpackMap() as Record<number, any>;

  return new WebAppMetadata(
    name,
    tryFiles,
    new Map<number, string>(
      Object.entries(errorPages).map(([key, value]) => [Number(key), value]),
    ),
    paths,
    new ExtraMetadata(extraMetadata),
  );
}

@@ -0,0 +1,292 @@
import NodeId from "../nodeId.js";
import CID from "../cid.js";
import { MediaFormat } from "./metadata/media.js";
import {
  DirectoryReference,
  FileReference,
  FileVersion,
  FileVersionThumbnail,
} from "./metadata/directory.js";
import { Buffer } from "buffer";

export default class Packer {
  private _bufSize: number;
  // @ts-ignore
  private _buf: Buffer;
  // @ts-ignore
  private _d: DataView;
  private _offset: number = 0;
  private _builder: Buffer[] = [];
  private _strCodec = new TextEncoder(); // UTF-8 TextEncoder

  constructor(bufSize: number = 64) {
    this._bufSize = bufSize;
    this._newBuf(this._bufSize);
  }

  private _newBuf(size: number) {
    this._buf = Buffer.alloc(size);
    this._d = new DataView(this._buf.buffer, this._buf.byteOffset);
    this._offset = 0;
  }

  private _nextBuf() {
    this._flushBuf();
    this._bufSize *= 2;
    this._newBuf(this._bufSize);
  }

  private _flushBuf() {
    this._builder.push(this._buf.slice(0, this._offset));
  }

  private _putBytes(bytes: Buffer | Uint8Array) {
    const length = bytes.length;
    if (this._buf.length - this._offset < length) {
      this._nextBuf();
    }
    if (this._offset === 0) {
      this._builder.push(Buffer.from(bytes));
    } else {
      this._buf.set(bytes, this._offset);
      this._offset += length;
    }
  }

  public packNull() {
    if (this._buf.length - this._offset < 1) {
      this._nextBuf();
    }
    this._d.setUint8(this._offset++, 0xc0);
  }

  public packBool(v: boolean | null) {
    if (this._buf.length - this._offset < 1) {
      this._nextBuf();
    }
    if (v === null) {
      this._d.setUint8(this._offset++, 0xc0);
    } else {
      this._d.setUint8(this._offset++, v ? 0xc3 : 0xc2);
    }
  }

  public packInt(v: number | null) {
    if (this._buf.length - this._offset < 9) {
      this._nextBuf();
    }
    if (v === null) {
      this._d.setUint8(this._offset++, 0xc0);
    } else if (v >= 0) {
      if (v <= 127) {
        this._d.setUint8(this._offset++, v);
      } else if (v <= 0xff) {
        this._d.setUint8(this._offset++, 0xcc);
        this._d.setUint8(this._offset++, v);
      } else if (v <= 0xffff) {
        this._d.setUint8(this._offset++, 0xcd);
        this._d.setUint16(this._offset, v);
        this._offset += 2;
      } else if (v <= 0xffffffff) {
        this._d.setUint8(this._offset++, 0xce);
        this._d.setUint32(this._offset, v);
        this._offset += 4;
      } else {
        this._d.setUint8(this._offset++, 0xcf);
        this._d.setBigUint64(this._offset, BigInt(v));
        this._offset += 8;
      }
    } else {
      if (v >= -32) {
        this._d.setInt8(this._offset++, v);
      } else if (v >= -128) {
        this._d.setUint8(this._offset++, 0xd0);
        this._d.setInt8(this._offset++, v);
      } else if (v >= -32768) {
        this._d.setUint8(this._offset++, 0xd1);
        this._d.setInt16(this._offset, v);
        this._offset += 2;
      } else if (v >= -2147483648) {
        this._d.setUint8(this._offset++, 0xd2);
        this._d.setInt32(this._offset, v);
        this._offset += 4;
      } else {
        this._d.setUint8(this._offset++, 0xd3);
        this._d.setBigInt64(this._offset, BigInt(v));
        this._offset += 8;
      }
    }
  }

  public packDouble(v: number | null) {
    if (this._buf.length - this._offset < 9) {
      this._nextBuf();
    }
    if (v === null) {
      this._d.setUint8(this._offset++, 0xc0);
      return;
    }
    this._d.setUint8(this._offset++, 0xcb);
    this._d.setFloat64(this._offset, v);
    this._offset += 8;
  }

  public packString(v: string | null) {
    if (this._buf.length - this._offset < 5) {
      this._nextBuf();
    }
    if (v === null) {
      this._d.setUint8(this._offset++, 0xc0);
      return;
    }
    const encoded = this._strCodec.encode(v);
    const length = encoded.length;
    if (length <= 31) {
      this._d.setUint8(this._offset++, 0xa0 | length);
    } else if (length <= 0xff) {
      this._d.setUint8(this._offset++, 0xd9);
      this._d.setUint8(this._offset++, length);
    } else if (length <= 0xffff) {
      this._d.setUint8(this._offset++, 0xda);
      this._d.setUint16(this._offset, length);
      this._offset += 2;
    } else if (length <= 0xffffffff) {
      this._d.setUint8(this._offset++, 0xdb);
      this._d.setUint32(this._offset, length);
      this._offset += 4;
    } else {
      throw new Error("Max String length is 0xFFFFFFFF");
    }
    this._putBytes(Buffer.from(encoded));
  }

  public packStringEmptyIsNull(v: string | null) {
    if (v === null || v === "") {
      this.packNull();
    } else {
      this.packString(v);
    }
  }

  public packBinary(buffer: Buffer | null) {
    if (this._buf.length - this._offset < 5) {
      this._nextBuf();
    }
    if (buffer === null) {
      this._d.setUint8(this._offset++, 0xc0);
      return;
    }
    const length = buffer.length;
    if (length <= 0xff) {
      this._d.setUint8(this._offset++, 0xc4);
      this._d.setUint8(this._offset++, length);
    } else if (length <= 0xffff) {
      this._d.setUint8(this._offset++, 0xc5);
      this._d.setUint16(this._offset, length);
      this._offset += 2;
    } else if (length <= 0xffffffff) {
      this._d.setUint8(this._offset++, 0xc6);
      this._d.setUint32(this._offset, length);
      this._offset += 4;
    } else {
      throw new Error("Max binary length is 0xFFFFFFFF");
    }
    this._putBytes(buffer);
  }

  public packListLength(length: number | null) {
    if (this._buf.length - this._offset < 5) {
      this._nextBuf();
    }
    if (length === null) {
      this._d.setUint8(this._offset++, 0xc0);
    } else if (length <= 0xf) {
      this._d.setUint8(this._offset++, 0x90 | length);
    } else if (length <= 0xffff) {
      this._d.setUint8(this._offset++, 0xdc);
      this._d.setUint16(this._offset, length);
      this._offset += 2;
    } else if (length <= 0xffffffff) {
      this._d.setUint8(this._offset++, 0xdd);
      this._d.setUint32(this._offset, length);
      this._offset += 4;
    } else {
      throw new Error("Max list length is 0xFFFFFFFF");
    }
  }

  public packMapLength(length: number | null) {
    if (this._buf.length - this._offset < 5) {
      this._nextBuf();
    }
    if (length === null) {
      this._d.setUint8(this._offset++, 0xc0);
    } else if (length <= 0xf) {
      this._d.setUint8(this._offset++, 0x80 | length);
    } else if (length <= 0xffff) {
      this._d.setUint8(this._offset++, 0xde);
      this._d.setUint16(this._offset, length);
      this._offset += 2;
    } else if (length <= 0xffffffff) {
      this._d.setUint8(this._offset++, 0xdf);
      this._d.setUint32(this._offset, length);
      this._offset += 4;
    } else {
      throw new Error("Max map length is 0xFFFFFFFF");
    }
  }

  public takeBytes(): Buffer {
    if (this._builder.length === 0) {
      return this._buf.slice(0, this._offset);
    }

    this._flushBuf();
    return Buffer.concat(this._builder);
  }

  public pack(v: any): this {
    if (v === null) {
      this.packNull();
    } else if (typeof v === "number") {
      if (Number.isInteger(v)) {
        this.packInt(v);
      } else {
        this.packDouble(v);
      }
    } else if (typeof v === "boolean") {
      this.packBool(v);
    } else if (typeof v === "string") {
      this.packString(v);
    } else if (v instanceof Uint8Array) {
      this.packBinary(Buffer.from(v));
    } else if (Array.isArray(v)) {
      this.packListLength(v.length);
      for (const item of v) {
        this.pack(item);
      }
    } else if (v instanceof Map) {
      this.packMapLength(v.size);
      v.forEach((value, key) => {
        this.pack(key);
        this.pack(value);
      });
    } else if (
      v instanceof MediaFormat ||
      v instanceof DirectoryReference ||
      v instanceof FileReference ||
      v instanceof FileVersion ||
      v instanceof FileVersionThumbnail
    ) {
      this.pack(v.encode());
    } else if (v instanceof CID) {
      this.pack(v.toBytes());
    } else if (v instanceof NodeId) {
      this.pack(v.bytes);
    } else {
      throw new Error(`Could not pack type: ${typeof v}`);
    }

    return this;
  }
}

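The Packer above writes standard MessagePack format bytes. A minimal sketch of what a few calls emit, assuming the package-internal `#serialization/pack.js` import alias resolves in the calling module:

```ts
import Packer from "#serialization/pack.js";

const p = new Packer();
p.packInt(300); // 0xcd 0x01 0x2c  (uint16, big-endian)
p.packString("hi"); // 0xa2 0x68 0x69  (fixstr of length 2)
p.packBool(true); // 0xc3
p.packListLength(3); // 0x93  (fixarray header; the 3 elements would follow)

console.log(p.takeBytes().toString("hex")); // "cd012ca26869c393"
```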
@@ -0,0 +1,205 @@
export default class Unpacker {
  private _list: Buffer;
  private _offset: number = 0;
  private _d: DataView;

  constructor(list: Buffer) {
    this._list = list;
    this._d = new DataView(list.buffer, list.byteOffset);
  }

  public static fromPacked(data: Uint8Array) {
    return new Unpacker(Buffer.from(data));
  }

  public unpackBool(): boolean | null {
    const b = this._d.getUint8(this._offset++);
    if (b === 0xc2) return false;
    if (b === 0xc3) return true;
    if (b === 0xc0) return null;
    throw this._formatException("bool", b);
  }

  public unpackInt(): number | null {
    const b = this._d.getUint8(this._offset++);
    if (b <= 0x7f) {
      // positive fixint
      return b;
    } else if (b >= 0xe0 && b <= 0xff) {
      // negative fixint (-32..-1)
      return b - 256;
    } else if (b === 0xcc) {
      return this._d.getUint8(this._offset++);
    } else if (b === 0xcd) {
      this._offset += 2;
      return this._d.getUint16(this._offset - 2);
    } else if (b === 0xce) {
      this._offset += 4;
      return this._d.getUint32(this._offset - 4);
    } else if (b === 0xcf) {
      this._offset += 8;
      const high = this._d.getUint32(this._offset - 8);
      const low = this._d.getUint32(this._offset - 4);
      return high * 0x100000000 + low;
    } else if (b === 0xd0) {
      return this._d.getInt8(this._offset++);
    } else if (b === 0xd1) {
      this._offset += 2;
      return this._d.getInt16(this._offset - 2);
    } else if (b === 0xd2) {
      this._offset += 4;
      return this._d.getInt32(this._offset - 4);
    } else if (b === 0xd3) {
      this._offset += 8;
      const high = this._d.getInt32(this._offset - 8);
      const low = this._d.getUint32(this._offset - 4);
      return high * 0x100000000 + low;
    } else if (b === 0xc0) {
      return null;
    } else {
      throw this._formatException("integer", b);
    }
  }

  public unpackDouble(): number | null {
    const b = this._d.getUint8(this._offset++);
    if (b === 0xca) {
      this._offset += 4;
      return this._d.getFloat32(this._offset - 4);
    } else if (b === 0xcb) {
      this._offset += 8;
      return this._d.getFloat64(this._offset - 8);
    } else if (b === 0xc0) {
      return null;
    } else {
      throw this._formatException("double", b);
    }
  }
  public unpackString(): string | null {
    const b = this._d.getUint8(this._offset++);
    let len: number;
    if ((b & 0xe0) === 0xa0) {
      len = b & 0x1f;
    } else if (b === 0xd9) {
      len = this._d.getUint8(this._offset++);
    } else if (b === 0xda) {
      this._offset += 2;
      len = this._d.getUint16(this._offset - 2);
    } else if (b === 0xdb) {
      this._offset += 4;
      len = this._d.getUint32(this._offset - 4);
    } else if (b === 0xc0) {
      return null;
    } else {
      throw this._formatException("String", b);
    }
    const str = this._list.toString("utf-8", this._offset, this._offset + len);
    this._offset += len;
    return str;
  }

  public unpackBinary(): Buffer {
    const b = this._d.getUint8(this._offset++);
    let len: number;
    if (b === 0xc4) {
      len = this._d.getUint8(this._offset++);
    } else if (b === 0xc5) {
      this._offset += 2;
      len = this._d.getUint16(this._offset - 2);
    } else if (b === 0xc6) {
      this._offset += 4;
      len = this._d.getUint32(this._offset - 4);
    } else if (b === 0xc0) {
      len = 0;
    } else {
      throw this._formatException("Binary", b);
    }
    const data = this._list.slice(this._offset, this._offset + len);
    this._offset += len;
    return data;
  }

  public unpackList(): any[] {
    const length = this.unpackListLength();
    return Array.from({ length }, () => this._unpack());
  }

  public unpackMap(): { [key: string]: any } {
    const length = this.unpackMapLength();
    const obj: { [key: string]: any } = {};
    for (let i = 0; i < length; i++) {
      const key = this._unpack();
      obj[key as string] = this._unpack();
    }
    return obj;
  }

  public unpackListLength(): number {
    const b = this._d.getUint8(this._offset++);
    if ((b & 0xf0) === 0x90) {
      return b & 0xf;
    } else if (b === 0xdc) {
      this._offset += 2;
      return this._d.getUint16(this._offset - 2);
    } else if (b === 0xdd) {
      this._offset += 4;
      return this._d.getUint32(this._offset - 4);
    } else if (b === 0xc0) {
      return 0;
    } else {
      throw this._formatException("List length", b);
    }
  }

  public unpackMapLength(): number {
    const b = this._d.getUint8(this._offset++);
    if ((b & 0xf0) === 0x80) {
      return b & 0xf;
    } else if (b === 0xde) {
      this._offset += 2;
      return this._d.getUint16(this._offset - 2);
    } else if (b === 0xdf) {
      this._offset += 4;
      return this._d.getUint32(this._offset - 4);
    } else if (b === 0xc0) {
      return 0;
    } else {
      throw this._formatException("Map length", b);
    }
  }

  private _unpack(): any {
    const b = this._d.getUint8(this._offset);
    if (
      b <= 0x7f ||
      b >= 0xe0 ||
      b === 0xcc ||
      b === 0xcd ||
      b === 0xce ||
      b === 0xcf ||
      b === 0xd0 ||
      b === 0xd1 ||
      b === 0xd2 ||
      b === 0xd3
    ) {
      return this.unpackInt();
    } else if (b === 0xc2 || b === 0xc3 || b === 0xc0) {
      return this.unpackBool();
    } else if (b === 0xca || b === 0xcb) {
      return this.unpackDouble();
    } else if ((b & 0xe0) === 0xa0 || b === 0xd9 || b === 0xda || b === 0xdb) {
      return this.unpackString();
    } else if (b === 0xc4 || b === 0xc5 || b === 0xc6) {
      return this.unpackBinary();
    } else if ((b & 0xf0) === 0x90 || b === 0xdc || b === 0xdd) {
      return this.unpackList();
    } else if ((b & 0xf0) === 0x80 || b === 0xde || b === 0xdf) {
      return this.unpackMap();
    } else {
      throw this._formatException("Unknown", b);
    }
  }

  private _formatException(type: string, b: number) {
    return new Error(
      `Tried to unpack a ${type} value, but byte ${b} is not a ${type}`,
    );
  }
}

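Packer and Unpacker are symmetric, so structured values round-trip through the same byte format. A hedged sketch of that round trip (the import specifiers are package-internal aliases and assumed to resolve here); note that `unpackMap()` returns a plain object rather than a `Map`:

```ts
import Packer from "#serialization/pack.js";
import Unpacker from "#serialization/unpack.js";

const packer = new Packer();
packer.pack(
  new Map<string, any>([
    ["name", "s5"],
    ["port", 5522],
    ["tags", ["p2p", "registry"]],
  ]),
);

const u = Unpacker.fromPacked(packer.takeBytes());
// unpackMap() yields a plain object keyed by the unpacked key values.
console.log(u.unpackMap()); // { name: "s5", port: 5522, tags: ["p2p", "registry"] }
```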
@@ -0,0 +1,368 @@
import { Multihash } from "../multihash.js";
import NodeId from "../nodeId.js";
import { Logger, Peer, SignedMessage } from "../types.js";
import KeyPairEd25519 from "../ed25519.js";
import * as crypto from "crypto";
import {
  protocolMethodAnnouncePeers,
  protocolMethodHandshakeOpen,
  protocolMethodHashQuery,
  protocolMethodSignedMessage,
  recordTypeStorageLocation,
  storageLocationTypeFull,
} from "../constants.js";
import defer from "p-defer";
import { calculateScore, encodeEndian } from "#util.js";
import Packer from "#serialization/pack.js";
import Unpacker from "#serialization/unpack.js";
import { ed25519 } from "@noble/curves/ed25519";
import { AbstractLevel, AbstractSublevel } from "abstract-level";
import StorageLocation from "#storage.js";
import { S5Node, stringifyNode } from "#node.js";
import { URL } from "url";
import { Buffer } from "buffer";
import {
  createTransportPeer,
  createTransportSocket,
} from "#transports/index.js";
import messages from "#messages/index.js";
import { EventEmitter } from "events";

export class P2PService extends EventEmitter {
  private logger: Logger;
  private nodeKeyPair: KeyPairEd25519;
  private localNodeId?: NodeId;
  private nodesDb?: AbstractSublevel<
    AbstractLevel<Uint8Array, string, Uint8Array>,
    Uint8Array,
    string,
    Uint8Array
  >;

  constructor(node: S5Node) {
    super();
    this._node = node;
    this._networkId = node.config.p2p?.network;
    this.nodeKeyPair = node.config.keyPair;
    this.logger = node.logger;

    node.services.p2p = this;
  }
  private _networkId?: string;

  get networkId(): string {
    return this._networkId as string;
  }

  private _node: S5Node;

  get node(): S5Node {
    return this._node;
  }

  private _reconnectDelay: Map<string, number> = new Map();

  get reconnectDelay(): Map<string, number> {
    return this._reconnectDelay;
  }

  private _selfConnectionUris: Array<URL> = [];

  get selfConnectionUris(): Array<URL> {
    return this._selfConnectionUris;
  }

  private _peers: Map<string, Peer> = new Map();

  get peers(): Map<string, Peer> {
    return this._peers;
  }

  async init(): Promise<void> {
    // the local node id is derived from this node's ed25519 public key
    this.localNodeId = new NodeId(this.nodeKeyPair.publicKey);
    this.nodesDb = this._node.db.sublevel<string, Uint8Array>("s5-nodes", {
      valueEncoding: "buffer",
    });
  }

  async start(): Promise<void> {
    const initialPeers = this._node.config?.p2p?.peers?.initial || [];

    for (const p of initialPeers) {
      this.connectToNode([new URL(p)]);
    }
  }
  async stop() {
    [...this.node.services.p2p.peers.values()].forEach((peer) => peer.end());
  }

  async onNewPeer(peer: Peer, verifyId: boolean): Promise<void> {
    peer.challenge = crypto.randomBytes(32);

    const initialAuthPayloadPacker = new Packer();
    initialAuthPayloadPacker.packInt(protocolMethodHandshakeOpen);
    initialAuthPayloadPacker.packBinary(Buffer.from(peer.challenge));
    if (this._networkId) {
      initialAuthPayloadPacker.packString(this._networkId);
    }

    const completer = defer<void>();

    peer.listenForMessages(
      async (event: Uint8Array) => {
        let u = Unpacker.fromPacked(event);
        const method = u.unpackInt();

        if (method !== null && messages.has(method)) {
          await messages.get(method)?.(this.node, peer, u, event, verifyId);
        }
      },
      {
        onDone: async () => {
          try {
            if (this._peers.has(peer.id.toString())) {
              this._peers.delete(peer.id.toString());
              this.logger.info(
                `[-] ${peer.id.toString()} (${peer
                  .renderLocationUri()
                  .toString()})`,
              );
            }
          } catch (_) {
            this.logger.info(`[-] ${peer.renderLocationUri()}`);
          }
          completer.reject("onDone");
        },
        onError: (e) => {
          this.logger.warn(`${peer.id}: ${e}`);
        },
        logger: this.logger,
      },
    );
    peer.sendMessage(initialAuthPayloadPacker.takeBytes());

    return completer.promise;
  }

  async prepareProvideMessage(
    hash: Multihash,
    location: StorageLocation,
  ): Promise<Uint8Array> {
    const list: number[] = [
      recordTypeStorageLocation,
      ...hash.fullBytes,
      location.type,
      ...encodeEndian(location.expiry, 4),
      location.parts.length,
    ];

    for (const part of location.parts) {
      const bytes = new TextEncoder().encode(part);
      list.push(...encodeEndian(bytes.length, 2));
      list.push(...Array.from(bytes));
    }
    list.push(0);

    const signature = ed25519.sign(
      new Uint8Array(list),
      this.nodeKeyPair.extractBytes(),
    );

    return new Uint8Array([
      ...list,
      ...Array.from(this.nodeKeyPair.publicKey),
      ...Array.from(signature),
    ]);
  }

  async sendPublicPeersToPeer(peer: Peer, peersToSend: Peer[]): Promise<void> {
    const p = new Packer();
    p.packInt(protocolMethodAnnouncePeers);

    p.packInt(peersToSend.length);
    for (const pts of peersToSend) {
      p.packBinary(Buffer.from(pts.id.bytes));
      p.packBool(pts.isConnected);
      p.packInt(pts.connectionUris.length);
      for (const uri of pts.connectionUris) {
        p.packString(uri.toString());
      }
    }
    peer.sendMessage(await this.signMessageSimple(p.takeBytes()));
  }

  async getNodeScore(nodeId: NodeId): Promise<number> {
    if (nodeId.equals(this.localNodeId)) {
      return 1;
    }
    const node = await this.nodesDb?.get(stringifyNode(nodeId));
    if (!node) {
      return 0.5;
    }
    // unpackMap() returns a plain object keyed by the unpacked key values
    const map = Unpacker.fromPacked(node).unpackMap();
    return calculateScore(map[1] ?? 0, map[2] ?? 0);
  }

  async upvote(nodeId: NodeId): Promise<void> {
    await this._vote(nodeId, true);
  }

  async downvote(nodeId: NodeId): Promise<void> {
    await this._vote(nodeId, false);
  }

  // TODO add a bit of randomness with multiple options
  async sortNodesByScore(nodes: NodeId[]): Promise<NodeId[]> {
    const nodePromises = nodes.map(
      (item): [NodeId, Promise<number> | number] => [
        item,
        this.getNodeScore(item),
      ],
    );

    await Promise.all(nodePromises.map((item) => item[1]));

    for (let i = 0; i < nodePromises.length; i++) {
      nodePromises[i][1] = await nodePromises[i][1];
    }

    return nodePromises
      .sort((a: [NodeId, any], b: [NodeId, any]) => b[1] - a[1])
      .map((item) => item[0]);
  }

  async signMessageSimple(message: Uint8Array): Promise<Uint8Array> {
    const packer = new Packer();

    const signature = ed25519.sign(message, this.nodeKeyPair.extractBytes());

    packer.packInt(protocolMethodSignedMessage);
    packer.packBinary(Buffer.from(this.localNodeId!.bytes));

    packer.packBinary(Buffer.from(signature));
    packer.packBinary(Buffer.from(message));

    return packer.takeBytes();
  }

  async unpackAndVerifySignature(u: Unpacker): Promise<SignedMessage> {
    const nodeId = new NodeId(u.unpackBinary());
    const signature = u.unpackBinary();
    const message = u.unpackBinary();

    const isValid = ed25519.verify(
      signature,
      message,
      nodeId.bytes.subarray(1),
    );

    if (!isValid) {
      throw new Error("Invalid signature found");
    }
    return {
      nodeId: nodeId,
      message: message,
    };
  }

  sendHashRequest(
    hash: Multihash,
    types: number[] = [storageLocationTypeFull],
  ): void {
    const p = new Packer();

    p.packInt(protocolMethodHashQuery);
    p.packBinary(Buffer.from(hash.fullBytes));
    p.pack(types);
    // TODO Maybe add int for hop count (or not because privacy concerns)

    const req = p.takeBytes();

    for (const peer of this._peers.values()) {
      peer.sendMessage(req);
    }
  }

  async connectToNode(connectionUris: URL[], retried = false): Promise<void> {
    if (!this.node.started) {
      return;
    }

    const unsupported = new URL("http://0.0.0.0");
    unsupported.protocol = "unsupported";

    const connectionUri =
      connectionUris.find((uri) => ["ws:", "wss:"].includes(uri.protocol)) ||
      connectionUris.find((uri) => uri.protocol === "tcp:") ||
      unsupported;

    if (connectionUri.protocol === "unsupported") {
      throw new Error(
        `None of the available connection URIs are supported (${connectionUris})`,
      );
    }

    const protocol = connectionUri.protocol.replace(":", "");

    if (!connectionUri.username) {
      throw new Error("Connection URI does not contain node id");
    }

    const id = NodeId.decode(connectionUri.username);

    this._reconnectDelay.set(
      id.toString(),
      this._reconnectDelay.get(id.toString()) || 1,
    );

    if (id.equals(this.localNodeId)) {
      return;
    }

    try {
      this.logger.verbose(`[connect] ${connectionUri}`);

      const socket = await createTransportSocket(protocol, connectionUri);
      const peer = createTransportPeer(protocol, {
        socket,
        uris: [connectionUri],
      });

      peer.id = id;
      await this.onNewPeer(peer, true);
    } catch (e) {
      if (retried) {
        return;
      }
      retried = true;

      this.logger.catched(e);

      const delay = this._reconnectDelay.get(id.toString())!;
      this._reconnectDelay.set(id.toString(), delay * 2);
      await new Promise((resolve) => setTimeout(resolve, delay * 1000));

      await this.connectToNode(connectionUris, retried);
    }
  }

  private async _vote(nodeId: NodeId, upvote: boolean): Promise<void> {
    const node = await this.nodesDb?.get(stringifyNode(nodeId));
    // unpackMap() yields a plain object, so normalize both branches to a Map
    // before updating the vote counters and re-packing.
    const map = node
      ? new Map<number, number>(
          Object.entries(Unpacker.fromPacked(node).unpackMap()).map(
            ([k, v]) => [+k, v as number] as [number, number],
          ),
        )
      : new Map<number, number>([
          [1, 0],
          [2, 0],
        ]);

    if (upvote) {
      map.set(1, (map.get(1) ?? 0) + 1);
    } else {
      map.set(2, (map.get(2) ?? 0) + 1);
    }

    await this.nodesDb?.put(
      stringifyNode(nodeId),
      new Packer().pack(map).takeBytes(),
    );
  }
}

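For reference, `signMessageSimple` and `unpackAndVerifySignature` agree on a simple frame: the `protocolMethodSignedMessage` method id, the sender's node id, the signature, and the raw payload, each packed in order. A hedged sketch of that frame built and checked outside the service; it assumes the constants are importable as `#constants.js` (the service itself uses `../constants.js`) and that node ids are the multicodec-prefixed ed25519 public key, as the code above suggests:

```ts
import { ed25519 } from "@noble/curves/ed25519";
import Packer from "#serialization/pack.js";
import Unpacker from "#serialization/unpack.js";
import { protocolMethodSignedMessage, CID_HASH_TYPES } from "#constants.js";
import { Buffer } from "buffer";

const secretKey = ed25519.utils.randomPrivateKey();
// assumed: node ids are the ed25519 public key with a one-byte multicodec prefix
const nodeIdBytes = Uint8Array.from([
  CID_HASH_TYPES.ED25519,
  ...ed25519.getPublicKey(secretKey),
]);

const message = new TextEncoder().encode("ping");
const signature = ed25519.sign(message, secretKey);

// Same frame layout as signMessageSimple: method, node id, signature, payload.
const p = new Packer();
p.packInt(protocolMethodSignedMessage);
p.packBinary(Buffer.from(nodeIdBytes));
p.packBinary(Buffer.from(signature));
p.packBinary(Buffer.from(message));
const frame = p.takeBytes();

// Receiving side, mirroring unpackAndVerifySignature.
const u = Unpacker.fromPacked(frame);
console.log(u.unpackInt() === protocolMethodSignedMessage); // true
const pk = u.unpackBinary();
const sig = u.unpackBinary();
const msg = u.unpackBinary();
console.log(ed25519.verify(sig, msg, pk.subarray(1))); // true
```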
@@ -0,0 +1,295 @@
import { Logger, Peer, SignedRegistryEntry } from "#types.js";
import { AbstractLevel, AbstractSublevel } from "abstract-level";
import {
  CID_HASH_TYPES,
  protocolMethodRegistryQuery,
  recordTypeRegistryEntry,
  registryMaxDataSize,
} from "#constants.js";
import { Multihash } from "#multihash.js";
import { base64url } from "multiformats/bases/base64";
import { decodeEndian, encodeEndian } from "#util.js";
import { ed25519 } from "@noble/curves/ed25519";
import Packer from "#serialization/pack.js";
import { Buffer } from "buffer";
import { EventEmitter } from "events";
import KeyPairEd25519 from "#ed25519.js";
import { S5Node, stringifyBytes } from "#node.js";

export class RegistryService {
  private db?: AbstractSublevel<
    AbstractLevel<Uint8Array, string, Uint8Array>,
    Uint8Array,
    string,
    Uint8Array
  >;
  private node: S5Node;
  private logger: Logger;
  private streams: Map<string, EventEmitter> = new Map<string, EventEmitter>();
  private subs: Map<string, Multihash> = new Map<string, Multihash>();

  constructor(node: S5Node) {
    this.node = node;
    this.logger = this.node.logger;
    node.services.registry = this;
  }

  async init(): Promise<void> {
    this.db = this.node.db.sublevel<string, Uint8Array>("s5-registry-db", {
      valueEncoding: "buffer",
    });
  }

  async set(
    sre: SignedRegistryEntry,
    trusted: boolean = false,
    receivedFrom?: Peer,
  ): Promise<void> {
    this.logger.verbose(
      `[registry] set ${base64url.encode(sre.pk)} ${sre.revision} (${
        receivedFrom?.id
      })`,
    );

    if (!trusted) {
      if (sre.pk.length !== 33) {
        throw new Error("Invalid pubkey");
      }
      if (sre.pk[0] !== CID_HASH_TYPES.ED25519) {
        throw new Error("Only ed25519 keys are supported");
      }
      if (sre.revision < 0 || sre.revision > 281474976710656) {
        throw new Error("Invalid revision");
      }
      if (sre.data.length > registryMaxDataSize) {
        throw new Error("Data too long");
      }

      const isValid = this.verifyRegistryEntry(sre);
      if (!isValid) {
        throw new Error("Invalid signature found");
      }
    }

    const existingEntry = await this.getFromDB(sre.pk);

    if (existingEntry) {
      if (receivedFrom) {
        if (existingEntry.revision === sre.revision) {
          return;
        } else if (existingEntry.revision > sre.revision) {
          const updateMessage = this.serializeRegistryEntry(existingEntry);
          receivedFrom.sendMessage(updateMessage);
          return;
        }
      }

      if (existingEntry.revision >= sre.revision) {
        throw new Error("Revision number too low");
      }
    }

    const key = new Multihash(sre.pk);
    this.streams.get(key.toString())?.emit("event", sre);

    this.db?.put(stringifyBytes(sre.pk), this.serializeRegistryEntry(sre));

    this.broadcastEntry(sre, receivedFrom);
  }

  // TODO: Clean this table after some time
  // TODO: If there are more than X peers, only broadcast to subscribed nodes (routing table) and shard-nodes (256)
  broadcastEntry(sre: SignedRegistryEntry, receivedFrom?: Peer): void {
    this.logger.verbose("[registry] broadcastEntry");
    const updateMessage = this.serializeRegistryEntry(sre);

    for (const p of [...this.node.services.p2p.peers.values()]) {
      if (receivedFrom == null || p.id !== receivedFrom.id) {
        p.sendMessage(updateMessage);
      }
    }
  }

  sendRegistryRequest(pk: Uint8Array): void {
    const p = new Packer();

    p.packInt(protocolMethodRegistryQuery);
    p.packBinary(Buffer.from(pk));

    const req = p.takeBytes();

    // TODO: Use shard system if there are more than X peers
    for (const peer of [...this.node.services.p2p.peers.values()]) {
      peer.sendMessage(req);
    }
  }

  async get(pk: Uint8Array): Promise<SignedRegistryEntry | null> {
    const key = new Multihash(pk);
    const keyString = key.toString();
    if (this.subs.has(keyString)) {
      this.logger.verbose(`[registry] get (subbed) ${key}`);
      let res = await this.getFromDB(pk);
      if (res !== null) {
        return res;
      }
      this.sendRegistryRequest(pk);
      await pTimeout(200);
      return this.getFromDB(pk);
    } else {
      this.sendRegistryRequest(pk);
      this.subs.set(keyString, key);
      if (!this.streams.has(keyString)) {
        this.streams.set(keyString, new EventEmitter());
      }
      let res = await this.getFromDB(pk);
      if (res === null) {
        this.logger.verbose(`[registry] get (clean) ${key}`);
        for (let i = 0; i < 200; i++) {
          await pTimeout(10);
          if ((await this.getFromDB(pk)) !== null) {
            break;
          }
        }
      } else {
        this.logger.verbose(`[registry] get (cached) ${key}`);
        await pTimeout(200);
      }
      return this.getFromDB(pk);
    }
  }

  private async setEntryHelper(
    keyPair: KeyPairEd25519,
    data: Uint8Array,
  ): Promise<void> {
    const revision = Math.round(Date.now() / 1000);

    const sre: SignedRegistryEntry = this.signRegistryEntry({
      kp: keyPair,
      data,
      revision,
    });

    await this.set(sre);
  }

  signRegistryEntry({
    kp,
    data,
    revision,
  }: {
    kp: KeyPairEd25519;
    data: Uint8Array;
    revision: number;
  }): SignedRegistryEntry {
    return signRegistryEntry({ kp, data, revision });
  }

  async getFromDB(pk: Uint8Array): Promise<SignedRegistryEntry | null> {
    let val;
    try {
      val = await this.db?.get(stringifyBytes(pk));
    } catch {}

    if (val) {
      return this.deserializeRegistryEntry(val);
    }

    return null;
  }

  public listen(
    pk: Uint8Array,
    cb: (sre: SignedRegistryEntry) => void,
  ): () => void {
    const key = new Multihash(pk).toString();
    // streams is a Map, so use has/set/get instead of property access
    if (!this.streams.has(key)) {
      this.streams.set(key, new EventEmitter());
      this.sendRegistryRequest(pk);
    }
    const stream = this.streams.get(key) as EventEmitter;

    const done = () => {
      stream.off("event", cb);
    };

    stream.on("event", cb);

    return done;
  }

  public deserializeRegistryEntry(event: Uint8Array): SignedRegistryEntry {
    return deserializeRegistryEntry(event);
  }

  public verifyRegistryEntry(sre: SignedRegistryEntry): boolean {
    return verifyRegistryEntry(sre);
  }
  public serializeRegistryEntry(sre: SignedRegistryEntry): Uint8Array {
    return serializeRegistryEntry(sre);
  }
}

async function pTimeout(ms: number) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

export function deserializeRegistryEntry(
  event: Uint8Array,
): SignedRegistryEntry {
  const dataLength = event[42];
  return {
    pk: event.slice(1, 34),
    revision: decodeEndian(event.slice(34, 42)),
    data: event.slice(43, 43 + dataLength),
    signature: event.slice(43 + dataLength),
  };
}

export function verifyRegistryEntry(sre: SignedRegistryEntry): boolean {
  const list: Uint8Array = Uint8Array.from([
    recordTypeRegistryEntry,
    ...encodeEndian(sre.revision, 8),
    sre.data.length, // 1 byte
    ...sre.data,
  ]);

  return ed25519.verify(sre.signature, list, sre.pk.slice(1));
}
export function serializeRegistryEntry(sre: SignedRegistryEntry): Uint8Array {
  return Uint8Array.from([
    recordTypeRegistryEntry,
    ...sre.pk,
    ...encodeEndian(sre.revision, 8),
    sre.data.length,
    ...sre.data,
    ...sre.signature,
  ]);
}

export function signRegistryEntry({
  kp,
  data,
  revision,
}: {
  kp: KeyPairEd25519;
  data: Uint8Array;
  revision: number;
}): SignedRegistryEntry {
  const list = new Uint8Array([
    recordTypeRegistryEntry,
    ...encodeEndian(revision, 8),
    data.length,
    ...data,
  ]);

  const signature = ed25519.sign(list, kp.extractBytes());

  return {
    pk: kp.publicKey,
    revision,
    data,
    signature: new Uint8Array(signature),
  };
}

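The exported helpers make the registry entry wire format easy to exercise directly: one record-type byte, the 33-byte prefixed public key, an 8-byte little-endian revision, a one-byte data length, the data, and a signature over everything except the public key. A hedged round-trip sketch; the `#service/registry.js` module specifier is an assumption, the rest is taken from the code above:

```ts
import { ed25519 } from "@noble/curves/ed25519";
import { CID_HASH_TYPES, recordTypeRegistryEntry } from "#constants.js";
import { encodeEndian } from "#util.js";
import {
  serializeRegistryEntry,
  deserializeRegistryEntry,
  verifyRegistryEntry,
} from "#service/registry.js"; // specifier assumed

const secretKey = ed25519.utils.randomPrivateKey();
// 33 bytes: multicodec prefix + raw ed25519 public key
const pk = Uint8Array.from([
  CID_HASH_TYPES.ED25519,
  ...ed25519.getPublicKey(secretKey),
]);

const revision = 1;
const data = new TextEncoder().encode("hello");

// Sign the same payload layout used by signRegistryEntry/verifyRegistryEntry:
// recordType ++ revision (8 bytes, little-endian) ++ dataLength (1 byte) ++ data
const payload = Uint8Array.from([
  recordTypeRegistryEntry,
  ...encodeEndian(revision, 8),
  data.length,
  ...data,
]);
const signature = ed25519.sign(payload, secretKey);

const sre = { pk, revision, data, signature };
const wire = serializeRegistryEntry(sre);
const roundTripped = deserializeRegistryEntry(wire);

console.log(roundTripped.revision === 1); // true
console.log(verifyRegistryEntry(roundTripped)); // true
```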
@@ -0,0 +1,169 @@
import { S5Node } from "#node.js";
import { Multihash } from "#multihash.js";
import NodeId from "#nodeId.js";

export default class StorageLocation {
  type: number;
  parts: string[];
  binaryParts: Uint8Array[] = [];
  expiry: number; // Unix timestamp in seconds
  providerMessage?: Uint8Array; // Made optional, similar to `late` in Dart

  constructor(type: number, parts: string[], expiry: number) {
    this.type = type;
    this.parts = parts;
    this.expiry = expiry;
  }

  get bytesUrl(): string {
    return this.parts[0];
  }

  get outboardBytesUrl(): string {
    if (this.parts.length === 1) {
      return `${this.parts[0]}.obao`;
    }
    return this.parts[1];
  }

  toString(): string {
    const expiryDate = new Date(this.expiry * 1000);
    return `StorageLocation(${this.type}, ${
      this.parts
    }, expiry: ${expiryDate.toISOString()})`;
  }
}

export class StorageLocationProvider {
  private node: S5Node;
  private hash: Multihash;
  private types: number[];

  private static readonly storageLocationTypeFull: number = 0; // Example value, adjust as necessary
  private readonly timeoutDuration: number = 60000; // Duration in milliseconds

  private availableNodes: NodeId[] = [];
  private uris: Map<NodeId, StorageLocation> = new Map<
    NodeId,
    StorageLocation
  >();

  private timeout?: Date;
  private isTimedOut: boolean = false;

  constructor(
    node: S5Node,
    hash: Multihash,
    types: number[] = [StorageLocationProvider.storageLocationTypeFull],
  ) {
    this.node = node;
    this.hash = hash;
    this.types = types;
  }

  async start(): Promise<void> {
    this.uris = new Map(
      await this.node.getCachedStorageLocations(this.hash, this.types),
    );

    this.availableNodes = Array.from(this.uris.keys());
    // sortNodesByScore returns a new sorted array, so keep its result
    this.availableNodes = await this.node.services.p2p.sortNodesByScore(
      this.availableNodes,
    );

    this.timeout = new Date(Date.now() + this.timeoutDuration);

    let requestSent = false;

    while (true) {
      const newUris = new Map(
        await this.node.getCachedStorageLocations(this.hash, this.types),
      );

      if (
        this.availableNodes.length === 0 &&
        newUris.size < 2 &&
        !requestSent
      ) {
        this.node.services.p2p.sendHashRequest(this.hash, this.types);
        requestSent = true;
      }

      let hasNewNode = false;

      for (const [key, value] of newUris) {
        if (!this.uris.has(key) || this.uris.get(key) !== value) {
          this.uris.set(key, value);
          if (!this.availableNodes.includes(key)) {
            this.availableNodes.push(key);
            hasNewNode = true;
          }
        }
      }

      if (hasNewNode) {
        this.availableNodes = await this.node.services.p2p.sortNodesByScore(
          this.availableNodes,
        );
      }

      await new Promise((resolve) => setTimeout(resolve, 10));

      if (new Date() > this.timeout) {
        this.isTimedOut = true;
        return;
      }

      while (this.availableNodes.length > 0 || !this.isWaitingForUri) {
        await new Promise((resolve) => setTimeout(resolve, 10));
        if (new Date() > this.timeout) {
          this.isTimedOut = true;
          return;
        }
      }
    }
  }

  private isWaitingForUri: boolean = false;

  async next(): Promise<SignedStorageLocation> {
    this.timeout = new Date(Date.now() + this.timeoutDuration);

    while (true) {
      if (this.availableNodes.length > 0) {
        this.isWaitingForUri = false;
        const nodeId = this.availableNodes.shift()!;

        return new SignedStorageLocation(nodeId, this.uris.get(nodeId)!);
      }

      this.isWaitingForUri = true;

      if (this.isTimedOut) {
        throw new Error(
          `Could not download raw file: Timed out after ${this.timeoutDuration}ms ${this.hash}`,
        );
      }

      await new Promise((resolve) => setTimeout(resolve, 10));
    }
  }

  upvote(uri: SignedStorageLocation): void {
    this.node.services.p2p.upvote(uri.nodeId);
  }

  downvote(uri: SignedStorageLocation): void {
    this.node.services.p2p.downvote(uri.nodeId);
  }
}

class SignedStorageLocation {
  nodeId: NodeId;
  location: StorageLocation;

  constructor(nodeId: NodeId, location: StorageLocation) {
    this.nodeId = nodeId;
    this.location = location;
  }

  toString(): string {
    return `SignedStorageLocation(${this.location}, ${this.nodeId})`;
  }
}

@@ -0,0 +1,43 @@
import { Logger, Peer, PeerConstructorOptions } from "#types.js";
import { URL } from "url";
import NodeId from "#nodeId.js";

export abstract class BasePeer implements Peer {
  connectionUris: URL[];
  isConnected: boolean = false;
  challenge: Uint8Array;
  protected _socket: any;

  constructor({ socket, uris = [] }: PeerConstructorOptions) {
    this.connectionUris = uris.map((uri) => new URL(uri.toString()));
    this.challenge = new Uint8Array();
    if (socket) {
      this._socket = socket;
    }
  }

  private _id?: NodeId;

  get id(): NodeId {
    return this._id as NodeId;
  }

  set id(value: NodeId) {
    this._id = value;
  }

  abstract sendMessage(message: Uint8Array): void;

  abstract renderLocationUri(): string;

  abstract listenForMessages(
    callback: (event: any) => Promise<void>,
    {
      onDone,
      onError,
      logger,
    }: { onDone?: any; onError?: (...args: any[]) => void; logger: Logger },
  ): void;

  abstract end(): void;
}

@@ -0,0 +1,46 @@
import { URL } from "url";
import { TcpPeer } from "#transports/tcp.js";
import { WebSocketPeer } from "#transports/webSocket.js";
import { PeerConstructorOptions, PeerStatic } from "#types.js";
import isNode from "detect-node";
import { BasePeer } from "#transports/base.js";
const transports = new Map<string, PeerStatic>();

export function registerTransport(type: string, transport: PeerStatic) {
  transports.set(type, transport);
}

export function isTransport(type: string) {
  return transports.has(type);
}

export function createTransportSocket(type: string, uri: URL) {
  if (!isTransport(type)) {
    throw new Error(`transport ${type} does not exist`);
  }

  const transport = transports.get(type) as PeerStatic;

  return transport.connect(uri);
}

export function createTransportPeer(
  type: string,
  params: PeerConstructorOptions,
) {
  if (!isTransport(type)) {
    throw new Error(`transport ${type} does not exist`);
  }

  const transport = transports.get(type) as PeerStatic;

  return new transport(params);
}

export { BasePeer };

if (isNode) {
  registerTransport("tcp", TcpPeer);
}
registerTransport("ws", WebSocketPeer);
registerTransport("wss", WebSocketPeer);

@@ -0,0 +1,80 @@
import { Logger, Peer } from "../types.js";
import * as net from "net";
import { URL } from "url";
import { decodeEndian } from "../util.js";
import * as console from "console";
import { BasePeer } from "#transports/base.js";

export class TcpPeer extends BasePeer implements Peer {
  declare _socket: net.Socket;

  sendMessage(message: Uint8Array): void {
    this._socket.write(message);
  }

  renderLocationUri(): string {
    return this.connectionUris.length === 0
      ? (this._socket.remoteAddress as string)
      : this.connectionUris[0].toString();
  }

  listenForMessages(
    callback: (event: any) => Promise<void>,
    {
      onDone,
      onError,
      logger,
    }: { onDone?: any; onError?: (...args: any[]) => void; logger: Logger },
  ): void {
    const listener = (data: Uint8Array) => {
      let pos = 0;

      while (pos < data.length) {
        const lengthBuffer = data.slice(pos, pos + 4);
        const length = decodeEndian(lengthBuffer);

        if (data.length < pos + 4 + length) {
          console.log(`Ignore message, invalid length (from ${this.id})`);
          return;
        }

        try {
          const message = data.slice(pos + 4, pos + 4 + length);
          callback(message).catch((e) => {
            logger.catched(`Error in callback: ${e}`, this.id.toBase58());
          });
        } catch (e) {
          logger.catched(`Caught an exception: ${e}`, this.id.toBase58());
        }

        pos += length + 4;
      }
    };

    this._socket.on("data", listener);

    if (onDone) {
      this._socket.on("end", onDone);
    }

    if (onError) {
      this._socket.on("error", onError);
    }
  }
  end(): void {
    this._socket.end();
  }

  public static async connect(uri: URL): Promise<net.Socket> {
    const host = uri.hostname;
    const port = parseInt(uri.port);
    return new Promise((resolve, reject) => {
      const socket = net.connect(port, host, () => {
        resolve(socket);
      });
      socket.on("error", (err) => {
        reject(err);
      });
    });
  }
}

@@ -0,0 +1,62 @@
import { Logger, Peer } from "../types.js";
import { URL } from "url";
import * as WS from "ws";
import { BasePeer } from "#transports/base.js";
import isNode from "detect-node";
import { Buffer } from "buffer";

export class WebSocketPeer extends BasePeer implements Peer {
  declare _socket: WebSocket;
  sendMessage(message: Uint8Array): void {
    this._socket.send(message);
  }

  renderLocationUri(): string {
    return "WebSocket client";
  }

  listenForMessages(
    callback: (event: any) => Promise<void>,
    {
      onDone,
      onError,
      logger,
    }: { onDone?: any; onError?: (...args: any[]) => void; logger: Logger },
  ): void {
    this._socket.addEventListener(
      "message",
      async (event: MessageEvent<any>) => {
        let data = event.data;

        if (data instanceof Blob) {
          data = Buffer.from(await data.arrayBuffer());
        }

        await callback(data);
      },
    );

    if (onDone) {
      this._socket.addEventListener("close", onDone);
    }

    if (onError) {
      this._socket.addEventListener("error", onError);
    }
  }
  end(): void {
    this._socket.close();
  }

  public static async connect(uri: URL): Promise<WebSocket> {
    return new Promise((resolve, reject) => {
      const socket = isNode ? new WS.WebSocket(uri) : new WebSocket(uri);
      socket.addEventListener("open", () => {
        resolve(socket);
      });
      socket.addEventListener("error", (err) => {
        reject(err);
      });
    });
  }
}

@@ -0,0 +1,107 @@
import NodeId from "./nodeId.js";
import KeyPairEd25519 from "#ed25519.js";
import { AbstractLevel } from "abstract-level";
import { P2PService } from "./service/p2p.js";
import { RegistryService } from "./service/registry.js";
import { S5Node } from "#node.js";
import Unpacker from "#serialization/unpack.js";
import type { URL } from "url";

export interface Peer {
  id: NodeId;
  connectionUris: Array<URL>;
  isConnected: boolean;
  challenge: Uint8Array;
  sendMessage(message: Uint8Array): void;

  listenForMessages(
    callback: (event: any) => Promise<void>,
    {
      onDone,
      onError,
      logger,
    }: {
      onDone?: any;
      onError?: (...args: any[]) => void;
      logger: Logger;
    },
  ): void;

  renderLocationUri(): string;
  end(): void;
}

export type PeerConstructorOptions = { socket: any; uris?: URL[] };

// Define the static side of the class
export interface PeerStatic {
  new ({ socket, uris }: PeerConstructorOptions): Peer;
  connect(uri: URL): Promise<any>;
}

export interface Logger {
  info(s: string): void;
  verbose(s: string): void;
  warn(s: string): void;
  error(s: string): void;
  catched(e: any, context?: string | null): void;
}

export interface S5Services {
  p2p: P2PService;
  registry: RegistryService;
}

export interface S5Config {
  p2p?: {
    network?: string;
    peers?: {
      initial?: string[];
    };
  };
  keyPair: KeyPairEd25519;
  logger: Logger;
  db: AbstractLevel<Uint8Array, string, Uint8Array>;
  cacheDb: AbstractLevel<Uint8Array, string, Uint8Array>;
  services: S5Services;
}

export interface S5NodeConfig {
  p2p?: {
    network?: string;
    peers?: {
      initial?: string[];
    };
  };
  keyPair: KeyPairEd25519;
  db: AbstractLevel<Uint8Array, string, Uint8Array>;
  logger?: Logger;
}

export interface SignedMessage {
  nodeId: NodeId;
  message: Uint8Array;
}

export type P2PMessageHandler = (
  node: S5Node,
  peer: Peer,
  data: Unpacker,
  rawData: Uint8Array,
  verifyId: boolean,
) => Promise<void>;

export type P2PSignedMessageHandler = (
  node: S5Node,
  peer: Peer,
  data: Unpacker,
  message: SignedMessage,
  verifyId: boolean,
) => Promise<void>;

export interface SignedRegistryEntry {
  pk: Uint8Array; // public key with multicodec prefix
  revision: number; // revision number of this entry, maximum is (256^8)-1
  data: Uint8Array; // data stored in this entry, can have a maximum length of 48 bytes
  signature: Uint8Array; // signature of this registry entry
}

@@ -0,0 +1,30 @@
export function decodeEndian(bytes: Uint8Array): number {
  let total = 0;

  for (let i = 0; i < bytes.length; i++) {
    total += bytes[i] * Math.pow(256, i);
  }

  return total;
}
export function encodeEndian(value: number, length: number): Uint8Array {
  const res = new Uint8Array(length);

  for (let i = 0; i < length; i++) {
    // use modulo/division instead of bitwise ops so values above 2^31
    // (e.g. 8-byte registry revisions) are not truncated to 32 bits
    res[i] = value % 256;
    value = Math.floor(value / 256);
  }

  return res;
}
export function calculateScore(
  goodResponses: number,
  badResponses: number,
): number {
  const totalVotes = goodResponses + badResponses;

  if (totalVotes === 0) return 0.5;

  const average = goodResponses / totalVotes;
  return average - (average - 0.5) * Math.pow(2, -Math.log(totalVotes + 1));
}
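A short sketch exercising the helpers above (the `#util.js` alias is package-internal and assumed to resolve here): `encodeEndian`/`decodeEndian` are little-endian, and `calculateScore` starts at a neutral 0.5 and moves toward the observed good/bad ratio as votes accumulate:

```ts
import { encodeEndian, decodeEndian, calculateScore } from "#util.js";

// Little-endian round trip: 0x01020304 -> [0x04, 0x03, 0x02, 0x01] -> 0x01020304
const bytes = encodeEndian(0x01020304, 4);
console.log(Array.from(bytes)); // [ 4, 3, 2, 1 ]
console.log(decodeEndian(bytes) === 0x01020304); // true

console.log(calculateScore(0, 0)); // 0.5 (no votes yet)
console.log(calculateScore(10, 0)); // ≈ 0.905, pulled toward 1 as upvotes accumulate
console.log(calculateScore(0, 10) < 0.5); // true
```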