*heavy simplification: stop using in-memory stores and use the default on-disk repo

parent 8510c96cad
commit f6a58ad4b6
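
The gist of the change: rather than hand-assembling an ipfs-repo backed entirely by in-memory stores and passing it to IPFS.create(), the node now omits the repo option so js-ipfs opens its default persistent repo. A minimal sketch of the new setup, assuming standard js-ipfs defaults (an on-disk repo at ~/.jsipfs under Node.js); this is illustrative, not the exact code in the changed file:

    import * as IPFS from "ipfs-core";

    // With no `repo` option, js-ipfs creates/opens its default on-disk repo
    // (~/.jsipfs in Node.js, or an IndexedDB-backed repo in the browser),
    // so blocks and the datastore survive restarts.
    const node = await IPFS.create({ silent: true });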

@@ -1,23 +1,14 @@
 import { rpcError, RpcMethodList, validateChain } from "./index.js";
 import type { IPFS } from "ipfs-core";
-import type { UnixFSEntry } from "ipfs-core/dist/src/utils";
 import { dynImport } from "../util.js";
-import { exporter } from "ipfs-unixfs-exporter";
-// @ts-ignore
 import { CID } from "multiformats/cid";
-import { MemoryDatastore } from "datastore-core";
-import { MemoryBlockstore } from "blockstore-core";
-import { createRepo } from "ipfs-repo";
-// @ts-ignore
-import { MemoryLock } from "ipfs-repo/locks/memory";
-// @ts-ignore
-import * as rawCodec from "multiformats/codecs/raw";
 import last from "it-last";
 // @ts-ignore
 import toStream from "it-to-stream";
 import { addStream } from "../streams.js";
 import { bases } from "multiformats/basics";
 import { ERR_HASH_IS_DIRECTORY } from "../error.js";
+import type { StatResult } from "ipfs-core/dist/src/components/files/stat";
 
 let client: IPFS | Promise<any>;
 let resolver: typeof import("ipfs-http-response").resolver;
@@ -25,19 +16,6 @@ let utils: typeof import("ipfs-http-response").utils;
 let detectContentType: typeof import("ipfs-http-response").utils.detectContentType;
 let normalizeCidPath: typeof import("ipfs-core/dist/src/utils.js").normalizeCidPath;
 
-const repo = createRepo(
-  "",
-  async () => rawCodec,
-  {
-    blocks: new MemoryBlockstore(),
-    datastore: new MemoryDatastore(),
-    keys: new MemoryDatastore(),
-    pins: new MemoryDatastore(),
-    root: new MemoryDatastore(),
-  },
-  { autoMigrate: false, repoLock: MemoryLock, repoOwner: true }
-);
-
 interface StatFileResponse {
   exists: boolean;
   contentType: string | null;
@@ -76,7 +54,6 @@ async function initIpfs() {
   client = IPFS.create({
     // relay: { hop: { enabled: false } },
     silent: true,
-    repo,
   });
   client = await client;
 }
@@ -109,7 +86,9 @@ async function fetchFile(hash?: string, path?: string, fullPath?: string) {
     return rpcError(ERR_HASH_IS_DIRECTORY);
   }
 
-  const streamId = addStream(data.content());
+  client = client as IPFS;
+
+  const streamId = addStream(client.cat(data.cid));
 
   return { streamId };
 }
@@ -143,12 +122,12 @@ async function statFile(hash?: string, path?: string, fullPath?: string) {
 
   if (exists?.type === "directory") {
     stats.directory = true;
-    stats.files = exists.node.Links.map((item) => {
-      return {
-        name: item.Name,
-        size: item.Tsize,
-      } as StatFileSubfile;
-    });
+    for await (const item of client.ls(exists.cid)) {
+      stats.files.push({
+        name: item.name,
+        size: item.size,
+      } as StatFileSubfile);
+    }
     return stats;
   }
 
@@ -168,14 +147,16 @@ async function fileExists(
   hash?: string,
   path?: string,
   fullPath?: string
-): Promise<Error | UnixFSEntry> {
+): Promise<Error | StatResult> {
   await initIpfs();
   client = client as IPFS;
   let ipfsPath = normalizePath(hash, path, fullPath);
   try {
     const controller = new AbortController();
     setTimeout(() => controller.abort(), 5000);
-    return await exporter(ipfsPath, repo.blocks, { signal: controller.signal });
+    return client.files.stat(`/ipfs/${ipfsPath}`, {
+      signal: controller.signal,
+    });
   } catch (err: any) {
     return err;
   }
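
With the private exporter/repo.blocks path gone (the fetchFile, statFile, and fileExists hunks above), reads now go through the public client API: files.stat resolves a path and reports whether it is a file or a directory, ls enumerates directory entries, and cat streams file bytes. A rough usage sketch of that pattern; node and cidOrPath are illustrative names, not identifiers from this file:

    import * as IPFS from "ipfs-core";

    const node = await IPFS.create({ silent: true });
    const cidOrPath = "Qm..."; // placeholder, substitute a real CID or IPFS path

    // StatResult tells us what the path points at.
    const stat = await node.files.stat(`/ipfs/${cidOrPath}`);

    if (stat.type === "directory") {
      // ls() yields one entry per child, with name/size/cid fields.
      for await (const entry of node.ls(stat.cid)) {
        console.log(entry.name, entry.size);
      }
    } else {
      // cat() yields the file contents as an async iterable of Uint8Array.
      for await (const chunk of node.cat(stat.cid)) {
        process.stdout.write(chunk);
      }
    }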