diff --git a/.gitignore b/.gitignore index 5691469f..60182528 100644 --- a/.gitignore +++ b/.gitignore @@ -39,4 +39,3 @@ build node_modules lib -dist diff --git a/.travis.yml b/.travis.yml index 3c3df143..ab015ea4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,6 @@ language: node_js cache: npm +dist: focal branches: only: diff --git a/packages/ipfs-unixfs-exporter/dist/src/index.d.ts b/packages/ipfs-unixfs-exporter/dist/src/index.d.ts new file mode 100644 index 00000000..ae8b732d --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/index.d.ts @@ -0,0 +1,29 @@ +export type UnixFS = import('ipfs-unixfs').UnixFS; +export type BlockAPI = import('ipfs-unixfs-importer/src/types').BlockAPI; +export type ExporterOptions = import('./types').ExporterOptions; +export type UnixFSFile = import('./types').UnixFSFile; +export type UnixFSDirectory = import('./types').UnixFSDirectory; +export type ObjectNode = import('./types').ObjectNode; +export type RawNode = import('./types').RawNode; +export type IdentityNode = import('./types').IdentityNode; +export type UnixFSEntry = import('./types').UnixFSEntry; +/** + * @param {string | CID} path + * @param {BlockAPI} blockService + * @param {ExporterOptions} [options] + */ +export function exporter(path: string | CID, blockService: BlockAPI, options?: import("./types").ExporterOptions | undefined): Promise; +/** + * @param {string | CID} path + * @param {BlockAPI} blockService + * @param {ExporterOptions} [options] + */ +export function walkPath(path: string | CID, blockService: BlockAPI, options?: import("./types").ExporterOptions | undefined): AsyncGenerator; +/** + * @param {string | CID} path + * @param {BlockAPI} blockService + * @param {ExporterOptions} [options] + */ +export function recursive(path: string | CID, blockService: BlockAPI, options?: import("./types").ExporterOptions | undefined): AsyncGenerator; +import { CID } from "multiformats/cid"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff 
--git a/packages/ipfs-unixfs-exporter/dist/src/index.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/index.d.ts.map new file mode 100644 index 00000000..ff6d6da2 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.js"],"names":[],"mappings":"qBAQa,OAAO,aAAa,EAAE,MAAM;uBAC5B,OAAO,gCAAgC,EAAE,QAAQ;8BACjD,OAAO,SAAS,EAAE,eAAe;yBACjC,OAAO,SAAS,EAAE,UAAU;8BAC5B,OAAO,SAAS,EAAE,eAAe;yBACjC,OAAO,SAAS,EAAE,UAAU;sBAC5B,OAAO,SAAS,EAAE,OAAO;2BACzB,OAAO,SAAS,EAAE,YAAY;0BAC9B,OAAO,SAAS,EAAE,WAAW;AAmF1C;;;;GAIG;AACH,+BAJW,MAAM,GAAG,GAAG,gBACZ,QAAQ,mGAWlB;AAlDD;;;;GAIG;AACH,+BAJW,MAAM,GAAG,GAAG,gBACZ,QAAQ,yHAiClB;AAiBD;;;;GAIG;AACH,gCAJW,MAAM,GAAG,GAAG,gBACZ,QAAQ,yHAoClB"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.d.ts new file mode 100644 index 00000000..0023df0e --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.d.ts @@ -0,0 +1,13 @@ +export = resolve; +/** + * @typedef {import('../types').Resolver} Resolver + */ +/** + * @type {Resolver} + */ +declare const resolve: Resolver; +declare namespace resolve { + export { Resolver }; +} +type Resolver = import('../types').Resolver; +//# sourceMappingURL=dag-cbor.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.d.ts.map new file mode 100644 index 00000000..421183a1 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"dag-cbor.d.ts","sourceRoot":"","sources":["../../../src/resolvers/dag-cbor.js"],"names":[],"mappings":";AAMA;;GAEG;AAEH;;GAEG;AACH,uBAFU,QAAQ,CA6DjB;;;;gBAjEY,OAAO,UAAU,EAAE,QAAQ"} \ No newline at end of file diff --git 
a/packages/ipfs-unixfs-exporter/dist/src/resolvers/identity.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/identity.d.ts new file mode 100644 index 00000000..48b711a6 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/identity.d.ts @@ -0,0 +1,11 @@ +export = resolve; +/** + * @type {Resolver} + */ +declare const resolve: Resolver; +declare namespace resolve { + export { ExporterOptions, Resolver }; +} +type Resolver = import('../types').Resolver; +type ExporterOptions = import('../types').ExporterOptions; +//# sourceMappingURL=identity.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/identity.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/resolvers/identity.d.ts.map new file mode 100644 index 00000000..5830ebd8 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/identity.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"identity.d.ts","sourceRoot":"","sources":["../../../src/resolvers/identity.js"],"names":[],"mappings":";AA+BA;;GAEG;AACH,uBAFU,QAAQ,CAoBjB;;;;gBA3CY,OAAO,UAAU,EAAE,QAAQ;uBAD3B,OAAO,UAAU,EAAE,eAAe"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/index.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/index.d.ts new file mode 100644 index 00000000..310e738a --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/index.d.ts @@ -0,0 +1,14 @@ +export = resolve; +/** + * @type {Resolve} + */ +declare function resolve(cid: import("multiformats/cid").CID, name: string, path: string, toResolve: string[], depth: number, blockService: any, options: import("../types").ExporterOptions): Promise; +declare namespace resolve { + export { BlockAPI, ExporterOptions, UnixFSEntry, Resolver, Resolve }; +} +type BlockAPI = import('../').BlockAPI; +type ExporterOptions = import('../types').ExporterOptions; +type UnixFSEntry = import('../types').UnixFSEntry; +type Resolver = import('../types').Resolver; +type Resolve = 
import('../types').Resolve; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/index.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/resolvers/index.d.ts.map new file mode 100644 index 00000000..5c1528b0 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/resolvers/index.js"],"names":[],"mappings":";AAuBA;;GAEG;AACH,yOASC;;;;gBA7BY,OAAO,KAAK,EAAE,QAAQ;uBACtB,OAAO,UAAU,EAAE,eAAe;mBAClC,OAAO,UAAU,EAAE,WAAW;gBAC9B,OAAO,UAAU,EAAE,QAAQ;eAC3B,OAAO,UAAU,EAAE,OAAO"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/raw.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/raw.d.ts new file mode 100644 index 00000000..c1bbde83 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/raw.d.ts @@ -0,0 +1,10 @@ +export = resolve; +/** + * @type {import('../types').Resolver} + */ +declare const resolve: import('../types').Resolver; +declare namespace resolve { + export { ExporterOptions }; +} +type ExporterOptions = import('../types').ExporterOptions; +//# sourceMappingURL=raw.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/raw.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/resolvers/raw.d.ts.map new file mode 100644 index 00000000..aeb0b6d0 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/raw.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"raw.d.ts","sourceRoot":"","sources":["../../../src/resolvers/raw.js"],"names":[],"mappings":";AA6BA;;GAEG;AACH,uBAFU,OAAO,UAAU,EAAE,QAAQ,CAqBpC;;;;uBA5CY,OAAO,UAAU,EAAE,eAAe"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.d.ts new file mode 100644 index 
00000000..cd496970 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.d.ts @@ -0,0 +1,17 @@ +export = directoryContent; +/** + * @typedef {import('../../../types').ExporterOptions} ExporterOptions + * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent + * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver + */ +/** + * @type {UnixfsV1Resolver} + */ +declare const directoryContent: UnixfsV1Resolver; +declare namespace directoryContent { + export { ExporterOptions, UnixfsV1DirectoryContent, UnixfsV1Resolver }; +} +type UnixfsV1Resolver = import('../../../types').UnixfsV1Resolver; +type ExporterOptions = import('../../../types').ExporterOptions; +type UnixfsV1DirectoryContent = import('../../../types').UnixfsV1DirectoryContent; +//# sourceMappingURL=directory.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.d.ts.map new file mode 100644 index 00000000..193e0f25 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"directory.d.ts","sourceRoot":"","sources":["../../../../../src/resolvers/unixfs-v1/content/directory.js"],"names":[],"mappings":";AAEA;;;;GAIG;AAEH;;GAEG;AACH,gCAFU,gBAAgB,CAsBzB;;;;wBA1BY,OAAO,gBAAgB,EAAE,gBAAgB;uBAFzC,OAAO,gBAAgB,EAAE,eAAe;gCACxC,OAAO,gBAAgB,EAAE,wBAAwB"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.d.ts new file mode 100644 index 00000000..5049b5de --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.d.ts @@ -0,0 +1,12 @@ +export = fileContent; +/** + * @type {import('../').UnixfsV1Resolver} + */ +declare 
const fileContent: import('../').UnixfsV1Resolver; +declare namespace fileContent { + export { ExporterOptions, BlockService, PBNode }; +} +type ExporterOptions = import('../../../types').ExporterOptions; +type BlockService = import('ipfs-unixfs-importer/src/types').BlockAPI; +type PBNode = import('@ipld/dag-pb').PBNode; +//# sourceMappingURL=file.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.d.ts.map new file mode 100644 index 00000000..d1b5b2a9 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../../../../src/resolvers/unixfs-v1/content/file.js"],"names":[],"mappings":";AAqGA;;GAEG;AACH,2BAFU,OAAO,KAAK,EAAE,gBAAgB,CAyBvC;;;;uBApHY,OAAO,gBAAgB,EAAE,eAAe;oBACxC,OAAO,gCAAgC,EAAE,QAAQ;cACjD,OAAO,cAAc,EAAE,MAAM"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.d.ts new file mode 100644 index 00000000..fa9dac87 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.d.ts @@ -0,0 +1,23 @@ +export = hamtShardedDirectoryContent; +/** + * @typedef {import('ipfs-unixfs-importer/src/types').BlockAPI} BlockAPI + * @typedef {import('../../../types').ExporterOptions} ExporterOptions + * @typedef {import('../../../types').Resolve} Resolve + * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent + * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + */ +/** + * @type {UnixfsV1Resolver} + */ +declare const hamtShardedDirectoryContent: UnixfsV1Resolver; +declare 
namespace hamtShardedDirectoryContent { + export { BlockAPI, ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver, PBNode }; +} +type UnixfsV1Resolver = import('../../../types').UnixfsV1Resolver; +type BlockAPI = import('ipfs-unixfs-importer/src/types').BlockAPI; +type ExporterOptions = import('../../../types').ExporterOptions; +type Resolve = import('../../../types').Resolve; +type UnixfsV1DirectoryContent = import('../../../types').UnixfsV1DirectoryContent; +type PBNode = import('@ipld/dag-pb').PBNode; +//# sourceMappingURL=hamt-sharded-directory.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.d.ts.map new file mode 100644 index 00000000..94a26f78 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"hamt-sharded-directory.d.ts","sourceRoot":"","sources":["../../../../../src/resolvers/unixfs-v1/content/hamt-sharded-directory.js"],"names":[],"mappings":";AAIA;;;;;;;GAOG;AAEH;;GAEG;AACH,2CAFU,gBAAgB,CAYzB;;;;wBAjBY,OAAO,gBAAgB,EAAE,gBAAgB;gBAJzC,OAAO,gCAAgC,EAAE,QAAQ;uBACjD,OAAO,gBAAgB,EAAE,eAAe;eACxC,OAAO,gBAAgB,EAAE,OAAO;gCAChC,OAAO,gBAAgB,EAAE,wBAAwB;cAEjD,OAAO,cAAc,EAAE,MAAM"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/raw.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/raw.d.ts new file mode 100644 index 00000000..e1e92ca8 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/raw.d.ts @@ -0,0 +1,15 @@ +export = rawContent; +/** + * @typedef {import('../../../types').ExporterOptions} ExporterOptions + * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver + */ +/** + * @type {UnixfsV1Resolver} + */ +declare const 
rawContent: UnixfsV1Resolver; +declare namespace rawContent { + export { ExporterOptions, UnixfsV1Resolver }; +} +type UnixfsV1Resolver = import('../../../types').UnixfsV1Resolver; +type ExporterOptions = import('../../../types').ExporterOptions; +//# sourceMappingURL=raw.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/raw.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/raw.d.ts.map new file mode 100644 index 00000000..aec0596d --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/raw.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"raw.d.ts","sourceRoot":"","sources":["../../../../../src/resolvers/unixfs-v1/content/raw.js"],"names":[],"mappings":";AAKA;;;GAGG;AAEH;;GAEG;AACH,0BAFU,gBAAgB,CAsBzB;;;;wBA1BY,OAAO,gBAAgB,EAAE,gBAAgB;uBADzC,OAAO,gBAAgB,EAAE,eAAe"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.d.ts b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.d.ts new file mode 100644 index 00000000..fa0762f2 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.d.ts @@ -0,0 +1,15 @@ +export = unixFsResolver; +/** + * @type {Resolver} + */ +declare const unixFsResolver: Resolver; +declare namespace unixFsResolver { + export { ExporterOptions, UnixFSEntry, Resolve, Resolver, UnixfsV1Resolver, PBNode }; +} +type Resolver = import('../../types').Resolver; +type ExporterOptions = import('../../types').ExporterOptions; +type UnixFSEntry = import('../../types').UnixFSEntry; +type Resolve = import('../../types').Resolve; +type UnixfsV1Resolver = import('../../types').UnixfsV1Resolver; +type PBNode = import('@ipld/dag-pb').PBNode; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.d.ts.map 
b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.d.ts.map new file mode 100644 index 00000000..76c94694 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/resolvers/unixfs-v1/index.js"],"names":[],"mappings":";AA2CA;;GAEG;AACH,8BAFU,QAAQ,CAoEjB;;;;gBApGY,OAAO,aAAa,EAAE,QAAQ;uBAH9B,OAAO,aAAa,EAAE,eAAe;mBACrC,OAAO,aAAa,EAAE,WAAW;eACjC,OAAO,aAAa,EAAE,OAAO;wBAE7B,OAAO,aAAa,EAAE,gBAAgB;cACtC,OAAO,cAAc,EAAE,MAAM"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/types.d.ts b/packages/ipfs-unixfs-exporter/dist/src/types.d.ts new file mode 100644 index 00000000..73f4c8e7 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/types.d.ts @@ -0,0 +1,74 @@ +import { CID } from 'multiformats/cid' +import UnixFS from 'ipfs-unixfs' +import { PBNode } from '@ipld/dag-pb' + +interface ExporterOptions { + offset?: number + length?: number + signal?: AbortSignal + timeout?: number +} + +interface Exportable { + type: 'file' | 'directory' | 'object' | 'raw' | 'identity', + name: string + path: string + cid: CID + depth: number + size: number + content: (options?: ExporterOptions) => AsyncIterable +} + +interface UnixFSFile extends Exportable { + type: 'file' + unixfs: UnixFS + node: PBNode +} + +interface UnixFSDirectory extends Exportable { + type: 'directory' + unixfs: UnixFS + node: PBNode +} + +interface ObjectNode extends Exportable { + type: 'object' + node: Uint8Array +} + +interface RawNode extends Exportable { + type: 'raw' + node: Uint8Array +} + +interface IdentityNode extends Exportable { + type: 'identity' + node: Uint8Array +} + +type UnixFSEntry = UnixFSFile | UnixFSDirectory | ObjectNode | RawNode | IdentityNode + +interface NextResult { + cid: CID + name: string + path: string + toResolve: string[] +} + +interface ResolveResult { + entry: UnixFSEntry + next?: NextResult +} + +type 
Resolve = (cid: CID, name: string, path: string, toResolve: string[], depth: number, ipld: IPLD, options: ExporterOptions) => Promise +type Resolver = (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, ipld: IPLD, options: ExporterOptions) => Promise + +type UnixfsV1FileContent = AsyncIterable | Iterable +type UnixfsV1DirectoryContent = AsyncIterable | Iterable +type UnixfsV1Content = UnixfsV1FileContent | UnixfsV1DirectoryContent +type UnixfsV1Resolver = (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, ipld: IPLD) => (options: ExporterOptions) => UnixfsV1Content + +interface Block { + cid: CID, + bytes: Uint8Array +} diff --git a/packages/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.d.ts b/packages/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.d.ts new file mode 100644 index 00000000..bc211869 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.d.ts @@ -0,0 +1,3 @@ +declare function _exports(block: Uint8Array, blockStart: number, requestedStart: number, requestedEnd: number): Uint8Array; +export = _exports; +//# sourceMappingURL=extract-data-from-block.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.d.ts.map new file mode 100644 index 00000000..761824c0 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"extract-data-from-block.d.ts","sourceRoot":"","sources":["../../../src/utils/extract-data-from-block.js"],"names":[],"mappings":"AAQiB,iCALN,UAAU,cACV,MAAM,kBACN,MAAM,gBACN,MAAM,cAuBhB"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.d.ts b/packages/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.d.ts new file mode 100644 index 
00000000..03f9b264 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.d.ts @@ -0,0 +1,30 @@ +export = findShardCid; +/** + * @typedef {object} ShardTraversalContext + * @property {number} hamtDepth + * @property {Bucket} rootBucket + * @property {Bucket} lastBucket + * + * @param {PBNode} node + * @param {string} name + * @param {BlockService} blockService + * @param {ShardTraversalContext} [context] + * @param {ExporterOptions} [options] + * @returns {Promise} + */ +declare function findShardCid(node: PBNode, name: string, blockService: BlockService, context?: ShardTraversalContext | undefined, options?: import("../types").ExporterOptions | undefined): Promise; +declare namespace findShardCid { + export { BlockService, CID, ExporterOptions, PBNode, PBLink, ShardTraversalContext }; +} +type PBNode = import('@ipld/dag-pb').PBNode; +type BlockService = import('ipfs-unixfs-importer/src/types').BlockAPI; +type ShardTraversalContext = { + hamtDepth: number; + rootBucket: Bucket; + lastBucket: Bucket; +}; +type CID = import('multiformats/cid').CID; +type ExporterOptions = import('../types').ExporterOptions; +type PBLink = import('@ipld/dag-pb').PBLink; +import { Bucket } from "hamt-sharding"; +//# sourceMappingURL=find-cid-in-shard.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.d.ts.map new file mode 100644 index 00000000..ce27c753 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"find-cid-in-shard.d.ts","sourceRoot":"","sources":["../../../src/utils/find-cid-in-shard.js"],"names":[],"mappings":";AA2FA;;;;;;;;;;;;GAYG;AACH,oCAPW,MAAM,QACN,MAAM,gBACN,YAAY,0GAGV,QAAQ,GAAG,GAAC,IAAI,CAAC,CA8D7B;;;;cA1JY,OAAO,cAAc,EAAE,MAAM;oBAH7B,OAAO,gCAAgC,EAAE,QAAQ;;eAsFhD,MAAM;gBACN,OAAO,OAAO,CAAC;gBACf,OAAO,OAAO,CAAC;;WAvFhB,OAAO,kBAAkB,EAAE,GAAG;uBAC9B,OAAO,UAAU,EAAE,eAAe;cAElC,OAAO,cAAc,EAAE,MAAM"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.d.ts b/packages/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.d.ts new file mode 100644 index 00000000..d181958e --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.d.ts @@ -0,0 +1,11 @@ +export = validateOffsetAndLength; +/** + * @param {number} size + * @param {number} [offset] + * @param {number} [length] + */ +declare function validateOffsetAndLength(size: number, offset?: number | undefined, length?: number | undefined): { + offset: number; + length: number; +}; +//# sourceMappingURL=validate-offset-and-length.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.d.ts.map b/packages/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.d.ts.map new file mode 100644 index 00000000..5b1f25be --- /dev/null +++ b/packages/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"validate-offset-and-length.d.ts","sourceRoot":"","sources":["../../../src/utils/validate-offset-and-length.js"],"names":[],"mappings":";AAIA;;;;GAIG;AACH,+CAJW,MAAM;;;EAiChB"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index b9367685..965f11f6 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -42,16 +42,11 @@ "detect-node": 
"^2.0.4", "events": "^3.3.0", "ipfs-core-types": "^0.3.1", - "ipfs-unixfs-importer": "^7.0.3", - "ipld": "^0.29.0", - "ipld-block": "^0.11.1", - "ipld-dag-pb": "^0.22.2", - "ipld-in-memory": "^8.0.0", + "ipfs-unixfs-importer": "../ipfs-unixfs-importer", "it-all": "^1.0.5", "it-buffer-stream": "^2.0.0", "it-first": "^1.0.6", "merge-options": "^3.0.4", - "multicodec": "^3.0.1", "native-abort-controller": "^1.0.3", "nyc": "^15.0.0", "readable-stream": "^3.6.0", @@ -61,11 +56,14 @@ "util": "^0.12.3" }, "dependencies": { - "cids": "^1.1.5", + "@ipld/dag-cbor": "^5.0.0", + "@ipld/dag-pb": "^1.1.0", "err-code": "^3.0.1", "hamt-sharding": "^2.0.0", "ipfs-unixfs": "^4.0.3", "it-last": "^1.0.5", + "multicodec": "^3.0.1", + "multiformats": "^8.0.3", "multihashing-async": "^2.1.0" }, "types": "dist/src/index.d.ts", diff --git a/packages/ipfs-unixfs-exporter/src/index.js b/packages/ipfs-unixfs-exporter/src/index.js index e9cefe62..9819a83b 100644 --- a/packages/ipfs-unixfs-exporter/src/index.js +++ b/packages/ipfs-unixfs-exporter/src/index.js @@ -1,14 +1,13 @@ 'use strict' const errCode = require('err-code') -const CID = require('cids') +const { CID } = require('multiformats/cid') const resolve = require('./resolvers') const last = require('it-last') /** * @typedef {import('ipfs-unixfs').UnixFS} UnixFS - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode - * @typedef {import('ipld')} IPLD + * @typedef {import('ipfs-unixfs-importer/src/types').BlockAPI} BlockAPI * @typedef {import('./types').ExporterOptions} ExporterOptions * @typedef {import('./types').UnixFSFile} UnixFSFile * @typedef {import('./types').UnixFSDirectory} UnixFSDirectory @@ -32,14 +31,15 @@ const toPathComponents = (path = '') => { const cidAndRest = (path) => { if (path instanceof Uint8Array) { return { - cid: new CID(path), + cid: CID.decode(path), toResolve: [] } } - if (CID.isCID(path)) { + const cid = CID.asCID(path) + if (cid) { return { - cid: path, + cid, toResolve: [] } } @@ -52,7 +52,7 @@ const 
cidAndRest = (path) => { const output = toPathComponents(path) return { - cid: new CID(output[0]), + cid: CID.parse(output[0]), toResolve: output.slice(1) } } @@ -62,10 +62,10 @@ const cidAndRest = (path) => { /** * @param {string | CID} path - * @param {IPLD} ipld + * @param {BlockAPI} blockService * @param {ExporterOptions} [options] */ -async function * walkPath (path, ipld, options = {}) { +async function * walkPath (path, blockService, options = {}) { let { cid, toResolve @@ -75,7 +75,7 @@ async function * walkPath (path, ipld, options = {}) { const startingDepth = toResolve.length while (true) { - const result = await resolve(cid, name, entryPath, toResolve, startingDepth, ipld, options) + const result = await resolve(cid, name, entryPath, toResolve, startingDepth, blockService, options) if (!result.entry && !result.next) { throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') @@ -99,11 +99,11 @@ async function * walkPath (path, ipld, options = {}) { /** * @param {string | CID} path - * @param {IPLD} ipld + * @param {BlockAPI} blockService * @param {ExporterOptions} [options] */ -async function exporter (path, ipld, options = {}) { - const result = await last(walkPath(path, ipld, options)) +async function exporter (path, blockService, options = {}) { + const result = await last(walkPath(path, blockService, options)) if (!result) { throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') @@ -114,11 +114,11 @@ async function exporter (path, ipld, options = {}) { /** * @param {string | CID} path - * @param {IPLD} ipld + * @param {BlockAPI} blockService * @param {ExporterOptions} [options] */ -async function * recursive (path, ipld, options = {}) { - const node = await exporter(path, ipld, options) +async function * recursive (path, blockService, options = {}) { + const node = await exporter(path, blockService, options) if (!node) { return diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js 
b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js index 40f69809..0276dc72 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js @@ -1,7 +1,8 @@ 'use strict' -const CID = require('cids') +const { CID } = require('multiformats/cid') const errCode = require('err-code') +const dagCbor = require('@ipld/dag-cbor') /** * @typedef {import('../types').Resolver} Resolver @@ -10,9 +11,9 @@ const errCode = require('err-code') /** * @type {Resolver} */ -const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options) => { - const object = await ipld.get(cid, options) - const block = await ipld.get(new CID(1, 'raw', cid.multihash)) +const resolve = async (cid, name, path, toResolve, resolve, depth, blockService, options) => { + const block = await blockService.get(cid) + const object = dagCbor.decode(block.bytes) let subObject = object let subPath = path @@ -24,14 +25,15 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options toResolve.shift() subPath = `${subPath}/${prop}` - if (CID.isCID(subObject[prop])) { + const subObjectCid = CID.asCID(subObject[prop]) + if (subObjectCid) { return { entry: { type: 'object', name, path, cid, - node: block, + node: block.bytes, depth, size: block.length, content: async function * () { @@ -39,7 +41,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options } }, next: { - cid: subObject[prop], + cid: subObjectCid, name: prop, path: subPath, toResolve @@ -60,7 +62,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options name, path, cid, - node: block, + node: block.bytes, depth, size: block.length, content: async function * () { diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/identity.js b/packages/ipfs-unixfs-exporter/src/resolvers/identity.js index 080ee182..c21093bf 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/identity.js +++ 
b/packages/ipfs-unixfs-exporter/src/resolvers/identity.js @@ -3,7 +3,7 @@ const errCode = require('err-code') const extractDataFromBlock = require('../utils/extract-data-from-block') const validateOffsetAndLength = require('../utils/validate-offset-and-length') -const mh = require('multihashing-async').multihash +const mh = require('multiformats/hashes/digest') /** * @typedef {import('../types').ExporterOptions} ExporterOptions @@ -32,12 +32,11 @@ const rawContent = (node) => { /** * @type {Resolver} */ -const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options) => { +const resolve = async (cid, name, path, toResolve, resolve, depth, blockService, options) => { if (toResolve.length) { throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND') } - - const buf = await mh.decode(cid.multihash) + const buf = await mh.decode(cid.multihash.bytes) return { entry: { @@ -47,7 +46,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options cid, content: rawContent(buf.digest), depth, - size: buf.length, + size: buf.digest.length, node: buf.digest } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/index.js b/packages/ipfs-unixfs-exporter/src/resolvers/index.js index ac304a1c..8a8be0c5 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/index.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/index.js @@ -1,10 +1,10 @@ 'use strict' const errCode = require('err-code') +const multicodec = require('multicodec') /** - * @typedef {import('cids')} CID - * @typedef {import('ipld')} IPLD + * @typedef {import('../').BlockAPI} BlockAPI * @typedef {import('../types').ExporterOptions} ExporterOptions * @typedef {import('../types').UnixFSEntry} UnixFSEntry * @typedef {import('../types').Resolver} Resolver @@ -15,23 +15,24 @@ const errCode = require('err-code') * @type {{ [ key: string ]: Resolver }} */ const resolvers = { - 'dag-pb': require('./unixfs-v1'), - raw: require('./raw'), - 
'dag-cbor': require('./dag-cbor'), - identity: require('./identity') + [multicodec.DAG_PB]: require('./unixfs-v1'), + [multicodec.RAW]: require('./raw'), + [multicodec.DAG_CBOR]: require('./dag-cbor'), + [multicodec.IDENTITY]: require('./identity') } /** * @type {Resolve} */ -function resolve (cid, name, path, toResolve, depth, ipld, options) { - const resolver = resolvers[cid.codec] +function resolve (cid, name, path, toResolve, depth, blockService, options) { + const resolver = resolvers[cid.code] if (!resolver) { - throw errCode(new Error(`No resolver for codec ${cid.codec}`), 'ERR_NO_RESOLVER') + // @ts-ignore - A `CodecCode` is expected, but a number is just fine + throw errCode(new Error(`No resolver for codec ${multicodec.getName(cid.code)}`), 'ERR_NO_RESOLVER') } - return resolver(cid, name, path, toResolve, resolve, depth, ipld, options) + return resolver(cid, name, path, toResolve, resolve, depth, blockService, options) } module.exports = resolve diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/raw.js b/packages/ipfs-unixfs-exporter/src/resolvers/raw.js index 5ec94544..e7af9a94 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/raw.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/raw.js @@ -30,12 +30,12 @@ const rawContent = (node) => { /** * @type {import('../types').Resolver} */ -const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options) => { +const resolve = async (cid, name, path, toResolve, resolve, depth, blockService, options) => { if (toResolve.length) { throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND') } - const buf = await ipld.get(cid, options) + const block = await blockService.get(cid, options) return { entry: { @@ -43,10 +43,10 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options name, path, cid, - content: rawContent(buf), + content: rawContent(block.bytes), depth, - size: buf.length, - node: buf + size: block.bytes.length, + 
node: block.bytes } } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js index f30fac00..751673f4 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js @@ -9,7 +9,7 @@ /** * @type {UnixfsV1Resolver} */ -const directoryContent = (cid, node, unixfs, path, resolve, depth, ipld) => { +const directoryContent = (cid, node, unixfs, path, resolve, depth, blockService) => { /** * @param {ExporterOptions} [options] * @returns {UnixfsV1DirectoryContent} @@ -20,7 +20,7 @@ const directoryContent = (cid, node, unixfs, path, resolve, depth, ipld) => { const links = node.Links.slice(offset, length) for (const link of links) { - const result = await resolve(link.Hash, link.Name, `${path}/${link.Name}`, [], depth + 1, ipld, options) + const result = await resolve(link.Hash, link.Name || '', `${path}/${link.Name || ''}`, [], depth + 1, blockService, options) if (result.entry) { yield result.entry diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js index 4bcdba4a..64918a42 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js @@ -4,23 +4,24 @@ const extractDataFromBlock = require('../../../utils/extract-data-from-block') const validateOffsetAndLength = require('../../../utils/validate-offset-and-length') const { UnixFS } = require('ipfs-unixfs') const errCode = require('err-code') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const mc = require('multicodec') /** * @typedef {import('../../../types').ExporterOptions} ExporterOptions - * @typedef {import('ipld')} IPLD - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode 
- */ - -/** - * @param {IPLD} ipld - * @param {DAGNode} node + * @typedef {import('ipfs-unixfs-importer/src/types').BlockAPI} BlockService + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * + * @param {BlockService} blockService + * @param {PBNode} node * @param {number} start * @param {number} end * @param {number} streamPosition * @param {ExporterOptions} options * @returns {AsyncIterable} */ -async function * emitBytes (ipld, node, start, end, streamPosition = 0, options) { +async function * emitBytes (blockService, node, start, end, streamPosition = 0, options) { // a `raw` node if (node instanceof Uint8Array) { const buf = extractDataFromBlock(node, streamPosition, start, end) @@ -34,6 +35,10 @@ async function * emitBytes (ipld, node, start, end, streamPosition = 0, options) return streamPosition } + if (node.Data == null) { + throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') + } + let file try { @@ -63,11 +68,26 @@ async function * emitBytes (ipld, node, start, end, streamPosition = 0, options) if ((start >= childStart && start < childEnd) || // child has offset byte (end > childStart && end <= childEnd) || // child has end byte (start < childStart && end > childEnd)) { // child is between offset and end bytes - const child = await ipld.get(childLink.Hash, { + const block = await blockService.get(childLink.Hash, { signal: options.signal }) + let child + switch (childLink.Hash.code) { + case mc.DAG_PB: + child = await dagPb.decode(block.bytes) + break + case mc.RAW: + child = block.bytes + break + case mc.DAG_CBOR: + child = await dagCbor.decode(block.bytes) + break + default: + // @ts-ignore - A `CodecCode` is expected, but a number is just fine + throw Error(`Unsupported codec: ${mc.getName(childLink.Hash.code)}`) + } - for await (const buf of emitBytes(ipld, child, start, end, streamPosition, options)) { + for await (const buf of emitBytes(blockService, child, start, end, streamPosition, options)) { streamPosition += buf.length yield buf 
@@ -82,7 +102,7 @@ async function * emitBytes (ipld, node, start, end, streamPosition = 0, options) /** * @type {import('../').UnixfsV1Resolver} */ -const fileContent = (cid, node, unixfs, path, resolve, depth, ipld) => { +const fileContent = (cid, node, unixfs, path, resolve, depth, blockService) => { /** * @param {ExporterOptions} options */ @@ -101,7 +121,7 @@ const fileContent = (cid, node, unixfs, path, resolve, depth, ipld) => { const start = offset const end = offset + length - return emitBytes(ipld, node, start, end, 0, options) + return emitBytes(blockService, node, start, end, 0, options) } return yieldFileContent diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js index 25d784b5..0aa1fb1a 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js @@ -1,54 +1,57 @@ 'use strict' +const { decode } = require('@ipld/dag-pb') + /** - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode - * @typedef {import('ipld')} IPLD + * @typedef {import('ipfs-unixfs-importer/src/types').BlockAPI} BlockAPI * @typedef {import('../../../types').ExporterOptions} ExporterOptions * @typedef {import('../../../types').Resolve} Resolve * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver + * @typedef {import('@ipld/dag-pb').PBNode} PBNode */ /** * @type {UnixfsV1Resolver} */ -const hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, ipld) => { +const hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, blockService) => { /** * @param {ExporterOptions} options * */ function yieldHamtDirectoryContent (options = {}) { - return listDirectory(node, path, resolve, depth, ipld, 
options) + return listDirectory(node, path, resolve, depth, blockService, options) } return yieldHamtDirectoryContent } /** - * @param {DAGNode} node + * @param {PBNode} node * @param {string} path * @param {Resolve} resolve * @param {number} depth - * @param {IPLD} ipld + * @param {BlockAPI} blockService * @param {ExporterOptions} options * * @returns {UnixfsV1DirectoryContent} */ -async function * listDirectory (node, path, resolve, depth, ipld, options) { +async function * listDirectory (node, path, resolve, depth, blockService, options) { const links = node.Links for (const link of links) { - const name = link.Name.substring(2) + const name = link.Name != null ? link.Name.substring(2) : null if (name) { - const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, ipld, options) + const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, blockService, options) yield result.entry } else { // descend into subshard - node = await ipld.get(link.Hash) + const block = await blockService.get(link.Hash) + node = decode(block.bytes) - for await (const file of listDirectory(node, path, resolve, depth, ipld, options)) { + for await (const file of listDirectory(node, path, resolve, depth, blockService, options)) { yield file } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js index 1ac4b17e..176da0a6 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js @@ -11,7 +11,7 @@ const validateOffsetAndLength = require('../../../utils/validate-offset-and-leng /** * @type {UnixfsV1Resolver} */ -const rawContent = (cid, node, unixfs, path, resolve, depth, ipld) => { +const rawContent = (cid, node, unixfs, path, resolve, depth, blockService) => { /** * @param {ExporterOptions} options */ diff --git 
a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js index 441c17be..12200d13 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js @@ -3,20 +3,20 @@ const errCode = require('err-code') const { UnixFS } = require('ipfs-unixfs') const findShardCid = require('../../utils/find-cid-in-shard') +const { decode } = require('@ipld/dag-pb') /** - * @typedef {import('cids')} CID - * @typedef {import('ipld')} IPLD - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode + * @typedef {import('ipfs-unixfs-importer/src/types').BlockAPI} * @typedef {import('../../types').ExporterOptions} ExporterOptions * @typedef {import('../../types').UnixFSEntry} UnixFSEntry * @typedef {import('../../types').Resolve} Resolve * @typedef {import('../../types').Resolver} Resolver * @typedef {import('../../types').UnixfsV1Resolver} UnixfsV1Resolver + * @typedef {import('@ipld/dag-pb').PBNode} PBNode */ /** - * @param {import('ipld-dag-pb').DAGNode} node + * @param {PBNode} node * @param {string} name */ const findLinkCid = (node, name) => { @@ -33,10 +33,10 @@ const contentExporters = { file: require('./content/file'), directory: require('./content/directory'), 'hamt-sharded-directory': require('./content/hamt-sharded-directory'), - metadata: (cid, node, unixfs, path, resolve, depth, ipld) => { + metadata: (cid, node, unixfs, path, resolve, depth, blockService) => { return () => [] }, - symlink: (cid, node, unixfs, path, resolve, depth, ipld) => { + symlink: (cid, node, unixfs, path, resolve, depth, blockService) => { return () => [] } } @@ -44,8 +44,9 @@ const contentExporters = { /** * @type {Resolver} */ -const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld, options) => { - const node = await ipld.get(cid, options) +const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, blockService, 
options) => { + const block = await blockService.get(cid, options) + const node = decode(block.bytes) let unixfs let next @@ -53,6 +54,10 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld, name = cid.toString() } + if (node.Data == null) { + throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') + } + try { unixfs = UnixFS.unmarshal(node.Data) } catch (err) { @@ -69,7 +74,7 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld, if (unixfs && unixfs.type === 'hamt-sharded-directory') { // special case - unixfs v1 hamt shards - linkCid = await findShardCid(node, toResolve[0], ipld) + linkCid = await findShardCid(node, toResolve[0], blockService) } else { linkCid = findLinkCid(node, toResolve[0]) } @@ -97,7 +102,7 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld, path, cid, // @ts-ignore - content: contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, ipld), + content: contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, blockService), unixfs, depth, node, diff --git a/packages/ipfs-unixfs-exporter/src/types.d.ts b/packages/ipfs-unixfs-exporter/src/types.d.ts index 52f3bf1e..73f4c8e7 100644 --- a/packages/ipfs-unixfs-exporter/src/types.d.ts +++ b/packages/ipfs-unixfs-exporter/src/types.d.ts @@ -1,6 +1,6 @@ -import CID from 'cids' +import { CID } from 'multiformats/cid' import UnixFS from 'ipfs-unixfs' -import DAGNode from 'ipld-dag-pb' +import { PBNode } from '@ipld/dag-pb' interface ExporterOptions { offset?: number @@ -22,13 +22,13 @@ interface Exportable { interface UnixFSFile extends Exportable { type: 'file' unixfs: UnixFS - node: DAGNode + node: PBNode } interface UnixFSDirectory extends Exportable { type: 'directory' unixfs: UnixFS - node: DAGNode + node: PBNode } interface ObjectNode extends Exportable { @@ -66,4 +66,9 @@ type Resolver = (cid: CID, name: string, path: string, toResolve: string[], reso type UnixfsV1FileContent 
= AsyncIterable | Iterable type UnixfsV1DirectoryContent = AsyncIterable | Iterable type UnixfsV1Content = UnixfsV1FileContent | UnixfsV1DirectoryContent -type UnixfsV1Resolver = (cid: CID, node: DAGNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, ipld: IPLD) => (options: ExporterOptions) => UnixfsV1Content +type UnixfsV1Resolver = (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, ipld: IPLD) => (options: ExporterOptions) => UnixfsV1Content + +interface Block { + cid: CID, + bytes: Uint8Array +} diff --git a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js index 9b48fede..4d7cc066 100644 --- a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js +++ b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js @@ -2,11 +2,14 @@ const { Bucket, createHAMT } = require('hamt-sharding') const multihashing = require('multihashing-async') +const { decode } = require('@ipld/dag-pb') /** + * @typedef {import('ipfs-unixfs-importer/src/types').BlockAPI} BlockService + * @typedef {import('multiformats/cid').CID} CID * @typedef {import('../types').ExporterOptions} ExporterOptions - * @typedef {import('ipld')} IPLD - * @typedef {import('cids')} CID + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('@ipld/dag-pb').PBLink} PBLink */ // FIXME: this is copy/pasted from ipfs-unixfs-importer/src/dir-sharded.js @@ -32,13 +35,17 @@ const hashFn = async function (buf) { } /** - * @param {import('ipld-dag-pb').DAGLink[]} links + * @param {PBLink[]} links * @param {Bucket} bucket * @param {Bucket} rootBucket */ const addLinksToHamtBucket = (links, bucket, rootBucket) => { return Promise.all( links.map(link => { + if (link.Name == null) { + // TODO(@rvagg): what do? 
this is technically possible + throw new Error('Unexpected Link without a Name') + } if (link.Name.length === 2) { const pos = parseInt(link.Name, 16) @@ -88,14 +95,14 @@ const toBucketPath = (position) => { * @property {Bucket} rootBucket * @property {Bucket} lastBucket * - * @param {import('ipld-dag-pb').DAGNode} node + * @param {PBNode} node * @param {string} name - * @param {IPLD} ipld + * @param {BlockService} blockService * @param {ShardTraversalContext} [context] * @param {ExporterOptions} [options] * @returns {Promise} */ -const findShardCid = async (node, name, ipld, context, options) => { +const findShardCid = async (node, name, blockService, context, options) => { if (!context) { const rootBucket = createHAMT({ hashFn @@ -121,6 +128,10 @@ const findShardCid = async (node, name, ipld, context, options) => { } const link = node.Links.find(link => { + if (link.Name == null) { + return false + } + const entryPrefix = link.Name.substring(0, 2) const entryName = link.Name.substring(2) @@ -141,15 +152,16 @@ const findShardCid = async (node, name, ipld, context, options) => { return null } - if (link.Name.substring(2) === name) { + if (link.Name != null && link.Name.substring(2) === name) { return link.Hash } context.hamtDepth++ - node = await ipld.get(link.Hash, options) + const block = await blockService.get(link.Hash, options) + node = decode(block.bytes) - return findShardCid(node, name, ipld, context, options) + return findShardCid(node, name, blockService, context, options) } module.exports = findShardCid diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js index c4bce877..f0605172 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js @@ -2,28 +2,21 @@ 'use strict' const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = 
require('ipld-in-memory') const { UnixFS } = require('ipfs-unixfs') -const mh = require('multihashing-async').multihash -const mc = require('multicodec') const all = require('it-all') const last = require('it-last') const randomBytes = require('it-buffer-stream') const { exporter, walkPath } = require('../src') const { importer } = require('ipfs-unixfs-importer') -const { - DAGLink, - DAGNode -} = require('ipld-dag-pb') +const dagPb = require('@ipld/dag-pb') +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') const blockApi = require('./helpers/block') const uint8ArrayConcat = require('uint8arrays/concat') const asAsyncIterable = require('./helpers/as-async-iterable') /** - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID */ const SHARD_SPLIT_THRESHOLD = 10 @@ -31,10 +24,8 @@ const SHARD_SPLIT_THRESHOLD = 10 describe('exporter sharded', function () { this.timeout(30000) - /** @type {import('ipld')} */ - let ipld /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block + const block = blockApi() /** * @param {number} numFiles @@ -72,11 +63,6 @@ describe('exporter sharded', function () { return result.cid } - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - it('exports a sharded directory', async () => { /** @type {{ [key: string]: { content: Uint8Array, cid?: CID }}} */ const files = {} @@ -110,14 +96,18 @@ describe('exporter sharded', function () { files[imported.path].cid = imported.cid }) - const dir = await ipld.get(dirCid) + const encodedBlock = await block.get(dirCid) + const dir = dagPb.decode(encodedBlock.bytes) + if (!dir.Data) { + throw Error('PBNode Data undefined') + } const dirMetadata = UnixFS.unmarshal(dir.Data) expect(dirMetadata.type).to.equal('hamt-sharded-directory') - const exported = await exporter(dirCid, ipld) + const exported = await exporter(dirCid, block) - expect(exported.cid.equals(dirCid)).to.be.true() + 
expect(exported.cid.toString()).to.be.equal(dirCid.toString()) if (exported.type !== 'directory') { throw new Error('Expected directory') @@ -140,7 +130,8 @@ describe('exporter sharded', function () { const data = uint8ArrayConcat(await all(dirFile.content())) // validate the CID - expect(files[dirFile.name]).to.have.property('cid').that.deep.equals(dirFile.cid) + // @ts-ignore - files[dirFile.name].cid is defined + expect(files[dirFile.name].cid.toString()).that.deep.equals(dirFile.cid.toString()) // validate the exported file content expect(files[dirFile.name].content).to.deep.equal(data) @@ -150,7 +141,7 @@ describe('exporter sharded', function () { it('exports all files from a sharded directory with subshards', async () => { const numFiles = 31 const dirCid = await createShard(numFiles) - const exported = await exporter(dirCid, ipld) + const exported = await exporter(dirCid, block) if (exported.type !== 'directory') { throw new Error('Unexpected type') @@ -172,42 +163,42 @@ describe('exporter sharded', function () { it('exports one file from a sharded directory', async () => { const dirCid = await createShard(31) - const exported = await exporter(`/ipfs/${dirCid}/file-14`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/file-14`, block) expect(exported).to.have.property('name', 'file-14') }) it('exports one file from a sharded directory sub shard', async () => { const dirCid = await createShard(31) - const exported = await exporter(`/ipfs/${dirCid}/file-30`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/file-30`, block) expect(exported.name).to.deep.equal('file-30') }) it('exports one file from a shard inside a shard inside a shard', async () => { const dirCid = await createShard(2568) - const exported = await exporter(`/ipfs/${dirCid}/file-2567`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/file-2567`, block) expect(exported.name).to.deep.equal('file-2567') }) it('extracts a deep folder from the sharded directory', async () => { 
const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`) - const exported = await exporter(`/ipfs/${dirCid}/foo/bar/baz`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/foo/bar/baz`, block) expect(exported.name).to.deep.equal('baz') }) it('extracts an intermediate folder from the sharded directory', async () => { const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`) - const exported = await exporter(`/ipfs/${dirCid}/foo/bar`, ipld) + const exported = await exporter(`/ipfs/${dirCid}/foo/bar`, block) expect(exported.name).to.deep.equal('bar') }) it('uses .path to extract all intermediate entries from the sharded directory', async () => { const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`) - const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz/file-1`, ipld)) + const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz/file-1`, block)) expect(exported.length).to.equal(5) @@ -224,7 +215,7 @@ describe('exporter sharded', function () { it('uses .path to extract all intermediate entries from the sharded directory as well as the contents', async () => { const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`) - const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz`, ipld)) + const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz`, block)) expect(exported.length).to.equal(4) @@ -252,23 +243,38 @@ describe('exporter sharded', function () { it('exports a file from a sharded directory inside a regular directory inside a sharded directory', async () => { const dirCid = await createShard(15) - const node = new DAGNode(new UnixFS({ type: 'directory' }).marshal(), [ - new DAGLink('shard', 5, dirCid) - ]) - const nodeCid = await ipld.put(node, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] + const node = dagPb.prepare({ + Data: new UnixFS({ type: 'directory' }).marshal(), + 
Links: [{ + Name: 'shard', + Tsize: 5, + Hash: dirCid + }] }) - - const shardNode = new DAGNode(new UnixFS({ type: 'hamt-sharded-directory' }).marshal(), [ - new DAGLink('75normal-dir', 5, nodeCid) - ]) - const shardNodeCid = await ipld.put(shardNode, mc.DAG_PB, { - cidVersion: 1, - hashAlg: mh.names['sha2-256'] + // TODO vmx 2021-02-23: Make it a CIDv0 + const nodeBlock = await Block.encode({ + value: node, + codec: dagPb, + hasher: sha256 + }) + await block.put(nodeBlock) + + const shardNode = dagPb.prepare({ + Data: new UnixFS({ type: 'hamt-sharded-directory' }).marshal(), + Links: [{ + Name: '75normal-dir', + Tsize: 5, + Hash: nodeBlock.cid.toV0() + }] + }) + const shardNodeBlock = await Block.encode({ + value: shardNode, + codec: dagPb, + hasher: sha256 }) + await block.put(shardNodeBlock) - const exported = await exporter(`/ipfs/${shardNodeCid}/normal-dir/shard/file-1`, ipld) + const exported = await exporter(`/ipfs/${shardNodeBlock.cid}/normal-dir/shard/file-1`, block) expect(exported.name).to.deep.equal('file-1') }) diff --git a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js index 14a11332..d7e47032 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js @@ -2,10 +2,6 @@ 'use strict' const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const { importer } = require('ipfs-unixfs-importer') const all = require('it-all') const last = require('it-last') @@ -19,15 +15,8 @@ const ONE_MEG = Math.pow(1024, 2) const { exporter, walkPath } = require('./../src') describe('exporter subtree', () => { - /** @type {import('ipld')} */ - let ipld /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = 
blockApi() it('exports a file 2 levels down', async () => { const content = uint8ArrayConcat(await all(randomBytes(ONE_MEG))) @@ -44,7 +33,7 @@ describe('exporter subtree', () => { throw new Error('Nothing imported') } - const exported = await exporter(`${imported.cid}/level-1/200Bytes.txt`, ipld) + const exported = await exporter(`${imported.cid}/level-1/200Bytes.txt`, block) expect(exported).to.have.property('cid') expect(exported.name).to.equal('200Bytes.txt') @@ -74,7 +63,7 @@ describe('exporter subtree', () => { throw new Error('Nothing imported') } - const exported = await exporter(`${imported.cid}/level-1`, ipld) + const exported = await exporter(`${imported.cid}/level-1`, block) if (exported.type !== 'directory') { throw new Error('Unexpected type') @@ -108,7 +97,7 @@ describe('exporter subtree', () => { } try { - await exporter(`${imported.cid}/doesnotexist`, ipld) + await exporter(`${imported.cid}/doesnotexist`, block) } catch (err) { expect(err.code).to.equal('ERR_NOT_FOUND') } @@ -134,7 +123,7 @@ describe('exporter subtree', () => { throw new Error('Nothing imported') } - const exported = await all(walkPath(`${imported.cid}/level-1/level-2/200Bytes.txt`, ipld)) + const exported = await all(walkPath(`${imported.cid}/level-1/level-2/200Bytes.txt`, block)) expect(exported.length).to.equal(4) expect(exported[0].path).to.equal(imported.cid.toString()) diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.js b/packages/ipfs-unixfs-exporter/test/exporter.spec.js index ee295045..67207623 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.js @@ -2,17 +2,14 @@ 'use strict' const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const { UnixFS } = require('ipfs-unixfs') -const CID = require('cids') -const { - DAGNode, - DAGLink -} = require('ipld-dag-pb') -const mh = 
require('multihashing-async').multihash +const { CID } = require('multiformats/cid') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const rawCodec = require('multiformats/codecs/raw') +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') +const mh = require('multiformats/hashes/digest') const mc = require('multicodec') const { exporter, recursive } = require('../src') const { importer } = require('ipfs-unixfs-importer') @@ -29,11 +26,14 @@ const asAsyncIterable = require('./helpers/as-async-iterable') const ONE_MEG = Math.pow(1024, 2) +/** + * @typedef {import('@ipld/dag-pb').PBLink} PBLink + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + */ + describe('exporter', () => { - /** @type {import('ipld')} */ - let ipld /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block + const block = blockApi() /** @type {Uint8Array} */ let bigFile /** @type {Uint8Array} */ @@ -48,7 +48,7 @@ describe('exporter', () => { * @param {object} [options] * @param {string} [options.type='file'] * @param {Uint8Array} [options.content] - * @param {DAGLink[]} [options.links=[]] + * @param {PBLink[]} [options.links=[]] */ async function dagPut (options = {}) { options.type = options.type || 'file' @@ -60,13 +60,18 @@ describe('exporter', () => { data: options.content }) - const node = new DAGNode(file.marshal(), options.links) - const cid = await ipld.put(node, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] + const node = dagPb.prepare({ + Data: file.marshal(), + Links: options.links + }) + const encodedBlock = await Block.encode({ + value: node, + codec: dagPb, + hasher: sha256 }) + await block.put(encodedBlock) - return { file: file, node: node, cid: cid } + return { file: file, node: node, cid: encodedBlock.cid.toV0() } } /** @@ -102,7 +107,7 @@ describe('exporter', () => { */ async function addAndReadTestFile ({ file, offset, length, strategy = 'balanced', path = 
'/foo', maxChunkSize, rawLeaves }) { const cid = await addTestFile({ file, strategy, path, maxChunkSize, rawLeaves }) - const entry = await exporter(cid, ipld) + const entry = await exporter(cid, block) if (entry.type !== 'file' && entry.type !== 'raw') { throw new Error('Unexpected type') @@ -135,49 +140,57 @@ describe('exporter', () => { } /** - * @param {import('ipld')} ipld * @param {'file' | 'directory' | 'raw'} type * @param {Uint8Array | ArrayLike | undefined} data - * @param {{ node: DAGNode, cid: CID }[]} children + * @param {{ node: PBNode, cid: CID }[]} children */ - async function createAndPersistNode (ipld, type, data, children) { + async function createAndPersistNode (type, data, children) { const file = new UnixFS({ type, data: data ? Uint8Array.from(data) : undefined }) const links = [] for (let i = 0; i < children.length; i++) { const child = children[i] + // @ts-ignore - we can guarantee that it's not undefined const leaf = UnixFS.unmarshal(child.node.Data) file.addBlockSize(leaf.fileSize()) - links.push(new DAGLink('', child.node.size, child.cid)) + links.push({ + Name: '', + Tsize: child.node.Data != null ? 
child.node.Data.length : 0, + Hash: child.cid + }) } - const node = new DAGNode(file.marshal(), links) - const cid = await ipld.put(node, mc.DAG_PB, { - cidVersion: 1, - hashAlg: mh.names['sha2-256'] + const node = dagPb.prepare({ + Data: file.marshal(), + Links: links }) + const encodedBlock = await Block.encode({ + value: node, + codec: dagPb, + hasher: sha256 + }) + await block.put(encodedBlock) return { node, - cid + cid: encodedBlock.cid } } - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - it('ensure hash inputs are sanitized', async () => { const result = await dagPut() - const node = await ipld.get(result.cid) + const encodedBlock = await block.get(result.cid) + const node = dagPb.decode(encodedBlock.bytes) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const unmarsh = UnixFS.unmarshal(node.Data) expect(unmarsh.data).to.deep.equal(result.file.data) - const file = await exporter(result.cid, ipld) + const file = await exporter(result.cid, block) expect(file).to.have.property('cid') expect(file).to.have.property('path', result.cid.toString()) @@ -200,7 +213,7 @@ describe('exporter', () => { }], block)) const path = `/ipfs/${files[1].cid}/${fileName}` - const file = await exporter(path, ipld) + const file = await exporter(path, block) expect(file.name).to.equal(fileName) expect(file.path).to.equal(`${files[1].cid}/${fileName}`) @@ -216,7 +229,7 @@ describe('exporter', () => { }], block)) const path = `/ipfs/${files[1].cid}/${fileName}` - const file = await exporter(path, ipld) + const file = await exporter(path, block) expect(file.name).to.equal(fileName) expect(file.path).to.equal(`${files[1].cid}/${fileName}`) @@ -230,14 +243,18 @@ describe('exporter', () => { content: uint8ArrayConcat(await all(randomBytes(100))) }) - const node = await ipld.get(result.cid) + const encodedBlock = await block.get(result.cid) + const node = dagPb.decode(encodedBlock.bytes) + if (!node.Data) { + throw new Error('PBNode Data 
undefined') + } const unmarsh = UnixFS.unmarshal(node.Data) if (!unmarsh.data) { throw new Error('Unexpected data') } - const file = await exporter(result.cid, ipld) + const file = await exporter(result.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -257,18 +274,26 @@ describe('exporter', () => { type: 'raw', data: content.slice(0, 5) }) - const chunkNode1 = new DAGNode(chunk1.marshal()) - const chunkCid1 = await ipld.put(chunkNode1, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] + const chunkNode1 = dagPb.prepare({ Data: chunk1.marshal() }) + const chunkBlock1 = await Block.encode({ + value: chunkNode1, + codec: dagPb, + hasher: sha256 }) + await block.put(chunkBlock1) const chunk2 = new UnixFS({ type: 'raw', data: content.slice(5) }) - const chunkNode2 = new DAGNode(chunk2.marshal()) - const chunkCid2 = await ipld.put(chunkNode2, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] + const chunkNode2 = dagPb.prepare({ + Data: chunk2.marshal(), + codec: dagPb, + hasher: sha256 }) + const chunkBlock2 = await Block.encode({ + value: chunkNode2, + codec: dagPb, + hasher: sha256 + }) + await block.put(chunkBlock2) const file = new UnixFS({ type: 'file' @@ -276,16 +301,26 @@ describe('exporter', () => { file.addBlockSize(5) file.addBlockSize(5) - const fileNode = new DAGNode(file.marshal(), [ - new DAGLink('', chunkNode1.size, chunkCid1), - new DAGLink('', chunkNode2.size, chunkCid2) - ]) - const fileCid = await ipld.put(fileNode, mc.DAG_PB, { - cidVersion: 0, - hashAlg: mh.names['sha2-256'] + const fileNode = dagPb.prepare({ + Data: file.marshal(), + Links: [{ + Name: '', + Tsize: chunkNode1.Data != null ? chunkNode1.Data.length : 0, + Hash: chunkBlock1.cid.toV0() + }, { + Name: '', + Tsize: chunkNode2.Data != null ? 
chunkNode2.Data.length : 0, + Hash: chunkBlock2.cid.toV0() + }] }) + const fileBlock = await Block.encode({ + value: fileNode, + codec: dagPb, + hasher: sha256 + }) + await block.put(fileBlock) - const exported = await exporter(fileCid, ipld) + const exported = await exporter(fileBlock.cid.toV0(), block) if (exported.type !== 'file') { throw new Error('Unexpected type') @@ -302,16 +337,18 @@ describe('exporter', () => { const chunk = await dagPut({ content: uint8ArrayConcat(await all(randomBytes(100))) }) const result = await dagPut({ content: uint8ArrayConcat(await all(randomBytes(100))), - links: [ - new DAGLink('', chunk.node.size, chunk.cid) - ] + links: [{ + Name: '', + Tsize: chunk.node.Data != null ? chunk.node.Data.length : 0, + Hash: chunk.cid + }] }) if (!result.file.data) { throw new Error('Expected data') } - const file = await exporter(result.cid, ipld) + const file = await exporter(result.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -332,7 +369,7 @@ describe('exporter', () => { file: uint8ArrayConcat(await all(randomBytes(ONE_MEG * 6))) }) - const file = await exporter(cid, ipld) + const file = await exporter(cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -353,7 +390,7 @@ describe('exporter', () => { file: bytes }) - const file = await exporter(cid, ipld) + const file = await exporter(cid, block) expect(file).to.have.property('path', cid.toString()) if (file.type !== 'file') { @@ -419,7 +456,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const dir = await exporter(importedDir.cid, ipld) + const dir = await exporter(importedDir.cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -467,7 +504,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const dir = await exporter(importedDir.cid, ipld) + const dir = await exporter(importedDir.cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ 
-623,7 +660,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const file = await exporter(imported.cid, ipld) + const file = await exporter(imported.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -643,7 +680,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const dir = await exporter(imported.cid, ipld) + const dir = await exporter(imported.cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -762,7 +799,7 @@ describe('exporter', () => { file: bigFile, maxChunkSize: 1024 }) - const file = await exporter(cid, ipld) + const file = await exporter(cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -800,19 +837,19 @@ describe('exporter', () => { const hash = 'bafybeidu2qqwriogfndznz32swi5r4p2wruf6ztu5k7my53tsezwhncs5y' try { - await exporter(hash, ipld) + await exporter(hash, block) } catch (err) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) it('exports file with data on internal and leaf nodes', async () => { - const leaf = await createAndPersistNode(ipld, 'raw', [0x04, 0x05, 0x06, 0x07], []) - const node = await createAndPersistNode(ipld, 'file', [0x00, 0x01, 0x02, 0x03], [ + const leaf = await createAndPersistNode('raw', [0x04, 0x05, 0x06, 0x07], []) + const node = await createAndPersistNode('file', [0x00, 0x01, 0x02, 0x03], [ leaf ]) - const file = await exporter(node.cid, ipld) + const file = await exporter(node.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -835,23 +872,23 @@ describe('exporter', () => { // | \ // l l const leaves = await Promise.all([ - createAndPersistNode(ipld, 'raw', [0x00, 0x01, 0x02, 0x03], []), - createAndPersistNode(ipld, 'raw', [0x08, 0x09, 0x10, 0x11], []), - createAndPersistNode(ipld, 'raw', [0x12, 0x13, 0x14, 0x15], []) + createAndPersistNode('raw', [0x00, 0x01, 0x02, 0x03], []), + createAndPersistNode('raw', [0x08, 0x09, 0x10, 0x11], []), + createAndPersistNode('raw', 
[0x12, 0x13, 0x14, 0x15], []) ]) const internalNodes = await Promise.all([ - createAndPersistNode(ipld, 'raw', [0x04, 0x05, 0x06, 0x07], [leaves[1]]), - createAndPersistNode(ipld, 'raw', undefined, [leaves[2]]) + createAndPersistNode('raw', [0x04, 0x05, 0x06, 0x07], [leaves[1]]), + createAndPersistNode('raw', undefined, [leaves[2]]) ]) - const node = await createAndPersistNode(ipld, 'file', undefined, [ + const node = await createAndPersistNode('file', undefined, [ leaves[0], internalNodes[0], internalNodes[1] ]) - const file = await exporter(node.cid, ipld) + const file = await exporter(node.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -868,12 +905,12 @@ describe('exporter', () => { }) it('exports file with data on internal and leaf nodes with an offset that only fetches data from leaf nodes', async () => { - const leaf = await createAndPersistNode(ipld, 'raw', [0x04, 0x05, 0x06, 0x07], []) - const node = await createAndPersistNode(ipld, 'file', [0x00, 0x01, 0x02, 0x03], [ + const leaf = await createAndPersistNode('raw', [0x04, 0x05, 0x06, 0x07], []) + const node = await createAndPersistNode('file', [0x00, 0x01, 0x02, 0x03], [ leaf ]) - const file = await exporter(node.cid, ipld) + const file = await exporter(node.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -900,7 +937,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const file = await exporter(imported.cid, ipld) + const file = await exporter(imported.cid, block) if (file.type !== 'file') { throw new Error('Unexpected type') @@ -925,8 +962,8 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const file = await exporter(imported.cid, ipld) - expect(CID.isCID(file.cid)).to.be.true() + const file = await exporter(imported.cid, block) + expect(CID.asCID(file.cid)).to.not.be.undefined() if (file.type !== 'raw') { throw new Error('Unexpected type') @@ -937,12 +974,15 @@ describe('exporter', () => { }) 
it('errors when exporting a non-existent key from a cbor node', async () => { - const cborNodeCid = await ipld.put({ - foo: 'bar' - }, mc.DAG_CBOR) + const cborBlock = await Block.encode({ + value: { foo: 'bar' }, + codec: dagCbor, + hasher: sha256 + }) + await block.put(cborBlock) try { - await exporter(`${cborNodeCid}/baz`, ipld) + await exporter(`${cborBlock.cid}/baz`, block) } catch (err) { expect(err.code).to.equal('ERR_NO_PROP') } @@ -953,8 +993,13 @@ describe('exporter', () => { foo: 'bar' } - const cborNodeCid = await ipld.put(node, mc.DAG_CBOR) - const exported = await exporter(`${cborNodeCid}`, ipld) + const cborBlock = await Block.encode({ + value: node, + codec: dagCbor, + hasher: sha256 + }) + await block.put(cborBlock) + const exported = await exporter(`${cborBlock.cid}`, block) if (exported.type !== 'object') { throw new Error('Unexpected type') @@ -964,50 +1009,74 @@ describe('exporter', () => { }) it('errors when exporting a node with no resolver', async () => { - const cid = new CID(1, 'git-raw', new CID('zdj7WkRPAX9o9nb9zPbXzwG7JEs78uyhwbUs8JSUayB98DWWY').multihash) + const cid = CID.create(1, mc.GIT_RAW, CID.parse('zdj7WkRPAX9o9nb9zPbXzwG7JEs78uyhwbUs8JSUayB98DWWY').multihash) try { - await exporter(`${cid}`, ipld) + await exporter(`${cid}`, block) } catch (err) { expect(err.code).to.equal('ERR_NO_RESOLVER') } }) it('errors if we try to export links from inside a raw node', async () => { - const cid = await ipld.put(Uint8Array.from([0, 1, 2, 3, 4]), mc.RAW) + const rawBlock = await Block.encode({ + value: Uint8Array.from([0, 1, 2, 3, 4]), + codec: rawCodec, + hasher: sha256 + }) + await block.put(rawBlock) try { - await exporter(`${cid}/lol`, ipld) + await exporter(`${rawBlock.cid}/lol`, block) } catch (err) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) it('errors we export a non-unixfs dag-pb node', async () => { - const cid = await ipld.put(new DAGNode(Uint8Array.from([0, 1, 2, 3, 4])), mc.DAG_PB) + const dagpbBlock = await Block.encode({ 
+ value: dagPb.prepare({ Data: Uint8Array.from([0, 1, 2, 3, 4]) }), + codec: dagPb, + hasher: sha256 + }) + await block.put(dagpbBlock) try { - await exporter(cid, ipld) + await exporter(dagpbBlock.cid, block) } catch (err) { expect(err.code).to.equal('ERR_NOT_UNIXFS') } }) it('errors we export a unixfs node that has a non-unixfs/dag-pb child', async () => { - const cborNodeCid = await ipld.put({ - foo: 'bar' - }, mc.DAG_CBOR) + const cborBlock = await Block.encode({ + value: { foo: 'bar' }, + codec: dagCbor, + hasher: sha256 + }) + await block.put(cborBlock) const file = new UnixFS({ type: 'file' }) file.addBlockSize(100) - const cid = await ipld.put(new DAGNode(file.marshal(), [ - new DAGLink('', 100, cborNodeCid) - ]), mc.DAG_PB) + const dagpbNode = dagPb.prepare({ + Data: file.marshal(), + Links: [{ + Name: '', + Tsize: 100, + Hash: cborBlock.cid + }] + }) + const dagpbBlock = await Block.encode({ + value: dagpbNode, + codec: dagPb, + hasher: sha256 + }) + await block.put(dagpbBlock) - const exported = await exporter(cid, ipld) + const exported = await exporter(dagpbBlock.cid, block) if (exported.type !== 'file') { throw new Error('Unexpected type') @@ -1026,7 +1095,7 @@ describe('exporter', () => { content: asAsyncIterable(uint8ArrayFromString('hello world')) }], block)) - const exported = await exporter(imported[0].cid, ipld) + const exported = await exporter(imported[0].cid, block) expect(exported.depth).to.equal(0) }) @@ -1047,7 +1116,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const exported = await all(recursive(dir.cid, ipld)) + const exported = await all(recursive(dir.cid, block)) const dirCid = dir.cid.toString() expect(exported[0].depth).to.equal(0) @@ -1072,10 +1141,10 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash', async () => { const data = uint8ArrayFromString('hello world') - const hash = mh.encode(data, 'identity') - const cid = new CID(1, 'identity', hash) + const hash = 
mh.create(mc.IDENTITY, data) + const cid = CID.create(1, mc.IDENTITY, hash) - const exported = await exporter(cid, ipld) + const exported = await exporter(cid, block) if (exported.type !== 'identity') { throw new Error('Unexpected type') @@ -1089,10 +1158,10 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with an offset', async () => { const data = uint8ArrayFromString('hello world') - const hash = mh.encode(data, 'identity') - const cid = new CID(1, 'identity', hash) + const hash = mh.create(mc.IDENTITY, data) + const cid = CID.create(1, mc.IDENTITY, hash) - const exported = await exporter(cid, ipld) + const exported = await exporter(cid, block) if (exported.type !== 'identity') { throw new Error('Unexpected type') @@ -1107,10 +1176,10 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with a length', async () => { const data = uint8ArrayFromString('hello world') - const hash = mh.encode(data, 'identity') - const cid = new CID(1, 'identity', hash) + const hash = mh.create(mc.IDENTITY, data) + const cid = CID.create(1, mc.IDENTITY, hash) - const exported = await exporter(cid, ipld) + const exported = await exporter(cid, block) if (exported.type !== 'identity') { throw new Error('Unexpected type') @@ -1125,10 +1194,10 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with an offset and a length', async () => { const data = uint8ArrayFromString('hello world') - const hash = mh.encode(data, 'identity') - const cid = new CID(1, 'identity', hash) + const hash = mh.create(mc.IDENTITY, data) + const cid = CID.create(1, mc.IDENTITY, hash) - const exported = await exporter(cid, ipld) + const exported = await exporter(cid, block) if (exported.type !== 'identity') { throw new Error('Unexpected type') @@ -1147,8 +1216,8 @@ describe('exporter', () => { // data should not be in IPLD const data = uint8ArrayFromString(`hello world '${Math.random()}`) - const hash = mh.encode(data, 
'sha2-256') - const cid = new CID(1, 'dag-pb', hash) + const hash = mh.create(mc.SHA2_256, data) + const cid = CID.create(1, mc.DAG_PB, hash) const message = `User aborted ${Math.random()}` setTimeout(() => { @@ -1157,7 +1226,7 @@ describe('exporter', () => { // regular test IPLD is offline-only, we need to mimic what happens when // we try to get a block from the network - const ipld = { + const customBlock = { /** * * @param {CID} cid @@ -1174,7 +1243,7 @@ describe('exporter', () => { } // @ts-ignore ipld implementation incomplete - await expect(exporter(cid, ipld, { + await expect(exporter(cid, customBlock, { signal: abortController.signal })).to.eventually.be.rejectedWith(message) }) diff --git a/packages/ipfs-unixfs-exporter/test/helpers/block.js b/packages/ipfs-unixfs-exporter/test/helpers/block.js index 64b718fd..6afc2a56 100644 --- a/packages/ipfs-unixfs-exporter/test/helpers/block.js +++ b/packages/ipfs-unixfs-exporter/test/helpers/block.js @@ -1,60 +1,27 @@ 'use strict' -const { - DAGNode, - util -} = require('ipld-dag-pb') -const multicodec = require('multicodec') -const mh = require('multihashing-async').multihash -const CID = require('cids') -const Block = require('ipld-block') +const errCode = require('err-code') -/** - * @param {import('ipld')} ipld - */ -function createBlockApi (ipld) { - // make ipld behave like the block api, some tests need to pull - // data from ipld so can't use a simple in-memory cid->block map - /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - const BlockApi = { - put: async (buf, options) => { - if (!options || !options.cid) { - throw new Error('No cid passed') - } - - const cid = new CID(options.cid) - - const multihash = mh.decode(cid.multihash) - - if (Block.isBlock(buf)) { - buf = buf.data - } +function createBlockApi () { + /** @type {{[key: string]: Uint8Array}} */ + const blocks = {} - /** @type {any} */ - let obj = buf - - if (cid.codec === 'dag-pb') { - obj = util.deserialize(buf) + /** @type 
{import('ipfs-unixfs-importer').BlockAPI} */ + const BlockApi = { + put: async ({ cid, bytes }, options) => { + if (!options || !options.onlyHash) { + blocks[cid.toV1().toString()] = bytes } - await ipld.put(obj, cid.codec === 'dag-pb' ? multicodec.DAG_PB : multicodec.RAW, { - cidVersion: cid.version, - hashAlg: multihash.code - }) - - return new Block(buf, cid) + return { cid, bytes } }, - get: async (cid, options) => { - cid = new CID(cid) - - /** @type {Uint8Array} */ - let buf = await ipld.get(cid, options) - - if (buf instanceof DAGNode) { - buf = buf.serialize() + get: async (cid, _options) => { + const bytes = blocks[cid.toV1().toString()] + if (bytes === undefined) { + throw errCode(new Error(`Could not find data for CID '${cid}'`), 'ERR_NOT_FOUND') } - return new Block(buf, cid) + return { cid, bytes } } } diff --git a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js index b8f04509..6d221983 100644 --- a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js +++ b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js @@ -1,15 +1,22 @@ 'use strict' +const { decode } = require('@ipld/dag-pb') + +/** + * @typedef {import('@ipld/dag-pb').PBLink} PBLink + */ + /** - * @param {import('cids')} cid - * @param {import('ipld')} ipld + * @param {import('multiformats/cid').CID} cid + * @param {import('ipfs-unixfs-importer/src/types').BlockAPI} blockService */ -module.exports = function (cid, ipld) { +module.exports = function (cid, blockService) { /** - * @param {import('cids')} cid + * @param {import('multiformats/cid').CID} cid */ async function * traverse (cid) { - const node = await ipld.get(cid) + const block = await blockService.get(cid) + const node = decode(block.bytes) if (node instanceof Uint8Array || !node.Links.length) { yield { @@ -22,7 +29,7 @@ module.exports = function (cid, ipld) { node.Links.forEach( /** - * @param {import('ipld-dag-pb').DAGLink} link
+ * @param {PBLink} link */ link => traverse(link.Hash) ) diff --git a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js index 14cb081a..5aecb98c 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js @@ -5,10 +5,6 @@ const { importer } = require('ipfs-unixfs-importer') const { exporter } = require('../src') const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const all = require('it-all') const last = require('it-last') const blockApi = require('./helpers/block') @@ -23,15 +19,8 @@ const asAsyncIterable = require('./helpers/as-async-iterable') */ describe('builder: directory sharding', () => { - /** @type {import('ipld')} */ - let ipld /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() describe('basic dirbuilder', () => { it('yields a non-sharded dir', async () => { @@ -48,7 +37,7 @@ describe('builder: directory sharding', () => { expect(nodes[0].path).to.equal('a/b') expect(nodes[1].path).to.equal('a') - const dirNode = await exporter(nodes[1].cid, ipld) + const dirNode = await exporter(nodes[1].cid, block) if (dirNode.type !== 'directory') { throw new Error('Unexpected type') @@ -56,7 +45,7 @@ describe('builder: directory sharding', () => { expect(dirNode.unixfs.type).to.equal('directory') - const fileNode = await exporter(nodes[0].cid, ipld) + const fileNode = await exporter(nodes[0].cid, block) if (fileNode.type !== 'file') { throw new Error('Unexpected type') @@ -83,7 +72,7 @@ describe('builder: directory sharding', () => { expect(nodes[0].path).to.equal('a/b') expect(nodes[1].path).to.equal('a') - const node = await 
exporter(nodes[1].cid, ipld) + const node = await exporter(nodes[1].cid, block) if (node.type !== 'directory') { throw new Error('Unexpected type') @@ -103,7 +92,7 @@ describe('builder: directory sharding', () => { const nonShardedHash = nodes[1].cid - const dir = await exporter(nonShardedHash, ipld) + const dir = await exporter(nonShardedHash, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -140,7 +129,7 @@ describe('builder: directory sharding', () => { const shardedHash = nodes[1].cid - const dir = await exporter(shardedHash, ipld) + const dir = await exporter(shardedHash, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -207,7 +196,7 @@ describe('builder: directory sharding', () => { expect(nodes.length).to.equal(maxDirs + 1) // files plus the containing directory - const dir = await exporter(nodes[nodes.length - 1].cid, ipld) + const dir = await exporter(nodes[nodes.length - 1].cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -229,7 +218,7 @@ describe('builder: directory sharding', () => { const maxDirs = 2000 const maxDepth = 3 - /** @type {import('cids')} */ + /** @type {import('multiformats/cid').CID} */ let rootHash before(async () => { @@ -276,7 +265,7 @@ describe('builder: directory sharding', () => { }) it('imports a big dir', async () => { - const dir = await exporter(rootHash, ipld) + const dir = await exporter(rootHash, block) /** * @param {UnixFSEntry} node @@ -351,7 +340,7 @@ describe('builder: directory sharding', () => { } } - const dir = await exporter(rootHash, ipld) + const dir = await exporter(rootHash, block) const entries = await collectContent(dir) let index = 0 @@ -365,7 +354,7 @@ describe('builder: directory sharding', () => { it('exports a big dir with subpath', async () => { const exportHash = rootHash.toString() + '/big/big/2000' - const node = await exporter(exportHash, ipld) + const node = await exporter(exportHash, block) 
expect(node.path).to.equal(exportHash) if (node.type !== 'file') { diff --git a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js index 12ad7061..436aab8b 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js @@ -2,10 +2,6 @@ 'use strict' const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const all = require('it-all') const { importer } = require('ipfs-unixfs-importer') const { exporter } = require('../src') @@ -17,15 +13,8 @@ const asAsyncIterable = require('./helpers/as-async-iterable') describe('import and export: directory', () => { const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' - /** @type {import('ipld')} */ - let ipld /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('imports', async function () { this.timeout(20 * 1000) @@ -76,7 +65,7 @@ describe('import and export: directory', () => { it('exports', async function () { this.timeout(20 * 1000) - const dir = await exporter(rootHash, ipld) + const dir = await exporter(rootHash, block) const files = await recursiveExport(dir, rootHash) expect(files.sort(byPath)).to.eql([{ @@ -121,7 +110,7 @@ async function recursiveExport (node, path, entries = []) { } /** - * @param {{ path?: string, cid: import('cids') }} node + * @param {{ path?: string, cid: import('multiformats/cid').CID }} node */ function normalizeNode (node) { return { diff --git a/packages/ipfs-unixfs-exporter/test/import-export.spec.js b/packages/ipfs-unixfs-exporter/test/import-export.spec.js index 245f830a..0767f2a7 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export.spec.js +++ 
b/packages/ipfs-unixfs-exporter/test/import-export.spec.js @@ -4,10 +4,6 @@ const { expect } = require('aegir/utils/chai') // @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') -// @ts-ignore const loadFixture = require('aegir/utils/fixtures') // @ts-ignore const isNode = require('detect-node') @@ -31,15 +27,8 @@ describe('import and export', function () { const importerOptions = { strategy: strategy } describe('using builder: ' + strategy, () => { - /** @type {import('ipld')} */ - let ipld /** @type {import('ipfs-unixfs-importer/src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('imports and exports', async () => { const path = `${strategy}-big.dat` @@ -49,7 +38,7 @@ describe('import and export', function () { for await (const file of importer(values, block, importerOptions)) { expect(file.path).to.eql(path) - const result = await exporter(file.cid, ipld) + const result = await exporter(file.cid, block) if (result.type !== 'file') { throw new Error('Unexpected type') diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.js b/packages/ipfs-unixfs-exporter/test/importer.spec.js index a84d6f63..bae88e6a 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.js +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.js @@ -6,10 +6,6 @@ const { exporter, recursive } = require('../src') const extend = require('merge-options') const { expect } = require('aegir/utils/chai') const sinon = require('sinon') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const { UnixFS } = require('ipfs-unixfs') const collectLeafCids = require('./helpers/collect-leaf-cids') // @ts-ignore @@ -27,13 +23,14 @@ const uint8ArrayConcat = require('uint8arrays/concat') const uint8ArrayFromString = require('uint8arrays/from-string') const asAsyncIterable = 
require('./helpers/as-async-iterable') const last = require('it-last') -const CID = require('cids') +const { CID } = require('multiformats/cid') +const { base58btc } = require('multiformats/bases/base58') +const { decode } = require('@ipld/dag-pb') const { parseMtime } = require('ipfs-unixfs') /** - * @typedef {import('ipld')} IPLD * @typedef {import('ipfs-unixfs-importer/src/types').BlockAPI} BlockAPI - * @typedef {import('ipld-dag-pb').DAGNode} DAGNode + * @typedef {import('@ipld/dag-pb').PBNode} PBNode */ /** @@ -201,11 +198,10 @@ const strategyOverrides = { /** * @param {BlockAPI} block - * @param {IPLD} ipld * @param {import('ipfs-unixfs-importer').UserImporterOptions} options * @param {*} expected */ -const checkLeafNodeTypes = async (block, ipld, options, expected) => { +const checkLeafNodeTypes = async (block, options, expected) => { const file = await first(importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(262144 + 5).fill(1)) @@ -215,18 +211,27 @@ const checkLeafNodeTypes = async (block, ipld, options, expected) => { throw new Error('Nothing imported') } - /** @type {DAGNode} */ - const node = await ipld.get(file.cid) + // @type {Block} + const fileBlock = await block.get(file.cid) + /** @type {PBNode} */ + const node = decode(fileBlock.bytes) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const meta = UnixFS.unmarshal(node.Data) expect(meta.type).to.equal('file') expect(node.Links.length).to.equal(2) - const linkedNodes = await Promise.all( - node.Links.map(link => ipld.get(link.Hash)) + const linkedBlocks = await Promise.all( + node.Links.map(link => block.get(link.Hash)) ) - linkedNodes.forEach(node => { + linkedBlocks.forEach(({ bytes }) => { + const node = decode(bytes) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const meta = UnixFS.unmarshal(node.Data) expect(meta.type).to.equal(expected) }) @@ -234,16 +239,19 @@ const checkLeafNodeTypes = async (block, ipld, options, expected) => { /** * 
@param {BlockAPI} block - * @param {IPLD} ipld * @param {import('ipfs-unixfs-importer').UserImporterOptions} options * @param {*} expected */ -const checkNodeLinks = async (block, ipld, options, expected) => { +const checkNodeLinks = async (block, options, expected) => { for await (const file of importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(100).fill(1)) }], block, options)) { - const node = await ipld.get(file.cid) + const fileBlock = await block.get(file.cid) + const node = decode(fileBlock.bytes) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const meta = UnixFS.unmarshal(node.Data) expect(meta.type).to.equal('file') @@ -341,7 +349,7 @@ strategies.forEach((strategy) => { const actualFile = actualFiles[i] expect(actualFile.path).to.equal(expectedFile.path) - expect(actualFile.cid.toString('base58btc')).to.equal(expectedFile.cid) + expect(actualFile.cid.toString(base58btc)).to.equal(expectedFile.cid.toString()) if (actualFile.unixfs) { expect(actualFile.unixfs.type).to.equal(expectedFile.type) @@ -356,10 +364,8 @@ strategies.forEach((strategy) => { describe('importer: ' + strategy, function () { this.timeout(30 * 1000) - /** @type {IPLD} */ - let ipld /** @type {BlockAPI} */ - let block + const block = blockApi() /** @type {import('ipfs-unixfs-importer').UserImporterOptions} */ const options = { // @ts-ignore @@ -372,11 +378,6 @@ strategies.forEach((strategy) => { options.reduceSingleLeafToSelf = false } - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - it('fails on bad content', async () => { try { // @ts-expect-error bad content @@ -671,7 +672,7 @@ strategies.forEach((strategy) => { expect(file).to.exist() try { - await ipld.get(file.cid) + await block.get(file.cid) throw new Error('No error was thrown') } catch (err) { @@ -756,11 +757,11 @@ strategies.forEach((strategy) => { // Just check the intermediate directory can be retrieved if (!inputFile) { - await ipld.get(cid) + await 
block.get(cid) } // Check the imported content is correct - const node = await exporter(cid, ipld) + const node = await exporter(cid, block) if (node.type !== 'file') { throw new Error('Unexpected type') @@ -771,25 +772,25 @@ strategies.forEach((strategy) => { }) it('imports file with raw leaf nodes when specified', () => { - return checkLeafNodeTypes(block, ipld, { + return checkLeafNodeTypes(block, { leafType: 'raw' }, 'raw') }) it('imports file with file leaf nodes when specified', () => { - return checkLeafNodeTypes(block, ipld, { + return checkLeafNodeTypes(block, { leafType: 'file' }, 'file') }) it('reduces file to single node when specified', () => { - return checkNodeLinks(block, ipld, { + return checkNodeLinks(block, { reduceSingleLeafToSelf: true }, 0) }) it('does not reduce file to single node when overidden by options', () => { - return checkNodeLinks(block, ipld, { + return checkNodeLinks(block, { reduceSingleLeafToSelf: false }, 1) }) @@ -805,7 +806,7 @@ strategies.forEach((strategy) => { path: '1.2MiB.txt', content: asAsyncIterable(bigFile) }], block, options)) { - for await (const { cid } of collectLeafCids(file.cid, ipld)) { + for await (const { cid } of collectLeafCids(file.cid, block)) { expect(cid).to.have.property('codec', 'raw') expect(cid).to.have.property('version', 1) } @@ -825,7 +826,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile), mtime: parseMtime(now) }], block, options)) { - const node = await exporter(file.cid, ipld) + const node = await exporter(file.cid, block) expect(node).to.have.deep.nested.property('unixfs.mtime', dateToTimespec(now)) } @@ -841,7 +842,7 @@ strategies.forEach((strategy) => { mtime: parseMtime(now) }], block)) - const node = await exporter(entries[0].cid, ipld) + const node = await exporter(entries[0].cid, block) expect(node).to.have.deep.nested.property('unixfs.mtime', dateToTimespec(now)) }) @@ -860,7 +861,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], 
block)) - const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) + const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory').pop() if (!node) { @@ -886,7 +887,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) + const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory').pop() if (!node) { @@ -917,7 +918,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) + const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory' && node.name === 'bar').pop() if (!node) { @@ -948,7 +949,7 @@ strategies.forEach((strategy) => { shardSplitThreshold: 0 })) - const nodes = await all(recursive(entries[entries.length - 1].cid, ipld)) + const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory' && node.unixfs.type === 'hamt-sharded-directory').pop() if (!node) { @@ -971,7 +972,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile), mode }], block, options)) { - const node = await exporter(file.cid, ipld) + const node = await exporter(file.cid, block) expect(node).to.have.nested.property('unixfs.mode', mode) } @@ -987,7 +988,7 @@ strategies.forEach((strategy) => { mode }], block)) - const node = await exporter(entries[0].cid, ipld) + const node = await exporter(entries[0].cid, block) expect(node).to.have.nested.property('unixfs.mode', mode) }) @@ -1007,10 +1008,10 @@ strategies.forEach((strategy) => { mode: mode2 }], block)) - const node1 = await exporter(entries[0].cid, ipld) + const node1 = await exporter(entries[0].cid, block) 
expect(node1).to.have.nested.property('unixfs.mode', mode1) - const node2 = await exporter(entries[1].cid, ipld) + const node2 = await exporter(entries[1].cid, block) expect(node2).to.have.nested.property('unixfs.mode', mode2) }) @@ -1028,10 +1029,10 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const node1 = await exporter(entries[0].cid, ipld) + const node1 = await exporter(entries[0].cid, block) expect(node1).to.have.nested.property('unixfs.mode', mode) - const node2 = await exporter(entries[1].cid, ipld) + const node2 = await exporter(entries[1].cid, block) expect(node2).to.have.nested.property('unixfs.mode').that.does.not.equal(mode) }) @@ -1043,29 +1044,22 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block)) - const node1 = await exporter(entries[0].cid, ipld) + const node1 = await exporter(entries[0].cid, block) expect(node1).to.have.nested.property('unixfs.mode', 0o0644) - const node2 = await exporter(entries[1].cid, ipld) + const node2 = await exporter(entries[1].cid, block) expect(node2).to.have.nested.property('unixfs.mode', 0o0755) }) }) }) describe('configuration', () => { - /** @type {IPLD} */ - let ipld /** @type {BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('alllows configuring with custom dag and tree builder', async () => { let builtTree = false - const cid = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + const cid = CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') const unixfs = new UnixFS({ type: 'directory' }) // @ts-expect-error custom dag builder expects weird data diff --git a/packages/ipfs-unixfs-importer/dist/src/chunker/fixed-size.d.ts b/packages/ipfs-unixfs-importer/dist/src/chunker/fixed-size.d.ts new file mode 100644 index 00000000..214ca322 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/chunker/fixed-size.d.ts @@ -0,0 +1,3 @@ +declare 
const _exports: import('../types').Chunker; +export = _exports; +//# sourceMappingURL=fixed-size.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/chunker/fixed-size.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/chunker/fixed-size.d.ts.map new file mode 100644 index 00000000..8a1f19c7 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/chunker/fixed-size.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fixed-size.d.ts","sourceRoot":"","sources":["../../../src/chunker/fixed-size.js"],"names":[],"mappings":"wBAMU,OAAO,UAAU,EAAE,OAAO"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/chunker/index.d.ts b/packages/ipfs-unixfs-importer/dist/src/chunker/index.d.ts new file mode 100644 index 00000000..8b01a3ea --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/chunker/index.d.ts @@ -0,0 +1,5 @@ +declare function _exports(type: import('../types').ChunkerType, source: AsyncIterable, options: import('../types').ImporterOptions): AsyncIterable; +export = _exports; +export type ImporterOptions = import('../types').ImporterOptions; +export type Chunker = import('../types').Chunker; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/chunker/index.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/chunker/index.d.ts.map new file mode 100644 index 00000000..151c9fb3 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/chunker/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/chunker/index.js"],"names":[],"mappings":"AAsBiB,gCAJN,OAAO,UAAU,EAAE,WAAW,UAC9B,cAAc,UAAU,CAAC,WACzB,OAAO,UAAU,EAAE,eAAe,6BAU5C;;8BAzBY,OAAO,UAAU,EAAE,eAAe;sBAClC,OAAO,UAAU,EAAE,OAAO"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/chunker/rabin.d.ts b/packages/ipfs-unixfs-importer/dist/src/chunker/rabin.d.ts new file mode 100644 index 00000000..2403c0d9 --- /dev/null 
+++ b/packages/ipfs-unixfs-importer/dist/src/chunker/rabin.d.ts @@ -0,0 +1,10 @@ +declare const _exports: import('../types').Chunker; +export = _exports; +export type RabinOptions = { + min: number; + max: number; + bits: number; + window: number; + polynomial: number; +}; +//# sourceMappingURL=rabin.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/chunker/rabin.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/chunker/rabin.d.ts.map new file mode 100644 index 00000000..202ebd76 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/chunker/rabin.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rabin.d.ts","sourceRoot":"","sources":["../../../src/chunker/rabin.js"],"names":[],"mappings":"wBAkBU,OAAO,UAAU,EAAE,OAAO;;;SARtB,MAAM;SACN,MAAM;UACN,MAAM;YACN,MAAM;gBACN,MAAM"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/dir.d.ts b/packages/ipfs-unixfs-importer/dist/src/dag-builder/dir.d.ts new file mode 100644 index 00000000..e5a5ca6e --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/dir.d.ts @@ -0,0 +1,13 @@ +export = dirBuilder; +/** + * @typedef {import('../types').Directory} Directory + */ +/** + * @type {import('../types').UnixFSV1DagBuilder} + */ +declare const dirBuilder: import('../types').UnixFSV1DagBuilder; +declare namespace dirBuilder { + export { Directory }; +} +type Directory = import('../types').Directory; +//# sourceMappingURL=dir.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/dir.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dag-builder/dir.d.ts.map new file mode 100644 index 00000000..45275d2d --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/dir.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"dir.d.ts","sourceRoot":"","sources":["../../../src/dag-builder/dir.js"],"names":[],"mappings":";AAMA;;GAEG;AAEH;;GAEG;AACH,0BAFU,OAAO,UAAU,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAmBzD;;;;iBAvBY,OAAO,UAAU,EAAE,SAAS"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/balanced.d.ts b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/balanced.d.ts new file mode 100644 index 00000000..e84f8a1c --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/balanced.d.ts @@ -0,0 +1,13 @@ +export = balanced; +/** + * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder + */ +/** + * @type {FileDAGBuilder} + */ +declare function balanced(source: AsyncIterable | Iterable, reduce: import("../../types").Reducer, options: import("../../types").ImporterOptions): Promise; +declare namespace balanced { + export { FileDAGBuilder }; +} +type FileDAGBuilder = import('../../types').FileDAGBuilder; +//# sourceMappingURL=balanced.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/balanced.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/balanced.d.ts.map new file mode 100644 index 00000000..33a36921 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/balanced.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"balanced.d.ts","sourceRoot":"","sources":["../../../../src/dag-builder/file/balanced.js"],"names":[],"mappings":";AAIA;;GAEG;AAEH;;GAEG;AACH,sSAEC;;;;sBARY,OAAO,aAAa,EAAE,cAAc"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/buffer-importer.d.ts b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/buffer-importer.d.ts new file mode 100644 index 00000000..71b3b20e --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/buffer-importer.d.ts @@ -0,0 +1,13 @@ +export = bufferImporter; +/** + * @typedef 
{import('../../types').BufferImporter} BufferImporter + */ +/** + * @type {BufferImporter} + */ +declare function bufferImporter(file: import("../../types").File, block: import("../../types").BlockAPI, options: import("../../types").ImporterOptions): AsyncIterable<() => Promise>; +declare namespace bufferImporter { + export { BufferImporter }; +} +type BufferImporter = import('../../types').BufferImporter; +//# sourceMappingURL=buffer-importer.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/buffer-importer.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/buffer-importer.d.ts.map new file mode 100644 index 00000000..86a57d6a --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/buffer-importer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"buffer-importer.d.ts","sourceRoot":"","sources":["../../../../src/dag-builder/file/buffer-importer.js"],"names":[],"mappings":";AAOA;;GAEG;AAEH;;GAEG;AACH,qOAmCC;;;;sBAzCY,OAAO,aAAa,EAAE,cAAc"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/flat.d.ts b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/flat.d.ts new file mode 100644 index 00000000..5be05008 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/flat.d.ts @@ -0,0 +1,3 @@ +declare const _exports: import('../../types').FileDAGBuilder; +export = _exports; +//# sourceMappingURL=flat.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/flat.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/flat.d.ts.map new file mode 100644 index 00000000..b8b0d861 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/flat.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"flat.d.ts","sourceRoot":"","sources":["../../../../src/dag-builder/file/flat.js"],"names":[],"mappings":"wBAKU,OAAO,aAAa,EAAE,cAAc"} \ No newline at end of file 
diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/index.d.ts b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/index.d.ts new file mode 100644 index 00000000..9320e69c --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/index.d.ts @@ -0,0 +1,15 @@ +export = fileBuilder; +/** + * @type {import('../../types').UnixFSV1DagBuilder} + */ +declare function fileBuilder(file: import("../../types").File, block: import("../../types").BlockAPI, options: import("../../types").ImporterOptions): Promise; +declare namespace fileBuilder { + export { BlockAPI, File, ImporterOptions, Reducer, DAGBuilder, FileDAGBuilder }; +} +type BlockAPI = import('../../types').BlockAPI; +type File = import('../../types').File; +type ImporterOptions = import('../../types').ImporterOptions; +type Reducer = import('../../types').Reducer; +type DAGBuilder = import('../../types').DAGBuilder; +type FileDAGBuilder = import('../../types').FileDAGBuilder; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/index.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/index.d.ts.map new file mode 100644 index 00000000..3677ef11 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/dag-builder/file/index.js"],"names":[],"mappings":";AA+LA;;GAEG;AACH,6MAQC;;;;gBA/LY,OAAO,aAAa,EAAE,QAAQ;YAC9B,OAAO,aAAa,EAAE,IAAI;uBAC1B,OAAO,aAAa,EAAE,eAAe;eACrC,OAAO,aAAa,EAAE,OAAO;kBAC7B,OAAO,aAAa,EAAE,UAAU;sBAChC,OAAO,aAAa,EAAE,cAAc"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/trickle.d.ts b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/trickle.d.ts new file mode 100644 index 00000000..42ce17c0 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/trickle.d.ts @@ -0,0 +1,9 
@@ +declare const _exports: FileDAGBuilder; +export = _exports; +export type UnixFS = import('ipfs-unixfs').UnixFS; +export type ImporterOptions = import('../../types').ImporterOptions; +export type InProgressImportResult = import('../../types').InProgressImportResult; +export type TrickleDagNode = import('../../types').TrickleDagNode; +export type Reducer = import('../../types').Reducer; +export type FileDAGBuilder = import('../../types').FileDAGBuilder; +//# sourceMappingURL=trickle.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/trickle.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/trickle.d.ts.map new file mode 100644 index 00000000..f2bff826 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/file/trickle.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"trickle.d.ts","sourceRoot":"","sources":["../../../../src/dag-builder/file/trickle.js"],"names":[],"mappings":"wBAcU,cAAc;;qBATX,OAAO,aAAa,EAAE,MAAM;8BAC5B,OAAO,aAAa,EAAE,eAAe;qCACrC,OAAO,aAAa,EAAE,sBAAsB;6BAC5C,OAAO,aAAa,EAAE,cAAc;sBACpC,OAAO,aAAa,EAAE,OAAO;6BAC7B,OAAO,aAAa,EAAE,cAAc"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/index.d.ts b/packages/ipfs-unixfs-importer/dist/src/dag-builder/index.d.ts new file mode 100644 index 00000000..143999a7 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/index.d.ts @@ -0,0 +1,14 @@ +export = dagBuilder; +/** + * @type {DAGBuilder} + */ +declare function dagBuilder(source: AsyncIterable | Iterable, block: import("../types").BlockAPI, options: import("../types").ImporterOptions): AsyncIterable<() => Promise>; +declare namespace dagBuilder { + export { File, Directory, DAGBuilder, Chunker, ChunkValidator }; +} +type File = import('../types').File; +type Directory = import('../types').Directory; +type DAGBuilder = import('../types').DAGBuilder; +type Chunker = import('../types').Chunker; +type ChunkValidator = 
import('../types').ChunkValidator; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/index.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dag-builder/index.d.ts.map new file mode 100644 index 00000000..b25ac40f --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/dag-builder/index.js"],"names":[],"mappings":";AAsDA;;GAEG;AACH,gSA4DC;;;;YA9GY,OAAO,UAAU,EAAE,IAAI;iBACvB,OAAO,UAAU,EAAE,SAAS;kBAC5B,OAAO,UAAU,EAAE,UAAU;eAC7B,OAAO,UAAU,EAAE,OAAO;sBAC1B,OAAO,UAAU,EAAE,cAAc"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.d.ts b/packages/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.d.ts new file mode 100644 index 00000000..d3df9e80 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.d.ts @@ -0,0 +1,13 @@ +export = validateChunks; +/** + * @typedef {import('../types').ChunkValidator} ChunkValidator + */ +/** + * @type {ChunkValidator} + */ +declare function validateChunks(source: AsyncIterable): AsyncIterable; +declare namespace validateChunks { + export { ChunkValidator }; +} +type ChunkValidator = import('../types').ChunkValidator; +//# sourceMappingURL=validate-chunks.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.d.ts.map new file mode 100644 index 00000000..a2e34ccc --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"validate-chunks.d.ts","sourceRoot":"","sources":["../../../src/dag-builder/validate-chunks.js"],"names":[],"mappings":";AAKA;;GAEG;AAEH;;GAEG;AACH,8FAgBC;;;;sBAtBY,OAAO,UAAU,EAAE,cAAc"} \ No newline at end of file diff --git 
a/packages/ipfs-unixfs-importer/dist/src/dir-flat.d.ts b/packages/ipfs-unixfs-importer/dist/src/dir-flat.d.ts new file mode 100644 index 00000000..40b912f7 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dir-flat.d.ts @@ -0,0 +1,31 @@ +export = DirFlat; +/** + * @typedef {import('./types').ImporterOptions} ImporterOptions + * @typedef {import('./types').ImportResult} ImportResult + * @typedef {import('./types').InProgressImportResult} InProgressImportResult + * @typedef {import('./types').BlockAPI} BlockAPI + * @typedef {import('./dir').DirProps} DirProps + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('@ipld/dag-pb').PBLink} PBLink + */ +declare class DirFlat extends Dir { + /** @type {{ [key: string]: InProgressImportResult | Dir }} */ + _children: { + [key: string]: import("./types").InProgressImportResult | Dir; + }; + childCount(): number; + directChildrenCount(): number; + onlyChild(): import("./types").InProgressImportResult | Dir; +} +declare namespace DirFlat { + export { ImporterOptions, ImportResult, InProgressImportResult, BlockAPI, DirProps, PBNode, PBLink }; +} +import Dir = require("./dir"); +type ImporterOptions = import('./types').ImporterOptions; +type ImportResult = import('./types').ImportResult; +type InProgressImportResult = import('./types').InProgressImportResult; +type BlockAPI = import('./types').BlockAPI; +type DirProps = import('./dir').DirProps; +type PBNode = import('@ipld/dag-pb').PBNode; +type PBLink = import('@ipld/dag-pb').PBLink; +//# sourceMappingURL=dir-flat.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dir-flat.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dir-flat.d.ts.map new file mode 100644 index 00000000..7b35660b --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dir-flat.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"dir-flat.d.ts","sourceRoot":"","sources":["../../src/dir-flat.js"],"names":[],"mappings":";AAOA;;;;;;;;GAQG;AAEH;IAQI,8DAA8D;IAC9D;;MAAmB;IAqBrB,qBAEC;IAED,8BAEC;IAED,4DAEC;CAuEF;;;;;uBAxHY,OAAO,SAAS,EAAE,eAAe;oBACjC,OAAO,SAAS,EAAE,YAAY;8BAC9B,OAAO,SAAS,EAAE,sBAAsB;gBACxC,OAAO,SAAS,EAAE,QAAQ;gBAC1B,OAAO,OAAO,EAAE,QAAQ;cACxB,OAAO,cAAc,EAAE,MAAM;cAC7B,OAAO,cAAc,EAAE,MAAM"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dir-sharded.d.ts b/packages/ipfs-unixfs-importer/dist/src/dir-sharded.d.ts new file mode 100644 index 00000000..cfdaee94 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dir-sharded.d.ts @@ -0,0 +1,28 @@ +export = DirSharded; +/** + * @typedef {import('./types').ImporterOptions} ImporterOptions + * @typedef {import('./types').ImportResult} ImportResult + * @typedef {import('./types').InProgressImportResult} InProgressImportResult + * @typedef {import('./types').BlockAPI} BlockAPI + */ +/** + * @typedef {import('./dir').DirProps} DirProps + */ +declare class DirSharded extends Dir { + /** @type {Bucket} */ + _bucket: Bucket; + childCount(): number; + directChildrenCount(): number; + onlyChild(): Bucket | Bucket.BucketChild; +} +declare namespace DirSharded { + export { ImporterOptions, ImportResult, InProgressImportResult, BlockAPI, DirProps }; +} +import Dir = require("./dir"); +import { Bucket } from "hamt-sharding"; +type InProgressImportResult = import('./types').InProgressImportResult; +type ImporterOptions = import('./types').ImporterOptions; +type ImportResult = import('./types').ImportResult; +type BlockAPI = import('./types').BlockAPI; +type DirProps = import('./dir').DirProps; +//# sourceMappingURL=dir-sharded.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dir-sharded.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dir-sharded.d.ts.map new file mode 100644 index 00000000..885d2704 --- /dev/null +++ 
b/packages/ipfs-unixfs-importer/dist/src/dir-sharded.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"dir-sharded.d.ts","sourceRoot":"","sources":["../../src/dir-sharded.js"],"names":[],"mappings":";AAQA;;;;;GAKG;AAEH;;GAEG;AAEH;IAQI,mDAAmD;IACnD,SADW,OAAO,sBAAsB,GAAG,GAAG,CAAC,CAI7C;IAkBJ,qBAEC;IAED,8BAEC;IAED,yIAEC;CAuBF;;;;;;8BAvEY,OAAO,SAAS,EAAE,sBAAsB;uBAFxC,OAAO,SAAS,EAAE,eAAe;oBACjC,OAAO,SAAS,EAAE,YAAY;gBAE9B,OAAO,SAAS,EAAE,QAAQ;gBAI1B,OAAO,OAAO,EAAE,QAAQ"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dir.d.ts b/packages/ipfs-unixfs-importer/dist/src/dir.d.ts new file mode 100644 index 00000000..b9edc648 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dir.d.ts @@ -0,0 +1,85 @@ +export = Dir; +/** + * @typedef {import('./types').ImporterOptions} ImporterOptions + * @typedef {import('./types').ImportResult} ImportResult + * @typedef {import('./types').InProgressImportResult} InProgressImportResult + * @typedef {import('./types').BlockAPI} BlockAPI + * @typedef {import('multiformats/cid').CID} CID + * @typedef {object} DirProps + * @property {boolean} root + * @property {boolean} dir + * @property {string} path + * @property {boolean} dirty + * @property {boolean} flat + * @property {Dir} [parent] + * @property {string} [parentKey] + * @property {import('ipfs-unixfs').UnixFS} [unixfs] + * @property {number} [mode] + * @property {import('ipfs-unixfs').Mtime} [mtime] + */ +declare class Dir { + /** + * + * @param {DirProps} props + * @param {ImporterOptions} options + */ + constructor(props: DirProps, options: ImporterOptions); + options: import("./types").ImporterOptions; + root: boolean; + dir: boolean; + path: string; + dirty: boolean; + flat: boolean; + parent: import("./dir") | undefined; + parentKey: string | undefined; + unixfs: import("ipfs-unixfs").UnixFS | undefined; + mode: number | undefined; + mtime: import("ipfs-unixfs/dist/src/types").Mtime | undefined; + /** @type {CID | undefined} */ + cid: CID | 
undefined; + /** @type {number | undefined} */ + size: number | undefined; + /** + * @param {string} name + * @param {InProgressImportResult | Dir} value + */ + put(name: string, value: InProgressImportResult | Dir): Promise; + /** + * @param {string} name + * @returns {Promise} + */ + get(name: string): Promise; + /** + * @returns {AsyncIterable<{ key: string, child: InProgressImportResult | Dir}>} + */ + eachChildSeries(): AsyncIterable<{ + key: string; + child: InProgressImportResult | Dir; + }>; + /** + * @param {BlockAPI} block + * @returns {AsyncIterable} + */ + flush(block: BlockAPI): AsyncIterable; +} +declare namespace Dir { + export { ImporterOptions, ImportResult, InProgressImportResult, BlockAPI, CID, DirProps }; +} +type CID = import('multiformats/cid').CID; +type InProgressImportResult = import('./types').InProgressImportResult; +type BlockAPI = import('./types').BlockAPI; +type ImportResult = import('./types').ImportResult; +type DirProps = { + root: boolean; + dir: boolean; + path: string; + dirty: boolean; + flat: boolean; + parent?: import("./dir") | undefined; + parentKey?: string | undefined; + unixfs?: import("ipfs-unixfs").UnixFS | undefined; + mode?: number | undefined; + mtime?: import("ipfs-unixfs/dist/src/types").Mtime | undefined; +}; +type ImporterOptions = import('./types').ImporterOptions; +//# sourceMappingURL=dir.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/dir.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/dir.d.ts.map new file mode 100644 index 00000000..28399a34 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/dir.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"dir.d.ts","sourceRoot":"","sources":["../../src/dir.js"],"names":[],"mappings":";AAEA;;;;;;;;;;;;;;;;;GAiBG;AACH;IACE;;;;OAIG;IACH,mBAHW,QAAQ,WACR,eAAe,EAoBzB;IAjBC,2CAA4B;IAE5B,cAAsB;IACtB,aAAoB;IACpB,aAAsB;IACtB,eAAwB;IACxB,cAAsB;IACtB,oCAA0B;IAC1B,8BAAgC;IAChC,iDAA0B;IAC1B,yBAAsB;IACtB,8DAAwB;IAExB,8BAA8B;IAC9B,KADW,GAAG,GAAG,SAAS,CACN;IACpB,iCAAiC;IACjC,MADW,MAAM,GAAG,SAAS,CACR;IAGvB;;;OAGG;IACH,UAHW,MAAM,SACN,sBAAsB,GAAG,GAAG,iBAEZ;IAE3B;;;OAGG;IACH,UAHW,MAAM,GACJ,QAAQ,sBAAsB,GAAG,GAAG,GAAG,SAAS,CAAC,CAI7D;IAED;;OAEG;IACH,mBAFa,cAAc;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,sBAAsB,GAAG,GAAG,CAAA;KAAC,CAAC,CAEjD;IAE9B;;;OAGG;IACH,aAHW,QAAQ,GACN,cAAc,YAAY,CAAC,CAEf;CAC1B;;;;WA/DY,OAAO,kBAAkB,EAAE,GAAG;8BAF9B,OAAO,SAAS,EAAE,sBAAsB;gBACxC,OAAO,SAAS,EAAE,QAAQ;oBAF1B,OAAO,SAAS,EAAE,YAAY;;UAK7B,OAAO;SACP,OAAO;UACP,MAAM;WACN,OAAO;UACP,OAAO;;;;;;;uBAVR,OAAO,SAAS,EAAE,eAAe"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/flat-to-shard.d.ts b/packages/ipfs-unixfs-importer/dist/src/flat-to-shard.d.ts new file mode 100644 index 00000000..02dfe981 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/flat-to-shard.d.ts @@ -0,0 +1,6 @@ +declare function _exports(child: Dir | null, dir: Dir, threshold: number, options: ImporterOptions): Promise; +export = _exports; +export type Dir = import('./dir'); +export type ImporterOptions = import('./types').ImporterOptions; +import DirSharded = require("./dir-sharded"); +//# sourceMappingURL=flat-to-shard.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/flat-to-shard.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/flat-to-shard.d.ts.map new file mode 100644 index 00000000..30a3a53b --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/flat-to-shard.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"flat-to-shard.d.ts","sourceRoot":"","sources":["../../src/flat-to-shard.js"],"names":[],"mappings":"AAiBiB,iCANN,GAAG,GAAG,IAAI,OACV,GAAG,aACH,MAAM,WACN,eAAe,GACb,QAAQ,UAAU,CAAC,CA6B/B;;kBAtCY,OAAO,OAAO,CAAC;8BACf,OAAO,SAAS,EAAE,eAAe"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/index.d.ts b/packages/ipfs-unixfs-importer/dist/src/index.d.ts new file mode 100644 index 00000000..0f654835 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/index.d.ts @@ -0,0 +1,43 @@ +export type BlockAPI = import('./types').BlockAPI; +export type ImportCandidate = import('./types').ImportCandidate; +export type UserImporterOptions = import('./types').UserImporterOptions; +export type ImporterOptions = import('./types').ImporterOptions; +export type Directory = import('./types').Directory; +export type File = import('./types').File; +export type ImportResult = import('./types').ImportResult; +export type Chunker = import('./types').Chunker; +export type DAGBuilder = import('./types').DAGBuilder; +export type TreeBuilder = import('./types').TreeBuilder; +export type BufferImporter = import('./types').BufferImporter; +export type ChunkValidator = import('./types').ChunkValidator; +export type Reducer = import('./types').Reducer; +export type ProgressHandler = import('./types').ProgressHandler; +/** + * @typedef {import('./types').BlockAPI} BlockAPI + * @typedef {import('./types').ImportCandidate} ImportCandidate + * @typedef {import('./types').UserImporterOptions} UserImporterOptions + * @typedef {import('./types').ImporterOptions} ImporterOptions + * @typedef {import('./types').Directory} Directory + * @typedef {import('./types').File} File + * @typedef {import('./types').ImportResult} ImportResult + * + * @typedef {import('./types').Chunker} Chunker + * @typedef {import('./types').DAGBuilder} DAGBuilder + * @typedef {import('./types').TreeBuilder} TreeBuilder + * @typedef {import('./types').BufferImporter} BufferImporter 
+ * @typedef {import('./types').ChunkValidator} ChunkValidator + * @typedef {import('./types').Reducer} Reducer + * @typedef {import('./types').ProgressHandler} ProgressHandler + */ +/** + * @param {AsyncIterable | Iterable | ImportCandidate} source + * @param {BlockAPI} block + * @param {UserImporterOptions} options + */ +export function importer(source: AsyncIterable | Iterable | ImportCandidate, block: BlockAPI, options?: UserImporterOptions): AsyncGenerator<{ + cid: import("multiformats/cid").CID; + path: string | undefined; + unixfs: import("ipfs-unixfs").UnixFS | undefined; + size: number; +}, void, unknown>; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/index.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/index.d.ts.map new file mode 100644 index 00000000..6a5777a2 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.js"],"names":[],"mappings":"uBAMa,OAAO,SAAS,EAAE,QAAQ;8BAC1B,OAAO,SAAS,EAAE,eAAe;kCACjC,OAAO,SAAS,EAAE,mBAAmB;8BACrC,OAAO,SAAS,EAAE,eAAe;wBACjC,OAAO,SAAS,EAAE,SAAS;mBAC3B,OAAO,SAAS,EAAE,IAAI;2BACtB,OAAO,SAAS,EAAE,YAAY;sBAE9B,OAAO,SAAS,EAAE,OAAO;yBACzB,OAAO,SAAS,EAAE,UAAU;0BAC5B,OAAO,SAAS,EAAE,WAAW;6BAC7B,OAAO,SAAS,EAAE,cAAc;6BAChC,OAAO,SAAS,EAAE,cAAc;sBAChC,OAAO,SAAS,EAAE,OAAO;8BACzB,OAAO,SAAS,EAAE,eAAe;AAf9C;;;;;;;;;;;;;;;;GAgBG;AAEH;;;;GAIG;AACH,iCAJW,cAAc,eAAe,CAAC,GAAG,SAAS,eAAe,CAAC,GAAG,eAAe,SAC5E,QAAQ,YACR,mBAAmB;;;;;kBAwC7B"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/options.d.ts b/packages/ipfs-unixfs-importer/dist/src/options.d.ts new file mode 100644 index 00000000..6f391813 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/options.d.ts @@ -0,0 +1,5 @@ +declare function _exports(options?: UserImporterOptions): ImporterOptions; +export = _exports; +export type UserImporterOptions = 
import('./types').UserImporterOptions; +export type ImporterOptions = import('./types').ImporterOptions; +//# sourceMappingURL=options.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/options.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/options.d.ts.map new file mode 100644 index 00000000..c9e50ba1 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/options.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"options.d.ts","sourceRoot":"","sources":["../../src/options.js"],"names":[],"mappings":"AAwEiB,oCAHN,mBAAmB,GACjB,eAAe,CAI3B;;kCA9CY,OAAO,SAAS,EAAE,mBAAmB;8BACrC,OAAO,SAAS,EAAE,eAAe"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/tree-builder.d.ts b/packages/ipfs-unixfs-importer/dist/src/tree-builder.d.ts new file mode 100644 index 00000000..8f654459 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/tree-builder.d.ts @@ -0,0 +1,14 @@ +export = treeBuilder; +/** + * @type {TreeBuilder} + */ +declare function treeBuilder(source: AsyncIterable, block: import("./types").BlockAPI, options: import("./types").ImporterOptions): AsyncIterable; +declare namespace treeBuilder { + export { ImportResult, InProgressImportResult, ImporterOptions, BlockAPI, TreeBuilder }; +} +type ImportResult = import('./types').ImportResult; +type InProgressImportResult = import('./types').InProgressImportResult; +type ImporterOptions = import('./types').ImporterOptions; +type BlockAPI = import('./types').BlockAPI; +type TreeBuilder = (source: AsyncIterable, block: BlockAPI, options: ImporterOptions) => AsyncIterable; +//# sourceMappingURL=tree-builder.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/tree-builder.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/tree-builder.d.ts.map new file mode 100644 index 00000000..c9dd22ca --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/tree-builder.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tree-builder.d.ts","sourceRoot":"","sources":["../../src/tree-builder.js"],"names":[],"mappings":";AAiFA;;GAEG;AACH,4NAiCC;;;;oBA7GY,OAAO,SAAS,EAAE,YAAY;8BAC9B,OAAO,SAAS,EAAE,sBAAsB;uBACxC,OAAO,SAAS,EAAE,eAAe;gBACjC,OAAO,SAAS,EAAE,QAAQ;4BACjB,cAAc,sBAAsB,CAAC,SAAS,QAAQ,WAAW,eAAe,KAAK,cAAc,YAAY,CAAC"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/types.d.ts b/packages/ipfs-unixfs-importer/dist/src/types.d.ts new file mode 100644 index 00000000..2d32eefd --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/types.d.ts @@ -0,0 +1,167 @@ +import { UnixFS, Mtime } from 'ipfs-unixfs' +import { CID } from 'multiformats/cid' +import { HashName } from 'multihashes' +import { CodecName } from 'multicodec' +import MultihashDigest from 'multiformats/hashes/hasher' + +interface ImportCandidate { + path?: string + content?: AsyncIterable + mtime?: Mtime + mode?: number +} + +interface File { + content: AsyncIterable + path?: string + mtime?: Mtime + mode?: number +} + +interface Directory { + path?: string + mtime?: Mtime + mode?: number +} + +interface ImportResult { + cid: CID + size: number + path?: string + unixfs?: UnixFS +} + +interface InProgressImportResult extends ImportResult { + single?: boolean +} + +type ChunkerType = 'fixed' | 'rabin' +type ProgressHandler = (chunkSize: number, path?: string) => void +type HamtHashFn = (value: Uint8Array) => Promise +type Chunker = (source: AsyncIterable, options: ImporterOptions) => AsyncIterable +type DAGBuilder = (source: AsyncIterable | Iterable, block: BlockAPI, options: ImporterOptions) => AsyncIterable<() => Promise> +type TreeBuilder = (source: AsyncIterable, block: BlockAPI, options: ImporterOptions) => AsyncIterable +type BufferImporter = (file: File, block: BlockAPI, options: ImporterOptions) => AsyncIterable<() => Promise> +type ChunkValidator = (source: AsyncIterable, options: ImporterOptions) => AsyncIterable +type UnixFSV1DagBuilder = (item: T, block: 
BlockAPI, options: ImporterOptions) => Promise +type Reducer = (leaves: InProgressImportResult[]) => Promise + +type FileDAGBuilder = (source: AsyncIterable | Iterable, reducer: Reducer, options: ImporterOptions) => Promise + +interface UserImporterOptions { + strategy?: 'balanced' | 'flat' | 'trickle' + rawLeaves?: boolean + onlyHash?: boolean + reduceSingleLeafToSelf?: boolean + hasher?: MultihashDigest + leafType?: 'file' | 'raw' + cidVersion?: CIDVersion + progress?: ProgressHandler + shardSplitThreshold?: number + fileImportConcurrency?: number + blockWriteConcurrency?: number + minChunkSize?: number + maxChunkSize?: number + avgChunkSize?: number + window?: number + polynomial?: number + maxChildrenPerNode?: number + layerRepeat?: number + wrapWithDirectory?: boolean + pin?: boolean + recursive?: boolean + hidden?: boolean + preload?: boolean + timeout?: number + hamtHashFn?: HamtHashFn + hamtBucketBits?: number + hamtHashCode?: number + chunker?: ChunkerType | Chunker + dagBuilder?: DAGBuilder + treeBuilder?: TreeBuilder + bufferImporter?: BufferImporter + chunkValidator?: ChunkValidator +} + +interface ImporterOptions { + strategy: 'balanced' | 'flat' | 'trickle' + rawLeaves: boolean + onlyHash: boolean + reduceSingleLeafToSelf: boolean + hasher: MultihashDigest + leafType: 'file' | 'raw' + cidVersion: CIDVersion + progress: ProgressHandler + shardSplitThreshold: number + fileImportConcurrency: number + blockWriteConcurrency: number + minChunkSize: number + maxChunkSize: number + avgChunkSize: number + window: number + polynomial: number + maxChildrenPerNode: number + layerRepeat: number + wrapWithDirectory: boolean + pin: boolean + recursive: boolean + hidden: boolean + preload: boolean + timeout?: number + hamtHashFn: HamtHashFn + hamtBucketBits: number + hamtHashCode: number + chunker: ChunkerType | Chunker + dagBuilder?: DAGBuilder + treeBuilder?: TreeBuilder + bufferImporter?: BufferImporter + chunkValidator?: ChunkValidator +} + +export interface 
TrickleDagNode { + children: InProgressImportResult[], + depth: number, + maxDepth: number, + maxChildren: number, + data?: InProgressImportResult[], + parent?: TrickleDagNode + cid?: CID, + size?: number, + unixfs?: UnixFS +} + +export interface PersistOptions { + //codec?: string + codec?: number + cidVersion: CIDVersion + hasher: MultihashDigest + onlyHash: boolean + preload?: boolean + timeout?: number + signal?: AbortSignal +} + +// TODO vmx 2021-03-24: decide where to put this +export interface Block { + cid: CID + bytes: Uint8Array +} + +// TODO: remove this and get from core-ipfs-types +export interface BlockAPI { + get: (cid: CID, options?: BlockOptions) => Promise + put: (block: Block, options?: PutOptions) => Promise +} + +// TODO: remove this and get from core-ipfs-types +export interface BlockOptions { + signal?: AbortSignal + timeout?: number + preload?: boolean +} + +// TODO: remove this and get from core-ipfs-types +export interface PutOptions extends BlockOptions { + onlyHash?: boolean + pin?: boolean +} diff --git a/packages/ipfs-unixfs-importer/dist/src/utils/persist.d.ts b/packages/ipfs-unixfs-importer/dist/src/utils/persist.d.ts new file mode 100644 index 00000000..29e94370 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/utils/persist.d.ts @@ -0,0 +1,9 @@ +export = persist; +/** + * @param {Uint8Array} buffer + * @param {import('../types').BlockAPI} block + * @param {import('../types').PersistOptions} options + */ +declare function persist(buffer: Uint8Array, block: import('../types').BlockAPI, options: import('../types').PersistOptions): Promise; +import { CID } from "multiformats/cid"; +//# sourceMappingURL=persist.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/utils/persist.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/utils/persist.d.ts.map new file mode 100644 index 00000000..b45eeb7c --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/utils/persist.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"persist.d.ts","sourceRoot":"","sources":["../../../src/utils/persist.js"],"names":[],"mappings":";AAKA;;;;GAIG;AACH,iCAJW,UAAU,SACV,OAAO,UAAU,EAAE,QAAQ,WAC3B,OAAO,UAAU,EAAE,cAAc,gBA+B3C"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/utils/to-path-components.d.ts b/packages/ipfs-unixfs-importer/dist/src/utils/to-path-components.d.ts new file mode 100644 index 00000000..0888cc82 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/utils/to-path-components.d.ts @@ -0,0 +1,3 @@ +export = toPathComponents; +declare function toPathComponents(path?: string): string[]; +//# sourceMappingURL=to-path-components.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/dist/src/utils/to-path-components.d.ts.map b/packages/ipfs-unixfs-importer/dist/src/utils/to-path-components.d.ts.map new file mode 100644 index 00000000..e1370932 --- /dev/null +++ b/packages/ipfs-unixfs-importer/dist/src/utils/to-path-components.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"to-path-components.d.ts","sourceRoot":"","sources":["../../../src/utils/to-path-components.js"],"names":[],"mappings":";AAEA,2DAMC"} \ No newline at end of file diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index 9263d40c..a32db397 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -39,28 +39,25 @@ "copy": "^0.3.2", "crypto-browserify": "^3.12.0", "events": "^3.3.0", - "ipld": "^0.29.0", - "ipld-block": "^0.11.1", - "ipld-in-memory": "^8.0.0", "it-buffer-stream": "^2.0.0", - "multicodec": "^3.0.1", "nyc": "^15.0.0", "readable-stream": "^3.6.0", "rimraf": "^3.0.2", "util": "^0.12.3" }, "dependencies": { + "@ipld/dag-pb": "^1.1.0", "bl": "^5.0.0", - "cids": "^1.1.5", "err-code": "^3.0.1", "hamt-sharding": "^2.0.0", "ipfs-unixfs": "^4.0.3", - "ipld-dag-pb": "^0.22.2", "it-all": "^1.0.5", "it-batch": "^1.0.8", "it-first": "^1.0.6", 
"it-parallel-batch": "^1.0.9", "merge-options": "^3.0.4", + "multicodec": "^3.0.1", + "multiformats": "^8.0.3", "multihashing-async": "^2.1.0", "rabin-wasm": "^0.1.4", "uint8arrays": "^2.1.2" diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js b/packages/ipfs-unixfs-importer/src/dag-builder/dir.js index 36106c9e..cd5749f2 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/dir.js @@ -2,9 +2,7 @@ const { UnixFS } = require('ipfs-unixfs') const persist = require('../utils/persist') -const { - DAGNode -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') /** * @typedef {import('../types').Directory} Directory @@ -20,7 +18,7 @@ const dirBuilder = async (item, block, options) => { mode: item.mode }) - const buffer = new DAGNode(unixfs.marshal()).serialize() + const buffer = encode(prepare({ Data: unixfs.marshal() })) const cid = await persist(buffer, block, options) const path = item.path diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js index b8179e89..6cd2d914 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js @@ -2,9 +2,8 @@ const { UnixFS } = require('ipfs-unixfs') const persist = require('../../utils/persist') -const { - DAGNode -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') +const mc = require('multicodec') /** * @typedef {import('../../types').BufferImporter} BufferImporter @@ -21,14 +20,14 @@ async function * bufferImporter (file, block, options) { /** @type {import('../../types').PersistOptions} */ const opts = { - codec: 'dag-pb', + codec: mc.DAG_PB, cidVersion: options.cidVersion, - hashAlg: options.hashAlg, + hasher: options.hasher, onlyHash: options.onlyHash } if (options.rawLeaves) { - opts.codec = 'raw' + 
opts.codec = mc.RAW opts.cidVersion = 1 } else { unixfs = new UnixFS({ @@ -38,7 +37,7 @@ async function * bufferImporter (file, block, options) { mode: file.mode }) - buffer = new DAGNode(unixfs.marshal()).serialize() + buffer = encode(prepare({ Data: unixfs.marshal() })) } return { diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js index 152dac91..10128b7a 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js @@ -3,12 +3,10 @@ const errCode = require('err-code') const { UnixFS } = require('ipfs-unixfs') const persist = require('../../utils/persist') -const { - DAGNode, - DAGLink -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') const parallelBatch = require('it-parallel-batch') -const mh = require('multihashing-async').multihash +const mc = require('multicodec') +const rawCodec = require('multiformats/codecs/raw') /** * @typedef {import('../../types').BlockAPI} BlockAPI @@ -77,10 +75,10 @@ const reduce = (file, block, options) => { if (leaves.length === 1 && leaves[0].single && options.reduceSingleLeafToSelf) { const leaf = leaves[0] - if (leaf.cid.codec === 'raw' && (file.mtime !== undefined || file.mode !== undefined)) { + if (leaf.cid.code === rawCodec.code && (file.mtime !== undefined || file.mode !== undefined)) { // only one leaf node which is a buffer - we have metadata so convert it into a // UnixFS entry otherwise we'll have nowhere to store the metadata - let { data: buffer } = await block.get(leaf.cid, options) + let { bytes: buffer } = await block.get(leaf.cid, options) leaf.unixfs = new UnixFS({ type: 'file', @@ -89,13 +87,31 @@ const reduce = (file, block, options) => { data: buffer }) - const multihash = mh.decode(leaf.cid.multihash) - buffer = new DAGNode(leaf.unixfs.marshal()).serialize() - + buffer = encode(prepare({ Data: leaf.unixfs.marshal() })) + 
+ // // TODO vmx 2021-03-26: This is what the original code does, it checks + // // the multihash of the original leaf node and uses then the same + // // hasher. i wonder if that's really needed or if we could just use + // // the hasher from `options.hasher` instead. + // const multihash = mh.decode(leaf.cid.multihash.bytes) + // let hasher + // switch multihash { + // case sha256.code { + // hasher = sha256 + // break; + // } + // //case identity.code { + // // hasher = identity + // // break; + // //} + // default: { + // throw new Error(`Unsupported hasher "${multihash}"`) + // } + // } leaf.cid = await persist(buffer, block, { ...options, - codec: 'dag-pb', - hashAlg: multihash.name, + codec: mc.DAG_PB, + hasher: options.hasher, cidVersion: options.cidVersion }) leaf.size = buffer.length @@ -118,7 +134,7 @@ const reduce = (file, block, options) => { const links = leaves .filter(leaf => { - if (leaf.cid.codec === 'raw' && leaf.size) { + if (leaf.cid.code === rawCodec.code && leaf.size) { return true } @@ -129,11 +145,15 @@ const reduce = (file, block, options) => { return Boolean(leaf.unixfs && leaf.unixfs.data && leaf.unixfs.data.length) }) .map((leaf) => { - if (leaf.cid.codec === 'raw') { + if (leaf.cid.code === rawCodec.code) { // node is a leaf buffer f.addBlockSize(leaf.size) - return new DAGLink('', leaf.size, leaf.cid) + return { + Name: '', + Tsize: leaf.size, + Hash: leaf.cid + } } if (!leaf.unixfs || !leaf.unixfs.data) { @@ -144,11 +164,18 @@ const reduce = (file, block, options) => { f.addBlockSize(leaf.unixfs.data.length) } - return new DAGLink('', leaf.size, leaf.cid) + return { + Name: '', + Tsize: leaf.size, + Hash: leaf.cid + } }) - const node = new DAGNode(f.marshal(), links) - const buffer = node.serialize() + const node = { + Data: f.marshal(), + Links: links + } + const buffer = encode(prepare(node)) const cid = await persist(buffer, block, options) return { diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js 
b/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js index fc1c6aef..1de9c98c 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js @@ -3,7 +3,6 @@ const batch = require('it-batch') /** - * @typedef {import('cids')} CID * @typedef {import('ipfs-unixfs').UnixFS} UnixFS * @typedef {import('../../types').ImporterOptions} ImporterOptions * @typedef {import('../../types').InProgressImportResult} InProgressImportResult diff --git a/packages/ipfs-unixfs-importer/src/dir-flat.js b/packages/ipfs-unixfs-importer/src/dir-flat.js index 0a720e4a..2d6e11c1 100644 --- a/packages/ipfs-unixfs-importer/src/dir-flat.js +++ b/packages/ipfs-unixfs-importer/src/dir-flat.js @@ -1,9 +1,6 @@ 'use strict' -const { - DAGLink, - DAGNode -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') const { UnixFS } = require('ipfs-unixfs') const Dir = require('./dir') const persist = require('./utils/persist') @@ -14,7 +11,8 @@ const persist = require('./utils/persist') * @typedef {import('./types').InProgressImportResult} InProgressImportResult * @typedef {import('./types').BlockAPI} BlockAPI * @typedef {import('./dir').DirProps} DirProps - * @typedef {import('cids')} CID + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * @typedef {import('@ipld/dag-pb').PBLink} PBLink */ class DirFlat extends Dir { @@ -92,7 +90,11 @@ class DirFlat extends Dir { } if (child.size != null && child.cid) { - links.push(new DAGLink(children[i], child.size, child.cid)) + links.push({ + Name: children[i], + Tsize: child.size, + Hash: child.cid + }) } } @@ -102,15 +104,16 @@ class DirFlat extends Dir { mode: this.mode }) - const node = new DAGNode(unixfs.marshal(), links) - const buffer = node.serialize() + /** @type {PBNode} */ + const node = { Data: unixfs.marshal(), Links: links } + const buffer = encode(prepare(node)) const cid = await persist(buffer, block, this.options) const size = 
buffer.length + node.Links.reduce( /** * @param {number} acc - * @param {DAGLink} curr + * @param {PBLink} curr */ - (acc, curr) => acc + curr.Tsize, + (acc, curr) => acc + (curr.Tsize == null ? 0 : curr.Tsize), 0) this.cid = cid diff --git a/packages/ipfs-unixfs-importer/src/dir-sharded.js b/packages/ipfs-unixfs-importer/src/dir-sharded.js index e0567838..9b523496 100644 --- a/packages/ipfs-unixfs-importer/src/dir-sharded.js +++ b/packages/ipfs-unixfs-importer/src/dir-sharded.js @@ -1,9 +1,6 @@ 'use strict' -const { - DAGLink, - DAGNode -} = require('ipld-dag-pb') +const { encode, prepare } = require('@ipld/dag-pb') const { UnixFS } = require('ipfs-unixfs') const Dir = require('./dir') const persist = require('./utils/persist') @@ -119,7 +116,11 @@ async function * flush (bucket, block, shardRoot, options) { throw new Error('Could not flush sharded directory, no subshard found') } - links.push(new DAGLink(labelPrefix, shard.size, shard.cid)) + links.push({ + Name: labelPrefix, + Tsize: shard.size, + Hash: shard.cid + }) childrenSize += shard.size } else if (typeof child.value.flush === 'function') { const dir = child.value @@ -132,7 +133,11 @@ async function * flush (bucket, block, shardRoot, options) { } const label = labelPrefix + child.key - links.push(new DAGLink(label, flushedDir.size, flushedDir.cid)) + links.push({ + Name: label, + Tsize: flushedDir.size, + Hash: flushedDir.cid + }) childrenSize += flushedDir.size } else { @@ -145,7 +150,11 @@ async function * flush (bucket, block, shardRoot, options) { const label = labelPrefix + child.key const size = value.size - links.push(new DAGLink(label, size, value.cid)) + links.push({ + Name: label, + Tsize: size, + Hash: value.cid + }) childrenSize += size } } @@ -162,8 +171,11 @@ async function * flush (bucket, block, shardRoot, options) { mode: shardRoot && shardRoot.mode }) - const node = new DAGNode(dir.marshal(), links) - const buffer = node.serialize() + const node = { + Data: dir.marshal(), + Links: links 
+ } + const buffer = encode(prepare(node)) const cid = await persist(buffer, block, options) const size = buffer.length + childrenSize diff --git a/packages/ipfs-unixfs-importer/src/dir.js b/packages/ipfs-unixfs-importer/src/dir.js index 072d66a6..d235e38c 100644 --- a/packages/ipfs-unixfs-importer/src/dir.js +++ b/packages/ipfs-unixfs-importer/src/dir.js @@ -5,7 +5,7 @@ * @typedef {import('./types').ImportResult} ImportResult * @typedef {import('./types').InProgressImportResult} InProgressImportResult * @typedef {import('./types').BlockAPI} BlockAPI - * @typedef {import('cids')} CID + * @typedef {import('multiformats/cid').CID} CID * @typedef {object} DirProps * @property {boolean} root * @property {boolean} dir diff --git a/packages/ipfs-unixfs-importer/src/options.js b/packages/ipfs-unixfs-importer/src/options.js index 6e9f4a3c..6dd51a45 100644 --- a/packages/ipfs-unixfs-importer/src/options.js +++ b/packages/ipfs-unixfs-importer/src/options.js @@ -2,6 +2,7 @@ const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const multihashing = require('multihashing-async') +const { sha256 } = require('multiformats/hashes/sha2') /** * @param {Uint8Array} buf @@ -38,7 +39,7 @@ const defaultOptions = { rawLeaves: false, onlyHash: false, reduceSingleLeafToSelf: true, - hashAlg: 'sha2-256', + hasher: sha256, leafType: 'file', // 'raw' cidVersion: 0, progress: () => () => {}, diff --git a/packages/ipfs-unixfs-importer/src/types.d.ts b/packages/ipfs-unixfs-importer/src/types.d.ts index eacedca3..2d32eefd 100644 --- a/packages/ipfs-unixfs-importer/src/types.d.ts +++ b/packages/ipfs-unixfs-importer/src/types.d.ts @@ -1,8 +1,8 @@ import { UnixFS, Mtime } from 'ipfs-unixfs' -import CID, { CIDVersion } from 'cids' +import { CID } from 'multiformats/cid' import { HashName } from 'multihashes' -import Block from 'ipld-block' import { CodecName } from 'multicodec' +import MultihashDigest from 'multiformats/hashes/hasher' interface ImportCandidate { path?: string 
@@ -53,7 +53,7 @@ interface UserImporterOptions { rawLeaves?: boolean onlyHash?: boolean reduceSingleLeafToSelf?: boolean - hashAlg?: HashName + hasher?: MultihashDigest leafType?: 'file' | 'raw' cidVersion?: CIDVersion progress?: ProgressHandler @@ -88,7 +88,7 @@ interface ImporterOptions { rawLeaves: boolean onlyHash: boolean reduceSingleLeafToSelf: boolean - hashAlg: HashName + hasher: MultihashDigest leafType: 'file' | 'raw' cidVersion: CIDVersion progress: ProgressHandler @@ -131,19 +131,26 @@ export interface TrickleDagNode { } export interface PersistOptions { - codec?: string + //codec?: string + codec?: number cidVersion: CIDVersion - hashAlg: HashName + hasher: MultihashDigest onlyHash: boolean preload?: boolean timeout?: number signal?: AbortSignal } +// TODO vmx 2021-03-24: decide where to put this +export interface Block { + cid: CID + bytes: Uint8Array +} + // TODO: remove this and get from core-ipfs-types export interface BlockAPI { - get: (cid: CID | string | Uint8Array, options?: BlockOptions) => Promise - put: (block: Block | Uint8Array, options?: PutOptions) => Promise + get: (cid: CID, options?: BlockOptions) => Promise + put: (block: Block, options?: PutOptions) => Promise } // TODO: remove this and get from core-ipfs-types @@ -155,9 +162,6 @@ export interface BlockOptions { // TODO: remove this and get from core-ipfs-types export interface PutOptions extends BlockOptions { - cid?: CID - format?: CodecName - mhtype?: HashName - version?: CIDVersion + onlyHash?: boolean pin?: boolean } diff --git a/packages/ipfs-unixfs-importer/src/utils/persist.js b/packages/ipfs-unixfs-importer/src/utils/persist.js index a4aba3be..bf170df5 100644 --- a/packages/ipfs-unixfs-importer/src/utils/persist.js +++ b/packages/ipfs-unixfs-importer/src/utils/persist.js @@ -1,7 +1,7 @@ 'use strict' -const mh = require('multihashing-async') -const CID = require('cids') +const mc = require('multicodec') +const { CID } = require('multiformats/cid') /** * @param {Uint8Array} 
buffer @@ -9,33 +9,30 @@ const CID = require('cids') * @param {import('../types').PersistOptions} options */ const persist = async (buffer, block, options) => { - if (!options.codec) { - options.codec = 'dag-pb' - } - - if (!options.cidVersion) { - options.cidVersion = 0 + if (!options.hasher) { + throw new Error('Hasher must be specified.') } - if (!options.hashAlg) { - options.hashAlg = 'sha2-256' + if (!options.codec) { + options.codec = mc.DAG_PB } - if (options.hashAlg !== 'sha2-256') { + if (options.cidVersion === undefined) { options.cidVersion = 1 } - const multihash = await mh(buffer, options.hashAlg) - const cid = new CID(options.cidVersion, options.codec, multihash) + const multihash = await options.hasher.digest(buffer) + const cid = CID.create(options.cidVersion, options.codec, multihash) if (!options.onlyHash) { - // @ts-ignore block api takes uint8arrays or blocks but is missing from typedefs - await block.put(buffer, { + await block.put({ + bytes: buffer, + cid + }, { // @ts-ignore pin option is missing from block api typedefs pin: options.pin, preload: options.preload, - timeout: options.timeout, - cid + timeout: options.timeout }) } diff --git a/packages/ipfs-unixfs-importer/test/benchmark.spec.js b/packages/ipfs-unixfs-importer/test/benchmark.spec.js index 73079e54..c6e7e1ff 100644 --- a/packages/ipfs-unixfs-importer/test/benchmark.spec.js +++ b/packages/ipfs-unixfs-importer/test/benchmark.spec.js @@ -3,12 +3,7 @@ const { importer } = require('../src') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const bufferStream = require('it-buffer-stream') -const all = require('it-all') const blockApi = require('./helpers/block') const REPEATS = 10 @@ -18,15 +13,8 @@ const CHUNK_SIZE = 65536 describe.skip('benchmark', function () { this.timeout(30 * 1000) - /** @type {import('ipld')} */ - let ipld /** @type {import('../src').BlockAPI} */ - let block - - before(async () => { - ipld = await 
inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() /** @type {number[]} */ const times = [] @@ -67,7 +55,7 @@ describe.skip('benchmark', function () { const buf = new Uint8Array(CHUNK_SIZE).fill(0) - await all(importer([{ + await importer([{ path: '200Bytes.txt', content: bufferStream(size, { chunkSize: CHUNK_SIZE, @@ -75,7 +63,7 @@ describe.skip('benchmark', function () { return buf } }) - }], block, options)) + }], block, options) }) } }) diff --git a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js index 91d830b2..84c072cf 100644 --- a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js @@ -3,7 +3,7 @@ const { expect } = require('aegir/utils/chai') const builder = require('../src/dag-builder/file/balanced') -const CID = require('cids') +const { CID } = require('multiformats/cid') const defaultOptions = require('../src/options') /** @@ -31,7 +31,7 @@ const options = { describe('builder: balanced', () => { it('reduces one value into itself', async () => { const source = [{ - cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), + cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), size: 0 }] @@ -44,13 +44,13 @@ describe('builder: balanced', () => { it('reduces 3 values into parent', async () => { const source = [{ - cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), + cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), size: 0 }, { - cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), + cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), size: 0 }, { - cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), + cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), size: 0 }] diff --git a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js 
b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js index 42e22930..3ad17cf2 100644 --- a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js @@ -2,9 +2,6 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const builder = require('../src/dag-builder') const all = require('it-all') const blockApi = require('./helpers/block') @@ -12,15 +9,8 @@ const defaultOptions = require('../src/options') const asAsyncIterable = require('./helpers/as-async-iterable') describe('builder: onlyHash', () => { - /** @type {IPLD} */ - let ipld /** @type {import('../src/types').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('will only chunk and hash if passed an "onlyHash" option', async () => { const nodes = await all(builder([{ @@ -34,7 +24,7 @@ describe('builder: onlyHash', () => { expect(nodes.length).to.equal(1) try { - await ipld.get((await nodes[0]()).cid) + await block.get((await nodes[0]()).cid) throw new Error('Should have errored') } catch (err) { diff --git a/packages/ipfs-unixfs-importer/test/builder.spec.js b/packages/ipfs-unixfs-importer/test/builder.spec.js index 49a92d87..1f77e288 100644 --- a/packages/ipfs-unixfs-importer/test/builder.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder.spec.js @@ -2,11 +2,9 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const mh = require('multihashing-async').multihash -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') +const mh = require('multiformats/hashes/digest') +const { sha256, sha512 } = require('multiformats/hashes/sha2') +const { decode } = require('@ipld/dag-pb') const { UnixFS } = require('ipfs-unixfs') const builder = require('../src/dag-builder') const first = 
require('it-first') @@ -16,21 +14,14 @@ const defaultOptions = require('../src/options') const asAsyncIterable = require('./helpers/as-async-iterable') describe('builder', () => { - /** @type {import('ipld')} */ - let ipld /** @type {import('../src').BlockAPI} */ - let block + const block = blockApi() - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - - const testMultihashes = Object.keys(mh.names).slice(1, 10) + const testMultihashes = [sha256, sha512] it('allows multihash hash algorithm to be specified', async () => { for (let i = 0; i < testMultihashes.length; i++) { - const hashAlg = testMultihashes[i] + const hasher = testMultihashes[i] const content = uint8ArrayFromString(String(Math.random() + Date.now())) const inputFile = { path: content + '.txt', @@ -39,8 +30,7 @@ describe('builder', () => { const result = await first(builder([inputFile], block, { ...defaultOptions(), - // @ts-ignore thinks these aren't valid hash alg names - hashAlg + hasher })) if (!result) { @@ -48,15 +38,17 @@ describe('builder', () => { } const imported = await result() - expect(imported).to.exist() - // Verify multihash has been encoded using hashAlg - expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg) - - // Fetch using hashAlg encoded multihash - const node = await ipld.get(imported.cid) + // Verify multihash has been encoded using hasher + expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) + // Fetch using hasher encoded multihash + const importedBlock = await block.get(imported.cid) + const node = decode(importedBlock.bytes) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const fetchedContent = UnixFS.unmarshal(node.Data).data expect(fetchedContent).to.deep.equal(content) } @@ -66,7 +58,7 @@ describe('builder', () => { this.timeout(30000) for (let i = 0; i < testMultihashes.length; i++) { - const hashAlg = testMultihashes[i] + const hasher = testMultihashes[i] const content = 
String(Math.random() + Date.now()) const inputFile = { path: content + '.txt', @@ -76,8 +68,7 @@ describe('builder', () => { const result = await first(builder([inputFile], block, { ...defaultOptions(), - // @ts-ignore thinks these aren't valid hash alg names - hashAlg + hasher })) if (!result) { @@ -87,21 +78,20 @@ describe('builder', () => { const imported = await result() expect(imported).to.exist() - expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg) + expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) } }) it('allows multihash hash algorithm to be specified for a directory', async () => { for (let i = 0; i < testMultihashes.length; i++) { - const hashAlg = testMultihashes[i] + const hasher = testMultihashes[i] const inputFile = { path: `${String(Math.random() + Date.now())}-dir` } const result = await first(builder([{ ...inputFile }], block, { ...defaultOptions(), - // @ts-ignore thinks these aren't valid hash alg names - hashAlg + hasher })) if (!result) { @@ -110,11 +100,15 @@ describe('builder', () => { const imported = await result() - expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg) + expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) - // Fetch using hashAlg encoded multihash - const node = await ipld.get(imported.cid) + // Fetch using hasher encoded multihash + const importedBlock = await block.get(imported.cid) + const node = decode(importedBlock.bytes) + if (!node.Data) { + throw new Error('PBNode Data undefined') + } const meta = UnixFS.unmarshal(node.Data) expect(meta.type).to.equal('directory') } diff --git a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js index ebb7a796..6e40a1c9 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js +++ b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js @@ -3,11 +3,9 @@ const { importer } = require('../src') const { expect } = 
require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') -const mc = require('multicodec') +const rawCodec = require('multiformats/codecs/raw') +const { sha256 } = require('multiformats/hashes/sha2') +const Block = require('multiformats/block') const blockApi = require('./helpers/block') const uint8ArrayFromString = require('uint8arrays/from-string') const { UnixFS } = require('ipfs-unixfs') @@ -18,10 +16,8 @@ const iter = async function * () { } describe('custom chunker', function () { - /** @type {import('ipld')} */ - let ipld /** @type {import('../src').BlockAPI} */ - let block + const block = blockApi() /** * @param {AsyncIterable} content @@ -32,9 +28,14 @@ describe('custom chunker', function () { * @param {Uint8Array} buf */ const put = async (buf) => { - const cid = await ipld.put(buf, mc.RAW) + const encodedBlock = await Block.encode({ + value: buf, + codec: rawCodec, + hasher: sha256 + }) + return { - cid, + cid: encodedBlock.cid, size: buf.length, unixfs: new UnixFS() } @@ -54,11 +55,6 @@ describe('custom chunker', function () { } } - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) - it('keeps custom chunking', async () => { const content = iter() for await (const part of importer([{ path: 'test', content }], block, { diff --git a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js index 9e67f52e..df59ce90 100644 --- a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js +++ b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js @@ -4,10 +4,6 @@ const { importer } = require('../src') const { expect } = require('aegir/utils/chai') -// @ts-ignore -const IPLD = require('ipld') -// @ts-ignore -const inMemory = require('ipld-in-memory') const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream') const first = 
require('it-first') const blockApi = require('./helpers/block') @@ -39,15 +35,8 @@ strategies.forEach(strategy => { } describe('go-ipfs interop using importer:' + strategy, () => { - /** @type {import('ipld')} */ - let ipld /** @type {import('../src').BlockAPI} */ - let block - - before(async () => { - ipld = await inMemory(IPLD) - block = blockApi(ipld) - }) + const block = blockApi() it('yields the same tree as go-ipfs', async function () { this.timeout(100 * 1000) diff --git a/packages/ipfs-unixfs-importer/test/helpers/block.js b/packages/ipfs-unixfs-importer/test/helpers/block.js index 330d8a14..f3dd708c 100644 --- a/packages/ipfs-unixfs-importer/test/helpers/block.js +++ b/packages/ipfs-unixfs-importer/test/helpers/block.js @@ -1,60 +1,27 @@ 'use strict' -const { - DAGNode, - util -} = require('ipld-dag-pb') -const multicodec = require('multicodec') -const mh = require('multihashing-async').multihash -const CID = require('cids') -const Block = require('ipld-block') +const errCode = require('err-code') -/** - * @param {import('ipld')} ipld - */ -function createBlockApi (ipld) { - // make ipld behave like the block api, some tests need to pull - // data from ipld so can't use a simple in-memory cid->block map - /** @type {import('../../src/types').BlockAPI} */ - const BlockApi = { - put: async (buf, options) => { - if (!options || !options.cid) { - throw new Error('No cid passed') - } - - const cid = new CID(options.cid) - - const multihash = mh.decode(cid.multihash) - - if (Block.isBlock(buf)) { - buf = buf.data - } +function createBlockApi () { + /** @type {{[key: string]: Uint8Array}} */ + const blocks = {} - /** @type {any} */ - let obj = buf - - if (cid.codec === 'dag-pb') { - obj = util.deserialize(buf) + /** @type {import('../../src').BlockAPI} */ + const BlockApi = { + put: async ({ cid, bytes }, options) => { + if (!options || !options.onlyHash) { + blocks[cid.toV1().toString()] = bytes } - await ipld.put(obj, cid.codec === 'dag-pb' ? 
multicodec.DAG_PB : multicodec.RAW, { - cidVersion: cid.version, - hashAlg: multihash.code - }) - - return new Block(buf, cid) + return { cid, bytes } }, - get: async (cid, options) => { - cid = new CID(cid) - - /** @type {Uint8Array} */ - let buf = await ipld.get(cid, options) - - if (buf instanceof DAGNode) { - buf = buf.serialize() + get: async (cid, _options) => { + const bytes = blocks[cid.toV1().toString()] + if (bytes === undefined) { + throw errCode(new Error(`Couold not find data for CID '${cid}'`), 'ERR_NOT_FOUND') } - return new Block(buf, cid) + return { cid, bytes } } } diff --git a/packages/ipfs-unixfs-importer/tsconfig.json b/packages/ipfs-unixfs-importer/tsconfig.json index c6eb8f25..ecced392 100644 --- a/packages/ipfs-unixfs-importer/tsconfig.json +++ b/packages/ipfs-unixfs-importer/tsconfig.json @@ -10,7 +10,8 @@ ], "exclude": [ "dist", - "node_modules" + "node_modules", + "src/unixfs.js" ], "references": [ { diff --git a/packages/ipfs-unixfs/dist/src/index.d.ts b/packages/ipfs-unixfs/dist/src/index.d.ts new file mode 100644 index 00000000..02e4ff88 --- /dev/null +++ b/packages/ipfs-unixfs/dist/src/index.d.ts @@ -0,0 +1,73 @@ +export type Mtime = import('./types').Mtime; +export type MtimeLike = import('./types').MtimeLike; +declare class Data { + /** + * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md + * + * @param {Uint8Array} marshaled + */ + static unmarshal(marshaled: Uint8Array): Data; + /** + * @param {object} [options] + * @param {string} [options.type='file'] + * @param {Uint8Array} [options.data] + * @param {number[]} [options.blockSizes] + * @param {number} [options.hashType] + * @param {number} [options.fanout] + * @param {MtimeLike | null} [options.mtime] + * @param {number | string} [options.mode] + */ + constructor(options?: { + type?: string | undefined; + data?: Uint8Array | undefined; + blockSizes?: number[] | undefined; + hashType?: number | undefined; + fanout?: number | undefined; + mtime?: 
import("./types").MtimeLike | null | undefined; + mode?: string | number | undefined; + } | undefined); + type: string; + data: Uint8Array | undefined; + hashType: number | undefined; + fanout: number | undefined; + /** @type {number[]} */ + blockSizes: number[]; + _originalMode: number; + /** + * @param {number | undefined} mode + */ + set mode(arg: number | undefined); + /** + * @returns {number | undefined} + */ + get mode(): number | undefined; + mtime: import("./types").Mtime | undefined; + _mode: number | undefined; + isDirectory(): boolean; + /** + * @param {number} size + */ + addBlockSize(size: number): void; + /** + * @param {number} index + */ + removeBlockSize(index: number): void; + /** + * Returns `0` for directories or `data.length + sum(blockSizes)` for everything else + */ + fileSize(): number; + /** + * encode to protobuf Uint8Array + */ + marshal(): Uint8Array; +} +/** + * @param {string | number | undefined} [mode] + */ +export function parseMode(mode?: string | number | undefined): number | undefined; +/** + * @param {any} input + */ +export function parseMtime(input: any): import("./types").Mtime | undefined; +export { Data as UnixFS }; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs/dist/src/index.d.ts.map b/packages/ipfs-unixfs/dist/src/index.d.ts.map new file mode 100644 index 00000000..7f8672a2 --- /dev/null +++ b/packages/ipfs-unixfs/dist/src/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.js"],"names":[],"mappings":"oBAQa,OAAO,SAAS,EAAE,KAAK;wBACvB,OAAO,SAAS,EAAE,SAAS;AAmHxC;IACE;;;;OAIG;IACH,4BAFW,UAAU,QA4BpB;IAED;;;;;;;;;OASG;IACH;;;;;;;;mBAkCC;IAjBC,aAA0B;IAC1B,6BAAgB;IAChB,6BAAwB;IACxB,2BAAoB;IAEpB,uBAAuB;IACvB,YADW,MAAM,EAAE,CACe;IAClC,sBAAsB;IAYxB;;OAEG;IACH,kCAQC;IAED;;OAEG;IACH,+BAEC;IA1BG,2CAA8B;IAYhC,0BAA4E;IAgB9E,uBAEC;IAED;;OAEG;IACH,mBAFW,MAAM,QAIhB;IAED;;OAEG;IACH,uBAFW,MAAM,QAIhB;IAED;;OAEG;IACH,mBAgBC;IAED;;OAEG;IACH,sBA+DC;CACF;AA7SD;;GAEG;AACH,iCAFW,MAAM,GAAG,MAAM,GAAG,SAAS,sBAoBrC;AAED;;GAEG;AACH,kCAFW,GAAG,uCAqEb"} \ No newline at end of file diff --git a/packages/ipfs-unixfs/dist/src/types.d.ts b/packages/ipfs-unixfs/dist/src/types.d.ts new file mode 100644 index 00000000..cedc5057 --- /dev/null +++ b/packages/ipfs-unixfs/dist/src/types.d.ts @@ -0,0 +1,7 @@ + +export interface Mtime { + secs: number + nsecs?: number +} + +export type MtimeLike = Mtime | { Seconds: number, FractionalNanoseconds?: number } | [number, number] | Date diff --git a/packages/ipfs-unixfs/dist/src/unixfs.d.ts b/packages/ipfs-unixfs/dist/src/unixfs.d.ts new file mode 100644 index 00000000..ca5a8549 --- /dev/null +++ b/packages/ipfs-unixfs/dist/src/unixfs.d.ts @@ -0,0 +1,238 @@ +import * as $protobuf from "protobufjs"; +/** Properties of a Data. */ +export interface IData { + + /** Data Type */ + Type: Data.DataType; + + /** Data Data */ + Data?: (Uint8Array|null); + + /** Data filesize */ + filesize?: (number|null); + + /** Data blocksizes */ + blocksizes?: (number[]|null); + + /** Data hashType */ + hashType?: (number|null); + + /** Data fanout */ + fanout?: (number|null); + + /** Data mode */ + mode?: (number|null); + + /** Data mtime */ + mtime?: (IUnixTime|null); +} + +/** Represents a Data. */ +export class Data implements IData { + + /** + * Constructs a new Data. + * @param [p] Properties to set + */ + constructor(p?: IData); + + /** Data Type. 
*/ + public Type: Data.DataType; + + /** Data Data. */ + public Data: Uint8Array; + + /** Data filesize. */ + public filesize: number; + + /** Data blocksizes. */ + public blocksizes: number[]; + + /** Data hashType. */ + public hashType: number; + + /** Data fanout. */ + public fanout: number; + + /** Data mode. */ + public mode: number; + + /** Data mtime. */ + public mtime?: (IUnixTime|null); + + /** + * Encodes the specified Data message. Does not implicitly {@link Data.verify|verify} messages. + * @param m Data message or plain object to encode + * @param [w] Writer to encode to + * @returns Writer + */ + public static encode(m: IData, w?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Data message from the specified reader or buffer. + * @param r Reader or buffer to decode from + * @param [l] Message length if known beforehand + * @returns Data + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Data; + + /** + * Creates a Data message from a plain object. Also converts values to their respective internal types. + * @param d Plain object + * @returns Data + */ + public static fromObject(d: { [k: string]: any }): Data; + + /** + * Creates a plain object from a Data message. Also converts values to other types if specified. + * @param m Data + * @param [o] Conversion options + * @returns Plain object + */ + public static toObject(m: Data, o?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Data to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; +} + +export namespace Data { + + /** DataType enum. */ + enum DataType { + Raw = 0, + Directory = 1, + File = 2, + Metadata = 3, + Symlink = 4, + HAMTShard = 5 + } +} + +/** Properties of an UnixTime. 
*/ +export interface IUnixTime { + + /** UnixTime Seconds */ + Seconds: number; + + /** UnixTime FractionalNanoseconds */ + FractionalNanoseconds?: (number|null); +} + +/** Represents an UnixTime. */ +export class UnixTime implements IUnixTime { + + /** + * Constructs a new UnixTime. + * @param [p] Properties to set + */ + constructor(p?: IUnixTime); + + /** UnixTime Seconds. */ + public Seconds: number; + + /** UnixTime FractionalNanoseconds. */ + public FractionalNanoseconds: number; + + /** + * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages. + * @param m UnixTime message or plain object to encode + * @param [w] Writer to encode to + * @returns Writer + */ + public static encode(m: IUnixTime, w?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an UnixTime message from the specified reader or buffer. + * @param r Reader or buffer to decode from + * @param [l] Message length if known beforehand + * @returns UnixTime + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): UnixTime; + + /** + * Creates an UnixTime message from a plain object. Also converts values to their respective internal types. + * @param d Plain object + * @returns UnixTime + */ + public static fromObject(d: { [k: string]: any }): UnixTime; + + /** + * Creates a plain object from an UnixTime message. Also converts values to other types if specified. + * @param m UnixTime + * @param [o] Conversion options + * @returns Plain object + */ + public static toObject(m: UnixTime, o?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this UnixTime to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; +} + +/** Properties of a Metadata. 
*/ +export interface IMetadata { + + /** Metadata MimeType */ + MimeType?: (string|null); +} + +/** Represents a Metadata. */ +export class Metadata implements IMetadata { + + /** + * Constructs a new Metadata. + * @param [p] Properties to set + */ + constructor(p?: IMetadata); + + /** Metadata MimeType. */ + public MimeType: string; + + /** + * Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages. + * @param m Metadata message or plain object to encode + * @param [w] Writer to encode to + * @returns Writer + */ + public static encode(m: IMetadata, w?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Metadata message from the specified reader or buffer. + * @param r Reader or buffer to decode from + * @param [l] Message length if known beforehand + * @returns Metadata + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Metadata; + + /** + * Creates a Metadata message from a plain object. Also converts values to their respective internal types. + * @param d Plain object + * @returns Metadata + */ + public static fromObject(d: { [k: string]: any }): Metadata; + + /** + * Creates a plain object from a Metadata message. Also converts values to other types if specified. + * @param m Metadata + * @param [o] Conversion options + * @returns Plain object + */ + public static toObject(m: Metadata, o?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Metadata to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; +} diff --git a/packages/ipfs-unixfs/dist/test/unixfs-format.spec.d.ts b/packages/ipfs-unixfs/dist/test/unixfs-format.spec.d.ts new file mode 100644 index 00000000..2e4973f1 --- /dev/null +++ b/packages/ipfs-unixfs/dist/test/unixfs-format.spec.d.ts @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=unixfs-format.spec.d.ts.map \ No newline at end of file diff --git a/packages/ipfs-unixfs/dist/test/unixfs-format.spec.d.ts.map b/packages/ipfs-unixfs/dist/test/unixfs-format.spec.d.ts.map new file mode 100644 index 00000000..3fd95c66 --- /dev/null +++ b/packages/ipfs-unixfs/dist/test/unixfs-format.spec.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"unixfs-format.spec.d.ts","sourceRoot":"","sources":["../../test/unixfs-format.spec.js"],"names":[],"mappings":""} \ No newline at end of file diff --git a/packages/ipfs-unixfs/dist/tsconfig.tsbuildinfo b/packages/ipfs-unixfs/dist/tsconfig.tsbuildinfo new file mode 100644 index 00000000..b6dcbb3d --- /dev/null +++ b/packages/ipfs-unixfs/dist/tsconfig.tsbuildinfo @@ -0,0 +1,1232 @@ +{ + "program": { + "fileInfos": { + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es5.d.ts": { + "version": "b3584bc5798ed422ce2516df360ffa9cf2d80b5eae852867db9ba3743145f895", + "signature": "b3584bc5798ed422ce2516df360ffa9cf2d80b5eae852867db9ba3743145f895", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.d.ts": { + "version": "dc47c4fa66b9b9890cf076304de2a9c5201e94b740cffdf09f87296d877d71f6", + "signature": "dc47c4fa66b9b9890cf076304de2a9c5201e94b740cffdf09f87296d877d71f6", + "affectsGlobalScope": false + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2016.d.ts": { + "version": "7a387c58583dfca701b6c85e0adaf43fb17d590fb16d5b2dc0a2fbd89f35c467", + "signature": "7a387c58583dfca701b6c85e0adaf43fb17d590fb16d5b2dc0a2fbd89f35c467", + "affectsGlobalScope": false + }, + 
"../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.d.ts": { + "version": "8a12173c586e95f4433e0c6dc446bc88346be73ffe9ca6eec7aa63c8f3dca7f9", + "signature": "8a12173c586e95f4433e0c6dc446bc88346be73ffe9ca6eec7aa63c8f3dca7f9", + "affectsGlobalScope": false + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.d.ts": { + "version": "5f4e733ced4e129482ae2186aae29fde948ab7182844c3a5a51dd346182c7b06", + "signature": "5f4e733ced4e129482ae2186aae29fde948ab7182844c3a5a51dd346182c7b06", + "affectsGlobalScope": false + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.d.ts": { + "version": "e6b724280c694a9f588847f754198fb96c43d805f065c3a5b28bbc9594541c84", + "signature": "e6b724280c694a9f588847f754198fb96c43d805f065c3a5b28bbc9594541c84", + "affectsGlobalScope": false + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.d.ts": { + "version": "e21c071ca3e1b4a815d5f04a7475adcaeea5d64367e840dd0154096d705c3940", + "signature": "e21c071ca3e1b4a815d5f04a7475adcaeea5d64367e840dd0154096d705c3940", + "affectsGlobalScope": false + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.dom.d.ts": { + "version": "feeeb1dd8a80fb76be42b0426e8f3ffa9bdef3c2f3c12c147e7660b1c5ba8b3b", + "signature": "feeeb1dd8a80fb76be42b0426e8f3ffa9bdef3c2f3c12c147e7660b1c5ba8b3b", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.dom.iterable.d.ts": { + "version": "d42f4141bd9ce82b4e2902f26acb00c183e321be19a38bbc0e76a922c1724c94", + "signature": "d42f4141bd9ce82b4e2902f26acb00c183e321be19a38bbc0e76a922c1724c94", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.core.d.ts": { + "version": "46ee15e9fefa913333b61eaf6b18885900b139867d89832a515059b62cf16a17", + "signature": "46ee15e9fefa913333b61eaf6b18885900b139867d89832a515059b62cf16a17", + "affectsGlobalScope": true + }, + 
"../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.collection.d.ts": { + "version": "43fb1d932e4966a39a41b464a12a81899d9ae5f2c829063f5571b6b87e6d2f9c", + "signature": "43fb1d932e4966a39a41b464a12a81899d9ae5f2c829063f5571b6b87e6d2f9c", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.generator.d.ts": { + "version": "cdccba9a388c2ee3fd6ad4018c640a471a6c060e96f1232062223063b0a5ac6a", + "signature": "cdccba9a388c2ee3fd6ad4018c640a471a6c060e96f1232062223063b0a5ac6a", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.iterable.d.ts": { + "version": "8b2a5df1ce95f78f6b74f1a555ccdb6baab0486b42d8345e0871dd82811f9b9a", + "signature": "8b2a5df1ce95f78f6b74f1a555ccdb6baab0486b42d8345e0871dd82811f9b9a", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.promise.d.ts": { + "version": "2bb4b3927299434052b37851a47bf5c39764f2ba88a888a107b32262e9292b7c", + "signature": "2bb4b3927299434052b37851a47bf5c39764f2ba88a888a107b32262e9292b7c", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.proxy.d.ts": { + "version": "810627a82ac06fb5166da5ada4159c4ec11978dfbb0805fe804c86406dab8357", + "signature": "810627a82ac06fb5166da5ada4159c4ec11978dfbb0805fe804c86406dab8357", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.reflect.d.ts": { + "version": "62d80405c46c3f4c527ee657ae9d43fda65a0bf582292429aea1e69144a522a6", + "signature": "62d80405c46c3f4c527ee657ae9d43fda65a0bf582292429aea1e69144a522a6", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.symbol.d.ts": { + "version": "3013574108c36fd3aaca79764002b3717da09725a36a6fc02eac386593110f93", + "signature": "3013574108c36fd3aaca79764002b3717da09725a36a6fc02eac386593110f93", + "affectsGlobalScope": true + }, + 
"../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts": { + "version": "9d122b7e8c1a5c72506eea50c0973cba55b92b5532d5cafa8a6ce2c547d57551", + "signature": "9d122b7e8c1a5c72506eea50c0973cba55b92b5532d5cafa8a6ce2c547d57551", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2016.array.include.d.ts": { + "version": "3be5a1453daa63e031d266bf342f3943603873d890ab8b9ada95e22389389006", + "signature": "3be5a1453daa63e031d266bf342f3943603873d890ab8b9ada95e22389389006", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.object.d.ts": { + "version": "17bb1fc99591b00515502d264fa55dc8370c45c5298f4a5c2083557dccba5a2a", + "signature": "17bb1fc99591b00515502d264fa55dc8370c45c5298f4a5c2083557dccba5a2a", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts": { + "version": "7ce9f0bde3307ca1f944119f6365f2d776d281a393b576a18a2f2893a2d75c98", + "signature": "7ce9f0bde3307ca1f944119f6365f2d776d281a393b576a18a2f2893a2d75c98", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.string.d.ts": { + "version": "6a6b173e739a6a99629a8594bfb294cc7329bfb7b227f12e1f7c11bc163b8577", + "signature": "6a6b173e739a6a99629a8594bfb294cc7329bfb7b227f12e1f7c11bc163b8577", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.intl.d.ts": { + "version": "12a310447c5d23c7d0d5ca2af606e3bd08afda69100166730ab92c62999ebb9d", + "signature": "12a310447c5d23c7d0d5ca2af606e3bd08afda69100166730ab92c62999ebb9d", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.typedarrays.d.ts": { + "version": "b0124885ef82641903d232172577f2ceb5d3e60aed4da1153bab4221e1f6dd4e", + "signature": "b0124885ef82641903d232172577f2ceb5d3e60aed4da1153bab4221e1f6dd4e", + "affectsGlobalScope": true 
+ }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts": { + "version": "0eb85d6c590b0d577919a79e0084fa1744c1beba6fd0d4e951432fa1ede5510a", + "signature": "0eb85d6c590b0d577919a79e0084fa1744c1beba6fd0d4e951432fa1ede5510a", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.asynciterable.d.ts": { + "version": "a40c4d82bf13fcded295ac29f354eb7d40249613c15e07b53f2fc75e45e16359", + "signature": "a40c4d82bf13fcded295ac29f354eb7d40249613c15e07b53f2fc75e45e16359", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.intl.d.ts": { + "version": "df9c8a72ca8b0ed62f5470b41208a0587f0f73f0a7db28e5a1272cf92537518e", + "signature": "df9c8a72ca8b0ed62f5470b41208a0587f0f73f0a7db28e5a1272cf92537518e", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.promise.d.ts": { + "version": "bb2d3fb05a1d2ffbca947cc7cbc95d23e1d053d6595391bd325deb265a18d36c", + "signature": "bb2d3fb05a1d2ffbca947cc7cbc95d23e1d053d6595391bd325deb265a18d36c", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.regexp.d.ts": { + "version": "c80df75850fea5caa2afe43b9949338ce4e2de086f91713e9af1a06f973872b8", + "signature": "c80df75850fea5caa2afe43b9949338ce4e2de086f91713e9af1a06f973872b8", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.array.d.ts": { + "version": "9d57b2b5d15838ed094aa9ff1299eecef40b190722eb619bac4616657a05f951", + "signature": "9d57b2b5d15838ed094aa9ff1299eecef40b190722eb619bac4616657a05f951", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.object.d.ts": { + "version": "6c51b5dd26a2c31dbf37f00cfc32b2aa6a92e19c995aefb5b97a3a64f1ac99de", + "signature": "6c51b5dd26a2c31dbf37f00cfc32b2aa6a92e19c995aefb5b97a3a64f1ac99de", + "affectsGlobalScope": true + }, 
+ "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.string.d.ts": { + "version": "93544ca2f26a48716c1b6c5091842cad63129daac422dfa4bc52460465f22bb1", + "signature": "93544ca2f26a48716c1b6c5091842cad63129daac422dfa4bc52460465f22bb1", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.symbol.d.ts": { + "version": "2ad234885a4240522efccd77de6c7d99eecf9b4de0914adb9a35c0c22433f993", + "signature": "2ad234885a4240522efccd77de6c7d99eecf9b4de0914adb9a35c0c22433f993", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.bigint.d.ts": { + "version": "7b5a10e3c897fabece5a51aa85b4111727d7adb53c2734b5d37230ff96802a09", + "signature": "7b5a10e3c897fabece5a51aa85b4111727d7adb53c2734b5d37230ff96802a09", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.promise.d.ts": { + "version": "7435b75fdf3509622e79622dbe5091cf4b09688410ee2034e4fc17d0c99d0862", + "signature": "7435b75fdf3509622e79622dbe5091cf4b09688410ee2034e4fc17d0c99d0862", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.sharedmemory.d.ts": { + "version": "e7e8e1d368290e9295ef18ca23f405cf40d5456fa9f20db6373a61ca45f75f40", + "signature": "e7e8e1d368290e9295ef18ca23f405cf40d5456fa9f20db6373a61ca45f75f40", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.string.d.ts": { + "version": "faf0221ae0465363c842ce6aa8a0cbda5d9296940a8e26c86e04cc4081eea21e", + "signature": "faf0221ae0465363c842ce6aa8a0cbda5d9296940a8e26c86e04cc4081eea21e", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.symbol.wellknown.d.ts": { + "version": "936d7d2e8851af9ccfa5333b15e877a824417d352b1d7fd06388639dc69ef80a", + "signature": "936d7d2e8851af9ccfa5333b15e877a824417d352b1d7fd06388639dc69ef80a", + "affectsGlobalScope": true + }, + 
"../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.intl.d.ts": { + "version": "e79ca55569f09a5dc3354be04dba4ae85865b1dce98bf46738ffe231c669621f", + "signature": "e79ca55569f09a5dc3354be04dba4ae85865b1dce98bf46738ffe231c669621f", + "affectsGlobalScope": true + }, + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.esnext.intl.d.ts": { + "version": "506b80b9951c9381dc5f11897b31fca5e2a65731d96ddefa19687fbc26b23c6e", + "signature": "506b80b9951c9381dc5f11897b31fca5e2a65731d96ddefa19687fbc26b23c6e", + "affectsGlobalScope": true + }, + "../node_modules/protobufjs/index.d.ts": { + "version": "1558c642e03689d42843e7b047b9c20e77ee09ab388ff854484db5dcfbed11da", + "signature": "1558c642e03689d42843e7b047b9c20e77ee09ab388ff854484db5dcfbed11da", + "affectsGlobalScope": false + }, + "../src/unixfs.d.ts": { + "version": "73d05042815b5a14ebaf565ad246e1cc17b9a328d80da339bdd7b40f60f07828", + "signature": "73d05042815b5a14ebaf565ad246e1cc17b9a328d80da339bdd7b40f60f07828", + "affectsGlobalScope": false + }, + "../node_modules/err-code/dist/index.d.ts": { + "version": "a02a19deaa2c497d36c136459dd2561b57ebb22075ac0d6aad176430a8639aa1", + "signature": "a02a19deaa2c497d36c136459dd2561b57ebb22075ac0d6aad176430a8639aa1", + "affectsGlobalScope": false + }, + "../src/types.d.ts": { + "version": "df037a1c4bb54f6aa65f2482a6b1d976b5b3c31d49ea5b7deb10197e0608ae9e", + "signature": "df037a1c4bb54f6aa65f2482a6b1d976b5b3c31d49ea5b7deb10197e0608ae9e", + "affectsGlobalScope": false + }, + "../src/index.js": { + "version": "e6b6c563ab9741f3a6da94ed2e2390b207ec4bf2fe0fd0722df8acfdc930a713", + "signature": "ae7b1240bfe299fcb85712afff404a5b90c533375f4edd4b3c1515aaf1b5631d", + "affectsGlobalScope": true + }, + "../node_modules/@types/chai/index.d.ts": { + "version": "f5fcdcb84e1594e419ad42eb62d96491739433c90bb810edbacf4e7d2908865c", + "signature": "f5fcdcb84e1594e419ad42eb62d96491739433c90bb810edbacf4e7d2908865c", + "affectsGlobalScope": true + }, + 
"../node_modules/@types/chai-as-promised/index.d.ts": { + "version": "f6ae17283c6912c202004178339d6d22f8c9edfe4e335f9f11b555c631633daf", + "signature": "f6ae17283c6912c202004178339d6d22f8c9edfe4e335f9f11b555c631633daf", + "affectsGlobalScope": true + }, + "../node_modules/chai-parentheses/index.d.ts": { + "version": "c1f79c6d85cd84518fd7349588b3c61bdc189f58e3866f925cbf4631c81e31f9", + "signature": "c1f79c6d85cd84518fd7349588b3c61bdc189f58e3866f925cbf4631c81e31f9", + "affectsGlobalScope": true + }, + "../node_modules/@types/chai-subset/index.d.ts": { + "version": "f4c0db3a49cea9babd5d224ba14243a6a6119bf65a65198994033aaea3a60a71", + "signature": "f4c0db3a49cea9babd5d224ba14243a6a6119bf65a65198994033aaea3a60a71", + "affectsGlobalScope": true + }, + "../node_modules/chai-bytes/index.d.ts": { + "version": "c6950fb69844190789c747a2ee2d49f34bd84273d692176d2c54d18d95fa3387", + "signature": "c6950fb69844190789c747a2ee2d49f34bd84273d692176d2c54d18d95fa3387", + "affectsGlobalScope": true + }, + "../node_modules/aegir/dist/utils/chai.d.ts": { + "version": "33f12bdfbbb9a552db999b771a6e831fd0c94b321f958a1473def67b2b3e473f", + "signature": "33f12bdfbbb9a552db999b771a6e831fd0c94b321f958a1473def67b2b3e473f", + "affectsGlobalScope": false + }, + "../node_modules/aegir/dist/utils/fixtures.d.ts": { + "version": "a8c10138639ce587acf8a6ce227177a737708fb592705fc0754c12d3a18b5130", + "signature": "a8c10138639ce587acf8a6ce227177a737708fb592705fc0754c12d3a18b5130", + "affectsGlobalScope": false + }, + "../node_modules/multibase/src/types.d.ts": { + "version": "40166c5a74c5420962d2f223211c3b99be001a5b60a23828d1c9eb7a768a877f", + "signature": "40166c5a74c5420962d2f223211c3b99be001a5b60a23828d1c9eb7a768a877f", + "affectsGlobalScope": false + }, + "../node_modules/uint8arrays/dist/from-string.d.ts": { + "version": "c44e238f0d17834e54b2fc91fbda1f4c42e807e26aaf052ed67a427c5870b963", + "signature": "c44e238f0d17834e54b2fc91fbda1f4c42e807e26aaf052ed67a427c5870b963", + "affectsGlobalScope": false + 
}, + "../test/unixfs-format.spec.js": { + "version": "c9e3da89b30af692dbc91420a4eb9d0cf6322b8a966d5adb3e868e843cabf084", + "signature": "f40e8bdafdffb90065d371f14774f2f279ec0f96ca7814be66c81f6fe84fcf2c", + "affectsGlobalScope": true + }, + "../node_modules/@types/istanbul-lib-coverage/index.d.ts": { + "version": "de18acda71730bac52f4b256ce7511bb56cc21f6f114c59c46782eff2f632857", + "signature": "de18acda71730bac52f4b256ce7511bb56cc21f6f114c59c46782eff2f632857", + "affectsGlobalScope": false + }, + "../node_modules/@types/json-schema/index.d.ts": { + "version": "3a1e165b22a1cb8df82c44c9a09502fd2b33f160cd277de2cd3a055d8e5c6b27", + "signature": "3a1e165b22a1cb8df82c44c9a09502fd2b33f160cd277de2cd3a055d8e5c6b27", + "affectsGlobalScope": false + }, + "../node_modules/@types/json5/index.d.ts": { + "version": "96d14f21b7652903852eef49379d04dbda28c16ed36468f8c9fa08f7c14c9538", + "signature": "96d14f21b7652903852eef49379d04dbda28c16ed36468f8c9fa08f7c14c9538", + "affectsGlobalScope": false + }, + "../node_modules/@types/long/index.d.ts": { + "version": "e8465811693dfe4e96ef2b3dffda539d6edfe896961b7af37b44db2c0e48532b", + "signature": "e8465811693dfe4e96ef2b3dffda539d6edfe896961b7af37b44db2c0e48532b", + "affectsGlobalScope": false + }, + "../node_modules/@types/minimist/index.d.ts": { + "version": "e437d83044ba17246a861aa9691aa14223ff4a9d6f338ab1269c41c758586a88", + "signature": "e437d83044ba17246a861aa9691aa14223ff4a9d6f338ab1269c41c758586a88", + "affectsGlobalScope": false + }, + "../node_modules/@types/mocha/index.d.ts": { + "version": "0359800d3b440f8515001431cde1500944e156040577425eb3f7b80af0846612", + "signature": "0359800d3b440f8515001431cde1500944e156040577425eb3f7b80af0846612", + "affectsGlobalScope": true + }, + "../node_modules/@types/node/assert/strict.d.ts": { + "version": "c7bdc99177a2a94d25fb13722adaaf5b3291bf70b4d1b27584ba189dd3889ba3", + "signature": "c7bdc99177a2a94d25fb13722adaaf5b3291bf70b4d1b27584ba189dd3889ba3", + "affectsGlobalScope": false + }, + 
"../node_modules/@types/node/globals.d.ts": { + "version": "5402314c88d0127f63f94a0272f79e04ea0fc010ff6da6613807504c4163a1ad", + "signature": "5402314c88d0127f63f94a0272f79e04ea0fc010ff6da6613807504c4163a1ad", + "affectsGlobalScope": true + }, + "../node_modules/@types/node/async_hooks.d.ts": { + "version": "c9e8a340da877b05a52525554aa255b3f44958c7f6748ebf5cbe0bfbe6766878", + "signature": "c9e8a340da877b05a52525554aa255b3f44958c7f6748ebf5cbe0bfbe6766878", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/buffer.d.ts": { + "version": "a473cf45c3d9809518f8af913312139d9f4db6887dc554e0d06d0f4e52722e6b", + "signature": "a473cf45c3d9809518f8af913312139d9f4db6887dc554e0d06d0f4e52722e6b", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/child_process.d.ts": { + "version": "a668dfae917097b30fc29bbebeeb869cee22529f2aa9976cea03c7e834a1b841", + "signature": "a668dfae917097b30fc29bbebeeb869cee22529f2aa9976cea03c7e834a1b841", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/cluster.d.ts": { + "version": "04eaa93bd75f937f9184dcb95a7983800c5770cf8ddd8ac0f3734dc02f5b20ef", + "signature": "04eaa93bd75f937f9184dcb95a7983800c5770cf8ddd8ac0f3734dc02f5b20ef", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/console.d.ts": { + "version": "c8155caf28fc7b0a564156a5df28ad8a844a3bd32d331d148d8f3ce88025c870", + "signature": "c8155caf28fc7b0a564156a5df28ad8a844a3bd32d331d148d8f3ce88025c870", + "affectsGlobalScope": true + }, + "../node_modules/@types/node/constants.d.ts": { + "version": "45ac321f2e15d268fd74a90ddaa6467dcaaff2c5b13f95b4b85831520fb7a491", + "signature": "45ac321f2e15d268fd74a90ddaa6467dcaaff2c5b13f95b4b85831520fb7a491", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/crypto.d.ts": { + "version": "51a77d65fbadc93dcdf454ea1d94bd9f1f146008d890bea4f168dd397192b9fb", + "signature": "51a77d65fbadc93dcdf454ea1d94bd9f1f146008d890bea4f168dd397192b9fb", + "affectsGlobalScope": false + }, + 
"../node_modules/@types/node/dgram.d.ts": { + "version": "797a9d37eb1f76143311c3f0a186ce5c0d8735e94c0ca08ff8712a876c9b4f9e", + "signature": "797a9d37eb1f76143311c3f0a186ce5c0d8735e94c0ca08ff8712a876c9b4f9e", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/dns.d.ts": { + "version": "ec7dafafe751a5121f8f1c80201ebe7e7238c47e6329280a73c4d1ca4bb7fa28", + "signature": "ec7dafafe751a5121f8f1c80201ebe7e7238c47e6329280a73c4d1ca4bb7fa28", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/dns/promises.d.ts": { + "version": "64debeb10e4b7ae4ec9e89bfb4e04c6101ab98c3cc806d14e5488607cfec2753", + "signature": "64debeb10e4b7ae4ec9e89bfb4e04c6101ab98c3cc806d14e5488607cfec2753", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/domain.d.ts": { + "version": "2866a528b2708aa272ec3eaafd3c980abb23aec1ef831cfc5eb2186b98c37ce5", + "signature": "2866a528b2708aa272ec3eaafd3c980abb23aec1ef831cfc5eb2186b98c37ce5", + "affectsGlobalScope": true + }, + "../node_modules/@types/node/events.d.ts": { + "version": "36ba0e764ace4fb55e6165c5f33f2826f88b1d3767f7ec658fe2f6c85ac5e776", + "signature": "36ba0e764ace4fb55e6165c5f33f2826f88b1d3767f7ec658fe2f6c85ac5e776", + "affectsGlobalScope": true + }, + "../node_modules/@types/node/fs.d.ts": { + "version": "d3315298050252531e7db9046296ca2162fa5218a85b10a62ed79140e3822e3c", + "signature": "d3315298050252531e7db9046296ca2162fa5218a85b10a62ed79140e3822e3c", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/fs/promises.d.ts": { + "version": "bdaf554ae2d9d09e2a42f58a29ef7f80e5b5c1d7b96bfb717243dc91a477216e", + "signature": "bdaf554ae2d9d09e2a42f58a29ef7f80e5b5c1d7b96bfb717243dc91a477216e", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/http.d.ts": { + "version": "bd311ff12ac011f5531edd217189606e7a697376d6109e8a18361358f6249b46", + "signature": "bd311ff12ac011f5531edd217189606e7a697376d6109e8a18361358f6249b46", + "affectsGlobalScope": false + }, + 
"../node_modules/@types/node/http2.d.ts": { + "version": "a870e564e578dccc7b34018d6919f9466bae462b4bafc3449ca63a2331ac27c5", + "signature": "a870e564e578dccc7b34018d6919f9466bae462b4bafc3449ca63a2331ac27c5", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/https.d.ts": { + "version": "13257840c0850d4ebd7c2b17604a9e006f752de76c2400ebc752bc465c330452", + "signature": "13257840c0850d4ebd7c2b17604a9e006f752de76c2400ebc752bc465c330452", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/inspector.d.ts": { + "version": "42176966283d3835c34278b9b5c0f470d484c0c0c6a55c20a2c916a1ce69b6e8", + "signature": "42176966283d3835c34278b9b5c0f470d484c0c0c6a55c20a2c916a1ce69b6e8", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/module.d.ts": { + "version": "0cff7901aedfe78e314f7d44088f07e2afa1b6e4f0473a4169b8456ca2fb245d", + "signature": "0cff7901aedfe78e314f7d44088f07e2afa1b6e4f0473a4169b8456ca2fb245d", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/net.d.ts": { + "version": "1eaf8fecdae37cc4c85f496d2bdb9e8d46c21b3643b7e27d3646a330585515a5", + "signature": "1eaf8fecdae37cc4c85f496d2bdb9e8d46c21b3643b7e27d3646a330585515a5", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/os.d.ts": { + "version": "69640cc2e76dad52daeb9914e6b70c5c9a5591a3a65190a2d3ea432cf0015e16", + "signature": "69640cc2e76dad52daeb9914e6b70c5c9a5591a3a65190a2d3ea432cf0015e16", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/path.d.ts": { + "version": "21e64a125f65dff99cc3ed366c96e922b90daed343eb52ecdace5f220401dcda", + "signature": "21e64a125f65dff99cc3ed366c96e922b90daed343eb52ecdace5f220401dcda", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/perf_hooks.d.ts": { + "version": "4982d94cb6427263c8839d8d6324a8bbe129e931deb61a7380f8fad17ba2cfc0", + "signature": "4982d94cb6427263c8839d8d6324a8bbe129e931deb61a7380f8fad17ba2cfc0", + "affectsGlobalScope": false + }, + 
"../node_modules/@types/node/process.d.ts": { + "version": "61977e8f4c042abd392645828c90afd1b551a6fd32c51b93717c68003ce7c983", + "signature": "61977e8f4c042abd392645828c90afd1b551a6fd32c51b93717c68003ce7c983", + "affectsGlobalScope": true + }, + "../node_modules/@types/node/punycode.d.ts": { + "version": "7f77304372efe3c9967e5f9ea2061f1b4bf41dc3cda3c83cdd676f2e5af6b7e6", + "signature": "7f77304372efe3c9967e5f9ea2061f1b4bf41dc3cda3c83cdd676f2e5af6b7e6", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/querystring.d.ts": { + "version": "992c6f6be16c0a1d2eec13ece33adeea2c747ba27fcd078353a8f4bb5b4fea58", + "signature": "992c6f6be16c0a1d2eec13ece33adeea2c747ba27fcd078353a8f4bb5b4fea58", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/readline.d.ts": { + "version": "3b790d08129aca55fd5ae1672d1d26594147ac0d5f2eedc30c7575eb18daef7e", + "signature": "3b790d08129aca55fd5ae1672d1d26594147ac0d5f2eedc30c7575eb18daef7e", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/repl.d.ts": { + "version": "64535caf208a02420d2d04eb2029269efedd11eb8597ada0d5e6f3d54ec663ae", + "signature": "64535caf208a02420d2d04eb2029269efedd11eb8597ada0d5e6f3d54ec663ae", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/stream.d.ts": { + "version": "3b02b1e3c3a6730d79e2c8652f3be6a7caef1a604b9c5103abbbcea921694be1", + "signature": "3b02b1e3c3a6730d79e2c8652f3be6a7caef1a604b9c5103abbbcea921694be1", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/stream/promises.d.ts": { + "version": "fd2298fba0640e7295e7bd545e2dfbfcccbb00c27019e501c87965a02bbdebf6", + "signature": "fd2298fba0640e7295e7bd545e2dfbfcccbb00c27019e501c87965a02bbdebf6", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/string_decoder.d.ts": { + "version": "4fd3c4debadce3e9ab9dec3eb45f7f5e2e3d4ad65cf975a6d938d883cfb25a50", + "signature": "4fd3c4debadce3e9ab9dec3eb45f7f5e2e3d4ad65cf975a6d938d883cfb25a50", + "affectsGlobalScope": false + }, + 
"../node_modules/@types/node/timers.d.ts": { + "version": "f49709e8c096b05aca0674d39f471aa05261de7c756df9abdf4a53ed0fa98901", + "signature": "f49709e8c096b05aca0674d39f471aa05261de7c756df9abdf4a53ed0fa98901", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/timers/promises.d.ts": { + "version": "baca27d1de400e027cdc70217ca73e414002baef5798aa24a921097c20066fa1", + "signature": "baca27d1de400e027cdc70217ca73e414002baef5798aa24a921097c20066fa1", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/tls.d.ts": { + "version": "f89a6d56f0267f6e73c707f8a89d2f38e9928e10bfa505f39a4f4bf954093aee", + "signature": "f89a6d56f0267f6e73c707f8a89d2f38e9928e10bfa505f39a4f4bf954093aee", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/trace_events.d.ts": { + "version": "7df562288f949945cf69c21cd912100c2afedeeb7cdb219085f7f4b46cb7dde4", + "signature": "7df562288f949945cf69c21cd912100c2afedeeb7cdb219085f7f4b46cb7dde4", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/tty.d.ts": { + "version": "9d16690485ff1eb4f6fc57aebe237728fd8e03130c460919da3a35f4d9bd97f5", + "signature": "9d16690485ff1eb4f6fc57aebe237728fd8e03130c460919da3a35f4d9bd97f5", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/url.d.ts": { + "version": "dcc6910d95a3625fd2b0487fda055988e46ab46c357a1b3618c27b4a8dd739c9", + "signature": "dcc6910d95a3625fd2b0487fda055988e46ab46c357a1b3618c27b4a8dd739c9", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/util.d.ts": { + "version": "e649840284bab8c4d09cadc125cd7fbde7529690cc1a0881872b6a9cd202819b", + "signature": "e649840284bab8c4d09cadc125cd7fbde7529690cc1a0881872b6a9cd202819b", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/v8.d.ts": { + "version": "a364b4a8a015ae377052fa4fac94204d79a69d879567f444c7ceff1b7a18482d", + "signature": "a364b4a8a015ae377052fa4fac94204d79a69d879567f444c7ceff1b7a18482d", + "affectsGlobalScope": false + }, + 
"../node_modules/@types/node/vm.d.ts": { + "version": "1aa7dbace2b7b2ef60897dcd4f66252ee6ba85e594ded8918c9acdcecda1896c", + "signature": "1aa7dbace2b7b2ef60897dcd4f66252ee6ba85e594ded8918c9acdcecda1896c", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/worker_threads.d.ts": { + "version": "1535aeef6e803bc8c784b14c99bd214dba9223df4914f3dee616af351ab46042", + "signature": "1535aeef6e803bc8c784b14c99bd214dba9223df4914f3dee616af351ab46042", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/zlib.d.ts": { + "version": "4926467de88a92a4fc9971d8c6f21b91eca1c0e7fc2a46cc4638ab9440c73875", + "signature": "4926467de88a92a4fc9971d8c6f21b91eca1c0e7fc2a46cc4638ab9440c73875", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/globals.global.d.ts": { + "version": "2708349d5a11a5c2e5f3a0765259ebe7ee00cdcc8161cb9990cb4910328442a1", + "signature": "2708349d5a11a5c2e5f3a0765259ebe7ee00cdcc8161cb9990cb4910328442a1", + "affectsGlobalScope": true + }, + "../node_modules/@types/node/wasi.d.ts": { + "version": "4e0a4d84b15692ea8669fe4f3d05a4f204567906b1347da7a58b75f45bae48d3", + "signature": "4e0a4d84b15692ea8669fe4f3d05a4f204567906b1347da7a58b75f45bae48d3", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/ts3.6/base.d.ts": { + "version": "210ef68f34baca2a4499c07a51f05d51b4f0ef01d64efea3017cb3bc31c37e33", + "signature": "210ef68f34baca2a4499c07a51f05d51b4f0ef01d64efea3017cb3bc31c37e33", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/assert.d.ts": { + "version": "b3593bd345ebea5e4d0a894c03251a3774b34df3d6db57075c18e089a599ba76", + "signature": "b3593bd345ebea5e4d0a894c03251a3774b34df3d6db57075c18e089a599ba76", + "affectsGlobalScope": false + }, + "../node_modules/@types/node/base.d.ts": { + "version": "e61a21e9418f279bc480394a94d1581b2dee73747adcbdef999b6737e34d721b", + "signature": "e61a21e9418f279bc480394a94d1581b2dee73747adcbdef999b6737e34d721b", + "affectsGlobalScope": false + }, + 
"../node_modules/@types/node/index.d.ts": { + "version": "6c9c7e459e013ddf52c70b90f88bbdd925e483ef984d80f9bffb501029974e82", + "signature": "6c9c7e459e013ddf52c70b90f88bbdd925e483ef984d80f9bffb501029974e82", + "affectsGlobalScope": false + }, + "../node_modules/@types/normalize-package-data/index.d.ts": { + "version": "c9ad058b2cc9ce6dc2ed92960d6d009e8c04bef46d3f5312283debca6869f613", + "signature": "c9ad058b2cc9ce6dc2ed92960d6d009e8c04bef46d3f5312283debca6869f613", + "affectsGlobalScope": false + }, + "../node_modules/@types/parse-json/index.d.ts": { + "version": "2b8264b2fefd7367e0f20e2c04eed5d3038831fe00f5efbc110ff0131aab899b", + "signature": "2b8264b2fefd7367e0f20e2c04eed5d3038831fe00f5efbc110ff0131aab899b", + "affectsGlobalScope": false + }, + "../node_modules/@types/sinonjs__fake-timers/index.d.ts": { + "version": "558a9770503071d5a6fc6c596f7230bb79f2d034ced4a205bd1ebcad3b5879ec", + "signature": "558a9770503071d5a6fc6c596f7230bb79f2d034ced4a205bd1ebcad3b5879ec", + "affectsGlobalScope": false + }, + "../node_modules/@types/sinon/index.d.ts": { + "version": "9d92b037978bb9525bc4b673ebddd443277542e010c0aef019c03a170ccdaa73", + "signature": "9d92b037978bb9525bc4b673ebddd443277542e010c0aef019c03a170ccdaa73", + "affectsGlobalScope": false + }, + "../node_modules/@types/yargs-parser/index.d.ts": { + "version": "3bdd93ec24853e61bfa4c63ebaa425ff3e474156e87a47d90122e1d8cc717c1f", + "signature": "3bdd93ec24853e61bfa4c63ebaa425ff3e474156e87a47d90122e1d8cc717c1f", + "affectsGlobalScope": false + }, + "../node_modules/@types/yargs/index.d.ts": { + "version": "5a2a25feca554a8f289ed62114771b8c63d89f2b58325e2f8b7043e4e0160d11", + "signature": "5a2a25feca554a8f289ed62114771b8c63d89f2b58325e2f8b7043e4e0160d11", + "affectsGlobalScope": false + }, + "../node_modules/@types/yauzl/index.d.ts": { + "version": "3845d3b64286c12c60d39fc90ac1cc5e47cbc951530658d2567d578b2faa1f26", + "signature": "3845d3b64286c12c60d39fc90ac1cc5e47cbc951530658d2567d578b2faa1f26", + "affectsGlobalScope": 
false + }, + "../../../node_modules/@types/minimatch/index.d.ts": { + "version": "95c22bc19835e28e2e524a4bb8898eb5f2107b640d7279a6d3aade261916bbf2", + "signature": "95c22bc19835e28e2e524a4bb8898eb5f2107b640d7279a6d3aade261916bbf2", + "affectsGlobalScope": false + }, + "../../../node_modules/@types/glob/index.d.ts": { + "version": "393137c76bd922ba70a2f8bf1ade4f59a16171a02fb25918c168d48875b0cfb0", + "signature": "393137c76bd922ba70a2f8bf1ade4f59a16171a02fb25918c168d48875b0cfb0", + "affectsGlobalScope": false + } + }, + "options": { + "strict": true, + "outDir": "./", + "allowJs": true, + "checkJs": true, + "target": 7, + "lib": [ + "lib.es2020.d.ts", + "lib.es2020.promise.d.ts", + "lib.es2020.string.d.ts", + "lib.es2020.bigint.d.ts", + "lib.dom.d.ts", + "lib.dom.iterable.d.ts" + ], + "noEmit": false, + "noEmitOnError": true, + "emitDeclarationOnly": true, + "declaration": true, + "declarationMap": true, + "incremental": true, + "composite": true, + "isolatedModules": true, + "removeComments": false, + "esModuleInterop": true, + "moduleResolution": 2, + "noImplicitReturns": false, + "noFallthroughCasesInSwitch": true, + "noUnusedLocals": true, + "noUnusedParameters": false, + "importsNotUsedAsValues": 2, + "forceConsistentCasingInFileNames": true, + "skipLibCheck": true, + "stripInternal": true, + "resolveJsonModule": true, + "configFilePath": "../tsconfig.json" + }, + "referencedMap": { + "../../../node_modules/@types/glob/index.d.ts": [ + "../../../node_modules/@types/minimatch/index.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/index.d.ts" + ], + "../node_modules/@types/chai-as-promised/index.d.ts": [ + "../node_modules/@types/chai/index.d.ts" + ], + "../node_modules/@types/chai-subset/index.d.ts": [ + "../node_modules/@types/chai/index.d.ts" + ], + "../node_modules/@types/node/assert/strict.d.ts": [ + "../node_modules/@types/node/assert.d.ts" + ], + "../node_modules/@types/node/base.d.ts": [ + 
"../node_modules/@types/node/assert.d.ts", + "../node_modules/@types/node/ts3.6/base.d.ts" + ], + "../node_modules/@types/node/child_process.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/net.d.ts", + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/cluster.d.ts": [ + "../node_modules/@types/node/child_process.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/net.d.ts" + ], + "../node_modules/@types/node/console.d.ts": [ + "../node_modules/@types/node/util.d.ts" + ], + "../node_modules/@types/node/constants.d.ts": [ + "../node_modules/@types/node/crypto.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/os.d.ts" + ], + "../node_modules/@types/node/crypto.d.ts": [ + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/dgram.d.ts": [ + "../node_modules/@types/node/dns.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/net.d.ts" + ], + "../node_modules/@types/node/dns.d.ts": [ + "../node_modules/@types/node/dns/promises.d.ts" + ], + "../node_modules/@types/node/dns/promises.d.ts": [ + "../node_modules/@types/node/dns.d.ts" + ], + "../node_modules/@types/node/domain.d.ts": [ + "../node_modules/@types/node/events.d.ts" + ], + "../node_modules/@types/node/events.d.ts": [ + "../node_modules/@types/node/events.d.ts" + ], + "../node_modules/@types/node/fs.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs/promises.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/fs/promises.d.ts": [ + "../node_modules/@types/node/fs.d.ts" + ], + "../node_modules/@types/node/http.d.ts": [ + "../node_modules/@types/node/net.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/url.d.ts" + ], + 
"../node_modules/@types/node/http2.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/http.d.ts", + "../node_modules/@types/node/net.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/tls.d.ts", + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/https.d.ts": [ + "../node_modules/@types/node/http.d.ts", + "../node_modules/@types/node/tls.d.ts", + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/index.d.ts": [ + "../node_modules/@types/node/base.d.ts" + ], + "../node_modules/@types/node/inspector.d.ts": [ + "../node_modules/@types/node/events.d.ts" + ], + "../node_modules/@types/node/module.d.ts": [ + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/net.d.ts": [ + "../node_modules/@types/node/dns.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/perf_hooks.d.ts": [ + "../node_modules/@types/node/async_hooks.d.ts" + ], + "../node_modules/@types/node/process.d.ts": [ + "../node_modules/@types/node/tty.d.ts" + ], + "../node_modules/@types/node/readline.d.ts": [ + "../node_modules/@types/node/events.d.ts" + ], + "../node_modules/@types/node/repl.d.ts": [ + "../node_modules/@types/node/readline.d.ts", + "../node_modules/@types/node/util.d.ts", + "../node_modules/@types/node/vm.d.ts" + ], + "../node_modules/@types/node/stream.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/stream/promises.d.ts" + ], + "../node_modules/@types/node/stream/promises.d.ts": [ + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/timers/promises.d.ts": [ + "../node_modules/@types/node/timers.d.ts" + ], + "../node_modules/@types/node/tls.d.ts": [ + "../node_modules/@types/node/net.d.ts" + ], + "../node_modules/@types/node/ts3.6/base.d.ts": [ + 
"../node_modules/@types/node/assert/strict.d.ts", + "../node_modules/@types/node/async_hooks.d.ts", + "../node_modules/@types/node/buffer.d.ts", + "../node_modules/@types/node/child_process.d.ts", + "../node_modules/@types/node/cluster.d.ts", + "../node_modules/@types/node/console.d.ts", + "../node_modules/@types/node/constants.d.ts", + "../node_modules/@types/node/crypto.d.ts", + "../node_modules/@types/node/dgram.d.ts", + "../node_modules/@types/node/dns.d.ts", + "../node_modules/@types/node/dns/promises.d.ts", + "../node_modules/@types/node/domain.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/fs/promises.d.ts", + "../node_modules/@types/node/globals.d.ts", + "../node_modules/@types/node/globals.global.d.ts", + "../node_modules/@types/node/http.d.ts", + "../node_modules/@types/node/http2.d.ts", + "../node_modules/@types/node/https.d.ts", + "../node_modules/@types/node/inspector.d.ts", + "../node_modules/@types/node/module.d.ts", + "../node_modules/@types/node/net.d.ts", + "../node_modules/@types/node/os.d.ts", + "../node_modules/@types/node/path.d.ts", + "../node_modules/@types/node/perf_hooks.d.ts", + "../node_modules/@types/node/process.d.ts", + "../node_modules/@types/node/punycode.d.ts", + "../node_modules/@types/node/querystring.d.ts", + "../node_modules/@types/node/readline.d.ts", + "../node_modules/@types/node/repl.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/stream/promises.d.ts", + "../node_modules/@types/node/string_decoder.d.ts", + "../node_modules/@types/node/timers.d.ts", + "../node_modules/@types/node/timers/promises.d.ts", + "../node_modules/@types/node/tls.d.ts", + "../node_modules/@types/node/trace_events.d.ts", + "../node_modules/@types/node/tty.d.ts", + "../node_modules/@types/node/url.d.ts", + "../node_modules/@types/node/util.d.ts", + "../node_modules/@types/node/v8.d.ts", + "../node_modules/@types/node/vm.d.ts", + 
"../node_modules/@types/node/wasi.d.ts", + "../node_modules/@types/node/worker_threads.d.ts", + "../node_modules/@types/node/zlib.d.ts" + ], + "../node_modules/@types/node/tty.d.ts": [ + "../node_modules/@types/node/net.d.ts" + ], + "../node_modules/@types/node/url.d.ts": [ + "../node_modules/@types/node/querystring.d.ts" + ], + "../node_modules/@types/node/v8.d.ts": [ + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/worker_threads.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs/promises.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/url.d.ts", + "../node_modules/@types/node/vm.d.ts" + ], + "../node_modules/@types/node/zlib.d.ts": [ + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/sinon/index.d.ts": [ + "../node_modules/@types/sinonjs__fake-timers/index.d.ts" + ], + "../node_modules/@types/yargs/index.d.ts": [ + "../node_modules/@types/yargs-parser/index.d.ts" + ], + "../node_modules/@types/yauzl/index.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/index.d.ts", + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/aegir/dist/utils/chai.d.ts": [ + "../node_modules/@types/chai-as-promised/index.d.ts", + "../node_modules/@types/chai-subset/index.d.ts", + "../node_modules/@types/chai/index.d.ts", + "../node_modules/chai-bytes/index.d.ts", + "../node_modules/chai-parentheses/index.d.ts" + ], + "../node_modules/chai-bytes/index.d.ts": [ + "../node_modules/@types/chai/index.d.ts" + ], + "../node_modules/chai-parentheses/index.d.ts": [ + "../node_modules/@types/chai-as-promised/index.d.ts", + "../node_modules/@types/chai/index.d.ts" + ], + "../node_modules/uint8arrays/dist/from-string.d.ts": [ + "../node_modules/multibase/src/types.d.ts" + ], + "../src/index.js": [ + "../node_modules/err-code/dist/index.d.ts", + "../src/types.d.ts", + "../src/unixfs.d.ts" + ], + "../src/unixfs.d.ts": [ + 
"../node_modules/protobufjs/index.d.ts" + ], + "../test/unixfs-format.spec.js": [ + "../node_modules/aegir/dist/utils/chai.d.ts", + "../node_modules/aegir/dist/utils/fixtures.d.ts", + "../node_modules/uint8arrays/dist/from-string.d.ts", + "../src/index.js", + "../src/unixfs.d.ts" + ] + }, + "exportedModulesMap": { + "../../../node_modules/@types/glob/index.d.ts": [ + "../../../node_modules/@types/minimatch/index.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/index.d.ts" + ], + "../node_modules/@types/chai-as-promised/index.d.ts": [ + "../node_modules/@types/chai/index.d.ts" + ], + "../node_modules/@types/chai-subset/index.d.ts": [ + "../node_modules/@types/chai/index.d.ts" + ], + "../node_modules/@types/node/assert/strict.d.ts": [ + "../node_modules/@types/node/assert.d.ts" + ], + "../node_modules/@types/node/base.d.ts": [ + "../node_modules/@types/node/assert.d.ts", + "../node_modules/@types/node/ts3.6/base.d.ts" + ], + "../node_modules/@types/node/child_process.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/net.d.ts", + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/cluster.d.ts": [ + "../node_modules/@types/node/child_process.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/net.d.ts" + ], + "../node_modules/@types/node/console.d.ts": [ + "../node_modules/@types/node/util.d.ts" + ], + "../node_modules/@types/node/constants.d.ts": [ + "../node_modules/@types/node/crypto.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/os.d.ts" + ], + "../node_modules/@types/node/crypto.d.ts": [ + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/dgram.d.ts": [ + "../node_modules/@types/node/dns.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/net.d.ts" + ], + "../node_modules/@types/node/dns.d.ts": [ + 
"../node_modules/@types/node/dns/promises.d.ts" + ], + "../node_modules/@types/node/dns/promises.d.ts": [ + "../node_modules/@types/node/dns.d.ts" + ], + "../node_modules/@types/node/domain.d.ts": [ + "../node_modules/@types/node/events.d.ts" + ], + "../node_modules/@types/node/events.d.ts": [ + "../node_modules/@types/node/events.d.ts" + ], + "../node_modules/@types/node/fs.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs/promises.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/fs/promises.d.ts": [ + "../node_modules/@types/node/fs.d.ts" + ], + "../node_modules/@types/node/http.d.ts": [ + "../node_modules/@types/node/net.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/http2.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/http.d.ts", + "../node_modules/@types/node/net.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/tls.d.ts", + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/https.d.ts": [ + "../node_modules/@types/node/http.d.ts", + "../node_modules/@types/node/tls.d.ts", + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/index.d.ts": [ + "../node_modules/@types/node/base.d.ts" + ], + "../node_modules/@types/node/inspector.d.ts": [ + "../node_modules/@types/node/events.d.ts" + ], + "../node_modules/@types/node/module.d.ts": [ + "../node_modules/@types/node/url.d.ts" + ], + "../node_modules/@types/node/net.d.ts": [ + "../node_modules/@types/node/dns.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/perf_hooks.d.ts": [ + "../node_modules/@types/node/async_hooks.d.ts" + ], + "../node_modules/@types/node/process.d.ts": [ + 
"../node_modules/@types/node/tty.d.ts" + ], + "../node_modules/@types/node/readline.d.ts": [ + "../node_modules/@types/node/events.d.ts" + ], + "../node_modules/@types/node/repl.d.ts": [ + "../node_modules/@types/node/readline.d.ts", + "../node_modules/@types/node/util.d.ts", + "../node_modules/@types/node/vm.d.ts" + ], + "../node_modules/@types/node/stream.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/stream/promises.d.ts" + ], + "../node_modules/@types/node/stream/promises.d.ts": [ + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/timers/promises.d.ts": [ + "../node_modules/@types/node/timers.d.ts" + ], + "../node_modules/@types/node/tls.d.ts": [ + "../node_modules/@types/node/net.d.ts" + ], + "../node_modules/@types/node/ts3.6/base.d.ts": [ + "../node_modules/@types/node/assert/strict.d.ts", + "../node_modules/@types/node/async_hooks.d.ts", + "../node_modules/@types/node/buffer.d.ts", + "../node_modules/@types/node/child_process.d.ts", + "../node_modules/@types/node/cluster.d.ts", + "../node_modules/@types/node/console.d.ts", + "../node_modules/@types/node/constants.d.ts", + "../node_modules/@types/node/crypto.d.ts", + "../node_modules/@types/node/dgram.d.ts", + "../node_modules/@types/node/dns.d.ts", + "../node_modules/@types/node/dns/promises.d.ts", + "../node_modules/@types/node/domain.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/fs/promises.d.ts", + "../node_modules/@types/node/globals.d.ts", + "../node_modules/@types/node/globals.global.d.ts", + "../node_modules/@types/node/http.d.ts", + "../node_modules/@types/node/http2.d.ts", + "../node_modules/@types/node/https.d.ts", + "../node_modules/@types/node/inspector.d.ts", + "../node_modules/@types/node/module.d.ts", + "../node_modules/@types/node/net.d.ts", + "../node_modules/@types/node/os.d.ts", + "../node_modules/@types/node/path.d.ts", + 
"../node_modules/@types/node/perf_hooks.d.ts", + "../node_modules/@types/node/process.d.ts", + "../node_modules/@types/node/punycode.d.ts", + "../node_modules/@types/node/querystring.d.ts", + "../node_modules/@types/node/readline.d.ts", + "../node_modules/@types/node/repl.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/stream/promises.d.ts", + "../node_modules/@types/node/string_decoder.d.ts", + "../node_modules/@types/node/timers.d.ts", + "../node_modules/@types/node/timers/promises.d.ts", + "../node_modules/@types/node/tls.d.ts", + "../node_modules/@types/node/trace_events.d.ts", + "../node_modules/@types/node/tty.d.ts", + "../node_modules/@types/node/url.d.ts", + "../node_modules/@types/node/util.d.ts", + "../node_modules/@types/node/v8.d.ts", + "../node_modules/@types/node/vm.d.ts", + "../node_modules/@types/node/wasi.d.ts", + "../node_modules/@types/node/worker_threads.d.ts", + "../node_modules/@types/node/zlib.d.ts" + ], + "../node_modules/@types/node/tty.d.ts": [ + "../node_modules/@types/node/net.d.ts" + ], + "../node_modules/@types/node/url.d.ts": [ + "../node_modules/@types/node/querystring.d.ts" + ], + "../node_modules/@types/node/v8.d.ts": [ + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/node/worker_threads.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs/promises.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/url.d.ts", + "../node_modules/@types/node/vm.d.ts" + ], + "../node_modules/@types/node/zlib.d.ts": [ + "../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/@types/sinon/index.d.ts": [ + "../node_modules/@types/sinonjs__fake-timers/index.d.ts" + ], + "../node_modules/@types/yargs/index.d.ts": [ + "../node_modules/@types/yargs-parser/index.d.ts" + ], + "../node_modules/@types/yauzl/index.d.ts": [ + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/index.d.ts", + 
"../node_modules/@types/node/stream.d.ts" + ], + "../node_modules/aegir/dist/utils/chai.d.ts": [ + "../node_modules/@types/chai-as-promised/index.d.ts", + "../node_modules/@types/chai-subset/index.d.ts", + "../node_modules/@types/chai/index.d.ts", + "../node_modules/chai-bytes/index.d.ts", + "../node_modules/chai-parentheses/index.d.ts" + ], + "../node_modules/chai-bytes/index.d.ts": [ + "../node_modules/@types/chai/index.d.ts" + ], + "../node_modules/chai-parentheses/index.d.ts": [ + "../node_modules/@types/chai-as-promised/index.d.ts", + "../node_modules/@types/chai/index.d.ts" + ], + "../node_modules/uint8arrays/dist/from-string.d.ts": [ + "../node_modules/multibase/src/types.d.ts" + ], + "../src/index.js": [ + "../src/types.d.ts" + ], + "../src/unixfs.d.ts": [ + "../node_modules/protobufjs/index.d.ts" + ] + }, + "semanticDiagnosticsPerFile": [ + "../../../node_modules/@types/glob/index.d.ts", + "../../../node_modules/@types/minimatch/index.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.dom.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.dom.iterable.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.collection.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.core.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.generator.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.iterable.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.promise.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.proxy.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.reflect.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.symbol.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts", + 
"../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2016.array.include.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2016.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.intl.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.object.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.sharedmemory.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.string.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2017.typedarrays.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.asyncgenerator.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.asynciterable.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.intl.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.promise.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2018.regexp.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.array.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.object.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.string.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2019.symbol.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.bigint.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.intl.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.promise.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.sharedmemory.d.ts", + 
"../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.string.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es2020.symbol.wellknown.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.es5.d.ts", + "../../ipfs-unixfs-exporter/node_modules/typescript/lib/lib.esnext.intl.d.ts", + "../node_modules/@types/chai-as-promised/index.d.ts", + "../node_modules/@types/chai-subset/index.d.ts", + "../node_modules/@types/chai/index.d.ts", + "../node_modules/@types/istanbul-lib-coverage/index.d.ts", + "../node_modules/@types/json-schema/index.d.ts", + "../node_modules/@types/json5/index.d.ts", + "../node_modules/@types/long/index.d.ts", + "../node_modules/@types/minimist/index.d.ts", + "../node_modules/@types/mocha/index.d.ts", + "../node_modules/@types/node/assert.d.ts", + "../node_modules/@types/node/assert/strict.d.ts", + "../node_modules/@types/node/async_hooks.d.ts", + "../node_modules/@types/node/base.d.ts", + "../node_modules/@types/node/buffer.d.ts", + "../node_modules/@types/node/child_process.d.ts", + "../node_modules/@types/node/cluster.d.ts", + "../node_modules/@types/node/console.d.ts", + "../node_modules/@types/node/constants.d.ts", + "../node_modules/@types/node/crypto.d.ts", + "../node_modules/@types/node/dgram.d.ts", + "../node_modules/@types/node/dns.d.ts", + "../node_modules/@types/node/dns/promises.d.ts", + "../node_modules/@types/node/domain.d.ts", + "../node_modules/@types/node/events.d.ts", + "../node_modules/@types/node/fs.d.ts", + "../node_modules/@types/node/fs/promises.d.ts", + "../node_modules/@types/node/globals.d.ts", + "../node_modules/@types/node/globals.global.d.ts", + "../node_modules/@types/node/http.d.ts", + "../node_modules/@types/node/http2.d.ts", + "../node_modules/@types/node/https.d.ts", + "../node_modules/@types/node/index.d.ts", + "../node_modules/@types/node/inspector.d.ts", + "../node_modules/@types/node/module.d.ts", + "../node_modules/@types/node/net.d.ts", + 
"../node_modules/@types/node/os.d.ts", + "../node_modules/@types/node/path.d.ts", + "../node_modules/@types/node/perf_hooks.d.ts", + "../node_modules/@types/node/process.d.ts", + "../node_modules/@types/node/punycode.d.ts", + "../node_modules/@types/node/querystring.d.ts", + "../node_modules/@types/node/readline.d.ts", + "../node_modules/@types/node/repl.d.ts", + "../node_modules/@types/node/stream.d.ts", + "../node_modules/@types/node/stream/promises.d.ts", + "../node_modules/@types/node/string_decoder.d.ts", + "../node_modules/@types/node/timers.d.ts", + "../node_modules/@types/node/timers/promises.d.ts", + "../node_modules/@types/node/tls.d.ts", + "../node_modules/@types/node/trace_events.d.ts", + "../node_modules/@types/node/ts3.6/base.d.ts", + "../node_modules/@types/node/tty.d.ts", + "../node_modules/@types/node/url.d.ts", + "../node_modules/@types/node/util.d.ts", + "../node_modules/@types/node/v8.d.ts", + "../node_modules/@types/node/vm.d.ts", + "../node_modules/@types/node/wasi.d.ts", + "../node_modules/@types/node/worker_threads.d.ts", + "../node_modules/@types/node/zlib.d.ts", + "../node_modules/@types/normalize-package-data/index.d.ts", + "../node_modules/@types/parse-json/index.d.ts", + "../node_modules/@types/sinon/index.d.ts", + "../node_modules/@types/sinonjs__fake-timers/index.d.ts", + "../node_modules/@types/yargs-parser/index.d.ts", + "../node_modules/@types/yargs/index.d.ts", + "../node_modules/@types/yauzl/index.d.ts", + "../node_modules/aegir/dist/utils/chai.d.ts", + "../node_modules/aegir/dist/utils/fixtures.d.ts", + "../node_modules/chai-bytes/index.d.ts", + "../node_modules/chai-parentheses/index.d.ts", + "../node_modules/err-code/dist/index.d.ts", + "../node_modules/multibase/src/types.d.ts", + "../node_modules/protobufjs/index.d.ts", + "../node_modules/uint8arrays/dist/from-string.d.ts", + "../src/index.js", + "../src/types.d.ts", + "../src/unixfs.d.ts", + "../test/unixfs-format.spec.js" + ] + }, + "version": "4.2.3" +} \ No newline at 
end of file