diff --git a/docs/generated/changelog.html b/docs/generated/changelog.html
index c23d4d2c0..1c7ebc33d 100644
--- a/docs/generated/changelog.html
+++ b/docs/generated/changelog.html
@@ -28,6 +28,10 @@
Version 0.13.4
bug: auth-client resolves window.open issue in login function in safari due to async
storage call
+
+ New package: @dfinity/assets. This package provides an asset manager to manage assets
+ on an assets canister.
+
bug: auth-client storage wrapper returns after resolve to avoid idb to be recreated
Version 0.13.3
diff --git a/e2e/node/basic/assets.test.ts b/e2e/node/basic/assets.test.ts
new file mode 100644
index 000000000..79e677b59
--- /dev/null
+++ b/e2e/node/basic/assets.test.ts
@@ -0,0 +1,125 @@
+/**
+ * @jest-environment node
+ */
+import { existsSync, readFileSync, unlinkSync } from 'fs';
+import path from 'path';
+import agent from '../utils/agent';
+import { Actor } from '@dfinity/agent';
+import { Principal } from '@dfinity/principal';
+import { AssetManager } from '@dfinity/assets';
+
+/**
+ * Create (pseudo) random bytes Readable
+ * @param fileName File name of Readable
+ * @param length Byte length of Readable
+ */
+const randomBytesReadable = (fileName: string, length: number) => {
+ const rand = Math.floor(Math.random() * 10000);
+ return {
+ fileName,
+ contentType: 'application/octet-stream',
+ length,
+ // eslint-disable-next-line @typescript-eslint/no-empty-function
+ open: async () => {},
+ // eslint-disable-next-line @typescript-eslint/no-empty-function
+ close: async () => {},
+ slice: async (start: number, end: number) => {
+ return Uint8Array.from(
+ Array.from({ length: end - start }).map((_, i) => {
+ const offset = start + i;
+ const x = Math.sin(rand + offset) * 10000;
+ return Math.floor((x - Math.floor(x)) * 256);
+ }),
+ );
+ },
+ };
+};
+
+/**
+ * File paths used in file read/write tests
+ */
+const testFile = {
+ source: path.join(__dirname, '../package.json'),
+ target: path.join(__dirname, '../package_copy.json'),
+};
+
+jest.setTimeout(100000);
+describe('assets', () => {
+ let canisterId: Principal;
+
+ const testRandomBytes = async (fileName: string, length: number) => {
+ const assetManager = new AssetManager({
+ canisterId,
+ agent: await agent,
+ maxSingleFileSize: 1900,
+ maxChunkSize: 1900,
+ });
+ const readable = randomBytesReadable(fileName, length);
+ const key = await assetManager.store(readable);
+ const asset = await assetManager.get(key);
+ const sentData = await readable.slice(0, readable.length);
+ const receivedData = await asset.toUint8Array();
+ const isCertified = await asset.isCertified();
+ const isValidSha = await asset.verifySha256(receivedData);
+ await assetManager.delete(key);
+
+ expect(key).toEqual(`/${readable.fileName}`);
+ expect(asset.contentType).toEqual(readable.contentType);
+ expect(asset.length).toEqual(readable.length);
+ expect(Array.from(receivedData).join()).toEqual(Array.from(sentData).join());
+ expect(isCertified).toBe(true);
+ expect(isValidSha).toBe(true);
+ await expect(assetManager.get(key)).rejects.toThrow(/asset not found/);
+ };
+
+ beforeAll(async () => {
+ const module = readFileSync(path.join(__dirname, '../canisters/assets.wasm'));
+ canisterId = await Actor.createCanister({ agent: await agent });
+ await Actor.install({ module }, { canisterId, agent: await agent });
+ });
+
+ afterEach(async () => {
+ const assetManager = new AssetManager({ canisterId, agent: await agent });
+ await assetManager.clear();
+ if (existsSync(testFile.target)) {
+ unlinkSync(testFile.target);
+ }
+ });
+
+ it('store, get and delete 1KB asset (single chunk)', () => testRandomBytes('1KB.bin', 1000));
+
+ it('store, get and delete 3KB asset (multiple chunk)', () => testRandomBytes('3KB.bin', 3000));
+
+ it('batch process assets and verify asset list', async () => {
+ const assetManager = new AssetManager({ canisterId, agent: await agent });
+ const batch = assetManager.batch();
+
+ // Initial X asset
+ const x = randomBytesReadable('X.bin', 1000);
+ await assetManager.store(x);
+
+ // Batch store A and B assets and delete X asset
+ const readables = [randomBytesReadable('A.bin', 1000), randomBytesReadable('B.bin', 1000)];
+ await batch.delete(`/${x.fileName}`);
+ await Promise.all(readables.map(readable => batch.store(readable)));
+ await batch.commit();
+ await expect(
+ assetManager.list().then(assets => assets.map(asset => asset.key).sort()),
+ ).resolves.toEqual(readables.map(({ fileName }) => `/${fileName}`).sort());
+ });
+
+ it('read file from disk, store as asset, get asset, write file to disk and compare files', async () => {
+ const assetManager = new AssetManager({
+ canisterId,
+ agent: await agent,
+ // Make sure files are read and written in chunks during this test
+ maxSingleFileSize: 200,
+ maxChunkSize: 200,
+ });
+ const key = await assetManager.store(testFile.source);
+ const asset = await assetManager.get(key);
+ await asset.write(testFile.target);
+
+ expect(readFileSync(testFile.target, 'utf8')).toEqual(readFileSync(testFile.source, 'utf8'));
+ });
+});
diff --git a/e2e/node/canisters/assets.wasm b/e2e/node/canisters/assets.wasm
new file mode 100644
index 000000000..27a887379
Binary files /dev/null and b/e2e/node/canisters/assets.wasm differ
diff --git a/e2e/node/package.json b/e2e/node/package.json
index d34a03491..502a9663c 100644
--- a/e2e/node/package.json
+++ b/e2e/node/package.json
@@ -18,6 +18,7 @@
"dependencies": {
"@dfinity/agent": "^0.13.3",
+    "@dfinity/assets": "^0.13.3",
     "@dfinity/authentication": "^0.13.3",
"@dfinity/identity": "^0.13.3",
"@dfinity/principal": "^0.13.3",
"@trust/webcrypto": "^0.9.2",
diff --git a/package-lock.json b/package-lock.json
index 38b81ee04..a48e4f620 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -44,6 +44,7 @@
"packages/identity-ledgerhq",
"packages/authentication",
"packages/auth-client",
+ "packages/assets",
"e2e/node",
"e2e/browser",
"demos/ledgerhq",
@@ -119,6 +120,7 @@
"version": "0.13.3",
"dependencies": {
"@dfinity/agent": "^0.13.3",
+ "@dfinity/assets": "^0.13.3",
"@dfinity/authentication": "^0.13.3",
"@dfinity/identity": "^0.13.3",
"@dfinity/principal": "^0.13.3",
@@ -1897,6 +1899,10 @@
"resolved": "packages/agent",
"link": true
},
+ "node_modules/@dfinity/assets": {
+ "resolved": "packages/assets",
+ "link": true
+ },
"node_modules/@dfinity/auth-client": {
"resolved": "packages/auth-client",
"link": true
@@ -5929,6 +5935,34 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
+ "node_modules/@web-std/blob": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@web-std/blob/-/blob-3.0.4.tgz",
+ "integrity": "sha512-+dibyiw+uHYK4dX5cJ7HA+gtDAaUUe6JsOryp2ZpAC7h4ICsh49E34JwHoEKPlPvP0llCrNzz45vvD+xX5QDBg==",
+ "dev": true,
+ "dependencies": {
+ "@web-std/stream": "1.0.0",
+ "web-encoding": "1.1.5"
+ }
+ },
+ "node_modules/@web-std/file": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@web-std/file/-/file-3.0.2.tgz",
+ "integrity": "sha512-pIH0uuZsmY8YFvSHP1NsBIiMT/1ce0suPrX74fEeO3Wbr1+rW0fUGEe4d0R99iLwXtyCwyserqCFI4BJkJlkRA==",
+ "dev": true,
+ "dependencies": {
+ "@web-std/blob": "^3.0.3"
+ }
+ },
+ "node_modules/@web-std/stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@web-std/stream/-/stream-1.0.0.tgz",
+ "integrity": "sha512-jyIbdVl+0ZJyKGTV0Ohb9E6UnxP+t7ZzX4Do3AHjZKxUXKMs9EmqnBDQgHF7bEw0EzbQygOjtt/7gvtmi//iCQ==",
+ "dev": true,
+ "dependencies": {
+ "web-streams-polyfill": "^3.1.1"
+ }
+ },
"node_modules/@webassemblyjs/ast": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
@@ -6114,6 +6148,13 @@
"@types/ledgerhq__hw-transport": "^4.21.3"
}
},
+ "node_modules/@zxing/text-encoding": {
+ "version": "0.9.0",
+ "resolved": "https://registry.npmjs.org/@zxing/text-encoding/-/text-encoding-0.9.0.tgz",
+ "integrity": "sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA==",
+ "dev": true,
+ "optional": true
+ },
"node_modules/abab": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz",
@@ -18609,6 +18650,27 @@
"integrity": "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==",
"dev": true
},
+ "node_modules/web-encoding": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/web-encoding/-/web-encoding-1.1.5.tgz",
+ "integrity": "sha512-HYLeVCdJ0+lBYV2FvNZmv3HJ2Nt0QYXqZojk3d9FJOLkwnuhzM9tmamh8d7HPM8QqjKH8DeHkFTx+CFlWpZZDA==",
+ "dev": true,
+ "dependencies": {
+ "util": "^0.12.3"
+ },
+ "optionalDependencies": {
+ "@zxing/text-encoding": "0.9.0"
+ }
+ },
+ "node_modules/web-streams-polyfill": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz",
+ "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==",
+ "dev": true,
+ "engines": {
+ "node": ">= 8"
+ }
+ },
"node_modules/webcrypto-core": {
"version": "1.7.5",
"resolved": "https://registry.npmjs.org/webcrypto-core/-/webcrypto-core-1.7.5.tgz",
@@ -19385,6 +19447,137 @@
"integrity": "sha512-Uw5ooOQxRASHgu6C7GVvUxisKXfSgW4oFlO+aa+PAkgmH89O3CXxEEzNRNtHSqtXFTl0nAC1uYj0GMSH27uwtQ==",
"dev": true
},
+ "packages/assets": {
+ "name": "@dfinity/assets",
+ "version": "0.13.3",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "base64-arraybuffer": "^1.0.2",
+ "mime": "^3.0.0"
+ },
+ "devDependencies": {
+ "@peculiar/webcrypto": "^1.4.0",
+ "@types/jest": "^28.1.4",
+ "@types/mime": "^2.0.3",
+ "@typescript-eslint/eslint-plugin": "^5.30.5",
+ "@typescript-eslint/parser": "^5.30.5",
+ "@web-std/file": "^3.0.2",
+ "eslint": "^8.19.0",
+ "eslint-plugin-jsdoc": "^39.3.3",
+ "jest": "^28.1.2",
+ "ts-jest": "^28.0.5",
+ "typedoc": "^0.22.11",
+ "typescript": "^4.7.4"
+ },
+ "peerDependencies": {
+ "@dfinity/agent": "^0.13.3",
+ "js-sha256": "0.9.0"
+ }
+ },
+ "packages/assets/node_modules/@types/mime": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.3.tgz",
+ "integrity": "sha512-Jus9s4CDbqwocc5pOAnh8ShfrnMcPHuJYzVcSUU7lrh8Ni5HuIqX3oilL86p3dlTrk0LzHRCgA/GQ7uNCw6l2Q==",
+ "dev": true
+ },
+ "packages/assets/node_modules/base64-arraybuffer": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz",
+ "integrity": "sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ==",
+ "engines": {
+ "node": ">= 0.6.0"
+ }
+ },
+ "packages/assets/node_modules/brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "dev": true,
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "packages/assets/node_modules/glob": {
+ "version": "8.0.3",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.3.tgz",
+ "integrity": "sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==",
+ "dev": true,
+ "dependencies": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^5.0.1",
+ "once": "^1.3.0"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "packages/assets/node_modules/mime": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
+ "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
+ "bin": {
+ "mime": "cli.js"
+ },
+ "engines": {
+ "node": ">=10.0.0"
+ }
+ },
+ "packages/assets/node_modules/minimatch": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.0.tgz",
+ "integrity": "sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==",
+ "dev": true,
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "packages/assets/node_modules/shiki": {
+ "version": "0.10.1",
+ "resolved": "https://registry.npmjs.org/shiki/-/shiki-0.10.1.tgz",
+ "integrity": "sha512-VsY7QJVzU51j5o1+DguUd+6vmCmZ5v/6gYu4vyYAhzjuNQU6P/vmSy4uQaOhvje031qQMiW0d2BwgMH52vqMng==",
+ "dev": true,
+ "dependencies": {
+ "jsonc-parser": "^3.0.0",
+ "vscode-oniguruma": "^1.6.1",
+ "vscode-textmate": "5.2.0"
+ }
+ },
+ "packages/assets/node_modules/typedoc": {
+ "version": "0.22.18",
+ "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.22.18.tgz",
+ "integrity": "sha512-NK9RlLhRUGMvc6Rw5USEYgT4DVAUFk7IF7Q6MYfpJ88KnTZP7EneEa4RcP+tX1auAcz7QT1Iy0bUSZBYYHdoyA==",
+ "dev": true,
+ "dependencies": {
+ "glob": "^8.0.3",
+ "lunr": "^2.3.9",
+ "marked": "^4.0.16",
+ "minimatch": "^5.1.0",
+ "shiki": "^0.10.1"
+ },
+ "bin": {
+ "typedoc": "bin/typedoc"
+ },
+ "engines": {
+ "node": ">= 12.10.0"
+ },
+ "peerDependencies": {
+ "typescript": "4.0.x || 4.1.x || 4.2.x || 4.3.x || 4.4.x || 4.5.x || 4.6.x || 4.7.x"
+ }
+ },
+ "packages/assets/node_modules/vscode-textmate": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/vscode-textmate/-/vscode-textmate-5.2.0.tgz",
+ "integrity": "sha512-Uw5ooOQxRASHgu6C7GVvUxisKXfSgW4oFlO+aa+PAkgmH89O3CXxEEzNRNtHSqtXFTl0nAC1uYj0GMSH27uwtQ==",
+ "dev": true
+ },
"packages/auth-client": {
"name": "@dfinity/auth-client",
"version": "0.13.3",
@@ -21366,6 +21559,104 @@
}
}
},
+ "@dfinity/assets": {
+ "version": "file:packages/assets",
+ "requires": {
+ "@peculiar/webcrypto": "^1.4.0",
+ "@types/jest": "^28.1.4",
+ "@types/mime": "^2.0.3",
+ "@typescript-eslint/eslint-plugin": "^5.30.5",
+ "@typescript-eslint/parser": "^5.30.5",
+ "@web-std/file": "^3.0.2",
+ "base64-arraybuffer": "^1.0.2",
+ "eslint": "^8.19.0",
+ "eslint-plugin-jsdoc": "^39.3.3",
+ "jest": "^28.1.2",
+ "mime": "^3.0.0",
+ "ts-jest": "^28.0.5",
+ "typedoc": "^0.22.11",
+ "typescript": "^4.7.4"
+ },
+ "dependencies": {
+ "@types/mime": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.3.tgz",
+ "integrity": "sha512-Jus9s4CDbqwocc5pOAnh8ShfrnMcPHuJYzVcSUU7lrh8Ni5HuIqX3oilL86p3dlTrk0LzHRCgA/GQ7uNCw6l2Q==",
+ "dev": true
+ },
+ "base64-arraybuffer": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz",
+ "integrity": "sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ=="
+ },
+ "brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "dev": true,
+ "requires": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "glob": {
+ "version": "8.0.3",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.3.tgz",
+ "integrity": "sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==",
+ "dev": true,
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^5.0.1",
+ "once": "^1.3.0"
+ }
+ },
+ "mime": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
+ "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="
+ },
+ "minimatch": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.0.tgz",
+ "integrity": "sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==",
+ "dev": true,
+ "requires": {
+ "brace-expansion": "^2.0.1"
+ }
+ },
+ "shiki": {
+ "version": "0.10.1",
+ "resolved": "https://registry.npmjs.org/shiki/-/shiki-0.10.1.tgz",
+ "integrity": "sha512-VsY7QJVzU51j5o1+DguUd+6vmCmZ5v/6gYu4vyYAhzjuNQU6P/vmSy4uQaOhvje031qQMiW0d2BwgMH52vqMng==",
+ "dev": true,
+ "requires": {
+ "jsonc-parser": "^3.0.0",
+ "vscode-oniguruma": "^1.6.1",
+ "vscode-textmate": "5.2.0"
+ }
+ },
+ "typedoc": {
+ "version": "0.22.18",
+ "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.22.18.tgz",
+ "integrity": "sha512-NK9RlLhRUGMvc6Rw5USEYgT4DVAUFk7IF7Q6MYfpJ88KnTZP7EneEa4RcP+tX1auAcz7QT1Iy0bUSZBYYHdoyA==",
+ "dev": true,
+ "requires": {
+ "glob": "^8.0.3",
+ "lunr": "^2.3.9",
+ "marked": "^4.0.16",
+ "minimatch": "^5.1.0",
+ "shiki": "^0.10.1"
+ }
+ },
+ "vscode-textmate": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/vscode-textmate/-/vscode-textmate-5.2.0.tgz",
+ "integrity": "sha512-Uw5ooOQxRASHgu6C7GVvUxisKXfSgW4oFlO+aa+PAkgmH89O3CXxEEzNRNtHSqtXFTl0nAC1uYj0GMSH27uwtQ==",
+ "dev": true
+ }
+ }
+ },
"@dfinity/auth-client": {
"version": "file:packages/auth-client",
"requires": {
@@ -21908,6 +22199,7 @@
"@babel/preset-env": "^7.18.6",
"@babel/preset-typescript": "^7.18.6",
"@dfinity/agent": "^0.13.3",
+ "@dfinity/assets": "^0.13.3",
"@dfinity/authentication": "^0.13.3",
"@dfinity/identity": "^0.13.3",
"@dfinity/principal": "^0.13.3",
@@ -24828,6 +25120,34 @@
"eslint-visitor-keys": "^3.3.0"
}
},
+ "@web-std/blob": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@web-std/blob/-/blob-3.0.4.tgz",
+ "integrity": "sha512-+dibyiw+uHYK4dX5cJ7HA+gtDAaUUe6JsOryp2ZpAC7h4ICsh49E34JwHoEKPlPvP0llCrNzz45vvD+xX5QDBg==",
+ "dev": true,
+ "requires": {
+ "@web-std/stream": "1.0.0",
+ "web-encoding": "1.1.5"
+ }
+ },
+ "@web-std/file": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@web-std/file/-/file-3.0.2.tgz",
+ "integrity": "sha512-pIH0uuZsmY8YFvSHP1NsBIiMT/1ce0suPrX74fEeO3Wbr1+rW0fUGEe4d0R99iLwXtyCwyserqCFI4BJkJlkRA==",
+ "dev": true,
+ "requires": {
+ "@web-std/blob": "^3.0.3"
+ }
+ },
+ "@web-std/stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@web-std/stream/-/stream-1.0.0.tgz",
+ "integrity": "sha512-jyIbdVl+0ZJyKGTV0Ohb9E6UnxP+t7ZzX4Do3AHjZKxUXKMs9EmqnBDQgHF7bEw0EzbQygOjtt/7gvtmi//iCQ==",
+ "dev": true,
+ "requires": {
+ "web-streams-polyfill": "^3.1.1"
+ }
+ },
"@webassemblyjs/ast": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
@@ -24999,6 +25319,13 @@
"@types/ledgerhq__hw-transport": "^4.21.3"
}
},
+ "@zxing/text-encoding": {
+ "version": "0.9.0",
+ "resolved": "https://registry.npmjs.org/@zxing/text-encoding/-/text-encoding-0.9.0.tgz",
+ "integrity": "sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA==",
+ "dev": true,
+ "optional": true
+ },
"abab": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz",
@@ -34185,6 +34512,22 @@
"integrity": "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==",
"dev": true
},
+ "web-encoding": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/web-encoding/-/web-encoding-1.1.5.tgz",
+ "integrity": "sha512-HYLeVCdJ0+lBYV2FvNZmv3HJ2Nt0QYXqZojk3d9FJOLkwnuhzM9tmamh8d7HPM8QqjKH8DeHkFTx+CFlWpZZDA==",
+ "dev": true,
+ "requires": {
+ "@zxing/text-encoding": "0.9.0",
+ "util": "^0.12.3"
+ }
+ },
+ "web-streams-polyfill": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz",
+ "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==",
+ "dev": true
+ },
"webcrypto-core": {
"version": "1.7.5",
"resolved": "https://registry.npmjs.org/webcrypto-core/-/webcrypto-core-1.7.5.tgz",
diff --git a/package.json b/package.json
index 61cf2d87a..c35be4821 100644
--- a/package.json
+++ b/package.json
@@ -13,6 +13,7 @@
"packages/identity-ledgerhq",
"packages/authentication",
"packages/auth-client",
+ "packages/assets",
"e2e/node",
"e2e/browser",
"demos/ledgerhq",
diff --git a/packages/agent/tsconfig.json b/packages/agent/tsconfig.json
index 38c9b6462..15f63515c 100644
--- a/packages/agent/tsconfig.json
+++ b/packages/agent/tsconfig.json
@@ -12,7 +12,8 @@
"moduleResolution": "node",
"outDir": "lib/esm",
"paths": {
- "@dfinity/agent": ["src"]
+ "@dfinity/agent": ["src"],
+      "@dfinity/assets": ["../assets/src"]
},
"resolveJsonModule": true,
"rootDir": "./src",
diff --git a/packages/assets/.gitignore b/packages/assets/.gitignore
new file mode 100644
index 000000000..40ba0014d
--- /dev/null
+++ b/packages/assets/.gitignore
@@ -0,0 +1,17 @@
+build_info.json
+node_modules/
+dist/
+**/*.js
+**/*.js.map
+**/*.d.ts
+
+# generated docs
+/docs/reference
+
+# Cannot ignore .d.ts files in types/
+!types/**/*.d.ts
+
+# Cannot ignore setup files for webpack and jest, which are still JavaScript.
+!webpack.config.js
+!jest.config.js
+!test-setup.js
diff --git a/packages/assets/.npmignore b/packages/assets/.npmignore
new file mode 100644
index 000000000..eb725972f
--- /dev/null
+++ b/packages/assets/.npmignore
@@ -0,0 +1,11 @@
+# We work with a safelist here, so block everything that's not permitted, and add packages
+# that are.
+**
+
+!lib/**
+!types/**/*.d.ts
+!package.json
+!README.md
+
+# The following line further removes all test files (which matches .js and .d.ts).
+lib/**/*.test.*
diff --git a/packages/assets/LICENSE b/packages/assets/LICENSE
new file mode 100644
index 000000000..b27ba1fe8
--- /dev/null
+++ b/packages/assets/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2020 DFINITY LLC.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/assets/README.md b/packages/assets/README.md
new file mode 100644
index 000000000..8644a9afe
--- /dev/null
+++ b/packages/assets/README.md
@@ -0,0 +1,185 @@
+# @dfinity/assets
+
+Manage assets on an Internet Computer assets canister.
+
+Visit the [Dfinity Forum](https://forum.dfinity.org/) and [SDK Documentation](https://sdk.dfinity.org/docs/index.html)
+for more information and support building on the Internet Computer.
+
+Additional API Documentation can be found [here](https://agent-js.icp.xyz/assets/index.html).
+
+---
+
+## Installation
+
+Using AssetManager:
+
+```
+npm i --save @dfinity/assets
+```
+
+### In the browser:
+
+```
+import { AssetManager } from "@dfinity/assets";
+```
+
+### In Node.js:
+
+```
+const { AssetManager } = require("@dfinity/assets");
+```
+
+## Using AssetManager
+
+AssetManager supports the (chunked) upload of File, Blob, ArrayBuffer, Uint8Array and number[].
+
+Create an asset manager instance
+
+```js
+const assetManager = new AssetManager({
+ canisterId: ..., // Principal of assets canister
+ agent: ..., // Identity in agent must be authorized by the assets canister to make any changes
+});
+```
+
+AssetManager config extends Actor config with additional options
+
+```ts
+export interface AssetManagerConfig extends ActorConfig {
+ /**
+ * Max number of concurrent requests to the Internet Computer
+ * @default 16
+ */
+ concurrency?: number;
+ /**
+ * Max file size in bytes that the asset manager shouldn't chunk
+ * @default 1900000
+ */
+ maxSingleFileSize?: number;
+ /**
+ * Size of each chunk in bytes when the asset manager has to chunk a file
+ * @default 1900000
+ */
+ maxChunkSize?: number;
+}
+```
+
+Select file and upload to asset canister in browser
+
+```js
+const input = document.createElement('input');
+input.type = 'file';
+input.addEventListener('change', async (e) => {
+  const file = e.target.files[0];
+ const key = await assetManager.store(file);
+});
+input.click();
+```
+
+Config can be optionally passed as second argument in the `store` method.
+The `fileName` property is required when the data passed in the first argument
+is not a `File`, file path or custom `Readable` implementation
+
+```ts
+export interface StoreConfig {
+ /**
+ * File name
+ * @default File object name or name in file path
+ */
+ fileName?: string;
+ /**
+ * File path that file will be uploaded to
+ * @default '/'
+ */
+ path?: string;
+ /**
+ * File content type
+ * @default File/Blob object type or type from file name extension
+ */
+ contentType?: string;
+ /**
+ * Content encoding
+ * @default 'identity'
+ */
+ contentEncoding?: ContentEncoding;
+ /**
+ * File hash generation will be skipped if hash is provided
+ */
+ sha256?: Uint8Array;
+ /**
+ * Callback method to get upload progress in bytes (current / total)
+ */
+ onProgress?: (progress: Progress) => void;
+}
+```
+
+Read file from disk and upload to asset canister in Node.js
+
+```js
+const file = fs.readFileSync('./example.csv');
+const key = await assetManager.store(file, {fileName: 'example.csv'});
+```
+
+Delete file from asset canister
+
+```js
+const key = '/path/to/example.jpg'
+await assetManager.delete(key);
+```
+
+List files in asset canister
+
+```js
+const files = await assetManager.list();
+```
+
+Upload multiple files and delete an existing file as batch in Node.js
+
+```js
+const fs = require('fs');
+
+const banana = fs.readFileSync('./banana.png');
+const apple = fs.readFileSync('./apple.png');
+const strawberry = fs.readFileSync('./strawberry.png');
+const batch = assetManager.batch();
+const keys = [
+ await batch.store(banana, {fileName: 'banana.png'}),
+ await batch.store(apple, {fileName: 'apple.png', path: '/directory/with/apples'}),
+ await batch.store(strawberry, {fileName: 'strawberry.png'}),
+];
+await batch.delete('/path/to/old/file.csv');
+await batch.commit();
+```
+
+Read file from disk, compress with gzip and upload to asset canister in Node.js,
+GZIP compression is recommended for HTML and JS files
+
+```js
+const fs = require('fs');
+const pako = require('pako');
+
+const file = fs.readFileSync('./index.html');
+const gzippedFile = pako.gzip(file);
+const key = await assetManager.store(gzippedFile, {
+ fileName: 'index.html',
+ contentEncoding: 'gzip',
+});
+```
+
+Download image asset to blob and open in new browser tab
+
+```js
+const asset = await assetManager.get('/path/to/file/on/asset/canister/motoko.png');
+const blob = await asset.toBlob();
+const url = URL.createObjectURL(blob);
+
+window.open(url, '_blank');
+```
+
+Download and write asset to path in Node.js
+
+```js
+const asset = await assetManager.get('/large_dataset.csv');
+asset.write('/large_dataset.csv');
+```
+
diff --git a/packages/assets/jest.config.ts b/packages/assets/jest.config.ts
new file mode 100644
index 000000000..bf2478f0a
--- /dev/null
+++ b/packages/assets/jest.config.ts
@@ -0,0 +1,17 @@
import baseConfig from '../../jest.config.base';

const packageName = 'assets';

/**
 * Jest configuration for the @dfinity/assets package.
 *
 * `rootDir` points at the monorepo root ('../..'), so all package-relative
 * paths must be anchored with Jest's `<rootDir>` token — without it Jest
 * resolves the paths relative to the current working directory and fails to
 * find the package sources and setup file.
 */
module.exports = {
  ...baseConfig,
  roots: [`<rootDir>/packages/${packageName}`],
  bail: false,
  moduleDirectories: ['node_modules'],
  modulePaths: [`<rootDir>/packages/${packageName}/src/`],
  setupFiles: [`<rootDir>/packages/${packageName}/test-setup.ts`],
  transform: {
    // Compile TypeScript test/source files on the fly.
    '^.+\\.ts$': 'ts-jest',
  },
  collectCoverageFrom: ['src/**/*.{ts,tsx}'],
  displayName: packageName,
  rootDir: '../..',
};
diff --git a/packages/assets/package.json b/packages/assets/package.json
new file mode 100644
index 000000000..fdbfbfc87
--- /dev/null
+++ b/packages/assets/package.json
@@ -0,0 +1,76 @@
+{
+ "name": "@dfinity/assets",
+ "version": "0.13.3",
+  "author": "DFINITY Stiftung <sdk@dfinity.org>",
+ "license": "Apache-2.0",
+ "description": "JavaScript and TypeScript library to manage assets on the Internet Computer",
+ "homepage": "https://smartcontracts.org",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/dfinity/agent-js.git",
+ "directory": "packages/assets"
+ },
+ "bugs": {
+ "url": "https://github.com/dfinity/agent-js/issues"
+ },
+ "keywords": [
+ "internet computer",
+ "internet-computer",
+ "ic",
+ "dfinity",
+ "assets",
+ "asset",
+ "file",
+ "upload",
+ "agent",
+ "actor",
+ "dfx",
+ "canister",
+ "motoko",
+ "javascript",
+ "typescript",
+ "blockchain",
+ "crypto",
+ "distributed",
+ "api",
+ "sdk"
+ ],
+ "main": "./lib/cjs/index.js",
+ "module": "./lib/esm/index.js",
+ "scripts": {
+ "build": "tsc -b && tsc -p tsconfig-cjs.json",
+ "bundle": "npm run build",
+ "lint": "eslint 'src' --ext '.js,.jsx,.ts,.tsx'",
+ "lint:fix": "npm run lint -- --fix",
+ "make:docs/reference": "typedoc src/index.ts --out ../../docs/generated/assets",
+ "release": "npm publish",
+ "test": "jest",
+ "test:coverage": "jest --verbose --collectCoverage"
+ },
+ "peerDependencies": {
+ "@dfinity/agent": "^0.13.3",
+ "js-sha256": "0.9.0"
+ },
+ "dependencies": {
+ "base64-arraybuffer": "^1.0.2",
+ "mime": "^3.0.0"
+ },
+ "devDependencies": {
+ "@peculiar/webcrypto": "^1.4.0",
+ "@types/jest": "^28.1.4",
+ "@types/mime": "^2.0.3",
+ "@typescript-eslint/eslint-plugin": "^5.30.5",
+ "@typescript-eslint/parser": "^5.30.5",
+ "@web-std/file": "^3.0.2",
+ "eslint": "^8.19.0",
+ "eslint-plugin-jsdoc": "^39.3.3",
+ "jest": "^28.1.2",
+ "ts-jest": "^28.0.5",
+ "typedoc": "^0.22.11",
+ "typescript": "^4.7.4"
+ },
+ "browser": {
+ "fs": "./lib/cjs/utils/browserShim.js",
+ "path": "./lib/cjs/utils/browserShim.js"
+ }
+}
diff --git a/packages/assets/src/canisters/assets.ts b/packages/assets/src/canisters/assets.ts
new file mode 100644
index 000000000..7a69000d3
--- /dev/null
+++ b/packages/assets/src/canisters/assets.ts
@@ -0,0 +1,13 @@
+import { Actor, ActorConfig, ActorSubclass } from '@dfinity/agent';
+import { idlFactory } from './assets_idl';
+import _SERVICE from './assets_service';
+
+export type AssetsCanisterRecord = _SERVICE;
+
+/**
+ * Create an assets canister actor
+ * @param config Configuration to make calls to the Replica.
+ */
+export function getAssetsCanister(config: ActorConfig): ActorSubclass {
+ return Actor.createActor(idlFactory, config);
+}
diff --git a/packages/assets/src/canisters/assets_idl.js b/packages/assets/src/canisters/assets_idl.js
new file mode 100644
index 000000000..4bf539156
--- /dev/null
+++ b/packages/assets/src/canisters/assets_idl.js
@@ -0,0 +1,150 @@
+// Candid interface (IDL) factory for the certified assets canister.
+// NOTE(review): keep in sync with the TypeScript bindings declared in
+// ./assets_service.ts — both describe the same canister interface.
+export const idlFactory = ({ IDL }) => {
+  // Candid `record {}` — empty argument record.
+  const ClearArguments = IDL.Record({});
+  const BatchId = IDL.Nat;
+  const Key = IDL.Text;
+  const CreateAssetArguments = IDL.Record({
+    key: Key,
+    content_type: IDL.Text,
+  });
+  const UnsetAssetContentArguments = IDL.Record({
+    key: Key,
+    content_encoding: IDL.Text,
+  });
+  const DeleteAssetArguments = IDL.Record({ key: Key });
+  const ChunkId = IDL.Nat;
+  const SetAssetContentArguments = IDL.Record({
+    key: Key,
+    sha256: IDL.Opt(IDL.Vec(IDL.Nat8)),
+    chunk_ids: IDL.Vec(ChunkId),
+    content_encoding: IDL.Text,
+  });
+  // One entry per operation kind accepted by commit_batch.
+  const BatchOperationKind = IDL.Variant({
+    CreateAsset: CreateAssetArguments,
+    UnsetAssetContent: UnsetAssetContentArguments,
+    DeleteAsset: DeleteAssetArguments,
+    SetAssetContent: SetAssetContentArguments,
+    Clear: ClearArguments,
+  });
+  // Types below describe the canister's HTTP gateway interface (http_request).
+  const HeaderField = IDL.Tuple(IDL.Text, IDL.Text);
+  const HttpRequest = IDL.Record({
+    url: IDL.Text,
+    method: IDL.Text,
+    body: IDL.Vec(IDL.Nat8),
+    headers: IDL.Vec(HeaderField),
+  });
+  const StreamingCallbackToken = IDL.Record({
+    key: Key,
+    sha256: IDL.Opt(IDL.Vec(IDL.Nat8)),
+    index: IDL.Nat,
+    content_encoding: IDL.Text,
+  });
+  const StreamingCallbackHttpResponse = IDL.Record({
+    token: IDL.Opt(StreamingCallbackToken),
+    body: IDL.Vec(IDL.Nat8),
+  });
+  const StreamingStrategy = IDL.Variant({
+    Callback: IDL.Record({
+      token: StreamingCallbackToken,
+      callback: IDL.Func(
+        [StreamingCallbackToken],
+        [IDL.Opt(StreamingCallbackHttpResponse)],
+        ['query'],
+      ),
+    }),
+  });
+  const HttpResponse = IDL.Record({
+    body: IDL.Vec(IDL.Nat8),
+    headers: IDL.Vec(HeaderField),
+    streaming_strategy: IDL.Opt(StreamingStrategy),
+    status_code: IDL.Nat16,
+  });
+  const Time = IDL.Int;
+  // Service definition: update methods (empty annotation list) and
+  // query methods (annotated with 'query') exposed by the canister.
+  return IDL.Service({
+    authorize: IDL.Func([IDL.Principal], [], []),
+    clear: IDL.Func([ClearArguments], [], []),
+    commit_batch: IDL.Func(
+      [
+        IDL.Record({
+          batch_id: BatchId,
+          operations: IDL.Vec(BatchOperationKind),
+        }),
+      ],
+      [],
+      [],
+    ),
+    create_asset: IDL.Func([CreateAssetArguments], [], []),
+    create_batch: IDL.Func([IDL.Record({})], [IDL.Record({ batch_id: BatchId })], []),
+    create_chunk: IDL.Func(
+      [IDL.Record({ content: IDL.Vec(IDL.Nat8), batch_id: BatchId })],
+      [IDL.Record({ chunk_id: ChunkId })],
+      [],
+    ),
+    delete_asset: IDL.Func([DeleteAssetArguments], [], []),
+    get: IDL.Func(
+      [IDL.Record({ key: Key, accept_encodings: IDL.Vec(IDL.Text) })],
+      [
+        IDL.Record({
+          content: IDL.Vec(IDL.Nat8),
+          sha256: IDL.Opt(IDL.Vec(IDL.Nat8)),
+          content_type: IDL.Text,
+          content_encoding: IDL.Text,
+          total_length: IDL.Nat,
+        }),
+      ],
+      ['query'],
+    ),
+    get_chunk: IDL.Func(
+      [
+        IDL.Record({
+          key: Key,
+          sha256: IDL.Opt(IDL.Vec(IDL.Nat8)),
+          index: IDL.Nat,
+          content_encoding: IDL.Text,
+        }),
+      ],
+      [IDL.Record({ content: IDL.Vec(IDL.Nat8) })],
+      ['query'],
+    ),
+    http_request: IDL.Func([HttpRequest], [HttpResponse], ['query']),
+    http_request_streaming_callback: IDL.Func(
+      [StreamingCallbackToken],
+      [IDL.Opt(StreamingCallbackHttpResponse)],
+      ['query'],
+    ),
+    list: IDL.Func(
+      [IDL.Record({})],
+      [
+        IDL.Vec(
+          IDL.Record({
+            key: Key,
+            encodings: IDL.Vec(
+              IDL.Record({
+                modified: Time,
+                sha256: IDL.Opt(IDL.Vec(IDL.Nat8)),
+                length: IDL.Nat,
+                content_encoding: IDL.Text,
+              }),
+            ),
+            content_type: IDL.Text,
+          }),
+        ),
+      ],
+      ['query'],
+    ),
+    set_asset_content: IDL.Func([SetAssetContentArguments], [], []),
+    store: IDL.Func(
+      [
+        IDL.Record({
+          key: Key,
+          content: IDL.Vec(IDL.Nat8),
+          sha256: IDL.Opt(IDL.Vec(IDL.Nat8)),
+          content_type: IDL.Text,
+          content_encoding: IDL.Text,
+        }),
+      ],
+      [],
+      [],
+    ),
+    unset_asset_content: IDL.Func([UnsetAssetContentArguments], [], []),
+  });
+};
diff --git a/packages/assets/src/canisters/assets_service.ts b/packages/assets/src/canisters/assets_service.ts
new file mode 100644
index 000000000..c3cbc9bbd
--- /dev/null
+++ b/packages/assets/src/canisters/assets_service.ts
@@ -0,0 +1,125 @@
+import type { Principal } from '@dfinity/principal';
+
+export type BatchId = bigint;
+export type BatchOperationKind =
+ | { CreateAsset: CreateAssetArguments }
+ | { UnsetAssetContent: UnsetAssetContentArguments }
+ | { DeleteAsset: DeleteAssetArguments }
+ | { SetAssetContent: SetAssetContentArguments }
+ | { Clear: ClearArguments };
+export type ChunkId = bigint;
+export type ClearArguments = Record;
+
+export interface CreateAssetArguments {
+ key: Key;
+ content_type: string;
+}
+
+export interface DeleteAssetArguments {
+ key: Key;
+}
+
+export type HeaderField = [string, string];
+
+export interface HttpRequest {
+ url: string;
+ method: string;
+ body: Uint8Array;
+ headers: Array;
+}
+
+export interface HttpResponse {
+ body: Uint8Array;
+ headers: Array;
+ streaming_strategy: [] | [StreamingStrategy];
+ status_code: number;
+}
+
+export type Key = string;
+export type Result = { Ok: null } | { Err: string };
+
+export interface SetAssetContentArguments {
+ key: Key;
+ sha256: [] | [Uint8Array];
+ chunk_ids: Array;
+ content_encoding: string;
+}
+
+export interface StreamingCallbackHttpResponse {
+ token: [] | [StreamingCallbackToken];
+ body: Uint8Array;
+}
+
+export interface StreamingCallbackToken {
+ key: Key;
+ sha256: [] | [Uint8Array];
+ index: bigint;
+ content_encoding: string;
+}
+
+export type StreamingStrategy = {
+ Callback: {
+ token: StreamingCallbackToken;
+ callback: [Principal, string];
+ };
+};
+export type Time = bigint;
+
+export interface UnsetAssetContentArguments {
+ key: Key;
+ content_encoding: string;
+}
+
+export default interface _SERVICE {
+ authorize: (arg_0: Principal) => Promise;
+ clear: (arg_0: ClearArguments) => Promise;
+ commit_batch: (arg_0: {
+ batch_id: BatchId;
+ operations: Array;
+ }) => Promise;
+ create_asset: (arg_0: CreateAssetArguments) => Promise;
+ create_batch: (arg_0: Record) => Promise<{ batch_id: BatchId }>;
+ create_chunk: (arg_0: {
+ content: Uint8Array;
+ batch_id: BatchId;
+ }) => Promise<{ chunk_id: ChunkId }>;
+ delete_asset: (arg_0: DeleteAssetArguments) => Promise;
+ get: (arg_0: { key: Key; accept_encodings: Array }) => Promise<{
+ content: Uint8Array;
+ sha256: [] | [Uint8Array];
+ content_type: string;
+ content_encoding: string;
+ total_length: bigint;
+ }>;
+ get_chunk: (arg_0: {
+ key: Key;
+ sha256: [] | [Uint8Array];
+ index: bigint;
+ content_encoding: string;
+ }) => Promise<{ content: Uint8Array }>;
+ http_request: (arg_0: HttpRequest) => Promise;
+ http_request_streaming_callback: (
+ arg_0: StreamingCallbackToken,
+ ) => Promise<[] | [StreamingCallbackHttpResponse]>;
+ list: (arg_0: Record) => Promise<
+ Array<{
+ key: Key;
+ encodings: Array<{
+ modified: Time;
+ sha256: [] | [Uint8Array];
+ length: bigint;
+ content_encoding: string;
+ }>;
+ content_type: string;
+ }>
+ >;
+ set_asset_content: (arg_0: SetAssetContentArguments) => Promise;
+ store: (arg_0: {
+ key: Key;
+ content: Uint8Array;
+ sha256: [] | [Uint8Array];
+ content_type: string;
+ content_encoding: string;
+ }) => Promise;
+ unset_asset_content: (arg_0: UnsetAssetContentArguments) => Promise;
+}
diff --git a/packages/assets/src/index.ts b/packages/assets/src/index.ts
new file mode 100644
index 000000000..2a6cc18c1
--- /dev/null
+++ b/packages/assets/src/index.ts
@@ -0,0 +1,584 @@
+import {
+ Actor,
+ ActorConfig,
+ ActorSubclass,
+ Cbor as cbor,
+ Certificate,
+ compare,
+ getDefaultAgent,
+ HashTree,
+ lookup_path,
+ reconstruct,
+} from '@dfinity/agent';
+import { lebDecode } from '@dfinity/candid';
+import { PipeArrayBuffer } from '@dfinity/candid/lib/cjs/utils/buffer';
+import { AssetsCanisterRecord, getAssetsCanister } from './canisters/assets';
+import { Hasher, sha256 as jsSha256 } from 'js-sha256';
+import { BatchOperationKind } from './canisters/assets_service';
+import * as base64Arraybuffer from 'base64-arraybuffer';
+import { isReadable, Readable } from './readable/readable';
+import { ReadableFile } from './readable/readableFile';
+import { ReadableBlob } from './readable/readableBlob';
+import { ReadablePath } from './readable/readablePath';
+import { ReadableBytes } from './readable/readableBytes';
+import { limit, LimitFn } from './utils/limit';
+import fs from 'fs';
+
+/**
+ * Supported content encodings by asset canister
+ */
+export type ContentEncoding = 'identity' | 'gzip' | 'compress' | 'deflate' | 'br';
+
+/**
+ * Upload progress in bytes
+ */
+export interface Progress {
+ current: number;
+ total: number;
+}
+
+/**
+ * Configuration that can be passed to set and override defaults and add progress callback
+ */
+export interface StoreConfig {
+ /**
+ * File name
+ * @default File object name or name in file path
+ */
+ fileName?: string;
+ /**
+ * File path that file will be uploaded to
+ * @default '/'
+ */
+ path?: string;
+ /**
+ * File content type
+ * @default File/Blob object type or type from file name extension
+ */
+ contentType?: string;
+ /**
+ * Content encoding
+ * @default 'identity'
+ */
+ contentEncoding?: ContentEncoding;
+ /**
+ * File hash generation will be skipped if hash is provided
+ */
+ sha256?: Uint8Array;
+ /**
+ * Callback method to get upload progress in bytes (current / total)
+ */
+ onProgress?: (progress: Progress) => void;
+}
+
+export type StoreReadableArgs = [readable: Readable, config?: StoreConfig];
+
+export type StoreFileArgs = [file: File, config?: StoreConfig];
+
+export type StoreBlobArgs = [
+ blob: Blob,
+ config: Omit & Required>,
+];
+
+export type StorePathArgs = [path: string, config?: StoreConfig];
+
+export type StoreBytesArgs = [
+ bytes: Uint8Array | ArrayBuffer | number[],
+ config: Omit & Required>,
+];
+
+/**
+ * Arguments to store an asset in asset manager
+ */
+export type StoreArgs =
+ | StoreReadableArgs
+ | StoreFileArgs
+ | StoreBlobArgs
+ | StorePathArgs
+ | StoreBytesArgs;
+
+/**
+ * Arguments to commit batch in asset manager
+ */
+export interface CommitBatchArgs {
+ onProgress?: (progress: Progress) => void;
+}
+
+/**
+ * Configuration that can be passed to set the canister id of the
+ * assets canister to be managed, inherits actor configuration and
+ * has additional asset manager specific configuration options.
+ */
+export interface AssetManagerConfig extends ActorConfig {
+ /**
+ * Max number of concurrent requests to the Internet Computer
+ * @default 16
+ */
+ concurrency?: number;
+ /**
+ * Max file size in bytes that the asset manager shouldn't chunk
+ * @default 1900000
+ */
+ maxSingleFileSize?: number;
+ /**
+ * Size of each chunk in bytes when the asset manager has to chunk a file
+ * @default 1900000
+ */
+ maxChunkSize?: number;
+}
+
+export class AssetManager {
+ private readonly _actor: ActorSubclass;
+ private readonly _limit: LimitFn;
+ private readonly _maxSingleFileSize: number;
+ private readonly _maxChunkSize: number;
+
+ /**
+ * Create assets canister manager instance
+ * @param config Additional configuration options, canister id is required
+ */
+ constructor(config: AssetManagerConfig) {
+ const { concurrency, maxSingleFileSize, maxChunkSize, ...actorConfig } = config;
+ this._actor = getAssetsCanister(actorConfig);
+ this._limit = limit(concurrency ?? 16);
+ this._maxSingleFileSize = maxSingleFileSize ?? 1900000;
+ this._maxChunkSize = maxChunkSize ?? 1900000;
+ }
+
+ /**
+ * Create readable from store arguments
+ * @param args Arguments with either a file, blob, path, bytes or custom Readable implementation
+ */
+ static async toReadable(...args: StoreArgs): Promise {
+ if (typeof File === 'function' && args[0] instanceof File) {
+ return new ReadableFile(args[0]);
+ }
+ if (typeof Blob === 'function' && args[0] instanceof Blob && args[1]?.fileName) {
+ return new ReadableBlob(args[1].fileName, args[0]);
+ }
+ if (typeof args[0] === 'string') {
+ return await ReadablePath.create(args[0]);
+ }
+ if (
+ (Array.isArray(args[0]) || args[0] instanceof Uint8Array || args[0] instanceof ArrayBuffer) &&
+ args[1]?.fileName
+ ) {
+ return new ReadableBytes(args[1].fileName, args[0]);
+ }
+ if (isReadable(args[0])) {
+ return args[0];
+ }
+
+ throw new Error('Invalid arguments, readable could not be created');
+ }
+
+ /**
+ * Get list of all files in assets canister
+ * @returns All files in asset canister
+ */
+ public async list(): ReturnType {
+ return this._actor.list({});
+ }
+
+ /**
+ * Store data on assets canister
+ * @param args Arguments with either a file, blob, path, bytes or custom Readable implementation
+ */
+ public async store(...args: StoreArgs): Promise {
+ const readable = await AssetManager.toReadable(...args);
+ const [, config] = args;
+ const key = [config?.path ?? '', config?.fileName ?? readable.fileName].join('/');
+
+ // If asset is small enough upload in one request else upload in chunks (batch)
+ if (readable.length <= this._maxSingleFileSize) {
+ config?.onProgress?.({ current: 0, total: readable.length });
+ await this._limit(async () => {
+ await readable.open();
+ const bytes = await readable.slice(0, readable.length);
+ await readable.close();
+ const sha256 =
+ config?.sha256 ??
+ new Uint8Array(jsSha256.create().update(new Uint8Array(bytes)).arrayBuffer());
+ return this._actor.store({
+ key,
+ content: bytes,
+ content_type: readable.contentType,
+ sha256: [sha256],
+ content_encoding: config?.contentEncoding ?? 'identity',
+ });
+ });
+ config?.onProgress?.({ current: readable.length, total: readable.length });
+ } else {
+ // Create batch to upload asset in chunks
+ const batch = this.batch();
+ await batch.store(readable, config);
+ await batch.commit();
+ }
+
+ return key;
+ }
+
+ /**
+ * Delete file from assets canister
+ * @param key The path to the file on the assets canister e.g. /folder/to/my_file.txt
+ */
+ public async delete(key: string): Promise {
+ await this._actor.delete_asset({ key });
+ }
+
+ /**
+ * Delete all files from assets canister
+ */
+ public async clear(): Promise {
+ await this._actor.clear({});
+ }
+
+ /**
+ * Get asset instance from assets canister
+ * @param key The path to the file on the assets canister e.g. /folder/to/my_file.txt
+ * @param acceptEncodings The accepted content encodings, defaults to ['identity']
+ */
+ public async get(key: string, acceptEncodings?: ContentEncoding[]): Promise {
+ const data = await this._actor.get({
+ key,
+ accept_encodings: acceptEncodings ?? ['identity'],
+ });
+
+ return new Asset(
+ this._actor,
+ this._limit,
+ this._maxSingleFileSize,
+ this._maxChunkSize,
+ key,
+ acceptEncodings ?? ['identity'],
+ data.content,
+ data.content_type,
+ Number(data.total_length),
+ data.content_encoding,
+ data.content.length,
+ data.sha256[0],
+ );
+ }
+
+ /**
+ * Create a batch assets operations instance, commit multiple operations in a single request
+ */
+ public batch(): AssetManagerBatch {
+ return new AssetManagerBatch(this._actor, this._limit, this._maxChunkSize);
+ }
+}
+
+class AssetManagerBatch {
+ private _scheduledOperations: Array<
+ (batch_id: bigint, onProgress?: (progress: Progress) => void) => Promise
+ > = [];
+ private _sha256: { [key: string]: Hasher } = {};
+ private _progress: { [key: string]: Progress } = {};
+
+ constructor(
+ private readonly _actor: ActorSubclass,
+ private readonly _limit: LimitFn,
+ private readonly _maxChunkSize: number,
+ ) {}
+
+ /**
+ * Insert batch operation to store data on assets canister
+ * @param args Arguments with either a file, blob, path, bytes or custom Readable implementation
+ */
+ public async store(...args: StoreArgs): Promise {
+ const readable = await AssetManager.toReadable(...args);
+ const [, config] = args;
+ const key = [config?.path ?? '', config?.fileName ?? readable.fileName].join('/');
+ if (!config?.sha256) {
+ this._sha256[key] = jsSha256.create();
+ }
+ this._progress[key] = { current: 0, total: readable.length };
+ config?.onProgress?.(this._progress[key]);
+ this._scheduledOperations.push(async (batch_id, onProgress) => {
+ await readable.open();
+ const chunkCount = Math.ceil(readable.length / this._maxChunkSize);
+ const chunkIds: bigint[] = await Promise.all(
+ Array.from({ length: chunkCount }).map(async (_, index) => {
+ const content = await readable.slice(
+ index * this._maxChunkSize,
+ Math.min((index + 1) * this._maxChunkSize, readable.length),
+ );
+ if (!config?.sha256) {
+ this._sha256[key].update(content);
+ }
+ const { chunk_id } = await this._limit(() =>
+ this._actor.create_chunk({
+ content,
+ batch_id,
+ }),
+ );
+ this._progress[key].current += content.length;
+ config?.onProgress?.(this._progress[key]);
+ onProgress?.({
+ current: Object.values(this._progress).reduce((acc, val) => acc + val.current, 0),
+ total: Object.values(this._progress).reduce((acc, val) => acc + val.total, 0),
+ });
+
+ return chunk_id;
+ }),
+ );
+ await readable.close();
+ return [
+ {
+ CreateAsset: { key, content_type: config?.contentType ?? readable.contentType },
+ },
+ {
+ SetAssetContent: {
+ key,
+ sha256: [config?.sha256 ?? new Uint8Array(this._sha256[key].arrayBuffer())],
+ chunk_ids: chunkIds,
+ content_encoding: config?.contentEncoding ?? 'identity',
+ },
+ },
+ ];
+ });
+ return key;
+ }
+
+ /**
+ * Insert batch operation to delete file from assets canister
+ * @param key The path to the file on the assets canister e.g. /folder/to/my_file.txt
+ */
+ public delete(key: string): void {
+ this._scheduledOperations.push(async () => [{ DeleteAsset: { key } }]);
+ }
+
+ /**
+ * Commit all batch operations to assets canister
+ * @param args Optional arguments with optional progress callback for commit progress
+ */
+ public async commit(args?: CommitBatchArgs): Promise {
+ // Create batch
+ const { batch_id } = await this._limit(() => this._actor.create_batch({}));
+
+ // Progress callback
+ args?.onProgress?.({
+ current: Object.values(this._progress).reduce((acc, val) => acc + val.current, 0),
+ total: Object.values(this._progress).reduce((acc, val) => acc + val.total, 0),
+ });
+
+ // Execute scheduled operations
+ const operations = (
+ await Promise.all(
+ this._scheduledOperations.map(scheduled_operation =>
+ scheduled_operation(batch_id, args?.onProgress),
+ ),
+ )
+ ).flat();
+
+ // Commit batch
+ await this._limit(() => this._actor.commit_batch({ batch_id, operations }));
+
+ // Cleanup
+ this._scheduledOperations = [];
+ this._sha256 = {};
+ this._progress = {};
+ }
+}
+
+class Asset {
+ constructor(
+ private readonly _actor: ActorSubclass,
+ private readonly _limit: LimitFn,
+ private readonly _maxSingleFileSize: number,
+ private readonly _maxChunkSize: number,
+ private readonly _key: string,
+ private readonly _acceptEncodings: ContentEncoding[],
+ private readonly _content: Uint8Array,
+ public readonly contentType: string,
+ public readonly length: number,
+ public readonly contentEncoding: string,
+ public readonly chunkSize: number,
+ public readonly sha256?: Uint8Array,
+ ) {}
+
+ /**
+ * Get asset content as blob (web), most browsers are able to use disk storage for larger blobs
+ */
+ public async toBlob(): Promise {
+ const blobs = Array.from({ length: Math.ceil(this.length / this.chunkSize) });
+ await this.getChunks((index, chunk) => (blobs[index] = new Blob([chunk])));
+ return new Blob([...blobs]);
+ }
+
+ /**
+ * Get asset content as unsigned 8-bit integer array, use `toBlob` (web) or `write` (Node.js) for larger files
+ */
+ public async toUint8Array(): Promise {
+ const bytes = new Uint8Array(this.length);
+ await this.getChunks((index, chunk) => bytes.set(chunk, index * this.chunkSize));
+ return bytes;
+ }
+
+ /**
+ * Get asset content as number array, use `toBlob` (web) or `write` (Node.js) for larger files
+ */
+ public async toNumberArray(): Promise {
+ const chunks = Array.from({ length: Math.ceil(this.length / this.chunkSize) });
+ await this.getChunks((index, chunk) => (chunks[index] = Array.from(chunk)));
+ return chunks.flat();
+ }
+
+ /**
+ * Write asset content to file (Node.js)
+ * @param path File path to write to
+ */
+ public async write(path: string): Promise {
+ const fd = await new Promise((resolve, reject) =>
+ fs.open(path, 'w', (err: unknown, fd: number) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve(fd);
+ }),
+ );
+ await this.getChunks(
+ (index, chunk) =>
+ new Promise((resolve, reject) =>
+ fs.write(fd, chunk, 0, chunk.length, index * this.chunkSize, (err: unknown) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ }),
+ ),
+ );
+ await new Promise(resolve => fs.close(fd, () => resolve()));
+ }
+
+ /**
+ * Get All chunks of asset through `onChunk` callback, can be used for a custom storage implementation
+ * @param onChunk Called on each received chunk
+ * @param sequential Chunks are received in sequential order when true or `concurrency` is `1` in config
+ */
+ public async getChunks(
+ onChunk: (index: number, chunk: Uint8Array) => void,
+ sequential?: boolean,
+ ) {
+ onChunk(0, this._content);
+ const chunkLimit = sequential ? limit(1) : this._limit;
+ await Promise.all(
+ Array.from({ length: Math.ceil(this.length / this.chunkSize) - 1 }).map((_, index) =>
+ chunkLimit(async () => {
+ const { content } = await this._actor.get_chunk({
+ key: this._key,
+ content_encoding: this.contentEncoding,
+ index: BigInt(index + 1),
+ sha256: this.sha256 ? [this.sha256] : [],
+ });
+ onChunk(index + 1, content);
+ }),
+ ),
+ );
+ }
+
+ /**
+ * Check if asset has been certified, which means that the content's hash is in the canister hash tree
+ */
+ public async isCertified(): Promise {
+ // Below implementation is based on Internet Computer service worker
+ const agent = Actor.agentOf(this._actor) ?? getDefaultAgent();
+ const canisterId = Actor.canisterIdOf(this._actor);
+
+ if (!agent.rootKey) {
+ throw Error('Agent is missing root key');
+ }
+
+ const response = await this._limit(() =>
+ this._actor.http_request({
+ method: 'get',
+ url: this._key,
+ headers: [['Accept-Encoding', this._acceptEncodings.join(', ')]],
+ body: new Uint8Array(),
+ }),
+ );
+
+ let certificate: ArrayBuffer | undefined;
+ let tree: ArrayBuffer | undefined;
+ const certificateHeader = response.headers.find(
+ ([key]) => key.trim().toLowerCase() === 'ic-certificate',
+ );
+ if (!certificateHeader) {
+ return false;
+ }
+ const fields = certificateHeader[1].split(/,/);
+ for (const f of fields) {
+ const [, name, b64Value] = [...(f.match(/^(.*)=:(.*):$/) ?? [])].map(x => x.trim());
+ const value = base64Arraybuffer.decode(b64Value);
+ if (name === 'certificate') {
+ certificate = value;
+ } else if (name === 'tree') {
+ tree = value;
+ }
+ }
+
+ if (!certificate || !tree) {
+ // No certificate or tree in response header
+ return false;
+ }
+
+ const cert = await Certificate.create({
+ certificate: new Uint8Array(certificate),
+ rootKey: agent.rootKey,
+ canisterId,
+ }).catch(() => Promise.resolve());
+
+ if (!cert) {
+ // Certificate is not valid
+ return false;
+ }
+
+ // Check certificate time
+ const decodedTime = lebDecode(new PipeArrayBuffer(cert.lookup(['time'])));
+ const certTime = Number(decodedTime / BigInt(1_000_000)); // Convert from nanos to millis
+ const now = Date.now();
+ const maxCertTimeOffset = 300_000; // 5 min
+ if (certTime - maxCertTimeOffset > now || certTime + maxCertTimeOffset < now) {
+ return false;
+ }
+
+ const hashTree: HashTree = cbor.decode(new Uint8Array(tree));
+ const reconstructed = await reconstruct(hashTree);
+ const witness = cert.lookup(['canister', canisterId.toUint8Array(), 'certified_data']);
+
+ if (!witness) {
+ // Could not find certified data for this canister in the certificate
+ return false;
+ }
+
+ // First validate that the Tree is as good as the certification
+ if (compare(witness, reconstructed) !== 0) {
+ // Witness != Tree passed in ic-certification
+ return false;
+ }
+
+ // Lookup hash of asset in tree
+ const treeSha = lookup_path(['http_assets', this._key], hashTree);
+
+ return !!treeSha && !!this.sha256 && compare(this.sha256.buffer, treeSha) === 0;
+ }
+
+ /**
+ * Check if the hash of the asset data is equal to the hash that has been certified
+ * @param bytes Optionally pass data to hash instead of waiting for asset data to be fetched and hashed
+ */
+ public async verifySha256(bytes?: Uint8Array | number[]): Promise {
+ if (!this.sha256?.buffer) {
+ return false;
+ }
+ const sha256 = jsSha256.create();
+ if (bytes) {
+ sha256.update(Array.isArray(bytes) ? new Uint8Array(bytes) : bytes);
+ } else {
+ await this.getChunks((_, chunk) => sha256.update(chunk), true);
+ }
+ return compare(this.sha256.buffer, sha256.arrayBuffer()) === 0;
+ }
+}
diff --git a/packages/assets/src/readable/readable.ts b/packages/assets/src/readable/readable.ts
new file mode 100644
index 000000000..dbcc07567
--- /dev/null
+++ b/packages/assets/src/readable/readable.ts
@@ -0,0 +1,25 @@
+export interface Readable {
+ fileName: string;
+ contentType: string;
+ length: number;
+  open: () => Promise<void>;
+  close: () => Promise<void>;
+  slice: (start: number, end: number) => Promise<Uint8Array>;
+}
+
+const isObjWithKeys = <T extends string>(
+  obj: unknown,
+  ...keys: Array<T>
+): obj is Record<T, unknown> =>
+ obj !== null &&
+ typeof obj === 'object' &&
+ keys.every(key => key !== null && key !== undefined && key in obj);
+
+export const isReadable = (value: unknown): value is Readable =>
+ isObjWithKeys(value, 'fileName', 'contentType', 'length', 'open', 'close', 'slice') &&
+ typeof value.fileName === 'string' &&
+ typeof value.contentType === 'string' &&
+ typeof value.length === 'number' &&
+ typeof value.open === 'function' &&
+ typeof value.close === 'function' &&
+ typeof value.slice === 'function';
diff --git a/packages/assets/src/readable/readableBlob.test.ts b/packages/assets/src/readable/readableBlob.test.ts
new file mode 100644
index 000000000..5b6cdac74
--- /dev/null
+++ b/packages/assets/src/readable/readableBlob.test.ts
@@ -0,0 +1,18 @@
+import { ReadableBlob } from './readableBlob';
+
+describe('ReadableBlob', () => {
+ test('ReadableBlob from Blob', async () => {
+ const blob = new Blob(['Hello world!']);
+ const fileName = 'hello.txt';
+ const readable = new ReadableBlob(fileName, blob);
+
+ expect(readable.fileName).toEqual(fileName);
+ expect(readable.contentType).toEqual('text/plain');
+ expect(readable.length).toEqual(blob.size);
+ await readable.open();
+ expect(await readable.slice(1, 4)).toEqual(
+ new Uint8Array(await blob.slice(1, 4).arrayBuffer()),
+ );
+ await readable.close();
+ });
+});
diff --git a/packages/assets/src/readable/readableBlob.ts b/packages/assets/src/readable/readableBlob.ts
new file mode 100644
index 000000000..e21f44908
--- /dev/null
+++ b/packages/assets/src/readable/readableBlob.ts
@@ -0,0 +1,32 @@
+import { Readable } from './readable';
+import mime from 'mime/lite';
+
+export class ReadableBlob implements Readable {
+ public readonly fileName: string;
+ private readonly _blob: Blob;
+
+ constructor(fileName: string, blob: Blob) {
+ this.fileName = fileName;
+ this._blob = blob;
+ }
+
+ public get contentType(): string {
+ return this._blob.type || (mime.getType(this.fileName) ?? 'application/octet-stream');
+ }
+
+ public get length(): number {
+ return this._blob.size;
+ }
+
+  async open(): Promise<void> {
+ return Promise.resolve();
+ }
+
+  async close(): Promise<void> {
+ return Promise.resolve();
+ }
+
+  async slice(start: number, end: number): Promise<Uint8Array> {
+ return new Uint8Array(await this._blob.slice(start, end).arrayBuffer());
+ }
+}
diff --git a/packages/assets/src/readable/readableBytes.test.ts b/packages/assets/src/readable/readableBytes.test.ts
new file mode 100644
index 000000000..1f76b7f15
--- /dev/null
+++ b/packages/assets/src/readable/readableBytes.test.ts
@@ -0,0 +1,35 @@
+import { ReadableBytes } from './readableBytes';
+
+const transparentPixelGif = [
+ 71, 73, 70, 56, 57, 97, 1, 0, 1, 0, 0, 0, 0, 33, 249, 4, 1, 0, 0, 0, 0, 44, 0, 0, 0, 0, 1, 0, 1,
+ 0, 0, 2, 1, 0, 0,
+];
+
+describe('ReadableBytes', () => {
+ test('ReadableBytes from Uint8Array', async () => {
+ const uint8Array = Uint8Array.from(transparentPixelGif);
+ const fileName = 'transparent_pixel.gif';
+ const readable = new ReadableBytes(fileName, Uint8Array.from(transparentPixelGif));
+
+ expect(readable.fileName).toEqual(fileName);
+ expect(readable.contentType).toEqual('image/gif');
+ expect(readable.length).toEqual(uint8Array.length);
+ await readable.open();
+ expect(await readable.slice(16, 24)).toEqual(uint8Array.slice(16, 24));
+ await readable.close();
+ });
+
+ test('ReadableBytes from number[]', async () => {
+ const fileName = 'transparent_pixel.gif';
+ const readable = new ReadableBytes(fileName, transparentPixelGif);
+
+ expect(readable.fileName).toEqual(fileName);
+ expect(readable.contentType).toEqual('image/gif');
+ expect(readable.length).toEqual(transparentPixelGif.length);
+ await readable.open();
+ expect(await readable.slice(16, 24)).toEqual(
+ Uint8Array.from(transparentPixelGif.slice(16, 24)),
+ );
+ await readable.close();
+ });
+});
diff --git a/packages/assets/src/readable/readableBytes.ts b/packages/assets/src/readable/readableBytes.ts
new file mode 100644
index 000000000..845b319f5
--- /dev/null
+++ b/packages/assets/src/readable/readableBytes.ts
@@ -0,0 +1,32 @@
+import { Readable } from './readable';
+import mime from 'mime/lite';
+
+export class ReadableBytes implements Readable {
+ public readonly fileName: string;
+ private readonly _bytes: Uint8Array;
+
+ constructor(fileName: string, bytes: Uint8Array | ArrayBuffer | number[]) {
+ this.fileName = fileName;
+ this._bytes = bytes instanceof Uint8Array ? bytes : new Uint8Array(bytes);
+ }
+
+ public get contentType(): string {
+ return mime.getType(this.fileName) ?? 'application/octet-stream';
+ }
+
+ public get length(): number {
+ return this._bytes.byteLength;
+ }
+
+  public async open(): Promise<void> {
+ return Promise.resolve();
+ }
+
+  public async close(): Promise<void> {
+ return Promise.resolve();
+ }
+
+  public async slice(start: number, end: number): Promise<Uint8Array> {
+ return this._bytes.slice(start, end);
+ }
+}
diff --git a/packages/assets/src/readable/readableFile.test.ts b/packages/assets/src/readable/readableFile.test.ts
new file mode 100644
index 000000000..4e84d9951
--- /dev/null
+++ b/packages/assets/src/readable/readableFile.test.ts
@@ -0,0 +1,17 @@
+import { ReadableFile } from './readableFile';
+
+describe('ReadableFile', () => {
+ test('ReadableFile from File', async () => {
+ const file = new File(['Hello world!'], 'hello.txt');
+ const readable = new ReadableFile(file);
+
+ expect(readable.fileName).toEqual(file.name);
+ expect(readable.contentType).toEqual('text/plain');
+ expect(readable.length).toEqual(file.size);
+ await readable.open();
+ expect(await readable.slice(1, 4)).toEqual(
+ new Uint8Array(await file.slice(1, 4).arrayBuffer()),
+ );
+ await readable.close();
+ });
+});
diff --git a/packages/assets/src/readable/readableFile.ts b/packages/assets/src/readable/readableFile.ts
new file mode 100644
index 000000000..9ff1a9388
--- /dev/null
+++ b/packages/assets/src/readable/readableFile.ts
@@ -0,0 +1,33 @@
+import { Readable } from './readable';
+
+export class ReadableFile implements Readable {
+ private readonly _file: File;
+
+ constructor(file: File) {
+ this._file = file;
+ }
+
+ public get fileName(): string {
+ return this._file.name;
+ }
+
+ public get contentType(): string {
+ return this._file.type;
+ }
+
+ public get length(): number {
+ return this._file.size;
+ }
+
+  public async open(): Promise<void> {
+ return Promise.resolve();
+ }
+
+  public async close(): Promise<void> {
+ return Promise.resolve();
+ }
+
+  public async slice(start: number, end: number): Promise<Uint8Array> {
+ return new Uint8Array(await this._file.slice(start, end).arrayBuffer());
+ }
+}
diff --git a/packages/assets/src/readable/readablePath.test.ts b/packages/assets/src/readable/readablePath.test.ts
new file mode 100644
index 000000000..fc64d66e6
--- /dev/null
+++ b/packages/assets/src/readable/readablePath.test.ts
@@ -0,0 +1,19 @@
+import { basename, resolve } from 'path';
+import { ReadablePath } from './readablePath';
+import { readFileSync, statSync } from 'fs';
+
+describe('ReadablePath', () => {
+ test('ReadablePath from path', async () => {
+ const path = resolve(__dirname, '../../package.json');
+ const readable = await ReadablePath.create(path);
+
+ expect(readable.fileName).toEqual(basename(path));
+ expect(readable.contentType).toEqual('application/json');
+ expect(readable.length).toEqual(statSync(path).size);
+ await readable.open();
+ expect(await readable.slice(16, 24)).toEqual(
+ new Uint8Array(readFileSync(path).subarray(16, 24)),
+ );
+ await readable.close();
+ });
+});
diff --git a/packages/assets/src/readable/readablePath.ts b/packages/assets/src/readable/readablePath.ts
new file mode 100644
index 000000000..496a8a9fb
--- /dev/null
+++ b/packages/assets/src/readable/readablePath.ts
@@ -0,0 +1,89 @@
+import { Readable } from './readable';
+import mime from 'mime/lite';
+import fs from 'fs';
+import path from 'path';
+
+export class ReadablePath implements Readable {
+ private readonly _path: string;
+ private readonly _size: number;
+ private _fd?: number;
+
+ protected constructor(path: string, size: number) {
+ this._path = path;
+ this._size = size;
+ }
+
+ public get fileName(): string {
+ return path.basename(this._path);
+ }
+
+ public get contentType(): string {
+ return mime.getType(this.fileName) ?? 'application/octet-stream';
+ }
+
+ public get length(): number {
+ return this._size;
+ }
+
+  static async create(path: string): Promise<ReadablePath> {
+ return new Promise((resolve, reject) => {
+ fs.stat(path, (err, stats) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve(new ReadablePath(path, stats.size));
+ });
+ });
+ }
+
+  public async open(): Promise<void> {
+ return new Promise((resolve, reject) => {
+ if (this._fd !== undefined) {
+ reject('File is already open');
+ return;
+ }
+ fs.open(this._path, (err, fd) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ this._fd = fd;
+ resolve();
+ });
+ });
+ }
+
+  public async close(): Promise<void> {
+ return new Promise((resolve, reject) => {
+ if (this._fd === undefined) {
+ reject('No open file handle found');
+ return;
+ }
+ fs.close(this._fd, err => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ }
+
+  public async slice(start: number, end: number): Promise<Uint8Array> {
+ return new Promise((resolve, reject) => {
+ if (this._fd === undefined) {
+ reject('No open file handle found');
+ return;
+ }
+ const buffer = Buffer.alloc(end - start);
+ fs.read(this._fd, buffer, 0, end - start, start, (err: unknown) => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve(new Uint8Array(buffer));
+ });
+ });
+ }
+}
diff --git a/packages/assets/src/utils/browserShim.ts b/packages/assets/src/utils/browserShim.ts
new file mode 100644
index 000000000..94ed946cc
--- /dev/null
+++ b/packages/assets/src/utils/browserShim.ts
@@ -0,0 +1,2 @@
+// Export empty object as shim for node `fs` and `path`
+export default {};
diff --git a/packages/assets/src/utils/limit.ts b/packages/assets/src/utils/limit.ts
new file mode 100644
index 000000000..f000453be
--- /dev/null
+++ b/packages/assets/src/utils/limit.ts
@@ -0,0 +1,32 @@
+/**
+ * Minimal promise executor with concurrency limit implementation
+ * @param concurrency Maximum number of promises executed concurrently
+ */
+export const limit = (concurrency: number) => {
+ const queue: Array<{
+    fn: () => Promise<unknown>;
+    resolve: (value: unknown | PromiseLike<unknown>) => void;
+ reject: (reason: unknown) => void;
+ }> = [];
+ let active = 0;
+ const next = () => {
+ if (active < concurrency && queue.length > 0) {
+ active++;
+ const { fn, resolve, reject } = queue.shift() ?? {};
+ fn?.()
+ .then(resolve)
+ .catch(reject)
+ .then(() => {
+ active--;
+ next();
+ });
+ }
+ };
+  return <T>(fn: () => Promise<T>) =>
+ new Promise((resolve, reject) => {
+ queue.push({ fn, resolve, reject });
+ next();
+    }) as Promise<T>;
+};
+
+export type LimitFn = ReturnType<typeof limit>;
diff --git a/packages/assets/test-setup.ts b/packages/assets/test-setup.ts
new file mode 100644
index 000000000..03ffb68de
--- /dev/null
+++ b/packages/assets/test-setup.ts
@@ -0,0 +1,24 @@
+// This file may be used to polyfill features that aren't available in the test
+// environment, i.e. JSDom.
+//
+// We sometimes need to do this because our target browsers are expected to have
+// a feature that JSDom doesn't.
+//
+// Note that we can use webpack configuration to make some features available to
+// Node.js in a similar way.
+
+import mime from 'mime-types';
+
+global.crypto = require('@peculiar/webcrypto');
+global.TextEncoder = require('text-encoding').TextEncoder;
+global.TextDecoder = require('text-encoding').TextDecoder;
+global.MessageChannel = require('worker_threads').MessageChannel;
+global.Blob = require('@web-std/file').Blob;
+// @ts-ignore File polyfill with additional mime type polyfill
+global.File = class FilePolyfill extends require('@web-std/file').File {
+ constructor(init: BlobPart[], name?: string, options?: FilePropertyBag | undefined) {
+ super(init, name, options);
+ this._type = mime.lookup(name) || 'application/octet-stream';
+ }
+};
+require('whatwg-fetch');
diff --git a/packages/assets/tsconfig-cjs.json b/packages/assets/tsconfig-cjs.json
new file mode 100644
index 000000000..945c51f27
--- /dev/null
+++ b/packages/assets/tsconfig-cjs.json
@@ -0,0 +1,7 @@
+{
+ "extends": "./tsconfig.json",
+ "compilerOptions": {
+ "module": "CommonJS",
+ "outDir": "./lib/cjs"
+ }
+}
diff --git a/packages/assets/tsconfig.json b/packages/assets/tsconfig.json
new file mode 100644
index 000000000..a9576c98f
--- /dev/null
+++ b/packages/assets/tsconfig.json
@@ -0,0 +1,23 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "allowJs": true,
+ "baseUrl": "./",
+ "composite": true,
+ "declaration": true,
+ "esModuleInterop": true,
+ "forceConsistentCasingInFileNames": true,
+ "incremental": true,
+ "module": "ES2020",
+ "moduleResolution": "node",
+ "outDir": "./lib/esm",
+ "resolveJsonModule": true,
+ "rootDir": "./src",
+ "sourceMap": true,
+ "inlineSources": true,
+ "strict": true,
+ "target": "es2017"
+ },
+ "include": ["types/*", "src/**/*", "client-test"],
+ "references": [{ "path": "../agent" }]
+}