11 changes: 7 additions & 4 deletions packages/react-storage/package.json
@@ -34,6 +34,7 @@
   "scripts": {
     "build": "yarn build:rollup",
     "build:rollup": "rollup --config",
+    "build:watch": "rollup -c -w",
     "check:esm": "node --input-type=module --eval 'import \"@aws-amplify/ui-react-storage\"'",
     "clean": "rimraf dist node_modules",
     "dev": "yarn build:rollup --watch",
@@ -48,15 +49,17 @@
"@aws-amplify/ui": "6.12.1",
"@aws-amplify/ui-react": "6.13.1",
"@aws-amplify/ui-react-core": "3.4.6",
"tslib": "^2.5.2"
"tslib": "^2.5.2",
"jszip":"^3.10.1"
},
"peerDependencies": {
"aws-amplify": "^6.14.3",
"react": "^16.14 || ^17 || ^18 || ^19",
"react-dom": "^16.14 || ^17 || ^18 || ^19"
},
"devDependencies": {
"@types/node": "^18.19.50"
"@types/node": "^18.19.50",
"node-fetch":"~3.3.2"
},
"sideEffects": [
"dist/**/*.css"
Expand All @@ -66,7 +69,7 @@
"name": "createStorageBrowser",
"path": "dist/esm/browser.mjs",
"import": "{ createStorageBrowser }",
"limit": "70 kB",
"limit": "93.9 kB",
"ignore": [
"@aws-amplify/storage"
]
Expand All @@ -75,7 +78,7 @@
"name": "StorageBrowser",
"path": "dist/esm/index.mjs",
"import": "{ StorageBrowser }",
"limit": "92 kB"
"limit": "116.1 kB"
},
{
"name": "FileUploader",
@@ -1,7 +1,7 @@
 import { copyHandler } from '../copy';
 import { createFolderHandler } from '../createFolder';
 import { deleteHandler } from '../delete';
-import { downloadHandler } from '../download';
+import { zipDownloadHandler } from '../zipdownload';
 import { listLocationItemsHandler } from '../listLocationItems';
 import { uploadHandler } from '../upload';
 import { defaultHandlers } from '../defaults';
@@ -12,7 +12,7 @@ describe('defaultHandlers', () => {
       copy: copyHandler,
       createFolder: createFolderHandler,
       delete: deleteHandler,
-      download: downloadHandler,
+      download: zipDownloadHandler,
       listLocationItems: listLocationItemsHandler,
       upload: uploadHandler,
     });
@@ -0,0 +1,100 @@
import { getUrl, GetUrlInput } from '../../../storage-internal';

import { zipDownloadHandler } from '../zipdownload';
import type { DownloadHandlerInput } from '../download';

jest.mock('../../../storage-internal');

const baseInput: DownloadHandlerInput = {
  config: {
    accountId: 'accountId',
    bucket: 'bucket',
    credentials: jest.fn(),
    customEndpoint: 'mock-endpoint',
    region: 'region',
  },
  data: {
    id: 'id',
    key: 'prefix/file-name',
    fileKey: 'file-name',
  },
};

describe('zipDownloadHandler', () => {
  const url = new URL('mock://fake.url');
  const mockGetUrl = jest.mocked(getUrl);

  beforeAll(() => {
    if (!globalThis.fetch) {
      globalThis.fetch = jest.fn(() => {
        return Promise.resolve({
          headers: {
            get: (header: string) => {
              if (header === 'content-length') {
                return 100;
              }
            },
          },
          body: {
            getReader: jest.fn(() => {
              return {
                read: () => ({
                  value: 100,
                  done: true,
                }),
              };
            }),
          },
        }) as unknown as Promise<Response>;
      });
    }
  });

  beforeEach(() => {
    const expiresAt = new Date();
    expiresAt.setDate(expiresAt.getDate() + 1);
    mockGetUrl.mockResolvedValue({ expiresAt, url });
  });

  afterEach(() => {
    mockGetUrl.mockReset();
  });

  it('calls `getUrl` with the expected values', () => {
    zipDownloadHandler(baseInput);

    const expected: GetUrlInput = {
      path: baseInput.data.key,
      options: {
        bucket: {
          bucketName: baseInput.config.bucket,
          region: baseInput.config.region,
        },
        customEndpoint: baseInput.config.customEndpoint,
        locationCredentialsProvider: baseInput.config.credentials,
        validateObjectExistence: true,
        contentDisposition: 'attachment',
        expectedBucketOwner: baseInput.config.accountId,
      },
    };

    expect(mockGetUrl).toHaveBeenCalledWith(expected);
  });

  it('returns a complete status', async () => {
    const { result } = zipDownloadHandler(baseInput);

    expect(await result).toEqual({ status: 'COMPLETE' });
  });

  it('returns failed status', async () => {
    const error = new Error('No download!');
    mockGetUrl.mockRejectedValue(error);
    const { result } = zipDownloadHandler(baseInput);

    expect(await result).toEqual({
      error,
      message: error.message,
      status: 'FAILED',
    });
  });
});
@@ -5,7 +5,7 @@ import { createFolderHandler } from './createFolder';
 import type { DeleteHandler } from './delete';
 import { deleteHandler } from './delete';
 import type { DownloadHandler } from './download';
-import { downloadHandler } from './download';
+import { zipDownloadHandler } from './zipdownload';
 import type { ListLocationItemsHandler } from './listLocationItems';
 import { listLocationItemsHandler } from './listLocationItems';
 import type { UploadHandler } from './upload';
@@ -24,7 +24,7 @@ export const defaultHandlers: DefaultHandlers = {
   copy: copyHandler,
   createFolder: createFolderHandler,
   delete: deleteHandler,
-  download: downloadHandler,
+  download: zipDownloadHandler,
   listLocationItems: listLocationItemsHandler,
   upload: uploadHandler,
 };
@@ -38,7 +38,7 @@ function downloadFromUrl(fileName: string, url: string) {
   document.body.removeChild(a);
 }

-export const downloadHandler: DownloadHandler = ({ config, data }) => {
+export const downloadHandler: DownloadHandler = ({ config, data }): DownloadHandlerOutput => {
   const { accountId, credentials, customEndpoint } = config;
   const { key } = data;

@@ -53,14 +53,14 @@ export const downloadHandler: DownloadHandler = ({ config, data }) => {
       expectedBucketOwner: accountId,
     },
   })
-    .then(({ url }) => {
-      downloadFromUrl(key, url.toString());
-      return { status: 'COMPLETE' as const, value: { url } };
-    })
-    .catch((error: Error) => {
-      const { message } = error;
-      return { error, message, status: 'FAILED' as const };
-    });
+    .then(({ url }) => {
+      downloadFromUrl(key, url.toString());
+      return { status: 'COMPLETE' as const, value: { url } };
+    })
+    .catch((error: Error) => {
+      const { message } = error;
+      return { error, message, status: 'FAILED' as const };
+    });

   return { result };
Contributor:

Do we need to define the cancel callback here?

  return {
    result,
    cancel: () => {
      //cancel download
    },
  };

};
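
On the cancel question above: the new zipDownloadHandler added later in this diff takes exactly that route, creating an AbortController per task and aborting the in-flight fetch from cancel. Below is a minimal, self-contained sketch of that pattern; the input and output shapes are simplified stand-ins, not the package's real DownloadHandler types.

// Sketch only: SketchInput/SketchOutput are simplified stand-ins for the real
// handler types; the point is wiring `cancel` to an AbortController.
interface SketchInput {
  url: string;
}
interface SketchOutput {
  result: Promise<{ status: 'COMPLETE' | 'FAILED'; error?: Error }>;
  cancel: () => void;
}

const cancellableDownload = ({ url }: SketchInput): SketchOutput => {
  const abortController = new AbortController();

  const result = fetch(url, { signal: abortController.signal })
    .then(() => ({ status: 'COMPLETE' as const }))
    // An aborted fetch rejects, so cancellation surfaces as a FAILED result.
    .catch((error: Error) => ({ status: 'FAILED' as const, error }));

  return {
    result,
    // Aborting interrupts the in-flight request.
    cancel: () => abortController.abort(),
  };
};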
@@ -3,6 +3,7 @@ export * from './createFolder';
 export * from './defaults';
 export * from './delete';
 export * from './download';
+export * from './zipdownload';
 export * from './listLocationItems';
 export * from './listLocations';
 export * from './upload';
@@ -0,0 +1,174 @@
import { getUrl } from '../../storage-internal';
import type { DownloadHandler, DownloadHandlerInput } from './download';
import type { TaskResult, TaskResultStatus } from './types';
import { isFunction } from '@aws-amplify/ui';
import { getProgress } from './utils';
import JSZip from 'jszip';

type DownloadTaskResult = TaskResult<TaskResultStatus, { url: URL }>;

interface MyZipper {
  addFile: (file: Blob, name: string) => Promise<void>;
  getBlobUrl: (
    onProgress?: (percent: number, key: string) => void
  ) => Promise<string>;
}
const zipper: MyZipper = (() => {
  let zip: JSZip | null = null;
  return {
    addFile: (file, name) => {
      if (!zip) {
        zip = new JSZip();
      }
      return new Promise((ok, no) => {
        try {
          zip?.file(name, file);
          ok();
        } catch (e) {
          no();
        }
      });
    },
    getBlobUrl: async (onProgress) => {
      if (!zip) {
        throw new Error('no zip');
      }
      const blob = await zip.generateAsync(
        {
          type: 'blob',
          streamFiles: true,
          compression: 'DEFLATE',
          compressionOptions: {
            level: 3,
          },
        },
        ({ percent, currentFile }) => {
          if (isFunction(onProgress) && currentFile) {
            onProgress(percent, currentFile);
          }
        }
      );
      zip = null;
      return URL.createObjectURL(blob);
    },
  };
})();

const constructBucket = ({
  bucket: bucketName,
  region,
}: DownloadHandlerInput['config']) => ({ bucketName, region });

const readBody = async (
  response: Response,
  { data, options }: DownloadHandlerInput
) => {
  let loading = true;
  const chunks = [];
  const reader = response.body!.getReader();
  const size = +(response.headers.get('content-length') ?? 0);
  let received = 0;
  while (loading) {
    const { value, done } = await reader.read();

    if (done) {
      loading = false;
    } else {
      chunks.push(value);
      received += value.length;
      if (isFunction(options?.onProgress)) {
        options?.onProgress(
          data,
          getProgress({
            totalBytes: size,
            transferredBytes: received,
          })
        );
      }
    }
  }

  return new Blob(chunks);
};

const download = async (
  { config, data, options }: DownloadHandlerInput,
  abortController: AbortController
) => {
  const { customEndpoint, credentials, accountId } = config;
  const { key } = data;
  const { url } = await getUrl({
    path: key,
    options: {
      bucket: constructBucket(config),
      customEndpoint,
      locationCredentialsProvider: credentials,
      validateObjectExistence: true,
      contentDisposition: 'attachment',
      expectedBucketOwner: accountId,
    },
  });

  const response = await fetch(url, {
    mode: 'cors',
    signal: abortController.signal,
  });
  const blob = await readBody(response, { config, data, options });
  const [filename] = key.split('/').reverse();
  await zipper.addFile(blob, filename);
  return filename;
};

const downloadHandler = (() => {
  const fileDownloadQueue = new Set<string>();
  let timer: ReturnType<typeof setTimeout>;

  const handler: DownloadHandler = ({ config, data, options }) => {
    const { key } = data;
    const [, folder] = key.split('/').reverse();
    fileDownloadQueue.add(key);
    const abortController = new AbortController();
    return {
      cancel: () => {
        abortController.abort();
        fileDownloadQueue.delete(key);
      },
      result: download({ config, data, options }, abortController)
        .then((): DownloadTaskResult => {
          fileDownloadQueue.delete(key);
          return {
            status: 'COMPLETE',
          };
        })
        .catch((e): DownloadTaskResult => {
          const error = e as Error;
          fileDownloadQueue.delete(key);
          return {
            status: 'FAILED',
            message: error.message,
            error,
          };
        })
        .finally(() => {
          if (timer) clearTimeout(timer);
          timer = setTimeout(() => {
            if (fileDownloadQueue.size === 0) {
              zipper.getBlobUrl().then((blobURL) => {
                if (blobURL) {
                  const anchor = document.createElement('a');
                  const clickEvent = new MouseEvent('click');
                  anchor.href = blobURL;
                  anchor.download = `${folder || 'archive'}.zip`;
                  anchor.dispatchEvent(clickEvent);
                }
              });
            }
          }, 250);
Member:
question: what's the reason behind 250ms specifically?

Member (Author):

So the issue is that the handler gets invoked for every file individually.
The handler itself manages a queue of files that are waiting to download or are currently downloading.

Since the download handler is only invoked for 4 files at a time, it can happen that, before the next file is queued by the storage manager, the download handler "thinks" the queue is empty and assumes it has processed all files.

This small delay gives the storage manager some time to push more files into the download handler, and thus downloading continues.

It would be much more beneficial if the action handlers knew how many files are being processed in total, e.g. like Array.map, where the signature is <T, R>(item: T, index: number, all: T[]) => R[],

but I'll do a follow-up on this in another PR.

        }),
    };
  };

  return handler;
})();

export { downloadHandler as zipDownloadHandler };
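
On the follow-up idea in the review thread above (letting an action handler see the whole batch rather than debouncing until the queue looks empty), here is a rough, purely hypothetical sketch of an Array.map-style signature. None of these names exist in the package; they only illustrate the idea.

// Hypothetical sketch: BatchItem and BatchAwareDownloadHandler are not real
// package types. Like Array.map's callback, the handler receives its index and
// the full batch, so the last invocation can finalize the zip without the
// 250 ms "is the queue empty yet?" timer used above.
interface BatchItem {
  key: string;
}

type BatchAwareDownloadHandler = (
  item: BatchItem,
  index: number,
  all: BatchItem[]
) => Promise<void>;

const handleItem: BatchAwareDownloadHandler = async (item, index, all) => {
  // ...download `item.key` and add it to the in-memory zip here...
  if (index === all.length - 1) {
    // Last file in the batch: generate the archive and trigger the browser download.
  }
};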