feat(storage-manager): add multi-file zipdownload #6746
@@ -0,0 +1,100 @@
import { getUrl, GetUrlInput } from '../../../storage-internal';

import { zipDownloadHandler } from '../zipdownload';
import type { DownloadHandlerInput } from '../download';

jest.mock('../../../storage-internal');

const baseInput: DownloadHandlerInput = {
  config: {
    accountId: 'accountId',
    bucket: 'bucket',
    credentials: jest.fn(),
    customEndpoint: 'mock-endpoint',
    region: 'region',
  },
  data: {
    id: 'id',
    key: 'prefix/file-name',
    fileKey: 'file-name',
  },
};

describe('zipDownloadHandler', () => {
  const url = new URL('mock://fake.url');
  const mockGetUrl = jest.mocked(getUrl);

  beforeAll(() => {
    if (!globalThis.fetch) {
      globalThis.fetch = jest.fn(() => {
        return Promise.resolve({
          headers: {
            get: (header: string) => {
              if (header === 'content-length') {
                return 100;
              }
            },
          },
          body: {
            getReader: jest.fn(() => {
              return {
                read: () => ({
                  value: 100,
                  done: true,
                }),
              };
            }),
          },
        }) as unknown as Promise<Response>;
      });
    }
  });

  beforeEach(() => {
    const expiresAt = new Date();
    expiresAt.setDate(expiresAt.getDate() + 1);
    mockGetUrl.mockResolvedValue({ expiresAt, url });
  });

  afterEach(() => {
    mockGetUrl.mockReset();
  });

  it('calls `getUrl` with the expected values', () => {
    zipDownloadHandler(baseInput);

    const expected: GetUrlInput = {
      path: baseInput.data.key,
      options: {
        bucket: {
          bucketName: baseInput.config.bucket,
          region: baseInput.config.region,
        },
        customEndpoint: baseInput.config.customEndpoint,
        locationCredentialsProvider: baseInput.config.credentials,
        validateObjectExistence: true,
        contentDisposition: 'attachment',
        expectedBucketOwner: baseInput.config.accountId,
      },
    };

    expect(mockGetUrl).toHaveBeenCalledWith(expected);
  });

  it('returns a complete status', async () => {
    const { result } = zipDownloadHandler(baseInput);

    expect(await result).toEqual({ status: 'COMPLETE' });
  });

  it('returns failed status', async () => {
    const error = new Error('No download!');
    mockGetUrl.mockRejectedValue(error);
    const { result } = zipDownloadHandler(baseInput);

    expect(await result).toEqual({
      error,
      message: error.message,
      status: 'FAILED',
    });
  });
});
@@ -0,0 +1,174 @@
import { getUrl } from '../../storage-internal';
import type { DownloadHandler, DownloadHandlerInput } from './download';
import type { TaskResult, TaskResultStatus } from './types';
import { isFunction } from '@aws-amplify/ui';
import { getProgress } from './utils';
import JSZip from 'jszip';

type DownloadTaskResult = TaskResult<TaskResultStatus, { url: URL }>;

interface MyZipper {
  addFile: (file: Blob, name: string) => Promise<void>;
  getBlobUrl: (
    onProgress?: (percent: number, key: string) => void
  ) => Promise<string>;
}

const zipper: MyZipper = (() => {
  let zip: JSZip | null = null;
  return {
    addFile: (file, name) => {
      if (!zip) {
        zip = new JSZip();
      }
      return new Promise((ok, no) => {
        try {
          zip?.file(name, file);
          ok();
        } catch (e) {
          no();
        }
      });
    },
    getBlobUrl: async (onProgress) => {
      if (!zip) {
        throw new Error('no zip');
      }
      const blob = await zip.generateAsync(
        {
          type: 'blob',
          streamFiles: true,
          compression: 'DEFLATE',
          compressionOptions: {
            level: 3,
          },
        },
        ({ percent, currentFile }) => {
          if (isFunction(onProgress) && currentFile) {
            onProgress(percent, currentFile);
          }
        }
      );
      zip = null;
      return URL.createObjectURL(blob);
    },
  };
})();

const constructBucket = ({
  bucket: bucketName,
  region,
}: DownloadHandlerInput['config']) => ({ bucketName, region });

const readBody = async (
  response: Response,
  { data, options }: DownloadHandlerInput
) => {
  let loading = true;
  const chunks = [];
  const reader = response.body!.getReader();
  const size = +(response.headers.get('content-length') ?? 0);
  let received = 0;
  while (loading) {
    const { value, done } = await reader.read();

    if (done) {
      loading = false;
    } else {
      chunks.push(value);
      received += value.length;
      if (isFunction(options?.onProgress)) {
        options?.onProgress(
          data,
          getProgress({
            totalBytes: size,
            transferredBytes: received,
          })
        );
      }
    }
  }

  return new Blob(chunks);
};

const download = async (
  { config, data, options }: DownloadHandlerInput,
  abortController: AbortController
) => {
  const { customEndpoint, credentials, accountId } = config;
  const { key } = data;
  const { url } = await getUrl({
    path: key,
    options: {
      bucket: constructBucket(config),
      customEndpoint,
      locationCredentialsProvider: credentials,
      validateObjectExistence: true,
      contentDisposition: 'attachment',
      expectedBucketOwner: accountId,
    },
  });

  const response = await fetch(url, {
    mode: 'cors',
    signal: abortController.signal,
  });
  const blob = await readBody(response, { config, data, options });
  const [filename] = key.split('/').reverse();
  await zipper.addFile(blob, filename);
  return filename;
};

const downloadHandler = (() => {
  const fileDownloadQueue = new Set<string>();
  let timer: ReturnType<typeof setTimeout>;

  const handler: DownloadHandler = ({ config, data, options }) => {
    const { key } = data;
    const [, folder] = key.split('/').reverse();
    fileDownloadQueue.add(key);
    const abortController = new AbortController();
    return {
      cancel: () => {
        abortController.abort();
        fileDownloadQueue.delete(key);
      },
      result: download({ config, data, options }, abortController)
        .then((): DownloadTaskResult => {
          fileDownloadQueue.delete(key);
          return {
            status: 'COMPLETE',
          };
        })
        .catch((e): DownloadTaskResult => {
          const error = e as Error;
          fileDownloadQueue.delete(key);
          return {
            status: 'FAILED',
            message: error.message,
            error,
          };
        })
        .finally(() => {
          if (timer) clearTimeout(timer);
          timer = setTimeout(() => {
            if (fileDownloadQueue.size === 0) {
              zipper.getBlobUrl().then((blobURL) => {
                if (blobURL) {
                  const anchor = document.createElement('a');
                  const clickEvent = new MouseEvent('click');
                  anchor.href = blobURL;
                  anchor.download = `${folder || 'archive'}.zip`;
                  anchor.dispatchEvent(clickEvent);
                }
              });
            }
          }, 250);
Member
question: what's the reason behind 250 ms specifically?

Member (Author)
The issue is that the handler gets invoked for every file individually. Since the download handler is only invoked on 4 files at a time, it can happen that, before the next file is queued by the storage manager, the download handler "thinks" the queue is empty and assumes it has processed all files. This small delay gives the storage manager time to push more files into the download handler, so downloading continues. It would be much more beneficial if the action handlers knew how many files are being processed in total; I'll do a follow-up on that in another PR.
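As a rough illustration of that follow-up idea (not part of this PR), the sketch below finalizes the archive after a known number of tasks has settled instead of debouncing on an apparently empty queue. `createCountAwareFinalizer`, `expectedFileCount`, and the injected `getBlobUrl` parameter are hypothetical names; the action handlers do not currently receive a total file count.

// Hypothetical sketch, not part of this PR: finalize the archive after a known
// number of download tasks has settled, rather than waiting for the queue to
// look empty for 250 ms. `expectedFileCount` and `getBlobUrl` are assumed inputs.
const createCountAwareFinalizer = (
  expectedFileCount: number,
  getBlobUrl: () => Promise<string>
) => {
  let settled = 0;

  return async (folder?: string): Promise<void> => {
    settled += 1;
    if (settled < expectedFileCount) {
      return;
    }

    // Last task settled: build the archive exactly once and trigger the download.
    const blobURL = await getBlobUrl();
    const anchor = document.createElement('a');
    anchor.href = blobURL;
    anchor.download = `${folder ?? 'archive'}.zip`;
    anchor.dispatchEvent(new MouseEvent('click'));
  };
};

Each task's `.finally()` would invoke the returned callback, so the zip is generated exactly once when the last download settles, regardless of how the storage manager batches files into the handler.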
        }),
    };
  };

  return handler;
})();

export { downloadHandler as zipDownloadHandler };
Do we need to define the cancel callback here?