// discourse-skip-module (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){let c="function"===typeof require&&require;if(!f&&c){return c(i,!0);}if(u){return u(i,!0);}let a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a;}let p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){let n=e[i][1][r];return o(n||r);},p,p.exports,r,e,n,t);}return n[i].exports;}for(var u="function"===typeof require&&require,i=0;i= 10 || num % 1 === 0) { // Do not show decimals when the number is two-digit, or if the number has no // decimal component. return (neg ? '-' : '') + num.toFixed(0) + ' ' + unit; } else { return (neg ? '-' : '') + num.toFixed(1) + ' ' + unit; } }; },{}],2:[function(require,module,exports){ function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; } let id = 0; function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; } const { AbortController, createAbortError } = require('@uppy/utils/lib/AbortController'); const delay = require('@uppy/utils/lib/delay'); const MB = 1024 * 1024; const defaultOptions = { limit: 1, retryDelays: [0, 1000, 3000, 5000], getChunkSize(file) { return Math.ceil(file.size / 10000); }, onStart() {}, onProgress() {}, onPartComplete() {}, onSuccess() {}, onError(err) { throw err; } }; function ensureInt(value) { if (typeof value === 'string') { return parseInt(value, 10); } if (typeof value === 'number') { return value; } throw new TypeError('Expected a number'); } let _aborted = /*#__PURE__*/_classPrivateFieldLooseKey("aborted"); let _initChunks = /*#__PURE__*/_classPrivateFieldLooseKey("initChunks"); let _createUpload = /*#__PURE__*/_classPrivateFieldLooseKey("createUpload"); let _resumeUpload = /*#__PURE__*/_classPrivateFieldLooseKey("resumeUpload"); let _uploadParts = /*#__PURE__*/_classPrivateFieldLooseKey("uploadParts"); let _retryable = /*#__PURE__*/_classPrivateFieldLooseKey("retryable"); let _prepareUploadParts = /*#__PURE__*/_classPrivateFieldLooseKey("prepareUploadParts"); let _uploadPartRetryable = /*#__PURE__*/_classPrivateFieldLooseKey("uploadPartRetryable"); let _uploadPart = /*#__PURE__*/_classPrivateFieldLooseKey("uploadPart"); let _onPartProgress = /*#__PURE__*/_classPrivateFieldLooseKey("onPartProgress"); let _onPartComplete = /*#__PURE__*/_classPrivateFieldLooseKey("onPartComplete"); let _uploadPartBytes = /*#__PURE__*/_classPrivateFieldLooseKey("uploadPartBytes"); let _completeUpload = /*#__PURE__*/_classPrivateFieldLooseKey("completeUpload"); let _abortUpload = /*#__PURE__*/_classPrivateFieldLooseKey("abortUpload"); let _onError = /*#__PURE__*/_classPrivateFieldLooseKey("onError"); class MultipartUploader { constructor(file, options) { Object.defineProperty(this, _onError, { value: _onError2 }); Object.defineProperty(this, _abortUpload, { value: _abortUpload2 }); Object.defineProperty(this, _completeUpload, { value: _completeUpload2 }); Object.defineProperty(this, _uploadPartBytes, { value: _uploadPartBytes2 }); Object.defineProperty(this, _onPartComplete, { value: _onPartComplete2 }); Object.defineProperty(this, _onPartProgress, { value: _onPartProgress2 }); Object.defineProperty(this, _uploadPart, { value: _uploadPart2 }); Object.defineProperty(this, _uploadPartRetryable, { value: _uploadPartRetryable2 }); Object.defineProperty(this, _prepareUploadParts, { value: _prepareUploadParts2 }); Object.defineProperty(this, 
_retryable, { value: _retryable2 }); Object.defineProperty(this, _uploadParts, { value: _uploadParts2 }); Object.defineProperty(this, _resumeUpload, { value: _resumeUpload2 }); Object.defineProperty(this, _createUpload, { value: _createUpload2 }); Object.defineProperty(this, _initChunks, { value: _initChunks2 }); Object.defineProperty(this, _aborted, { value: _aborted2 }); this.options = { ...defaultOptions, ...options }; // Use default `getChunkSize` if it was null or something if (!this.options.getChunkSize) { this.options.getChunkSize = defaultOptions.getChunkSize; } this.file = file; this.abortController = new AbortController(); this.key = this.options.key || null; this.uploadId = this.options.uploadId || null; this.parts = []; // Do `this.createdPromise.then(OP)` to execute an operation `OP` _only_ if the // upload was created already. That also ensures that the sequencing is right // (so the `OP` definitely happens if the upload is created). // // This mostly exists to make `#abortUpload` work well: only sending the abort request if // the upload was already created, and if the createMultipartUpload request is still in flight, // aborting it immediately after it finishes. this.createdPromise = Promise.reject(); // eslint-disable-line prefer-promise-reject-errors this.isPaused = false; this.partsInProgress = 0; this.chunks = null; this.chunkState = null; _classPrivateFieldLooseBase(this, _initChunks)[_initChunks](); this.createdPromise.catch(() => {}); // silence uncaught rejection warning } /** * Was this upload aborted? * * If yes, we may need to throw an AbortError. * * @returns {boolean} */ start() { this.isPaused = false; if (this.uploadId) { _classPrivateFieldLooseBase(this, _resumeUpload)[_resumeUpload](); } else { _classPrivateFieldLooseBase(this, _createUpload)[_createUpload](); } } pause() { this.abortController.abort(); // Swap it out for a new controller, because this instance may be resumed later. 
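// Usage sketch (the `uploader` name below is illustrative, not defined in this file):
// `uploader.pause()` cancels in-flight part requests via the AbortController signal but
// keeps the upload resumable, while `uploader.abort({ really: true })` also calls
// `abortMultipartUpload` so S3 can discard the parts that were already uploaded.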
this.abortController = new AbortController(); this.isPaused = true; } abort(opts) { let _opts; if (opts === void 0) { opts = undefined; } if ((_opts = opts) != null && _opts.really) {_classPrivateFieldLooseBase(this, _abortUpload)[_abortUpload]();}else {this.pause();} } } function _aborted2() { return this.abortController.signal.aborted; } function _initChunks2() { const chunks = []; const desiredChunkSize = this.options.getChunkSize(this.file); // at least 5MB per request, at most 10k requests const minChunkSize = Math.max(5 * MB, Math.ceil(this.file.size / 10000)); const chunkSize = Math.max(desiredChunkSize, minChunkSize); // Upload zero-sized files in one zero-sized chunk if (this.file.size === 0) { chunks.push(this.file); } else { for (let i = 0; i < this.file.size; i += chunkSize) { const end = Math.min(this.file.size, i + chunkSize); chunks.push(this.file.slice(i, end)); } } this.chunks = chunks; this.chunkState = chunks.map(() => ({ uploaded: 0, busy: false, done: false })); } function _createUpload2() { this.createdPromise = Promise.resolve().then(() => this.options.createMultipartUpload()); return this.createdPromise.then(result => { if (_classPrivateFieldLooseBase(this, _aborted)[_aborted]()) {throw createAbortError();} const valid = typeof result === 'object' && result && typeof result.uploadId === 'string' && typeof result.key === 'string'; if (!valid) { throw new TypeError('AwsS3/Multipart: Got incorrect result from `createMultipartUpload()`, expected an object `{ uploadId, key }`.'); } this.key = result.key; this.uploadId = result.uploadId; this.options.onStart(result); _classPrivateFieldLooseBase(this, _uploadParts)[_uploadParts](); }).catch(err => { _classPrivateFieldLooseBase(this, _onError)[_onError](err); }); } async function _resumeUpload2() { try { const parts = await this.options.listParts({ uploadId: this.uploadId, key: this.key }); if (_classPrivateFieldLooseBase(this, _aborted)[_aborted]()) {throw createAbortError();} parts.forEach(part => { const i = part.PartNumber - 1; this.chunkState[i] = { uploaded: ensureInt(part.Size), etag: part.ETag, done: true }; // Only add if we did not yet know about this part. if (!this.parts.some(p => p.PartNumber === part.PartNumber)) { this.parts.push({ PartNumber: part.PartNumber, ETag: part.ETag }); } }); _classPrivateFieldLooseBase(this, _uploadParts)[_uploadParts](); } catch (err) { _classPrivateFieldLooseBase(this, _onError)[_onError](err); } } function _uploadParts2() { if (this.isPaused) {return;} // All parts are uploaded. 
if (this.chunkState.every(state => state.done)) { _classPrivateFieldLooseBase(this, _completeUpload)[_completeUpload](); return; } // For a 100MB file, with the default min chunk size of 5MB and a limit of 10: // // Total 20 parts // --------- // Need 1 is 10 // Need 2 is 5 // Need 3 is 5 const need = this.options.limit - this.partsInProgress; const completeChunks = this.chunkState.filter(state => state.done).length; const remainingChunks = this.chunks.length - completeChunks; let minNeeded = Math.ceil(this.options.limit / 2); if (minNeeded > remainingChunks) { minNeeded = remainingChunks; } if (need < minNeeded) {return;} const candidates = []; for (let i = 0; i < this.chunkState.length; i++) { const state = this.chunkState[i]; // eslint-disable-next-line no-continue if (state.done || state.busy) {continue;} candidates.push(i); if (candidates.length >= need) { break; } } if (candidates.length === 0) {return;} _classPrivateFieldLooseBase(this, _prepareUploadParts)[_prepareUploadParts](candidates).then(result => { candidates.forEach(index => { const partNumber = index + 1; const prePreparedPart = { url: result.presignedUrls[partNumber], headers: result.headers }; _classPrivateFieldLooseBase(this, _uploadPartRetryable)[_uploadPartRetryable](index, prePreparedPart).then(() => { _classPrivateFieldLooseBase(this, _uploadParts)[_uploadParts](); }, err => { _classPrivateFieldLooseBase(this, _onError)[_onError](err); }); }); }); } function _retryable2(_ref) { let { before, attempt, after } = _ref; const { retryDelays } = this.options; const { signal } = this.abortController; if (before) {before();} function shouldRetry(err) { if (err.source && typeof err.source.status === 'number') { const { status } = err.source; // 0 probably indicates network failure return status === 0 || status === 409 || status === 423 || status >= 500 && status < 600; } return false; } const doAttempt = retryAttempt => attempt().catch(err => { if (_classPrivateFieldLooseBase(this, _aborted)[_aborted]()) {throw createAbortError();} if (shouldRetry(err) && retryAttempt < retryDelays.length) { return delay(retryDelays[retryAttempt], { signal }).then(() => doAttempt(retryAttempt + 1)); } throw err; }); return doAttempt(0).then(result => { if (after) {after();} return result; }, err => { if (after) {after();} throw err; }); } async function _prepareUploadParts2(candidates) { candidates.forEach(i => { this.chunkState[i].busy = true; }); const result = await _classPrivateFieldLooseBase(this, _retryable)[_retryable]({ attempt: () => this.options.prepareUploadParts({ key: this.key, uploadId: this.uploadId, partNumbers: candidates.map(index => index + 1), chunks: candidates.reduce((chunks, candidate) => ({ ...chunks, // Use the part number as the index [candidate + 1]: this.chunks[candidate] }), {}) }) }); if (typeof (result == null ? void 0 : result.presignedUrls) !== 'object') { throw new TypeError('AwsS3/Multipart: Got incorrect result from `prepareUploadParts()`, expected an object `{ presignedUrls }`.'); } return result; } function _uploadPartRetryable2(index, prePreparedPart) { return _classPrivateFieldLooseBase(this, _retryable)[_retryable]({ before: () => { this.partsInProgress += 1; }, attempt: () => _classPrivateFieldLooseBase(this, _uploadPart)[_uploadPart](index, prePreparedPart), after: () => { this.partsInProgress -= 1; } }); } function _uploadPart2(index, prePreparedPart) { this.chunkState[index].busy = true; const valid = typeof (prePreparedPart == null ? 
void 0 : prePreparedPart.url) === 'string'; if (!valid) { throw new TypeError('AwsS3/Multipart: Got incorrect result for `prePreparedPart`, expected an object `{ url }`.'); } const { url, headers } = prePreparedPart; if (_classPrivateFieldLooseBase(this, _aborted)[_aborted]()) { this.chunkState[index].busy = false; throw createAbortError(); } return _classPrivateFieldLooseBase(this, _uploadPartBytes)[_uploadPartBytes](index, url, headers); } function _onPartProgress2(index, sent) { this.chunkState[index].uploaded = ensureInt(sent); const totalUploaded = this.chunkState.reduce((n, c) => n + c.uploaded, 0); this.options.onProgress(totalUploaded, this.file.size); } function _onPartComplete2(index, etag) { this.chunkState[index].etag = etag; this.chunkState[index].done = true; const part = { PartNumber: index + 1, ETag: etag }; this.parts.push(part); this.options.onPartComplete(part); } function _uploadPartBytes2(index, url, headers) { const body = this.chunks[index]; const { signal } = this.abortController; let defer; const promise = new Promise((resolve, reject) => { defer = { resolve, reject }; }); const xhr = new XMLHttpRequest(); xhr.open('PUT', url, true); if (headers) { Object.keys(headers).forEach(key => { xhr.setRequestHeader(key, headers[key]); }); } xhr.responseType = 'text'; function cleanup() { // eslint-disable-next-line no-use-before-define signal.removeEventListener('abort', onabort); } function onabort() { xhr.abort(); } signal.addEventListener('abort', onabort); xhr.upload.addEventListener('progress', ev => { if (!ev.lengthComputable) {return;} _classPrivateFieldLooseBase(this, _onPartProgress)[_onPartProgress](index, ev.loaded, ev.total); }); xhr.addEventListener('abort', () => { cleanup(); this.chunkState[index].busy = false; defer.reject(createAbortError()); }); xhr.addEventListener('load', ev => { cleanup(); this.chunkState[index].busy = false; if (ev.target.status < 200 || ev.target.status >= 300) { const error = new Error('Non 2xx'); error.source = ev.target; defer.reject(error); return; } // This avoids the net::ERR_OUT_OF_MEMORY in Chromium Browsers. this.chunks[index] = null; _classPrivateFieldLooseBase(this, _onPartProgress)[_onPartProgress](index, body.size, body.size); // NOTE This must be allowed by CORS. const etag = ev.target.getResponseHeader('ETag'); if (etag === null) { defer.reject(new Error('AwsS3/Multipart: Could not read the ETag header. This likely means CORS is not configured correctly on the S3 Bucket. See https://uppy.io/docs/aws-s3-multipart#S3-Bucket-Configuration for instructions.')); return; } _classPrivateFieldLooseBase(this, _onPartComplete)[_onPartComplete](index, etag); defer.resolve(); }); xhr.addEventListener('error', ev => { cleanup(); this.chunkState[index].busy = false; const error = new Error('Unknown error'); error.source = ev.target; defer.reject(error); }); xhr.send(body); return promise; } async function _completeUpload2() { // Parts may not have completed uploading in sorted order, if limit > 1. 
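// (With limit > 1, e.g. part 4 can finish before part 2.) S3's CompleteMultipartUpload
// expects the part list in ascending PartNumber order, hence the sort below.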
this.parts.sort((a, b) => a.PartNumber - b.PartNumber); try { const result = await this.options.completeMultipartUpload({ key: this.key, uploadId: this.uploadId, parts: this.parts }); this.options.onSuccess(result); } catch (err) { _classPrivateFieldLooseBase(this, _onError)[_onError](err); } } function _abortUpload2() { this.abortController.abort(); this.createdPromise.then(() => { this.options.abortMultipartUpload({ key: this.key, uploadId: this.uploadId }); }, () => {// if the creation failed we do not need to abort }); } function _onError2(err) { if (err && err.name === 'AbortError') { return; } this.options.onError(err); } module.exports = MultipartUploader; },{"@uppy/utils/lib/AbortController":26,"@uppy/utils/lib/delay":32}],3:[function(require,module,exports){ let _class, _temp; const BasePlugin = require('@uppy/core/lib/BasePlugin'); const { Socket, Provider, RequestClient } = require('@uppy/companion-client'); const EventTracker = require('@uppy/utils/lib/EventTracker'); const emitSocketProgress = require('@uppy/utils/lib/emitSocketProgress'); const getSocketHost = require('@uppy/utils/lib/getSocketHost'); const { RateLimitedQueue } = require('@uppy/utils/lib/RateLimitedQueue'); const MultipartUploader = require('./MultipartUploader'); function assertServerError(res) { if (res && res.error) { const error = new Error(res.message); Object.assign(error, res.error); throw error; } return res; } module.exports = (_temp = _class = class AwsS3Multipart extends BasePlugin { constructor(uppy, opts) { super(uppy, opts); this.type = 'uploader'; this.id = this.opts.id || 'AwsS3Multipart'; this.title = 'AWS S3 Multipart'; this.client = new RequestClient(uppy, opts); const defaultOptions = { timeout: 30 * 1000, limit: 0, retryDelays: [0, 1000, 3000, 5000], createMultipartUpload: this.createMultipartUpload.bind(this), listParts: this.listParts.bind(this), prepareUploadParts: this.prepareUploadParts.bind(this), abortMultipartUpload: this.abortMultipartUpload.bind(this), completeMultipartUpload: this.completeMultipartUpload.bind(this) }; this.opts = { ...defaultOptions, ...opts }; this.upload = this.upload.bind(this); this.requests = new RateLimitedQueue(this.opts.limit); this.uploaders = Object.create(null); this.uploaderEvents = Object.create(null); this.uploaderSockets = Object.create(null); } /** * Clean up all references for a file's upload: the MultipartUploader instance, * any events related to the file, and the Companion WebSocket connection. * * Set `opts.abort` to tell S3 that the multipart upload is cancelled and must be removed. * This should be done when the user cancels the upload, not when the upload is completed or errored. 
*/ resetUploaderReferences(fileID, opts) { if (opts === void 0) { opts = {}; } if (this.uploaders[fileID]) { this.uploaders[fileID].abort({ really: opts.abort || false }); this.uploaders[fileID] = null; } if (this.uploaderEvents[fileID]) { this.uploaderEvents[fileID].remove(); this.uploaderEvents[fileID] = null; } if (this.uploaderSockets[fileID]) { this.uploaderSockets[fileID].close(); this.uploaderSockets[fileID] = null; } } assertHost(method) { if (!this.opts.companionUrl) { throw new Error(`Expected a \`companionUrl\` option containing a Companion address, or if you are not using Companion, a custom \`${method}\` implementation.`); } } createMultipartUpload(file) { this.assertHost('createMultipartUpload'); const metadata = {}; Object.keys(file.meta).forEach(key => { if (file.meta[key] != null) { metadata[key] = file.meta[key].toString(); } }); return this.client.post('s3/multipart', { filename: file.name, type: file.type, metadata }).then(assertServerError); } listParts(file, _ref) { let { key, uploadId } = _ref; this.assertHost('listParts'); const filename = encodeURIComponent(key); return this.client.get(`s3/multipart/${uploadId}?key=${filename}`).then(assertServerError); } prepareUploadParts(file, _ref2) { let { key, uploadId, partNumbers } = _ref2; this.assertHost('prepareUploadParts'); const filename = encodeURIComponent(key); return this.client.get(`s3/multipart/${uploadId}/batch?key=${filename}&partNumbers=${partNumbers.join(',')}`).then(assertServerError); } completeMultipartUpload(file, _ref3) { let { key, uploadId, parts } = _ref3; this.assertHost('completeMultipartUpload'); const filename = encodeURIComponent(key); const uploadIdEnc = encodeURIComponent(uploadId); return this.client.post(`s3/multipart/${uploadIdEnc}/complete?key=${filename}`, { parts }).then(assertServerError); } abortMultipartUpload(file, _ref4) { let { key, uploadId } = _ref4; this.assertHost('abortMultipartUpload'); const filename = encodeURIComponent(key); const uploadIdEnc = encodeURIComponent(uploadId); return this.client.delete(`s3/multipart/${uploadIdEnc}?key=${filename}`).then(assertServerError); } uploadFile(file) { return new Promise((resolve, reject) => { const onStart = data => { const cFile = this.uppy.getFile(file.id); this.uppy.setFileState(file.id, { s3Multipart: { ...cFile.s3Multipart, key: data.key, uploadId: data.uploadId } }); }; const onProgress = (bytesUploaded, bytesTotal) => { this.uppy.emit('upload-progress', file, { uploader: this, bytesUploaded, bytesTotal }); }; const onError = err => { this.uppy.log(err); this.uppy.emit('upload-error', file, err); queuedRequest.done(); this.resetUploaderReferences(file.id); reject(err); }; const onSuccess = result => { const uploadResp = { body: { ...result }, uploadURL: result.location }; queuedRequest.done(); this.resetUploaderReferences(file.id); const cFile = this.uppy.getFile(file.id); this.uppy.emit('upload-success', cFile || file, uploadResp); if (result.location) { this.uppy.log(`Download ${upload.file.name} from ${result.location}`); } resolve(upload); }; const onPartComplete = part => { const cFile = this.uppy.getFile(file.id); if (!cFile) { return; } this.uppy.emit('s3-multipart:part-uploaded', cFile, part); }; const upload = new MultipartUploader(file.data, { // .bind to pass the file object to each handler. 
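// For example, `createMultipartUpload.bind(this, file)` lets MultipartUploader invoke
// `options.createMultipartUpload()` with no arguments while the plugin method still
// receives the `file` it belongs to as its first argument.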
createMultipartUpload: this.opts.createMultipartUpload.bind(this, file), listParts: this.opts.listParts.bind(this, file), prepareUploadParts: this.opts.prepareUploadParts.bind(this, file), completeMultipartUpload: this.opts.completeMultipartUpload.bind(this, file), abortMultipartUpload: this.opts.abortMultipartUpload.bind(this, file), getChunkSize: this.opts.getChunkSize ? this.opts.getChunkSize.bind(this) : null, onStart, onProgress, onError, onSuccess, onPartComplete, limit: this.opts.limit || 5, retryDelays: this.opts.retryDelays || [], ...file.s3Multipart }); this.uploaders[file.id] = upload; this.uploaderEvents[file.id] = new EventTracker(this.uppy); let queuedRequest = this.requests.run(() => { if (!file.isPaused) { upload.start(); } // Don't do anything here, the caller will take care of cancelling the upload itself // using resetUploaderReferences(). This is because resetUploaderReferences() has to be // called when this request is still in the queue, and has not been started yet, too. At // that point this cancellation function is not going to be called. return () => {}; }); this.onFileRemove(file.id, removed => { queuedRequest.abort(); this.resetUploaderReferences(file.id, { abort: true }); resolve(`upload ${removed.id} was removed`); }); this.onCancelAll(file.id, () => { queuedRequest.abort(); this.resetUploaderReferences(file.id, { abort: true }); resolve(`upload ${file.id} was canceled`); }); this.onFilePause(file.id, isPaused => { if (isPaused) { // Remove this file from the queue so another file can start in its place. queuedRequest.abort(); upload.pause(); } else { // Resuming an upload should be queued, else you could pause and then // resume a queued upload to make it skip the queue. queuedRequest.abort(); queuedRequest = this.requests.run(() => { upload.start(); return () => {}; }); } }); this.onPauseAll(file.id, () => { queuedRequest.abort(); upload.pause(); }); this.onResumeAll(file.id, () => { queuedRequest.abort(); if (file.error) { upload.abort(); } queuedRequest = this.requests.run(() => { upload.start(); return () => {}; }); }); // Don't double-emit upload-started for Golden Retriever-restored files that were already started if (!file.progress.uploadStarted || !file.isRestored) { this.uppy.emit('upload-started', file); } }); } uploadRemote(file) { this.resetUploaderReferences(file.id); // Don't double-emit upload-started for Golden Retriever-restored files that were already started if (!file.progress.uploadStarted || !file.isRestored) { this.uppy.emit('upload-started', file); } if (file.serverToken) { return this.connectToServerSocket(file); } return new Promise((resolve, reject) => { const Client = file.remote.providerOptions.provider ? 
Provider : RequestClient; const client = new Client(this.uppy, file.remote.providerOptions); client.post(file.remote.url, { ...file.remote.body, protocol: 's3-multipart', size: file.data.size, metadata: file.meta }).then(res => { this.uppy.setFileState(file.id, { serverToken: res.token }); file = this.uppy.getFile(file.id); return file; }).then(file => { return this.connectToServerSocket(file); }).then(() => { resolve(); }).catch(err => { this.uppy.emit('upload-error', file, err); reject(err); }); }); } connectToServerSocket(file) { return new Promise((resolve, reject) => { const token = file.serverToken; const host = getSocketHost(file.remote.companionUrl); const socket = new Socket({ target: `${host}/api/${token}`, autoOpen: false }); this.uploaderSockets[file.id] = socket; this.uploaderEvents[file.id] = new EventTracker(this.uppy); this.onFileRemove(file.id, () => { queuedRequest.abort(); socket.send('cancel', {}); this.resetUploaderReferences(file.id, { abort: true }); resolve(`upload ${file.id} was removed`); }); this.onFilePause(file.id, isPaused => { if (isPaused) { // Remove this file from the queue so another file can start in its place. queuedRequest.abort(); socket.send('pause', {}); } else { // Resuming an upload should be queued, else you could pause and then // resume a queued upload to make it skip the queue. queuedRequest.abort(); queuedRequest = this.requests.run(() => { socket.send('resume', {}); return () => {}; }); } }); this.onPauseAll(file.id, () => { queuedRequest.abort(); socket.send('pause', {}); }); this.onCancelAll(file.id, () => { queuedRequest.abort(); socket.send('cancel', {}); this.resetUploaderReferences(file.id); resolve(`upload ${file.id} was canceled`); }); this.onResumeAll(file.id, () => { queuedRequest.abort(); if (file.error) { socket.send('pause', {}); } queuedRequest = this.requests.run(() => { socket.send('resume', {}); }); }); this.onRetry(file.id, () => { // Only do the retry if the upload is actually in progress; // else we could try to send these messages when the upload is still queued. // We may need a better check for this since the socket may also be closed // for other reasons, like network failures. 
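// Note that the retry itself is expressed as a 'pause' message immediately followed by
// a 'resume' message, which is how this code asks Companion to restart the upload.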
if (socket.isOpen) { socket.send('pause', {}); socket.send('resume', {}); } }); this.onRetryAll(file.id, () => { if (socket.isOpen) { socket.send('pause', {}); socket.send('resume', {}); } }); socket.on('progress', progressData => emitSocketProgress(this, progressData, file)); socket.on('error', errData => { this.uppy.emit('upload-error', file, new Error(errData.error)); this.resetUploaderReferences(file.id); queuedRequest.done(); reject(new Error(errData.error)); }); socket.on('success', data => { const uploadResp = { uploadURL: data.url }; this.uppy.emit('upload-success', file, uploadResp); this.resetUploaderReferences(file.id); queuedRequest.done(); resolve(); }); let queuedRequest = this.requests.run(() => { socket.open(); if (file.isPaused) { socket.send('pause', {}); } return () => {}; }); }); } upload(fileIDs) { if (fileIDs.length === 0) {return Promise.resolve();} const promises = fileIDs.map(id => { const file = this.uppy.getFile(id); if (file.isRemote) { return this.uploadRemote(file); } return this.uploadFile(file); }); return Promise.all(promises); } onFileRemove(fileID, cb) { this.uploaderEvents[fileID].on('file-removed', file => { if (fileID === file.id) {cb(file.id);} }); } onFilePause(fileID, cb) { this.uploaderEvents[fileID].on('upload-pause', (targetFileID, isPaused) => { if (fileID === targetFileID) { // const isPaused = this.uppy.pauseResume(fileID) cb(isPaused); } }); } onRetry(fileID, cb) { this.uploaderEvents[fileID].on('upload-retry', targetFileID => { if (fileID === targetFileID) { cb(); } }); } onRetryAll(fileID, cb) { this.uploaderEvents[fileID].on('retry-all', () => { if (!this.uppy.getFile(fileID)) {return;} cb(); }); } onPauseAll(fileID, cb) { this.uploaderEvents[fileID].on('pause-all', () => { if (!this.uppy.getFile(fileID)) {return;} cb(); }); } onCancelAll(fileID, cb) { this.uploaderEvents[fileID].on('cancel-all', () => { if (!this.uppy.getFile(fileID)) {return;} cb(); }); } onResumeAll(fileID, cb) { this.uploaderEvents[fileID].on('resume-all', () => { if (!this.uppy.getFile(fileID)) {return;} cb(); }); } install() { const { capabilities } = this.uppy.getState(); this.uppy.setState({ capabilities: { ...capabilities, resumableUploads: true } }); this.uppy.addUploader(this.upload); } uninstall() { const { capabilities } = this.uppy.getState(); this.uppy.setState({ capabilities: { ...capabilities, resumableUploads: false } }); this.uppy.removeUploader(this.upload); } }, _class.VERSION = "2.2.1", _temp); },{"./MultipartUploader":2,"@uppy/companion-client":13,"@uppy/core/lib/BasePlugin":15,"@uppy/utils/lib/EventTracker":27,"@uppy/utils/lib/RateLimitedQueue":30,"@uppy/utils/lib/emitSocketProgress":33,"@uppy/utils/lib/getSocketHost":44}],4:[function(require,module,exports){ let _getOptions, _addEventHandlerForFile, _addEventHandlerIfFileStillExists, _uploadLocalFile, _uploadRemoteFile; function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; } let id = 0; function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; } const { nanoid } = require('nanoid/non-secure'); const { Provider, RequestClient, Socket } = require('@uppy/companion-client'); const emitSocketProgress = require('@uppy/utils/lib/emitSocketProgress'); const getSocketHost = require('@uppy/utils/lib/getSocketHost'); const EventTracker = require('@uppy/utils/lib/EventTracker'); const ProgressTimeout = 
require('@uppy/utils/lib/ProgressTimeout'); const NetworkError = require('@uppy/utils/lib/NetworkError'); const isNetworkError = require('@uppy/utils/lib/isNetworkError'); const { internalRateLimitedQueue } = require('@uppy/utils/lib/RateLimitedQueue'); // See XHRUpload function buildResponseError(xhr, error) { if (isNetworkError(xhr)) {return new NetworkError(error, xhr);} // TODO: when we drop support for browsers that do not support this syntax, use: // return new Error('Upload error', { cause: error, request: xhr }) const err = new Error('Upload error'); err.cause = error; err.request = xhr; return err; } // See XHRUpload function setTypeInBlob(file) { const dataWithUpdatedType = file.data.slice(0, file.data.size, file.meta.type); return dataWithUpdatedType; } function addMetadata(formData, meta, opts) { const metaFields = Array.isArray(opts.metaFields) ? opts.metaFields // Send along all fields by default. : Object.keys(meta); metaFields.forEach(item => { formData.append(item, meta[item]); }); } function createFormDataUpload(file, opts) { const formPost = new FormData(); addMetadata(formPost, file.meta, opts); const dataWithUpdatedType = setTypeInBlob(file); if (file.name) { formPost.append(opts.fieldName, dataWithUpdatedType, file.meta.name); } else { formPost.append(opts.fieldName, dataWithUpdatedType); } return formPost; } const createBareUpload = file => file.data; module.exports = (_getOptions = /*#__PURE__*/_classPrivateFieldLooseKey("getOptions"), _addEventHandlerForFile = /*#__PURE__*/_classPrivateFieldLooseKey("addEventHandlerForFile"), _addEventHandlerIfFileStillExists = /*#__PURE__*/_classPrivateFieldLooseKey("addEventHandlerIfFileStillExists"), _uploadLocalFile = /*#__PURE__*/_classPrivateFieldLooseKey("uploadLocalFile"), _uploadRemoteFile = /*#__PURE__*/_classPrivateFieldLooseKey("uploadRemoteFile"), class MiniXHRUpload { constructor(_uppy, _opts) { Object.defineProperty(this, _uploadRemoteFile, { value: _uploadRemoteFile2 }); Object.defineProperty(this, _uploadLocalFile, { value: _uploadLocalFile2 }); Object.defineProperty(this, _addEventHandlerIfFileStillExists, { value: _addEventHandlerIfFileStillExists2 }); Object.defineProperty(this, _addEventHandlerForFile, { value: _addEventHandlerForFile2 }); Object.defineProperty(this, _getOptions, { value: _getOptions2 }); this.uppy = _uppy; this.opts = { validateStatus(status) { return status >= 200 && status < 300; }, ..._opts }; this.requests = _opts[internalRateLimitedQueue]; this.uploaderEvents = Object.create(null); this.i18n = _opts.i18n; } uploadFile(id, current, total) { const file = this.uppy.getFile(id); if (file.error) { throw new Error(file.error); } else if (file.isRemote) { return _classPrivateFieldLooseBase(this, _uploadRemoteFile)[_uploadRemoteFile](file, current, total); } return _classPrivateFieldLooseBase(this, _uploadLocalFile)[_uploadLocalFile](file, current, total); } }); function _getOptions2(file) { let _file$xhrUpload; const { uppy } = this; const overrides = uppy.getState().xhrUpload; const opts = { ...this.opts, ...(overrides || {}), ...(file.xhrUpload || {}), headers: { ...this.opts.headers, ...(overrides == null ? void 0 : overrides.headers), ...((_file$xhrUpload = file.xhrUpload) == null ? 
void 0 : _file$xhrUpload.headers) } }; return opts; } function _addEventHandlerForFile2(eventName, fileID, eventHandler) { this.uploaderEvents[fileID].on(eventName, targetFileID => { if (fileID === targetFileID) {eventHandler();} }); } function _addEventHandlerIfFileStillExists2(eventName, fileID, eventHandler) { this.uploaderEvents[fileID].on(eventName, () => { if (this.uppy.getFile(fileID)) {eventHandler();} }); } function _uploadLocalFile2(file, current, total) { const opts = _classPrivateFieldLooseBase(this, _getOptions)[_getOptions](file); this.uppy.log(`uploading ${current} of ${total}`); return new Promise((resolve, reject) => { // This is done in index.js in the S3 plugin. // this.uppy.emit('upload-started', file) const data = opts.formData ? createFormDataUpload(file, opts) : createBareUpload(file, opts); const xhr = new XMLHttpRequest(); this.uploaderEvents[file.id] = new EventTracker(this.uppy); const timer = new ProgressTimeout(opts.timeout, () => { xhr.abort(); // eslint-disable-next-line no-use-before-define queuedRequest.done(); const error = new Error(this.i18n('timedOut', { seconds: Math.ceil(opts.timeout / 1000) })); this.uppy.emit('upload-error', file, error); reject(error); }); const id = nanoid(); xhr.upload.addEventListener('loadstart', () => { this.uppy.log(`[AwsS3/XHRUpload] ${id} started`); }); xhr.upload.addEventListener('progress', ev => { this.uppy.log(`[AwsS3/XHRUpload] ${id} progress: ${ev.loaded} / ${ev.total}`); // Begin checking for timeouts when progress starts, instead of loading, // to avoid timing out requests on browser concurrency queue timer.progress(); if (ev.lengthComputable) { this.uppy.emit('upload-progress', file, { uploader: this, bytesUploaded: ev.loaded, bytesTotal: ev.total }); } }); xhr.addEventListener('load', ev => { this.uppy.log(`[AwsS3/XHRUpload] ${id} finished`); timer.done(); // eslint-disable-next-line no-use-before-define queuedRequest.done(); if (this.uploaderEvents[file.id]) { this.uploaderEvents[file.id].remove(); this.uploaderEvents[file.id] = null; } if (opts.validateStatus(ev.target.status, xhr.responseText, xhr)) { const body = opts.getResponseData(xhr.responseText, xhr); const uploadURL = body[opts.responseUrlFieldName]; const uploadResp = { status: ev.target.status, body, uploadURL }; this.uppy.emit('upload-success', file, uploadResp); if (uploadURL) { this.uppy.log(`Download ${file.name} from ${uploadURL}`); } return resolve(file); } const body = opts.getResponseData(xhr.responseText, xhr); const error = buildResponseError(xhr, opts.getResponseError(xhr.responseText, xhr)); const response = { status: ev.target.status, body }; this.uppy.emit('upload-error', file, error, response); return reject(error); }); xhr.addEventListener('error', () => { this.uppy.log(`[AwsS3/XHRUpload] ${id} errored`); timer.done(); // eslint-disable-next-line no-use-before-define queuedRequest.done(); if (this.uploaderEvents[file.id]) { this.uploaderEvents[file.id].remove(); this.uploaderEvents[file.id] = null; } const error = buildResponseError(xhr, opts.getResponseError(xhr.responseText, xhr)); this.uppy.emit('upload-error', file, error); return reject(error); }); xhr.open(opts.method.toUpperCase(), opts.endpoint, true); // IE10 does not allow setting `withCredentials` and `responseType` // before `open()` is called. 
It’s important to set withCredentials // to a boolean, otherwise React Native crashes xhr.withCredentials = Boolean(opts.withCredentials); if (opts.responseType !== '') { xhr.responseType = opts.responseType; } Object.keys(opts.headers).forEach(header => { xhr.setRequestHeader(header, opts.headers[header]); }); const queuedRequest = this.requests.run(() => { xhr.send(data); return () => { // eslint-disable-next-line no-use-before-define timer.done(); xhr.abort(); }; }, { priority: 1 }); _classPrivateFieldLooseBase(this, _addEventHandlerForFile)[_addEventHandlerForFile]('file-removed', file.id, () => { queuedRequest.abort(); reject(new Error('File removed')); }); _classPrivateFieldLooseBase(this, _addEventHandlerIfFileStillExists)[_addEventHandlerIfFileStillExists]('cancel-all', file.id, () => { queuedRequest.abort(); reject(new Error('Upload cancelled')); }); }); } function _uploadRemoteFile2(file) { const opts = _classPrivateFieldLooseBase(this, _getOptions)[_getOptions](file); // This is done in index.js in the S3 plugin. // this.uppy.emit('upload-started', file) const metaFields = Array.isArray(opts.metaFields) ? opts.metaFields // Send along all fields by default. : Object.keys(file.meta); const Client = file.remote.providerOptions.provider ? Provider : RequestClient; const client = new Client(this.uppy, file.remote.providerOptions); return client.post(file.remote.url, { ...file.remote.body, endpoint: opts.endpoint, size: file.data.size, fieldname: opts.fieldName, metadata: Object.fromEntries(metaFields.map(name => [name, file.meta[name]])), httpMethod: opts.method, useFormData: opts.formData, headers: opts.headers }).then(res => new Promise((resolve, reject) => { const { token } = res; const host = getSocketHost(file.remote.companionUrl); const socket = new Socket({ target: `${host}/api/${token}`, autoOpen: false }); this.uploaderEvents[file.id] = new EventTracker(this.uppy); const queuedRequest = this.requests.run(() => { socket.open(); if (file.isPaused) { socket.send('pause', {}); } return () => socket.close(); }); _classPrivateFieldLooseBase(this, _addEventHandlerForFile)[_addEventHandlerForFile]('file-removed', file.id, () => { socket.send('cancel', {}); queuedRequest.abort(); resolve(`upload ${file.id} was removed`); }); _classPrivateFieldLooseBase(this, _addEventHandlerIfFileStillExists)[_addEventHandlerIfFileStillExists]('cancel-all', file.id, () => { socket.send('cancel', {}); queuedRequest.abort(); resolve(`upload ${file.id} was canceled`); }); _classPrivateFieldLooseBase(this, _addEventHandlerForFile)[_addEventHandlerForFile]('upload-retry', file.id, () => { socket.send('pause', {}); socket.send('resume', {}); }); _classPrivateFieldLooseBase(this, _addEventHandlerIfFileStillExists)[_addEventHandlerIfFileStillExists]('retry-all', file.id, () => { socket.send('pause', {}); socket.send('resume', {}); }); socket.on('progress', progressData => emitSocketProgress(this, progressData, file)); socket.on('success', data => { const body = opts.getResponseData(data.response.responseText, data.response); const uploadURL = body[opts.responseUrlFieldName]; const uploadResp = { status: data.response.status, body, uploadURL, bytesUploaded: data.bytesUploaded }; this.uppy.emit('upload-success', file, uploadResp); queuedRequest.done(); if (this.uploaderEvents[file.id]) { this.uploaderEvents[file.id].remove(); this.uploaderEvents[file.id] = null; } return resolve(); }); socket.on('error', errData => { const resp = errData.response; const error = resp ? 
opts.getResponseError(resp.responseText, resp) : Object.assign(new Error(errData.error.message), { cause: errData.error }); this.uppy.emit('upload-error', file, error); queuedRequest.done(); if (this.uploaderEvents[file.id]) { this.uploaderEvents[file.id].remove(); this.uploaderEvents[file.id] = null; } reject(error); }); }).catch(err => { this.uppy.emit('upload-error', file, err); return Promise.reject(err); })); } },{"@uppy/companion-client":13,"@uppy/utils/lib/EventTracker":27,"@uppy/utils/lib/NetworkError":28,"@uppy/utils/lib/ProgressTimeout":29,"@uppy/utils/lib/RateLimitedQueue":30,"@uppy/utils/lib/emitSocketProgress":33,"@uppy/utils/lib/getSocketHost":44,"@uppy/utils/lib/isNetworkError":48,"nanoid/non-secure":57}],5:[function(require,module,exports){ let _class, _client, _requests, _uploader, _handleUpload, _temp; function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; } let id = 0; function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; } /** * This plugin is currently a A Big Hack™! The core reason for that is how this plugin * interacts with Uppy's current pipeline design. The pipeline can handle files in steps, * including preprocessing, uploading, and postprocessing steps. This plugin initially * was designed to do its work in a preprocessing step, and let XHRUpload deal with the * actual file upload as an uploading step. However, Uppy runs steps on all files at once, * sequentially: first, all files go through a preprocessing step, then, once they are all * done, they go through the uploading step. * * For S3, this causes severely broken behaviour when users upload many files. The * preprocessing step will request S3 upload URLs that are valid for a short time only, * but it has to do this for _all_ files, which can take a long time if there are hundreds * or even thousands of files. By the time the uploader step starts, the first URLs may * already have expired. If not, the uploading might take such a long time that later URLs * will expire before some files can be uploaded. * * The long-term solution to this problem is to change the upload pipeline so that files * can be sent to the next step individually. That requires a breaking change, so it is * planned for some future Uppy version. * * In the mean time, this plugin is stuck with a hackier approach: the necessary parts * of the XHRUpload implementation were copied into this plugin, as the MiniXHRUpload * class, and this plugin calls into it immediately once it receives an upload URL. * This isn't as nicely modular as we'd like and requires us to maintain two copies of * the XHRUpload code, but at least it's not horrifically broken :) */ const BasePlugin = require('@uppy/core/lib/BasePlugin'); const { RateLimitedQueue, internalRateLimitedQueue } = require('@uppy/utils/lib/RateLimitedQueue'); const { RequestClient } = require('@uppy/companion-client'); const MiniXHRUpload = require('./MiniXHRUpload'); const isXml = require('./isXml'); const locale = require('./locale'); function resolveUrl(origin, link) { return new URL(link, origin || undefined).toString(); } /** * Get the contents of a named tag in an XML source string. * * @param {string} source - The XML source string. * @param {string} tagName - The name of the tag. * @returns {string} The contents of the tag, or the empty string if the tag does not exist. 
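* Example (illustrative): getXmlValue('<Key>photo.jpg</Key>', 'Key') returns 'photo.jpg'.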
*/ function getXmlValue(source, tagName) { const start = source.indexOf(`<${tagName}>`); const end = source.indexOf(`</${tagName}>`, start); return start !== -1 && end !== -1 ? source.slice(start + tagName.length + 2, end) : ''; } function assertServerError(res) { if (res && res.error) { const error = new Error(res.message); Object.assign(error, res.error); throw error; } return res; } function validateParameters(file, params) { const valid = params != null && typeof params.url === 'string' && (typeof params.fields === 'object' || params.fields == null); if (!valid) { const err = new TypeError(`AwsS3: got incorrect result from 'getUploadParameters()' for file '${file.name}', expected an object '{ url, method, fields, headers }' but got '${JSON.stringify(params)}' instead.\nSee https://uppy.io/docs/aws-s3/#getUploadParameters-file for more on the expected format.`); throw err; } const methodIsValid = params.method == null || /^p(u|os)t$/i.test(params.method); if (!methodIsValid) { const err = new TypeError(`AwsS3: got incorrect method from 'getUploadParameters()' for file '${file.name}', expected 'put' or 'post' but got '${params.method}' instead.\nSee https://uppy.io/docs/aws-s3/#getUploadParameters-file for more on the expected format.`); throw err; } } // Get the error data from a failed XMLHttpRequest instance. // `content` is the S3 response as a string. // `xhr` is the XMLHttpRequest instance. function defaultGetResponseError(content, xhr) { // If no response, we don't have a specific error message, use the default. if (!isXml(content, xhr)) { return undefined; } const error = getXmlValue(content, 'Message'); return new Error(error); } // warning deduplication flag: see `getResponseData()` XHRUpload option definition let warnedSuccessActionStatus = false; module.exports = (_temp = (_client = /*#__PURE__*/_classPrivateFieldLooseKey("client"), _requests = /*#__PURE__*/_classPrivateFieldLooseKey("requests"), _uploader = /*#__PURE__*/_classPrivateFieldLooseKey("uploader"), _handleUpload = /*#__PURE__*/_classPrivateFieldLooseKey("handleUpload"), _class = class AwsS3 extends BasePlugin { // eslint-disable-next-line global-require constructor(uppy, opts) { super(uppy, opts); Object.defineProperty(this, _client, { writable: true, value: void 0 }); Object.defineProperty(this, _requests, { writable: true, value: void 0 }); Object.defineProperty(this, _uploader, { writable: true, value: void 0 }); Object.defineProperty(this, _handleUpload, { writable: true, value: fileIDs => { /** * keep track of `getUploadParameters()` responses * so we can cancel the calls individually using just a file ID * * @type {Object.<string, Promise>} */ const paramsPromises = Object.create(null); function onremove(file) { let _paramsPromises$id; const { id } = file; (_paramsPromises$id = paramsPromises[id]) == null ?
void 0 : _paramsPromises$id.abort(); } this.uppy.on('file-removed', onremove); fileIDs.forEach(id => { const file = this.uppy.getFile(id); this.uppy.emit('upload-started', file); }); const getUploadParameters = _classPrivateFieldLooseBase(this, _requests)[_requests].wrapPromiseFunction(file => { return this.opts.getUploadParameters(file); }); const numberOfFiles = fileIDs.length; return Promise.allSettled(fileIDs.map((id, index) => { paramsPromises[id] = getUploadParameters(this.uppy.getFile(id)); return paramsPromises[id].then(params => { delete paramsPromises[id]; const file = this.uppy.getFile(id); validateParameters(file, params); const { method = 'post', url, fields, headers } = params; const xhrOpts = { method, formData: method.toLowerCase() === 'post', endpoint: url, metaFields: fields ? Object.keys(fields) : [] }; if (headers) { xhrOpts.headers = headers; } this.uppy.setFileState(file.id, { meta: { ...file.meta, ...fields }, xhrUpload: xhrOpts }); return _classPrivateFieldLooseBase(this, _uploader)[_uploader].uploadFile(file.id, index, numberOfFiles); }).catch(error => { delete paramsPromises[id]; const file = this.uppy.getFile(id); this.uppy.emit('upload-error', file, error); return Promise.reject(error); }); })).finally(() => { // cleanup. this.uppy.off('file-removed', onremove); }); } }); this.type = 'uploader'; this.id = this.opts.id || 'AwsS3'; this.title = 'AWS S3'; this.defaultLocale = locale; const defaultOptions = { timeout: 30 * 1000, limit: 0, metaFields: [], // have to opt in getUploadParameters: this.getUploadParameters.bind(this) }; this.opts = { ...defaultOptions, ...opts }; // TODO: remove i18n once we can depend on XHRUpload instead of MiniXHRUpload this.i18nInit(); _classPrivateFieldLooseBase(this, _client)[_client] = new RequestClient(uppy, opts); _classPrivateFieldLooseBase(this, _requests)[_requests] = new RateLimitedQueue(this.opts.limit); } getUploadParameters(file) { if (!this.opts.companionUrl) { throw new Error('Expected a `companionUrl` option containing a Companion address.'); } const filename = file.meta.name; const { type } = file.meta; const metadata = Object.fromEntries(this.opts.metaFields.filter(key => file.meta[key] != null).map(key => [`metadata[${key}]`, file.meta[key].toString()])); const query = new URLSearchParams({ filename, type, ...metadata }); return _classPrivateFieldLooseBase(this, _client)[_client].get(`s3/params?${query}`).then(assertServerError); } install() { const { uppy } = this; uppy.addUploader(_classPrivateFieldLooseBase(this, _handleUpload)[_handleUpload]); // Get the response data from a successful XMLHttpRequest instance. // `content` is the S3 response as a string. // `xhr` is the XMLHttpRequest instance. function defaultGetResponseData(content, xhr) { const opts = this; // If no response, we've hopefully done a PUT request to the file // in the bucket on its full URL. if (!isXml(content, xhr)) { if (opts.method.toUpperCase() === 'POST') { if (!warnedSuccessActionStatus) { uppy.log('[AwsS3] No response data found, make sure to set the success_action_status AWS SDK option to 201. See https://uppy.io/docs/aws-s3/#POST-Uploads', 'warning'); warnedSuccessActionStatus = true; } // The responseURL won't contain the object key. Give up. return { location: null }; } // responseURL is not available in older browsers. 
if (!xhr.responseURL) { return { location: null }; } // Trim the query string because it's going to be a bunch of presign // parameters for a PUT request—doing a GET request with those will // always result in an error return { location: xhr.responseURL.replace(/\?.*$/, '') }; } return { // Some S3 alternatives do not reply with an absolute URL. // Eg DigitalOcean Spaces uses /$bucketName/xyz location: resolveUrl(xhr.responseURL, getXmlValue(content, 'Location')), bucket: getXmlValue(content, 'Bucket'), key: getXmlValue(content, 'Key'), etag: getXmlValue(content, 'ETag') }; } const xhrOptions = { fieldName: 'file', responseUrlFieldName: 'location', timeout: this.opts.timeout, // Share the rate limiting queue with XHRUpload. [internalRateLimitedQueue]: _classPrivateFieldLooseBase(this, _requests)[_requests], responseType: 'text', getResponseData: this.opts.getResponseData || defaultGetResponseData, getResponseError: defaultGetResponseError }; // TODO: remove i18n once we can depend on XHRUpload instead of MiniXHRUpload xhrOptions.i18n = this.i18n; // Revert to `uppy.use(XHRUpload)` once the big comment block at the top of // this file is solved _classPrivateFieldLooseBase(this, _uploader)[_uploader] = new MiniXHRUpload(uppy, xhrOptions); } uninstall() { this.uppy.removeUploader(_classPrivateFieldLooseBase(this, _handleUpload)[_handleUpload]); } }), _class.VERSION = "2.0.8", _temp); },{"./MiniXHRUpload":4,"./isXml":6,"./locale":7,"@uppy/companion-client":13,"@uppy/core/lib/BasePlugin":15,"@uppy/utils/lib/RateLimitedQueue":30}],6:[function(require,module,exports){ /** * Remove parameters like `charset=utf-8` from the end of a mime type string. * * @param {string} mimeType - The mime type string that may have optional parameters. * @returns {string} The "base" mime type, i.e. only 'category/type'. */ function removeMimeParameters(mimeType) { return mimeType.replace(/;.*$/, ''); } /** * Check if a response contains XML based on the response object and its text content. * * @param {string} content - The text body of the response. * @param {object|XMLHttpRequest} xhr - The XHR object or response object from Companion. * @returns {bool} Whether the content is (probably) XML. */ function isXml(content, xhr) { const rawContentType = xhr.headers ? xhr.headers['content-type'] : xhr.getResponseHeader('Content-Type'); if (typeof rawContentType === 'string') { const contentType = removeMimeParameters(rawContentType).toLowerCase(); if (contentType === 'application/xml' || contentType === 'text/xml') { return true; } // GCS uses text/html for some reason // https://github.com/transloadit/uppy/issues/896 if (contentType === 'text/html' && /^<\?xml /.test(content)) { return true; } } return false; } module.exports = isXml; },{}],7:[function(require,module,exports){ module.exports = { strings: { timedOut: 'Upload stalled for %{seconds} seconds, aborting.' 
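// `%{seconds}` is interpolated by Uppy's translator; MiniXHRUpload above passes
// { seconds: Math.ceil(opts.timeout / 1000) } when its ProgressTimeout fires.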
} }; },{}],8:[function(require,module,exports){ 'use strict'; class AuthError extends Error { constructor() { super('Authorization required'); this.name = 'AuthError'; this.isAuthError = true; } } module.exports = AuthError; },{}],9:[function(require,module,exports){ 'use strict'; const RequestClient = require('./RequestClient'); const tokenStorage = require('./tokenStorage'); const getName = id => { return id.split('-').map(s => s.charAt(0).toUpperCase() + s.slice(1)).join(' '); }; module.exports = class Provider extends RequestClient { constructor(uppy, opts) { super(uppy, opts); this.provider = opts.provider; this.id = this.provider; this.name = this.opts.name || getName(this.id); this.pluginId = this.opts.pluginId; this.tokenKey = `companion-${this.pluginId}-auth-token`; this.companionKeysParams = this.opts.companionKeysParams; this.preAuthToken = null; } headers() { return Promise.all([super.headers(), this.getAuthToken()]).then(_ref => { let [headers, token] = _ref; const authHeaders = {}; if (token) { authHeaders['uppy-auth-token'] = token; } if (this.companionKeysParams) { authHeaders['uppy-credentials-params'] = btoa(JSON.stringify({ params: this.companionKeysParams })); } return { ...headers, ...authHeaders }; }); } onReceiveResponse(response) { response = super.onReceiveResponse(response); const plugin = this.uppy.getPlugin(this.pluginId); const oldAuthenticated = plugin.getPluginState().authenticated; const authenticated = oldAuthenticated ? response.status !== 401 : response.status < 400; plugin.setPluginState({ authenticated }); return response; } setAuthToken(token) { return this.uppy.getPlugin(this.pluginId).storage.setItem(this.tokenKey, token); } getAuthToken() { return this.uppy.getPlugin(this.pluginId).storage.getItem(this.tokenKey); } /** * Ensure we have a preauth token if necessary. Attempts to fetch one if we don't, * or rejects if loading one fails. */ async ensurePreAuth() { if (this.companionKeysParams && !this.preAuthToken) { await this.fetchPreAuthToken(); if (!this.preAuthToken) { throw new Error('Could not load authentication data required for third-party login. Please try again later.'); } } } authUrl(queries) { if (queries === void 0) { queries = {}; } const params = new URLSearchParams(queries); if (this.preAuthToken) { params.set('uppyPreAuthToken', this.preAuthToken); } return `${this.hostname}/${this.id}/connect?${params}`; } fileUrl(id) { return `${this.hostname}/${this.id}/get/${id}`; } async fetchPreAuthToken() { if (!this.companionKeysParams) { return; } try { const res = await this.post(`${this.id}/preauth/`, { params: this.companionKeysParams }); this.preAuthToken = res.token; } catch (err) { this.uppy.log(`[CompanionClient] unable to fetch preAuthToken ${err}`, 'warning'); } } list(directory) { return this.get(`${this.id}/list/${directory || ''}`); } logout() { return this.get(`${this.id}/logout`).then(response => Promise.all([response, this.uppy.getPlugin(this.pluginId).storage.removeItem(this.tokenKey)])).then(_ref2 => { let [response] = _ref2; return response; }); } static initPlugin(plugin, opts, defaultOpts) { plugin.type = 'acquirer'; plugin.files = []; if (defaultOpts) { plugin.opts = { ...defaultOpts, ...opts }; } if (opts.serverUrl || opts.serverPattern) { throw new Error('`serverUrl` and `serverPattern` have been renamed to `companionUrl` and `companionAllowedHosts` respectively in the 0.30.5 release. 
Please consult the docs (for example, https://uppy.io/docs/instagram/ for the Instagram plugin) and use the updated options.`'); } if (opts.companionAllowedHosts) { const pattern = opts.companionAllowedHosts; // validate companionAllowedHosts param if (typeof pattern !== 'string' && !Array.isArray(pattern) && !(pattern instanceof RegExp)) { throw new TypeError(`${plugin.id}: the option "companionAllowedHosts" must be one of string, Array, RegExp`); } plugin.opts.companionAllowedHosts = pattern; } else if (/^(?!https?:\/\/).*$/i.test(opts.companionUrl)) { // does not start with https:// plugin.opts.companionAllowedHosts = `https://${opts.companionUrl.replace(/^\/\//, '')}`; } else { plugin.opts.companionAllowedHosts = new URL(opts.companionUrl).origin; } plugin.storage = plugin.opts.storage || tokenStorage; } }; },{"./RequestClient":10,"./tokenStorage":14}],10:[function(require,module,exports){ 'use strict'; let _class, _getPostResponseFunc, _getUrl, _errorHandler, _temp; function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; } let id = 0; function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; } const fetchWithNetworkError = require('@uppy/utils/lib/fetchWithNetworkError'); const AuthError = require('./AuthError'); // Remove the trailing slash so we can always safely append /xyz. function stripSlash(url) { return url.replace(/\/$/, ''); } async function handleJSONResponse(res) { if (res.status === 401) { throw new AuthError(); } const jsonPromise = res.json(); if (res.status < 200 || res.status > 300) { let errMsg = `Failed request with status: ${res.status}. ${res.statusText}`; try { const errData = await jsonPromise; errMsg = errData.message ? `${errMsg} message: ${errData.message}` : errMsg; errMsg = errData.requestId ? `${errMsg} request-Id: ${errData.requestId}` : errMsg; } finally { // eslint-disable-next-line no-unsafe-finally throw new Error(errMsg); } } return jsonPromise; } module.exports = (_temp = (_getPostResponseFunc = /*#__PURE__*/_classPrivateFieldLooseKey("getPostResponseFunc"), _getUrl = /*#__PURE__*/_classPrivateFieldLooseKey("getUrl"), _errorHandler = /*#__PURE__*/_classPrivateFieldLooseKey("errorHandler"), _class = class RequestClient { // eslint-disable-next-line global-require constructor(uppy, opts) { Object.defineProperty(this, _errorHandler, { value: _errorHandler2 }); Object.defineProperty(this, _getUrl, { value: _getUrl2 }); Object.defineProperty(this, _getPostResponseFunc, { writable: true, value: skip => response => skip ? response : this.onReceiveResponse(response) }); this.uppy = uppy; this.opts = opts; this.onReceiveResponse = this.onReceiveResponse.bind(this); this.allowedHeaders = ['accept', 'content-type', 'uppy-auth-token']; this.preflightDone = false; } get hostname() { const { companion } = this.uppy.getState(); const host = this.opts.companionUrl; return stripSlash(companion && companion[host] ? companion[host] : host); } headers() { const userHeaders = this.opts.companionHeaders || {}; return Promise.resolve({ ...RequestClient.defaultHeaders, ...userHeaders }); } onReceiveResponse(response) { const state = this.uppy.getState(); const companion = state.companion || {}; const host = this.opts.companionUrl; const { headers } = response; // Store the self-identified domain name for the Companion instance we just hit. 
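// Companion identifies itself with an `i-am` response header; once stored in Uppy state,
// the `hostname` getter above prefers that value over the configured `companionUrl`.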
if (headers.has('i-am') && headers.get('i-am') !== companion[host]) { this.uppy.setState({ companion: { ...companion, [host]: headers.get('i-am') } }); } return response; } preflight(path) { if (this.preflightDone) { return Promise.resolve(this.allowedHeaders.slice()); } return fetch(_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path), { method: 'OPTIONS' }).then(response => { if (response.headers.has('access-control-allow-headers')) { this.allowedHeaders = response.headers.get('access-control-allow-headers').split(',').map(headerName => headerName.trim().toLowerCase()); } this.preflightDone = true; return this.allowedHeaders.slice(); }).catch(err => { this.uppy.log(`[CompanionClient] unable to make preflight request ${err}`, 'warning'); this.preflightDone = true; return this.allowedHeaders.slice(); }); } preflightAndHeaders(path) { return Promise.all([this.preflight(path), this.headers()]).then(_ref => { let [allowedHeaders, headers] = _ref; // filter to keep only allowed Headers Object.keys(headers).forEach(header => { if (!allowedHeaders.includes(header.toLowerCase())) { this.uppy.log(`[CompanionClient] excluding disallowed header ${header}`); delete headers[header]; // eslint-disable-line no-param-reassign } }); return headers; }); } get(path, skipPostResponse) { const method = 'get'; return this.preflightAndHeaders(path).then(headers => fetchWithNetworkError(_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path), { method, headers, credentials: this.opts.companionCookiesRule || 'same-origin' })).then(_classPrivateFieldLooseBase(this, _getPostResponseFunc)[_getPostResponseFunc](skipPostResponse)).then(handleJSONResponse).catch(_classPrivateFieldLooseBase(this, _errorHandler)[_errorHandler](method, path)); } post(path, data, skipPostResponse) { const method = 'post'; return this.preflightAndHeaders(path).then(headers => fetchWithNetworkError(_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path), { method, headers, credentials: this.opts.companionCookiesRule || 'same-origin', body: JSON.stringify(data) })).then(_classPrivateFieldLooseBase(this, _getPostResponseFunc)[_getPostResponseFunc](skipPostResponse)).then(handleJSONResponse).catch(_classPrivateFieldLooseBase(this, _errorHandler)[_errorHandler](method, path)); } delete(path, data, skipPostResponse) { const method = 'delete'; return this.preflightAndHeaders(path).then(headers => fetchWithNetworkError(`${this.hostname}/${path}`, { method, headers, credentials: this.opts.companionCookiesRule || 'same-origin', body: data ? 
JSON.stringify(data) : null })).then(_classPrivateFieldLooseBase(this, _getPostResponseFunc)[_getPostResponseFunc](skipPostResponse)).then(handleJSONResponse).catch(_classPrivateFieldLooseBase(this, _errorHandler)[_errorHandler](method, path)); } }), _class.VERSION = "2.0.5", _class.defaultHeaders = { Accept: 'application/json', 'Content-Type': 'application/json', 'Uppy-Versions': `@uppy/companion-client=${_class.VERSION}` }, _temp); function _getUrl2(url) { if (/^(https?:|)\/\//.test(url)) { return url; } return `${this.hostname}/${url}`; } function _errorHandler2(method, path) { return err => { let _err; if (!((_err = err) != null && _err.isAuthError)) { const error = new Error(`Could not ${method} ${_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path)}`); error.cause = err; err = error; // eslint-disable-line no-param-reassign } return Promise.reject(err); }; } },{"./AuthError":8,"@uppy/utils/lib/fetchWithNetworkError":34}],11:[function(require,module,exports){ 'use strict'; const RequestClient = require('./RequestClient'); const getName = id => { return id.split('-').map(s => s.charAt(0).toUpperCase() + s.slice(1)).join(' '); }; module.exports = class SearchProvider extends RequestClient { constructor(uppy, opts) { super(uppy, opts); this.provider = opts.provider; this.id = this.provider; this.name = this.opts.name || getName(this.id); this.pluginId = this.opts.pluginId; } fileUrl(id) { return `${this.hostname}/search/${this.id}/get/${id}`; } search(text, queries) { queries = queries ? `&${queries}` : ''; return this.get(`search/${this.id}/list?q=${encodeURIComponent(text)}${queries}`); } }; },{"./RequestClient":10}],12:[function(require,module,exports){ let _queued, _emitter, _isOpen, _socket, _handleMessage; let _Symbol$for, _Symbol$for2; function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; } let id = 0; function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; } const ee = require('namespace-emitter'); module.exports = (_queued = /*#__PURE__*/_classPrivateFieldLooseKey("queued"), _emitter = /*#__PURE__*/_classPrivateFieldLooseKey("emitter"), _isOpen = /*#__PURE__*/_classPrivateFieldLooseKey("isOpen"), _socket = /*#__PURE__*/_classPrivateFieldLooseKey("socket"), _handleMessage = /*#__PURE__*/_classPrivateFieldLooseKey("handleMessage"), _Symbol$for = Symbol.for('uppy test: getSocket'), _Symbol$for2 = Symbol.for('uppy test: getQueued'), class UppySocket { constructor(opts) { Object.defineProperty(this, _queued, { writable: true, value: [] }); Object.defineProperty(this, _emitter, { writable: true, value: ee() }); Object.defineProperty(this, _isOpen, { writable: true, value: false }); Object.defineProperty(this, _socket, { writable: true, value: void 0 }); Object.defineProperty(this, _handleMessage, { writable: true, value: e => { try { const message = JSON.parse(e.data); this.emit(message.action, message.payload); } catch (err) { // TODO: use a more robust error handler. 
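/*
 * Illustrative note (editor's sketch, not part of the original bundle): every message
 * on this WebSocket is a JSON object of the shape { action, payload }, which the
 * handler above re-emits on the internal emitter. A consumer could look roughly like
 * the following; the target URL and the 'progress'/'pause' action names are
 * assumptions for illustration only.
 *
 *   const Socket = require('./Socket')
 *   const socket = new Socket({ target: 'wss://companion.example.com/api/abc123' })
 *   socket.on('progress', (payload) => console.log('progress', payload))
 *   socket.send('pause', {}) // queued until the underlying socket reports open
 */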
console.log(err); // eslint-disable-line no-console } } }); this.opts = opts; if (!opts || opts.autoOpen !== false) { this.open(); } } get isOpen() { return _classPrivateFieldLooseBase(this, _isOpen)[_isOpen]; } [_Symbol$for]() { return _classPrivateFieldLooseBase(this, _socket)[_socket]; } [_Symbol$for2]() { return _classPrivateFieldLooseBase(this, _queued)[_queued]; } open() { _classPrivateFieldLooseBase(this, _socket)[_socket] = new WebSocket(this.opts.target); _classPrivateFieldLooseBase(this, _socket)[_socket].onopen = () => { _classPrivateFieldLooseBase(this, _isOpen)[_isOpen] = true; while (_classPrivateFieldLooseBase(this, _queued)[_queued].length > 0 && _classPrivateFieldLooseBase(this, _isOpen)[_isOpen]) { const first = _classPrivateFieldLooseBase(this, _queued)[_queued].shift(); this.send(first.action, first.payload); } }; _classPrivateFieldLooseBase(this, _socket)[_socket].onclose = () => { _classPrivateFieldLooseBase(this, _isOpen)[_isOpen] = false; }; _classPrivateFieldLooseBase(this, _socket)[_socket].onmessage = _classPrivateFieldLooseBase(this, _handleMessage)[_handleMessage]; } close() { let _classPrivateFieldLoo; (_classPrivateFieldLoo = _classPrivateFieldLooseBase(this, _socket)[_socket]) == null ? void 0 : _classPrivateFieldLoo.close(); } send(action, payload) { // attach uuid if (!_classPrivateFieldLooseBase(this, _isOpen)[_isOpen]) { _classPrivateFieldLooseBase(this, _queued)[_queued].push({ action, payload }); return; } _classPrivateFieldLooseBase(this, _socket)[_socket].send(JSON.stringify({ action, payload })); } on(action, handler) { _classPrivateFieldLooseBase(this, _emitter)[_emitter].on(action, handler); } emit(action, payload) { _classPrivateFieldLooseBase(this, _emitter)[_emitter].emit(action, payload); } once(action, handler) { _classPrivateFieldLooseBase(this, _emitter)[_emitter].once(action, handler); } }); },{"namespace-emitter":56}],13:[function(require,module,exports){ 'use strict'; /** * Manages communications with Companion */ const RequestClient = require('./RequestClient'); const Provider = require('./Provider'); const SearchProvider = require('./SearchProvider'); const Socket = require('./Socket'); module.exports = { RequestClient, Provider, SearchProvider, Socket }; },{"./Provider":9,"./RequestClient":10,"./SearchProvider":11,"./Socket":12}],14:[function(require,module,exports){ 'use strict'; /** * This module serves as an Async wrapper for LocalStorage */ module.exports.setItem = (key, value) => { return new Promise(resolve => { localStorage.setItem(key, value); resolve(); }); }; module.exports.getItem = key => { return Promise.resolve(localStorage.getItem(key)); }; module.exports.removeItem = key => { return new Promise(resolve => { localStorage.removeItem(key); resolve(); }); }; },{}],15:[function(require,module,exports){ /** * Core plugin logic that all plugins share. * * BasePlugin does not contain DOM rendering so it can be used for plugins * without a user interface. * * See `Plugin` for the extended version with Preact rendering for interfaces. 
*/ const Translator = require('@uppy/utils/lib/Translator'); module.exports = class BasePlugin { constructor(uppy, opts) { if (opts === void 0) { opts = {}; } this.uppy = uppy; this.opts = opts; } getPluginState() { const { plugins } = this.uppy.getState(); return plugins[this.id] || {}; } setPluginState(update) { const { plugins } = this.uppy.getState(); this.uppy.setState({ plugins: { ...plugins, [this.id]: { ...plugins[this.id], ...update } } }); } setOptions(newOpts) { this.opts = { ...this.opts, ...newOpts }; this.setPluginState(); // so that UI re-renders with new options this.i18nInit(); } i18nInit() { const translator = new Translator([this.defaultLocale, this.uppy.locale, this.opts.locale]); this.i18n = translator.translate.bind(translator); this.i18nArray = translator.translateArray.bind(translator); this.setPluginState(); // so that UI re-renders and we see the updated locale } /** * Extendable methods * ================== * These methods are here to serve as an overview of the extendable methods as well as * making them not conditional in use, such as `if (this.afterUpdate)`. */ // eslint-disable-next-line class-methods-use-this addTarget() { throw new Error('Extend the addTarget method to add your plugin to another plugin\'s target'); } // eslint-disable-next-line class-methods-use-this install() {} // eslint-disable-next-line class-methods-use-this uninstall() {} /** * Called when plugin is mounted, whether in DOM or into another plugin. * Needed because sometimes plugins are mounted separately/after `install`, * so this.el and this.parent might not be available in `install`. * This is the case with @uppy/react plugins, for example. */ render() { throw new Error('Extend the render method to add your plugin to a DOM element'); } // eslint-disable-next-line class-methods-use-this update() {} // Called after every state update, after everything's mounted. Debounced. // eslint-disable-next-line class-methods-use-this afterUpdate() {} }; },{"@uppy/utils/lib/Translator":31}],16:[function(require,module,exports){ /* eslint-disable max-classes-per-file, class-methods-use-this */ /* global AggregateError */ const prettierBytes = require('@transloadit/prettier-bytes'); const match = require('mime-match'); const defaultOptions = { maxFileSize: null, minFileSize: null, maxTotalFileSize: null, maxNumberOfFiles: null, minNumberOfFiles: null, allowedFileTypes: null, requiredMetaFields: [] }; class RestrictionError extends Error { constructor(...args) { super(...args); this.isRestriction = true; } } if (typeof AggregateError === 'undefined') { // eslint-disable-next-line no-global-assign // TODO: remove this "polyfill" in the next major.
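/*
 * Illustrative note (editor's sketch, not part of the original bundle): a minimal
 * plugin built on the BasePlugin class exported above. The class name, the 'modifier'
 * type and the option values are hypothetical; only the constructor signature,
 * install()/uninstall() and the plugin-state helpers come from BasePlugin itself,
 * and `uppy.use()` comes from @uppy/core outside this excerpt.
 *
 *   class MyCounterPlugin extends BasePlugin {
 *     constructor (uppy, opts) {
 *       super(uppy, opts)
 *       this.id = opts.id || 'MyCounterPlugin'
 *       this.type = 'modifier'
 *     }
 *
 *     install () {
 *       // seed some plugin-scoped state via the BasePlugin helpers
 *       this.setPluginState({ filesSeen: 0 })
 *     }
 *
 *     uninstall () {
 *       this.setPluginState({})
 *     }
 *   }
 *
 *   uppy.use(MyCounterPlugin, { id: 'counter' })
 */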
globalThis.AggregateError = class AggregateError extends Error { constructor(errors, message) { super(message); this.errors = errors; } }; } class Restricter { constructor(getOpts, i18n) { this.i18n = i18n; this.getOpts = () => { const opts = getOpts(); if (opts.restrictions.allowedFileTypes != null && !Array.isArray(opts.restrictions.allowedFileTypes)) { throw new TypeError('`restrictions.allowedFileTypes` must be an array'); } return opts; }; } validate(file, files) { const { maxFileSize, minFileSize, maxTotalFileSize, maxNumberOfFiles, allowedFileTypes } = this.getOpts().restrictions; if (maxNumberOfFiles && files.length + 1 > maxNumberOfFiles) { throw new RestrictionError(`${this.i18n('youCanOnlyUploadX', { smart_count: maxNumberOfFiles })}`); } if (allowedFileTypes) { const isCorrectFileType = allowedFileTypes.some(type => { // check if this is a mime-type if (type.includes('/')) { if (!file.type) {return false;} return match(file.type.replace(/;.*?$/, ''), type); } // otherwise this is likely an extension if (type[0] === '.' && file.extension) { return file.extension.toLowerCase() === type.substr(1).toLowerCase(); } return false; }); if (!isCorrectFileType) { const allowedFileTypesString = allowedFileTypes.join(', '); throw new RestrictionError(this.i18n('youCanOnlyUploadFileTypes', { types: allowedFileTypesString })); } } // We can't check maxTotalFileSize if the size is unknown. if (maxTotalFileSize && file.size != null) { const totalFilesSize = files.reduce((total, f) => total + f.size, file.size); if (totalFilesSize > maxTotalFileSize) { throw new RestrictionError(this.i18n('exceedsSize', { size: prettierBytes(maxTotalFileSize), file: file.name })); } } // We can't check maxFileSize if the size is unknown. if (maxFileSize && file.size != null && file.size > maxFileSize) { throw new RestrictionError(this.i18n('exceedsSize', { size: prettierBytes(maxFileSize), file: file.name })); } // We can't check minFileSize if the size is unknown. if (minFileSize && file.size != null && file.size < minFileSize) { throw new RestrictionError(this.i18n('inferiorSize', { size: prettierBytes(minFileSize) })); } } validateMinNumberOfFiles(files) { const { minNumberOfFiles } = this.getOpts().restrictions; if (Object.keys(files).length < minNumberOfFiles) { throw new RestrictionError(this.i18n('youHaveToAtLeastSelectX', { smart_count: minNumberOfFiles })); } } getMissingRequiredMetaFields(file) { const error = new RestrictionError(this.i18n('missingRequiredMetaFieldOnFile', { fileName: file.name })); const { requiredMetaFields } = this.getOpts().restrictions; // TODO: migrate to Object.hasOwn in the next major. 
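/*
 * Illustrative note (editor's sketch, not part of the original bundle): the Restricter
 * above is driven by a `restrictions` object on the Uppy options (in @uppy/core,
 * outside this excerpt, it is constructed with a getOpts callback that returns those
 * options). The values below are made-up examples of the keys listed in
 * `defaultOptions`; mime wildcards go through `mime-match` and leading-dot entries are
 * compared against the file extension, as `validate()` shows.
 *
 *   const restrictions = {
 *     maxFileSize: 10 * 1024 * 1024,         // 10 MiB per file
 *     maxTotalFileSize: 100 * 1024 * 1024,   // 100 MiB across all files
 *     maxNumberOfFiles: 20,
 *     allowedFileTypes: ['image/*', '.pdf'],
 *     requiredMetaFields: ['caption'],       // checked by getMissingRequiredMetaFields below
 *   }
 */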
const own = Object.prototype.hasOwnProperty; const missingFields = []; for (const field of requiredMetaFields) { if (!own.call(file.meta, field) || file.meta[field] === '') { missingFields.push(field); } } return { missingFields, error }; } } module.exports = { Restricter, defaultOptions, RestrictionError }; },{"@transloadit/prettier-bytes":1,"mime-match":55}],17:[function(require,module,exports){ function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; } let id = 0; function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; } const { render } = require('preact'); const findDOMElement = require('@uppy/utils/lib/findDOMElement'); const BasePlugin = require('./BasePlugin'); /** * Defer a frequent call to the microtask queue. * * @param {() => T} fn * @returns {Promise<T>} */ function debounce(fn) { let calling = null; let latestArgs = null; return function () { for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } latestArgs = args; if (!calling) { calling = Promise.resolve().then(() => { calling = null; // At this point `args` may be different from the most // recent state, if multiple calls happened since this task // was queued. So we use the `latestArgs`, which definitely // is the most recent call. return fn(...latestArgs); }); } return calling; }; } /** * UIPlugin is the extended version of BasePlugin to incorporate rendering with Preact. * Use this for plugins that need a user interface. * * For plugins without a user interface, see BasePlugin. */ let _updateUI = /*#__PURE__*/_classPrivateFieldLooseKey("updateUI"); class UIPlugin extends BasePlugin { constructor(...args) { super(...args); Object.defineProperty(this, _updateUI, { writable: true, value: void 0 }); } /** * Check if supplied `target` is a DOM element or an `object`. * If it’s an object — target is a plugin, and we search `plugins` * for a plugin with same name and return its target. */ mount(target, plugin) { const callerPluginName = plugin.id; const targetElement = findDOMElement(target); if (targetElement) { this.isTargetDOMEl = true; // When target is <body> with a single
element, // Preact thinks it’s the Uppy root element in there when doing a diff, // and destroys it. So we are creating a fragment (could be empty div) const uppyRootElement = document.createDocumentFragment(); // API for plugins that require a synchronous rerender. _classPrivateFieldLooseBase(this, _updateUI)[_updateUI] = debounce(state => { // plugin could be removed, but this.rerender is debounced below, // so it could still be called even after uppy.removePlugin or uppy.close // hence the check if (!this.uppy.getPlugin(this.id)) {return;} render(this.render(state), uppyRootElement); this.afterUpdate(); }); this.uppy.log(`Installing ${callerPluginName} to a DOM element '${target}'`); if (this.opts.replaceTargetContent) { // Doing render(h(null), targetElement), which should have been // a better way, since because the component might need to do additional cleanup when it is removed, // stopped working — Preact just adds null into target, not replacing targetElement.innerHTML = ''; } render(this.render(this.uppy.getState()), uppyRootElement); this.el = uppyRootElement.firstElementChild; targetElement.appendChild(uppyRootElement); this.onMount(); return this.el; } let targetPlugin; if (typeof target === 'object' && target instanceof UIPlugin) { // Targeting a plugin *instance* targetPlugin = target; } else if (typeof target === 'function') { // Targeting a plugin type const Target = target; // Find the target plugin instance. this.uppy.iteratePlugins(p => { if (p instanceof Target) { targetPlugin = p; return false; } }); } if (targetPlugin) { this.uppy.log(`Installing ${callerPluginName} to ${targetPlugin.id}`); this.parent = targetPlugin; this.el = targetPlugin.addTarget(plugin); this.onMount(); return this.el; } this.uppy.log(`Not installing ${callerPluginName}`); let message = `Invalid target option given to ${callerPluginName}.`; if (typeof target === 'function') { message += ' The given target is not a Plugin class. ' + 'Please check that you\'re not specifying a React Component instead of a plugin. ' + 'If you are using @uppy/* packages directly, make sure you have only 1 version of @uppy/core installed: ' + 'run `npm ls @uppy/core` on the command line and verify that all the versions match and are deduped correctly.'; } else { message += 'If you meant to target an HTML element, please make sure that the element exists. ' + 'Check that the