// 2022-06-22 12:45:35 +02:00
// discourse-skip-module
// 2022-06-22 09:14:55 +02:00
( function ( ) { function r ( e , n , t ) { function o ( i , f ) { if ( ! n [ i ] ) { if ( ! e [ i ] ) { let c = "function" === typeof require && require ; if ( ! f && c ) { return c ( i , ! 0 ) ; } if ( u ) { return u ( i , ! 0 ) ; } let a = new Error ( "Cannot find module '" + i + "'" ) ; throw a . code = "MODULE_NOT_FOUND" , a ; } let p = n [ i ] = { exports : { } } ; e [ i ] [ 0 ] . call ( p . exports , function ( r ) { let n = e [ i ] [ 1 ] [ r ] ; return o ( n || r ) ; } , p , p . exports , r , e , n , t ) ; } return n [ i ] . exports ; } for ( var u = "function" === typeof require && require , i = 0 ; i < t . length ; i ++ ) { o ( t [ i ] ) ; } return o ; } return r ; } ) ( ) ( { 1 : [ function ( require , module , exports ) {
// Adapted from https://github.com/Flet/prettier-bytes/
// Changing 1000 bytes to 1024, so we can keep uppercase KB vs kB
// ISC License (c) Dan Flettre https://github.com/Flet/prettier-bytes/blob/master/LICENSE
module . exports = function prettierBytes ( num ) {
if ( typeof num !== 'number' || isNaN ( num ) ) {
throw new TypeError ( 'Expected a number, got ' + typeof num ) ;
}
let neg = num < 0 ;
let units = [ 'B' , 'KB' , 'MB' , 'GB' , 'TB' , 'PB' , 'EB' , 'ZB' , 'YB' ] ;
if ( neg ) {
num = - num ;
}
if ( num < 1 ) {
return ( neg ? '-' : '' ) + num + ' B' ;
}
let exponent = Math . min ( Math . floor ( Math . log ( num ) / Math . log ( 1024 ) ) , units . length - 1 ) ;
num = Number ( num / Math . pow ( 1024 , exponent ) ) ;
let unit = units [ exponent ] ;
if ( num >= 10 || num % 1 === 0 ) {
// Do not show decimals when the number is two-digit, or if the number has no
// decimal component.
return ( neg ? '-' : '' ) + num . toFixed ( 0 ) + ' ' + unit ;
} else {
return ( neg ? '-' : '' ) + num . toFixed ( 1 ) + ' ' + unit ;
}
} ;
} , { } ] , 2 : [ function ( require , module , exports ) {
function _classPrivateFieldLooseBase ( receiver , privateKey ) { if ( ! Object . prototype . hasOwnProperty . call ( receiver , privateKey ) ) { throw new TypeError ( "attempted to use private field on non-instance" ) ; } return receiver ; }
let id = 0 ;
function _classPrivateFieldLooseKey ( name ) { return "__private_" + id ++ + "_" + name ; }
const {
AbortController ,
createAbortError
} = require ( '@uppy/utils/lib/AbortController' ) ;
const delay = require ( '@uppy/utils/lib/delay' ) ;
const MB = 1024 * 1024 ;
const defaultOptions = {
limit : 1 ,
retryDelays : [ 0 , 1000 , 3000 , 5000 ] ,
getChunkSize ( file ) {
return Math . ceil ( file . size / 10000 ) ;
} ,
onStart ( ) { } ,
onProgress ( ) { } ,
onPartComplete ( ) { } ,
onSuccess ( ) { } ,
onError ( err ) {
throw err ;
}
} ;
function ensureInt ( value ) {
if ( typeof value === 'string' ) {
return parseInt ( value , 10 ) ;
}
if ( typeof value === 'number' ) {
return value ;
}
throw new TypeError ( 'Expected a number' ) ;
}
let _aborted = /*#__PURE__*/ _classPrivateFieldLooseKey ( "aborted" ) ;
let _initChunks = /*#__PURE__*/ _classPrivateFieldLooseKey ( "initChunks" ) ;
let _createUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "createUpload" ) ;
let _resumeUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "resumeUpload" ) ;
let _uploadParts = /*#__PURE__*/ _classPrivateFieldLooseKey ( "uploadParts" ) ;
let _retryable = /*#__PURE__*/ _classPrivateFieldLooseKey ( "retryable" ) ;
let _prepareUploadParts = /*#__PURE__*/ _classPrivateFieldLooseKey ( "prepareUploadParts" ) ;
let _uploadPartRetryable = /*#__PURE__*/ _classPrivateFieldLooseKey ( "uploadPartRetryable" ) ;
let _uploadPart = /*#__PURE__*/ _classPrivateFieldLooseKey ( "uploadPart" ) ;
let _onPartProgress = /*#__PURE__*/ _classPrivateFieldLooseKey ( "onPartProgress" ) ;
let _onPartComplete = /*#__PURE__*/ _classPrivateFieldLooseKey ( "onPartComplete" ) ;
let _uploadPartBytes = /*#__PURE__*/ _classPrivateFieldLooseKey ( "uploadPartBytes" ) ;
let _completeUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "completeUpload" ) ;
let _abortUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "abortUpload" ) ;
let _onError = /*#__PURE__*/ _classPrivateFieldLooseKey ( "onError" ) ;
class MultipartUploader {
constructor ( file , options ) {
Object . defineProperty ( this , _onError , {
value : _onError2
} ) ;
Object . defineProperty ( this , _abortUpload , {
value : _abortUpload2
} ) ;
Object . defineProperty ( this , _completeUpload , {
value : _completeUpload2
} ) ;
Object . defineProperty ( this , _uploadPartBytes , {
value : _uploadPartBytes2
} ) ;
Object . defineProperty ( this , _onPartComplete , {
value : _onPartComplete2
} ) ;
Object . defineProperty ( this , _onPartProgress , {
value : _onPartProgress2
} ) ;
Object . defineProperty ( this , _uploadPart , {
value : _uploadPart2
} ) ;
Object . defineProperty ( this , _uploadPartRetryable , {
value : _uploadPartRetryable2
} ) ;
Object . defineProperty ( this , _prepareUploadParts , {
value : _prepareUploadParts2
} ) ;
Object . defineProperty ( this , _retryable , {
value : _retryable2
} ) ;
Object . defineProperty ( this , _uploadParts , {
value : _uploadParts2
} ) ;
Object . defineProperty ( this , _resumeUpload , {
value : _resumeUpload2
} ) ;
Object . defineProperty ( this , _createUpload , {
value : _createUpload2
} ) ;
Object . defineProperty ( this , _initChunks , {
value : _initChunks2
} ) ;
Object . defineProperty ( this , _aborted , {
value : _aborted2
} ) ;
this . options = { ... defaultOptions ,
... options
} ; // Use default `getChunkSize` if it was null or something
if ( ! this . options . getChunkSize ) {
this . options . getChunkSize = defaultOptions . getChunkSize ;
}
this . file = file ;
this . abortController = new AbortController ( ) ;
this . key = this . options . key || null ;
this . uploadId = this . options . uploadId || null ;
this . parts = [ ] ; // Do `this.createdPromise.then(OP)` to execute an operation `OP` _only_ if the
// upload was created already. That also ensures that the sequencing is right
// (so the `OP` definitely happens if the upload is created).
//
// This mostly exists to make `#abortUpload` work well: only sending the abort request if
// the upload was already created, and if the createMultipartUpload request is still in flight,
// aborting it immediately after it finishes.
this . createdPromise = Promise . reject ( ) ; // eslint-disable-line prefer-promise-reject-errors
this . isPaused = false ;
this . partsInProgress = 0 ;
this . chunks = null ;
this . chunkState = null ;
_classPrivateFieldLooseBase ( this , _initChunks ) [ _initChunks ] ( ) ;
this . createdPromise . catch ( ( ) => { } ) ; // silence uncaught rejection warning
}
/ * *
* Was this upload aborted ?
*
* If yes , we may need to throw an AbortError .
*
* @ returns { boolean }
* /
start ( ) {
this . isPaused = false ;
if ( this . uploadId ) {
_classPrivateFieldLooseBase ( this , _resumeUpload ) [ _resumeUpload ] ( ) ;
} else {
_classPrivateFieldLooseBase ( this , _createUpload ) [ _createUpload ] ( ) ;
}
}
pause ( ) {
this . abortController . abort ( ) ; // Swap it out for a new controller, because this instance may be resumed later.
this . abortController = new AbortController ( ) ;
this . isPaused = true ;
}
abort ( opts ) {
let _opts ;
if ( opts === void 0 ) {
opts = undefined ;
}
if ( ( _opts = opts ) != null && _opts . really ) { _classPrivateFieldLooseBase ( this , _abortUpload ) [ _abortUpload ] ( ) ; } else { this . pause ( ) ; }
}
}
function _aborted2 ( ) {
return this . abortController . signal . aborted ;
}
function _initChunks2 ( ) {
const chunks = [ ] ;
const desiredChunkSize = this . options . getChunkSize ( this . file ) ; // at least 5MB per request, at most 10k requests
const minChunkSize = Math . max ( 5 * MB , Math . ceil ( this . file . size / 10000 ) ) ;
const chunkSize = Math . max ( desiredChunkSize , minChunkSize ) ; // Upload zero-sized files in one zero-sized chunk
if ( this . file . size === 0 ) {
chunks . push ( this . file ) ;
} else {
for ( let i = 0 ; i < this . file . size ; i += chunkSize ) {
const end = Math . min ( this . file . size , i + chunkSize ) ;
chunks . push ( this . file . slice ( i , end ) ) ;
}
}
this . chunks = chunks ;
this . chunkState = chunks . map ( ( ) => ( {
uploaded : 0 ,
busy : false ,
done : false
} ) ) ;
}
function _createUpload2 ( ) {
this . createdPromise = Promise . resolve ( ) . then ( ( ) => this . options . createMultipartUpload ( ) ) ;
return this . createdPromise . then ( result => {
if ( _classPrivateFieldLooseBase ( this , _aborted ) [ _aborted ] ( ) ) { throw createAbortError ( ) ; }
const valid = typeof result === 'object' && result && typeof result . uploadId === 'string' && typeof result . key === 'string' ;
if ( ! valid ) {
throw new TypeError ( 'AwsS3/Multipart: Got incorrect result from `createMultipartUpload()`, expected an object `{ uploadId, key }`.' ) ;
}
this . key = result . key ;
this . uploadId = result . uploadId ;
this . options . onStart ( result ) ;
_classPrivateFieldLooseBase ( this , _uploadParts ) [ _uploadParts ] ( ) ;
} ) . catch ( err => {
_classPrivateFieldLooseBase ( this , _onError ) [ _onError ] ( err ) ;
} ) ;
}
async function _resumeUpload2 ( ) {
try {
const parts = await this . options . listParts ( {
uploadId : this . uploadId ,
key : this . key
} ) ;
if ( _classPrivateFieldLooseBase ( this , _aborted ) [ _aborted ] ( ) ) { throw createAbortError ( ) ; }
parts . forEach ( part => {
const i = part . PartNumber - 1 ;
this . chunkState [ i ] = {
uploaded : ensureInt ( part . Size ) ,
etag : part . ETag ,
done : true
} ; // Only add if we did not yet know about this part.
if ( ! this . parts . some ( p => p . PartNumber === part . PartNumber ) ) {
this . parts . push ( {
PartNumber : part . PartNumber ,
ETag : part . ETag
} ) ;
}
} ) ;
_classPrivateFieldLooseBase ( this , _uploadParts ) [ _uploadParts ] ( ) ;
} catch ( err ) {
_classPrivateFieldLooseBase ( this , _onError ) [ _onError ] ( err ) ;
}
}
function _uploadParts2 ( ) {
if ( this . isPaused ) { return ; } // All parts are uploaded.
if ( this . chunkState . every ( state => state . done ) ) {
_classPrivateFieldLooseBase ( this , _completeUpload ) [ _completeUpload ] ( ) ;
return ;
} // For a 100MB file, with the default min chunk size of 5MB and a limit of 10:
//
// Total 20 parts
// ---------
// Need 1 is 10
// Need 2 is 5
// Need 3 is 5
const need = this . options . limit - this . partsInProgress ;
const completeChunks = this . chunkState . filter ( state => state . done ) . length ;
const remainingChunks = this . chunks . length - completeChunks ;
let minNeeded = Math . ceil ( this . options . limit / 2 ) ;
if ( minNeeded > remainingChunks ) {
minNeeded = remainingChunks ;
}
if ( need < minNeeded ) { return ; }
const candidates = [ ] ;
for ( let i = 0 ; i < this . chunkState . length ; i ++ ) {
const state = this . chunkState [ i ] ; // eslint-disable-next-line no-continue
if ( state . done || state . busy ) { continue ; }
candidates . push ( i ) ;
if ( candidates . length >= need ) {
break ;
}
}
if ( candidates . length === 0 ) { return ; }
_classPrivateFieldLooseBase ( this , _prepareUploadParts ) [ _prepareUploadParts ] ( candidates ) . then ( result => {
candidates . forEach ( index => {
const partNumber = index + 1 ;
const prePreparedPart = {
url : result . presignedUrls [ partNumber ] ,
headers : result . headers
} ;
_classPrivateFieldLooseBase ( this , _uploadPartRetryable ) [ _uploadPartRetryable ] ( index , prePreparedPart ) . then ( ( ) => {
_classPrivateFieldLooseBase ( this , _uploadParts ) [ _uploadParts ] ( ) ;
} , err => {
_classPrivateFieldLooseBase ( this , _onError ) [ _onError ] ( err ) ;
} ) ;
} ) ;
} ) ;
}
function _retryable2 ( _ref ) {
let {
before ,
attempt ,
after
} = _ref ;
const {
retryDelays
} = this . options ;
const {
signal
} = this . abortController ;
if ( before ) { before ( ) ; }
function shouldRetry ( err ) {
if ( err . source && typeof err . source . status === 'number' ) {
const {
status
} = err . source ; // 0 probably indicates network failure
return status === 0 || status === 409 || status === 423 || status >= 500 && status < 600 ;
}
return false ;
}
const doAttempt = retryAttempt => attempt ( ) . catch ( err => {
if ( _classPrivateFieldLooseBase ( this , _aborted ) [ _aborted ] ( ) ) { throw createAbortError ( ) ; }
if ( shouldRetry ( err ) && retryAttempt < retryDelays . length ) {
return delay ( retryDelays [ retryAttempt ] , {
signal
} ) . then ( ( ) => doAttempt ( retryAttempt + 1 ) ) ;
}
throw err ;
} ) ;
return doAttempt ( 0 ) . then ( result => {
if ( after ) { after ( ) ; }
return result ;
} , err => {
if ( after ) { after ( ) ; }
throw err ;
} ) ;
}
async function _prepareUploadParts2 ( candidates ) {
candidates . forEach ( i => {
this . chunkState [ i ] . busy = true ;
} ) ;
const result = await _classPrivateFieldLooseBase ( this , _retryable ) [ _retryable ] ( {
attempt : ( ) => this . options . prepareUploadParts ( {
key : this . key ,
uploadId : this . uploadId ,
partNumbers : candidates . map ( index => index + 1 ) ,
chunks : candidates . reduce ( ( chunks , candidate ) => ( { ... chunks ,
// Use the part number as the index
[ candidate + 1 ] : this . chunks [ candidate ]
} ) , { } )
} )
} ) ;
if ( typeof ( result == null ? void 0 : result . presignedUrls ) !== 'object' ) {
throw new TypeError ( 'AwsS3/Multipart: Got incorrect result from `prepareUploadParts()`, expected an object `{ presignedUrls }`.' ) ;
}
return result ;
}
function _uploadPartRetryable2 ( index , prePreparedPart ) {
return _classPrivateFieldLooseBase ( this , _retryable ) [ _retryable ] ( {
before : ( ) => {
this . partsInProgress += 1 ;
} ,
attempt : ( ) => _classPrivateFieldLooseBase ( this , _uploadPart ) [ _uploadPart ] ( index , prePreparedPart ) ,
after : ( ) => {
this . partsInProgress -= 1 ;
}
} ) ;
}
function _uploadPart2 ( index , prePreparedPart ) {
this . chunkState [ index ] . busy = true ;
const valid = typeof ( prePreparedPart == null ? void 0 : prePreparedPart . url ) === 'string' ;
if ( ! valid ) {
throw new TypeError ( 'AwsS3/Multipart: Got incorrect result for `prePreparedPart`, expected an object `{ url }`.' ) ;
}
const {
url ,
headers
} = prePreparedPart ;
if ( _classPrivateFieldLooseBase ( this , _aborted ) [ _aborted ] ( ) ) {
this . chunkState [ index ] . busy = false ;
throw createAbortError ( ) ;
}
return _classPrivateFieldLooseBase ( this , _uploadPartBytes ) [ _uploadPartBytes ] ( index , url , headers ) ;
}
function _onPartProgress2 ( index , sent ) {
this . chunkState [ index ] . uploaded = ensureInt ( sent ) ;
const totalUploaded = this . chunkState . reduce ( ( n , c ) => n + c . uploaded , 0 ) ;
this . options . onProgress ( totalUploaded , this . file . size ) ;
}
function _onPartComplete2 ( index , etag ) {
this . chunkState [ index ] . etag = etag ;
this . chunkState [ index ] . done = true ;
const part = {
PartNumber : index + 1 ,
ETag : etag
} ;
this . parts . push ( part ) ;
this . options . onPartComplete ( part ) ;
}
function _uploadPartBytes2 ( index , url , headers ) {
const body = this . chunks [ index ] ;
const {
signal
} = this . abortController ;
let defer ;
const promise = new Promise ( ( resolve , reject ) => {
defer = {
resolve ,
reject
} ;
} ) ;
const xhr = new XMLHttpRequest ( ) ;
xhr . open ( 'PUT' , url , true ) ;
if ( headers ) {
Object . keys ( headers ) . forEach ( key => {
xhr . setRequestHeader ( key , headers [ key ] ) ;
} ) ;
}
xhr . responseType = 'text' ;
function cleanup ( ) {
// eslint-disable-next-line no-use-before-define
signal . removeEventListener ( 'abort' , onabort ) ;
}
function onabort ( ) {
xhr . abort ( ) ;
}
signal . addEventListener ( 'abort' , onabort ) ;
xhr . upload . addEventListener ( 'progress' , ev => {
if ( ! ev . lengthComputable ) { return ; }
_classPrivateFieldLooseBase ( this , _onPartProgress ) [ _onPartProgress ] ( index , ev . loaded , ev . total ) ;
} ) ;
xhr . addEventListener ( 'abort' , ( ) => {
cleanup ( ) ;
this . chunkState [ index ] . busy = false ;
defer . reject ( createAbortError ( ) ) ;
} ) ;
xhr . addEventListener ( 'load' , ev => {
cleanup ( ) ;
this . chunkState [ index ] . busy = false ;
if ( ev . target . status < 200 || ev . target . status >= 300 ) {
const error = new Error ( 'Non 2xx' ) ;
error . source = ev . target ;
defer . reject ( error ) ;
return ;
} // This avoids the net::ERR_OUT_OF_MEMORY in Chromium Browsers.
this . chunks [ index ] = null ;
_classPrivateFieldLooseBase ( this , _onPartProgress ) [ _onPartProgress ] ( index , body . size , body . size ) ; // NOTE This must be allowed by CORS.
const etag = ev . target . getResponseHeader ( 'ETag' ) ;
if ( etag === null ) {
defer . reject ( new Error ( 'AwsS3/Multipart: Could not read the ETag header. This likely means CORS is not configured correctly on the S3 Bucket. See https://uppy.io/docs/aws-s3-multipart#S3-Bucket-Configuration for instructions.' ) ) ;
return ;
}
_classPrivateFieldLooseBase ( this , _onPartComplete ) [ _onPartComplete ] ( index , etag ) ;
defer . resolve ( ) ;
} ) ;
xhr . addEventListener ( 'error' , ev => {
cleanup ( ) ;
this . chunkState [ index ] . busy = false ;
const error = new Error ( 'Unknown error' ) ;
error . source = ev . target ;
defer . reject ( error ) ;
} ) ;
xhr . send ( body ) ;
return promise ;
}
async function _completeUpload2 ( ) {
// Parts may not have completed uploading in sorted order, if limit > 1.
this . parts . sort ( ( a , b ) => a . PartNumber - b . PartNumber ) ;
try {
const result = await this . options . completeMultipartUpload ( {
key : this . key ,
uploadId : this . uploadId ,
parts : this . parts
} ) ;
this . options . onSuccess ( result ) ;
} catch ( err ) {
_classPrivateFieldLooseBase ( this , _onError ) [ _onError ] ( err ) ;
}
}
function _abortUpload2 ( ) {
this . abortController . abort ( ) ;
this . createdPromise . then ( ( ) => {
this . options . abortMultipartUpload ( {
key : this . key ,
uploadId : this . uploadId
} ) ;
} , ( ) => { // if the creation failed we do not need to abort
} ) ;
}
function _onError2 ( err ) {
if ( err && err . name === 'AbortError' ) {
return ;
}
this . options . onError ( err ) ;
}
module . exports = MultipartUploader ;
} , { "@uppy/utils/lib/AbortController" : 26 , "@uppy/utils/lib/delay" : 32 } ] , 3 : [ function ( require , module , exports ) {
let _class , _temp ;
const BasePlugin = require ( '@uppy/core/lib/BasePlugin' ) ;
const {
Socket ,
Provider ,
RequestClient
} = require ( '@uppy/companion-client' ) ;
const EventTracker = require ( '@uppy/utils/lib/EventTracker' ) ;
const emitSocketProgress = require ( '@uppy/utils/lib/emitSocketProgress' ) ;
const getSocketHost = require ( '@uppy/utils/lib/getSocketHost' ) ;
const {
RateLimitedQueue
} = require ( '@uppy/utils/lib/RateLimitedQueue' ) ;
const MultipartUploader = require ( './MultipartUploader' ) ;
function assertServerError ( res ) {
if ( res && res . error ) {
const error = new Error ( res . message ) ;
Object . assign ( error , res . error ) ;
throw error ;
}
return res ;
}
module . exports = ( _temp = _class = class AwsS3Multipart extends BasePlugin {
constructor ( uppy , opts ) {
super ( uppy , opts ) ;
this . type = 'uploader' ;
this . id = this . opts . id || 'AwsS3Multipart' ;
this . title = 'AWS S3 Multipart' ;
this . client = new RequestClient ( uppy , opts ) ;
const defaultOptions = {
timeout : 30 * 1000 ,
limit : 0 ,
retryDelays : [ 0 , 1000 , 3000 , 5000 ] ,
createMultipartUpload : this . createMultipartUpload . bind ( this ) ,
listParts : this . listParts . bind ( this ) ,
prepareUploadParts : this . prepareUploadParts . bind ( this ) ,
abortMultipartUpload : this . abortMultipartUpload . bind ( this ) ,
completeMultipartUpload : this . completeMultipartUpload . bind ( this )
} ;
this . opts = { ... defaultOptions ,
... opts
} ;
this . upload = this . upload . bind ( this ) ;
this . requests = new RateLimitedQueue ( this . opts . limit ) ;
this . uploaders = Object . create ( null ) ;
this . uploaderEvents = Object . create ( null ) ;
this . uploaderSockets = Object . create ( null ) ;
}
/ * *
* Clean up all references for a file ' s upload : the MultipartUploader instance ,
* any events related to the file , and the Companion WebSocket connection .
*
* Set ` opts.abort ` to tell S3 that the multipart upload is cancelled and must be removed .
* This should be done when the user cancels the upload , not when the upload is completed or errored .
* /
resetUploaderReferences ( fileID , opts ) {
if ( opts === void 0 ) {
opts = { } ;
}
if ( this . uploaders [ fileID ] ) {
this . uploaders [ fileID ] . abort ( {
really : opts . abort || false
} ) ;
this . uploaders [ fileID ] = null ;
}
if ( this . uploaderEvents [ fileID ] ) {
this . uploaderEvents [ fileID ] . remove ( ) ;
this . uploaderEvents [ fileID ] = null ;
}
if ( this . uploaderSockets [ fileID ] ) {
this . uploaderSockets [ fileID ] . close ( ) ;
this . uploaderSockets [ fileID ] = null ;
}
}
assertHost ( method ) {
if ( ! this . opts . companionUrl ) {
throw new Error ( ` Expected a \` companionUrl \` option containing a Companion address, or if you are not using Companion, a custom \` ${ method } \` implementation. ` ) ;
}
}
createMultipartUpload ( file ) {
this . assertHost ( 'createMultipartUpload' ) ;
const metadata = { } ;
Object . keys ( file . meta ) . forEach ( key => {
if ( file . meta [ key ] != null ) {
metadata [ key ] = file . meta [ key ] . toString ( ) ;
}
} ) ;
return this . client . post ( 's3/multipart' , {
filename : file . name ,
type : file . type ,
metadata
} ) . then ( assertServerError ) ;
}
listParts ( file , _ref ) {
let {
key ,
uploadId
} = _ref ;
this . assertHost ( 'listParts' ) ;
const filename = encodeURIComponent ( key ) ;
return this . client . get ( ` s3/multipart/ ${ uploadId } ?key= ${ filename } ` ) . then ( assertServerError ) ;
}
prepareUploadParts ( file , _ref2 ) {
let {
key ,
uploadId ,
partNumbers
} = _ref2 ;
this . assertHost ( 'prepareUploadParts' ) ;
const filename = encodeURIComponent ( key ) ;
return this . client . get ( ` s3/multipart/ ${ uploadId } /batch?key= ${ filename } &partNumbers= ${ partNumbers . join ( ',' ) } ` ) . then ( assertServerError ) ;
}
completeMultipartUpload ( file , _ref3 ) {
let {
key ,
uploadId ,
parts
} = _ref3 ;
this . assertHost ( 'completeMultipartUpload' ) ;
const filename = encodeURIComponent ( key ) ;
const uploadIdEnc = encodeURIComponent ( uploadId ) ;
return this . client . post ( ` s3/multipart/ ${ uploadIdEnc } /complete?key= ${ filename } ` , {
parts
} ) . then ( assertServerError ) ;
}
abortMultipartUpload ( file , _ref4 ) {
let {
key ,
uploadId
} = _ref4 ;
this . assertHost ( 'abortMultipartUpload' ) ;
const filename = encodeURIComponent ( key ) ;
const uploadIdEnc = encodeURIComponent ( uploadId ) ;
return this . client . delete ( ` s3/multipart/ ${ uploadIdEnc } ?key= ${ filename } ` ) . then ( assertServerError ) ;
}
uploadFile ( file ) {
return new Promise ( ( resolve , reject ) => {
const onStart = data => {
const cFile = this . uppy . getFile ( file . id ) ;
this . uppy . setFileState ( file . id , {
s3Multipart : { ... cFile . s3Multipart ,
key : data . key ,
uploadId : data . uploadId
}
} ) ;
} ;
const onProgress = ( bytesUploaded , bytesTotal ) => {
this . uppy . emit ( 'upload-progress' , file , {
uploader : this ,
bytesUploaded ,
bytesTotal
} ) ;
} ;
const onError = err => {
this . uppy . log ( err ) ;
this . uppy . emit ( 'upload-error' , file , err ) ;
queuedRequest . done ( ) ;
this . resetUploaderReferences ( file . id ) ;
reject ( err ) ;
} ;
const onSuccess = result => {
const uploadResp = {
body : { ... result
} ,
uploadURL : result . location
} ;
queuedRequest . done ( ) ;
this . resetUploaderReferences ( file . id ) ;
const cFile = this . uppy . getFile ( file . id ) ;
this . uppy . emit ( 'upload-success' , cFile || file , uploadResp ) ;
if ( result . location ) {
this . uppy . log ( ` Download ${ upload . file . name } from ${ result . location } ` ) ;
}
resolve ( upload ) ;
} ;
const onPartComplete = part => {
const cFile = this . uppy . getFile ( file . id ) ;
if ( ! cFile ) {
return ;
}
this . uppy . emit ( 's3-multipart:part-uploaded' , cFile , part ) ;
} ;
const upload = new MultipartUploader ( file . data , {
// .bind to pass the file object to each handler.
createMultipartUpload : this . opts . createMultipartUpload . bind ( this , file ) ,
listParts : this . opts . listParts . bind ( this , file ) ,
prepareUploadParts : this . opts . prepareUploadParts . bind ( this , file ) ,
completeMultipartUpload : this . opts . completeMultipartUpload . bind ( this , file ) ,
abortMultipartUpload : this . opts . abortMultipartUpload . bind ( this , file ) ,
getChunkSize : this . opts . getChunkSize ? this . opts . getChunkSize . bind ( this ) : null ,
onStart ,
onProgress ,
onError ,
onSuccess ,
onPartComplete ,
limit : this . opts . limit || 5 ,
retryDelays : this . opts . retryDelays || [ ] ,
... file . s3Multipart
} ) ;
this . uploaders [ file . id ] = upload ;
this . uploaderEvents [ file . id ] = new EventTracker ( this . uppy ) ;
let queuedRequest = this . requests . run ( ( ) => {
if ( ! file . isPaused ) {
upload . start ( ) ;
} // Don't do anything here, the caller will take care of cancelling the upload itself
// using resetUploaderReferences(). This is because resetUploaderReferences() has to be
// called when this request is still in the queue, and has not been started yet, too. At
// that point this cancellation function is not going to be called.
return ( ) => { } ;
} ) ;
this . onFileRemove ( file . id , removed => {
queuedRequest . abort ( ) ;
this . resetUploaderReferences ( file . id , {
abort : true
} ) ;
resolve ( ` upload ${ removed . id } was removed ` ) ;
} ) ;
this . onCancelAll ( file . id , ( ) => {
queuedRequest . abort ( ) ;
this . resetUploaderReferences ( file . id , {
abort : true
} ) ;
resolve ( ` upload ${ file . id } was canceled ` ) ;
} ) ;
this . onFilePause ( file . id , isPaused => {
if ( isPaused ) {
// Remove this file from the queue so another file can start in its place.
queuedRequest . abort ( ) ;
upload . pause ( ) ;
} else {
// Resuming an upload should be queued, else you could pause and then
// resume a queued upload to make it skip the queue.
queuedRequest . abort ( ) ;
queuedRequest = this . requests . run ( ( ) => {
upload . start ( ) ;
return ( ) => { } ;
} ) ;
}
} ) ;
this . onPauseAll ( file . id , ( ) => {
queuedRequest . abort ( ) ;
upload . pause ( ) ;
} ) ;
this . onResumeAll ( file . id , ( ) => {
queuedRequest . abort ( ) ;
if ( file . error ) {
upload . abort ( ) ;
}
queuedRequest = this . requests . run ( ( ) => {
upload . start ( ) ;
return ( ) => { } ;
} ) ;
} ) ; // Don't double-emit upload-started for Golden Retriever-restored files that were already started
if ( ! file . progress . uploadStarted || ! file . isRestored ) {
this . uppy . emit ( 'upload-started' , file ) ;
}
} ) ;
}
uploadRemote ( file ) {
this . resetUploaderReferences ( file . id ) ; // Don't double-emit upload-started for Golden Retriever-restored files that were already started
if ( ! file . progress . uploadStarted || ! file . isRestored ) {
this . uppy . emit ( 'upload-started' , file ) ;
}
if ( file . serverToken ) {
return this . connectToServerSocket ( file ) ;
}
return new Promise ( ( resolve , reject ) => {
const Client = file . remote . providerOptions . provider ? Provider : RequestClient ;
const client = new Client ( this . uppy , file . remote . providerOptions ) ;
client . post ( file . remote . url , { ... file . remote . body ,
protocol : 's3-multipart' ,
size : file . data . size ,
metadata : file . meta
} ) . then ( res => {
this . uppy . setFileState ( file . id , {
serverToken : res . token
} ) ;
file = this . uppy . getFile ( file . id ) ;
return file ;
} ) . then ( file => {
return this . connectToServerSocket ( file ) ;
} ) . then ( ( ) => {
resolve ( ) ;
} ) . catch ( err => {
this . uppy . emit ( 'upload-error' , file , err ) ;
reject ( err ) ;
} ) ;
} ) ;
}
connectToServerSocket ( file ) {
return new Promise ( ( resolve , reject ) => {
const token = file . serverToken ;
const host = getSocketHost ( file . remote . companionUrl ) ;
const socket = new Socket ( {
target : ` ${ host } /api/ ${ token } ` ,
autoOpen : false
} ) ;
this . uploaderSockets [ file . id ] = socket ;
this . uploaderEvents [ file . id ] = new EventTracker ( this . uppy ) ;
this . onFileRemove ( file . id , ( ) => {
queuedRequest . abort ( ) ;
socket . send ( 'cancel' , { } ) ;
this . resetUploaderReferences ( file . id , {
abort : true
} ) ;
resolve ( ` upload ${ file . id } was removed ` ) ;
} ) ;
this . onFilePause ( file . id , isPaused => {
if ( isPaused ) {
// Remove this file from the queue so another file can start in its place.
queuedRequest . abort ( ) ;
socket . send ( 'pause' , { } ) ;
} else {
// Resuming an upload should be queued, else you could pause and then
// resume a queued upload to make it skip the queue.
queuedRequest . abort ( ) ;
queuedRequest = this . requests . run ( ( ) => {
socket . send ( 'resume' , { } ) ;
return ( ) => { } ;
} ) ;
}
} ) ;
this . onPauseAll ( file . id , ( ) => {
queuedRequest . abort ( ) ;
socket . send ( 'pause' , { } ) ;
} ) ;
this . onCancelAll ( file . id , ( ) => {
queuedRequest . abort ( ) ;
socket . send ( 'cancel' , { } ) ;
this . resetUploaderReferences ( file . id ) ;
resolve ( ` upload ${ file . id } was canceled ` ) ;
} ) ;
this . onResumeAll ( file . id , ( ) => {
queuedRequest . abort ( ) ;
if ( file . error ) {
socket . send ( 'pause' , { } ) ;
}
queuedRequest = this . requests . run ( ( ) => {
socket . send ( 'resume' , { } ) ;
} ) ;
} ) ;
this . onRetry ( file . id , ( ) => {
// Only do the retry if the upload is actually in progress;
// else we could try to send these messages when the upload is still queued.
// We may need a better check for this since the socket may also be closed
// for other reasons, like network failures.
if ( socket . isOpen ) {
socket . send ( 'pause' , { } ) ;
socket . send ( 'resume' , { } ) ;
}
} ) ;
this . onRetryAll ( file . id , ( ) => {
if ( socket . isOpen ) {
socket . send ( 'pause' , { } ) ;
socket . send ( 'resume' , { } ) ;
}
} ) ;
socket . on ( 'progress' , progressData => emitSocketProgress ( this , progressData , file ) ) ;
socket . on ( 'error' , errData => {
this . uppy . emit ( 'upload-error' , file , new Error ( errData . error ) ) ;
this . resetUploaderReferences ( file . id ) ;
queuedRequest . done ( ) ;
reject ( new Error ( errData . error ) ) ;
} ) ;
socket . on ( 'success' , data => {
const uploadResp = {
uploadURL : data . url
} ;
this . uppy . emit ( 'upload-success' , file , uploadResp ) ;
this . resetUploaderReferences ( file . id ) ;
queuedRequest . done ( ) ;
resolve ( ) ;
} ) ;
let queuedRequest = this . requests . run ( ( ) => {
socket . open ( ) ;
if ( file . isPaused ) {
socket . send ( 'pause' , { } ) ;
}
return ( ) => { } ;
} ) ;
} ) ;
}
upload ( fileIDs ) {
if ( fileIDs . length === 0 ) { return Promise . resolve ( ) ; }
const promises = fileIDs . map ( id => {
const file = this . uppy . getFile ( id ) ;
if ( file . isRemote ) {
return this . uploadRemote ( file ) ;
}
return this . uploadFile ( file ) ;
} ) ;
return Promise . all ( promises ) ;
}
onFileRemove ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'file-removed' , file => {
if ( fileID === file . id ) { cb ( file . id ) ; }
} ) ;
}
onFilePause ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'upload-pause' , ( targetFileID , isPaused ) => {
if ( fileID === targetFileID ) {
// const isPaused = this.uppy.pauseResume(fileID)
cb ( isPaused ) ;
}
} ) ;
}
onRetry ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'upload-retry' , targetFileID => {
if ( fileID === targetFileID ) {
cb ( ) ;
}
} ) ;
}
onRetryAll ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'retry-all' , ( ) => {
if ( ! this . uppy . getFile ( fileID ) ) { return ; }
cb ( ) ;
} ) ;
}
onPauseAll ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'pause-all' , ( ) => {
if ( ! this . uppy . getFile ( fileID ) ) { return ; }
cb ( ) ;
} ) ;
}
onCancelAll ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'cancel-all' , ( ) => {
if ( ! this . uppy . getFile ( fileID ) ) { return ; }
cb ( ) ;
} ) ;
}
onResumeAll ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'resume-all' , ( ) => {
if ( ! this . uppy . getFile ( fileID ) ) { return ; }
cb ( ) ;
} ) ;
}
install ( ) {
const {
capabilities
} = this . uppy . getState ( ) ;
this . uppy . setState ( {
capabilities : { ... capabilities ,
resumableUploads : true
}
} ) ;
this . uppy . addUploader ( this . upload ) ;
}
uninstall ( ) {
const {
capabilities
} = this . uppy . getState ( ) ;
this . uppy . setState ( {
capabilities : { ... capabilities ,
resumableUploads : false
}
} ) ;
this . uppy . removeUploader ( this . upload ) ;
}
} , _class . VERSION = "2.2.1" , _temp ) ;
} , { "./MultipartUploader" : 2 , "@uppy/companion-client" : 13 , "@uppy/core/lib/BasePlugin" : 15 , "@uppy/utils/lib/EventTracker" : 27 , "@uppy/utils/lib/RateLimitedQueue" : 30 , "@uppy/utils/lib/emitSocketProgress" : 33 , "@uppy/utils/lib/getSocketHost" : 44 } ] , 4 : [ function ( require , module , exports ) {
let _getOptions , _addEventHandlerForFile , _addEventHandlerIfFileStillExists , _uploadLocalFile , _uploadRemoteFile ;
// Babel loose-mode helper: verify `receiver` owns the generated private key
// before a pseudo-private field access, then hand the receiver back.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  const owns = Object.prototype.hasOwnProperty.call(receiver, privateKey);
  if (!owns) {
    throw new TypeError("attempted to use private field on non-instance");
  }
  return receiver;
}
// Monotonic counter ensuring each generated pseudo-private key is unique.
let id = 0;
// Babel loose-mode helper: build a unique property name for a private field.
function _classPrivateFieldLooseKey(name) {
  const key = "__private_" + id + "_" + name;
  id += 1;
  return key;
}
const {
nanoid
} = require ( 'nanoid/non-secure' ) ;
const {
Provider ,
RequestClient ,
Socket
} = require ( '@uppy/companion-client' ) ;
const emitSocketProgress = require ( '@uppy/utils/lib/emitSocketProgress' ) ;
const getSocketHost = require ( '@uppy/utils/lib/getSocketHost' ) ;
const EventTracker = require ( '@uppy/utils/lib/EventTracker' ) ;
const ProgressTimeout = require ( '@uppy/utils/lib/ProgressTimeout' ) ;
const NetworkError = require ( '@uppy/utils/lib/NetworkError' ) ;
const isNetworkError = require ( '@uppy/utils/lib/isNetworkError' ) ;
const {
internalRateLimitedQueue
} = require ( '@uppy/utils/lib/RateLimitedQueue' ) ; // See XHRUpload
// Wrap an upload failure: network-level failures become a NetworkError,
// everything else a generic Error carrying the cause and the XHR object.
function buildResponseError(xhr, error) {
  if (isNetworkError(xhr)) {
    return new NetworkError(error, xhr);
  }
  // TODO: when we drop support for browsers that do not support this syntax, use:
  // return new Error('Upload error', { cause: error, request: xhr })
  const err = new Error('Upload error');
  err.cause = error;
  err.request = xhr;
  return err;
}
// See XHRUpload. Return a copy of the file's Blob whose MIME type is
// replaced by `file.meta.type` (Blob#slice takes the new type as 3rd arg).
function setTypeInBlob(file) {
  const { data, meta } = file;
  return data.slice(0, data.size, meta.type);
}
// Append metadata to a FormData-like object. When `opts.metaFields` is an
// array it selects which keys to send; otherwise every key of `meta` is sent.
function addMetadata(formData, meta, opts) {
  const fields = Array.isArray(opts.metaFields)
    ? opts.metaFields
    : Object.keys(meta); // Send along all fields by default.
  for (const field of fields) {
    formData.append(field, meta[field]);
  }
}
// Build the multipart/form-data payload: metadata fields first, then the
// file blob under `opts.fieldName` (with the original filename when known).
function createFormDataUpload(file, opts) {
  const form = new FormData();
  addMetadata(form, file.meta, opts);
  const blob = setTypeInBlob(file);
  if (file.name) {
    form.append(opts.fieldName, blob, file.meta.name);
  } else {
    form.append(opts.fieldName, blob);
  }
  return form;
}
// A "bare" (non-multipart) upload sends the Blob itself as the request body.
const createBareUpload = (file) => file.data;
// MiniXHRUpload: a pared-down copy of @uppy/xhr-upload bundled with the AwsS3
// plugin so each file can be uploaded immediately after its signed parameters
// arrive (see the "Big Hack" explanation in the AwsS3 module). The
// pseudo-private methods below are wired up via Babel's loose private-field
// helpers (_classPrivateFieldLooseKey / _classPrivateFieldLooseBase).
module . exports = ( _getOptions = /*#__PURE__*/ _classPrivateFieldLooseKey ( "getOptions" ) , _addEventHandlerForFile = /*#__PURE__*/ _classPrivateFieldLooseKey ( "addEventHandlerForFile" ) , _addEventHandlerIfFileStillExists = /*#__PURE__*/ _classPrivateFieldLooseKey ( "addEventHandlerIfFileStillExists" ) , _uploadLocalFile = /*#__PURE__*/ _classPrivateFieldLooseKey ( "uploadLocalFile" ) , _uploadRemoteFile = /*#__PURE__*/ _classPrivateFieldLooseKey ( "uploadRemoteFile" ) , class MiniXHRUpload {
constructor ( _uppy , _opts ) {
// Attach the private-method implementations under their generated keys.
Object . defineProperty ( this , _uploadRemoteFile , {
value : _uploadRemoteFile2
} ) ;
Object . defineProperty ( this , _uploadLocalFile , {
value : _uploadLocalFile2
} ) ;
Object . defineProperty ( this , _addEventHandlerIfFileStillExists , {
value : _addEventHandlerIfFileStillExists2
} ) ;
Object . defineProperty ( this , _addEventHandlerForFile , {
value : _addEventHandlerForFile2
} ) ;
Object . defineProperty ( this , _getOptions , {
value : _getOptions2
} ) ;
this . uppy = _uppy ;
// Default: any 2xx status counts as success; callers may override
// `validateStatus` through the options (spread below wins).
this . opts = {
validateStatus ( status ) {
return status >= 200 && status < 300 ;
} ,
... _opts
} ;
// Rate-limited request queue shared with the AwsS3 plugin.
this . requests = _opts [ internalRateLimitedQueue ] ;
// Per-file EventTracker instances keyed by file ID (null prototype so
// arbitrary file IDs cannot clash with Object.prototype keys).
this . uploaderEvents = Object . create ( null ) ;
this . i18n = _opts . i18n ;
}
// Upload one file by ID. `current` / `total` are used for logging only.
// Throws synchronously when the file already carries an error; otherwise
// dispatches to the remote (Companion socket) or local (XHR) path.
uploadFile ( id , current , total ) {
const file = this . uppy . getFile ( id ) ;
if ( file . error ) {
throw new Error ( file . error ) ;
} else if ( file . isRemote ) {
return _classPrivateFieldLooseBase ( this , _uploadRemoteFile ) [ _uploadRemoteFile ] ( file , current , total ) ;
}
return _classPrivateFieldLooseBase ( this , _uploadLocalFile ) [ _uploadLocalFile ] ( file , current , total ) ;
}
} ) ;
// Merge upload options for one file, lowest to highest precedence:
// plugin defaults, the uppy-state `xhrUpload` overrides, then the per-file
// `file.xhrUpload`. Headers are merged separately so per-file headers extend
// (rather than replace) the plugin-level ones.
function _getOptions2(file) {
  const { uppy } = this;
  const overrides = uppy.getState().xhrUpload;
  const fileOverrides = file.xhrUpload;
  return {
    ...this.opts,
    ...(overrides || {}),
    ...(fileOverrides || {}),
    headers: {
      ...this.opts.headers,
      ...(overrides == null ? undefined : overrides.headers),
      ...(fileOverrides == null ? undefined : fileOverrides.headers),
    },
  };
}
// Run `eventHandler` whenever `eventName` fires for exactly this file ID.
function _addEventHandlerForFile2(eventName, fileID, eventHandler) {
  this.uploaderEvents[fileID].on(eventName, (targetFileID) => {
    if (targetFileID === fileID) {
      eventHandler();
    }
  });
}
// Run `eventHandler` on `eventName`, but only while the file still exists
// in Uppy state (guards against events arriving after removal).
function _addEventHandlerIfFileStillExists2(eventName, fileID, eventHandler) {
  this.uploaderEvents[fileID].on(eventName, () => {
    if (this.uppy.getFile(fileID)) {
      eventHandler();
    }
  });
}
// Upload `file` via a plain XMLHttpRequest to the signed S3 endpoint.
// `current` / `total` are only used for progress logging. Resolves with the
// file on success; rejects on timeout, HTTP error, network error, file
// removal, or cancellation. Progress / success / error events are emitted on
// the Uppy instance along the way.
function _uploadLocalFile2 ( file , current , total ) {
const opts = _classPrivateFieldLooseBase ( this , _getOptions ) [ _getOptions ] ( file ) ;
this . uppy . log ( ` uploading ${ current } of ${ total } ` ) ;
return new Promise ( ( resolve , reject ) => {
// This is done in index.js in the S3 plugin.
// this.uppy.emit('upload-started', file)
const data = opts . formData ? createFormDataUpload ( file , opts ) : createBareUpload ( file , opts ) ;
const xhr = new XMLHttpRequest ( ) ;
this . uploaderEvents [ file . id ] = new EventTracker ( this . uppy ) ;
// Abort the request when no progress has been seen for `opts.timeout` ms.
const timer = new ProgressTimeout ( opts . timeout , ( ) => {
xhr . abort ( ) ; // eslint-disable-next-line no-use-before-define
queuedRequest . done ( ) ;
const error = new Error ( this . i18n ( 'timedOut' , {
seconds : Math . ceil ( opts . timeout / 1000 )
} ) ) ;
this . uppy . emit ( 'upload-error' , file , error ) ;
reject ( error ) ;
} ) ;
// Random ID used purely to correlate log lines for this request.
const id = nanoid ( ) ;
xhr . upload . addEventListener ( 'loadstart' , ( ) => {
this . uppy . log ( ` [AwsS3/XHRUpload] ${ id } started ` ) ;
} ) ;
xhr . upload . addEventListener ( 'progress' , ev => {
this . uppy . log ( ` [AwsS3/XHRUpload] ${ id } progress: ${ ev . loaded } / ${ ev . total } ` ) ; // Begin checking for timeouts when progress starts, instead of loading,
// to avoid timing out requests on browser concurrency queue
timer . progress ( ) ;
if ( ev . lengthComputable ) {
this . uppy . emit ( 'upload-progress' , file , {
uploader : this ,
bytesUploaded : ev . loaded ,
bytesTotal : ev . total
} ) ;
}
} ) ;
xhr . addEventListener ( 'load' , ev => {
this . uppy . log ( ` [AwsS3/XHRUpload] ${ id } finished ` ) ;
timer . done ( ) ; // eslint-disable-next-line no-use-before-define
queuedRequest . done ( ) ;
// Stop tracking Uppy events for this file now that the request is over.
if ( this . uploaderEvents [ file . id ] ) {
this . uploaderEvents [ file . id ] . remove ( ) ;
this . uploaderEvents [ file . id ] = null ;
}
if ( opts . validateStatus ( ev . target . status , xhr . responseText , xhr ) ) {
const body = opts . getResponseData ( xhr . responseText , xhr ) ;
const uploadURL = body [ opts . responseUrlFieldName ] ;
const uploadResp = {
status : ev . target . status ,
body ,
uploadURL
} ;
this . uppy . emit ( 'upload-success' , file , uploadResp ) ;
if ( uploadURL ) {
this . uppy . log ( ` Download ${ file . name } from ${ uploadURL } ` ) ;
}
return resolve ( file ) ;
}
// Response failed validateStatus: surface a structured upload error.
const body = opts . getResponseData ( xhr . responseText , xhr ) ;
const error = buildResponseError ( xhr , opts . getResponseError ( xhr . responseText , xhr ) ) ;
const response = {
status : ev . target . status ,
body
} ;
this . uppy . emit ( 'upload-error' , file , error , response ) ;
return reject ( error ) ;
} ) ;
xhr . addEventListener ( 'error' , ( ) => {
this . uppy . log ( ` [AwsS3/XHRUpload] ${ id } errored ` ) ;
timer . done ( ) ; // eslint-disable-next-line no-use-before-define
queuedRequest . done ( ) ;
if ( this . uploaderEvents [ file . id ] ) {
this . uploaderEvents [ file . id ] . remove ( ) ;
this . uploaderEvents [ file . id ] = null ;
}
const error = buildResponseError ( xhr , opts . getResponseError ( xhr . responseText , xhr ) ) ;
this . uppy . emit ( 'upload-error' , file , error ) ;
return reject ( error ) ;
} ) ;
xhr . open ( opts . method . toUpperCase ( ) , opts . endpoint , true ) ; // IE10 does not allow setting `withCredentials` and `responseType`
// before `open()` is called. It’s important to set withCredentials
// to a boolean, otherwise React Native crashes
xhr . withCredentials = Boolean ( opts . withCredentials ) ;
if ( opts . responseType !== '' ) {
xhr . responseType = opts . responseType ;
}
Object . keys ( opts . headers ) . forEach ( header => {
xhr . setRequestHeader ( header , opts . headers [ header ] ) ;
} ) ;
// The actual send is funneled through the shared rate-limited queue; the
// function returned to the queue is its way of cancelling this request.
const queuedRequest = this . requests . run ( ( ) => {
xhr . send ( data ) ;
return ( ) => {
// eslint-disable-next-line no-use-before-define
timer . done ( ) ;
xhr . abort ( ) ;
} ;
} , {
priority : 1
} ) ;
_classPrivateFieldLooseBase ( this , _addEventHandlerForFile ) [ _addEventHandlerForFile ] ( 'file-removed' , file . id , ( ) => {
queuedRequest . abort ( ) ;
reject ( new Error ( 'File removed' ) ) ;
} ) ;
_classPrivateFieldLooseBase ( this , _addEventHandlerIfFileStillExists ) [ _addEventHandlerIfFileStillExists ] ( 'cancel-all' , file . id , ( ) => {
queuedRequest . abort ( ) ;
reject ( new Error ( 'Upload cancelled' ) ) ;
} ) ;
} ) ;
}
// Upload a remote (Companion-hosted) file: POST the upload spec to
// `file.remote.url`, then receive progress / success / error over a
// Companion WebSocket. Pause, resume, retry and cancel are forwarded to
// Companion as socket messages.
function _uploadRemoteFile2 ( file ) {
const opts = _classPrivateFieldLooseBase ( this , _getOptions ) [ _getOptions ] ( file ) ; // This is done in index.js in the S3 plugin.
// this.uppy.emit('upload-started', file)
const metaFields = Array . isArray ( opts . metaFields ) ? opts . metaFields // Send along all fields by default.
: Object . keys ( file . meta ) ;
// Provider-backed files use the authenticated Provider client; plain URL
// imports go through the generic RequestClient.
const Client = file . remote . providerOptions . provider ? Provider : RequestClient ;
const client = new Client ( this . uppy , file . remote . providerOptions ) ;
return client . post ( file . remote . url , { ... file . remote . body ,
endpoint : opts . endpoint ,
size : file . data . size ,
fieldname : opts . fieldName ,
metadata : Object . fromEntries ( metaFields . map ( name => [ name , file . meta [ name ] ] ) ) ,
httpMethod : opts . method ,
useFormData : opts . formData ,
headers : opts . headers
} ) . then ( res => new Promise ( ( resolve , reject ) => {
const {
token
} = res ;
const host = getSocketHost ( file . remote . companionUrl ) ;
const socket = new Socket ( {
target : ` ${ host } /api/ ${ token } ` ,
autoOpen : false
} ) ;
this . uploaderEvents [ file . id ] = new EventTracker ( this . uppy ) ;
// Opening the socket is what starts the Companion-side transfer, so it
// goes through the shared rate-limited queue like a local upload would.
const queuedRequest = this . requests . run ( ( ) => {
socket . open ( ) ;
if ( file . isPaused ) {
socket . send ( 'pause' , { } ) ;
}
return ( ) => socket . close ( ) ;
} ) ;
// Removal / cancellation resolve (not reject): the upload is simply over.
_classPrivateFieldLooseBase ( this , _addEventHandlerForFile ) [ _addEventHandlerForFile ] ( 'file-removed' , file . id , ( ) => {
socket . send ( 'cancel' , { } ) ;
queuedRequest . abort ( ) ;
resolve ( ` upload ${ file . id } was removed ` ) ;
} ) ;
_classPrivateFieldLooseBase ( this , _addEventHandlerIfFileStillExists ) [ _addEventHandlerIfFileStillExists ] ( 'cancel-all' , file . id , ( ) => {
socket . send ( 'cancel' , { } ) ;
queuedRequest . abort ( ) ;
resolve ( ` upload ${ file . id } was canceled ` ) ;
} ) ;
// A retry is modelled as pause+resume so Companion restarts the transfer.
_classPrivateFieldLooseBase ( this , _addEventHandlerForFile ) [ _addEventHandlerForFile ] ( 'upload-retry' , file . id , ( ) => {
socket . send ( 'pause' , { } ) ;
socket . send ( 'resume' , { } ) ;
} ) ;
_classPrivateFieldLooseBase ( this , _addEventHandlerIfFileStillExists ) [ _addEventHandlerIfFileStillExists ] ( 'retry-all' , file . id , ( ) => {
socket . send ( 'pause' , { } ) ;
socket . send ( 'resume' , { } ) ;
} ) ;
socket . on ( 'progress' , progressData => emitSocketProgress ( this , progressData , file ) ) ;
socket . on ( 'success' , data => {
const body = opts . getResponseData ( data . response . responseText , data . response ) ;
const uploadURL = body [ opts . responseUrlFieldName ] ;
const uploadResp = {
status : data . response . status ,
body ,
uploadURL ,
bytesUploaded : data . bytesUploaded
} ;
this . uppy . emit ( 'upload-success' , file , uploadResp ) ;
queuedRequest . done ( ) ;
if ( this . uploaderEvents [ file . id ] ) {
this . uploaderEvents [ file . id ] . remove ( ) ;
this . uploaderEvents [ file . id ] = null ;
}
return resolve ( ) ;
} ) ;
socket . on ( 'error' , errData => {
const resp = errData . response ;
// With an HTTP response, derive the error from its body; otherwise wrap
// the raw Companion error, keeping it as `cause`.
const error = resp ? opts . getResponseError ( resp . responseText , resp ) : Object . assign ( new Error ( errData . error . message ) , {
cause : errData . error
} ) ;
this . uppy . emit ( 'upload-error' , file , error ) ;
queuedRequest . done ( ) ;
if ( this . uploaderEvents [ file . id ] ) {
this . uploaderEvents [ file . id ] . remove ( ) ;
this . uploaderEvents [ file . id ] = null ;
}
reject ( error ) ;
} ) ;
} ) . catch ( err => {
this . uppy . emit ( 'upload-error' , file , err ) ;
return Promise . reject ( err ) ;
} ) ) ;
}
} , { "@uppy/companion-client" : 13 , "@uppy/utils/lib/EventTracker" : 27 , "@uppy/utils/lib/NetworkError" : 28 , "@uppy/utils/lib/ProgressTimeout" : 29 , "@uppy/utils/lib/RateLimitedQueue" : 30 , "@uppy/utils/lib/emitSocketProgress" : 33 , "@uppy/utils/lib/getSocketHost" : 44 , "@uppy/utils/lib/isNetworkError" : 48 , "nanoid/non-secure" : 57 } ] , 5 : [ function ( require , module , exports ) {
let _class , _client , _requests , _uploader , _handleUpload , _temp ;
// Babel loose-mode helper: verify `receiver` owns the generated private key
// before a pseudo-private field access, then hand the receiver back.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  const owns = Object.prototype.hasOwnProperty.call(receiver, privateKey);
  if (!owns) {
    throw new TypeError("attempted to use private field on non-instance");
  }
  return receiver;
}
// Monotonic counter ensuring each generated pseudo-private key is unique.
let id = 0;
// Babel loose-mode helper: build a unique property name for a private field.
function _classPrivateFieldLooseKey(name) {
  const key = "__private_" + id + "_" + name;
  id += 1;
  return key;
}
/ * *
* This plugin is currently A Big Hack ™ ! The core reason for that is how this plugin
* interacts with Uppy ' s current pipeline design . The pipeline can handle files in steps ,
* including preprocessing , uploading , and postprocessing steps . This plugin initially
* was designed to do its work in a preprocessing step , and let XHRUpload deal with the
* actual file upload as an uploading step . However , Uppy runs steps on all files at once ,
* sequentially : first , all files go through a preprocessing step , then , once they are all
* done , they go through the uploading step .
*
* For S3 , this causes severely broken behaviour when users upload many files . The
* preprocessing step will request S3 upload URLs that are valid for a short time only ,
* but it has to do this for _all _ files , which can take a long time if there are hundreds
* or even thousands of files . By the time the uploader step starts , the first URLs may
* already have expired . If not , the uploading might take such a long time that later URLs
* will expire before some files can be uploaded .
*
* The long - term solution to this problem is to change the upload pipeline so that files
* can be sent to the next step individually . That requires a breaking change , so it is
* planned for some future Uppy version .
*
* In the mean time , this plugin is stuck with a hackier approach : the necessary parts
* of the XHRUpload implementation were copied into this plugin , as the MiniXHRUpload
* class , and this plugin calls into it immediately once it receives an upload URL .
* This isn 't as nicely modular as we' d like and requires us to maintain two copies of
* the XHRUpload code , but at least it ' s not horrifically broken : )
* /
const BasePlugin = require ( '@uppy/core/lib/BasePlugin' ) ;
const {
RateLimitedQueue ,
internalRateLimitedQueue
} = require ( '@uppy/utils/lib/RateLimitedQueue' ) ;
const {
RequestClient
} = require ( '@uppy/companion-client' ) ;
const MiniXHRUpload = require ( './MiniXHRUpload' ) ;
const isXml = require ( './isXml' ) ;
const locale = require ( './locale' ) ;
// Resolve `link` against `origin`, yielding an absolute URL string. An empty
// or nullish origin is dropped so already-absolute links stand on their own.
function resolveUrl(origin, link) {
  const base = origin || undefined;
  return String(new URL(link, base));
}
/ * *
* Get the contents of a named tag in an XML source string .
*
* @ param { string } source - The XML source string .
* @ param { string } tagName - The name of the tag .
* @ returns { string } The contents of the tag , or the empty string if the tag does not exist .
* /
function getXmlValue(source, tagName) {
  const openTag = `<${tagName}>`;
  const start = source.indexOf(openTag);
  const end = source.indexOf(`</${tagName}>`, start);
  if (start === -1 || end === -1) {
    return '';
  }
  return source.slice(start + openTag.length, end);
}
// Pass a Companion response through unless it carries an `error` payload,
// in which case throw an Error with the server message and the error's
// own properties copied onto it.
function assertServerError(res) {
  if (res && res.error) {
    throw Object.assign(new Error(res.message), res.error);
  }
  return res;
}
// Validate the object returned by `getUploadParameters()`: it must have a
// string `url`, an object-or-absent `fields`, and a method of PUT or POST
// (absent defaults to POST elsewhere). Throws a TypeError otherwise.
function validateParameters(file, params) {
  const shapeIsValid =
    params != null &&
    typeof params.url === 'string' &&
    (typeof params.fields === 'object' || params.fields == null);
  if (!shapeIsValid) {
    throw new TypeError(`AwsS3: got incorrect result from 'getUploadParameters()' for file '${file.name}', expected an object '{ url, method, fields, headers }' but got '${JSON.stringify(params)}' instead.\nSee https://uppy.io/docs/aws-s3/#getUploadParameters-file for more on the expected format.`);
  }
  const methodIsValid = params.method == null || /^p(u|os)t$/i.test(params.method);
  if (!methodIsValid) {
    throw new TypeError(`AwsS3: got incorrect method from 'getUploadParameters()' for file '${file.name}', expected 'put' or 'post' but got '${params.method}' instead.\nSee https://uppy.io/docs/aws-s3/#getUploadParameters-file for more on the expected format.`);
  }
}
} // Get the error data from a failed XMLHttpRequest instance.
// `content` is the S3 response as a string.
// `xhr` is the XMLHttpRequest instance.
// Extract an Error from a failed S3 response body. Returns undefined when
// the body is not XML so the caller falls back to its default message.
function defaultGetResponseError(content, xhr) {
  if (!isXml(content, xhr)) {
    return undefined;
  }
  return new Error(getXmlValue(content, 'Message'));
}
} // warning deduplication flag: see `getResponseData()` XHRUpload option definition
let warnedSuccessActionStatus = false ;
// AwsS3 plugin: obtains signed upload parameters per file (from a Companion
// server, or from a user-supplied `getUploadParameters` option) and
// immediately uploads each file through the bundled MiniXHRUpload instance
// (see the "Big Hack" explanation above). Private fields are wired up via
// Babel's loose private-field helpers.
module . exports = ( _temp = ( _client = /*#__PURE__*/ _classPrivateFieldLooseKey ( "client" ) , _requests = /*#__PURE__*/ _classPrivateFieldLooseKey ( "requests" ) , _uploader = /*#__PURE__*/ _classPrivateFieldLooseKey ( "uploader" ) , _handleUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "handleUpload" ) , _class = class AwsS3 extends BasePlugin {
// eslint-disable-next-line global-require
constructor ( uppy , opts ) {
super ( uppy , opts ) ;
Object . defineProperty ( this , _client , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _requests , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _uploader , {
writable : true ,
value : void 0
} ) ;
// The uploader entry point registered with `uppy.addUploader()` in install().
Object . defineProperty ( this , _handleUpload , {
writable : true ,
value : fileIDs => {
/ * *
* keep track of ` getUploadParameters() ` responses
* so we can cancel the calls individually using just a file ID
*
* @ type { object . < string , Promise > }
* /
const paramsPromises = Object . create ( null ) ;
// Abort the in-flight parameter request for a file that was removed.
function onremove ( file ) {
let _paramsPromises$id ;
const {
id
} = file ;
( _paramsPromises$id = paramsPromises [ id ] ) == null ? void 0 : _paramsPromises$id . abort ( ) ;
}
this . uppy . on ( 'file-removed' , onremove ) ;
fileIDs . forEach ( id => {
const file = this . uppy . getFile ( id ) ;
this . uppy . emit ( 'upload-started' , file ) ;
} ) ;
// Parameter requests share the rate-limited queue with the uploads so
// signed URLs are fetched just-in-time rather than all up front.
const getUploadParameters = _classPrivateFieldLooseBase ( this , _requests ) [ _requests ] . wrapPromiseFunction ( file => {
return this . opts . getUploadParameters ( file ) ;
} ) ;
const numberOfFiles = fileIDs . length ;
// allSettled: one failing file must not cancel its siblings.
return Promise . allSettled ( fileIDs . map ( ( id , index ) => {
paramsPromises [ id ] = getUploadParameters ( this . uppy . getFile ( id ) ) ;
return paramsPromises [ id ] . then ( params => {
delete paramsPromises [ id ] ;
const file = this . uppy . getFile ( id ) ;
validateParameters ( file , params ) ;
const {
method = 'post' ,
url ,
fields ,
headers
} = params ;
// Only POST uploads use multipart form data; PUT sends the bare blob.
const xhrOpts = {
method ,
formData : method . toLowerCase ( ) === 'post' ,
endpoint : url ,
metaFields : fields ? Object . keys ( fields ) : [ ]
} ;
if ( headers ) {
xhrOpts . headers = headers ;
}
// Stash the signed fields on the file so MiniXHRUpload picks them up
// via its _getOptions merge.
this . uppy . setFileState ( file . id , {
meta : { ... file . meta ,
... fields
} ,
xhrUpload : xhrOpts
} ) ;
return _classPrivateFieldLooseBase ( this , _uploader ) [ _uploader ] . uploadFile ( file . id , index , numberOfFiles ) ;
} ) . catch ( error => {
delete paramsPromises [ id ] ;
const file = this . uppy . getFile ( id ) ;
this . uppy . emit ( 'upload-error' , file , error ) ;
return Promise . reject ( error ) ;
} ) ;
} ) ) . finally ( ( ) => {
// cleanup.
this . uppy . off ( 'file-removed' , onremove ) ;
} ) ;
}
} ) ;
this . type = 'uploader' ;
this . id = this . opts . id || 'AwsS3' ;
this . title = 'AWS S3' ;
this . defaultLocale = locale ;
const defaultOptions = {
timeout : 30 * 1000 ,
limit : 0 ,
metaFields : [ ] ,
// have to opt in
getUploadParameters : this . getUploadParameters . bind ( this )
} ;
this . opts = { ... defaultOptions ,
... opts
} ; // TODO: remove i18n once we can depend on XHRUpload instead of MiniXHRUpload
this . i18nInit ( ) ;
_classPrivateFieldLooseBase ( this , _client ) [ _client ] = new RequestClient ( uppy , opts ) ;
_classPrivateFieldLooseBase ( this , _requests ) [ _requests ] = new RateLimitedQueue ( this . opts . limit ) ;
}
// Default `getUploadParameters`: asks Companion's `GET /s3/params` endpoint,
// forwarding the filename, content type and whitelisted metadata fields.
getUploadParameters ( file ) {
if ( ! this . opts . companionUrl ) {
throw new Error ( 'Expected a `companionUrl` option containing a Companion address.' ) ;
}
const filename = file . meta . name ;
const {
type
} = file . meta ;
const metadata = Object . fromEntries ( this . opts . metaFields . filter ( key => file . meta [ key ] != null ) . map ( key => [ ` metadata[ ${ key } ] ` , file . meta [ key ] . toString ( ) ] ) ) ;
const query = new URLSearchParams ( {
filename ,
type ,
... metadata
} ) ;
return _classPrivateFieldLooseBase ( this , _client ) [ _client ] . get ( ` s3/params? ${ query } ` ) . then ( assertServerError ) ;
}
install ( ) {
const {
uppy
} = this ;
uppy . addUploader ( _classPrivateFieldLooseBase ( this , _handleUpload ) [ _handleUpload ] ) ; // Get the response data from a successful XMLHttpRequest instance.
// `content` is the S3 response as a string.
// `xhr` is the XMLHttpRequest instance.
function defaultGetResponseData ( content , xhr ) {
const opts = this ; // If no response, we've hopefully done a PUT request to the file
// in the bucket on its full URL.
if ( ! isXml ( content , xhr ) ) {
if ( opts . method . toUpperCase ( ) === 'POST' ) {
if ( ! warnedSuccessActionStatus ) {
uppy . log ( '[AwsS3] No response data found, make sure to set the success_action_status AWS SDK option to 201. See https://uppy.io/docs/aws-s3/#POST-Uploads' , 'warning' ) ;
warnedSuccessActionStatus = true ;
} // The responseURL won't contain the object key. Give up.
return {
location : null
} ;
} // responseURL is not available in older browsers.
if ( ! xhr . responseURL ) {
return {
location : null
} ;
} // Trim the query string because it's going to be a bunch of presign
// parameters for a PUT request—doing a GET request with those will
// always result in an error
return {
location : xhr . responseURL . replace ( /\?.*$/ , '' )
} ;
}
return {
// Some S3 alternatives do not reply with an absolute URL.
// Eg DigitalOcean Spaces uses /$bucketName/xyz
location : resolveUrl ( xhr . responseURL , getXmlValue ( content , 'Location' ) ) ,
bucket : getXmlValue ( content , 'Bucket' ) ,
key : getXmlValue ( content , 'Key' ) ,
etag : getXmlValue ( content , 'ETag' )
} ;
}
const xhrOptions = {
fieldName : 'file' ,
responseUrlFieldName : 'location' ,
timeout : this . opts . timeout ,
// Share the rate limiting queue with XHRUpload.
[ internalRateLimitedQueue ] : _classPrivateFieldLooseBase ( this , _requests ) [ _requests ] ,
responseType : 'text' ,
getResponseData : this . opts . getResponseData || defaultGetResponseData ,
getResponseError : defaultGetResponseError
} ; // TODO: remove i18n once we can depend on XHRUpload instead of MiniXHRUpload
xhrOptions . i18n = this . i18n ; // Revert to `uppy.use(XHRUpload)` once the big comment block at the top of
// this file is solved
_classPrivateFieldLooseBase ( this , _uploader ) [ _uploader ] = new MiniXHRUpload ( uppy , xhrOptions ) ;
}
uninstall ( ) {
this . uppy . removeUploader ( _classPrivateFieldLooseBase ( this , _handleUpload ) [ _handleUpload ] ) ;
}
} ) , _class . VERSION = "2.0.8" , _temp ) ;
} , { "./MiniXHRUpload" : 4 , "./isXml" : 6 , "./locale" : 7 , "@uppy/companion-client" : 13 , "@uppy/core/lib/BasePlugin" : 15 , "@uppy/utils/lib/RateLimitedQueue" : 30 } ] , 6 : [ function ( require , module , exports ) {
/ * *
* Remove parameters like ` charset=utf-8 ` from the end of a mime type string .
*
* @ param { string } mimeType - The mime type string that may have optional parameters .
* @ returns { string } The "base" mime type , i . e . only 'category/type' .
* /
function removeMimeParameters(mimeType) {
  // Strip everything from the first ';' (e.g. "; charset=utf-8") onward.
  const parameterSuffix = /;.*$/;
  return mimeType.replace(parameterSuffix, '');
}
/ * *
* Check if a response contains XML based on the response object and its text content .
*
* @ param { string } content - The text body of the response .
* @ param { object | XMLHttpRequest } xhr - The XHR object or response object from Companion .
* @ returns { bool } Whether the content is ( probably ) XML .
* /
function isXml(content, xhr) {
  // Companion response objects expose a plain `headers` map; real XHRs use
  // getResponseHeader().
  const rawContentType = xhr.headers
    ? xhr.headers['content-type']
    : xhr.getResponseHeader('Content-Type');
  if (typeof rawContentType !== 'string') {
    return false;
  }
  const contentType = removeMimeParameters(rawContentType).toLowerCase();
  if (contentType === 'application/xml' || contentType === 'text/xml') {
    return true;
  }
  // GCS uses text/html for some reason
  // https://github.com/transloadit/uppy/issues/896
  return contentType === 'text/html' && /^<\?xml /.test(content);
}
module.exports = isXml;
} , { } ] , 7 : [ function ( require , module , exports ) {
// Locale strings for the AwsS3 plugin. `timedOut` is used by MiniXHRUpload's
// ProgressTimeout error; `%{seconds}` is interpolated by Uppy's Translator.
module . exports = {
strings : {
timedOut : 'Upload stalled for %{seconds} seconds, aborting.'
}
} ;
} , { } ] , 8 : [ function ( require , module , exports ) {
'use strict' ;
/**
 * Thrown when Companion answers 401: the user must (re)authenticate with the
 * remote provider. Consumers detect it via the `isAuthError` flag.
 */
class AuthError extends Error {
  constructor() {
    super('Authorization required');
    this.name = 'AuthError';
    this.isAuthError = true;
  }
}
module . exports = AuthError ;
} , { } ] , 9 : [ function ( require , module , exports ) {
'use strict' ;
const RequestClient = require ( './RequestClient' ) ;
const tokenStorage = require ( './tokenStorage' ) ;
// Turn a provider id into a display name: 'google-drive' -> 'Google Drive'.
const getName = (id) => {
  const words = id.split('-');
  const capitalized = words.map((word) => word.charAt(0).toUpperCase() + word.slice(1));
  return capitalized.join(' ');
};
// Provider: Companion client for OAuth-backed remote providers (Drive,
// Dropbox, Instagram, …). Extends RequestClient with per-plugin auth-token
// storage, optional pre-auth tokens, and the standard provider endpoints
// (connect / list / get / logout).
module . exports = class Provider extends RequestClient {
constructor ( uppy , opts ) {
super ( uppy , opts ) ;
this . provider = opts . provider ;
this . id = this . provider ;
this . name = this . opts . name || getName ( this . id ) ;
this . pluginId = this . opts . pluginId ;
// Storage key under which this plugin's Companion auth token is persisted.
this . tokenKey = ` companion- ${ this . pluginId } -auth-token ` ;
this . companionKeysParams = this . opts . companionKeysParams ;
this . preAuthToken = null ;
}
// Extend the base request headers with the stored auth token and, when
// custom credentials are configured, their base64-encoded parameters.
headers ( ) {
return Promise . all ( [ super . headers ( ) , this . getAuthToken ( ) ] ) . then ( _ref => {
let [ headers , token ] = _ref ;
const authHeaders = { } ;
if ( token ) {
authHeaders [ 'uppy-auth-token' ] = token ;
}
if ( this . companionKeysParams ) {
authHeaders [ 'uppy-credentials-params' ] = btoa ( JSON . stringify ( {
params : this . companionKeysParams
} ) ) ;
}
return { ... headers ,
... authHeaders
} ;
} ) ;
}
// Track authentication state on the owning plugin: once authenticated, only
// a 401 flips it back to false; before that, any status below 400 counts
// as authenticated.
onReceiveResponse ( response ) {
response = super . onReceiveResponse ( response ) ;
const plugin = this . uppy . getPlugin ( this . pluginId ) ;
const oldAuthenticated = plugin . getPluginState ( ) . authenticated ;
const authenticated = oldAuthenticated ? response . status !== 401 : response . status < 400 ;
plugin . setPluginState ( {
authenticated
} ) ;
return response ;
}
setAuthToken ( token ) {
return this . uppy . getPlugin ( this . pluginId ) . storage . setItem ( this . tokenKey , token ) ;
}
getAuthToken ( ) {
return this . uppy . getPlugin ( this . pluginId ) . storage . getItem ( this . tokenKey ) ;
}
/ * *
* Ensure we have a preauth token if necessary . Attempts to fetch one if we don ' t ,
* or rejects if loading one fails .
* /
async ensurePreAuth ( ) {
if ( this . companionKeysParams && ! this . preAuthToken ) {
await this . fetchPreAuthToken ( ) ;
if ( ! this . preAuthToken ) {
throw new Error ( 'Could not load authentication data required for third-party login. Please try again later.' ) ;
}
}
}
// Build the Companion OAuth connect URL, forwarding the preauth token when set.
authUrl ( queries ) {
if ( queries === void 0 ) {
queries = { } ;
}
const params = new URLSearchParams ( queries ) ;
if ( this . preAuthToken ) {
params . set ( 'uppyPreAuthToken' , this . preAuthToken ) ;
}
return ` ${ this . hostname } / ${ this . id } /connect? ${ params } ` ;
}
fileUrl ( id ) {
return ` ${ this . hostname } / ${ this . id } /get/ ${ id } ` ;
}
// Best-effort fetch: failures are logged as warnings and leave
// `preAuthToken` null (ensurePreAuth turns that into a hard error).
async fetchPreAuthToken ( ) {
if ( ! this . companionKeysParams ) {
return ;
}
try {
const res = await this . post ( ` ${ this . id } /preauth/ ` , {
params : this . companionKeysParams
} ) ;
this . preAuthToken = res . token ;
} catch ( err ) {
this . uppy . log ( ` [CompanionClient] unable to fetch preAuthToken ${ err } ` , 'warning' ) ;
}
}
list ( directory ) {
return this . get ( ` ${ this . id } /list/ ${ directory || '' } ` ) ;
}
// Log out on the Companion side, then drop the locally stored token and
// resolve with the server's logout response.
logout ( ) {
return this . get ( ` ${ this . id } /logout ` ) . then ( response => Promise . all ( [ response , this . uppy . getPlugin ( this . pluginId ) . storage . removeItem ( this . tokenKey ) ] ) ) . then ( _ref2 => {
let [ response ] = _ref2 ;
return response ;
} ) ;
}
// Shared setup for provider-backed acquirer plugins: validates Companion
// options and derives `companionAllowedHosts` from `companionUrl`.
static initPlugin ( plugin , opts , defaultOpts ) {
plugin . type = 'acquirer' ;
plugin . files = [ ] ;
if ( defaultOpts ) {
plugin . opts = { ... defaultOpts ,
... opts
} ;
}
if ( opts . serverUrl || opts . serverPattern ) {
throw new Error ( '`serverUrl` and `serverPattern` have been renamed to `companionUrl` and `companionAllowedHosts` respectively in the 0.30.5 release. Please consult the docs (for example, https://uppy.io/docs/instagram/ for the Instagram plugin) and use the updated options.`' ) ;
}
if ( opts . companionAllowedHosts ) {
const pattern = opts . companionAllowedHosts ; // validate companionAllowedHosts param
if ( typeof pattern !== 'string' && ! Array . isArray ( pattern ) && ! ( pattern instanceof RegExp ) ) {
throw new TypeError ( ` ${ plugin . id } : the option "companionAllowedHosts" must be one of string, Array, RegExp ` ) ;
}
plugin . opts . companionAllowedHosts = pattern ;
} else if ( /^(?!https?:\/\/).*$/i . test ( opts . companionUrl ) ) {
// does not start with https://
plugin . opts . companionAllowedHosts = ` https:// ${ opts . companionUrl . replace ( /^\/\// , '' ) } ` ;
} else {
plugin . opts . companionAllowedHosts = new URL ( opts . companionUrl ) . origin ;
}
plugin . storage = plugin . opts . storage || tokenStorage ;
}
} ;
} , { "./RequestClient" : 10 , "./tokenStorage" : 14 } ] , 10 : [ function ( require , module , exports ) {
'use strict';

// Slots filled later in this module: the finished RequestClient class
// (`_class` / `_temp`) and the Babel "loose" private-field keys it uses.
let _class;
let _getPostResponseFunc;
let _getUrl;
let _errorHandler;
let _temp;

// Return `receiver` after asserting it really owns the private slot.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
    throw new TypeError("attempted to use private field on non-instance");
  }
  return receiver;
}

let id = 0;

// Mint a unique property name backing one "loose" private field.
function _classPrivateFieldLooseKey(name) {
  return "__private_" + id++ + "_" + name;
}
const fetchWithNetworkError = require ( '@uppy/utils/lib/fetchWithNetworkError' ) ;
const AuthError = require ( './AuthError' ) ; // Remove the trailing slash so we can always safely append /xyz.
// Remove a single trailing slash so we can always safely append "/xyz".
function stripSlash (url) {
  return url.endsWith('/') ? url.slice(0, -1) : url;
}
/**
 * Turn a Companion fetch Response into parsed JSON, or throw:
 * - AuthError on 401, so callers can reconnect the provider;
 * - Error carrying status/statusText (plus `message` / `requestId` from the
 *   JSON body when it parses) on any other non-success status.
 *
 * NOTE(review): the success window is 200..300 inclusive, mirroring the
 * original `status < 200 || status > 300` check — status 300 passes through.
 * The whitespace-mangled template literals are restored to their intended
 * message text here.
 */
async function handleJSONResponse (res) {
  if (res.status === 401) {
    throw new AuthError();
  }
  // Start parsing before the status check; the body is consumed either way.
  const jsonPromise = res.json();
  if (res.status >= 200 && res.status <= 300) {
    return jsonPromise;
  }
  let errMsg = `Failed request with status: ${res.status}. ${res.statusText}`;
  try {
    const errData = await jsonPromise;
    if (errData.message) errMsg = `${errMsg} message: ${errData.message}`;
    if (errData.requestId) errMsg = `${errMsg} request-Id: ${errData.requestId}`;
  } finally {
    // Throw regardless of whether the error body could be parsed.
    // eslint-disable-next-line no-unsafe-finally
    throw new Error(errMsg);
  }
}
module . exports = ( _temp = ( _getPostResponseFunc = /*#__PURE__*/ _classPrivateFieldLooseKey ( "getPostResponseFunc" ) , _getUrl = /*#__PURE__*/ _classPrivateFieldLooseKey ( "getUrl" ) , _errorHandler = /*#__PURE__*/ _classPrivateFieldLooseKey ( "errorHandler" ) , _class = class RequestClient {
// eslint-disable-next-line global-require
constructor ( uppy , opts ) {
Object . defineProperty ( this , _errorHandler , {
value : _errorHandler2
} ) ;
Object . defineProperty ( this , _getUrl , {
value : _getUrl2
} ) ;
Object . defineProperty ( this , _getPostResponseFunc , {
writable : true ,
value : skip => response => skip ? response : this . onReceiveResponse ( response )
} ) ;
this . uppy = uppy ;
this . opts = opts ;
this . onReceiveResponse = this . onReceiveResponse . bind ( this ) ;
this . allowedHeaders = [ 'accept' , 'content-type' , 'uppy-auth-token' ] ;
this . preflightDone = false ;
}
get hostname ( ) {
const {
companion
} = this . uppy . getState ( ) ;
const host = this . opts . companionUrl ;
return stripSlash ( companion && companion [ host ] ? companion [ host ] : host ) ;
}
headers ( ) {
const userHeaders = this . opts . companionHeaders || { } ;
return Promise . resolve ( { ... RequestClient . defaultHeaders ,
... userHeaders
} ) ;
}
onReceiveResponse ( response ) {
const state = this . uppy . getState ( ) ;
const companion = state . companion || { } ;
const host = this . opts . companionUrl ;
const {
headers
} = response ; // Store the self-identified domain name for the Companion instance we just hit.
if ( headers . has ( 'i-am' ) && headers . get ( 'i-am' ) !== companion [ host ] ) {
this . uppy . setState ( {
companion : { ... companion ,
[ host ] : headers . get ( 'i-am' )
}
} ) ;
}
return response ;
}
preflight ( path ) {
if ( this . preflightDone ) {
return Promise . resolve ( this . allowedHeaders . slice ( ) ) ;
}
return fetch ( _classPrivateFieldLooseBase ( this , _getUrl ) [ _getUrl ] ( path ) , {
method : 'OPTIONS'
} ) . then ( response => {
if ( response . headers . has ( 'access-control-allow-headers' ) ) {
this . allowedHeaders = response . headers . get ( 'access-control-allow-headers' ) . split ( ',' ) . map ( headerName => headerName . trim ( ) . toLowerCase ( ) ) ;
}
this . preflightDone = true ;
return this . allowedHeaders . slice ( ) ;
} ) . catch ( err => {
this . uppy . log ( ` [CompanionClient] unable to make preflight request ${ err } ` , 'warning' ) ;
this . preflightDone = true ;
return this . allowedHeaders . slice ( ) ;
} ) ;
}
preflightAndHeaders ( path ) {
return Promise . all ( [ this . preflight ( path ) , this . headers ( ) ] ) . then ( _ref => {
let [ allowedHeaders , headers ] = _ref ;
// filter to keep only allowed Headers
Object . keys ( headers ) . forEach ( header => {
if ( ! allowedHeaders . includes ( header . toLowerCase ( ) ) ) {
this . uppy . log ( ` [CompanionClient] excluding disallowed header ${ header } ` ) ;
delete headers [ header ] ; // eslint-disable-line no-param-reassign
}
} ) ;
return headers ;
} ) ;
}
get ( path , skipPostResponse ) {
const method = 'get' ;
return this . preflightAndHeaders ( path ) . then ( headers => fetchWithNetworkError ( _classPrivateFieldLooseBase ( this , _getUrl ) [ _getUrl ] ( path ) , {
method ,
headers ,
credentials : this . opts . companionCookiesRule || 'same-origin'
} ) ) . then ( _classPrivateFieldLooseBase ( this , _getPostResponseFunc ) [ _getPostResponseFunc ] ( skipPostResponse ) ) . then ( handleJSONResponse ) . catch ( _classPrivateFieldLooseBase ( this , _errorHandler ) [ _errorHandler ] ( method , path ) ) ;
}
post ( path , data , skipPostResponse ) {
const method = 'post' ;
return this . preflightAndHeaders ( path ) . then ( headers => fetchWithNetworkError ( _classPrivateFieldLooseBase ( this , _getUrl ) [ _getUrl ] ( path ) , {
method ,
headers ,
credentials : this . opts . companionCookiesRule || 'same-origin' ,
body : JSON . stringify ( data )
} ) ) . then ( _classPrivateFieldLooseBase ( this , _getPostResponseFunc ) [ _getPostResponseFunc ] ( skipPostResponse ) ) . then ( handleJSONResponse ) . catch ( _classPrivateFieldLooseBase ( this , _errorHandler ) [ _errorHandler ] ( method , path ) ) ;
}
delete ( path , data , skipPostResponse ) {
const method = 'delete' ;
return this . preflightAndHeaders ( path ) . then ( headers => fetchWithNetworkError ( ` ${ this . hostname } / ${ path } ` , {
method ,
headers ,
credentials : this . opts . companionCookiesRule || 'same-origin' ,
body : data ? JSON . stringify ( data ) : null
} ) ) . then ( _classPrivateFieldLooseBase ( this , _getPostResponseFunc ) [ _getPostResponseFunc ] ( skipPostResponse ) ) . then ( handleJSONResponse ) . catch ( _classPrivateFieldLooseBase ( this , _errorHandler ) [ _errorHandler ] ( method , path ) ) ;
}
} ) , _class . VERSION = "2.0.5" , _class . defaultHeaders = {
Accept : 'application/json' ,
'Content-Type' : 'application/json' ,
'Uppy-Versions' : ` @uppy/companion-client= ${ _class . VERSION } `
} , _temp ) ;
// Resolve `url` against the Companion hostname unless it is already absolute
// (http:// or https://) or protocol-relative (//). The mangled template
// literal (which produced URLs with embedded spaces) is restored.
function _getUrl2 (url) {
  if (/^(https?:|)\/\//.test(url)) {
    return url;
  }
  return `${this.hostname}/${url}`;
}
// Build the shared `.catch` handler for get/post/delete. Auth errors pass
// through untouched so callers can trigger re-authentication; anything else
// is wrapped with the request method and URL, keeping the original error as
// `cause`. The mangled template literal in the message is restored.
function _errorHandler2 (method, path) {
  return err => {
    if (err == null || !err.isAuthError) {
      const error = new Error(`Could not ${method} ${_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path)}`);
      error.cause = err;
      err = error; // eslint-disable-line no-param-reassign
    }
    return Promise.reject(err);
  };
}
} , { "./AuthError" : 8 , "@uppy/utils/lib/fetchWithNetworkError" : 34 } ] , 11 : [ function ( require , module , exports ) {
'use strict' ;
const RequestClient = require ( './RequestClient' ) ;
// "google-drive" -> "Google Drive": capitalize each dash-separated part.
const getName = (id) => {
  const capitalized = id.split('-').map((part) => part.charAt(0).toUpperCase() + part.slice(1));
  return capitalized.join(' ');
};
module . exports = class SearchProvider extends RequestClient {
constructor ( uppy , opts ) {
super ( uppy , opts ) ;
this . provider = opts . provider ;
this . id = this . provider ;
this . name = this . opts . name || getName ( this . id ) ;
this . pluginId = this . opts . pluginId ;
}
fileUrl ( id ) {
return ` ${ this . hostname } /search/ ${ this . id } /get/ ${ id } ` ;
}
search ( text , queries ) {
queries = queries ? ` & ${ queries } ` : '' ;
return this . get ( ` search/ ${ this . id } /list?q= ${ encodeURIComponent ( text ) } ${ queries } ` ) ;
}
} ;
} , { "./RequestClient" : 10 } ] , 12 : [ function ( require , module , exports ) {
// Babel "loose" private-field keys for UppySocket, plus the Symbol.for keys
// that expose internals to Uppy's tests; all assigned below.
let _queued;
let _emitter;
let _isOpen;
let _socket;
let _handleMessage;
let _Symbol$for;
let _Symbol$for2;

// Return `receiver` after asserting it really owns the private slot.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
    throw new TypeError("attempted to use private field on non-instance");
  }
  return receiver;
}

let id = 0;

// Mint a unique property name backing one "loose" private field.
function _classPrivateFieldLooseKey(name) {
  return "__private_" + id++ + "_" + name;
}
const ee = require ( 'namespace-emitter' ) ;
// UppySocket: thin wrapper around a browser WebSocket carrying JSON frames of
// shape { action, payload }. Events fan out through a namespace-emitter.
// Generated "loose" private fields: queued / emitter / isOpen / socket /
// handleMessage; the two Symbol.for keys expose internals to Uppy's tests.
module . exports = ( _queued = /*#__PURE__*/ _classPrivateFieldLooseKey ( "queued" ) , _emitter = /*#__PURE__*/ _classPrivateFieldLooseKey ( "emitter" ) , _isOpen = /*#__PURE__*/ _classPrivateFieldLooseKey ( "isOpen" ) , _socket = /*#__PURE__*/ _classPrivateFieldLooseKey ( "socket" ) , _handleMessage = /*#__PURE__*/ _classPrivateFieldLooseKey ( "handleMessage" ) , _Symbol$for = Symbol . for ( 'uppy test: getSocket' ) , _Symbol$for2 = Symbol . for ( 'uppy test: getQueued' ) , class UppySocket {
constructor ( opts ) {
// Messages sent before the socket opens are buffered here (see send()).
Object . defineProperty ( this , _queued , {
writable : true ,
value : [ ]
} ) ;
Object . defineProperty ( this , _emitter , {
writable : true ,
value : ee ( )
} ) ;
Object . defineProperty ( this , _isOpen , {
writable : true ,
value : false
} ) ;
Object . defineProperty ( this , _socket , {
writable : true ,
value : void 0
} ) ;
// Parses incoming frames and re-emits them as (action, payload) events.
Object . defineProperty ( this , _handleMessage , {
writable : true ,
value : e => {
try {
const message = JSON . parse ( e . data ) ;
this . emit ( message . action , message . payload ) ;
} catch ( err ) {
// TODO: use a more robust error handler.
console . log ( err ) ; // eslint-disable-line no-console
}
}
} ) ;
this . opts = opts ;
// Connect immediately unless autoOpen is explicitly disabled.
if ( ! opts || opts . autoOpen !== false ) {
this . open ( ) ;
}
}
get isOpen ( ) {
return _classPrivateFieldLooseBase ( this , _isOpen ) [ _isOpen ] ;
}
// Test-only accessors, keyed by Symbol.for so production code won't collide.
[ _Symbol$for ] ( ) {
return _classPrivateFieldLooseBase ( this , _socket ) [ _socket ] ;
}
[ _Symbol$for2 ] ( ) {
return _classPrivateFieldLooseBase ( this , _queued ) [ _queued ] ;
}
// Open the WebSocket against opts.target and flush queued messages once
// connected.
open ( ) {
_classPrivateFieldLooseBase ( this , _socket ) [ _socket ] = new WebSocket ( this . opts . target ) ;
_classPrivateFieldLooseBase ( this , _socket ) [ _socket ] . onopen = ( ) => {
_classPrivateFieldLooseBase ( this , _isOpen ) [ _isOpen ] = true ;
// Drain in FIFO order; stop early if the socket closes mid-flush.
while ( _classPrivateFieldLooseBase ( this , _queued ) [ _queued ] . length > 0 && _classPrivateFieldLooseBase ( this , _isOpen ) [ _isOpen ] ) {
const first = _classPrivateFieldLooseBase ( this , _queued ) [ _queued ] . shift ( ) ;
this . send ( first . action , first . payload ) ;
}
} ;
_classPrivateFieldLooseBase ( this , _socket ) [ _socket ] . onclose = ( ) => {
_classPrivateFieldLooseBase ( this , _isOpen ) [ _isOpen ] = false ;
} ;
_classPrivateFieldLooseBase ( this , _socket ) [ _socket ] . onmessage = _classPrivateFieldLooseBase ( this , _handleMessage ) [ _handleMessage ] ;
}
// Close the underlying socket if one was ever opened.
close ( ) {
let _classPrivateFieldLoo ;
( _classPrivateFieldLoo = _classPrivateFieldLooseBase ( this , _socket ) [ _socket ] ) == null ? void 0 : _classPrivateFieldLoo . close ( ) ;
}
// Send { action, payload }; buffered while the socket is not yet open.
send ( action , payload ) {
// attach uuid
if ( ! _classPrivateFieldLooseBase ( this , _isOpen ) [ _isOpen ] ) {
_classPrivateFieldLooseBase ( this , _queued ) [ _queued ] . push ( {
action ,
payload
} ) ;
return ;
}
_classPrivateFieldLooseBase ( this , _socket ) [ _socket ] . send ( JSON . stringify ( {
action ,
payload
} ) ) ;
}
// Event-emitter pass-throughs.
on ( action , handler ) {
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . on ( action , handler ) ;
}
emit ( action , payload ) {
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . emit ( action , payload ) ;
}
once ( action , handler ) {
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . once ( action , handler ) ;
}
} ) ;
} , { "namespace-emitter" : 56 } ] , 13 : [ function ( require , module , exports ) {
'use strict' ;
/**
 * Manages communications with Companion.
 *
 * Barrel module re-exporting the client classes defined in this package.
 */
const RequestClient = require ( './RequestClient' ) ;
const Provider = require ( './Provider' ) ;
const SearchProvider = require ( './SearchProvider' ) ;
const Socket = require ( './Socket' ) ;
module . exports = {
RequestClient ,
Provider ,
SearchProvider ,
Socket
} ;
} , { "./Provider" : 9 , "./RequestClient" : 10 , "./SearchProvider" : 11 , "./Socket" : 12 } ] , 14 : [ function ( require , module , exports ) {
'use strict';

/**
 * This module serves as an Async wrapper for LocalStorage.
 *
 * Each function mirrors the Web Storage API but returns a Promise, so the
 * storage backend can later be swapped for a genuinely asynchronous one.
 * They are written as `async` functions (instead of hand-built `new Promise`
 * executors) so a synchronous `localStorage` failure — e.g. a quota error in
 * setItem — is consistently surfaced as a rejected Promise.
 */
module.exports.setItem = async (key, value) => {
  localStorage.setItem(key, value);
};

module.exports.getItem = async (key) => {
  return localStorage.getItem(key);
};

module.exports.removeItem = async (key) => {
  localStorage.removeItem(key);
};
} , { } ] , 15 : [ function ( require , module , exports ) {
/**
 * Core plugin logic that all plugins share.
 *
 * BasePlugin does not contain DOM rendering so it can be used for plugins
 * without a user interface.
 *
 * See `Plugin` for the extended version with Preact rendering for interfaces.
 */
const Translator = require ( '@uppy/utils/lib/Translator' ) ;
module . exports = class BasePlugin {
constructor ( uppy , opts ) {
if ( opts === void 0 ) {
opts = { } ;
}
this . uppy = uppy ;
this . opts = opts ;
}
getPluginState ( ) {
const {
plugins
} = this . uppy . getState ( ) ;
return plugins [ this . id ] || { } ;
}
setPluginState ( update ) {
const {
plugins
} = this . uppy . getState ( ) ;
this . uppy . setState ( {
plugins : { ... plugins ,
[ this . id ] : { ... plugins [ this . id ] ,
... update
}
}
} ) ;
}
setOptions ( newOpts ) {
this . opts = { ... this . opts ,
... newOpts
} ;
this . setPluginState ( ) ; // so that UI re-renders with new options
this . i18nInit ( ) ;
}
i18nInit ( ) {
const translator = new Translator ( [ this . defaultLocale , this . uppy . locale , this . opts . locale ] ) ;
this . i18n = translator . translate . bind ( translator ) ;
this . i18nArray = translator . translateArray . bind ( translator ) ;
this . setPluginState ( ) ; // so that UI re-renders and we see the updated locale
}
/ * *
* Extendable methods
* === === === === === ===
* These methods are here to serve as an overview of the extendable methods as well as
* making them not conditional in use , such as ` if (this.afterUpdate) ` .
* /
// eslint-disable-next-line class-methods-use-this
addTarget ( ) {
throw new Error ( 'Extend the addTarget method to add your plugin to another plugin\'s target' ) ;
} // eslint-disable-next-line class-methods-use-this
install ( ) { } // eslint-disable-next-line class-methods-use-this
uninstall ( ) { }
/ * *
* Called when plugin is mounted , whether in DOM or into another plugin .
* Needed because sometimes plugins are mounted separately / after ` install ` ,
* so this . el and this . parent might not be available in ` install ` .
* This is the case with @ uppy / react plugins , for example .
* /
render ( ) {
throw new Error ( 'Extend the render method to add your plugin to a DOM element' ) ;
} // eslint-disable-next-line class-methods-use-this
update ( ) { } // Called after every state update, after everything's mounted. Debounced.
// eslint-disable-next-line class-methods-use-this
afterUpdate ( ) { }
} ;
} , { "@uppy/utils/lib/Translator" : 31 } ] , 16 : [ function ( require , module , exports ) {
/* eslint-disable max-classes-per-file, class-methods-use-this */
/* global AggregateError */
const prettierBytes = require ( '@transloadit/prettier-bytes' ) ;
const match = require ( 'mime-match' ) ;
// Baseline restriction set: `null` means "not enforced".
const defaultOptions = {
  maxFileSize: null,
  minFileSize: null,
  maxTotalFileSize: null,
  maxNumberOfFiles: null,
  minNumberOfFiles: null,
  allowedFileTypes: null,
  requiredMetaFields: []
};
// A validation failure raised by Restricter; `isRestriction` lets callers
// distinguish these expected errors from unexpected ones.
// NOTE(review): this block contained stray VCS-timestamp lines (syntax
// errors), and `super()` was dropping the error message. Restored Babel's
// generated pass-through constructor.
class RestrictionError extends Error {
  constructor (...args) {
    super(...args);
    this.isRestriction = true;
  }
}
// Install a minimal AggregateError stand-in on globals that lack it (ES2021).
// NOTE(review): unlike the native constructor, this stand-in stores `errors`
// as given (no iterable normalization) — presumably sufficient for Uppy's
// internal use; confirm before relying on it elsewhere.
if ( typeof AggregateError === 'undefined' ) {
// eslint-disable-next-line no-global-assign
// TODO: remove this "polyfill" in the next major.
globalThis . AggregateError = class AggregateError extends Error {
constructor ( errors , message ) {
super ( message ) ;
this . errors = errors ;
}
} ;
}
class Restricter {
  /**
   * @param {() => object} getOpts - returns the current Uppy options object.
   * @param {Function} i18n - translator used to build error messages.
   */
  constructor (getOpts, i18n) {
    this.i18n = i18n;
    // Wrap the getter so every read re-validates `allowedFileTypes`.
    this.getOpts = () => {
      const opts = getOpts();
      if (opts.restrictions.allowedFileTypes != null && !Array.isArray(opts.restrictions.allowedFileTypes)) {
        throw new TypeError('`restrictions.allowedFileTypes` must be an array');
      }
      return opts;
    };
  }

  /**
   * Throws a RestrictionError when adding `file` to `files` would violate a
   * configured restriction. Null restrictions are skipped; size checks are
   * skipped when `file.size` is unknown.
   * NOTE(review): the corrupted bundle had `smart _count` (split identifier,
   * a parse error); restored to `smart_count` as the translator expects.
   */
  validate (file, files) {
    const {
      maxFileSize,
      minFileSize,
      maxTotalFileSize,
      maxNumberOfFiles,
      allowedFileTypes
    } = this.getOpts().restrictions;

    if (maxNumberOfFiles && files.length + 1 > maxNumberOfFiles) {
      throw new RestrictionError(`${this.i18n('youCanOnlyUploadX', {
        smart_count: maxNumberOfFiles
      })}`);
    }

    if (allowedFileTypes) {
      const isCorrectFileType = allowedFileTypes.some(type => {
        // Entries containing a slash are MIME types; match with wildcard
        // support, stripping any codec suffix (e.g. "video/webm;codecs=vp8").
        if (type.includes('/')) {
          if (!file.type) return false;
          return match(file.type.replace(/;.*?$/, ''), type);
        }
        // Otherwise this is likely an extension.
        if (type[0] === '.' && file.extension) {
          return file.extension.toLowerCase() === type.slice(1).toLowerCase();
        }
        return false;
      });
      if (!isCorrectFileType) {
        const allowedFileTypesString = allowedFileTypes.join(', ');
        throw new RestrictionError(this.i18n('youCanOnlyUploadFileTypes', {
          types: allowedFileTypesString
        }));
      }
    }

    // We can't check maxTotalFileSize if the size is unknown.
    if (maxTotalFileSize && file.size != null) {
      const totalFilesSize = files.reduce((total, f) => total + f.size, file.size);
      if (totalFilesSize > maxTotalFileSize) {
        throw new RestrictionError(this.i18n('exceedsSize', {
          size: prettierBytes(maxTotalFileSize),
          file: file.name
        }));
      }
    }

    // We can't check maxFileSize if the size is unknown.
    if (maxFileSize && file.size != null && file.size > maxFileSize) {
      throw new RestrictionError(this.i18n('exceedsSize', {
        size: prettierBytes(maxFileSize),
        file: file.name
      }));
    }

    // We can't check minFileSize if the size is unknown.
    if (minFileSize && file.size != null && file.size < minFileSize) {
      throw new RestrictionError(this.i18n('inferiorSize', {
        size: prettierBytes(minFileSize)
      }));
    }
  }

  /** Throws when fewer than `minNumberOfFiles` files have been selected. */
  validateMinNumberOfFiles (files) {
    const { minNumberOfFiles } = this.getOpts().restrictions;
    if (Object.keys(files).length < minNumberOfFiles) {
      throw new RestrictionError(this.i18n('youHaveToAtLeastSelectX', {
        smart_count: minNumberOfFiles
      }));
    }
  }

  /**
   * Returns `{ missingFields, error }`: required meta fields absent or empty
   * on `file`, plus a ready-made error for callers to raise or emit.
   */
  getMissingRequiredMetaFields (file) {
    const error = new RestrictionError(this.i18n('missingRequiredMetaFieldOnFile', {
      fileName: file.name
    }));
    const { requiredMetaFields } = this.getOpts().restrictions;
    // TODO: migrate to Object.hasOwn in the next major.
    const own = Object.prototype.hasOwnProperty;
    const missingFields = [];
    for (const field of requiredMetaFields) {
      if (!own.call(file.meta, field) || file.meta[field] === '') {
        missingFields.push(field);
      }
    }
    return { missingFields, error };
  }
}
// Public surface of this module: the validator class, its default
// restriction set, and the error type it throws.
module . exports = {
Restricter ,
defaultOptions ,
RestrictionError
} ;
} , { "@transloadit/prettier-bytes" : 1 , "mime-match" : 55 } ] , 17 : [ function ( require , module , exports ) {
// Babel "loose" private-field helpers for the UIPlugin module.
// Return `receiver` after asserting it really owns the private slot.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
    throw new TypeError("attempted to use private field on non-instance");
  }
  return receiver;
}

let id = 0;

// Mint a unique property name backing one "loose" private field.
function _classPrivateFieldLooseKey(name) {
  return "__private_" + id++ + "_" + name;
}
const {
render
} = require ( 'preact' ) ;
const findDOMElement = require ( '@uppy/utils/lib/findDOMElement' ) ;
const BasePlugin = require ( './BasePlugin' ) ;
/**
 * Defer a frequent call to the microtask queue.
 *
 * @param {() => T} fn
 * @returns {Promise<T>}
 */
// Collapse a burst of calls into one microtask-deferred invocation of `fn`.
// All calls made while a flush is pending share the same promise, and only
// the most recent arguments are passed through.
function debounce (fn) {
  let pending = null;
  let lastArgs = null;
  return function (...args) {
    lastArgs = args;
    if (!pending) {
      pending = Promise.resolve().then(() => {
        pending = null;
        // Use `lastArgs` (not `args`): if multiple calls happened before this
        // task ran, the newest arguments win.
        return fn(...lastArgs);
      });
    }
    return pending;
  };
}
/**
 * UIPlugin is the extended version of BasePlugin to incorporate rendering with Preact.
 * Use this for plugins that need a user interface.
 *
 * For plugins without a user interface, see BasePlugin.
 */
let _updateUI = /*#__PURE__*/_classPrivateFieldLooseKey("updateUI");

class UIPlugin extends BasePlugin {
  // NOTE(review): the original bundle had stray VCS-timestamp lines here
  // (syntax errors) and called `super()` with no arguments, which would lose
  // `uppy`/`opts`. Restored Babel's generated pass-through constructor.
  constructor (...args) {
    super(...args);
    // Private slot for the debounced re-render function, set in mount().
    Object.defineProperty(this, _updateUI, {
      writable: true,
      value: void 0
    });
  }

  /**
   * Check if supplied `target` is a DOM element or an `object`.
   * If it's an object — target is a plugin, and we search `plugins`
   * for a plugin with same name and return its target.
   */
  mount (target, plugin) {
    const callerPluginName = plugin.id;
    const targetElement = findDOMElement(target);

    if (targetElement) {
      this.isTargetDOMEl = true;
      // When target is <body> with a single <div> element, Preact thinks it's
      // the Uppy root element in there when doing a diff, and destroys it.
      // So we are creating a fragment (could be empty div).
      const uppyRootElement = document.createDocumentFragment();

      // API for plugins that require a synchronous rerender.
      _classPrivateFieldLooseBase(this, _updateUI)[_updateUI] = debounce(state => {
        // Plugin could be removed, but the rerender is debounced, so it could
        // still be called after uppy.removePlugin or uppy.close — hence the check.
        if (!this.uppy.getPlugin(this.id)) return;
        render(this.render(state), uppyRootElement);
        this.afterUpdate();
      });

      this.uppy.log(`Installing ${callerPluginName} to a DOM element '${target}'`);
      if (this.opts.replaceTargetContent) {
        // Doing render(h(null), targetElement), which should have been a
        // better way (the component might need additional cleanup when
        // removed), stopped working — Preact just adds null into target,
        // not replacing.
        targetElement.innerHTML = '';
      }
      render(this.render(this.uppy.getState()), uppyRootElement);
      this.el = uppyRootElement.firstElementChild;
      targetElement.appendChild(uppyRootElement);
      this.onMount();
      return this.el;
    }

    let targetPlugin;
    if (typeof target === 'object' && target instanceof UIPlugin) {
      // Targeting a plugin *instance*.
      targetPlugin = target;
    } else if (typeof target === 'function') {
      // Targeting a plugin type: find its installed instance.
      const Target = target;
      this.uppy.iteratePlugins(p => {
        if (p instanceof Target) {
          targetPlugin = p;
          return false;
        }
      });
    }

    if (targetPlugin) {
      this.uppy.log(`Installing ${callerPluginName} to ${targetPlugin.id}`);
      this.parent = targetPlugin;
      this.el = targetPlugin.addTarget(plugin);
      this.onMount();
      return this.el;
    }

    this.uppy.log(`Not installing ${callerPluginName}`);
    let message = `Invalid target option given to ${callerPluginName}.`;
    if (typeof target === 'function') {
      message += ' The given target is not a Plugin class. ' + 'Please check that you\'re not specifying a React Component instead of a plugin. ' + 'If you are using @uppy/* packages directly, make sure you have only 1 version of @uppy/core installed: ' + 'run `npm ls @uppy/core` on the command line and verify that all the versions match and are deduped correctly.';
    } else {
      message += 'If you meant to target an HTML element, please make sure that the element exists. ' + 'Check that the <script> tag initializing Uppy is right before the closing </body> tag at the end of the page. ' + '(see https://github.com/transloadit/uppy/issues/1042)\n\n' + 'If you meant to target a plugin, please confirm that your `import` statements or `require` calls are correct.';
    }
    throw new Error(message);
  }

  // Forward state to the debounced renderer installed by mount(), if any.
  update (state) {
    if (this.el != null) {
      const base = _classPrivateFieldLooseBase(this, _updateUI);
      if (base[_updateUI] != null) {
        base[_updateUI](state);
      }
    }
  }

  unmount () {
    if (this.isTargetDOMEl) {
      if (this.el != null) {
        this.el.remove();
      }
    }
    this.onUnmount();
  }

  // eslint-disable-next-line class-methods-use-this
  onMount () {}

  // eslint-disable-next-line class-methods-use-this
  onUnmount () {}
}
module.exports = UIPlugin;
} , { "./BasePlugin" : 15 , "@uppy/utils/lib/findDOMElement" : 35 , "preact" : 58 } ] , 18 : [ function ( require , module , exports ) {
/* eslint-disable max-classes-per-file */
/* global AggregateError */
'use strict';

// Symbol.for keys exposing Uppy internals to its tests; assigned below.
let _Symbol$for;
let _Symbol$for2;

// Return `receiver` after asserting it really owns the private slot.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
    throw new TypeError("attempted to use private field on non-instance");
  }
  return receiver;
}

let id = 0;

// Mint a unique property name backing one "loose" private field.
function _classPrivateFieldLooseKey(name) {
  return "__private_" + id++ + "_" + name;
}
const Translator = require ( '@uppy/utils/lib/Translator' ) ;
const ee = require ( 'namespace-emitter' ) ;
const {
nanoid
} = require ( 'nanoid/non-secure' ) ;
const throttle = require ( 'lodash.throttle' ) ;
const DefaultStore = require ( '@uppy/store-default' ) ;
const getFileType = require ( '@uppy/utils/lib/getFileType' ) ;
const getFileNameAndExtension = require ( '@uppy/utils/lib/getFileNameAndExtension' ) ;
const generateFileID = require ( '@uppy/utils/lib/generateFileID' ) ;
const supportsUploadProgress = require ( './supportsUploadProgress' ) ;
const getFileName = require ( './getFileName' ) ;
const {
justErrorsLogger ,
debugLogger
} = require ( './loggers' ) ;
const {
Restricter ,
defaultOptions : defaultRestrictionOptions ,
RestrictionError
} = require ( './Restricter' ) ;
const locale = require ( './locale' ) ; // Exported from here.
/**
 * Uppy Core module.
 * Manages plugins, state updates, acts as an event bus,
 * adds/removes files and metadata.
 */
// "Loose" private-field keys and test-only Symbol keys consumed by the Uppy
// class below (fields first, then private methods).
let _plugins = /*#__PURE__*/ _classPrivateFieldLooseKey ( "plugins" ) ;
let _restricter = /*#__PURE__*/ _classPrivateFieldLooseKey ( "restricter" ) ;
let _storeUnsubscribe = /*#__PURE__*/ _classPrivateFieldLooseKey ( "storeUnsubscribe" ) ;
let _emitter = /*#__PURE__*/ _classPrivateFieldLooseKey ( "emitter" ) ;
let _preProcessors = /*#__PURE__*/ _classPrivateFieldLooseKey ( "preProcessors" ) ;
let _uploaders = /*#__PURE__*/ _classPrivateFieldLooseKey ( "uploaders" ) ;
let _postProcessors = /*#__PURE__*/ _classPrivateFieldLooseKey ( "postProcessors" ) ;
let _informAndEmit = /*#__PURE__*/ _classPrivateFieldLooseKey ( "informAndEmit" ) ;
let _checkRequiredMetaFieldsOnFile = /*#__PURE__*/ _classPrivateFieldLooseKey ( "checkRequiredMetaFieldsOnFile" ) ;
let _checkRequiredMetaFields = /*#__PURE__*/ _classPrivateFieldLooseKey ( "checkRequiredMetaFields" ) ;
let _assertNewUploadAllowed = /*#__PURE__*/ _classPrivateFieldLooseKey ( "assertNewUploadAllowed" ) ;
let _checkAndCreateFileStateObject = /*#__PURE__*/ _classPrivateFieldLooseKey ( "checkAndCreateFileStateObject" ) ;
let _startIfAutoProceed = /*#__PURE__*/ _classPrivateFieldLooseKey ( "startIfAutoProceed" ) ;
let _addListeners = /*#__PURE__*/ _classPrivateFieldLooseKey ( "addListeners" ) ;
let _updateOnlineStatus = /*#__PURE__*/ _classPrivateFieldLooseKey ( "updateOnlineStatus" ) ;
let _createUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "createUpload" ) ;
let _getUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "getUpload" ) ;
let _removeUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "removeUpload" ) ;
let _runUpload = /*#__PURE__*/ _classPrivateFieldLooseKey ( "runUpload" ) ;
// Symbol.for-keyed accessors used only by Uppy's own test suite.
_Symbol$for = Symbol . for ( 'uppy test: getPlugins' ) ;
_Symbol$for2 = Symbol . for ( 'uppy test: createUpload' ) ;
class Uppy {
// eslint-disable-next-line global-require
/** @type {Record<string, BasePlugin[]>} */
/ * *
* Instantiate Uppy
*
* @ param { object } opts — Uppy options
* /
constructor ( _opts ) {
Object . defineProperty ( this , _runUpload , {
value : _runUpload2
} ) ;
Object . defineProperty ( this , _removeUpload , {
value : _removeUpload2
} ) ;
Object . defineProperty ( this , _getUpload , {
value : _getUpload2
} ) ;
Object . defineProperty ( this , _createUpload , {
value : _createUpload2
} ) ;
Object . defineProperty ( this , _addListeners , {
value : _addListeners2
} ) ;
Object . defineProperty ( this , _startIfAutoProceed , {
value : _startIfAutoProceed2
} ) ;
Object . defineProperty ( this , _checkAndCreateFileStateObject , {
value : _checkAndCreateFileStateObject2
} ) ;
Object . defineProperty ( this , _assertNewUploadAllowed , {
value : _assertNewUploadAllowed2
} ) ;
Object . defineProperty ( this , _checkRequiredMetaFields , {
value : _checkRequiredMetaFields2
} ) ;
Object . defineProperty ( this , _checkRequiredMetaFieldsOnFile , {
value : _checkRequiredMetaFieldsOnFile2
} ) ;
Object . defineProperty ( this , _informAndEmit , {
value : _informAndEmit2
} ) ;
Object . defineProperty ( this , _plugins , {
writable : true ,
value : Object . create ( null )
} ) ;
Object . defineProperty ( this , _restricter , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _storeUnsubscribe , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _emitter , {
writable : true ,
value : ee ( )
} ) ;
Object . defineProperty ( this , _preProcessors , {
writable : true ,
value : new Set ( )
} ) ;
Object . defineProperty ( this , _uploaders , {
writable : true ,
value : new Set ( )
} ) ;
Object . defineProperty ( this , _postProcessors , {
writable : true ,
value : new Set ( )
} ) ;
Object . defineProperty ( this , _updateOnlineStatus , {
writable : true ,
value : this . updateOnlineStatus . bind ( this )
} ) ;
this . defaultLocale = locale ;
const defaultOptions = {
id : 'uppy' ,
autoProceed : false ,
/ * *
* @ deprecated The method should not be used
* /
allowMultipleUploads : true ,
allowMultipleUploadBatches : true ,
debug : false ,
restrictions : defaultRestrictionOptions ,
meta : { } ,
onBeforeFileAdded : currentFile => currentFile ,
onBeforeUpload : files => files ,
store : DefaultStore ( ) ,
logger : justErrorsLogger ,
infoTimeout : 5000
} ; // Merge default options with the ones set by user,
// making sure to merge restrictions too
this . opts = { ... defaultOptions ,
... _opts ,
restrictions : { ... defaultOptions . restrictions ,
... ( _opts && _opts . restrictions )
}
} ; // Support debug: true for backwards-compatability, unless logger is set in opts
// opts instead of this.opts to avoid comparing objects — we set logger: justErrorsLogger in defaultOptions
if ( _opts && _opts . logger && _opts . debug ) {
this . log ( 'You are using a custom `logger`, but also set `debug: true`, which uses built-in logger to output logs to console. Ignoring `debug: true` and using your custom `logger`.' , 'warning' ) ;
} else if ( _opts && _opts . debug ) {
this . opts . logger = debugLogger ;
}
this . log ( ` Using Core v ${ this . constructor . VERSION } ` ) ;
this . i18nInit ( ) ; // ___Why throttle at 500ms?
// - We must throttle at >250ms for superfocus in Dashboard to work well
// (because animation takes 0.25s, and we want to wait for all animations to be over before refocusing).
// [Practical Check]: if thottle is at 100ms, then if you are uploading a file,
// and click 'ADD MORE FILES', - focus won't activate in Firefox.
// - We must throttle at around >500ms to avoid performance lags.
// [Practical Check] Firefox, try to upload a big file for a prolonged period of time. Laptop will start to heat up.
this . calculateProgress = throttle ( this . calculateProgress . bind ( this ) , 500 , {
leading : true ,
trailing : true
} ) ;
this . store = this . opts . store ;
this . setState ( {
plugins : { } ,
files : { } ,
currentUploads : { } ,
allowNewUpload : true ,
capabilities : {
uploadProgress : supportsUploadProgress ( ) ,
individualCancellation : true ,
resumableUploads : false
} ,
totalProgress : 0 ,
meta : { ... this . opts . meta
} ,
info : [ ] ,
recoveredState : null
} ) ;
_classPrivateFieldLooseBase ( this , _restricter ) [ _restricter ] = new Restricter ( ( ) => this . opts , this . i18n ) ;
_classPrivateFieldLooseBase ( this , _storeUnsubscribe ) [ _storeUnsubscribe ] = this . store . subscribe ( ( prevState , nextState , patch ) => {
this . emit ( 'state-update' , prevState , nextState , patch ) ;
this . updateAll ( nextState ) ;
} ) ; // Exposing uppy object on window for debugging and testing
if ( this . opts . debug && typeof window !== 'undefined' ) {
window [ this . opts . id ] = this ;
}
_classPrivateFieldLooseBase ( this , _addListeners ) [ _addListeners ] ( ) ;
}
emit ( event ) {
for ( var _len = arguments . length , args = new Array ( _len > 1 ? _len - 1 : 0 ) , _key = 1 ; _key < _len ; _key ++ ) {
args [ _key - 1 ] = arguments [ _key ] ;
}
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . emit ( event , ... args ) ;
}
on ( event , callback ) {
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . on ( event , callback ) ;
return this ;
}
once ( event , callback ) {
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . once ( event , callback ) ;
return this ;
}
off ( event , callback ) {
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . off ( event , callback ) ;
return this ;
}
/ * *
* Iterate on all plugins and run ` update ` on them .
* Called each time state changes .
*
* /
updateAll ( state ) {
this . iteratePlugins ( plugin => {
plugin . update ( state ) ;
} ) ;
}
/ * *
* Updates state with a patch
*
* @ param { object } patch { foo : 'bar' }
* /
setState ( patch ) {
this . store . setState ( patch ) ;
}
/ * *
* Returns current state .
*
* @ returns { object }
* /
getState ( ) {
return this . store . getState ( ) ;
}
/**
 * Back compat for code that reads `uppy.state` instead of calling
 * `uppy.getState()`.
 *
 * @deprecated use `getState()` instead
 */
get state ( ) {
// Here, state is a non-enumerable property (defined via a getter).
return this . getState ( ) ;
}
/ * *
* Shorthand to set state for a specific file .
* /
setFileState ( fileID , state ) {
if ( ! this . getState ( ) . files [ fileID ] ) {
throw new Error ( ` Can’ t set state for ${ fileID } (the file could have been removed) ` ) ;
}
this . setState ( {
files : { ... this . getState ( ) . files ,
[ fileID ] : { ... this . getState ( ) . files [ fileID ] ,
... state
}
}
} ) ;
}
i18nInit ( ) {
const translator = new Translator ( [ this . defaultLocale , this . opts . locale ] ) ;
this . i18n = translator . translate . bind ( translator ) ;
this . i18nArray = translator . translateArray . bind ( translator ) ;
this . locale = translator . locale ;
}
setOptions ( newOpts ) {
this . opts = { ... this . opts ,
... newOpts ,
restrictions : { ... this . opts . restrictions ,
... ( newOpts && newOpts . restrictions )
}
} ;
if ( newOpts . meta ) {
this . setMeta ( newOpts . meta ) ;
}
this . i18nInit ( ) ;
if ( newOpts . locale ) {
this . iteratePlugins ( plugin => {
plugin . setOptions ( ) ;
} ) ;
} // Note: this is not the preact `setState`, it's an internal function that has the same name.
this . setState ( ) ; // so that UI re-renders with new options
}
resetProgress ( ) {
const defaultProgress = {
percentage : 0 ,
bytesUploaded : 0 ,
uploadComplete : false ,
uploadStarted : null
} ;
const files = { ... this . getState ( ) . files
} ;
const updatedFiles = { } ;
Object . keys ( files ) . forEach ( fileID => {
const updatedFile = { ... files [ fileID ]
} ;
updatedFile . progress = { ... updatedFile . progress ,
... defaultProgress
} ;
updatedFiles [ fileID ] = updatedFile ;
} ) ;
this . setState ( {
files : updatedFiles ,
totalProgress : 0
} ) ;
this . emit ( 'reset-progress' ) ;
}
addPreProcessor ( fn ) {
_classPrivateFieldLooseBase ( this , _preProcessors ) [ _preProcessors ] . add ( fn ) ;
}
removePreProcessor ( fn ) {
return _classPrivateFieldLooseBase ( this , _preProcessors ) [ _preProcessors ] . delete ( fn ) ;
}
addPostProcessor ( fn ) {
_classPrivateFieldLooseBase ( this , _postProcessors ) [ _postProcessors ] . add ( fn ) ;
}
removePostProcessor ( fn ) {
return _classPrivateFieldLooseBase ( this , _postProcessors ) [ _postProcessors ] . delete ( fn ) ;
}
addUploader ( fn ) {
_classPrivateFieldLooseBase ( this , _uploaders ) [ _uploaders ] . add ( fn ) ;
}
removeUploader ( fn ) {
return _classPrivateFieldLooseBase ( this , _uploaders ) [ _uploaders ] . delete ( fn ) ;
}
setMeta ( data ) {
const updatedMeta = { ... this . getState ( ) . meta ,
... data
} ;
const updatedFiles = { ... this . getState ( ) . files
} ;
Object . keys ( updatedFiles ) . forEach ( fileID => {
updatedFiles [ fileID ] = { ... updatedFiles [ fileID ] ,
meta : { ... updatedFiles [ fileID ] . meta ,
... data
}
} ;
} ) ;
this . log ( 'Adding metadata:' ) ;
this . log ( data ) ;
this . setState ( {
meta : updatedMeta ,
files : updatedFiles
} ) ;
}
setFileMeta ( fileID , data ) {
const updatedFiles = { ... this . getState ( ) . files
} ;
if ( ! updatedFiles [ fileID ] ) {
this . log ( 'Was trying to set metadata for a file that has been removed: ' , fileID ) ;
return ;
}
const newMeta = { ... updatedFiles [ fileID ] . meta ,
... data
} ;
updatedFiles [ fileID ] = { ... updatedFiles [ fileID ] ,
meta : newMeta
} ;
this . setState ( {
files : updatedFiles
} ) ;
}
/ * *
* Get a file object .
*
* @ param { string } fileID The ID of the file object to return .
* /
getFile ( fileID ) {
return this . getState ( ) . files [ fileID ] ;
}
/ * *
* Get all files in an array .
* /
getFiles ( ) {
const {
files
} = this . getState ( ) ;
return Object . values ( files ) ;
}
getObjectOfFilesPerState ( ) {
const {
files : filesObject ,
totalProgress ,
error
} = this . getState ( ) ;
const files = Object . values ( filesObject ) ;
const inProgressFiles = files . filter ( _ref => {
let {
progress
} = _ref ;
return ! progress . uploadComplete && progress . uploadStarted ;
} ) ;
const newFiles = files . filter ( file => ! file . progress . uploadStarted ) ;
const startedFiles = files . filter ( file => file . progress . uploadStarted || file . progress . preprocess || file . progress . postprocess ) ;
const uploadStartedFiles = files . filter ( file => file . progress . uploadStarted ) ;
const pausedFiles = files . filter ( file => file . isPaused ) ;
const completeFiles = files . filter ( file => file . progress . uploadComplete ) ;
const erroredFiles = files . filter ( file => file . error ) ;
const inProgressNotPausedFiles = inProgressFiles . filter ( file => ! file . isPaused ) ;
const processingFiles = files . filter ( file => file . progress . preprocess || file . progress . postprocess ) ;
return {
newFiles ,
startedFiles ,
uploadStartedFiles ,
pausedFiles ,
completeFiles ,
erroredFiles ,
inProgressFiles ,
inProgressNotPausedFiles ,
processingFiles ,
isUploadStarted : uploadStartedFiles . length > 0 ,
isAllComplete : totalProgress === 100 && completeFiles . length === files . length && processingFiles . length === 0 ,
isAllErrored : ! ! error && erroredFiles . length === files . length ,
isAllPaused : inProgressFiles . length !== 0 && pausedFiles . length === inProgressFiles . length ,
isUploadInProgress : inProgressFiles . length > 0 ,
isSomeGhost : files . some ( file => file . isGhost )
} ;
}
/ *
* @ constructs
* @ param { Error } error
* @ param { undefined } file
* /
/ *
* @ constructs
* @ param { RestrictionError } error
* @ param { UppyFile | undefined } file
* /
validateRestrictions ( file , files ) {
if ( files === void 0 ) {
files = this . getFiles ( ) ;
}
// TODO: directly return the Restriction error in next major version.
// we create RestrictionError's just to discard immediately, which doesn't make sense.
try {
_classPrivateFieldLooseBase ( this , _restricter ) [ _restricter ] . validate ( file , files ) ;
return {
result : true
} ;
} catch ( err ) {
return {
result : false ,
reason : err . message
} ;
}
}
checkIfFileAlreadyExists ( fileID ) {
const {
files
} = this . getState ( ) ;
if ( files [ fileID ] && ! files [ fileID ] . isGhost ) {
return true ;
}
return false ;
}
/ * *
* Create a file state object based on user - provided ` addFile() ` options .
*
* Note this is extremely side - effectful and should only be done when a file state object
* will be added to state immediately afterward !
*
* The ` files ` value is passed in because it may be updated by the caller without updating the store .
* /
/ * *
* Add a new file to ` state.files ` . This will run ` onBeforeFileAdded ` ,
* try to guess file type in a clever way , check file against restrictions ,
* and start an upload if ` autoProceed === true ` .
*
* @ param { object } file object to add
* @ returns { string } id for the added file
* /
addFile ( file ) {
_classPrivateFieldLooseBase ( this , _assertNewUploadAllowed ) [ _assertNewUploadAllowed ] ( file ) ;
const {
files
} = this . getState ( ) ;
let newFile = _classPrivateFieldLooseBase ( this , _checkAndCreateFileStateObject ) [ _checkAndCreateFileStateObject ] ( files , file ) ; // Users are asked to re-select recovered files without data,
// and to keep the progress, meta and everthing else, we only replace said data
if ( files [ newFile . id ] && files [ newFile . id ] . isGhost ) {
newFile = { ... files [ newFile . id ] ,
data : file . data ,
isGhost : false
} ;
this . log ( ` Replaced the blob in the restored ghost file: ${ newFile . name } , ${ newFile . id } ` ) ;
}
this . setState ( {
files : { ... files ,
[ newFile . id ] : newFile
}
} ) ;
this . emit ( 'file-added' , newFile ) ;
this . emit ( 'files-added' , [ newFile ] ) ;
this . log ( ` Added file: ${ newFile . name } , ${ newFile . id } , mime type: ${ newFile . type } ` ) ;
_classPrivateFieldLooseBase ( this , _startIfAutoProceed ) [ _startIfAutoProceed ] ( ) ;
return newFile . id ;
}
/ * *
* Add multiple files to ` state.files ` . See the ` addFile() ` documentation .
*
* If an error occurs while adding a file , it is logged and the user is notified .
* This is good for UI plugins , but not for programmatic use .
* Programmatic users should usually still use ` addFile() ` on individual files .
* /
addFiles ( fileDescriptors ) {
_classPrivateFieldLooseBase ( this , _assertNewUploadAllowed ) [ _assertNewUploadAllowed ] ( ) ; // create a copy of the files object only once
const files = { ... this . getState ( ) . files
} ;
const newFiles = [ ] ;
const errors = [ ] ;
for ( let i = 0 ; i < fileDescriptors . length ; i ++ ) {
try {
let newFile = _classPrivateFieldLooseBase ( this , _checkAndCreateFileStateObject ) [ _checkAndCreateFileStateObject ] ( files , fileDescriptors [ i ] ) ; // Users are asked to re-select recovered files without data,
// and to keep the progress, meta and everthing else, we only replace said data
if ( files [ newFile . id ] && files [ newFile . id ] . isGhost ) {
newFile = { ... files [ newFile . id ] ,
data : fileDescriptors [ i ] . data ,
isGhost : false
} ;
this . log ( ` Replaced blob in a ghost file: ${ newFile . name } , ${ newFile . id } ` ) ;
}
files [ newFile . id ] = newFile ;
newFiles . push ( newFile ) ;
} catch ( err ) {
if ( ! err . isRestriction ) {
errors . push ( err ) ;
}
}
}
this . setState ( {
files
} ) ;
newFiles . forEach ( newFile => {
this . emit ( 'file-added' , newFile ) ;
} ) ;
this . emit ( 'files-added' , newFiles ) ;
if ( newFiles . length > 5 ) {
this . log ( ` Added batch of ${ newFiles . length } files ` ) ;
} else {
Object . keys ( newFiles ) . forEach ( fileID => {
this . log ( ` Added file: ${ newFiles [ fileID ] . name } \n id: ${ newFiles [ fileID ] . id } \n type: ${ newFiles [ fileID ] . type } ` ) ;
} ) ;
}
if ( newFiles . length > 0 ) {
_classPrivateFieldLooseBase ( this , _startIfAutoProceed ) [ _startIfAutoProceed ] ( ) ;
}
if ( errors . length > 0 ) {
let message = 'Multiple errors occurred while adding files:\n' ;
errors . forEach ( subError => {
message += ` \n * ${ subError . message } ` ;
} ) ;
this . info ( {
message : this . i18n ( 'addBulkFilesFailed' , {
smart _count : errors . length
} ) ,
details : message
} , 'error' , this . opts . infoTimeout ) ;
if ( typeof AggregateError === 'function' ) {
throw new AggregateError ( errors , message ) ;
} else {
const err = new Error ( message ) ;
err . errors = errors ;
throw err ;
}
}
}
removeFiles ( fileIDs , reason ) {
const {
files ,
currentUploads
} = this . getState ( ) ;
const updatedFiles = { ... files
} ;
const updatedUploads = { ... currentUploads
} ;
const removedFiles = Object . create ( null ) ;
fileIDs . forEach ( fileID => {
if ( files [ fileID ] ) {
removedFiles [ fileID ] = files [ fileID ] ;
delete updatedFiles [ fileID ] ;
}
} ) ; // Remove files from the `fileIDs` list in each upload.
function fileIsNotRemoved ( uploadFileID ) {
return removedFiles [ uploadFileID ] === undefined ;
}
Object . keys ( updatedUploads ) . forEach ( uploadID => {
const newFileIDs = currentUploads [ uploadID ] . fileIDs . filter ( fileIsNotRemoved ) ; // Remove the upload if no files are associated with it anymore.
if ( newFileIDs . length === 0 ) {
delete updatedUploads [ uploadID ] ;
return ;
}
updatedUploads [ uploadID ] = { ... currentUploads [ uploadID ] ,
fileIDs : newFileIDs
} ;
} ) ;
const stateUpdate = {
currentUploads : updatedUploads ,
files : updatedFiles
} ; // If all files were removed - allow new uploads,
// and clear recoveredState
if ( Object . keys ( updatedFiles ) . length === 0 ) {
stateUpdate . allowNewUpload = true ;
stateUpdate . error = null ;
stateUpdate . recoveredState = null ;
}
this . setState ( stateUpdate ) ;
this . calculateTotalProgress ( ) ;
const removedFileIDs = Object . keys ( removedFiles ) ;
removedFileIDs . forEach ( fileID => {
this . emit ( 'file-removed' , removedFiles [ fileID ] , reason ) ;
} ) ;
if ( removedFileIDs . length > 5 ) {
this . log ( ` Removed ${ removedFileIDs . length } files ` ) ;
} else {
this . log ( ` Removed files: ${ removedFileIDs . join ( ', ' ) } ` ) ;
}
}
removeFile ( fileID , reason ) {
if ( reason === void 0 ) {
reason = null ;
}
this . removeFiles ( [ fileID ] , reason ) ;
}
pauseResume ( fileID ) {
if ( ! this . getState ( ) . capabilities . resumableUploads || this . getFile ( fileID ) . uploadComplete ) {
return undefined ;
}
const wasPaused = this . getFile ( fileID ) . isPaused || false ;
const isPaused = ! wasPaused ;
this . setFileState ( fileID , {
isPaused
} ) ;
this . emit ( 'upload-pause' , fileID , isPaused ) ;
return isPaused ;
}
pauseAll ( ) {
const updatedFiles = { ... this . getState ( ) . files
} ;
const inProgressUpdatedFiles = Object . keys ( updatedFiles ) . filter ( file => {
return ! updatedFiles [ file ] . progress . uploadComplete && updatedFiles [ file ] . progress . uploadStarted ;
} ) ;
inProgressUpdatedFiles . forEach ( file => {
const updatedFile = { ... updatedFiles [ file ] ,
isPaused : true
} ;
updatedFiles [ file ] = updatedFile ;
} ) ;
this . setState ( {
files : updatedFiles
} ) ;
this . emit ( 'pause-all' ) ;
}
resumeAll ( ) {
const updatedFiles = { ... this . getState ( ) . files
} ;
const inProgressUpdatedFiles = Object . keys ( updatedFiles ) . filter ( file => {
return ! updatedFiles [ file ] . progress . uploadComplete && updatedFiles [ file ] . progress . uploadStarted ;
} ) ;
inProgressUpdatedFiles . forEach ( file => {
const updatedFile = { ... updatedFiles [ file ] ,
isPaused : false ,
error : null
} ;
updatedFiles [ file ] = updatedFile ;
} ) ;
this . setState ( {
files : updatedFiles
} ) ;
this . emit ( 'resume-all' ) ;
}
retryAll ( ) {
const updatedFiles = { ... this . getState ( ) . files
} ;
const filesToRetry = Object . keys ( updatedFiles ) . filter ( file => {
return updatedFiles [ file ] . error ;
} ) ;
filesToRetry . forEach ( file => {
const updatedFile = { ... updatedFiles [ file ] ,
isPaused : false ,
error : null
} ;
updatedFiles [ file ] = updatedFile ;
} ) ;
this . setState ( {
files : updatedFiles ,
error : null
} ) ;
this . emit ( 'retry-all' , filesToRetry ) ;
if ( filesToRetry . length === 0 ) {
return Promise . resolve ( {
successful : [ ] ,
failed : [ ]
} ) ;
}
const uploadID = _classPrivateFieldLooseBase ( this , _createUpload ) [ _createUpload ] ( filesToRetry , {
forceAllowNewUpload : true // create new upload even if allowNewUpload: false
} ) ;
return _classPrivateFieldLooseBase ( this , _runUpload ) [ _runUpload ] ( uploadID ) ;
}
cancelAll ( ) {
this . emit ( 'cancel-all' ) ;
const {
files
} = this . getState ( ) ;
const fileIDs = Object . keys ( files ) ;
if ( fileIDs . length ) {
this . removeFiles ( fileIDs , 'cancel-all' ) ;
}
this . setState ( {
totalProgress : 0 ,
error : null ,
recoveredState : null
} ) ;
}
retryUpload ( fileID ) {
this . setFileState ( fileID , {
error : null ,
isPaused : false
} ) ;
this . emit ( 'upload-retry' , fileID ) ;
const uploadID = _classPrivateFieldLooseBase ( this , _createUpload ) [ _createUpload ] ( [ fileID ] , {
forceAllowNewUpload : true // create new upload even if allowNewUpload: false
} ) ;
return _classPrivateFieldLooseBase ( this , _runUpload ) [ _runUpload ] ( uploadID ) ;
}
reset ( ) {
this . cancelAll ( ) ;
}
logout ( ) {
this . iteratePlugins ( plugin => {
if ( plugin . provider && plugin . provider . logout ) {
plugin . provider . logout ( ) ;
}
} ) ;
}
calculateProgress ( file , data ) {
if ( ! this . getFile ( file . id ) ) {
this . log ( ` Not setting progress for a file that has been removed: ${ file . id } ` ) ;
return ;
} // bytesTotal may be null or zero; in that case we can't divide by it
const canHavePercentage = Number . isFinite ( data . bytesTotal ) && data . bytesTotal > 0 ;
this . setFileState ( file . id , {
progress : { ... this . getFile ( file . id ) . progress ,
bytesUploaded : data . bytesUploaded ,
bytesTotal : data . bytesTotal ,
percentage : canHavePercentage ? Math . round ( data . bytesUploaded / data . bytesTotal * 100 ) : 0
}
} ) ;
this . calculateTotalProgress ( ) ;
}
calculateTotalProgress ( ) {
// calculate total progress, using the number of files currently uploading,
// multiplied by 100 and the summ of individual progress of each file
const files = this . getFiles ( ) ;
const inProgress = files . filter ( file => {
return file . progress . uploadStarted || file . progress . preprocess || file . progress . postprocess ;
} ) ;
if ( inProgress . length === 0 ) {
this . emit ( 'progress' , 0 ) ;
this . setState ( {
totalProgress : 0
} ) ;
return ;
}
const sizedFiles = inProgress . filter ( file => file . progress . bytesTotal != null ) ;
const unsizedFiles = inProgress . filter ( file => file . progress . bytesTotal == null ) ;
if ( sizedFiles . length === 0 ) {
const progressMax = inProgress . length * 100 ;
const currentProgress = unsizedFiles . reduce ( ( acc , file ) => {
return acc + file . progress . percentage ;
} , 0 ) ;
const totalProgress = Math . round ( currentProgress / progressMax * 100 ) ;
this . setState ( {
totalProgress
} ) ;
return ;
}
let totalSize = sizedFiles . reduce ( ( acc , file ) => {
return acc + file . progress . bytesTotal ;
} , 0 ) ;
const averageSize = totalSize / sizedFiles . length ;
totalSize += averageSize * unsizedFiles . length ;
let uploadedSize = 0 ;
sizedFiles . forEach ( file => {
uploadedSize += file . progress . bytesUploaded ;
} ) ;
unsizedFiles . forEach ( file => {
uploadedSize += averageSize * ( file . progress . percentage || 0 ) / 100 ;
} ) ;
let totalProgress = totalSize === 0 ? 0 : Math . round ( uploadedSize / totalSize * 100 ) ; // hot fix, because:
// uploadedSize ended up larger than totalSize, resulting in 1325% total
if ( totalProgress > 100 ) {
totalProgress = 100 ;
}
this . setState ( {
totalProgress
} ) ;
this . emit ( 'progress' , totalProgress ) ;
}
/ * *
* Registers listeners for all global actions , like :
* ` error ` , ` file-removed ` , ` upload-progress `
* /
updateOnlineStatus ( ) {
const online = typeof window . navigator . onLine !== 'undefined' ? window . navigator . onLine : true ;
if ( ! online ) {
this . emit ( 'is-offline' ) ;
this . info ( this . i18n ( 'noInternetConnection' ) , 'error' , 0 ) ;
this . wasOffline = true ;
} else {
this . emit ( 'is-online' ) ;
if ( this . wasOffline ) {
this . emit ( 'back-online' ) ;
this . info ( this . i18n ( 'connectedToInternet' ) , 'success' , 3000 ) ;
this . wasOffline = false ;
}
}
}
getID ( ) {
return this . opts . id ;
}
/ * *
* Registers a plugin with Core .
*
* @ param { object } Plugin object
* @ param { object } [ opts ] object with options to be passed to Plugin
* @ returns { object } self for chaining
* /
// eslint-disable-next-line no-shadow
use ( Plugin , opts ) {
if ( typeof Plugin !== 'function' ) {
const msg = ` Expected a plugin class, but got ${ Plugin === null ? 'null' : typeof Plugin } . ` + ' Please verify that the plugin was imported and spelled correctly.' ;
throw new TypeError ( msg ) ;
} // Instantiate
const plugin = new Plugin ( this , opts ) ;
const pluginId = plugin . id ;
if ( ! pluginId ) {
throw new Error ( 'Your plugin must have an id' ) ;
}
if ( ! plugin . type ) {
throw new Error ( 'Your plugin must have a type' ) ;
}
const existsPluginAlready = this . getPlugin ( pluginId ) ;
if ( existsPluginAlready ) {
const msg = ` Already found a plugin named ' ${ existsPluginAlready . id } '. ` + ` Tried to use: ' ${ pluginId } '. \n ` + 'Uppy plugins must have unique `id` options. See https://uppy.io/docs/plugins/#id.' ;
throw new Error ( msg ) ;
}
if ( Plugin . VERSION ) {
this . log ( ` Using ${ pluginId } v ${ Plugin . VERSION } ` ) ;
}
if ( plugin . type in _classPrivateFieldLooseBase ( this , _plugins ) [ _plugins ] ) {
_classPrivateFieldLooseBase ( this , _plugins ) [ _plugins ] [ plugin . type ] . push ( plugin ) ;
} else {
_classPrivateFieldLooseBase ( this , _plugins ) [ _plugins ] [ plugin . type ] = [ plugin ] ;
}
plugin . install ( ) ;
return this ;
}
/ * *
* Find one Plugin by name .
*
* @ param { string } id plugin id
* @ returns { BasePlugin | undefined }
* /
getPlugin ( id ) {
for ( const plugins of Object . values ( _classPrivateFieldLooseBase ( this , _plugins ) [ _plugins ] ) ) {
const foundPlugin = plugins . find ( plugin => plugin . id === id ) ;
if ( foundPlugin != null ) { return foundPlugin ; }
}
return undefined ;
}
// Accessor keyed by a well-known Symbol: returns the array of installed
// plugins of the given `type` (e.g. 'uploader'). The Symbol key presumably
// keeps this internal to other Uppy packages — confirm against the Symbol
// definition earlier in the file.
[ _Symbol$for ] ( type ) {
return _classPrivateFieldLooseBase ( this , _plugins ) [ _plugins ] [ type ] ;
}
/ * *
* Iterate through all ` use ` d plugins .
*
* @ param { Function } method that will be run on each plugin
* /
iteratePlugins ( method ) {
Object . values ( _classPrivateFieldLooseBase ( this , _plugins ) [ _plugins ] ) . flat ( 1 ) . forEach ( method ) ;
}
/ * *
* Uninstall and remove a plugin .
*
* @ param { object } instance The plugin instance to remove .
* /
removePlugin ( instance ) {
this . log ( ` Removing plugin ${ instance . id } ` ) ;
this . emit ( 'plugin-remove' , instance ) ;
if ( instance . uninstall ) {
instance . uninstall ( ) ;
}
const list = _classPrivateFieldLooseBase ( this , _plugins ) [ _plugins ] [ instance . type ] ; // list.indexOf failed here, because Vue3 converted the plugin instance
// to a Proxy object, which failed the strict comparison test:
// obj !== objProxy
const index = list . findIndex ( item => item . id === instance . id ) ;
if ( index !== - 1 ) {
list . splice ( index , 1 ) ;
}
const state = this . getState ( ) ;
const updatedState = {
plugins : { ... state . plugins ,
[ instance . id ] : undefined
}
} ;
this . setState ( updatedState ) ;
}
/ * *
* Uninstall all plugins and close down this Uppy instance .
* /
close ( ) {
this . log ( ` Closing Uppy instance ${ this . opts . id } : removing all files and uninstalling plugins ` ) ;
this . reset ( ) ;
_classPrivateFieldLooseBase ( this , _storeUnsubscribe ) [ _storeUnsubscribe ] ( ) ;
this . iteratePlugins ( plugin => {
this . removePlugin ( plugin ) ;
} ) ;
if ( typeof window !== 'undefined' && window . removeEventListener ) {
window . removeEventListener ( 'online' , _classPrivateFieldLooseBase ( this , _updateOnlineStatus ) [ _updateOnlineStatus ] ) ;
window . removeEventListener ( 'offline' , _classPrivateFieldLooseBase ( this , _updateOnlineStatus ) [ _updateOnlineStatus ] ) ;
}
}
hideInfo ( ) {
const {
info
} = this . getState ( ) ;
this . setState ( {
info : info . slice ( 1 )
} ) ;
this . emit ( 'info-hidden' ) ;
}
/ * *
* Set info message in ` state.info ` , so that UI plugins like ` Informer `
* can display the message .
*
* @ param { string | object } message Message to be displayed by the informer
* @ param { string } [ type ]
* @ param { number } [ duration ]
* /
info ( message , type , duration ) {
if ( type === void 0 ) {
type = 'info' ;
}
if ( duration === void 0 ) {
duration = 3000 ;
}
const isComplexMessage = typeof message === 'object' ;
this . setState ( {
info : [ ... this . getState ( ) . info , {
type ,
message : isComplexMessage ? message . message : message ,
details : isComplexMessage ? message . details : null
} ]
} ) ;
setTimeout ( ( ) => this . hideInfo ( ) , duration ) ;
this . emit ( 'info-visible' ) ;
}
/ * *
* Passes messages to a function , provided in ` opts.logger ` .
* If ` opts.logger: Uppy.debugLogger ` or ` opts.debug: true ` , logs to the browser console .
*
* @ param { string | object } message to log
* @ param { string } [ type ] optional ` error ` or ` warning `
* /
log ( message , type ) {
const {
logger
} = this . opts ;
switch ( type ) {
case 'error' :
logger . error ( message ) ;
break ;
case 'warning' :
logger . warn ( message ) ;
break ;
default :
logger . debug ( message ) ;
break ;
}
}
/ * *
* Restore an upload by its ID .
* /
restore ( uploadID ) {
this . log ( ` Core: attempting to restore upload " ${ uploadID } " ` ) ;
if ( ! this . getState ( ) . currentUploads [ uploadID ] ) {
_classPrivateFieldLooseBase ( this , _removeUpload ) [ _removeUpload ] ( uploadID ) ;
return Promise . reject ( new Error ( 'Nonexistent upload' ) ) ;
}
return _classPrivateFieldLooseBase ( this , _runUpload ) [ _runUpload ] ( uploadID ) ;
}
/ * *
* Create an upload for a bunch of files .
*
* @ param { Array < string > } fileIDs File IDs to include in this upload .
* @ returns { string } ID of this upload .
* /
[ _Symbol$for2 ] ( ) {
return _classPrivateFieldLooseBase ( this , _createUpload ) [ _createUpload ] ( ... arguments ) ;
}
/ * *
* Add data to an upload ' s result object .
*
* @ param { string } uploadID The ID of the upload .
* @ param { object } data Data properties to add to the result object .
* /
addResultData ( uploadID , data ) {
if ( ! _classPrivateFieldLooseBase ( this , _getUpload ) [ _getUpload ] ( uploadID ) ) {
this . log ( ` Not setting result for an upload that has been removed: ${ uploadID } ` ) ;
return ;
}
const {
currentUploads
} = this . getState ( ) ;
const currentUpload = { ... currentUploads [ uploadID ] ,
result : { ... currentUploads [ uploadID ] . result ,
... data
}
} ;
this . setState ( {
currentUploads : { ... currentUploads ,
[ uploadID ] : currentUpload
}
} ) ;
}
/ * *
* Remove an upload , eg . if it has been canceled or completed .
*
* @ param { string } uploadID The ID of the upload .
* /
/ * *
* Start an upload for all the files that are not currently being uploaded .
*
* @ returns { Promise }
* /
upload ( ) {
let _classPrivateFieldLoo ;
// Warn (but don't fail) when no uploader-type plugin has been `use`d.
if ( ! ( ( _classPrivateFieldLoo = _classPrivateFieldLooseBase ( this , _plugins ) [ _plugins ] . uploader ) != null && _classPrivateFieldLoo . length ) ) {
this . log ( 'No uploader type plugins are used' , 'warning' ) ;
}
let {
files
} = this . getState ( ) ;
// `onBeforeUpload` may veto the upload (by returning false) or return a
// replacement files object.
const onBeforeUploadResult = this . opts . onBeforeUpload ( files ) ;
if ( onBeforeUploadResult === false ) {
return Promise . reject ( new Error ( 'Not starting the upload because onBeforeUpload returned false' ) ) ;
}
if ( onBeforeUploadResult && typeof onBeforeUploadResult === 'object' ) {
files = onBeforeUploadResult ; // Updating files in state, because uploader plugins receive file IDs,
// and then fetch the actual file object from state
this . setState ( {
files
} ) ;
}
// Validate, then assemble the waiting files into one upload and run it.
return Promise . resolve ( ) . then ( ( ) => _classPrivateFieldLooseBase ( this , _restricter ) [ _restricter ] . validateMinNumberOfFiles ( files ) ) . catch ( err => {
// Surface the restriction error to the user, then re-throw.
_classPrivateFieldLooseBase ( this , _informAndEmit ) [ _informAndEmit ] ( err ) ;
throw err ;
} ) . then ( ( ) => {
if ( ! _classPrivateFieldLooseBase ( this , _checkRequiredMetaFields ) [ _checkRequiredMetaFields ] ( files ) ) {
throw new RestrictionError ( this . i18n ( 'missingRequiredMetaField' ) ) ;
}
} ) . catch ( err => {
// Doing this in a separate catch because we already emited and logged
// all the errors in `checkRequiredMetaFields` so we only throw a generic
// missing fields error here.
throw err ;
} ) . then ( ( ) => {
const {
currentUploads
} = this . getState ( ) ; // get a list of files that are currently assigned to uploads
const currentlyUploadingFiles = Object . values ( currentUploads ) . flatMap ( curr => curr . fileIDs ) ;
const waitingFileIDs = [ ] ;
Object . keys ( files ) . forEach ( fileID => {
const file = this . getFile ( fileID ) ; // if the file hasn't started uploading and hasn't already been assigned to an upload..
if ( ! file . progress . uploadStarted && currentlyUploadingFiles . indexOf ( fileID ) === - 1 ) {
waitingFileIDs . push ( file . id ) ;
}
} ) ;
// One new upload containing every waiting file.
const uploadID = _classPrivateFieldLooseBase ( this , _createUpload ) [ _createUpload ] ( waitingFileIDs ) ;
return _classPrivateFieldLooseBase ( this , _runUpload ) [ _runUpload ] ( uploadID ) ;
} ) . catch ( err => {
// Any failure along the chain is emitted, logged, and re-thrown to the caller.
this . emit ( 'error' , err ) ;
this . log ( err , 'error' ) ;
throw err ;
} ) ;
}
}
/**
 * Show `error` to the user via the informer and log it. Emits
 * `restriction-failed` for restriction errors, `error` otherwise.
 * (Hoisted private method — called with the Uppy instance as `this`.)
 *
 * @param {Error} error the error to surface
 * @param {object} [file] file the error relates to, if any
 */
function _informAndEmit2 (error, file) {
  const { message, details = '' } = error;
  if (error.isRestriction) {
    this.emit('restriction-failed', file, error);
  } else {
    this.emit('error', error);
  }
  this.info({ message, details }, 'error', this.opts.infoTimeout);
  this.log(`${message} ${details}`.trim(), 'error');
}
/**
 * Validate one file's required meta fields. On failure, records the
 * missing field names on the file state, logs, and emits
 * `restriction-failed`. (Called with the Uppy instance as `this`.)
 *
 * @param {object} file the file to validate
 * @returns {boolean} true when no required fields are missing
 */
function _checkRequiredMetaFieldsOnFile2 (file) {
  const restricter = _classPrivateFieldLooseBase(this, _restricter)[_restricter];
  const { missingFields, error } = restricter.getMissingRequiredMetaFields(file);
  if (missingFields.length === 0) {
    return true;
  }
  this.setFileState(file.id, { missingRequiredMetaFields: missingFields });
  this.log(error.message);
  this.emit('restriction-failed', file, error);
  return false;
}
/**
 * Validate required meta fields on every file without short-circuiting,
 * so each invalid file gets flagged. (Called with the Uppy instance as `this`.)
 *
 * @param {object} files map of files to validate
 * @returns {boolean} true only when every file passed
 */
function _checkRequiredMetaFields2 (files) {
  let success = true;
  Object.values(files).forEach((file) => {
    if (!_classPrivateFieldLooseBase(this, _checkRequiredMetaFieldsOnFile)[_checkRequiredMetaFieldsOnFile](file)) {
      success = false;
    }
  });
  return success;
}
/**
 * Throw (and inform the user) when `allowNewUpload` is false, i.e. new
 * files may not currently be added. (Called with the Uppy instance as `this`.)
 *
 * @param {object} [file] the file that was being added, passed along in the event
 * @throws {RestrictionError} when new uploads are not allowed
 */
function _assertNewUploadAllowed2 (file) {
  const { allowNewUpload } = this.getState();
  if (allowNewUpload !== false) {
    return;
  }
  const error = new RestrictionError(this.i18n('noMoreFilesAllowed'));
  _classPrivateFieldLooseBase(this, _informAndEmit)[_informAndEmit](error, file);
  throw error;
}
/**
 * Create a file state object from a user-provided `addFile()` descriptor.
 *
 * Note: this is extremely side-effectful (informs the user, emits events,
 * and mutates `fileDescriptor.meta`) and should only be called when the
 * resulting object will be added to state immediately afterward.
 * (Hoisted private method — called with the Uppy instance as `this`.)
 *
 * @param { object } files current files map (passed in because the caller may mutate it without updating the store)
 * @param { object } fileDescriptor user-provided file descriptor
 * @returns { object } the new file state object
 * @throws { RestrictionError } on duplicates, restriction failures, or an `onBeforeFileAdded` veto
 */
function _checkAndCreateFileStateObject2 ( files , fileDescriptor ) {
const fileType = getFileType ( fileDescriptor ) ;
const fileName = getFileName ( fileType , fileDescriptor ) ;
const fileExtension = getFileNameAndExtension ( fileName ) . extension ;
const isRemote = Boolean ( fileDescriptor . isRemote ) ;
// The ID is derived from the descriptor plus the detected type, so the
// same file added again produces the same ID.
const fileID = generateFileID ( { ... fileDescriptor ,
type : fileType
} ) ;
if ( this . checkIfFileAlreadyExists ( fileID ) ) {
const error = new RestrictionError ( this . i18n ( 'noDuplicates' , {
fileName
} ) ) ;
_classPrivateFieldLooseBase ( this , _informAndEmit ) [ _informAndEmit ] ( error , fileDescriptor ) ;
throw error ;
}
// NOTE(review): these assignments mutate the caller's `meta` object in
// place — presumably intentional, but confirm before relying on
// `fileDescriptor.meta` being unchanged after this call.
const meta = fileDescriptor . meta || { } ;
meta . name = fileName ;
meta . type = fileType ; // `null` means the size is unknown.
const size = Number . isFinite ( fileDescriptor . data . size ) ? fileDescriptor . data . size : null ;
let newFile = {
source : fileDescriptor . source || '' ,
id : fileID ,
name : fileName ,
extension : fileExtension || '' ,
meta : { ... this . getState ( ) . meta ,
... meta
} ,
type : fileType ,
data : fileDescriptor . data ,
progress : {
percentage : 0 ,
bytesUploaded : 0 ,
bytesTotal : size ,
uploadComplete : false ,
uploadStarted : null
} ,
size ,
isRemote ,
remote : fileDescriptor . remote || '' ,
preview : fileDescriptor . preview
} ;
// `onBeforeFileAdded` may veto the file (false) or replace the state object.
const onBeforeFileAddedResult = this . opts . onBeforeFileAdded ( newFile , files ) ;
if ( onBeforeFileAddedResult === false ) {
// Don't show UI info for this error, as it should be done by the developer
const error = new RestrictionError ( 'Cannot add the file because onBeforeFileAdded returned false.' ) ;
this . emit ( 'restriction-failed' , fileDescriptor , error ) ;
throw error ;
} else if ( typeof onBeforeFileAddedResult === 'object' && onBeforeFileAddedResult !== null ) {
newFile = onBeforeFileAddedResult ;
}
try {
const filesArray = Object . keys ( files ) . map ( i => files [ i ] ) ;
_classPrivateFieldLooseBase ( this , _restricter ) [ _restricter ] . validate ( newFile , filesArray ) ;
} catch ( err ) {
_classPrivateFieldLooseBase ( this , _informAndEmit ) [ _informAndEmit ] ( err , newFile ) ;
throw err ;
}
return newFile ;
}
/**
 * Kick off an upload automatically when `opts.autoProceed` is enabled.
 * At most one auto-upload is scheduled at a time.
 */
function _startIfAutoProceed2() {
  // Bail out unless autoProceed is on and nothing is already scheduled.
  if (!this.opts.autoProceed || this.scheduledAutoProceed) {
    return;
  }
  // 4 ms delay — presumably so files added in quick succession share one
  // upload batch; confirm against callers if changing.
  this.scheduledAutoProceed = setTimeout(() => {
    this.scheduledAutoProceed = null;
    this.upload().catch(err => {
      // Restriction errors have already been surfaced to the user elsewhere.
      if (err.isRestriction) {
        return;
      }
      this.log(err.stack || err.message || err);
    });
  }, 4);
}
// Wire up all of Uppy's internal event handlers. Handlers translate emitter
// events ('upload-*', 'preprocess-*', 'postprocess-*', …) into state updates.
function _addListeners2() {
  /**
   * @param {Error} error
   * @param {object} [file]
   * @param {object} [response]
   */
  const errorHandler = (error, file, response) => {
    let errorMsg = error.message || 'Unknown error';
    if (error.details) {
      errorMsg += ` ${error.details}`;
    }
    // Record the error globally…
    this.setState({
      error: errorMsg
    });
    // …and on the individual file, when the file is still in state.
    if (file != null && file.id in this.getState().files) {
      this.setFileState(file.id, {
        error: errorMsg,
        response
      });
    }
  };
  this.on('error', errorHandler);
  this.on('upload-error', (file, error, response) => {
    errorHandler(error, file, response);
    if (typeof error === 'object' && error.message) {
      // Re-wrap: keep the original server message in `details` and use a
      // localized "failed to upload" text as the user-facing message.
      const newError = new Error(error.message);
      newError.details = error.message;
      if (error.details) {
        newError.details += ` ${error.details}`;
      }
      newError.message = this.i18n('failedToUpload', {
        file: file.name
      });
      _classPrivateFieldLooseBase(this, _informAndEmit)[_informAndEmit](newError);
    } else {
      _classPrivateFieldLooseBase(this, _informAndEmit)[_informAndEmit](error);
    }
  });
  // A new upload clears any previous global error.
  this.on('upload', () => {
    this.setState({
      error: null
    });
  });
  this.on('upload-started', file => {
    if (!this.getFile(file.id)) {
      this.log(`Not setting progress for a file that has been removed: ${file.id}`);
      return;
    }
    // Reset the file's progress for the new attempt.
    this.setFileState(file.id, {
      progress: {
        uploadStarted: Date.now(),
        uploadComplete: false,
        percentage: 0,
        bytesUploaded: 0,
        bytesTotal: file.size
      }
    });
  });
  this.on('upload-progress', this.calculateProgress);
  this.on('upload-success', (file, uploadResp) => {
    if (!this.getFile(file.id)) {
      this.log(`Not setting progress for a file that has been removed: ${file.id}`);
      return;
    }
    const currentProgress = this.getFile(file.id).progress;
    this.setFileState(file.id, {
      progress: { ...currentProgress,
        // When postprocessors are registered, pre-seed an indeterminate
        // postprocess entry so the UI does not flash a "completed" state
        // before postprocessing events arrive (see note in #runUpload).
        postprocess: _classPrivateFieldLooseBase(this, _postProcessors)[_postProcessors].size > 0 ? {
          mode: 'indeterminate'
        } : null,
        uploadComplete: true,
        percentage: 100,
        bytesUploaded: currentProgress.bytesTotal
      },
      response: uploadResp,
      uploadURL: uploadResp.uploadURL,
      isPaused: false
    }); // Remote providers sometimes don't tell us the file size,
    // but we can know how many bytes we uploaded once the upload is complete.
    if (file.size == null) {
      this.setFileState(file.id, {
        size: uploadResp.bytesUploaded || currentProgress.bytesTotal
      });
    }
    this.calculateTotalProgress();
  });
  this.on('preprocess-progress', (file, progress) => {
    if (!this.getFile(file.id)) {
      this.log(`Not setting progress for a file that has been removed: ${file.id}`);
      return;
    }
    this.setFileState(file.id, {
      progress: { ...this.getFile(file.id).progress,
        preprocess: progress
      }
    });
  });
  this.on('preprocess-complete', file => {
    if (!this.getFile(file.id)) {
      this.log(`Not setting progress for a file that has been removed: ${file.id}`);
      return;
    }
    // Clone file and progress objects before deleting the `preprocess` key,
    // so the previous state object is not mutated.
    const files = { ...this.getState().files
    };
    files[file.id] = { ...files[file.id],
      progress: { ...files[file.id].progress
      }
    };
    delete files[file.id].progress.preprocess;
    this.setState({
      files
    });
  });
  this.on('postprocess-progress', (file, progress) => {
    if (!this.getFile(file.id)) {
      this.log(`Not setting progress for a file that has been removed: ${file.id}`);
      return;
    }
    this.setFileState(file.id, {
      progress: { ...this.getState().files[file.id].progress,
        postprocess: progress
      }
    });
  });
  this.on('postprocess-complete', file => {
    if (!this.getFile(file.id)) {
      this.log(`Not setting progress for a file that has been removed: ${file.id}`);
      return;
    }
    // Same copy-then-delete dance as 'preprocess-complete' above.
    const files = { ...this.getState().files
    };
    files[file.id] = { ...files[file.id],
      progress: { ...files[file.id].progress
      }
    };
    delete files[file.id].progress.postprocess;
    this.setState({
      files
    });
  });
  this.on('restored', () => {
    // Files may have changed--ensure progress is still accurate.
    this.calculateTotalProgress();
  });
  this.on('dashboard:file-edit-complete', file => {
    if (file) {
      _classPrivateFieldLooseBase(this, _checkRequiredMetaFieldsOnFile)[_checkRequiredMetaFieldsOnFile](file);
    }
  }); // show informer if offline
  if (typeof window !== 'undefined' && window.addEventListener) {
    window.addEventListener('online', _classPrivateFieldLooseBase(this, _updateOnlineStatus)[_updateOnlineStatus]);
    window.addEventListener('offline', _classPrivateFieldLooseBase(this, _updateOnlineStatus)[_updateOnlineStatus]);
    setTimeout(_classPrivateFieldLooseBase(this, _updateOnlineStatus)[_updateOnlineStatus], 3000);
  }
}
/**
 * Register a new upload batch in state and emit the 'upload' event.
 * @param {string[]} fileIDs - ids of the files in this batch
 * @param {object} [opts]
 * @param {boolean} [opts.forceAllowNewUpload=false] - bypass `allowNewUpload`
 * @returns {string} the generated upload id
 * @throws {Error} when a new upload is not allowed and not forced
 */
function _createUpload2(fileIDs, opts = {}) {
  // uppy.retryAll sets this to true — when retrying we want to ignore `allowNewUpload: false`
  const { forceAllowNewUpload = false } = opts;
  const { allowNewUpload, currentUploads } = this.getState();
  if (!(allowNewUpload || forceAllowNewUpload)) {
    throw new Error('Cannot create a new upload: already uploading.');
  }
  const uploadID = nanoid();
  this.emit('upload', {
    id: uploadID,
    fileIDs
  });
  // Further batches are only allowed when neither multi-batch option is
  // explicitly disabled.
  const multipleAllowed = this.opts.allowMultipleUploadBatches !== false && this.opts.allowMultipleUploads !== false;
  this.setState({
    allowNewUpload: multipleAllowed,
    currentUploads: {
      ...currentUploads,
      [uploadID]: {
        fileIDs,
        step: 0,
        result: {}
      }
    }
  });
  return uploadID;
}
/**
 * Look up an in-flight upload batch by its id.
 * @param {string} uploadID
 * @returns {object|undefined} the batch record, if it still exists
 */
function _getUpload2(uploadID) {
  return this.getState().currentUploads[uploadID];
}
/**
 * Remove a finished or cancelled upload batch from state.
 * State stays immutable: a new `currentUploads` map is built without the
 * removed entry rather than deleting from the existing object.
 * @param {string} uploadID
 */
function _removeUpload2(uploadID) {
  const { [uploadID]: removed, ...currentUploads } = this.getState().currentUploads;
  this.setState({
    currentUploads
  });
}
async function _runUpload2 ( uploadID ) {
let {
currentUploads
} = this . getState ( ) ;
let currentUpload = currentUploads [ uploadID ] ;
const restoreStep = currentUpload . step || 0 ;
const steps = [ ... _classPrivateFieldLooseBase ( this , _preProcessors ) [ _preProcessors ] , ... _classPrivateFieldLooseBase ( this , _uploaders ) [ _uploaders ] , ... _classPrivateFieldLooseBase ( this , _postProcessors ) [ _postProcessors ] ] ;
try {
for ( let step = restoreStep ; step < steps . length ; step ++ ) {
if ( ! currentUpload ) {
break ;
}
const fn = steps [ step ] ;
const updatedUpload = { ... currentUpload ,
step
} ;
this . setState ( {
currentUploads : { ... currentUploads ,
[ uploadID ] : updatedUpload
}
} ) ; // TODO give this the `updatedUpload` object as its only parameter maybe?
// Otherwise when more metadata may be added to the upload this would keep getting more parameters
await fn ( updatedUpload . fileIDs , uploadID ) ; // Update currentUpload value in case it was modified asynchronously.
currentUploads = this . getState ( ) . currentUploads ;
currentUpload = currentUploads [ uploadID ] ;
}
} catch ( err ) {
_classPrivateFieldLooseBase ( this , _removeUpload ) [ _removeUpload ] ( uploadID ) ;
throw err ;
} // Set result data.
if ( currentUpload ) {
// Mark postprocessing step as complete if necessary; this addresses a case where we might get
// stuck in the postprocessing UI while the upload is fully complete.
// If the postprocessing steps do not do any work, they may not emit postprocessing events at
// all, and never mark the postprocessing as complete. This is fine on its own but we
// introduced code in the @uppy/core upload-success handler to prepare postprocessing progress
// state if any postprocessors are registered. That is to avoid a "flash of completed state"
// before the postprocessing plugins can emit events.
//
// So, just in case an upload with postprocessing plugins *has* completed *without* emitting
// postprocessing completion, we do it instead.
currentUpload . fileIDs . forEach ( fileID => {
const file = this . getFile ( fileID ) ;
if ( file && file . progress . postprocess ) {
this . emit ( 'postprocess-complete' , file ) ;
}
} ) ;
const files = currentUpload . fileIDs . map ( fileID => this . getFile ( fileID ) ) ;
const successful = files . filter ( file => ! file . error ) ;
const failed = files . filter ( file => file . error ) ;
await this . addResultData ( uploadID , {
successful ,
failed ,
uploadID
} ) ; // Update currentUpload value in case it was modified asynchronously.
currentUploads = this . getState ( ) . currentUploads ;
currentUpload = currentUploads [ uploadID ] ;
} // Emit completion events.
// This is in a separate function so that the `currentUploads` variable
// always refers to the latest state. In the handler right above it refers
// to an outdated object without the `.result` property.
let result ;
if ( currentUpload ) {
result = currentUpload . result ;
this . emit ( 'complete' , result ) ;
_classPrivateFieldLooseBase ( this , _removeUpload ) [ _removeUpload ] ( uploadID ) ;
}
if ( result == null ) {
this . log ( ` Not setting result for an upload that has been removed: ${ uploadID } ` ) ;
}
return result ;
}
Uppy . VERSION = "2.1.6" ;
module . exports = Uppy ;
} , { "./Restricter" : 16 , "./getFileName" : 19 , "./locale" : 21 , "./loggers" : 22 , "./supportsUploadProgress" : 23 , "@uppy/store-default" : 25 , "@uppy/utils/lib/Translator" : 31 , "@uppy/utils/lib/generateFileID" : 36 , "@uppy/utils/lib/getFileNameAndExtension" : 42 , "@uppy/utils/lib/getFileType" : 43 , "lodash.throttle" : 54 , "namespace-emitter" : 56 , "nanoid/non-secure" : 57 } ] , 19 : [ function ( require , module , exports ) {
module . exports = function getFileName ( fileType , fileDescriptor ) {
if ( fileDescriptor . name ) {
return fileDescriptor . name ;
}
if ( fileType . split ( '/' ) [ 0 ] === 'image' ) {
return ` ${ fileType . split ( '/' ) [ 0 ] } . ${ fileType . split ( '/' ) [ 1 ] } ` ;
}
return 'noname' ;
} ;
} , { } ] , 20 : [ function ( require , module , exports ) {
'use strict' ;
const Uppy = require ( './Uppy' ) ;
const UIPlugin = require ( './UIPlugin' ) ;
const BasePlugin = require ( './BasePlugin' ) ;
const {
debugLogger
} = require ( './loggers' ) ;
module . exports = Uppy ;
module . exports . Uppy = Uppy ;
module . exports . UIPlugin = UIPlugin ;
module . exports . BasePlugin = BasePlugin ;
module . exports . debugLogger = debugLogger ;
} , { "./BasePlugin" : 15 , "./UIPlugin" : 17 , "./Uppy" : 18 , "./loggers" : 22 } ] , 21 : [ function ( require , module , exports ) {
module . exports = {
strings : {
addBulkFilesFailed : {
0 : 'Failed to add %{smart_count} file due to an internal error' ,
1 : 'Failed to add %{smart_count} files due to internal errors'
} ,
youCanOnlyUploadX : {
0 : 'You can only upload %{smart_count} file' ,
1 : 'You can only upload %{smart_count} files'
} ,
youHaveToAtLeastSelectX : {
0 : 'You have to select at least %{smart_count} file' ,
1 : 'You have to select at least %{smart_count} files'
} ,
exceedsSize : '%{file} exceeds maximum allowed size of %{size}' ,
missingRequiredMetaField : 'Missing required meta fields' ,
missingRequiredMetaFieldOnFile : 'Missing required meta fields in %{fileName}' ,
inferiorSize : 'This file is smaller than the allowed size of %{size}' ,
youCanOnlyUploadFileTypes : 'You can only upload: %{types}' ,
noMoreFilesAllowed : 'Cannot add more files' ,
noDuplicates : "Cannot add the duplicate file '%{fileName}', it already exists" ,
companionError : 'Connection with Companion failed' ,
authAborted : 'Authentication aborted' ,
companionUnauthorizeHint : 'To unauthorize to your %{provider} account, please go to %{url}' ,
failedToUpload : 'Failed to upload %{file}' ,
noInternetConnection : 'No Internet connection' ,
connectedToInternet : 'Connected to the Internet' ,
// Strings for remote providers
noFilesFound : 'You have no files or folders here' ,
selectX : {
0 : 'Select %{smart_count}' ,
1 : 'Select %{smart_count}'
} ,
allFilesFromFolderNamed : 'All files from folder %{name}' ,
openFolderNamed : 'Open folder %{name}' ,
cancel : 'Cancel' ,
logOut : 'Log out' ,
filter : 'Filter' ,
resetFilter : 'Reset filter' ,
loading : 'Loading...' ,
authenticateWithTitle : 'Please authenticate with %{pluginName} to select files' ,
authenticateWith : 'Connect to %{pluginName}' ,
signInWithGoogle : 'Sign in with Google' ,
searchImages : 'Search for images' ,
enterTextToSearch : 'Enter text to search for images' ,
search : 'Search' ,
emptyFolderAdded : 'No files were added from empty folder' ,
folderAlreadyAdded : 'The folder "%{folder}" was already added' ,
folderAdded : {
0 : 'Added %{smart_count} file from %{folder}' ,
1 : 'Added %{smart_count} files from %{folder}'
}
}
} ;
} , { } ] , 22 : [ function ( require , module , exports ) {
/* eslint-disable no-console */
const getTimeStamp = require ( '@uppy/utils/lib/getTimeStamp' ) ; // Swallow all logs, except errors.
// default if logger is not set or debug: false
const justErrorsLogger = {
debug : ( ) => { } ,
warn : ( ) => { } ,
error : function ( ) {
for ( var _len = arguments . length , args = new Array ( _len ) , _key = 0 ; _key < _len ; _key ++ ) {
args [ _key ] = arguments [ _key ] ;
}
return console . error ( ` [Uppy] [ ${ getTimeStamp ( ) } ] ` , ... args ) ;
}
} ; // Print logs to console with namespace + timestamp,
// set by logger: Uppy.debugLogger or debug: true
const debugLogger = {
debug : function ( ) {
for ( var _len2 = arguments . length , args = new Array ( _len2 ) , _key2 = 0 ; _key2 < _len2 ; _key2 ++ ) {
args [ _key2 ] = arguments [ _key2 ] ;
}
return console . debug ( ` [Uppy] [ ${ getTimeStamp ( ) } ] ` , ... args ) ;
} ,
warn : function ( ) {
for ( var _len3 = arguments . length , args = new Array ( _len3 ) , _key3 = 0 ; _key3 < _len3 ; _key3 ++ ) {
args [ _key3 ] = arguments [ _key3 ] ;
}
return console . warn ( ` [Uppy] [ ${ getTimeStamp ( ) } ] ` , ... args ) ;
} ,
error : function ( ) {
for ( var _len4 = arguments . length , args = new Array ( _len4 ) , _key4 = 0 ; _key4 < _len4 ; _key4 ++ ) {
args [ _key4 ] = arguments [ _key4 ] ;
}
return console . error ( ` [Uppy] [ ${ getTimeStamp ( ) } ] ` , ... args ) ;
}
} ;
module . exports = {
justErrorsLogger ,
debugLogger
} ;
} , { "@uppy/utils/lib/getTimeStamp" : 45 } ] , 23 : [ function ( require , module , exports ) {
// Edge 15.x does not fire 'progress' events on uploads.
// See https://github.com/transloadit/uppy/issues/945
// And https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/12224510/
module . exports = function supportsUploadProgress ( userAgent ) {
// Allow passing in userAgent for tests
if ( userAgent == null ) {
userAgent = typeof navigator !== 'undefined' ? navigator . userAgent : null ;
} // Assume it works because basically everything supports progress events.
if ( ! userAgent ) { return true ; }
const m = /Edge\/(\d+\.\d+)/ . exec ( userAgent ) ;
if ( ! m ) { return true ; }
const edgeVersion = m [ 1 ] ;
let [ major , minor ] = edgeVersion . split ( '.' ) ;
major = parseInt ( major , 10 ) ;
minor = parseInt ( minor , 10 ) ; // Worked before:
// Edge 40.15063.0.0
// Microsoft EdgeHTML 15.15063
if ( major < 15 || major === 15 && minor < 15063 ) {
return true ;
} // Fixed in:
// Microsoft EdgeHTML 18.18218
if ( major > 18 || major === 18 && minor >= 18218 ) {
return true ;
} // other versions don't work.
return false ;
} ;
} , { } ] , 24 : [ function ( require , module , exports ) {
let _class , _temp ;
const BasePlugin = require ( '@uppy/core/lib/BasePlugin' ) ;
const getDroppedFiles = require ( '@uppy/utils/lib/getDroppedFiles' ) ;
const toArray = require ( '@uppy/utils/lib/toArray' ) ;
/ * *
* Drop Target plugin
*
* /
module . exports = ( _temp = _class = class DropTarget extends BasePlugin {
constructor ( uppy , opts ) {
super ( uppy , opts ) ;
this . addFiles = files => {
const descriptors = files . map ( file => ( {
source : this . id ,
name : file . name ,
type : file . type ,
data : file ,
meta : {
// path of the file relative to the ancestor directory the user selected.
// e.g. 'docs/Old Prague/airbnb.pdf'
relativePath : file . relativePath || null
}
} ) ) ;
try {
this . uppy . addFiles ( descriptors ) ;
} catch ( err ) {
this . uppy . log ( err ) ;
}
} ;
this . isFileTransfer = event => {
let _event$dataTransfer$t ;
const transferTypes = ( _event$dataTransfer$t = event . dataTransfer . types ) != null ? _event$dataTransfer$t : [ ] ;
return transferTypes . some ( type => type === 'Files' ) ;
} ;
this . handleDrop = async event => {
let _this$opts$onDrop , _this$opts ;
if ( ! this . isFileTransfer ( event ) ) {
return ;
}
event . preventDefault ( ) ;
event . stopPropagation ( ) ;
clearTimeout ( this . removeDragOverClassTimeout ) ; // Remove dragover class
event . currentTarget . classList . remove ( 'uppy-is-drag-over' ) ;
this . setPluginState ( {
isDraggingOver : false
} ) ; // Let any acquirer plugin (Url/Webcam/etc.) handle drops to the root
this . uppy . iteratePlugins ( plugin => {
if ( plugin . type === 'acquirer' ) {
// Every Plugin with .type acquirer can define handleRootDrop(event)
plugin . handleRootDrop == null ? void 0 : plugin . handleRootDrop ( event ) ;
}
} ) ; // Add all dropped files, handle errors
let executedDropErrorOnce = false ;
const logDropError = error => {
this . uppy . log ( error , 'error' ) ; // In practice all drop errors are most likely the same,
// so let's just show one to avoid overwhelming the user
if ( ! executedDropErrorOnce ) {
this . uppy . info ( error . message , 'error' ) ;
executedDropErrorOnce = true ;
}
} ;
const files = await getDroppedFiles ( event . dataTransfer , {
logDropError
} ) ;
if ( files . length > 0 ) {
this . uppy . log ( '[DropTarget] Files were dropped' ) ;
this . addFiles ( files ) ;
}
( _this$opts$onDrop = ( _this$opts = this . opts ) . onDrop ) == null ? void 0 : _this$opts$onDrop . call ( _this$opts , event ) ;
} ;
this . handleDragOver = event => {
let _this$opts$onDragOver , _this$opts2 ;
if ( ! this . isFileTransfer ( event ) ) {
return ;
}
event . preventDefault ( ) ;
event . stopPropagation ( ) ; // Add a small (+) icon on drop
// (and prevent browsers from interpreting this as files being _moved_ into the browser,
// https://github.com/transloadit/uppy/issues/1978)
event . dataTransfer . dropEffect = 'copy' ;
clearTimeout ( this . removeDragOverClassTimeout ) ;
event . currentTarget . classList . add ( 'uppy-is-drag-over' ) ;
this . setPluginState ( {
isDraggingOver : true
} ) ;
( _this$opts$onDragOver = ( _this$opts2 = this . opts ) . onDragOver ) == null ? void 0 : _this$opts$onDragOver . call ( _this$opts2 , event ) ;
} ;
this . handleDragLeave = event => {
let _this$opts$onDragLeav , _this$opts3 ;
if ( ! this . isFileTransfer ( event ) ) {
return ;
}
event . preventDefault ( ) ;
event . stopPropagation ( ) ;
const {
currentTarget
} = event ;
clearTimeout ( this . removeDragOverClassTimeout ) ; // Timeout against flickering, this solution is taken from drag-drop library.
// Solution with 'pointer-events: none' didn't work across browsers.
this . removeDragOverClassTimeout = setTimeout ( ( ) => {
currentTarget . classList . remove ( 'uppy-is-drag-over' ) ;
this . setPluginState ( {
isDraggingOver : false
} ) ;
} , 50 ) ;
( _this$opts$onDragLeav = ( _this$opts3 = this . opts ) . onDragLeave ) == null ? void 0 : _this$opts$onDragLeav . call ( _this$opts3 , event ) ;
} ;
this . addListeners = ( ) => {
const {
target
} = this . opts ;
if ( target instanceof Element ) {
this . nodes = [ target ] ;
} else if ( typeof target === 'string' ) {
this . nodes = toArray ( document . querySelectorAll ( target ) ) ;
}
if ( ! this . nodes && ! this . nodes . length > 0 ) {
throw new Error ( ` " ${ target } " does not match any HTML elements ` ) ;
}
this . nodes . forEach ( node => {
node . addEventListener ( 'dragover' , this . handleDragOver , false ) ;
node . addEventListener ( 'dragleave' , this . handleDragLeave , false ) ;
node . addEventListener ( 'drop' , this . handleDrop , false ) ;
} ) ;
} ;
this . removeListeners = ( ) => {
if ( this . nodes ) {
this . nodes . forEach ( node => {
node . removeEventListener ( 'dragover' , this . handleDragOver , false ) ;
node . removeEventListener ( 'dragleave' , this . handleDragLeave , false ) ;
node . removeEventListener ( 'drop' , this . handleDrop , false ) ;
} ) ;
}
} ;
this . type = 'acquirer' ;
this . id = this . opts . id || 'DropTarget' ;
this . title = 'Drop Target' ; // Default options
const defaultOpts = {
target : null
} ; // Merge default options with the ones set by user
this . opts = { ... defaultOpts ,
... opts
} ;
this . removeDragOverClassTimeout = null ;
}
install ( ) {
this . setPluginState ( {
isDraggingOver : false
} ) ;
this . addListeners ( ) ;
}
uninstall ( ) {
this . removeListeners ( ) ;
}
} , _class . VERSION = "1.1.2" , _temp ) ;
} , { "@uppy/core/lib/BasePlugin" : 15 , "@uppy/utils/lib/getDroppedFiles" : 37 , "@uppy/utils/lib/toArray" : 51 } ] , 25 : [ function ( require , module , exports ) {
"use strict" ;
function _classPrivateFieldLooseBase ( receiver , privateKey ) { if ( ! Object . prototype . hasOwnProperty . call ( receiver , privateKey ) ) { throw new TypeError ( "attempted to use private field on non-instance" ) ; } return receiver ; }
let id = 0 ;
function _classPrivateFieldLooseKey ( name ) { return "__private_" + id ++ + "_" + name ; }
let _publish = /*#__PURE__*/ _classPrivateFieldLooseKey ( "publish" ) ;
/ * *
* Default store that keeps state in a simple object .
* /
class DefaultStore {
constructor ( ) {
Object . defineProperty ( this , _publish , {
value : _publish2
} ) ;
this . state = { } ;
this . callbacks = [ ] ;
}
getState ( ) {
return this . state ;
}
setState ( patch ) {
const prevState = { ... this . state
} ;
const nextState = { ... this . state ,
... patch
} ;
this . state = nextState ;
_classPrivateFieldLooseBase ( this , _publish ) [ _publish ] ( prevState , nextState , patch ) ;
}
subscribe ( listener ) {
this . callbacks . push ( listener ) ;
return ( ) => {
// Remove the listener.
this . callbacks . splice ( this . callbacks . indexOf ( listener ) , 1 ) ;
} ;
}
}
/**
 * Notify every subscribed listener of a state change; `setState` invokes
 * this with (prevState, nextState, patch).
 */
function _publish2(...args) {
  for (const listener of this.callbacks) {
    listener(...args);
  }
}
DefaultStore . VERSION = "2.0.3" ;
module . exports = function defaultStore ( ) {
return new DefaultStore ( ) ;
} ;
} , { } ] , 26 : [ function ( require , module , exports ) {
"use strict" ;
/ * *
* Little AbortController proxy module so we can swap out the implementation easily later .
* /
exports . AbortController = globalThis . AbortController ;
exports . AbortSignal = globalThis . AbortSignal ;
exports . createAbortError = function ( message ) {
if ( message === void 0 ) {
message = 'Aborted' ;
}
return new DOMException ( message , 'AbortError' ) ;
} ;
} , { } ] , 27 : [ function ( require , module , exports ) {
"use strict" ;
let _emitter , _events ;
function _classPrivateFieldLooseBase ( receiver , privateKey ) { if ( ! Object . prototype . hasOwnProperty . call ( receiver , privateKey ) ) { throw new TypeError ( "attempted to use private field on non-instance" ) ; } return receiver ; }
let id = 0 ;
function _classPrivateFieldLooseKey ( name ) { return "__private_" + id ++ + "_" + name ; }
/ * *
* Create a wrapper around an event emitter with a ` remove ` method to remove
* all events that were added using the wrapped emitter .
* /
module . exports = ( _emitter = /*#__PURE__*/ _classPrivateFieldLooseKey ( "emitter" ) , _events = /*#__PURE__*/ _classPrivateFieldLooseKey ( "events" ) , class EventTracker {
constructor ( emitter ) {
Object . defineProperty ( this , _emitter , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _events , {
writable : true ,
value : [ ]
} ) ;
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] = emitter ;
}
on ( event , fn ) {
_classPrivateFieldLooseBase ( this , _events ) [ _events ] . push ( [ event , fn ] ) ;
return _classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . on ( event , fn ) ;
}
remove ( ) {
for ( const [ event , fn ] of _classPrivateFieldLooseBase ( this , _events ) [ _events ] . splice ( 0 ) ) {
_classPrivateFieldLooseBase ( this , _emitter ) [ _emitter ] . off ( event , fn ) ;
}
}
} ) ;
} , { } ] , 28 : [ function ( require , module , exports ) {
"use strict" ;
/**
 * Error raised when a request fails in a way that suggests the network
 * itself (or a proxy/firewall) is at fault rather than the server.
 */
class NetworkError extends Error {
  /**
   * @param {Error} error - the underlying cause
   * @param {XMLHttpRequest|null} [xhr=null] - the request that failed, if any
   */
  constructor(error, xhr = null) {
    super(`This looks like a network error, the endpoint might be blocked by an internet provider or a firewall.`);
    this.cause = error;
    this.isNetworkError = true;
    this.request = xhr;
  }
}
module . exports = NetworkError ;
} , { } ] , 29 : [ function ( require , module , exports ) {
"use strict" ;
function _classPrivateFieldLooseBase ( receiver , privateKey ) { if ( ! Object . prototype . hasOwnProperty . call ( receiver , privateKey ) ) { throw new TypeError ( "attempted to use private field on non-instance" ) ; } return receiver ; }
let id = 0 ;
function _classPrivateFieldLooseKey ( name ) { return "__private_" + id ++ + "_" + name ; }
let _aliveTimer = /*#__PURE__*/ _classPrivateFieldLooseKey ( "aliveTimer" ) ;
let _isDone = /*#__PURE__*/ _classPrivateFieldLooseKey ( "isDone" ) ;
let _onTimedOut = /*#__PURE__*/ _classPrivateFieldLooseKey ( "onTimedOut" ) ;
let _timeout = /*#__PURE__*/ _classPrivateFieldLooseKey ( "timeout" ) ;
/ * *
* Helper to abort upload requests if there has not been any progress for ` timeout ` ms .
* Create an instance using ` timer = new ProgressTimeout(10000, onTimeout) `
* Call ` timer.progress() ` to signal that there has been progress of any kind .
* Call ` timer.done() ` when the upload has completed .
* /
class ProgressTimeout {
constructor ( timeout , timeoutHandler ) {
Object . defineProperty ( this , _aliveTimer , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _isDone , {
writable : true ,
value : false
} ) ;
Object . defineProperty ( this , _onTimedOut , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _timeout , {
writable : true ,
value : void 0
} ) ;
_classPrivateFieldLooseBase ( this , _timeout ) [ _timeout ] = timeout ;
_classPrivateFieldLooseBase ( this , _onTimedOut ) [ _onTimedOut ] = timeoutHandler ;
}
progress ( ) {
// Some browsers fire another progress event when the upload is
// cancelled, so we have to ignore progress after the timer was
// told to stop.
if ( _classPrivateFieldLooseBase ( this , _isDone ) [ _isDone ] ) { return ; }
if ( _classPrivateFieldLooseBase ( this , _timeout ) [ _timeout ] > 0 ) {
clearTimeout ( _classPrivateFieldLooseBase ( this , _aliveTimer ) [ _aliveTimer ] ) ;
_classPrivateFieldLooseBase ( this , _aliveTimer ) [ _aliveTimer ] = setTimeout ( _classPrivateFieldLooseBase ( this , _onTimedOut ) [ _onTimedOut ] , _classPrivateFieldLooseBase ( this , _timeout ) [ _timeout ] ) ;
}
}
done ( ) {
if ( ! _classPrivateFieldLooseBase ( this , _isDone ) [ _isDone ] ) {
clearTimeout ( _classPrivateFieldLooseBase ( this , _aliveTimer ) [ _aliveTimer ] ) ;
_classPrivateFieldLooseBase ( this , _aliveTimer ) [ _aliveTimer ] = null ;
_classPrivateFieldLooseBase ( this , _isDone ) [ _isDone ] = true ;
}
}
}
module . exports = ProgressTimeout ;
} , { } ] , 30 : [ function ( require , module , exports ) {
"use strict" ;
// Loose-mode Babel helper: verify the private key was installed on this
// instance before allowing access, mirroring true #private-field semantics.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  const ownsKey = Object.prototype.hasOwnProperty.call(receiver, privateKey);
  if (ownsKey) {
    return receiver;
  }
  throw new TypeError("attempted to use private field on non-instance");
}
let id = 0 ;
function _classPrivateFieldLooseKey ( name ) { return "__private_" + id ++ + "_" + name ; }
/**
 * Build the Error used to reject the promise of a queued request that was
 * cancelled before (or while) running. A fresh instance is created per call
 * so each rejection carries its own stack trace.
 * @returns {Error} an Error with message 'Cancelled'
 */
function createCancelError() {
  const cancelled = new Error('Cancelled');
  return cancelled;
}
let _activeRequests = /*#__PURE__*/ _classPrivateFieldLooseKey ( "activeRequests" ) ;
let _queuedHandlers = /*#__PURE__*/ _classPrivateFieldLooseKey ( "queuedHandlers" ) ;
let _paused = /*#__PURE__*/ _classPrivateFieldLooseKey ( "paused" ) ;
let _pauseTimer = /*#__PURE__*/ _classPrivateFieldLooseKey ( "pauseTimer" ) ;
let _downLimit = /*#__PURE__*/ _classPrivateFieldLooseKey ( "downLimit" ) ;
let _upperLimit = /*#__PURE__*/ _classPrivateFieldLooseKey ( "upperLimit" ) ;
let _rateLimitingTimer = /*#__PURE__*/ _classPrivateFieldLooseKey ( "rateLimitingTimer" ) ;
let _call = /*#__PURE__*/ _classPrivateFieldLooseKey ( "call" ) ;
let _queueNext = /*#__PURE__*/ _classPrivateFieldLooseKey ( "queueNext" ) ;
let _next = /*#__PURE__*/ _classPrivateFieldLooseKey ( "next" ) ;
let _queue = /*#__PURE__*/ _classPrivateFieldLooseKey ( "queue" ) ;
let _dequeue = /*#__PURE__*/ _classPrivateFieldLooseKey ( "dequeue" ) ;
let _resume = /*#__PURE__*/ _classPrivateFieldLooseKey ( "resume" ) ;
let _increaseLimit = /*#__PURE__*/ _classPrivateFieldLooseKey ( "increaseLimit" ) ;
class RateLimitedQueue {
constructor ( limit ) {
Object . defineProperty ( this , _dequeue , {
value : _dequeue2
} ) ;
Object . defineProperty ( this , _queue , {
value : _queue2
} ) ;
Object . defineProperty ( this , _next , {
value : _next2
} ) ;
Object . defineProperty ( this , _queueNext , {
value : _queueNext2
} ) ;
Object . defineProperty ( this , _call , {
value : _call2
} ) ;
Object . defineProperty ( this , _activeRequests , {
writable : true ,
value : 0
} ) ;
Object . defineProperty ( this , _queuedHandlers , {
writable : true ,
value : [ ]
} ) ;
Object . defineProperty ( this , _paused , {
writable : true ,
value : false
} ) ;
Object . defineProperty ( this , _pauseTimer , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _downLimit , {
writable : true ,
value : 1
} ) ;
Object . defineProperty ( this , _upperLimit , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _rateLimitingTimer , {
writable : true ,
value : void 0
} ) ;
Object . defineProperty ( this , _resume , {
writable : true ,
value : ( ) => this . resume ( )
} ) ;
Object . defineProperty ( this , _increaseLimit , {
writable : true ,
value : ( ) => {
if ( _classPrivateFieldLooseBase ( this , _paused ) [ _paused ] ) {
_classPrivateFieldLooseBase ( this , _rateLimitingTimer ) [ _rateLimitingTimer ] = setTimeout ( _classPrivateFieldLooseBase ( this , _increaseLimit ) [ _increaseLimit ] , 0 ) ;
return ;
}
_classPrivateFieldLooseBase ( this , _downLimit ) [ _downLimit ] = this . limit ;
this . limit = Math . ceil ( ( _classPrivateFieldLooseBase ( this , _upperLimit ) [ _upperLimit ] + _classPrivateFieldLooseBase ( this , _downLimit ) [ _downLimit ] ) / 2 ) ;
for ( let i = _classPrivateFieldLooseBase ( this , _downLimit ) [ _downLimit ] ; i <= this . limit ; i ++ ) {
_classPrivateFieldLooseBase ( this , _queueNext ) [ _queueNext ] ( ) ;
}
if ( _classPrivateFieldLooseBase ( this , _upperLimit ) [ _upperLimit ] - _classPrivateFieldLooseBase ( this , _downLimit ) [ _downLimit ] > 3 ) {
_classPrivateFieldLooseBase ( this , _rateLimitingTimer ) [ _rateLimitingTimer ] = setTimeout ( _classPrivateFieldLooseBase ( this , _increaseLimit ) [ _increaseLimit ] , 2000 ) ;
} else {
_classPrivateFieldLooseBase ( this , _downLimit ) [ _downLimit ] = Math . floor ( _classPrivateFieldLooseBase ( this , _downLimit ) [ _downLimit ] / 2 ) ;
}
}
} ) ;
if ( typeof limit !== 'number' || limit === 0 ) {
this . limit = Infinity ;
} else {
this . limit = limit ;
}
}
run ( fn , queueOptions ) {
if ( ! _classPrivateFieldLooseBase ( this , _paused ) [ _paused ] && _classPrivateFieldLooseBase ( this , _activeRequests ) [ _activeRequests ] < this . limit ) {
return _classPrivateFieldLooseBase ( this , _call ) [ _call ] ( fn ) ;
}
return _classPrivateFieldLooseBase ( this , _queue ) [ _queue ] ( fn , queueOptions ) ;
}
wrapPromiseFunction ( fn , queueOptions ) {
let _this = this ;
return function ( ) {
for ( var _len = arguments . length , args = new Array ( _len ) , _key = 0 ; _key < _len ; _key ++ ) {
args [ _key ] = arguments [ _key ] ;
}
let queuedRequest ;
const outerPromise = new Promise ( ( resolve , reject ) => {
queuedRequest = _this . run ( ( ) => {
let cancelError ;
let innerPromise ;
try {
innerPromise = Promise . resolve ( fn ( ... args ) ) ;
} catch ( err ) {
innerPromise = Promise . reject ( err ) ;
}
innerPromise . then ( result => {
if ( cancelError ) {
reject ( cancelError ) ;
} else {
queuedRequest . done ( ) ;
resolve ( result ) ;
}
} , err => {
if ( cancelError ) {
reject ( cancelError ) ;
} else {
queuedRequest . done ( ) ;
reject ( err ) ;
}
} ) ;
return ( ) => {
cancelError = createCancelError ( ) ;
} ;
} , queueOptions ) ;
} ) ;
outerPromise . abort = ( ) => {
queuedRequest . abort ( ) ;
} ;
return outerPromise ;
} ;
}
resume ( ) {
_classPrivateFieldLooseBase ( this , _paused ) [ _paused ] = false ;
clearTimeout ( _classPrivateFieldLooseBase ( this , _pauseTimer ) [ _pauseTimer ] ) ;
for ( let i = 0 ; i < this . limit ; i ++ ) {
_classPrivateFieldLooseBase ( this , _queueNext ) [ _queueNext ] ( ) ;
}
}
/ * *
* Freezes the queue for a while or indefinitely .
*
* @ param { number | null } [ duration ] Duration for the pause to happen , in milliseconds .
* If omitted , the queue won ' t resume automatically .
* /
pause ( duration ) {
if ( duration === void 0 ) {
duration = null ;
}
_classPrivateFieldLooseBase ( this , _paused ) [ _paused ] = true ;
clearTimeout ( _classPrivateFieldLooseBase ( this , _pauseTimer ) [ _pauseTimer ] ) ;
if ( duration != null ) {
_classPrivateFieldLooseBase ( this , _pauseTimer ) [ _pauseTimer ] = setTimeout ( _classPrivateFieldLooseBase ( this , _resume ) [ _resume ] , duration ) ;
}
}
/ * *
* Pauses the queue for a duration , and lower the limit of concurrent requests
* when the queue resumes . When the queue resumes , it tries to progressively
* increase the limit in ` this.#increaseLimit ` until another call is made to
* ` this.rateLimit ` .
* Call this function when using the RateLimitedQueue for network requests and
* the remote server responds with 429 HTTP code .
*
* @ param { number } duration in milliseconds .
* /
rateLimit ( duration ) {
clearTimeout ( _classPrivateFieldLooseBase ( this , _rateLimitingTimer ) [ _rateLimitingTimer ] ) ;
this . pause ( duration ) ;
if ( this . limit > 1 && Number . isFinite ( this . limit ) ) {
_classPrivateFieldLooseBase ( this , _upperLimit ) [ _upperLimit ] = this . limit - 1 ;
this . limit = _classPrivateFieldLooseBase ( this , _downLimit ) [ _downLimit ] ;
_classPrivateFieldLooseBase ( this , _rateLimitingTimer ) [ _rateLimitingTimer ] = setTimeout ( _classPrivateFieldLooseBase ( this , _increaseLimit ) [ _increaseLimit ] , duration ) ;
}
}
get isPaused ( ) {
return _classPrivateFieldLooseBase ( this , _paused ) [ _paused ] ;
}
}
/**
 * Start `fn` immediately, tracking it as an active request. Returns a
 * handler whose `done()` releases the slot and `abort()` additionally
 * invokes the task's own cancellation function. Either may be called at
 * most once; later calls are no-ops.
 */
function _call2(fn) {
  _classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] += 1;
  let settled = false;
  let cancelActive;
  try {
    cancelActive = fn();
  } catch (err) {
    // The task failed synchronously: release its slot before rethrowing.
    _classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] -= 1;
    throw err;
  }
  return {
    abort: () => {
      if (settled) { return; }
      settled = true;
      _classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] -= 1;
      cancelActive();
      _classPrivateFieldLooseBase(this, _queueNext)[_queueNext]();
    },
    done: () => {
      if (settled) { return; }
      settled = true;
      _classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] -= 1;
      _classPrivateFieldLooseBase(this, _queueNext)[_queueNext]();
    }
  };
}
/**
 * Schedule a dispatch of the next queued task on a microtask.
 */
function _queueNext2() {
  // Do it soon but not immediately: this allows clearing out the entire
  // queue synchronously one by one without continuously _advancing_ it
  // (and starting new tasks before immediately aborting them).
  queueMicrotask(() => {
    _classPrivateFieldLooseBase(this, _next)[_next]();
  });
}
/**
 * Dispatch the next queued handler, provided the queue is running and a
 * concurrency slot is free.
 */
function _next2() {
  if (_classPrivateFieldLooseBase(this, _paused)[_paused] || _classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] >= this.limit) {
    return;
  }
  const queued = _classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers];
  if (queued.length === 0) {
    return;
  }
  // Dispatch the next request, and update the abort/done handlers
  // so that cancelling it does the Right Thing (and doesn't just try
  // to dequeue an already-running request).
  const next = queued.shift();
  const handler = _classPrivateFieldLooseBase(this, _call)[_call](next.fn);
  next.abort = handler.abort;
  next.done = handler.done;
}
/**
 * Enqueue `fn`, keeping the handler list sorted by descending priority
 * (equal priorities keep FIFO order). Returns the queued handler, whose
 * `abort()` removes it from the queue; `done()` on a still-queued handler
 * is a programming error.
 */
function _queue2(fn, options) {
  if (options === undefined) {
    options = {};
  }
  const handler = {
    fn,
    priority: options.priority || 0,
    abort: () => {
      _classPrivateFieldLooseBase(this, _dequeue)[_dequeue](handler);
    },
    done: () => {
      throw new Error('Cannot mark a queued request as done: this indicates a bug');
    }
  };
  const queued = _classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers];
  // Insert before the first handler with a strictly lower priority.
  const insertAt = queued.findIndex((other) => handler.priority > other.priority);
  if (insertAt === -1) {
    queued.push(handler);
  } else {
    queued.splice(insertAt, 0, handler);
  }
  return handler;
}
/**
 * Remove a handler from the queue, if it is still queued.
 */
function _dequeue2(handler) {
  const queued = _classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers];
  const index = queued.indexOf(handler);
  if (index !== -1) {
    queued.splice(index, 1);
  }
}
// Public API of this module: the queue class itself, plus a Symbol key that
// lets several plugin instances share a single queue via their options object.
module . exports = {
RateLimitedQueue ,
internalRateLimitedQueue : Symbol ( '__queue' )
} ;
} , { } ] , 31 : [ function ( require , module , exports ) {
"use strict" ;
let _apply ;
function _classPrivateFieldLooseBase ( receiver , privateKey ) { if ( ! Object . prototype . hasOwnProperty . call ( receiver , privateKey ) ) { throw new TypeError ( "attempted to use private field on non-instance" ) ; } return receiver ; }
let id = 0 ;
function _classPrivateFieldLooseKey ( name ) { return "__private_" + id ++ + "_" + name ; }
const has = require ( './hasProperty' ) ;
/**
 * Split every string chunk of `source` on `rx` and interleave `replacement`
 * between the split pieces. Non-string chunks are passed through untouched:
 * those can be JSX objects and would otherwise be incorrectly coerced into
 * "[object Object]" strings.
 *
 * @param {Array} source - Mixed array of strings and opaque chunks.
 * @param {RegExp} rx - Placeholder pattern to split on.
 * @param {*} replacement - Value to interleave where the pattern matched.
 * @returns {Array} New array with replacements spliced in.
 */
function insertReplacement(source, rx, replacement) {
  const result = [];
  for (const chunk of source) {
    if (typeof chunk !== 'string') {
      result.push(chunk);
      continue;
    }
    const pieces = rx[Symbol.split](chunk);
    pieces.forEach((piece, index) => {
      if (piece !== '') {
        result.push(piece);
      }
      // Interlace with the `replacement` value between adjacent pieces.
      if (index < pieces.length - 1) {
        result.push(replacement);
      }
    });
  }
  return result;
}
/ * *
* Takes a string with placeholder variables like ` %{smart_count} file selected `
* and replaces it with values from options ` {smart_count: 5} `
*
* @ license https : //github.com/airbnb/polyglot.js/blob/master/LICENSE
* taken from https : //github.com/airbnb/polyglot.js/blob/master/lib/polyglot.js#L299
*
* @ param { string } phrase that needs interpolation , with placeholders
* @ param { object } options with values that will be used to replace placeholders
* @ returns { any [ ] } interpolated
* /
function interpolate ( phrase , options ) {
const dollarRegex = /\$/g ;
const dollarBillsYall = '$$$$' ;
let interpolated = [ phrase ] ;
if ( options == null ) { return interpolated ; }
for ( const arg of Object . keys ( options ) ) {
if ( arg !== '_' ) {
// Ensure replacement value is escaped to prevent special $-prefixed
// regex replace tokens. the "$$$$" is needed because each "$" needs to
// be escaped with "$" itself, and we need two in the resulting output.
let replacement = options [ arg ] ;
if ( typeof replacement === 'string' ) {
replacement = dollarRegex [ Symbol . replace ] ( replacement , dollarBillsYall ) ;
} // We create a new `RegExp` each time instead of using a more-efficient
// string replace so that the same argument can be replaced multiple times
// in the same phrase.
interpolated = insertReplacement ( interpolated , new RegExp ( ` % \\ { ${ arg } \\ } ` , 'g' ) , replacement ) ;
}
}
return interpolated ;
}
/ * *
* Translates strings with interpolation & pluralization support .
* Extensible with custom dictionaries and pluralization functions .
*
* Borrows heavily from and inspired by Polyglot https : //github.com/airbnb/polyglot.js,
* basically a stripped - down version of it . Differences : pluralization functions are not hardcoded
* and can be easily added among with dictionaries , nested objects are used for pluralization
* as opposed to ` |||| ` delimeter
*
* Usage example : ` translator.translate('files_chosen', {smart_count: 3}) `
* /
module . exports = ( _apply = /*#__PURE__*/ _classPrivateFieldLooseKey ( "apply" ) , class Translator {
/ * *
* @ param { object | Array < object > } locales - locale or list of locales .
* /
constructor ( locales ) {
Object . defineProperty ( this , _apply , {
value : _apply2
} ) ;
this . locale = {
strings : { } ,
pluralize ( n ) {
if ( n === 1 ) {
return 0 ;
}
return 1 ;
}
} ;
if ( Array . isArray ( locales ) ) {
locales . forEach ( _classPrivateFieldLooseBase ( this , _apply ) [ _apply ] , this ) ;
} else {
_classPrivateFieldLooseBase ( this , _apply ) [ _apply ] ( locales ) ;
}
}
/ * *
* Public translate method
*
* @ param { string } key
* @ param { object } options with values that will be used later to replace placeholders in string
* @ returns { string } translated ( and interpolated )
* /
translate ( key , options ) {
return this . translateArray ( key , options ) . join ( '' ) ;
}
/ * *
* Get a translation and return the translated and interpolated parts as an array .
*
* @ param { string } key
* @ param { object } options with values that will be used to replace placeholders
* @ returns { Array } The translated and interpolated parts , in order .
* /
translateArray ( key , options ) {
if ( ! has ( this . locale . strings , key ) ) {
throw new Error ( ` missing string: ${ key } ` ) ;
}
const string = this . locale . strings [ key ] ;
const hasPluralForms = typeof string === 'object' ;
if ( hasPluralForms ) {
if ( options && typeof options . smart _count !== 'undefined' ) {
const plural = this . locale . pluralize ( options . smart _count ) ;
return interpolate ( string [ plural ] , options ) ;
}
throw new Error ( 'Attempted to use a string with plural forms, but no value was given for %{smart_count}' ) ;
}
return interpolate ( string , options ) ;
}
} ) ;
/**
 * Merge a locale object into `this.locale`. Existing strings are kept unless
 * overridden by the new locale; `pluralize` is replaced only when the new
 * locale provides one. Falsy locales (or ones without `strings`) are ignored.
 *
 * @param {object} locale - `{ strings, pluralize? }`.
 */
function _apply2(locale) {
  if (locale == null || !locale.strings) {
    return;
  }
  const previous = this.locale;
  this.locale = {
    ...previous,
    strings: {
      ...previous.strings,
      ...locale.strings
    }
  };
  this.locale.pluralize = locale.pluralize || previous.pluralize;
}
} , { "./hasProperty" : 46 } ] , 32 : [ function ( require , module , exports ) {
"use strict" ;
const {
createAbortError
} = require ( './AbortController' ) ;
/ * *
* Return a Promise that resolves after ` ms ` milliseconds .
*
* @ param { number } ms - Number of milliseconds to wait .
* @ param { { signal ? : AbortSignal } } [ opts ] - An abort signal that can be used to cancel the delay early .
* @ returns { Promise < void > } A Promise that resolves after the given amount of ` ms ` .
* /
module . exports = function delay ( ms , opts ) {
return new Promise ( ( resolve , reject ) => {
let _opts$signal , _opts$signal2 ;
if ( opts != null && ( _opts$signal = opts . signal ) != null && _opts$signal . aborted ) {
return reject ( createAbortError ( ) ) ;
}
const timeout = setTimeout ( ( ) => {
cleanup ( ) ; // eslint-disable-line no-use-before-define
resolve ( ) ;
} , ms ) ;
function onabort ( ) {
clearTimeout ( timeout ) ;
cleanup ( ) ; // eslint-disable-line no-use-before-define
reject ( createAbortError ( ) ) ;
}
opts == null ? void 0 : ( _opts$signal2 = opts . signal ) == null ? void 0 : _opts$signal2 . addEventListener ( 'abort' , onabort ) ;
function cleanup ( ) {
let _opts$signal3 ;
opts == null ? void 0 : ( _opts$signal3 = opts . signal ) == null ? void 0 : _opts$signal3 . removeEventListener ( 'abort' , onabort ) ;
}
return undefined ;
} ) ;
} ;
} , { "./AbortController" : 26 } ] , 33 : [ function ( require , module , exports ) {
"use strict" ;
const throttle = require ( 'lodash.throttle' ) ;
/**
 * Forward progress data received over a Socket to Uppy's event bus.
 * Emits only when the payload actually carries a truthy `progress` value.
 *
 * @param {object} uploader - Uploader instance; `uploader.uppy` receives the event.
 * @param {object} progressData - `{ progress, bytesUploaded, bytesTotal }` from the socket.
 * @param {object} file - The Uppy file this progress belongs to.
 */
function emitSocketProgress(uploader, progressData, file) {
  const { progress, bytesUploaded, bytesTotal } = progressData;
  if (!progress) {
    return;
  }
  uploader.uppy.log(` Upload progress: ${progress} `);
  uploader.uppy.emit('upload-progress', file, {
    uploader,
    bytesUploaded,
    bytesTotal
  });
}
// Throttle progress events to at most one per 300ms (keeping both the
// leading and trailing call) so rapid socket messages don't flood the UI.
module . exports = throttle ( emitSocketProgress , 300 , {
leading : true ,
trailing : true
} ) ;
} , { "lodash.throttle" : 54 } ] , 34 : [ function ( require , module , exports ) {
"use strict" ;
const NetworkError = require ( './NetworkError' ) ;
/ * *
* Wrapper around window . fetch that throws a NetworkError when appropriate
* /
module . exports = function fetchWithNetworkError ( ) {
return fetch ( ... arguments ) . catch ( err => {
if ( err . name === 'AbortError' ) {
throw err ;
} else {
throw new NetworkError ( err ) ;
}
} ) ;
} ;
} , { "./NetworkError" : 28 } ] , 35 : [ function ( require , module , exports ) {
"use strict" ;
const isDOMElement = require ( './isDOMElement' ) ;
/ * *
* Find a DOM element .
*
* @ param { Node | string } element
* @ returns { Node | null }
* /
module . exports = function findDOMElement ( element , context ) {
if ( context === void 0 ) {
context = document ;
}
if ( typeof element === 'string' ) {
return context . querySelector ( element ) ;
}
if ( isDOMElement ( element ) ) {
return element ;
}
return null ;
} ;
} , { "./isDOMElement" : 47 } ] , 36 : [ function ( require , module , exports ) {
"use strict" ;
function encodeCharacter ( character ) {
return character . charCodeAt ( 0 ) . toString ( 32 ) ;
}
function encodeFilename ( name ) {
let suffix = '' ;
return name . replace ( /[^A-Z0-9]/ig , character => {
suffix += ` - ${ encodeCharacter ( character ) } ` ;
return '/' ;
} ) + suffix ;
}
/ * *
* Takes a file object and turns it into fileID , by converting file . name to lowercase ,
* removing extra characters and adding type , size and lastModified
*
* @ param { object } file
* @ returns { string } the fileID
* /
module . exports = function generateFileID ( file ) {
// It's tempting to do `[items].filter(Boolean).join('-')` here, but that
// is slower! simple string concatenation is fast
let id = 'uppy' ;
if ( typeof file . name === 'string' ) {
id += ` - ${ encodeFilename ( file . name . toLowerCase ( ) ) } ` ;
}
if ( file . type !== undefined ) {
id += ` - ${ file . type } ` ;
}
if ( file . meta && typeof file . meta . relativePath === 'string' ) {
id += ` - ${ encodeFilename ( file . meta . relativePath . toLowerCase ( ) ) } ` ;
}
if ( file . data . size !== undefined ) {
id += ` - ${ file . data . size } ` ;
}
if ( file . data . lastModified !== undefined ) {
id += ` - ${ file . data . lastModified } ` ;
}
return id ;
} ;
} , { } ] , 37 : [ function ( require , module , exports ) {
"use strict" ;
const webkitGetAsEntryApi = require ( './utils/webkitGetAsEntryApi/index' ) ;
const fallbackApi = require ( './utils/fallbackApi' ) ;
/ * *
* Returns a promise that resolves to the array of dropped files ( if a folder is
* dropped , and browser supports folder parsing - promise resolves to the flat
* array of all files in all directories ) .
* Each file has . relativePath prop appended to it ( e . g . "/docs/Prague/ticket_from_prague_to_ufa.pdf" )
* if browser supports it . Otherwise it ' s undefined .
*
* @ param { DataTransfer } dataTransfer
* @ param { Function } logDropError - a function that ' s called every time some
* folder or some file error out ( e . g . because of the folder name being too long
* on Windows ) . Notice that resulting promise will always be resolved anyway .
*
* @ returns { Promise } - Array < File >
* /
module . exports = function getDroppedFiles ( dataTransfer , _temp ) {
let _dataTransfer$items ;
let {
logDropError = ( ) => { }
} = _temp === void 0 ? { } : _temp ;
// Get all files from all subdirs. Works (at least) in Chrome, Mozilla, and Safari
if ( ( _dataTransfer$items = dataTransfer . items ) != null && _dataTransfer$items [ 0 ] && 'webkitGetAsEntry' in dataTransfer . items [ 0 ] ) {
return webkitGetAsEntryApi ( dataTransfer , logDropError ) ; // Otherwise just return all first-order files
}
return fallbackApi ( dataTransfer ) ;
} ;
} , { "./utils/fallbackApi" : 38 , "./utils/webkitGetAsEntryApi/index" : 41 } ] , 38 : [ function ( require , module , exports ) {
"use strict" ;
const toArray = require ( '../../toArray' ) ; // .files fallback, should be implemented in any browser
module . exports = function fallbackApi ( dataTransfer ) {
const files = toArray ( dataTransfer . files ) ;
return Promise . resolve ( files ) ;
} ;
} , { "../../toArray" : 51 } ] , 39 : [ function ( require , module , exports ) {
"use strict" ;
/ * *
* Recursive function , calls the original callback ( ) when the directory is entirely parsed .
*
* @ param { FileSystemDirectoryReader } directoryReader
* @ param { Array } oldEntries
* @ param { Function } logDropError
* @ param { Function } callback - called with ( [ all files and directories in that directoryReader ] )
* /
module . exports = function getFilesAndDirectoriesFromDirectory ( directoryReader , oldEntries , logDropError , _ref ) {
let {
onSuccess
} = _ref ;
directoryReader . readEntries ( entries => {
const newEntries = [ ... oldEntries , ... entries ] ; // According to the FileSystem API spec, getFilesAndDirectoriesFromDirectory()
// must be called until it calls the onSuccess with an empty array.
if ( entries . length ) {
setTimeout ( ( ) => {
getFilesAndDirectoriesFromDirectory ( directoryReader , newEntries , logDropError , {
onSuccess
} ) ;
} , 0 ) ; // Done iterating this particular directory
} else {
onSuccess ( newEntries ) ;
}
} , // Make sure we resolve on error anyway, it's fine if only one directory couldn't be parsed!
error => {
logDropError ( error ) ;
onSuccess ( oldEntries ) ;
} ) ;
} ;
} , { } ] , 40 : [ function ( require , module , exports ) {
"use strict" ;
/ * *
* Get the relative path from the FileEntry # fullPath , because File # webkitRelativePath is always '' , at least onDrop .
*
* @ param { FileEntry } fileEntry
*
* @ returns { string | null } - if file is not in a folder - return null ( this is to
* be consistent with . relativePath - s of files selected from My Device ) . If file
* is in a folder - return its fullPath , e . g . '/simpsons/hi.jpeg' .
* /
module . exports = function getRelativePath ( fileEntry ) {
// fileEntry.fullPath - "/simpsons/hi.jpeg" or undefined (for browsers that don't support it)
// fileEntry.name - "hi.jpeg"
if ( ! fileEntry . fullPath || fileEntry . fullPath === ` / ${ fileEntry . name } ` ) {
return null ;
}
return fileEntry . fullPath ;
} ;
} , { } ] , 41 : [ function ( require , module , exports ) {
"use strict" ;
const toArray = require ( '../../../toArray' ) ;
const getRelativePath = require ( './getRelativePath' ) ;
const getFilesAndDirectoriesFromDirectory = require ( './getFilesAndDirectoriesFromDirectory' ) ;
module . exports = function webkitGetAsEntryApi ( dataTransfer , logDropError ) {
const files = [ ] ;
const rootPromises = [ ] ;
/ * *
* Returns a resolved promise , when : files array is enhanced
*
* @ param { ( FileSystemFileEntry | FileSystemDirectoryEntry ) } entry
* @ returns { Promise } - empty promise that resolves when : files is enhanced with a file
* /
const createPromiseToAddFileOrParseDirectory = entry => new Promise ( resolve => {
// This is a base call
if ( entry . isFile ) {
// Creates a new File object which can be used to read the file.
entry . file ( file => {
// eslint-disable-next-line no-param-reassign
file . relativePath = getRelativePath ( entry ) ;
files . push ( file ) ;
resolve ( ) ;
} , // Make sure we resolve on error anyway, it's fine if only one file couldn't be read!
error => {
logDropError ( error ) ;
resolve ( ) ;
} ) ; // This is a recursive call
} else if ( entry . isDirectory ) {
const directoryReader = entry . createReader ( ) ;
getFilesAndDirectoriesFromDirectory ( directoryReader , [ ] , logDropError , {
onSuccess : entries => resolve ( Promise . all ( entries . map ( createPromiseToAddFileOrParseDirectory ) ) )
} ) ;
}
} ) ; // For each dropped item, - make sure it's a file/directory, and start deepening in!
toArray ( dataTransfer . items ) . forEach ( item => {
const entry = item . webkitGetAsEntry ( ) ; // :entry can be null when we drop the url e.g.
if ( entry ) {
rootPromises . push ( createPromiseToAddFileOrParseDirectory ( entry ) ) ;
}
} ) ;
return Promise . all ( rootPromises ) . then ( ( ) => files ) ;
} ;
} , { "../../../toArray" : 51 , "./getFilesAndDirectoriesFromDirectory" : 39 , "./getRelativePath" : 40 } ] , 42 : [ function ( require , module , exports ) {
"use strict" ;
/ * *
* Takes a full filename string and returns an object { name , extension }
*
* @ param { string } fullFileName
* @ returns { object } { name , extension }
* /
module . exports = function getFileNameAndExtension ( fullFileName ) {
const lastDot = fullFileName . lastIndexOf ( '.' ) ; // these count as no extension: "no-dot", "trailing-dot."
if ( lastDot === - 1 || lastDot === fullFileName . length - 1 ) {
return {
name : fullFileName ,
extension : undefined
} ;
}
return {
name : fullFileName . slice ( 0 , lastDot ) ,
extension : fullFileName . slice ( lastDot + 1 )
} ;
} ;
} , { } ] , 43 : [ function ( require , module , exports ) {
"use strict" ;
const getFileNameAndExtension = require ( './getFileNameAndExtension' ) ;
const mimeTypes = require ( './mimeTypes' ) ;
module . exports = function getFileType ( file ) {
let _getFileNameAndExtens ;
if ( file . type ) { return file . type ; }
const fileExtension = file . name ? ( _getFileNameAndExtens = getFileNameAndExtension ( file . name ) . extension ) == null ? void 0 : _getFileNameAndExtens . toLowerCase ( ) : null ;
if ( fileExtension && fileExtension in mimeTypes ) {
// else, see if we can map extension to a mime type
return mimeTypes [ fileExtension ] ;
} // if all fails, fall back to a generic byte stream type
return 'application/octet-stream' ;
} ;
} , { "./getFileNameAndExtension" : 42 , "./mimeTypes" : 49 } ] , 44 : [ function ( require , module , exports ) {
"use strict" ;
module . exports = function getSocketHost ( url ) {
// get the host domain
const regex = /^(?:https?:\/\/|\/\/)?(?:[^@\n]+@)?(?:www\.)?([^\n]+)/i ;
const host = regex . exec ( url ) [ 1 ] ;
const socketProtocol = /^http:\/\//i . test ( url ) ? 'ws' : 'wss' ;
return ` ${ socketProtocol } :// ${ host } ` ;
} ;
} , { } ] , 45 : [ function ( require , module , exports ) {
"use strict" ;
/ * *
* Adds zero to strings shorter than two characters .
*
* @ param { number } number
* @ returns { string }
* /
function pad ( number ) {
return number < 10 ? ` 0 ${ number } ` : number . toString ( ) ;
}
/ * *
* Returns a timestamp in the format of ` hours:minutes:seconds `
* /
module . exports = function getTimeStamp ( ) {
const date = new Date ( ) ;
const hours = pad ( date . getHours ( ) ) ;
const minutes = pad ( date . getMinutes ( ) ) ;
const seconds = pad ( date . getSeconds ( ) ) ;
return ` ${ hours } : ${ minutes } : ${ seconds } ` ;
} ;
} , { } ] , 46 : [ function ( require , module , exports ) {
"use strict" ;
module . exports = function has ( object , key ) {
return Object . prototype . hasOwnProperty . call ( object , key ) ;
} ;
} , { } ] , 47 : [ function ( require , module , exports ) {
"use strict" ;
/ * *
* Check if an object is a DOM element . Duck - typing based on ` nodeType ` .
*
* @ param { * } obj
* /
module . exports = function isDOMElement ( obj ) {
return ( obj == null ? void 0 : obj . nodeType ) === Node . ELEMENT _NODE ;
} ;
} , { } ] , 48 : [ function ( require , module , exports ) {
"use strict" ;
/**
 * Heuristic: did this XHR fail at the network level (as opposed to
 * receiving an HTTP error response)? True when the request never finished
 * normally (readyState between OPENED and DONE) or the status is 0.
 *
 * @param {XMLHttpRequest} xhr
 * @returns {boolean}
 */
function isNetworkError(xhr) {
  if (!xhr) {
    return false;
  }
  const unfinished = xhr.readyState !== 0 && xhr.readyState !== 4;
  return unfinished || xhr.status === 0;
}
module.exports = isNetworkError;
} , { } ] , 49 : [ function ( require , module , exports ) {
"use strict" ;
// ___Why not add the mime-types package?
// It's 19.7kB gzipped, and we only need mime types for well-known extensions (for file previews).
// ___Where to take new extensions from?
// https://github.com/jshttp/mime-db/blob/master/db.json
module . exports = {
md : 'text/markdown' ,
markdown : 'text/markdown' ,
mp4 : 'video/mp4' ,
mp3 : 'audio/mp3' ,
svg : 'image/svg+xml' ,
jpg : 'image/jpeg' ,
png : 'image/png' ,
gif : 'image/gif' ,
heic : 'image/heic' ,
heif : 'image/heif' ,
yaml : 'text/yaml' ,
yml : 'text/yaml' ,
csv : 'text/csv' ,
tsv : 'text/tab-separated-values' ,
tab : 'text/tab-separated-values' ,
avi : 'video/x-msvideo' ,
mks : 'video/x-matroska' ,
mkv : 'video/x-matroska' ,
mov : 'video/quicktime' ,
doc : 'application/msword' ,
docm : 'application/vnd.ms-word.document.macroenabled.12' ,
docx : 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' ,
dot : 'application/msword' ,
dotm : 'application/vnd.ms-word.template.macroenabled.12' ,
dotx : 'application/vnd.openxmlformats-officedocument.wordprocessingml.template' ,
xla : 'application/vnd.ms-excel' ,
xlam : 'application/vnd.ms-excel.addin.macroenabled.12' ,
xlc : 'application/vnd.ms-excel' ,
xlf : 'application/x-xliff+xml' ,
xlm : 'application/vnd.ms-excel' ,
xls : 'application/vnd.ms-excel' ,
xlsb : 'application/vnd.ms-excel.sheet.binary.macroenabled.12' ,
xlsm : 'application/vnd.ms-excel.sheet.macroenabled.12' ,
xlsx : 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' ,
xlt : 'application/vnd.ms-excel' ,
xltm : 'application/vnd.ms-excel.template.macroenabled.12' ,
xltx : 'application/vnd.openxmlformats-officedocument.spreadsheetml.template' ,
xlw : 'application/vnd.ms-excel' ,
txt : 'text/plain' ,
text : 'text/plain' ,
conf : 'text/plain' ,
log : 'text/plain' ,
pdf : 'application/pdf' ,
zip : 'application/zip' ,
'7z' : 'application/x-7z-compressed' ,
rar : 'application/x-rar-compressed' ,
tar : 'application/x-tar' ,
gz : 'application/gzip' ,
dmg : 'application/x-apple-diskimage'
} ;
} , { } ] , 50 : [ function ( require , module , exports ) {
"use strict" ;
module . exports = function settle ( promises ) {
const resolutions = [ ] ;
const rejections = [ ] ;
function resolved ( value ) {
resolutions . push ( value ) ;
}
function rejected ( error ) {
rejections . push ( error ) ;
}
const wait = Promise . all ( promises . map ( promise => promise . then ( resolved , rejected ) ) ) ;
return wait . then ( ( ) => {
return {
successful : resolutions ,
failed : rejections
} ;
} ) ;
} ;
} , { } ] , 51 : [ function ( require , module , exports ) {
"use strict" ;
/**
 * Converts an array-like or iterable list (e.g. FileList,
 * DataTransferItemList) into a real Array. Exported as a direct reference
 * to `Array.from`, so the optional map-function second argument still works.
 */
module . exports = Array . from ;
} , { } ] , 52 : [ function ( require , module , exports ) {
"use strict" ;
let _class , _temp ;
const BasePlugin = require ( '@uppy/core/lib/BasePlugin' ) ;
const {
nanoid
} = require ( 'nanoid/non-secure' ) ;
const {
Provider ,
RequestClient ,
Socket
} = require ( '@uppy/companion-client' ) ;
const emitSocketProgress = require ( '@uppy/utils/lib/emitSocketProgress' ) ;
const getSocketHost = require ( '@uppy/utils/lib/getSocketHost' ) ;
const settle = require ( '@uppy/utils/lib/settle' ) ;
const EventTracker = require ( '@uppy/utils/lib/EventTracker' ) ;
const ProgressTimeout = require ( '@uppy/utils/lib/ProgressTimeout' ) ;
const {
RateLimitedQueue ,
internalRateLimitedQueue
} = require ( '@uppy/utils/lib/RateLimitedQueue' ) ;
const NetworkError = require ( '@uppy/utils/lib/NetworkError' ) ;
const isNetworkError = require ( '@uppy/utils/lib/isNetworkError' ) ;
const locale = require ( './locale' ) ;
/**
 * Normalise whatever the upload produced (nothing, a string, a plain
 * object, or an Error) into an Error, attaching the XHR: wrapped in a
 * NetworkError when the request failed at the network level, otherwise
 * exposed as `error.request`.
 *
 * @param {XMLHttpRequest} xhr
 * @param {*} err - Raw error value from the upload attempt.
 * @returns {Error}
 */
function buildResponseError(xhr, err) {
  let error = err;
  // No error message
  if (!error) {
    error = new Error('Upload error');
  }
  // Got an error message string
  if (typeof error === 'string') {
    error = new Error(error);
  }
  // Got something else entirely: keep the original value on `.data`.
  if (!(error instanceof Error)) {
    error = Object.assign(new Error('Upload error'), { data: error });
  }
  if (isNetworkError(xhr)) {
    return new NetworkError(error, xhr);
  }
  error.request = xhr;
  return error;
}
/**
 * Set `data.type` in the blob to `file.meta.type`,
 * because we might have detected a more accurate file type in Uppy
 * https://stackoverflow.com/a/50875615
 *
 * @param {object} file File object with `data`, `size` and `meta` properties
 * @returns {object} blob updated with the new `type` set from `file.meta.type`
 */
function setTypeInBlob(file) {
  // Blob#slice over the full range re-creates the blob with a new MIME type.
  return file.data.slice(0, file.data.size, file.meta.type);
}
module . exports = ( _temp = _class = class XHRUpload extends BasePlugin {
// eslint-disable-next-line global-require
constructor ( uppy , opts ) {
super ( uppy , opts ) ;
this . type = 'uploader' ;
this . id = this . opts . id || 'XHRUpload' ;
this . title = 'XHRUpload' ;
this . defaultLocale = locale ; // Default options
const defaultOptions = {
formData : true ,
fieldName : opts . bundle ? 'files[]' : 'file' ,
method : 'post' ,
metaFields : null ,
responseUrlFieldName : 'url' ,
bundle : false ,
headers : { } ,
timeout : 30 * 1000 ,
limit : 5 ,
withCredentials : false ,
responseType : '' ,
/ * *
* @ typedef respObj
* @ property { string } responseText
* @ property { number } status
* @ property { string } statusText
* @ property { object . < string , string > } headers
*
* @ param { string } responseText the response body string
* @ param { XMLHttpRequest | respObj } response the response object ( XHR or similar )
* /
getResponseData ( responseText ) {
let parsedResponse = { } ;
try {
parsedResponse = JSON . parse ( responseText ) ;
} catch ( err ) {
uppy . log ( err ) ;
}
return parsedResponse ;
} ,
/ * *
*
* @ param { string } responseText the response body string
* @ param { XMLHttpRequest | respObj } response the response object ( XHR or similar )
* /
getResponseError ( _ , response ) {
let error = new Error ( 'Upload error' ) ;
if ( isNetworkError ( response ) ) {
error = new NetworkError ( error , response ) ;
}
return error ;
} ,
/ * *
* Check if the response from the upload endpoint indicates that the upload was successful .
*
* @ param { number } status the response status code
* /
validateStatus ( status ) {
return status >= 200 && status < 300 ;
}
} ;
this . opts = { ... defaultOptions ,
... opts
} ;
this . i18nInit ( ) ;
this . handleUpload = this . handleUpload . bind ( this ) ; // Simultaneous upload limiting is shared across all uploads with this plugin.
if ( internalRateLimitedQueue in this . opts ) {
this . requests = this . opts [ internalRateLimitedQueue ] ;
} else {
this . requests = new RateLimitedQueue ( this . opts . limit ) ;
}
if ( this . opts . bundle && ! this . opts . formData ) {
throw new Error ( '`opts.formData` must be true when `opts.bundle` is enabled.' ) ;
}
this . uploaderEvents = Object . create ( null ) ;
}
getOptions ( file ) {
const overrides = this . uppy . getState ( ) . xhrUpload ;
const {
headers
} = this . opts ;
const opts = { ... this . opts ,
... ( overrides || { } ) ,
... ( file . xhrUpload || { } ) ,
headers : { }
} ; // Support for `headers` as a function, only in the XHRUpload settings.
// Options set by other plugins in Uppy state or on the files themselves are still merged in afterward.
//
// ```js
// headers: (file) => ({ expires: file.meta.expires })
// ```
if ( typeof headers === 'function' ) {
opts . headers = headers ( file ) ;
} else {
Object . assign ( opts . headers , this . opts . headers ) ;
}
if ( overrides ) {
Object . assign ( opts . headers , overrides . headers ) ;
}
if ( file . xhrUpload ) {
Object . assign ( opts . headers , file . xhrUpload . headers ) ;
}
return opts ;
} // eslint-disable-next-line class-methods-use-this
addMetadata ( formData , meta , opts ) {
const metaFields = Array . isArray ( opts . metaFields ) ? opts . metaFields : Object . keys ( meta ) ; // Send along all fields by default.
metaFields . forEach ( item => {
formData . append ( item , meta [ item ] ) ;
} ) ;
}
createFormDataUpload ( file , opts ) {
const formPost = new FormData ( ) ;
this . addMetadata ( formPost , file . meta , opts ) ;
const dataWithUpdatedType = setTypeInBlob ( file ) ;
if ( file . name ) {
formPost . append ( opts . fieldName , dataWithUpdatedType , file . meta . name ) ;
} else {
formPost . append ( opts . fieldName , dataWithUpdatedType ) ;
}
return formPost ;
}
createBundledUpload ( files , opts ) {
const formPost = new FormData ( ) ;
const {
meta
} = this . uppy . getState ( ) ;
this . addMetadata ( formPost , meta , opts ) ;
files . forEach ( file => {
const options = this . getOptions ( file ) ;
const dataWithUpdatedType = setTypeInBlob ( file ) ;
if ( file . name ) {
formPost . append ( options . fieldName , dataWithUpdatedType , file . name ) ;
} else {
formPost . append ( options . fieldName , dataWithUpdatedType ) ;
}
} ) ;
return formPost ;
}
upload ( file , current , total ) {
const opts = this . getOptions ( file ) ;
this . uppy . log ( ` uploading ${ current } of ${ total } ` ) ;
return new Promise ( ( resolve , reject ) => {
this . uppy . emit ( 'upload-started' , file ) ;
const data = opts . formData ? this . createFormDataUpload ( file , opts ) : file . data ;
const xhr = new XMLHttpRequest ( ) ;
this . uploaderEvents [ file . id ] = new EventTracker ( this . uppy ) ;
const timer = new ProgressTimeout ( opts . timeout , ( ) => {
xhr . abort ( ) ;
queuedRequest . done ( ) ;
const error = new Error ( this . i18n ( 'timedOut' , {
seconds : Math . ceil ( opts . timeout / 1000 )
} ) ) ;
this . uppy . emit ( 'upload-error' , file , error ) ;
reject ( error ) ;
} ) ;
const id = nanoid ( ) ;
xhr . upload . addEventListener ( 'loadstart' , ( ) => {
this . uppy . log ( ` [XHRUpload] ${ id } started ` ) ;
} ) ;
xhr . upload . addEventListener ( 'progress' , ev => {
this . uppy . log ( ` [XHRUpload] ${ id } progress: ${ ev . loaded } / ${ ev . total } ` ) ; // Begin checking for timeouts when progress starts, instead of loading,
// to avoid timing out requests on browser concurrency queue
timer . progress ( ) ;
if ( ev . lengthComputable ) {
this . uppy . emit ( 'upload-progress' , file , {
uploader : this ,
bytesUploaded : ev . loaded ,
bytesTotal : ev . total
} ) ;
}
} ) ;
xhr . addEventListener ( 'load' , ev => {
this . uppy . log ( ` [XHRUpload] ${ id } finished ` ) ;
timer . done ( ) ;
queuedRequest . done ( ) ;
if ( this . uploaderEvents [ file . id ] ) {
this . uploaderEvents [ file . id ] . remove ( ) ;
this . uploaderEvents [ file . id ] = null ;
}
if ( opts . validateStatus ( ev . target . status , xhr . responseText , xhr ) ) {
const body = opts . getResponseData ( xhr . responseText , xhr ) ;
const uploadURL = body [ opts . responseUrlFieldName ] ;
const uploadResp = {
status : ev . target . status ,
body ,
uploadURL
} ;
this . uppy . emit ( 'upload-success' , file , uploadResp ) ;
if ( uploadURL ) {
this . uppy . log ( ` Download ${ file . name } from ${ uploadURL } ` ) ;
}
return resolve ( file ) ;
}
const body = opts . getResponseData ( xhr . responseText , xhr ) ;
const error = buildResponseError ( xhr , opts . getResponseError ( xhr . responseText , xhr ) ) ;
const response = {
status : ev . target . status ,
body
} ;
this . uppy . emit ( 'upload-error' , file , error , response ) ;
return reject ( error ) ;
} ) ;
xhr . addEventListener ( 'error' , ( ) => {
this . uppy . log ( ` [XHRUpload] ${ id } errored ` ) ;
timer . done ( ) ;
queuedRequest . done ( ) ;
if ( this . uploaderEvents [ file . id ] ) {
this . uploaderEvents [ file . id ] . remove ( ) ;
this . uploaderEvents [ file . id ] = null ;
}
const error = buildResponseError ( xhr , opts . getResponseError ( xhr . responseText , xhr ) ) ;
this . uppy . emit ( 'upload-error' , file , error ) ;
return reject ( error ) ;
} ) ;
xhr . open ( opts . method . toUpperCase ( ) , opts . endpoint , true ) ; // IE10 does not allow setting `withCredentials` and `responseType`
// before `open()` is called.
xhr . withCredentials = opts . withCredentials ;
if ( opts . responseType !== '' ) {
xhr . responseType = opts . responseType ;
}
const queuedRequest = this . requests . run ( ( ) => {
this . uppy . emit ( 'upload-started' , file ) ; // When using an authentication system like JWT, the bearer token goes as a header. This
// header needs to be fresh each time the token is refreshed so computing and setting the
// headers just before the upload starts enables this kind of authentication to work properly.
// Otherwise, half-way through the list of uploads the token could be stale and the upload would fail.
const currentOpts = this . getOptions ( file ) ;
Object . keys ( currentOpts . headers ) . forEach ( header => {
xhr . setRequestHeader ( header , currentOpts . headers [ header ] ) ;
} ) ;
xhr . send ( data ) ;
return ( ) => {
timer . done ( ) ;
xhr . abort ( ) ;
} ;
} ) ;
this . onFileRemove ( file . id , ( ) => {
queuedRequest . abort ( ) ;
reject ( new Error ( 'File removed' ) ) ;
} ) ;
this . onCancelAll ( file . id , ( ) => {
queuedRequest . abort ( ) ;
reject ( new Error ( 'Upload cancelled' ) ) ;
} ) ;
} ) ;
}
/**
 * Upload a remote file (e.g. one picked from a Companion provider) by asking
 * Companion to perform the upload, then tracking it over a WebSocket.
 *
 * Resolves on success, or with a message when the file is removed/cancelled;
 * rejects on request or upload errors.
 *
 * @param {object} file the Uppy file object (must carry `file.remote` info)
 * @returns {Promise}
 */
uploadRemote(file) {
  const opts = this.getOptions(file);
  return new Promise((resolve, reject) => {
    this.uppy.emit('upload-started', file);

    const fields = {};
    // Send along all meta fields by default, or only the configured subset.
    const metaFields = Array.isArray(opts.metaFields) ? opts.metaFields
      : Object.keys(file.meta);
    metaFields.forEach((name) => {
      fields[name] = file.meta[name];
    });

    // Provider-backed files use the Provider client; plain remote URLs use
    // the generic RequestClient.
    const Client = file.remote.providerOptions.provider ? Provider : RequestClient;
    const client = new Client(this.uppy, file.remote.providerOptions);
    client.post(file.remote.url, {
      ...file.remote.body,
      endpoint: opts.endpoint,
      size: file.data.size,
      fieldname: opts.fieldName,
      metadata: fields,
      httpMethod: opts.method,
      useFormData: opts.formData,
      headers: opts.headers,
    }).then((res) => {
      const { token } = res;
      const host = getSocketHost(file.remote.companionUrl);
      const socket = new Socket({
        target: ` ${host} /api/ ${token} `,
        autoOpen: false,
      });

      this.uploaderEvents[file.id] = new EventTracker(this.uppy);

      // NOTE: `queuedRequest` is assigned further down; these callbacks only
      // fire after the request has been queued, so the reference is safe.
      this.onFileRemove(file.id, () => {
        socket.send('cancel', {});
        queuedRequest.abort();
        resolve(` upload ${file.id} was removed `);
      });

      this.onCancelAll(file.id, () => {
        socket.send('cancel', {});
        queuedRequest.abort();
        resolve(` upload ${file.id} was canceled `);
      });

      // A pause/resume round-trip asks Companion to restart the upload.
      this.onRetry(file.id, () => {
        socket.send('pause', {});
        socket.send('resume', {});
      });

      this.onRetryAll(file.id, () => {
        socket.send('pause', {});
        socket.send('resume', {});
      });

      socket.on('progress', (progressData) => emitSocketProgress(this, progressData, file));

      socket.on('success', (data) => {
        const body = opts.getResponseData(data.response.responseText, data.response);
        const uploadURL = body[opts.responseUrlFieldName];
        const uploadResp = {
          status: data.response.status,
          body,
          uploadURL,
        };
        this.uppy.emit('upload-success', file, uploadResp);
        queuedRequest.done();
        if (this.uploaderEvents[file.id]) {
          this.uploaderEvents[file.id].remove();
          this.uploaderEvents[file.id] = null;
        }
        return resolve();
      });

      socket.on('error', (errData) => {
        const resp = errData.response;
        // Prefer the structured response error; otherwise wrap the socket
        // error message, preserving the original as `cause`.
        const error = resp ? opts.getResponseError(resp.responseText, resp) : Object.assign(new Error(errData.error.message), {
          cause: errData.error,
        });
        this.uppy.emit('upload-error', file, error);
        queuedRequest.done();
        if (this.uploaderEvents[file.id]) {
          this.uploaderEvents[file.id].remove();
          this.uploaderEvents[file.id] = null;
        }
        reject(error);
      });

      // Defer opening the socket until a slot in the rate-limited queue frees up.
      const queuedRequest = this.requests.run(() => {
        socket.open();
        if (file.isPaused) {
          socket.send('pause', {});
        }
        return () => socket.close();
      });
    }).catch((err) => {
      this.uppy.emit('upload-error', file, err);
      reject(err);
    });
  });
}
/**
 * Upload every file in a single multipart request (`bundle: true` mode).
 *
 * Per-file progress is approximated by distributing the overall
 * loaded/total ratio across each file's size. The rate-limited queue is
 * NOT used here: the bundle is sent immediately as one request.
 *
 * @param {object[]} files Uppy file objects to bundle
 * @returns {Promise}
 */
uploadBundle(files) {
  return new Promise((resolve, reject) => {
    const {
      endpoint
    } = this.opts;
    const {
      method
    } = this.opts;
    const optsFromState = this.uppy.getState().xhrUpload;
    const formData = this.createBundledUpload(files, {
      ...this.opts,
      ...(optsFromState || {}),
    });
    const xhr = new XMLHttpRequest();

    // Abort and fail every file when no progress is seen for `timeout` ms.
    // (`emitError` is declared below; the callback only runs later.)
    const timer = new ProgressTimeout(this.opts.timeout, () => {
      xhr.abort();
      const error = new Error(this.i18n('timedOut', {
        seconds: Math.ceil(this.opts.timeout / 1000),
      }));
      emitError(error);
      reject(error);
    });

    // Broadcast an error to every file in the bundle.
    const emitError = (error) => {
      files.forEach((file) => {
        this.uppy.emit('upload-error', file, error);
      });
    };

    xhr.upload.addEventListener('loadstart', () => {
      this.uppy.log('[XHRUpload] started uploading bundle');
      timer.progress();
    });

    xhr.upload.addEventListener('progress', (ev) => {
      timer.progress();
      if (!ev.lengthComputable) { return; }
      // Approximate per-file progress from the bundle's overall ratio.
      files.forEach((file) => {
        this.uppy.emit('upload-progress', file, {
          uploader: this,
          bytesUploaded: ev.loaded / ev.total * file.size,
          bytesTotal: file.size,
        });
      });
    });

    xhr.addEventListener('load', (ev) => {
      timer.done();
      if (this.opts.validateStatus(ev.target.status, xhr.responseText, xhr)) {
        const body = this.opts.getResponseData(xhr.responseText, xhr);
        const uploadResp = {
          status: ev.target.status,
          body,
        };
        // Every file in the bundle shares the same response.
        files.forEach((file) => {
          this.uppy.emit('upload-success', file, uploadResp);
        });
        return resolve();
      }
      const error = this.opts.getResponseError(xhr.responseText, xhr) || new Error('Upload error');
      error.request = xhr;
      emitError(error);
      return reject(error);
    });

    xhr.addEventListener('error', () => {
      timer.done();
      const error = this.opts.getResponseError(xhr.responseText, xhr) || new Error('Upload error');
      emitError(error);
      return reject(error);
    });

    // NOTE(review): this listener is registered directly on `this.uppy` and
    // never removed, so repeated bundle uploads accumulate handlers —
    // confirm whether this matters for long-lived Uppy instances.
    this.uppy.on('cancel-all', () => {
      timer.done();
      xhr.abort();
    });

    xhr.open(method.toUpperCase(), endpoint, true);
    // IE10 does not allow setting `withCredentials` and `responseType`
    // before `open()` is called.
    xhr.withCredentials = this.opts.withCredentials;
    if (this.opts.responseType !== '') {
      xhr.responseType = this.opts.responseType;
    }
    Object.keys(this.opts.headers).forEach((header) => {
      xhr.setRequestHeader(header, this.opts.headers[header]);
    });
    xhr.send(formData);
    files.forEach((file) => {
      this.uppy.emit('upload-started', file);
    });
  });
}
uploadFiles ( files ) {
const promises = files . map ( ( file , i ) => {
const current = parseInt ( i , 10 ) + 1 ;
const total = files . length ;
if ( file . error ) {
return Promise . reject ( new Error ( file . error ) ) ;
}
if ( file . isRemote ) {
return this . uploadRemote ( file , current , total ) ;
}
return this . upload ( file , current , total ) ;
} ) ;
return settle ( promises ) ;
}
onFileRemove ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'file-removed' , file => {
if ( fileID === file . id ) { cb ( file . id ) ; }
} ) ;
}
onRetry ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'upload-retry' , targetFileID => {
if ( fileID === targetFileID ) {
cb ( ) ;
}
} ) ;
}
onRetryAll ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'retry-all' , ( ) => {
if ( ! this . uppy . getFile ( fileID ) ) { return ; }
cb ( ) ;
} ) ;
}
onCancelAll ( fileID , cb ) {
this . uploaderEvents [ fileID ] . on ( 'cancel-all' , ( ) => {
if ( ! this . uppy . getFile ( fileID ) ) { return ; }
cb ( ) ;
} ) ;
}
handleUpload ( fileIDs ) {
if ( fileIDs . length === 0 ) {
this . uppy . log ( '[XHRUpload] No files to upload!' ) ;
return Promise . resolve ( ) ;
} // No limit configured by the user, and no RateLimitedQueue passed in by a "parent" plugin
// (basically just AwsS3) using the internal symbol
if ( this . opts . limit === 0 && ! this . opts [ internalRateLimitedQueue ] ) {
this . uppy . log ( '[XHRUpload] When uploading multiple files at once, consider setting the `limit` option (to `10` for example), to limit the number of concurrent uploads, which helps prevent memory and network issues: https://uppy.io/docs/xhr-upload/#limit-0' , 'warning' ) ;
}
this . uppy . log ( '[XHRUpload] Uploading...' ) ;
const files = fileIDs . map ( fileID => this . uppy . getFile ( fileID ) ) ;
if ( this . opts . bundle ) {
// if bundle: true, we don’ t support remote uploads
const isSomeFileRemote = files . some ( file => file . isRemote ) ;
if ( isSomeFileRemote ) {
throw new Error ( 'Can’ t upload remote files when the `bundle: true` option is set' ) ;
}
if ( typeof this . opts . headers === 'function' ) {
throw new TypeError ( '`headers` may not be a function when the `bundle: true` option is set' ) ;
}
return this . uploadBundle ( files ) ;
}
return this . uploadFiles ( files ) . then ( ( ) => null ) ;
}
install ( ) {
if ( this . opts . bundle ) {
const {
capabilities
} = this . uppy . getState ( ) ;
this . uppy . setState ( {
capabilities : { ... capabilities ,
individualCancellation : false
}
} ) ;
}
this . uppy . addUploader ( this . handleUpload ) ;
}
uninstall ( ) {
if ( this . opts . bundle ) {
const {
capabilities
} = this . uppy . getState ( ) ;
this . uppy . setState ( {
capabilities : { ... capabilities ,
individualCancellation : true
}
} ) ;
}
this . uppy . removeUploader ( this . handleUpload ) ;
}
} , _class . VERSION = "2.0.7" , _temp ) ;
} , { "./locale" : 53 , "@uppy/companion-client" : 13 , "@uppy/core/lib/BasePlugin" : 15 , "@uppy/utils/lib/EventTracker" : 27 , "@uppy/utils/lib/NetworkError" : 28 , "@uppy/utils/lib/ProgressTimeout" : 29 , "@uppy/utils/lib/RateLimitedQueue" : 30 , "@uppy/utils/lib/emitSocketProgress" : 33 , "@uppy/utils/lib/getSocketHost" : 44 , "@uppy/utils/lib/isNetworkError" : 48 , "@uppy/utils/lib/settle" : 50 , "nanoid/non-secure" : 57 } ] , 53 : [ function ( require , module , exports ) {
"use strict" ;
module . exports = {
strings : {
// Shown in the Informer if an upload is being canceled because it stalled for too long.
timedOut : 'Upload stalled for %{seconds} seconds, aborting.'
}
} ;
} , { } ] , 54 : [ function ( require , module , exports ) {
( function ( global ) { ( function ( ) {
/ * *
* lodash ( Custom Build ) < https : //lodash.com/>
* Build : ` lodash modularize exports="npm" -o ./ `
* Copyright jQuery Foundation and other contributors < https : //jquery.org/>
* Released under MIT license < https : //lodash.com/license>
* Based on Underscore . js 1.8 . 3 < http : //underscorejs.org/LICENSE>
* Copyright Jeremy Ashkenas , DocumentCloud and Investigative Reporters & Editors
* /
/** Used as the `TypeError` message for "Functions" methods. */
let FUNC_ERROR_TEXT = 'Expected a function';

/** Used as references for various `Number` constants (`0 / 0` is `NaN`). */
let NAN = 0 / 0;

/** `Object#toString` result references. */
let symbolTag = '[object Symbol]';

/** Used to match leading and trailing whitespace. */
let reTrim = /^\s+|\s+$/g;

/** Used to detect bad signed hexadecimal string values. */
let reIsBadHex = /^[-+]0x[0-9a-f]+$/i;

/** Used to detect binary string values. */
let reIsBinary = /^0b[01]+$/i;

/** Used to detect octal string values. */
let reIsOctal = /^0o[0-7]+$/i;

/** Built-in method references without a dependency on `root`. */
let freeParseInt = parseInt;

/** Detect free variable `global` from Node.js. */
let freeGlobal = typeof global === 'object' && global && global.Object === Object && global;

/** Detect free variable `self` (browsers and web workers). */
let freeSelf = typeof self === 'object' && self && self.Object === Object && self;

/** Used as a reference to the global object (falls back to sloppy-mode `this`). */
let root = freeGlobal || freeSelf || Function('return this')();

/** Used for built-in method references. */
let objectProto = Object.prototype;

/**
 * Used to resolve the
 * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
 * of values.
 */
let objectToString = objectProto.toString;

/* Built-in method references for those with the same name as other `lodash` methods. */
let nativeMax = Math.max,
    nativeMin = Math.min;
/ * *
* Gets the timestamp of the number of milliseconds that have elapsed since
* the Unix epoch ( 1 January 1970 00 : 00 : 00 UTC ) .
*
* @ static
* @ memberOf _
* @ since 2.4 . 0
* @ category Date
* @ returns { number } Returns the timestamp .
* @ example
*
* _ . defer ( function ( stamp ) {
* console . log ( _ . now ( ) - stamp ) ;
* } , _ . now ( ) ) ;
* // => Logs the number of milliseconds it took for the deferred invocation.
* /
/** Current timestamp in milliseconds since the Unix epoch (via the resolved global). */
const now = () => root.Date.now();
/ * *
* Creates a debounced function that delays invoking ` func ` until after ` wait `
* milliseconds have elapsed since the last time the debounced function was
* invoked . The debounced function comes with a ` cancel ` method to cancel
* delayed ` func ` invocations and a ` flush ` method to immediately invoke them .
* Provide ` options ` to indicate whether ` func ` should be invoked on the
* leading and / or trailing edge of the ` wait ` timeout . The ` func ` is invoked
* with the last arguments provided to the debounced function . Subsequent
* calls to the debounced function return the result of the last ` func `
* invocation .
*
* * * Note : * * If ` leading ` and ` trailing ` options are ` true ` , ` func ` is
* invoked on the trailing edge of the timeout only if the debounced function
* is invoked more than once during the ` wait ` timeout .
*
* If ` wait ` is ` 0 ` and ` leading ` is ` false ` , ` func ` invocation is deferred
* until to the next tick , similar to ` setTimeout ` with a timeout of ` 0 ` .
*
* See [ David Corbacho ' s article ] ( https : //css-tricks.com/debouncing-throttling-explained-examples/)
* for details over the differences between ` _.debounce ` and ` _.throttle ` .
*
* @ static
* @ memberOf _
* @ since 0.1 . 0
* @ category Function
* @ param { Function } func The function to debounce .
* @ param { number } [ wait = 0 ] The number of milliseconds to delay .
* @ param { Object } [ options = { } ] The options object .
* @ param { boolean } [ options . leading = false ]
* Specify invoking on the leading edge of the timeout .
* @ param { number } [ options . maxWait ]
* The maximum time ` func ` is allowed to be delayed before it ' s invoked .
* @ param { boolean } [ options . trailing = true ]
* Specify invoking on the trailing edge of the timeout .
* @ returns { Function } Returns the new debounced function .
* @ example
*
* // Avoid costly calculations while the window size is in flux.
* jQuery ( window ) . on ( 'resize' , _ . debounce ( calculateLayout , 150 ) ) ;
*
* // Invoke `sendMail` when clicked, debouncing subsequent calls.
* jQuery ( element ) . on ( 'click' , _ . debounce ( sendMail , 300 , {
* 'leading' : true ,
* 'trailing' : false
* } ) ) ;
*
* // Ensure `batchLog` is invoked once after 1 second of debounced calls.
* var debounced = _ . debounce ( batchLog , 250 , { 'maxWait' : 1000 } ) ;
* var source = new EventSource ( '/stream' ) ;
* jQuery ( source ) . on ( 'message' , debounced ) ;
*
* // Cancel the trailing debounced invocation.
* jQuery ( window ) . on ( 'popstate' , debounced . cancel ) ;
* /
function debounce(func, wait, options) {
  let lastArgs,            // args of the most recent debounced() call
      lastThis,            // `this` of the most recent debounced() call
      maxWait,             // cap on total delay, when `options.maxWait` is set
      result,              // last return value of `func`
      timerId,             // pending trailing-edge timer
      lastCallTime,        // timestamp of the most recent debounced() call
      lastInvokeTime = 0,  // timestamp of the most recent `func` invocation
      leading = false,
      maxing = false,
      trailing = true;

  if (typeof func !== 'function') {
    throw new TypeError(FUNC_ERROR_TEXT);
  }
  wait = toNumber(wait) || 0;
  if (isObject(options)) {
    leading = !!options.leading;
    maxing = 'maxWait' in options;
    // `maxWait` can never be shorter than `wait`.
    maxWait = maxing ? nativeMax(toNumber(options.maxWait) || 0, wait) : maxWait;
    trailing = 'trailing' in options ? !!options.trailing : trailing;
  }

  // Invoke `func` with the captured `this`/args and record the invoke time.
  function invokeFunc(time) {
    let args = lastArgs,
        thisArg = lastThis;
    lastArgs = lastThis = undefined;
    lastInvokeTime = time;
    result = func.apply(thisArg, args);
    return result;
  }

  function leadingEdge(time) {
    // Reset any `maxWait` timer.
    lastInvokeTime = time;
    // Start the timer for the trailing edge.
    timerId = setTimeout(timerExpired, wait);
    // Invoke the leading edge.
    return leading ? invokeFunc(time) : result;
  }

  // How long to wait before the next timer check, honoring `maxWait`.
  function remainingWait(time) {
    let timeSinceLastCall = time - lastCallTime,
        timeSinceLastInvoke = time - lastInvokeTime,
        result = wait - timeSinceLastCall;
    return maxing ? nativeMin(result, maxWait - timeSinceLastInvoke) : result;
  }

  function shouldInvoke(time) {
    let timeSinceLastCall = time - lastCallTime,
        timeSinceLastInvoke = time - lastInvokeTime;
    // Either this is the first call, activity has stopped and we're at the
    // trailing edge, the system time has gone backwards and we're treating
    // it as the trailing edge, or we've hit the `maxWait` limit.
    return (lastCallTime === undefined || (timeSinceLastCall >= wait) ||
      (timeSinceLastCall < 0) || (maxing && timeSinceLastInvoke >= maxWait));
  }

  function timerExpired() {
    let time = now();
    if (shouldInvoke(time)) {
      return trailingEdge(time);
    }
    // Restart the timer.
    timerId = setTimeout(timerExpired, remainingWait(time));
  }

  function trailingEdge(time) {
    timerId = undefined;
    // Only invoke if we have `lastArgs` which means `func` has been
    // debounced at least once.
    if (trailing && lastArgs) {
      return invokeFunc(time);
    }
    lastArgs = lastThis = undefined;
    return result;
  }

  // Cancel any pending invocation and reset all bookkeeping.
  function cancel() {
    if (timerId !== undefined) {
      clearTimeout(timerId);
    }
    lastInvokeTime = 0;
    lastArgs = lastCallTime = lastThis = timerId = undefined;
  }

  // Immediately run a pending trailing invocation, if any.
  function flush() {
    return timerId === undefined ? result : trailingEdge(now());
  }

  function debounced() {
    let time = now(),
        isInvoking = shouldInvoke(time);
    lastArgs = arguments;
    lastThis = this;
    lastCallTime = time;
    if (isInvoking) {
      if (timerId === undefined) {
        return leadingEdge(lastCallTime);
      }
      if (maxing) {
        // Handle invocations in a tight loop.
        timerId = setTimeout(timerExpired, wait);
        return invokeFunc(lastCallTime);
      }
    }
    if (timerId === undefined) {
      timerId = setTimeout(timerExpired, wait);
    }
    return result;
  }
  debounced.cancel = cancel;
  debounced.flush = flush;
  return debounced;
}
/ * *
* Creates a throttled function that only invokes ` func ` at most once per
* every ` wait ` milliseconds . The throttled function comes with a ` cancel `
* method to cancel delayed ` func ` invocations and a ` flush ` method to
* immediately invoke them . Provide ` options ` to indicate whether ` func `
* should be invoked on the leading and / or trailing edge of the ` wait `
* timeout . The ` func ` is invoked with the last arguments provided to the
* throttled function . Subsequent calls to the throttled function return the
* result of the last ` func ` invocation .
*
* * * Note : * * If ` leading ` and ` trailing ` options are ` true ` , ` func ` is
* invoked on the trailing edge of the timeout only if the throttled function
* is invoked more than once during the ` wait ` timeout .
*
* If ` wait ` is ` 0 ` and ` leading ` is ` false ` , ` func ` invocation is deferred
* until to the next tick , similar to ` setTimeout ` with a timeout of ` 0 ` .
*
* See [ David Corbacho ' s article ] ( https : //css-tricks.com/debouncing-throttling-explained-examples/)
* for details over the differences between ` _.throttle ` and ` _.debounce ` .
*
* @ static
* @ memberOf _
* @ since 0.1 . 0
* @ category Function
* @ param { Function } func The function to throttle .
* @ param { number } [ wait = 0 ] The number of milliseconds to throttle invocations to .
* @ param { Object } [ options = { } ] The options object .
* @ param { boolean } [ options . leading = true ]
* Specify invoking on the leading edge of the timeout .
* @ param { boolean } [ options . trailing = true ]
* Specify invoking on the trailing edge of the timeout .
* @ returns { Function } Returns the new throttled function .
* @ example
*
* // Avoid excessively updating the position while scrolling.
* jQuery ( window ) . on ( 'scroll' , _ . throttle ( updatePosition , 100 ) ) ;
*
* // Invoke `renewToken` when the click event is fired, but not more than once every 5 minutes.
* var throttled = _ . throttle ( renewToken , 300000 , { 'trailing' : false } ) ;
* jQuery ( element ) . on ( 'click' , throttled ) ;
*
* // Cancel the trailing throttled invocation.
* jQuery ( window ) . on ( 'popstate' , throttled . cancel ) ;
* /
/**
 * Creates a throttled function that invokes `func` at most once per `wait`
 * milliseconds. Implemented as a `debounce` whose `maxWait` equals `wait`,
 * so the trailing edge can never drift past one period.
 *
 * @param {Function} func the function to throttle
 * @param {number} [wait=0] milliseconds to throttle invocations to
 * @param {Object} [options={}] `leading` (default true) / `trailing` (default true)
 * @returns {Function} the new throttled function (with `cancel`/`flush`)
 */
function throttle(func, wait, options) {
  if (typeof func !== 'function') {
    throw new TypeError(FUNC_ERROR_TEXT);
  }
  let leading = true;
  let trailing = true;
  if (isObject(options)) {
    leading = 'leading' in options ? !!options.leading : leading;
    trailing = 'trailing' in options ? !!options.trailing : trailing;
  }
  return debounce(func, wait, {
    leading,
    maxWait: wait,
    trailing,
  });
}
/ * *
* Checks if ` value ` is the
* [ language type ] ( http : //www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
* of ` Object ` . ( e . g . arrays , functions , objects , regexes , ` new Number(0) ` , and ` new String('') ` )
*
* @ static
* @ memberOf _
* @ since 0.1 . 0
* @ category Lang
* @ param { * } value The value to check .
* @ returns { boolean } Returns ` true ` if ` value ` is an object , else ` false ` .
* @ example
*
* _ . isObject ( { } ) ;
* // => true
*
* _ . isObject ( [ 1 , 2 , 3 ] ) ;
* // => true
*
* _ . isObject ( _ . noop ) ;
* // => true
*
* _ . isObject ( null ) ;
* // => false
* /
/**
 * Checks if `value` is the language type of `Object`
 * (e.g. arrays, functions, objects, regexes, `new Number(0)`, `new String('')`).
 *
 * @param {*} value the value to check
 * @returns {boolean} `true` if `value` is an object, else `false`
 */
function isObject(value) {
  if (!value) {
    return false;
  }
  const type = typeof value;
  return type === 'object' || type === 'function';
}
/ * *
* Checks if ` value ` is object - like . A value is object - like if it ' s not ` null `
* and has a ` typeof ` result of "object" .
*
* @ static
* @ memberOf _
* @ since 4.0 . 0
* @ category Lang
* @ param { * } value The value to check .
* @ returns { boolean } Returns ` true ` if ` value ` is object - like , else ` false ` .
* @ example
*
* _ . isObjectLike ( { } ) ;
* // => true
*
* _ . isObjectLike ( [ 1 , 2 , 3 ] ) ;
* // => true
*
* _ . isObjectLike ( _ . noop ) ;
* // => false
*
* _ . isObjectLike ( null ) ;
* // => false
* /
/**
 * Checks if `value` is object-like: not `null` and `typeof` is "object".
 *
 * @param {*} value the value to check
 * @returns {boolean} `true` if `value` is object-like, else `false`
 */
function isObjectLike(value) {
  return typeof value === 'object' && Boolean(value);
}
/ * *
* Checks if ` value ` is classified as a ` Symbol ` primitive or object .
*
* @ static
* @ memberOf _
* @ since 4.0 . 0
* @ category Lang
* @ param { * } value The value to check .
* @ returns { boolean } Returns ` true ` if ` value ` is a symbol , else ` false ` .
* @ example
*
* _ . isSymbol ( Symbol . iterator ) ;
* // => true
*
* _ . isSymbol ( 'abc' ) ;
* // => false
* /
/**
 * Checks if `value` is classified as a `Symbol` primitive or boxed object.
 *
 * @param {*} value the value to check
 * @returns {boolean} `true` if `value` is a symbol, else `false`
 */
function isSymbol(value) {
  if (typeof value === 'symbol') {
    return true;
  }
  // Boxed symbols report `typeof` "object" but stringify to the symbol tag.
  return isObjectLike(value) && objectToString.call(value) === symbolTag;
}
/ * *
* Converts ` value ` to a number .
*
* @ static
* @ memberOf _
* @ since 4.0 . 0
* @ category Lang
* @ param { * } value The value to process .
* @ returns { number } Returns the number .
* @ example
*
* _ . toNumber ( 3.2 ) ;
* // => 3.2
*
* _ . toNumber ( Number . MIN _VALUE ) ;
* // => 5e-324
*
* _ . toNumber ( Infinity ) ;
* // => Infinity
*
* _ . toNumber ( '3.2' ) ;
* // => 3.2
* /
/**
 * Converts `value` to a number. Symbols become `NaN`; objects are reduced
 * via `valueOf`; binary/octal strings are parsed explicitly and bad signed
 * hex strings become `NaN`, matching `Number()` semantics otherwise.
 *
 * @param {*} value the value to process
 * @returns {number} the converted number
 */
function toNumber(value) {
  if (typeof value === 'number') {
    return value;
  }
  if (isSymbol(value)) {
    return NAN;
  }
  if (isObject(value)) {
    const primitive = typeof value.valueOf === 'function' ? value.valueOf() : value;
    value = isObject(primitive) ? (primitive + '') : primitive;
  }
  if (typeof value !== 'string') {
    return value === 0 ? value : +value;
  }
  value = value.replace(reTrim, '');
  const isBinary = reIsBinary.test(value);
  if (isBinary || reIsOctal.test(value)) {
    return freeParseInt(value.slice(2), isBinary ? 2 : 8);
  }
  return reIsBadHex.test(value) ? NAN : +value;
}
module . exports = throttle ;
} ) . call ( this ) ; } ) . call ( this , typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : { } ) ;
} , { } ] , 55 : [ function ( require , module , exports ) {
let wildcard = require ( 'wildcard' ) ;
let reMimePartSplit = /[\/\+\.]/ ;
/ * *
# mime - match
A simple function to checker whether a target mime type matches a mime - type
pattern ( e . g . image / jpeg matches image / jpeg OR image / * ) .
# # Example Usage
<< < example . js
* * /
module . exports = function ( target , pattern ) {
function test ( pattern ) {
let result = wildcard ( pattern , target , reMimePartSplit ) ;
// ensure that we have a valid mime type (should have two parts)
return result && result . length >= 2 ;
}
return pattern ? test ( pattern . split ( ';' ) [ 0 ] ) : test ;
} ;
} , { "wildcard" : 59 } ] , 56 : [ function ( require , module , exports ) {
/ * *
* Create an event emitter with namespaces
* @ name createNamespaceEmitter
* @ example
* var emitter = require ( './index' ) ( )
*
* emitter . on ( '*' , function ( ) {
* console . log ( 'all events emitted' , this . event )
* } )
*
* emitter . on ( 'example' , function ( ) {
* console . log ( 'example event emitted' )
* } )
* /
module . exports = function createNamespaceEmitter ( ) {
let emitter = { } ;
let _fns = emitter . _fns = { } ;
/ * *
* Emit an event . Optionally namespace the event . Handlers are fired in the order in which they were added with exact matches taking precedence . Separate the namespace and event with a ` : `
* @ name emit
* @ param { String } event – the name of the event , with optional namespace
* @ param { ... * } data – up to 6 arguments that are passed to the event listener
* @ example
* emitter . emit ( 'example' )
* emitter . emit ( 'demo:test' )
* emitter . emit ( 'data' , { example : true } , 'a string' , 1 )
* /
emitter . emit = function emit ( event , arg1 , arg2 , arg3 , arg4 , arg5 , arg6 ) {
let toEmit = getListeners ( event ) ;
if ( toEmit . length ) {
emitAll ( event , toEmit , [ arg1 , arg2 , arg3 , arg4 , arg5 , arg6 ] ) ;
}
} ;
/ * *
* Create en event listener .
* @ name on
* @ param { String } event
* @ param { Function } fn
* @ example
* emitter . on ( 'example' , function ( ) { } )
* emitter . on ( 'demo' , function ( ) { } )
* /
emitter . on = function on ( event , fn ) {
if ( ! _fns [ event ] ) {
_fns [ event ] = [ ] ;
}
_fns [ event ] . push ( fn ) ;
} ;
/ * *
* Create en event listener that fires once .
* @ name once
* @ param { String } event
* @ param { Function } fn
* @ example
* emitter . once ( 'example' , function ( ) { } )
* emitter . once ( 'demo' , function ( ) { } )
* /
emitter . once = function once ( event , fn ) {
function one ( ) {
fn . apply ( this , arguments ) ;
emitter . off ( event , one ) ;
}
this . on ( event , one ) ;
} ;
/ * *
* Stop listening to an event . Stop all listeners on an event by only passing the event name . Stop a single listener by passing that event handler as a callback .
* You must be explicit about what will be unsubscribed : ` emitter.off('demo') ` will unsubscribe an ` emitter.on('demo') ` listener ,
* ` emitter.off('demo:example') ` will unsubscribe an ` emitter.on('demo:example') ` listener
* @ name off
* @ param { String } event
* @ param { Function } [ fn ] – the specific handler
* @ example
* emitter . off ( 'example' )
* emitter . off ( 'demo' , function ( ) { } )
* /
// NOTE(review): this method reads `this._fns` while `on`/`getListeners` use
// the closure variable `_fns` — presumably both name the same table on the
// emitter; confirm against the upstream namespace-emitter source.
emitter.off = function off(event, fn) {
  const remaining = [];
  if (event && fn) {
    // Keep every handler except the one being removed.
    const handlers = this._fns[event];
    const total = handlers ? handlers.length : 0;
    for (let idx = 0; idx < total; idx++) {
      if (handlers[idx] !== fn) {
        remaining.push(handlers[idx]);
      }
    }
  }
  // With no specific handler (or none left), the whole event entry is dropped.
  if (remaining.length) {
    this._fns[event] = remaining;
  } else {
    delete this._fns[event];
  }
};
// Collect the handlers that should fire for event name `e`: exact-name
// listeners first, then `*` listeners, and — for a namespaced name like
// `ns:evt` — the listeners registered under the bare namespace `ns`
// (finding the namespace key stops the scan).
function getListeners(e) {
  let found = _fns[e] ? _fns[e] : [];
  const sep = e.indexOf(':');
  const lookup = sep === -1 ? [e] : [e.substring(0, sep), e.substring(sep + 1)];
  for (const key of Object.keys(_fns)) {
    if (key === '*') {
      found = found.concat(_fns[key]);
    }
    if (lookup.length === 2 && lookup[0] === key) {
      found = found.concat(_fns[key]);
      break;
    }
  }
  return found;
}
// Fire every handler in `fns` for event `e`. Each handler is tagged with the
// event name via its `event` property and invoked with `this` bound to the
// handler itself; iteration stops at the first falsy entry.
function emitAll(e, fns, args) {
  const total = fns.length;
  for (let idx = 0; idx < total; idx++) {
    const fn = fns[idx];
    if (!fn) {
      break;
    }
    fn.event = e;
    fn.apply(fn, args);
  }
}
return emitter ;
} ;
} , { } ] , 57 : [ function ( require , module , exports ) {
// This alphabet uses `A-Za-z0-9_-` symbols. The genetic algorithm helped
// optimize the gzip compression for this alphabet.
let urlAlphabet =
'ModuleSymbhasOwnPr-0123456789ABCDEFGHNRVfgctiUvz_KqYTJkLxpZXIjQW' ;
let customAlphabet = ( alphabet , size ) => {
return ( ) => {
let id = '' ;
// A compact alternative for `for (var i = 0; i < step; i++)`.
let i = size ;
while ( i -- ) {
// `| 0` is more compact and faster than `Math.floor()`.
id += alphabet [ ( Math . random ( ) * alphabet . length ) | 0 ] ;
}
return id ;
} ;
} ;
let nanoid = ( size = 21 ) => {
let id = '' ;
// A compact alternative for `for (var i = 0; i < step; i++)`.
let i = size ;
while ( i -- ) {
// `| 0` is more compact and faster than `Math.floor()`.
id += urlAlphabet [ ( Math . random ( ) * 64 ) | 0 ] ;
}
return id ;
} ;
module . exports = { nanoid , customAlphabet } ;
} , { } ] , 58 : [ function ( require , module , exports ) {
// NOTE(review): module 58 is a minified, name-mangled vendor bundle. The
// vnode-style fields (`type`/`props`/`key`/`ref`, `__k`, `__e`) and the
// `debounceRendering` option suggest a Preact-like virtual-DOM renderer —
// verify against the upstream package before relying on that. The code
// appears truncated in this bundle (the last line ends mid-statement at
// `n [ l ] = nul`); do not hand-edit — regenerate from upstream instead.
let n , l , u , t , i , o , r , f , e = { } , c = [ ] , s = /acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i ; function a ( n , l ) { for ( let u in l ) { n [ u ] = l [ u ] ; } return n ; } function p ( n ) { let l = n . parentNode ; l && l . removeChild ( n ) ; } function v ( l , u , t ) { let i , o , r , f = { } ; for ( r in u ) { "key" == r ? i = u [ r ] : "ref" == r ? o = u [ r ] : f [ r ] = u [ r ] ; } if ( arguments . length > 2 && ( f . children = arguments . length > 3 ? n . call ( arguments , 2 ) : t ) , "function" === typeof l && null != l . defaultProps ) { for ( r in l . defaultProps ) { void 0 === f [ r ] && ( f [ r ] = l . defaultProps [ r ] ) ; } } return h ( l , f , i , o , null ) ; } function h ( n , t , i , o , r ) { let f = { type : n , props : t , key : i , ref : o , _ _k : null , _ _ : null , _ _b : 0 , _ _e : null , _ _d : void 0 , _ _c : null , _ _h : null , constructor : void 0 , _ _v : null == r ? ++ u : r } ; return null != l . vnode && l . vnode ( f ) , f ; } function y ( n ) { return n . children ; } function d ( n , l ) { this . props = n , this . context = l ; } function _ ( n , l ) { if ( null == l ) { return n . _ _ ? _ ( n . _ _ , n . _ _ . _ _k . indexOf ( n ) + 1 ) : null ; } for ( var u ; l < n . _ _k . length ; l ++ ) { if ( null != ( u = n . _ _k [ l ] ) && null != u . _ _e ) { return u . _ _e ; } } return "function" === typeof n . type ? _ ( n ) : null ; } function k ( n ) { let l , u ; if ( null != ( n = n . _ _ ) && null != n . _ _c ) { for ( n . _ _e = n . _ _c . base = null , l = 0 ; l < n . _ _k . length ; l ++ ) { if ( null != ( u = n . _ _k [ l ] ) && null != u . _ _e ) { n . _ _e = n . _ _c . base = u . _ _e ; break ; } } return k ( n ) ; } } function x ( n ) { ( ! n . _ _d && ( n . _ _d = ! 0 ) && i . push ( n ) && ! b . _ _r ++ || r !== l . debounceRendering ) && ( ( r = l . debounceRendering ) || o ) ( b ) ; } function b ( ) { for ( var n ; b . _ _r = i . length ; ) { n = i . 
sort ( function ( n , l ) { return n . _ _v . _ _b - l . _ _v . _ _b ; } ) , i = [ ] , n . some ( function ( n ) { let l , u , t , i , o , r ; n . _ _d && ( o = ( i = ( l = n ) . _ _v ) . _ _e , ( r = l . _ _P ) && ( u = [ ] , ( t = a ( { } , i ) ) . _ _v = i . _ _v + 1 , I ( r , i , t , l . _ _n , void 0 !== r . ownerSVGElement , null != i . _ _h ? [ o ] : null , u , null == o ? _ ( i ) : o , i . _ _h ) , T ( u , i ) , i . _ _e != o && k ( i ) ) ) ; } ) ; } } function m ( n , l , u , t , i , o , r , f , s , a ) { let p , v , d , k , x , b , m , A = t && t . _ _k || c , P = A . length ; for ( u . _ _k = [ ] , p = 0 ; p < l . length ; p ++ ) { if ( null != ( k = u . _ _k [ p ] = null == ( k = l [ p ] ) || "boolean" === typeof k ? null : "string" === typeof k || "number" === typeof k || "bigint" === typeof k ? h ( null , k , null , null , k ) : Array . isArray ( k ) ? h ( y , { children : k } , null , null , null ) : k . _ _b > 0 ? h ( k . type , k . props , k . key , null , k . _ _v ) : k ) ) { if ( k . _ _ = u , k . _ _b = u . _ _b + 1 , null === ( d = A [ p ] ) || d && k . key == d . key && k . type === d . type ) { A [ p ] = void 0 ; } else { for ( v = 0 ; v < P ; v ++ ) { if ( ( d = A [ v ] ) && k . key == d . key && k . type === d . type ) { A [ v ] = void 0 ; break ; } d = null ; } } I ( n , k , d = d || e , i , o , r , f , s , a ) , x = k . _ _e , ( v = k . ref ) && d . ref != v && ( m || ( m = [ ] ) , d . ref && m . push ( d . ref , null , k ) , m . push ( v , k . _ _c || x , k ) ) , null != x ? ( null == b && ( b = x ) , "function" === typeof k . type && null != k . _ _k && k . _ _k === d . _ _k ? k . _ _d = s = g ( k , s , n ) : s = w ( n , k , d , A , x , s ) , a || "option" !== u . type ? "function" === typeof u . type && ( u . _ _d = s ) : n . value = "" ) : s && d . _ _e == s && s . parentNode != n && ( s = _ ( d ) ) ; } } for ( u . _ _e = b , p = P ; p -- ; ) { null != A [ p ] && ( "function" === typeof u . type && null != A [ p ] . _ _e && A [ p ] . 
_ _e == u . _ _d && ( u . _ _d = _ ( t , p + 1 ) ) , L ( A [ p ] , A [ p ] ) ) ; } if ( m ) { for ( p = 0 ; p < m . length ; p ++ ) { z ( m [ p ] , m [ ++ p ] , m [ ++ p ] ) ; } } } function g ( n , l , u ) { let t , i ; for ( t = 0 ; t < n . _ _k . length ; t ++ ) { ( i = n . _ _k [ t ] ) && ( i . _ _ = n , l = "function" === typeof i . type ? g ( i , l , u ) : w ( u , i , i , n . _ _k , i . _ _e , l ) ) ; } return l ; } function w ( n , l , u , t , i , o ) { let r , f , e ; if ( void 0 !== l . _ _d ) { r = l . _ _d , l . _ _d = void 0 ; } else if ( null == u || i != o || null == i . parentNode ) { n : if ( null == o || o . parentNode !== n ) { n . appendChild ( i ) , r = null ; } else { for ( f = o , e = 0 ; ( f = f . nextSibling ) && e < t . length ; e += 2 ) { if ( f == i ) { break n ; } } n . insertBefore ( i , o ) , r = o ; } } return void 0 !== r ? r : i . nextSibling ; } function A ( n , l , u , t , i ) { let o ; for ( o in u ) { "children" === o || "key" === o || o in l || C ( n , o , null , u [ o ] , t ) ; } for ( o in l ) { i && "function" !== typeof l [ o ] || "children" === o || "key" === o || "value" === o || "checked" === o || u [ o ] === l [ o ] || C ( n , o , l [ o ] , u [ o ] , t ) ; } } function P ( n , l , u ) { "-" === l [ 0 ] ? n . setProperty ( l , u ) : n [ l ] = null == u ? "" : "number" !== typeof u || s . test ( l ) ? u : u + "px" ; } function C ( n , l , u , t , i ) { let o ; n : if ( "style" === l ) { if ( "string" === typeof u ) { n . style . cssText = u ; } else { if ( "string" === typeof t && ( n . style . cssText = t = "" ) , t ) { for ( l in t ) { u && l in u || P ( n . style , l , "" ) ; } } if ( u ) { for ( l in u ) { t && u [ l ] === t [ l ] || P ( n . style , l , u [ l ] ) ; } } } } else if ( "o" === l [ 0 ] && "n" === l [ 1 ] ) { o = l !== ( l = l . replace ( /Capture$/ , "" ) ) , l = l . toLowerCase ( ) in n ? l . toLowerCase ( ) . slice ( 2 ) : l . slice ( 2 ) , n . l || ( n . l = { } ) , n . l [ l + o ] = u , u ? t || n . 
addEventListener ( l , o ? H : $ , o ) : n . removeEventListener ( l , o ? H : $ , o ) ; } else if ( "dangerouslySetInnerHTML" !== l ) { if ( i ) { l = l . replace ( /xlink[H:h]/ , "h" ) . replace ( /sName$/ , "s" ) ; } else if ( "href" !== l && "list" !== l && "form" !== l && "tabIndex" !== l && "download" !== l && l in n ) { try { n [ l ] = nul
} , { } ] , 59 : [ function ( require , module , exports ) {
/* jshint node: true */
'use strict' ;
/ * *
# wildcard
Very simple wildcard matching , which is designed to provide the same
functionality that is found in the
[ eve ] ( https : //github.com/adobe-webplatform/eve) eventing library.
# # Usage
It works with strings :
<< < examples / strings . js
Arrays :
<< < examples / arrays . js
Objects ( matching against keys ) :
<< < examples / objects . js
While the library works in Node, if you are looking for file-based
wildcard matching then you should have a look at :
< https : //github.com/isaacs/node-glob>
* * /
// Holds a wildcard pattern split into segments. `hasWild` is the bitwise-NOT
// of `indexOf('*')`, so it is truthy exactly when the pattern contains `*`.
function WildcardMatcher(text, separator) {
  const source = text || '';
  this.text = source;
  this.hasWild = ~source.indexOf('*');
  this.separator = separator;
  this.parts = source.split(separator);
}
/**
 * Match `input` against the pattern.
 * - String input: returns the input's segments (an array) on a match,
 *   `false` otherwise. Without a `*` in the pattern, a non-identical string
 *   fails fast.
 * - Array-like input (has `splice`): returns the matching elements, collected
 *   while iterating from the end (so results are in reverse input order).
 * - Object input: returns a new object holding only the matching keys.
 * - Anything else: returns `true`.
 */
WildcardMatcher.prototype.match = function (input) {
  if (typeof input === 'string' || input instanceof String) {
    if (!this.hasWild && this.text != input) {
      return false;
    }
    const candidate = (input || '').split(this.separator);
    let ok = true;
    for (let idx = 0; ok && idx < this.parts.length; idx++) {
      if (this.parts[idx] === '*') {
        continue;
      }
      ok = idx < candidate.length && this.parts[idx] === candidate[idx];
    }
    // On success, hand back the split segments (eve-style behavior).
    return ok && candidate;
  }
  if (typeof input.splice === 'function') {
    const kept = [];
    for (let idx = input.length; idx--; ) {
      if (this.match(input[idx])) {
        kept.push(input[idx]);
      }
    }
    return kept;
  }
  if (typeof input === 'object') {
    const kept = {};
    for (const key in input) {
      if (this.match(key)) {
        kept[key] = input[key];
      }
    }
    return kept;
  }
  return true;
};
module . exports = function ( text , test , separator ) {
let matcher = new WildcardMatcher ( text , separator || /[\/\.]/ ) ;
if ( typeof test !== 'undefined' ) {
return matcher . match ( test ) ;
}
return matcher ;
} ;
} , { } ] , 60 : [ function ( require , module , exports ) {
// Custom Uppy build: Discourse does not use webpack for its JS modules, and
// upstream Uppy ships either webpack modules or one monolithic file with
// every plugin included. Bundling by hand lets us expose only the plugins we
// actually use, hung off a `window.Uppy` namespace.
const uppyNamespace = {};
window.Uppy = uppyNamespace;
uppyNamespace.Core = require('@uppy/core');
uppyNamespace.XHRUpload = require('@uppy/xhr-upload');
uppyNamespace.AwsS3 = require('@uppy/aws-s3');
uppyNamespace.AwsS3Multipart = require('@uppy/aws-s3-multipart');
uppyNamespace.DropTarget = require('@uppy/drop-target');
uppyNamespace.Utils = {
  delay: require('@uppy/utils/lib/delay'),
  EventTracker: require('@uppy/utils/lib/EventTracker'),
  AbortControllerLib: require('@uppy/utils/lib/AbortController')
};
} , { "@uppy/aws-s3" : 5 , "@uppy/aws-s3-multipart" : 3 , "@uppy/core" : 20 , "@uppy/drop-target" : 24 , "@uppy/utils/lib/AbortController" : 26 , "@uppy/utils/lib/EventTracker" : 27 , "@uppy/utils/lib/delay" : 32 , "@uppy/xhr-upload" : 52 } ] } , { } , [ 60 ] ) ;