/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({

/***/ 7351:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issue = exports.issueCommand = void 0;
const os = __importStar(__nccwpck_require__(2037));
const utils_1 = __nccwpck_require__(5278);
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
class Command {
    constructor(command, properties, message) {
        if (!command) {
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
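// Illustrative note (not part of the upstream source): a call such as
//   issueCommand('error', { file: 'app.js', line: 10 }, 'Build failed')
// writes the workflow command line
//   ::error file=app.js,line=10::Build failed
// to stdout, with '%', '\r' and '\n' percent-encoded by escapeData/escapeProperty below.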
function escapeData(s) {
    return utils_1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
function escapeProperty(s) {
    return utils_1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map

/***/ }),

/***/ 2186:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
const command_1 = __nccwpck_require__(7351);
const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const oidc_utils_1 = __nccwpck_require__(8041);
/**
 * The code to exit an action
 */
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const convertedVal = utils_1.toCommandValue(val);
    process.env[name] = convertedVal;
    const filePath = process.env['GITHUB_ENV'] || '';
    if (filePath) {
        return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
    }
    command_1.issueCommand('set-env', { name }, convertedVal);
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath
 */
function addPath(inputPath) {
    const filePath = process.env['GITHUB_PATH'] || '';
    if (filePath) {
        file_command_1.issueFileCommand('PATH', inputPath);
    }
    else {
        command_1.issueCommand('add-path', {}, inputPath);
    }
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input.
 * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
 * Returns an empty string if the value is not defined.
 *
 * @param     name     name of the input to get
 * @param     options  optional. See InputOptions.
 * @returns   string
 */
function getInput(name, options) {
    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    if (options && options.trimWhitespace === false) {
        return val;
    }
    return val.trim();
}
exports.getInput = getInput;
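// Illustrative note (not part of the upstream source): an input declared as
// `my input` in action.yml is read here from the environment variable
// INPUT_MY_INPUT, e.g. getInput('my input', { required: true }).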
/**
 * Gets the values of a multiline input. Each value is also trimmed.
 *
 * @param     name     name of the input to get
 * @param     options  optional. See InputOptions.
 * @returns   string[]
 *
 */
function getMultilineInput(name, options) {
    const inputs = getInput(name, options)
        .split('\n')
        .filter(x => x !== '');
    if (options && options.trimWhitespace === false) {
        return inputs;
    }
    return inputs.map(input => input.trim());
}
exports.getMultilineInput = getMultilineInput;
/**
 * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
 * Support boolean input list: `true | True | TRUE | false | False | FALSE`.
 * The return value is also in boolean type.
 * ref: https://yaml.org/spec/1.2/spec.html#id2804923
 *
 * @param     name     name of the input to get
 * @param     options  optional. See InputOptions.
 * @returns   boolean
 */
function getBooleanInput(name, options) {
    const trueValue = ['true', 'True', 'TRUE'];
    const falseValue = ['false', 'False', 'FALSE'];
    const val = getInput(name, options);
    if (trueValue.includes(val))
        return true;
    if (falseValue.includes(val))
        return false;
    throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
        `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
}
exports.getBooleanInput = getBooleanInput;
/**
 * Sets the value of an output.
 *
 * @param     name     name of the output to set
 * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    const filePath = process.env['GITHUB_OUTPUT'] || '';
    if (filePath) {
        return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
    }
    process.stdout.write(os.EOL);
    command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
}
exports.setOutput = setOutput;
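// Illustrative note (not part of the upstream source): on current runners
// setOutput('result', { ok: true }) appends a delimited key/value block to the
// file named by $GITHUB_OUTPUT; the legacy ::set-output command is only used
// when that variable is absent.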
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Gets whether Actions Step Debug is on or not
 */
function isDebug() {
    return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function error(message, properties = {}) {
    command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
 * Adds a warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function warning(message, properties = {}) {
    command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
 * Adds a notice issue
 * @param message notice issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function notice(message, properties = {}) {
    command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.notice = notice;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            endGroup();
        }
        return result;
    });
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param     name     name of the state to store
 * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    const filePath = process.env['GITHUB_STATE'] || '';
    if (filePath) {
        return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
    }
    command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
}
exports.saveState = saveState;
/**
 * Gets the value of a state set by this action's main execution.
 *
 * @param     name     name of the state to get
 * @returns   string
 */
function getState(name) {
    return process.env[`STATE_${name}`] || '';
}
exports.getState = getState;
function getIDToken(aud) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield oidc_utils_1.OidcClient.getIDToken(aud);
    });
}
exports.getIDToken = getIDToken;
/**
 * Summary exports
 */
var summary_1 = __nccwpck_require__(1327);
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
/**
 * @deprecated use core.summary
 */
var summary_2 = __nccwpck_require__(1327);
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
/**
 * Path exports
 */
var path_utils_1 = __nccwpck_require__(2981);
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
//# sourceMappingURL=core.js.map

/***/ }),

/***/ 717:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

// For internal use, subject to change.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(8974);
const utils_1 = __nccwpck_require__(5278);
function issueFileCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
        encoding: 'utf8'
    });
}
exports.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
    const delimiter = `ghadelimiter_${uuid_1.v4()}`;
    const convertedValue = utils_1.toCommandValue(value);
    // These should realistically never happen, but just in case someone finds a
    // way to exploit uuid generation let's not allow keys or values that contain
    // the delimiter.
    if (key.includes(delimiter)) {
        throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
    }
    if (convertedValue.includes(delimiter)) {
        throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
    }
    return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
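// Illustrative note (not part of the upstream source): prepareKeyValueMessage('result', 'ok')
// produces a heredoc-style block roughly like
//   result<<ghadelimiter_<random-uuid>
//   ok
//   ghadelimiter_<random-uuid>
// which issueFileCommand appends to the file named by GITHUB_<command>.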
//# sourceMappingURL=file-command.js.map

/***/ }),

/***/ 8041:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OidcClient = void 0;
const http_client_1 = __nccwpck_require__(6255);
const auth_1 = __nccwpck_require__(5526);
const core_1 = __nccwpck_require__(2186);
class OidcClient {
    static createHttpClient(allowRetry = true, maxRetry = 10) {
        const requestOptions = {
            allowRetries: allowRetry,
            maxRetries: maxRetry
        };
        return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
    }
    static getRequestToken() {
        const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
        if (!token) {
            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
        }
        return token;
    }
    static getIDTokenUrl() {
        const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
        if (!runtimeUrl) {
            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
        }
        return runtimeUrl;
    }
    static getCall(id_token_url) {
        var _a;
        return __awaiter(this, void 0, void 0, function* () {
            const httpclient = OidcClient.createHttpClient();
            const res = yield httpclient
                .getJson(id_token_url)
                .catch(error => {
                throw new Error(`Failed to get ID Token. \n
        Error Code : ${error.statusCode}\n
        Error Message: ${error.result.message}`);
            });
            const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
            if (!id_token) {
                throw new Error('Response json body do not have ID Token field');
            }
            return id_token;
        });
    }
    static getIDToken(audience) {
        return __awaiter(this, void 0, void 0, function* () {
            try {
                // New ID Token is requested from action service
                let id_token_url = OidcClient.getIDTokenUrl();
                if (audience) {
                    const encodedAudience = encodeURIComponent(audience);
                    id_token_url = `${id_token_url}&audience=${encodedAudience}`;
                }
                core_1.debug(`ID token url is ${id_token_url}`);
                const id_token = yield OidcClient.getCall(id_token_url);
                core_1.setSecret(id_token);
                return id_token;
            }
            catch (error) {
                throw new Error(`Error message: ${error.message}`);
            }
        });
    }
}
exports.OidcClient = OidcClient;
//# sourceMappingURL=oidc-utils.js.map

/***/ }),

/***/ 2981:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(__nccwpck_require__(1017));
/**
 * toPosixPath converts the given path to the posix form. On Windows, \\ will be
 * replaced with /.
 *
 * @param pth. Path to transform.
 * @return string Posix path.
 */
function toPosixPath(pth) {
    return pth.replace(/[\\]/g, '/');
}
exports.toPosixPath = toPosixPath;
/**
 * toWin32Path converts the given path to the win32 form. On Linux, / will be
 * replaced with \\.
 *
 * @param pth. Path to transform.
 * @return string Win32 path.
 */
function toWin32Path(pth) {
    return pth.replace(/[/]/g, '\\');
}
exports.toWin32Path = toWin32Path;
/**
 * toPlatformPath converts the given path to a platform-specific path. It does
 * this by replacing instances of / and \ with the platform-specific path
 * separator.
 *
 * @param pth The path to platformize.
 * @return string The platform-specific path.
 */
function toPlatformPath(pth) {
    return pth.replace(/[/\\]/g, path.sep);
}
exports.toPlatformPath = toPlatformPath;
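// Illustrative note (not part of the upstream source):
//   toPosixPath('dir\\file.txt')   -> 'dir/file.txt'
//   toWin32Path('dir/file.txt')    -> 'dir\\file.txt'
//   toPlatformPath('dir/file.txt') uses path.sep for the current OS.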
//# sourceMappingURL=path-utils.js.map

/***/ }),

/***/ 1327:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const os_1 = __nccwpck_require__(2037);
const fs_1 = __nccwpck_require__(7147);
const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
class Summary {
    constructor() {
        this._buffer = '';
    }
    /**
     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
     * Also checks r/w permissions.
     *
     * @returns step summary file path
     */
    filePath() {
        return __awaiter(this, void 0, void 0, function* () {
            if (this._filePath) {
                return this._filePath;
            }
            const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
            if (!pathFromEnv) {
                throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
            }
            try {
                yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
            }
            catch (_a) {
                throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
            }
            this._filePath = pathFromEnv;
            return this._filePath;
        });
    }
    /**
     * Wraps content in an HTML tag, adding any HTML attributes
     *
     * @param {string} tag HTML tag to wrap
     * @param {string | null} content content within the tag
     * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
     *
     * @returns {string} content wrapped in HTML element
     */
    wrap(tag, content, attrs = {}) {
        const htmlAttrs = Object.entries(attrs)
            .map(([key, value]) => ` ${key}="${value}"`)
            .join('');
        if (!content) {
            return `<${tag}${htmlAttrs}>`;
        }
        return `<${tag}${htmlAttrs}>${content}</${tag}>`;
    }
    /**
     * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
     *
     * @param {SummaryWriteOptions} [options] (optional) options for write operation
     *
     * @returns {Promise<Summary>} summary instance
     */
    write(options) {
        return __awaiter(this, void 0, void 0, function* () {
            const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
            const filePath = yield this.filePath();
            const writeFunc = overwrite ? writeFile : appendFile;
            yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
            return this.emptyBuffer();
        });
    }
    /**
     * Clears the summary buffer and wipes the summary file
     *
     * @returns {Summary} summary instance
     */
    clear() {
        return __awaiter(this, void 0, void 0, function* () {
            return this.emptyBuffer().write({ overwrite: true });
        });
    }
    /**
     * Returns the current summary buffer as a string
     *
     * @returns {string} string of summary buffer
     */
    stringify() {
        return this._buffer;
    }
    /**
     * If the summary buffer is empty
     *
     * @returns {boolean} true if the buffer is empty
     */
    isEmptyBuffer() {
        return this._buffer.length === 0;
    }
    /**
     * Resets the summary buffer without writing to summary file
     *
     * @returns {Summary} summary instance
     */
    emptyBuffer() {
        this._buffer = '';
        return this;
    }
    /**
     * Adds raw text to the summary buffer
     *
     * @param {string} text content to add
     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
     *
     * @returns {Summary} summary instance
     */
    addRaw(text, addEOL = false) {
        this._buffer += text;
        return addEOL ? this.addEOL() : this;
    }
    /**
     * Adds the operating system-specific end-of-line marker to the buffer
     *
     * @returns {Summary} summary instance
     */
    addEOL() {
        return this.addRaw(os_1.EOL);
    }
    /**
     * Adds an HTML codeblock to the summary buffer
     *
     * @param {string} code content to render within fenced code block
     * @param {string} lang (optional) language to syntax highlight code
     *
     * @returns {Summary} summary instance
     */
    addCodeBlock(code, lang) {
        const attrs = Object.assign({}, (lang && { lang }));
        const element = this.wrap('pre', this.wrap('code', code), attrs);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML list to the summary buffer
     *
     * @param {string[]} items list of items to render
     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
     *
     * @returns {Summary} summary instance
     */
    addList(items, ordered = false) {
        const tag = ordered ? 'ol' : 'ul';
        const listItems = items.map(item => this.wrap('li', item)).join('');
        const element = this.wrap(tag, listItems);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML table to the summary buffer
     *
     * @param {SummaryTableCell[]} rows table rows
     *
     * @returns {Summary} summary instance
     */
    addTable(rows) {
        const tableBody = rows
            .map(row => {
            const cells = row
                .map(cell => {
                if (typeof cell === 'string') {
                    return this.wrap('td', cell);
                }
                const { header, data, colspan, rowspan } = cell;
                const tag = header ? 'th' : 'td';
                const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
                return this.wrap(tag, data, attrs);
            })
                .join('');
            return this.wrap('tr', cells);
        })
            .join('');
        const element = this.wrap('table', tableBody);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds a collapsable HTML details element to the summary buffer
     *
     * @param {string} label text for the closed state
     * @param {string} content collapsable content
     *
     * @returns {Summary} summary instance
     */
    addDetails(label, content) {
        const element = this.wrap('details', this.wrap('summary', label) + content);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML image tag to the summary buffer
     *
     * @param {string} src path to the image you want to embed
     * @param {string} alt text description of the image
     * @param {SummaryImageOptions} options (optional) additional image attributes
     *
     * @returns {Summary} summary instance
     */
    addImage(src, alt, options) {
        const { width, height } = options || {};
        const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
        const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML section heading element
     *
     * @param {string} text heading text
     * @param {number | string} [level=1] (optional) the heading level, default: 1
     *
     * @returns {Summary} summary instance
     */
    addHeading(text, level) {
        const tag = `h${level}`;
        const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
            ? tag
            : 'h1';
        const element = this.wrap(allowedTag, text);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML thematic break (<hr>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addSeparator() {
        const element = this.wrap('hr', null);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML line break (<br>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addBreak() {
        const element = this.wrap('br', null);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML blockquote to the summary buffer
     *
     * @param {string} text quote text
     * @param {string} cite (optional) citation url
     *
     * @returns {Summary} summary instance
     */
    addQuote(text, cite) {
        const attrs = Object.assign({}, (cite && { cite }));
        const element = this.wrap('blockquote', text, attrs);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML anchor tag to the summary buffer
     *
     * @param {string} text link text/content
     * @param {string} href hyperlink
     *
     * @returns {Summary} summary instance
     */
    addLink(text, href) {
        const element = this.wrap('a', text, { href });
        return this.addRaw(element).addEOL();
    }
}
const _summary = new Summary();
/**
 * @deprecated use `core.summary`
 */
exports.markdownSummary = _summary;
exports.summary = _summary;
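// Illustrative note (not part of the upstream source): typical usage from an action is
//   await core.summary.addHeading('Test results').addTable(rows).write();
// each add* call appends HTML to the in-memory buffer and write() flushes it to
// the file named by $GITHUB_STEP_SUMMARY.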
//# sourceMappingURL=summary.js.map

/***/ }),

/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toCommandProperties = exports.toCommandValue = void 0;
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    if (input === null || input === undefined) {
        return '';
    }
    else if (typeof input === 'string' || input instanceof String) {
        return input;
    }
    return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
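// Illustrative note (not part of the upstream source):
//   toCommandValue(undefined) -> ''
//   toCommandValue('abc')     -> 'abc'
//   toCommandValue({ a: 1 })  -> '{"a":1}'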
/**
 *
 * @param annotationProperties
 * @returns The command properties to send with the actual annotation command
 * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
 */
function toCommandProperties(annotationProperties) {
    if (!Object.keys(annotationProperties).length) {
        return {};
    }
    return {
        title: annotationProperties.title,
        file: annotationProperties.file,
        line: annotationProperties.startLine,
        endLine: annotationProperties.endLine,
        col: annotationProperties.startColumn,
        endColumn: annotationProperties.endColumn
    };
}
exports.toCommandProperties = toCommandProperties;
//# sourceMappingURL=utils.js.map

/***/ }),

/***/ 8974:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
Object.defineProperty(exports, "v1", ({
  enumerable: true,
  get: function () {
    return _v.default;
  }
}));
Object.defineProperty(exports, "v3", ({
  enumerable: true,
  get: function () {
    return _v2.default;
  }
}));
Object.defineProperty(exports, "v4", ({
  enumerable: true,
  get: function () {
    return _v3.default;
  }
}));
Object.defineProperty(exports, "v5", ({
  enumerable: true,
  get: function () {
    return _v4.default;
  }
}));
Object.defineProperty(exports, "NIL", ({
  enumerable: true,
  get: function () {
    return _nil.default;
  }
}));
Object.defineProperty(exports, "version", ({
  enumerable: true,
  get: function () {
    return _version.default;
  }
}));
Object.defineProperty(exports, "validate", ({
  enumerable: true,
  get: function () {
    return _validate.default;
  }
}));
Object.defineProperty(exports, "stringify", ({
  enumerable: true,
  get: function () {
    return _stringify.default;
  }
}));
Object.defineProperty(exports, "parse", ({
  enumerable: true,
  get: function () {
    return _parse.default;
  }
}));
var _v = _interopRequireDefault(__nccwpck_require__(1595));
var _v2 = _interopRequireDefault(__nccwpck_require__(6993));
var _v3 = _interopRequireDefault(__nccwpck_require__(1472));
var _v4 = _interopRequireDefault(__nccwpck_require__(6217));
var _nil = _interopRequireDefault(__nccwpck_require__(2381));
var _version = _interopRequireDefault(__nccwpck_require__(427));
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
var _parse = _interopRequireDefault(__nccwpck_require__(6385));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

/***/ }),

/***/ 5842:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function md5(bytes) {
  if (Array.isArray(bytes)) {
    bytes = Buffer.from(bytes);
  } else if (typeof bytes === 'string') {
    bytes = Buffer.from(bytes, 'utf8');
  }
  return _crypto.default.createHash('md5').update(bytes).digest();
}
var _default = md5;
exports["default"] = _default;

/***/ }),

/***/ 2381:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _default = '00000000-0000-0000-0000-000000000000';
exports["default"] = _default;

/***/ }),

/***/ 6385:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function parse(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }
  let v;
  const arr = new Uint8Array(16); // Parse ########-....-....-....-............
  arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
  arr[1] = v >>> 16 & 0xff;
  arr[2] = v >>> 8 & 0xff;
  arr[3] = v & 0xff; // Parse ........-####-....-....-............
  arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
  arr[5] = v & 0xff; // Parse ........-....-####-....-............
  arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
  arr[7] = v & 0xff; // Parse ........-....-....-####-............
  arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
  arr[9] = v & 0xff; // Parse ........-....-....-....-############
  // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
  arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
  arr[11] = v / 0x100000000 & 0xff;
  arr[12] = v >>> 24 & 0xff;
  arr[13] = v >>> 16 & 0xff;
  arr[14] = v >>> 8 & 0xff;
  arr[15] = v & 0xff;
  return arr;
}
var _default = parse;
exports["default"] = _default;

/***/ }),

/***/ 6230:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
exports["default"] = _default;

/***/ }),

/***/ 9784:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = rng;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate
let poolPtr = rnds8Pool.length;
function rng() {
  if (poolPtr > rnds8Pool.length - 16) {
    _crypto.default.randomFillSync(rnds8Pool);
    poolPtr = 0;
  }
  return rnds8Pool.slice(poolPtr, poolPtr += 16);
}

/***/ }),

/***/ 8844:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function sha1(bytes) {
  if (Array.isArray(bytes)) {
    bytes = Buffer.from(bytes);
  } else if (typeof bytes === 'string') {
    bytes = Buffer.from(bytes, 'utf8');
  }
  return _crypto.default.createHash('sha1').update(bytes).digest();
}
var _default = sha1;
exports["default"] = _default;

/***/ }),

/***/ 1458:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Convert array of 16 byte values to UUID string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 */
const byteToHex = [];
for (let i = 0; i < 256; ++i) {
  byteToHex.push((i + 0x100).toString(16).substr(1));
}
function stringify(arr, offset = 0) {
  // Note: Be careful editing this code! It's been tuned for performance
  // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
  const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
  // of the following:
  // - One or more input array values don't map to a hex octet (leading to
  // "undefined" in the uuid)
  // - Invalid input values for the RFC `version` or `variant` fields
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Stringified UUID is invalid');
  }
  return uuid;
}
var _default = stringify;
exports["default"] = _default;

/***/ }),

/***/ 1595:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _rng = _interopRequireDefault(__nccwpck_require__(9784));
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
let _nodeId;
let _clockseq; // Previous uuid creation time
let _lastMSecs = 0;
let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
function v1(options, buf, offset) {
  let i = buf && offset || 0;
  const b = buf || new Array(16);
  options = options || {};
  let node = options.node || _nodeId;
  let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
  // specified. We do this lazily to minimize issues related to insufficient
  // system entropy. See #189
  if (node == null || clockseq == null) {
    const seedBytes = options.random || (options.rng || _rng.default)();
    if (node == null) {
      // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
      node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
    }
    if (clockseq == null) {
      // Per 4.2.2, randomize (14 bit) clockseq
      clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
    }
  } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
  // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
  // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
  // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
  let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
  // cycle to simulate higher resolution clock
  let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
  const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
  if (dt < 0 && options.clockseq === undefined) {
    clockseq = clockseq + 1 & 0x3fff;
  } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
  // time interval
  if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
    nsecs = 0;
  } // Per 4.2.1.2 Throw error if too many uuids are requested
  if (nsecs >= 10000) {
    throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
  }
  _lastMSecs = msecs;
  _lastNSecs = nsecs;
  _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
  msecs += 12219292800000; // `time_low`
  const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
  b[i++] = tl >>> 24 & 0xff;
  b[i++] = tl >>> 16 & 0xff;
  b[i++] = tl >>> 8 & 0xff;
  b[i++] = tl & 0xff; // `time_mid`
  const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
  b[i++] = tmh >>> 8 & 0xff;
  b[i++] = tmh & 0xff; // `time_high_and_version`
  b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
  b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
  b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
  b[i++] = clockseq & 0xff; // `node`
  for (let n = 0; n < 6; ++n) {
    b[i + n] = node[n];
  }
  return buf || (0, _stringify.default)(b);
}
var _default = v1;
exports["default"] = _default;

/***/ }),

/***/ 6993:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = void 0;
var _v = _interopRequireDefault(__nccwpck_require__(5920));
var _md = _interopRequireDefault(__nccwpck_require__(5842));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const v3 = (0, _v.default)('v3', 0x30, _md.default);
var _default = v3;
exports["default"] = _default;

/***/ }),

/***/ 5920:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({
  value: true
}));
exports["default"] = _default;
exports.URL = exports.DNS = void 0;
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
var _parse = _interopRequireDefault(__nccwpck_require__(6385));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function stringToBytes(str) {
  str = unescape(encodeURIComponent(str)); // UTF8 escape
  const bytes = [];
  for (let i = 0; i < str.length; ++i) {
    bytes.push(str.charCodeAt(i));
  }
  return bytes;
}
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
exports.DNS = DNS;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
exports.URL = URL;
function _default(name, version, hashfunc) {
  function generateUUID(value, namespace, buf, offset) {
    if (typeof value === 'string') {
      value = stringToBytes(value);
    }
    if (typeof namespace === 'string') {
      namespace = (0, _parse.default)(namespace);
    }
    if (namespace.length !== 16) {
      throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
    } // Compute hash of namespace and value, Per 4.3
    // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
    // hashfunc([...namespace, ... value])`
    let bytes = new Uint8Array(16 + value.length);
    bytes.set(namespace);
    bytes.set(value, namespace.length);
    bytes = hashfunc(bytes);
    bytes[6] = bytes[6] & 0x0f | version;
    bytes[8] = bytes[8] & 0x3f | 0x80;
    if (buf) {
      offset = offset || 0;
      for (let i = 0; i < 16; ++i) {
        buf[offset + i] = bytes[i];
      }
      return buf;
    }
    return (0, _stringify.default)(bytes);
  } // Function#name is not settable on some platforms (#270)
  try {
    generateUUID.name = name; // eslint-disable-next-line no-empty
  } catch (err) {} // For CommonJS default export support
  generateUUID.DNS = DNS;
  generateUUID.URL = URL;
  return generateUUID;
}

/***/ }),
/***/ 1472 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 9784 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1458 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function v4 ( options , buf , offset ) {
options = options || { } ;
const rnds = options . random || ( options . rng || _rng . default ) ( ) ; // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = rnds [ 6 ] & 0x0f | 0x40 ;
rnds [ 8 ] = rnds [ 8 ] & 0x3f | 0x80 ; // Copy bytes to buffer, if provided
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = rnds [ i ] ;
2022-11-10 11:43:16 +01:00
}
2022-11-10 13:17:10 +01:00
return buf ;
}
return ( 0 , _stringify . default ) ( rnds ) ;
}
var _default = v4 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6217 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5920 ) ) ;
var _sha = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8844 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v5 = ( 0 , _v . default ) ( 'v5' , 0x50 , _sha . default ) ;
var _default = v5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 2609 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _regex = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6230 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function validate ( uuid ) {
return typeof uuid === 'string' && _regex . default . test ( uuid ) ;
}
var _default = validate ;
exports [ "default" ] = _default ;
/***/ } ) ,
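// Illustrative example: validate() is just a type check plus the regex from module 6230, e.g.
//   validate('6ba7b810-9dad-11d1-80b4-00c04fd430c8')   // true
//   validate('not-a-uuid')                             // false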
/***/ 427 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2609 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function version ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
return parseInt ( uuid . substr ( 14 , 1 ) , 16 ) ;
}
var _default = version ;
exports [ "default" ] = _default ;
/***/ } ) ,
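// Illustrative example: version() reads the single hex digit at string offset 14, e.g.
//   version('6ba7b810-9dad-11d1-80b4-00c04fd430c8')    // 1
// and throws a TypeError for any input that fails validate().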
/***/ 5526 :
/***/ ( function ( _ _unused _webpack _module , exports ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . PersonalAccessTokenCredentialHandler = exports . BearerCredentialHandler = exports . BasicCredentialHandler = void 0 ;
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports . BasicCredentialHandler = BasicCredentialHandler ;
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Bearer ${this.token}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
//# sourceMappingURL=auth.js.map
/***/ } ) ,
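// Illustrative usage sketch (hypothetical variable names): these handlers plug into the
// HttpClient below via its `handlers` constructor argument, and prepareRequest() is called
// on every outgoing request, e.g.
//   const client = new HttpClient('my-agent', [new BearerCredentialHandler(token)]);
//   // every request now carries an `Authorization: Bearer <token>` header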
/***/ 6255 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
/* eslint-disable @typescript-eslint/no-explicit-any */
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . HttpClient = exports . isHttps = exports . HttpClientResponse = exports . HttpClientError = exports . getProxyUrl = exports . MediaTypes = exports . Headers = exports . HttpCodes = void 0 ;
const http = _ _importStar ( _ _nccwpck _require _ _ ( 3685 ) ) ;
const https = _ _importStar ( _ _nccwpck _require _ _ ( 5687 ) ) ;
const pm = _ _importStar ( _ _nccwpck _require _ _ ( 9835 ) ) ;
const tunnel = _ _importStar ( _ _nccwpck _require _ _ ( 4294 ) ) ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
function getProxyUrl ( serverUrl ) {
const proxyUrl = pm . getProxyUrl ( new URL ( serverUrl ) ) ;
return proxyUrl ? proxyUrl . href : '' ;
}
exports . getProxyUrl = getProxyUrl ;
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
class HttpClientError extends Error {
constructor ( message , statusCode ) {
super ( message ) ;
this . name = 'HttpClientError' ;
this . statusCode = statusCode ;
Object . setPrototypeOf ( this , HttpClientError . prototype ) ;
}
}
exports . HttpClientError = HttpClientError ;
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    readBody() {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
                let output = Buffer.alloc(0);
                this.message.on('data', (chunk) => {
                    output = Buffer.concat([output, chunk]);
                });
                this.message.on('end', () => {
                    resolve(output.toString());
                });
            }));
        });
    }
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
const parsedUrl = new URL ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
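// Example: isHttps('https://api.github.com') === true; isHttps('http://localhost:8080') === false.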
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
    options(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
        });
    }
    get(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('GET', requestUrl, null, additionalHeaders || {});
        });
    }
del ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
} ) ;
}
head ( requestUrl , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
} ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
} ) ;
}
/**
 * Gets a typed object from an endpoint.
 * Be aware that 404 Not Found resolves with a null result. Other errors (4xx, 5xx) reject the promise.
 */
getJson ( requestUrl , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
const res = yield this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
postJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
putJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
patchJson ( requestUrl , obj , additionalHeaders = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
const res = yield this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
} ) ;
}
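    // Illustrative usage sketch (hypothetical URL): the *Json helpers above set the JSON
    // Accept/Content-Type headers and parse the response body, e.g. inside an async caller:
    //   const res = await client.getJson('https://api.github.com/rate_limit');
    //   // res.statusCode, res.headers, and res.result (null on a 404) are then available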
/**
 * Makes a raw http request.
 * All other verb helpers such as get, post, and patch ultimately call this.
 * Prefer get, del, post and patch over calling request directly.
 */
request ( verb , requestUrl , data , headers ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
}
const parsedUrl = new URL ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
const maxTries = this . _allowRetries && RetryableHttpVerbs . includes ( verb )
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
do {
response = yield this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( const handler of this . handlers ) {
if ( handler . canHandleAuthentication ( response ) ) {
authenticationHandler = handler ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( response . message . statusCode &&
HttpRedirectCodes . includes ( response . message . statusCode ) &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't follow the redirect
break ;
}
const parsedRedirectUrl = new URL ( redirectUrl ) ;
if ( parsedUrl . protocol === 'https:' &&
parsedUrl . protocol !== parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning it,
// otherwise the open socket would be leaked.
yield response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( const header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
}
}
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = yield this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( ! response . message . statusCode ||
! HttpResponseRetryCodes . includes ( response . message . statusCode ) ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
yield response . readBody ( ) ;
yield this . _performExponentialBackoff ( numTries ) ;
}
} while ( numTries < maxTries ) ;
return response ;
} ) ;
}
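    // Behavior sketch derived from the loop above: with requestOptions
    // { allowRetries: true, maxRetries: 2 }, a GET answered with 502/503/504 is attempted
    // up to maxRetries + 1 = 3 times with exponential backoff in between; verbs outside
    // RetryableHttpVerbs (e.g. POST) are never retried, and redirects are capped at _maxRedirects.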
/**
 * Needs to be called if keepAlive is set to true in request options.
 */
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/**
 * Raw request.
 * @param info
 * @param data
 */
requestRaw ( info , data ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve , reject ) => {
function callbackForResult ( err , res ) {
if ( err ) {
reject ( err ) ;
}
else if ( ! res ) {
// If `err` is not passed, then `res` must be passed.
reject ( new Error ( 'Unknown error' ) ) ;
}
else {
resolve ( res ) ;
}
}
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
} ) ;
}
/**
 * Raw request with callback.
 * @param info
 * @param data
 * @param onResult
 */
requestRawWithCallback ( info , data , onResult ) {
if ( typeof data === 'string' ) {
if ( ! info . options . headers ) {
info . options . headers = { } ;
}
info . options . headers [ 'Content-Length' ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
function handleResult ( err , res ) {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
}
}
const req = info . httpModule . request ( info . options , ( msg ) => {
const res = new HttpClientResponse ( msg ) ;
handleResult ( undefined , res ) ;
} ) ;
let socket ;
req . on ( 'socket' , sock => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
}
handleResult(new Error(`Request timeout: ${info.options.path}`));
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err ) ;
} ) ;
if ( data && typeof data === 'string' ) {
req . write ( data , 'utf8' ) ;
}
if ( data && typeof data !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
}
else {
req . end ( ) ;
}
}
/**
 * Gets an http agent. This function is useful when you need an http agent that handles
 * routing through a proxy server - depending upon the url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
getAgent ( serverUrl ) {
const parsedUrl = new URL ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
for ( const handler of this . handlers ) {
handler . prepareRequest ( info . options ) ;
}
}
return info ;
}
_mergeHeaders ( headers ) {
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers || { } ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
const proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
const useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
// This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
if ( proxyUrl && proxyUrl . hostname ) {
const agentOptions = {
maxSockets ,
keepAlive : this . _keepAlive ,
proxy : Object . assign ( Object . assign ( { } , ( ( proxyUrl . username || proxyUrl . password ) && {
proxyAuth : ` ${ proxyUrl . username } : ${ proxyUrl . password } `
} ) ) , { host : proxyUrl . hostname , port : proxyUrl . port } )
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
}
else {
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
}
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , {
rejectUnauthorized : false
} ) ;
}
return agent ;
}
_performExponentialBackoff ( retryNumber ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
} ) ;
}
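    // Worked example from the constants above (ExponentialBackoffTimeSlice = 5,
    // ExponentialBackoffCeiling = 10): retry 1 waits 5 * 2^1 = 10 ms, retry 2 waits 20 ms,
    // and from retry 10 onward the delay stays capped at 5 * 2^10 = 5120 ms.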
_processResponse ( res , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve , reject ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const statusCode = res . message . statusCode || 0 ;
const response = {
statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode === HttpCodes . NotFound ) {
resolve ( response ) ;
}
// get the result from the body
function dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
const a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
let obj ;
let contents ;
try {
contents = yield res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
}
response . headers = res . message . headers ;
}
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = ` Failed request: ( ${ statusCode } ) ` ;
}
const err = new HttpClientError ( msg , statusCode ) ;
err . result = response . result ;
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ) ;
} ) ;
}
}
exports . HttpClient = HttpClient ;
const lowercaseKeys = ( obj ) => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
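// Example: lowercaseKeys({ 'Content-Type': 'text/plain' }) returns { 'content-type': 'text/plain' },
// which is how _mergeHeaders lets per-request headers override client defaults regardless of casing.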
//# sourceMappingURL=index.js.map

/***/ }),

/***/ 9835:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports . checkBypass = exports . getProxyUrl = void 0 ;
function getProxyUrl ( reqUrl ) {
const usingSsl = reqUrl . protocol === 'https:' ;
if ( checkBypass ( reqUrl ) ) {
return undefined ;
}
const proxyVar = ( ( ) => {
if ( usingSsl ) {
return process . env [ 'https_proxy' ] || process . env [ 'HTTPS_PROXY' ] ;
}
else {
return process . env [ 'http_proxy' ] || process . env [ 'HTTP_PROXY' ] ;
}
} ) ( ) ;
if ( proxyVar ) {
return new URL ( proxyVar ) ;
}
else {
return undefined ;
}
}
exports.getProxyUrl = getProxyUrl;
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
// Determine the request port
let reqPort ;
if ( reqUrl . port ) {
reqPort = Number ( reqUrl . port ) ;
}
else if ( reqUrl . protocol === 'http:' ) {
reqPort = 80 ;
}
else if ( reqUrl . protocol === 'https:' ) {
reqPort = 443 ;
}
// Format the request hostname and hostname with port
const upperReqHosts = [ reqUrl . hostname . toUpperCase ( ) ] ;
if ( typeof reqPort === 'number' ) {
upperReqHosts . push ( ` ${ upperReqHosts [ 0 ] } : ${ reqPort } ` ) ;
}
// Compare request host against noproxy
for ( const upperNoProxyItem of noProxy
. split ( ',' )
. map ( x => x . trim ( ) . toUpperCase ( ) )
. filter ( x => x ) ) {
if ( upperReqHosts . some ( x => x === upperNoProxyItem ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
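// Illustrative example: with NO_PROXY='internal.example.com,localhost', a request to
// https://internal.example.com/path bypasses the proxy (checkBypass returns true), while
// https://api.github.com still goes through HTTPS_PROXY if that variable is set. Note the
// comparison above is an exact host (or host:port) match, not a suffix match.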
//# sourceMappingURL=proxy.js.map
/***/ }),

/***/ 7760:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

/*! node-domexception. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
if ( ! globalThis . DOMException ) {
try {
const { MessageChannel } = _ _nccwpck _require _ _ ( 1267 ) ,
port = new MessageChannel ( ) . port1 ,
ab = new ArrayBuffer ( )
port . postMessage ( ab , [ ab , ab ] )
} catch ( err ) {
err . constructor . name === 'DOMException' && (
globalThis . DOMException = err . constructor
)
}
}
module.exports = globalThis.DOMException

/***/ }),

/***/ 4294:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

module.exports = __nccwpck_require__(4219);

/***/ }),

/***/ 4219:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";
var net = __nccwpck_require__(1808);
var tls = __nccwpck_require__(4404);
var http = __nccwpck_require__(3685);
var https = __nccwpck_require__(5687);
var events = __nccwpck_require__(2361);
var assert = __nccwpck_require__(9491);
var util = __nccwpck_require__(3837);

exports.httpOverHttp = httpOverHttp;
exports.httpsOverHttp = httpsOverHttp;
exports.httpOverHttps = httpOverHttps;
exports.httpsOverHttps = httpsOverHttps;
function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  return agent;
}

function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}

function httpOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  return agent;
}

function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}
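// Illustrative usage sketch (hypothetical proxy/host values): HttpClient._getAgent() above
// picks one of these factories; direct use looks like
//   var agent = httpsOverHttp({ proxy: { host: 'proxy.local', port: 8080 } });
//   https.request({ host: 'example.com', port: 443, agent: agent }, function (res) { /* ... */ });
// i.e. an HTTPS request tunneled through an HTTP CONNECT proxy.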
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect the request to connect same origin server,
// reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
TunnelingAgent . prototype . createSocket = function createSocket ( options , cb ) {
var self = this ;
var placeholder = { } ;
self . sockets . push ( placeholder ) ;
var connectOptions = mergeOptions ( { } , self . proxyOptions , {
method : 'CONNECT' ,
path : options . host + ':' + options . port ,
agent : false ,
headers : {
host : options . host + ':' + options . port
}
} ) ;
if ( options . localAddress ) {
connectOptions . localAddress = options . localAddress ;
}
if ( connectOptions . proxyAuth ) {
connectOptions . headers = connectOptions . headers || { } ;
connectOptions . headers [ 'Proxy-Authorization' ] = 'Basic ' +
new Buffer ( connectOptions . proxyAuth ) . toString ( 'base64' ) ;
}
debug ( 'making CONNECT request' ) ;
var connectReq = self . request ( connectOptions ) ;
connectReq . useChunkedEncodingByDefault = false ; // for v0.6
connectReq . once ( 'response' , onResponse ) ; // for v0.6
connectReq . once ( 'upgrade' , onUpgrade ) ; // for v0.6
connectReq . once ( 'connect' , onConnect ) ; // for v0.7 or later
connectReq . once ( 'error' , onError ) ;
connectReq . end ( ) ;
function onResponse ( res ) {
// Very hacky. This is necessary to avoid http-parser leaks.
res . upgrade = true ;
}
function onUpgrade ( res , socket , head ) {
// Hacky.
process . nextTick ( function ( ) {
onConnect ( res , socket , head ) ;
} ) ;
}
function onConnect ( res , socket , head ) {
connectReq . removeAllListeners ( ) ;
socket . removeAllListeners ( ) ;
if ( res . statusCode !== 200 ) {
debug ( 'tunneling socket could not be established, statusCode=%d' ,
res . statusCode ) ;
socket . destroy ( ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'statusCode=' + res . statusCode ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
if ( head . length > 0 ) {
debug ( 'got illegal response body from proxy' ) ;
socket . destroy ( ) ;
var error = new Error ( 'got illegal response body from proxy' ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
debug ( 'tunneling connection has established' ) ;
self . sockets [ self . sockets . indexOf ( placeholder ) ] = socket ;
return cb ( socket ) ;
}
function onError ( cause ) {
connectReq . removeAllListeners ( ) ;
debug ( 'tunneling socket could not be established, cause=%s\n' ,
cause . message , cause . stack ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'cause=' + cause . message ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
}
} ;
TunnelingAgent . prototype . removeSocket = function removeSocket ( socket ) {
var pos = this . sockets . indexOf ( socket )
if ( pos === - 1 ) {
return ;
}
this . sockets . splice ( pos , 1 ) ;
var pending = this . requests . shift ( ) ;
if ( pending ) {
// If we have pending requests and a socket gets closed a new one
// needs to be created to take over in the pool for the one that closed.
this . createSocket ( pending , function ( socket ) {
pending . request . onSocket ( socket ) ;
} ) ;
}
} ;
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
function toOptions ( host , port , localAddress ) {
if ( typeof host === 'string' ) { // since v0.10
return {
host : host ,
port : port ,
localAddress : localAddress
} ;
}
return host ; // for v0.11 or later
}
function mergeOptions ( target ) {
for ( var i = 1 , len = arguments . length ; i < len ; ++ i ) {
var overrides = arguments [ i ] ;
if ( typeof overrides === 'object' ) {
var keys = Object . keys ( overrides ) ;
for ( var j = 0 , keyLen = keys . length ; j < keyLen ; ++ j ) {
var k = keys [ j ] ;
if ( overrides [ k ] !== undefined ) {
target [ k ] = overrides [ k ] ;
}
}
}
}
return target ;
}
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}

exports.debug = debug; // for test
/***/ } ) ,
/***/ 1452 :
/***/ ( function ( _ _unused _webpack _module , exports ) {
/**
 * web-streams-polyfill v3.2.1
 */
( function ( global , factory ) {
true ? factory ( exports ) :
0 ;
} ( this , ( function ( exports ) { 'use strict' ;
/// <reference lib="es2015.symbol" />
const SymbolPolyfill = typeof Symbol === 'function' && typeof Symbol . iterator === 'symbol' ?
Symbol :
description => ` Symbol( ${ description } ) ` ;
/// <reference lib="dom" />
function noop ( ) {
return undefined ;
}
function getGlobals ( ) {
if ( typeof self !== 'undefined' ) {
return self ;
}
else if ( typeof window !== 'undefined' ) {
return window ;
}
else if ( typeof global !== 'undefined' ) {
return global ;
}
return undefined ;
}
const globals = getGlobals ( ) ;
function typeIsObject ( x ) {
return ( typeof x === 'object' && x !== null ) || typeof x === 'function' ;
}
const rethrowAssertionErrorRejection = noop ;
const originalPromise = Promise ;
const originalPromiseThen = Promise . prototype . then ;
const originalPromiseResolve = Promise . resolve . bind ( originalPromise ) ;
const originalPromiseReject = Promise . reject . bind ( originalPromise ) ;
function newPromise ( executor ) {
return new originalPromise ( executor ) ;
}
function promiseResolvedWith ( value ) {
return originalPromiseResolve ( value ) ;
}
function promiseRejectedWith ( reason ) {
return originalPromiseReject ( reason ) ;
}
function PerformPromiseThen ( promise , onFulfilled , onRejected ) {
// There doesn't appear to be any way to correctly emulate the behaviour from JavaScript, so this is just an
// approximation.
return originalPromiseThen . call ( promise , onFulfilled , onRejected ) ;
}
function uponPromise ( promise , onFulfilled , onRejected ) {
PerformPromiseThen ( PerformPromiseThen ( promise , onFulfilled , onRejected ) , undefined , rethrowAssertionErrorRejection ) ;
}
function uponFulfillment ( promise , onFulfilled ) {
uponPromise ( promise , onFulfilled ) ;
}
function uponRejection ( promise , onRejected ) {
uponPromise ( promise , undefined , onRejected ) ;
}
function transformPromiseWith ( promise , fulfillmentHandler , rejectionHandler ) {
return PerformPromiseThen ( promise , fulfillmentHandler , rejectionHandler ) ;
}
function setPromiseIsHandledToTrue ( promise ) {
PerformPromiseThen ( promise , undefined , rethrowAssertionErrorRejection ) ;
}
const queueMicrotask = ( ( ) => {
const globalQueueMicrotask = globals && globals . queueMicrotask ;
if ( typeof globalQueueMicrotask === 'function' ) {
return globalQueueMicrotask ;
}
const resolvedPromise = promiseResolvedWith ( undefined ) ;
return ( fn ) => PerformPromiseThen ( resolvedPromise , fn ) ;
} ) ( ) ;
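    // Sketch of the fallback above: when the host lacks queueMicrotask, callbacks are scheduled
    // by chaining onto an already-resolved promise, so e.g.
    //   queueMicrotask(() => console.log('runs after the current task, before timers'));
    // still gets microtask (not macrotask) timing.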
function reflectCall ( F , V , args ) {
if ( typeof F !== 'function' ) {
throw new TypeError ( 'Argument is not a function' ) ;
}
return Function . prototype . apply . call ( F , V , args ) ;
}
function promiseCall ( F , V , args ) {
try {
return promiseResolvedWith ( reflectCall ( F , V , args ) ) ;
}
catch ( value ) {
return promiseRejectedWith ( value ) ;
}
}
// Original from Chromium
// https://chromium.googlesource.com/chromium/src/+/0aee4434a4dba42a42abaea9bfbc0cd196a63bc1/third_party/blink/renderer/core/streams/SimpleQueue.js
const QUEUE _MAX _ARRAY _SIZE = 16384 ;
/**
 * Simple queue structure.
 *
 * Avoids scalability issues with using a packed array directly by using
 * multiple arrays in a linked list and keeping the array size bounded.
 */
class SimpleQueue {
constructor ( ) {
this . _cursor = 0 ;
this . _size = 0 ;
// _front and _back are always defined.
this . _front = {
_elements : [ ] ,
_next : undefined
} ;
this . _back = this . _front ;
// The cursor is used to avoid calling Array.shift().
// It contains the index of the front element of the array inside the
// front-most node. It is always in the range [0, QUEUE_MAX_ARRAY_SIZE).
this . _cursor = 0 ;
// When there is only one node, size === elements.length - cursor.
this . _size = 0 ;
}
get length ( ) {
return this . _size ;
}
// For exception safety, this method is structured in order:
// 1. Read state
// 2. Calculate required state mutations
// 3. Perform state mutations
push ( element ) {
const oldBack = this . _back ;
let newBack = oldBack ;
if ( oldBack . _elements . length === QUEUE _MAX _ARRAY _SIZE - 1 ) {
newBack = {
_elements : [ ] ,
_next : undefined
} ;
}
// push() is the mutation most likely to throw an exception, so it
// goes first.
oldBack . _elements . push ( element ) ;
if ( newBack !== oldBack ) {
this . _back = newBack ;
oldBack . _next = newBack ;
}
++ this . _size ;
}
// Like push(), shift() follows the read -> calculate -> mutate pattern for
// exception safety.
shift ( ) { // must not be called on an empty queue
const oldFront = this . _front ;
let newFront = oldFront ;
const oldCursor = this . _cursor ;
let newCursor = oldCursor + 1 ;
const elements = oldFront . _elements ;
const element = elements [ oldCursor ] ;
if ( newCursor === QUEUE _MAX _ARRAY _SIZE ) {
newFront = oldFront . _next ;
newCursor = 0 ;
}
// No mutations before this point.
-- this . _size ;
this . _cursor = newCursor ;
if ( oldFront !== newFront ) {
this . _front = newFront ;
}
// Permit shifted element to be garbage collected.
elements [ oldCursor ] = undefined ;
return element ;
}
// The tricky thing about forEach() is that it can be called
// re-entrantly. The queue may be mutated inside the callback. It is easy to
// see that push() within the callback has no negative effects since the end
// of the queue is checked for on every iteration. If shift() is called
// repeatedly within the callback then the next iteration may return an
// element that has been removed. In this case the callback will be called
// with undefined values until we either "catch up" with elements that still
// exist or reach the back of the queue.
forEach ( callback ) {
let i = this . _cursor ;
let node = this . _front ;
let elements = node . _elements ;
while ( i !== elements . length || node . _next !== undefined ) {
if ( i === elements . length ) {
node = node . _next ;
elements = node . _elements ;
i = 0 ;
if ( elements . length === 0 ) {
break ;
}
}
callback ( elements [ i ] ) ;
++ i ;
}
}
// Return the element that would be returned if shift() was called now,
// without modifying the queue.
peek ( ) { // must not be called on an empty queue
const front = this . _front ;
const cursor = this . _cursor ;
return front . _elements [ cursor ] ;
}
}
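    // Illustrative example: SimpleQueue is the FIFO used below for pending read requests, e.g.
    //   const q = new SimpleQueue();
    //   q.push('a'); q.push('b');
    //   q.peek();    // 'a'
    //   q.shift();   // 'a'  (q.length is now 1)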
function ReadableStreamReaderGenericInitialize ( reader , stream ) {
reader . _ownerReadableStream = stream ;
stream . _reader = reader ;
if ( stream . _state === 'readable' ) {
defaultReaderClosedPromiseInitialize ( reader ) ;
}
else if ( stream . _state === 'closed' ) {
defaultReaderClosedPromiseInitializeAsResolved ( reader ) ;
}
else {
defaultReaderClosedPromiseInitializeAsRejected ( reader , stream . _storedError ) ;
}
}
// A client of ReadableStreamDefaultReader and ReadableStreamBYOBReader may use these functions directly to bypass state
// check.
function ReadableStreamReaderGenericCancel ( reader , reason ) {
const stream = reader . _ownerReadableStream ;
return ReadableStreamCancel ( stream , reason ) ;
}
function ReadableStreamReaderGenericRelease ( reader ) {
if ( reader . _ownerReadableStream . _state === 'readable' ) {
defaultReaderClosedPromiseReject ( reader , new TypeError ( ` Reader was released and can no longer be used to monitor the stream's closedness ` ) ) ;
}
else {
defaultReaderClosedPromiseResetToRejected ( reader , new TypeError ( ` Reader was released and can no longer be used to monitor the stream's closedness ` ) ) ;
}
reader . _ownerReadableStream . _reader = undefined ;
reader . _ownerReadableStream = undefined ;
}
// Helper functions for the readers.
function readerLockException ( name ) {
return new TypeError ( 'Cannot ' + name + ' a stream using a released reader' ) ;
}
// Helper functions for the ReadableStreamDefaultReader.
function defaultReaderClosedPromiseInitialize ( reader ) {
reader . _closedPromise = newPromise ( ( resolve , reject ) => {
reader . _closedPromise _resolve = resolve ;
reader . _closedPromise _reject = reject ;
} ) ;
}
function defaultReaderClosedPromiseInitializeAsRejected ( reader , reason ) {
defaultReaderClosedPromiseInitialize ( reader ) ;
defaultReaderClosedPromiseReject ( reader , reason ) ;
}
function defaultReaderClosedPromiseInitializeAsResolved ( reader ) {
defaultReaderClosedPromiseInitialize ( reader ) ;
defaultReaderClosedPromiseResolve ( reader ) ;
}
function defaultReaderClosedPromiseReject ( reader , reason ) {
if ( reader . _closedPromise _reject === undefined ) {
return ;
}
setPromiseIsHandledToTrue ( reader . _closedPromise ) ;
reader . _closedPromise _reject ( reason ) ;
reader . _closedPromise _resolve = undefined ;
reader . _closedPromise _reject = undefined ;
}
function defaultReaderClosedPromiseResetToRejected ( reader , reason ) {
defaultReaderClosedPromiseInitializeAsRejected ( reader , reason ) ;
}
function defaultReaderClosedPromiseResolve ( reader ) {
if ( reader . _closedPromise _resolve === undefined ) {
return ;
}
reader . _closedPromise _resolve ( undefined ) ;
reader . _closedPromise _resolve = undefined ;
reader . _closedPromise _reject = undefined ;
}
const AbortSteps = SymbolPolyfill ( '[[AbortSteps]]' ) ;
const ErrorSteps = SymbolPolyfill ( '[[ErrorSteps]]' ) ;
const CancelSteps = SymbolPolyfill ( '[[CancelSteps]]' ) ;
const PullSteps = SymbolPolyfill ( '[[PullSteps]]' ) ;
/// <reference lib="es2015.core" />
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite#Polyfill
const NumberIsFinite = Number . isFinite || function ( x ) {
return typeof x === 'number' && isFinite ( x ) ;
} ;
/// <reference lib="es2015.core" />
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc#Polyfill
const MathTrunc = Math . trunc || function ( v ) {
return v < 0 ? Math . ceil ( v ) : Math . floor ( v ) ;
} ;
// https://heycam.github.io/webidl/#idl-dictionaries
function isDictionary ( x ) {
return typeof x === 'object' || typeof x === 'function' ;
}
function assertDictionary ( obj , context ) {
if ( obj !== undefined && ! isDictionary ( obj ) ) {
throw new TypeError ( ` ${ context } is not an object. ` ) ;
}
}
// https://heycam.github.io/webidl/#idl-callback-functions
function assertFunction ( x , context ) {
if ( typeof x !== 'function' ) {
throw new TypeError ( ` ${ context } is not a function. ` ) ;
}
}
// https://heycam.github.io/webidl/#idl-object
function isObject ( x ) {
return ( typeof x === 'object' && x !== null ) || typeof x === 'function' ;
}
function assertObject ( x , context ) {
if ( ! isObject ( x ) ) {
throw new TypeError ( ` ${ context } is not an object. ` ) ;
}
}
function assertRequiredArgument ( x , position , context ) {
if ( x === undefined ) {
throw new TypeError ( ` Parameter ${ position } is required in ' ${ context } '. ` ) ;
}
}
function assertRequiredField ( x , field , context ) {
if ( x === undefined ) {
throw new TypeError ( ` ${ field } is required in ' ${ context } '. ` ) ;
}
}
// https://heycam.github.io/webidl/#idl-unrestricted-double
function convertUnrestrictedDouble ( value ) {
return Number ( value ) ;
}
function censorNegativeZero ( x ) {
return x === 0 ? 0 : x ;
}
function integerPart ( x ) {
return censorNegativeZero ( MathTrunc ( x ) ) ;
}
// https://heycam.github.io/webidl/#idl-unsigned-long-long
function convertUnsignedLongLongWithEnforceRange ( value , context ) {
const lowerBound = 0 ;
const upperBound = Number . MAX _SAFE _INTEGER ;
let x = Number ( value ) ;
x = censorNegativeZero ( x ) ;
if ( ! NumberIsFinite ( x ) ) {
throw new TypeError ( ` ${ context } is not a finite number ` ) ;
}
x = integerPart ( x ) ;
if ( x < lowerBound || x > upperBound ) {
throw new TypeError ( ` ${ context } is outside the accepted range of ${ lowerBound } to ${ upperBound } , inclusive ` ) ;
}
if ( ! NumberIsFinite ( x ) || x === 0 ) {
return 0 ;
}
// TODO Use BigInt if supported?
// let xBigInt = BigInt(integerPart(x));
// xBigInt = BigInt.asUintN(64, xBigInt);
// return Number(xBigInt);
return x ;
}
function assertReadableStream ( x , context ) {
if ( ! IsReadableStream ( x ) ) {
throw new TypeError ( ` ${ context } is not a ReadableStream. ` ) ;
}
}
// Abstract operations for the ReadableStream.
function AcquireReadableStreamDefaultReader ( stream ) {
return new ReadableStreamDefaultReader ( stream ) ;
}
// ReadableStream API exposed for controllers.
function ReadableStreamAddReadRequest ( stream , readRequest ) {
stream . _reader . _readRequests . push ( readRequest ) ;
}
function ReadableStreamFulfillReadRequest ( stream , chunk , done ) {
const reader = stream . _reader ;
const readRequest = reader . _readRequests . shift ( ) ;
if ( done ) {
readRequest . _closeSteps ( ) ;
}
else {
readRequest . _chunkSteps ( chunk ) ;
}
}
function ReadableStreamGetNumReadRequests ( stream ) {
return stream . _reader . _readRequests . length ;
}
function ReadableStreamHasDefaultReader ( stream ) {
const reader = stream . _reader ;
if ( reader === undefined ) {
return false ;
}
if ( ! IsReadableStreamDefaultReader ( reader ) ) {
return false ;
}
return true ;
}
/**
 * A default reader vended by a {@link ReadableStream}.
 *
 * @public
 */
class ReadableStreamDefaultReader {
constructor ( stream ) {
assertRequiredArgument ( stream , 1 , 'ReadableStreamDefaultReader' ) ;
assertReadableStream ( stream , 'First parameter' ) ;
if ( IsReadableStreamLocked ( stream ) ) {
throw new TypeError ( 'This stream has already been locked for exclusive reading by another reader' ) ;
}
ReadableStreamReaderGenericInitialize ( this , stream ) ;
this . _readRequests = new SimpleQueue ( ) ;
}
/ * *
* Returns a promise that will be fulfilled when the stream becomes closed ,
* or rejected if the stream ever errors or the reader ' s lock is released before the stream finishes closing .
* /
get closed ( ) {
if ( ! IsReadableStreamDefaultReader ( this ) ) {
return promiseRejectedWith ( defaultReaderBrandCheckException ( 'closed' ) ) ;
}
return this . _closedPromise ;
}
/ * *
* If the reader is active , behaves the same as { @ link ReadableStream . cancel | stream . cancel ( reason ) } .
* /
cancel ( reason = undefined ) {
if ( ! IsReadableStreamDefaultReader ( this ) ) {
return promiseRejectedWith ( defaultReaderBrandCheckException ( 'cancel' ) ) ;
}
if ( this . _ownerReadableStream === undefined ) {
return promiseRejectedWith ( readerLockException ( 'cancel' ) ) ;
}
return ReadableStreamReaderGenericCancel ( this , reason ) ;
}
/ * *
* Returns a promise that allows access to the next chunk from the stream ' s internal queue , if available .
*
* If reading a chunk causes the queue to become empty , more data will be pulled from the underlying source .
* /
read ( ) {
if ( ! IsReadableStreamDefaultReader ( this ) ) {
return promiseRejectedWith ( defaultReaderBrandCheckException ( 'read' ) ) ;
}
if ( this . _ownerReadableStream === undefined ) {
return promiseRejectedWith ( readerLockException ( 'read from' ) ) ;
}
let resolvePromise ;
let rejectPromise ;
const promise = newPromise ( ( resolve , reject ) => {
resolvePromise = resolve ;
rejectPromise = reject ;
} ) ;
const readRequest = {
_chunkSteps : chunk => resolvePromise ( { value : chunk , done : false } ) ,
_closeSteps : ( ) => resolvePromise ( { value : undefined , done : true } ) ,
_errorSteps : e => rejectPromise ( e )
} ;
ReadableStreamDefaultReaderRead ( this , readRequest ) ;
return promise ;
}
/ * *
* Releases the reader ' s lock on the corresponding stream . After the lock is released , the reader is no longer active .
* If the associated stream is errored when the lock is released , the reader will appear errored in the same way
* from now on ; otherwise , the reader will appear closed .
*
* A reader ' s lock cannot be released while it still has a pending read request , i . e . , if a promise returned by
* the reader ' s { @ link ReadableStreamDefaultReader . read | read ( ) } method has not yet been settled . Attempting to
* do so will throw a ` TypeError ` and leave the reader locked to the stream .
* /
releaseLock ( ) {
if ( ! IsReadableStreamDefaultReader ( this ) ) {
throw defaultReaderBrandCheckException ( 'releaseLock' ) ;
}
if ( this . _ownerReadableStream === undefined ) {
return ;
}
if ( this . _readRequests . length > 0 ) {
throw new TypeError ( 'Tried to release a reader lock when that reader has pending read() calls un-settled' ) ;
}
ReadableStreamReaderGenericRelease ( this ) ;
}
}
Object . defineProperties ( ReadableStreamDefaultReader . prototype , {
cancel : { enumerable : true } ,
read : { enumerable : true } ,
releaseLock : { enumerable : true } ,
closed : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( ReadableStreamDefaultReader . prototype , SymbolPolyfill . toStringTag , {
value : 'ReadableStreamDefaultReader' ,
configurable : true
} ) ;
}
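// A minimal usage sketch (not part of the polyfill): assuming `stream` is a
// ReadableStream produced by this bundle and `consume` is a hypothetical
// application callback, a default reader is typically used from inside an
// async function like so:
//
//   const reader = stream.getReader();
//   try {
//     for (;;) {
//       const { value, done } = await reader.read();
//       if (done) break;
//       consume(value);
//     }
//   } finally {
//     reader.releaseLock(); // safe here: no read() is pending any more
//   }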
// Abstract operations for the readers.
function IsReadableStreamDefaultReader ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_readRequests' ) ) {
return false ;
}
return x instanceof ReadableStreamDefaultReader ;
}
function ReadableStreamDefaultReaderRead ( reader , readRequest ) {
const stream = reader . _ownerReadableStream ;
stream . _disturbed = true ;
if ( stream . _state === 'closed' ) {
readRequest . _closeSteps ( ) ;
}
else if ( stream . _state === 'errored' ) {
readRequest . _errorSteps ( stream . _storedError ) ;
}
else {
stream . _readableStreamController [ PullSteps ] ( readRequest ) ;
}
}
// Helper functions for the ReadableStreamDefaultReader.
function defaultReaderBrandCheckException ( name ) {
return new TypeError ( ` ReadableStreamDefaultReader.prototype. ${ name } can only be used on a ReadableStreamDefaultReader ` ) ;
}
/// <reference lib="es2018.asynciterable" />
/* eslint-disable @typescript-eslint/no-empty-function */
const AsyncIteratorPrototype = Object . getPrototypeOf ( Object . getPrototypeOf ( async function * ( ) { } ) . prototype ) ;
/// <reference lib="es2018.asynciterable" />
class ReadableStreamAsyncIteratorImpl {
constructor ( reader , preventCancel ) {
this . _ongoingPromise = undefined ;
this . _isFinished = false ;
this . _reader = reader ;
this . _preventCancel = preventCancel ;
}
next ( ) {
const nextSteps = ( ) => this . _nextSteps ( ) ;
this . _ongoingPromise = this . _ongoingPromise ?
transformPromiseWith ( this . _ongoingPromise , nextSteps , nextSteps ) :
nextSteps ( ) ;
return this . _ongoingPromise ;
}
return ( value ) {
const returnSteps = ( ) => this . _returnSteps ( value ) ;
return this . _ongoingPromise ?
transformPromiseWith ( this . _ongoingPromise , returnSteps , returnSteps ) :
returnSteps ( ) ;
}
_nextSteps ( ) {
if ( this . _isFinished ) {
return Promise . resolve ( { value : undefined , done : true } ) ;
}
const reader = this . _reader ;
if ( reader . _ownerReadableStream === undefined ) {
return promiseRejectedWith ( readerLockException ( 'iterate' ) ) ;
}
let resolvePromise ;
let rejectPromise ;
const promise = newPromise ( ( resolve , reject ) => {
resolvePromise = resolve ;
rejectPromise = reject ;
} ) ;
const readRequest = {
_chunkSteps : chunk => {
this . _ongoingPromise = undefined ;
// This needs to be delayed by one microtask, otherwise we stop pulling too early which breaks a test.
// FIXME Is this a bug in the specification, or in the test?
queueMicrotask ( ( ) => resolvePromise ( { value : chunk , done : false } ) ) ;
} ,
_closeSteps : ( ) => {
this . _ongoingPromise = undefined ;
this . _isFinished = true ;
ReadableStreamReaderGenericRelease ( reader ) ;
resolvePromise ( { value : undefined , done : true } ) ;
} ,
_errorSteps : reason => {
this . _ongoingPromise = undefined ;
this . _isFinished = true ;
ReadableStreamReaderGenericRelease ( reader ) ;
rejectPromise ( reason ) ;
}
} ;
ReadableStreamDefaultReaderRead ( reader , readRequest ) ;
return promise ;
}
_returnSteps ( value ) {
if ( this . _isFinished ) {
return Promise . resolve ( { value , done : true } ) ;
}
this . _isFinished = true ;
const reader = this . _reader ;
if ( reader . _ownerReadableStream === undefined ) {
return promiseRejectedWith ( readerLockException ( 'finish iterating' ) ) ;
}
if ( ! this . _preventCancel ) {
const result = ReadableStreamReaderGenericCancel ( reader , value ) ;
ReadableStreamReaderGenericRelease ( reader ) ;
return transformPromiseWith ( result , ( ) => ( { value , done : true } ) ) ;
}
ReadableStreamReaderGenericRelease ( reader ) ;
return promiseResolvedWith ( { value , done : true } ) ;
}
}
const ReadableStreamAsyncIteratorPrototype = {
next ( ) {
if ( ! IsReadableStreamAsyncIterator ( this ) ) {
return promiseRejectedWith ( streamAsyncIteratorBrandCheckException ( 'next' ) ) ;
}
return this . _asyncIteratorImpl . next ( ) ;
} ,
return ( value ) {
if ( ! IsReadableStreamAsyncIterator ( this ) ) {
return promiseRejectedWith ( streamAsyncIteratorBrandCheckException ( 'return' ) ) ;
}
return this . _asyncIteratorImpl . return ( value ) ;
}
} ;
if ( AsyncIteratorPrototype !== undefined ) {
Object . setPrototypeOf ( ReadableStreamAsyncIteratorPrototype , AsyncIteratorPrototype ) ;
}
// Abstract operations for the ReadableStream.
function AcquireReadableStreamAsyncIterator ( stream , preventCancel ) {
const reader = AcquireReadableStreamDefaultReader ( stream ) ;
const impl = new ReadableStreamAsyncIteratorImpl ( reader , preventCancel ) ;
const iterator = Object . create ( ReadableStreamAsyncIteratorPrototype ) ;
iterator . _asyncIteratorImpl = impl ;
return iterator ;
}
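// Illustrative sketch only: assuming the ReadableStream class defined later in
// this bundle wires values()/[Symbol.asyncIterator] to
// AcquireReadableStreamAsyncIterator, consumers can iterate a stream directly
// from an async function:
//
//   for await (const chunk of stream) {
//     consume(chunk); // hypothetical application callback
//   }
//
// Breaking out of the loop triggers return(), which cancels the stream unless
// preventCancel was requested when the iterator was acquired.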
function IsReadableStreamAsyncIterator ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_asyncIteratorImpl' ) ) {
return false ;
}
try {
// noinspection SuspiciousTypeOfGuard
return x . _asyncIteratorImpl instanceof
ReadableStreamAsyncIteratorImpl ;
}
catch ( _a ) {
return false ;
}
}
// Helper functions for the ReadableStream.
function streamAsyncIteratorBrandCheckException ( name ) {
return new TypeError ( ` ReadableStreamAsyncIterator. ${ name } can only be used on a ReadableStreamAsyncIterator ` ) ;
    return new TypeError ( ` ReadableStreamAsyncIterator. ${ name } can only be used on a ReadableStreamAsyncIterator ` ) ;
}
/// <reference lib="es2015.core" />
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN#Polyfill
const NumberIsNaN = Number . isNaN || function ( x ) {
// eslint-disable-next-line no-self-compare
return x !== x ;
} ;
function CreateArrayFromList ( elements ) {
// We use arrays to represent lists, so this is basically a no-op.
// Do a slice though, just in case we happen to depend on the uniqueness.
return elements . slice ( ) ;
}
function CopyDataBlockBytes ( dest , destOffset , src , srcOffset , n ) {
new Uint8Array ( dest ) . set ( new Uint8Array ( src , srcOffset , n ) , destOffset ) ;
}
// Not implemented correctly
function TransferArrayBuffer ( O ) {
return O ;
}
// Not implemented correctly
// eslint-disable-next-line @typescript-eslint/no-unused-vars
function IsDetachedBuffer ( O ) {
return false ;
}
function ArrayBufferSlice ( buffer , begin , end ) {
// ArrayBuffer.prototype.slice is not available on IE10
// https://www.caniuse.com/mdn-javascript_builtins_arraybuffer_slice
if ( buffer . slice ) {
return buffer . slice ( begin , end ) ;
}
const length = end - begin ;
const slice = new ArrayBuffer ( length ) ;
CopyDataBlockBytes ( slice , 0 , buffer , begin , length ) ;
return slice ;
}
function IsNonNegativeNumber ( v ) {
if ( typeof v !== 'number' ) {
return false ;
}
if ( NumberIsNaN ( v ) ) {
return false ;
}
if ( v < 0 ) {
return false ;
}
return true ;
}
function CloneAsUint8Array ( O ) {
const buffer = ArrayBufferSlice ( O . buffer , O . byteOffset , O . byteOffset + O . byteLength ) ;
return new Uint8Array ( buffer ) ;
}
function DequeueValue ( container ) {
const pair = container . _queue . shift ( ) ;
container . _queueTotalSize -= pair . size ;
if ( container . _queueTotalSize < 0 ) {
container . _queueTotalSize = 0 ;
}
return pair . value ;
}
function EnqueueValueWithSize ( container , value , size ) {
if ( ! IsNonNegativeNumber ( size ) || size === Infinity ) {
throw new RangeError ( 'Size must be a finite, non-NaN, non-negative number.' ) ;
}
container . _queue . push ( { value , size } ) ;
container . _queueTotalSize += size ;
}
function PeekQueueValue ( container ) {
const pair = container . _queue . peek ( ) ;
return pair . value ;
}
function ResetQueue ( container ) {
container . _queue = new SimpleQueue ( ) ;
container . _queueTotalSize = 0 ;
}
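// Illustrative sketch of the queue-with-sizes operations above; the container
// shape mirrors what the controllers in this file use:
//
//   const container = { _queue: new SimpleQueue(), _queueTotalSize: 0 };
//   EnqueueValueWithSize(container, 'a', 2);
//   EnqueueValueWithSize(container, 'b', 3);
//   PeekQueueValue(container);  // 'a'
//   DequeueValue(container);    // 'a'; _queueTotalSize drops from 5 to 3
//   ResetQueue(container);      // empties the queue, _queueTotalSize becomes 0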
/ * *
* A pull - into request in a { @ link ReadableByteStreamController } .
*
* @ public
* /
class ReadableStreamBYOBRequest {
constructor ( ) {
throw new TypeError ( 'Illegal constructor' ) ;
}
/ * *
* Returns the view for writing into , or ` null ` if the BYOB request has already been responded to .
* /
get view ( ) {
if ( ! IsReadableStreamBYOBRequest ( this ) ) {
throw byobRequestBrandCheckException ( 'view' ) ;
}
return this . _view ;
}
respond ( bytesWritten ) {
if ( ! IsReadableStreamBYOBRequest ( this ) ) {
throw byobRequestBrandCheckException ( 'respond' ) ;
}
assertRequiredArgument ( bytesWritten , 1 , 'respond' ) ;
bytesWritten = convertUnsignedLongLongWithEnforceRange ( bytesWritten , 'First parameter' ) ;
if ( this . _associatedReadableByteStreamController === undefined ) {
throw new TypeError ( 'This BYOB request has been invalidated' ) ;
}
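// Note: IsDetachedBuffer() is stubbed to always return false in this polyfill
// (see the "Not implemented correctly" helpers above), so the detached-buffer
// check below is intentionally a no-op.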
if ( IsDetachedBuffer ( this . _view . buffer ) ) ;
ReadableByteStreamControllerRespond ( this . _associatedReadableByteStreamController , bytesWritten ) ;
}
respondWithNewView ( view ) {
if ( ! IsReadableStreamBYOBRequest ( this ) ) {
throw byobRequestBrandCheckException ( 'respondWithNewView' ) ;
}
assertRequiredArgument ( view , 1 , 'respondWithNewView' ) ;
if ( ! ArrayBuffer . isView ( view ) ) {
throw new TypeError ( 'You can only respond with array buffer views' ) ;
}
if ( this . _associatedReadableByteStreamController === undefined ) {
throw new TypeError ( 'This BYOB request has been invalidated' ) ;
}
if ( IsDetachedBuffer ( view . buffer ) ) ;
ReadableByteStreamControllerRespondWithNewView ( this . _associatedReadableByteStreamController , view ) ;
}
}
Object . defineProperties ( ReadableStreamBYOBRequest . prototype , {
respond : { enumerable : true } ,
respondWithNewView : { enumerable : true } ,
view : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( ReadableStreamBYOBRequest . prototype , SymbolPolyfill . toStringTag , {
value : 'ReadableStreamBYOBRequest' ,
configurable : true
} ) ;
}
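// A minimal usage sketch (not part of the polyfill): a BYOB request is
// typically handled inside an underlying byte source's pull() callback.
// `readBytesInto` is a hypothetical function that fills a view and returns the
// number of bytes it wrote:
//
//   pull(controller) {
//     const request = controller.byobRequest;
//     if (request !== null) {
//       const bytesWritten = readBytesInto(request.view);
//       request.respond(bytesWritten);
//     }
//   }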
/ * *
* Allows control of a { @ link ReadableStream | readable byte stream } ' s state and internal queue .
*
* @ public
* /
class ReadableByteStreamController {
constructor ( ) {
throw new TypeError ( 'Illegal constructor' ) ;
}
/ * *
* Returns the current BYOB pull request , or ` null ` if there isn ' t one .
* /
get byobRequest ( ) {
if ( ! IsReadableByteStreamController ( this ) ) {
throw byteStreamControllerBrandCheckException ( 'byobRequest' ) ;
}
return ReadableByteStreamControllerGetBYOBRequest ( this ) ;
}
/ * *
* Returns the desired size to fill the controlled stream ' s internal queue . It can be negative , if the queue is
* over - full . An underlying byte source ought to use this information to determine when and how to apply backpressure .
* /
get desiredSize ( ) {
if ( ! IsReadableByteStreamController ( this ) ) {
throw byteStreamControllerBrandCheckException ( 'desiredSize' ) ;
}
return ReadableByteStreamControllerGetDesiredSize ( this ) ;
}
/ * *
* Closes the controlled readable stream . Consumers will still be able to read any previously - enqueued chunks from
* the stream , but once those are read , the stream will become closed .
* /
close ( ) {
if ( ! IsReadableByteStreamController ( this ) ) {
throw byteStreamControllerBrandCheckException ( 'close' ) ;
}
if ( this . _closeRequested ) {
throw new TypeError ( 'The stream has already been closed; do not close it again!' ) ;
}
const state = this . _controlledReadableByteStream . _state ;
if ( state !== 'readable' ) {
throw new TypeError ( ` The stream (in ${ state } state) is not in the readable state and cannot be closed ` ) ;
}
ReadableByteStreamControllerClose ( this ) ;
}
enqueue ( chunk ) {
if ( ! IsReadableByteStreamController ( this ) ) {
throw byteStreamControllerBrandCheckException ( 'enqueue' ) ;
}
assertRequiredArgument ( chunk , 1 , 'enqueue' ) ;
if ( ! ArrayBuffer . isView ( chunk ) ) {
throw new TypeError ( 'chunk must be an array buffer view' ) ;
}
if ( chunk . byteLength === 0 ) {
throw new TypeError ( 'chunk must have non-zero byteLength' ) ;
}
if ( chunk . buffer . byteLength === 0 ) {
throw new TypeError ( ` chunk's buffer must have non-zero byteLength ` ) ;
}
if ( this . _closeRequested ) {
throw new TypeError ( 'stream is closed or draining' ) ;
}
const state = this . _controlledReadableByteStream . _state ;
if ( state !== 'readable' ) {
throw new TypeError ( ` The stream (in ${ state } state) is not in the readable state and cannot be enqueued to ` ) ;
}
ReadableByteStreamControllerEnqueue ( this , chunk ) ;
}
/ * *
* Errors the controlled readable stream , making all future interactions with it fail with the given error ` e ` .
* /
error ( e = undefined ) {
if ( ! IsReadableByteStreamController ( this ) ) {
throw byteStreamControllerBrandCheckException ( 'error' ) ;
}
ReadableByteStreamControllerError ( this , e ) ;
}
/** @internal */
[ CancelSteps ] ( reason ) {
ReadableByteStreamControllerClearPendingPullIntos ( this ) ;
ResetQueue ( this ) ;
const result = this . _cancelAlgorithm ( reason ) ;
ReadableByteStreamControllerClearAlgorithms ( this ) ;
return result ;
}
/** @internal */
[ PullSteps ] ( readRequest ) {
const stream = this . _controlledReadableByteStream ;
if ( this . _queueTotalSize > 0 ) {
const entry = this . _queue . shift ( ) ;
this . _queueTotalSize -= entry . byteLength ;
ReadableByteStreamControllerHandleQueueDrain ( this ) ;
const view = new Uint8Array ( entry . buffer , entry . byteOffset , entry . byteLength ) ;
readRequest . _chunkSteps ( view ) ;
return ;
}
const autoAllocateChunkSize = this . _autoAllocateChunkSize ;
if ( autoAllocateChunkSize !== undefined ) {
let buffer ;
try {
buffer = new ArrayBuffer ( autoAllocateChunkSize ) ;
}
catch ( bufferE ) {
readRequest . _errorSteps ( bufferE ) ;
return ;
}
const pullIntoDescriptor = {
buffer ,
bufferByteLength : autoAllocateChunkSize ,
byteOffset : 0 ,
byteLength : autoAllocateChunkSize ,
bytesFilled : 0 ,
elementSize : 1 ,
viewConstructor : Uint8Array ,
readerType : 'default'
} ;
this . _pendingPullIntos . push ( pullIntoDescriptor ) ;
}
ReadableStreamAddReadRequest ( stream , readRequest ) ;
ReadableByteStreamControllerCallPullIfNeeded ( this ) ;
}
}
Object . defineProperties ( ReadableByteStreamController . prototype , {
close : { enumerable : true } ,
enqueue : { enumerable : true } ,
error : { enumerable : true } ,
byobRequest : { enumerable : true } ,
desiredSize : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( ReadableByteStreamController . prototype , SymbolPolyfill . toStringTag , {
value : 'ReadableByteStreamController' ,
configurable : true
} ) ;
}
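// A minimal usage sketch (not part of the polyfill): assuming the
// ReadableStream class defined later in this bundle accepts a byte source via
// { type: 'bytes' }, the controller above is what such a source receives:
//
//   let sent = false;
//   const stream = new ReadableStream({
//     type: 'bytes',
//     pull(controller) {
//       if (!sent) {
//         controller.enqueue(new Uint8Array([1, 2, 3]));
//         sent = true;
//       } else {
//         controller.close();
//       }
//     }
//   });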
// Abstract operations for the ReadableByteStreamController.
function IsReadableByteStreamController ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_controlledReadableByteStream' ) ) {
return false ;
}
return x instanceof ReadableByteStreamController ;
}
function IsReadableStreamBYOBRequest ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_associatedReadableByteStreamController' ) ) {
return false ;
}
return x instanceof ReadableStreamBYOBRequest ;
}
function ReadableByteStreamControllerCallPullIfNeeded ( controller ) {
const shouldPull = ReadableByteStreamControllerShouldCallPull ( controller ) ;
if ( ! shouldPull ) {
return ;
}
if ( controller . _pulling ) {
controller . _pullAgain = true ;
return ;
}
controller . _pulling = true ;
// TODO: Test controller argument
const pullPromise = controller . _pullAlgorithm ( ) ;
uponPromise ( pullPromise , ( ) => {
controller . _pulling = false ;
if ( controller . _pullAgain ) {
controller . _pullAgain = false ;
ReadableByteStreamControllerCallPullIfNeeded ( controller ) ;
}
} , e => {
ReadableByteStreamControllerError ( controller , e ) ;
} ) ;
}
function ReadableByteStreamControllerClearPendingPullIntos ( controller ) {
ReadableByteStreamControllerInvalidateBYOBRequest ( controller ) ;
controller . _pendingPullIntos = new SimpleQueue ( ) ;
}
function ReadableByteStreamControllerCommitPullIntoDescriptor ( stream , pullIntoDescriptor ) {
let done = false ;
if ( stream . _state === 'closed' ) {
done = true ;
}
const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor ( pullIntoDescriptor ) ;
if ( pullIntoDescriptor . readerType === 'default' ) {
ReadableStreamFulfillReadRequest ( stream , filledView , done ) ;
}
else {
ReadableStreamFulfillReadIntoRequest ( stream , filledView , done ) ;
}
}
function ReadableByteStreamControllerConvertPullIntoDescriptor ( pullIntoDescriptor ) {
const bytesFilled = pullIntoDescriptor . bytesFilled ;
const elementSize = pullIntoDescriptor . elementSize ;
return new pullIntoDescriptor . viewConstructor ( pullIntoDescriptor . buffer , pullIntoDescriptor . byteOffset , bytesFilled / elementSize ) ;
}
function ReadableByteStreamControllerEnqueueChunkToQueue ( controller , buffer , byteOffset , byteLength ) {
controller . _queue . push ( { buffer , byteOffset , byteLength } ) ;
controller . _queueTotalSize += byteLength ;
}
function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue ( controller , pullIntoDescriptor ) {
const elementSize = pullIntoDescriptor . elementSize ;
const currentAlignedBytes = pullIntoDescriptor . bytesFilled - pullIntoDescriptor . bytesFilled % elementSize ;
const maxBytesToCopy = Math . min ( controller . _queueTotalSize , pullIntoDescriptor . byteLength - pullIntoDescriptor . bytesFilled ) ;
const maxBytesFilled = pullIntoDescriptor . bytesFilled + maxBytesToCopy ;
const maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize ;
let totalBytesToCopyRemaining = maxBytesToCopy ;
let ready = false ;
if ( maxAlignedBytes > currentAlignedBytes ) {
totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor . bytesFilled ;
ready = true ;
}
const queue = controller . _queue ;
while ( totalBytesToCopyRemaining > 0 ) {
const headOfQueue = queue . peek ( ) ;
const bytesToCopy = Math . min ( totalBytesToCopyRemaining , headOfQueue . byteLength ) ;
const destStart = pullIntoDescriptor . byteOffset + pullIntoDescriptor . bytesFilled ;
CopyDataBlockBytes ( pullIntoDescriptor . buffer , destStart , headOfQueue . buffer , headOfQueue . byteOffset , bytesToCopy ) ;
if ( headOfQueue . byteLength === bytesToCopy ) {
queue . shift ( ) ;
}
else {
headOfQueue . byteOffset += bytesToCopy ;
headOfQueue . byteLength -= bytesToCopy ;
}
controller . _queueTotalSize -= bytesToCopy ;
ReadableByteStreamControllerFillHeadPullIntoDescriptor ( controller , bytesToCopy , pullIntoDescriptor ) ;
totalBytesToCopyRemaining -= bytesToCopy ;
}
return ready ;
}
function ReadableByteStreamControllerFillHeadPullIntoDescriptor ( controller , size , pullIntoDescriptor ) {
pullIntoDescriptor . bytesFilled += size ;
}
function ReadableByteStreamControllerHandleQueueDrain ( controller ) {
if ( controller . _queueTotalSize === 0 && controller . _closeRequested ) {
ReadableByteStreamControllerClearAlgorithms ( controller ) ;
ReadableStreamClose ( controller . _controlledReadableByteStream ) ;
}
else {
ReadableByteStreamControllerCallPullIfNeeded ( controller ) ;
}
}
function ReadableByteStreamControllerInvalidateBYOBRequest ( controller ) {
if ( controller . _byobRequest === null ) {
return ;
}
controller . _byobRequest . _associatedReadableByteStreamController = undefined ;
controller . _byobRequest . _view = null ;
controller . _byobRequest = null ;
}
function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue ( controller ) {
while ( controller . _pendingPullIntos . length > 0 ) {
if ( controller . _queueTotalSize === 0 ) {
return ;
}
const pullIntoDescriptor = controller . _pendingPullIntos . peek ( ) ;
if ( ReadableByteStreamControllerFillPullIntoDescriptorFromQueue ( controller , pullIntoDescriptor ) ) {
ReadableByteStreamControllerShiftPendingPullInto ( controller ) ;
ReadableByteStreamControllerCommitPullIntoDescriptor ( controller . _controlledReadableByteStream , pullIntoDescriptor ) ;
}
}
}
function ReadableByteStreamControllerPullInto ( controller , view , readIntoRequest ) {
const stream = controller . _controlledReadableByteStream ;
let elementSize = 1 ;
if ( view . constructor !== DataView ) {
elementSize = view . constructor . BYTES _PER _ELEMENT ;
}
const ctor = view . constructor ;
// try {
const buffer = TransferArrayBuffer ( view . buffer ) ;
// } catch (e) {
// readIntoRequest._errorSteps(e);
// return;
// }
const pullIntoDescriptor = {
buffer ,
bufferByteLength : buffer . byteLength ,
byteOffset : view . byteOffset ,
byteLength : view . byteLength ,
bytesFilled : 0 ,
elementSize ,
viewConstructor : ctor ,
readerType : 'byob'
} ;
if ( controller . _pendingPullIntos . length > 0 ) {
controller . _pendingPullIntos . push ( pullIntoDescriptor ) ;
// No ReadableByteStreamControllerCallPullIfNeeded() call since:
// - No change happens on desiredSize
// - The source has already been notified that there's at least 1 pending read(view)
ReadableStreamAddReadIntoRequest ( stream , readIntoRequest ) ;
return ;
}
if ( stream . _state === 'closed' ) {
const emptyView = new ctor ( pullIntoDescriptor . buffer , pullIntoDescriptor . byteOffset , 0 ) ;
readIntoRequest . _closeSteps ( emptyView ) ;
return ;
}
if ( controller . _queueTotalSize > 0 ) {
if ( ReadableByteStreamControllerFillPullIntoDescriptorFromQueue ( controller , pullIntoDescriptor ) ) {
const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor ( pullIntoDescriptor ) ;
ReadableByteStreamControllerHandleQueueDrain ( controller ) ;
readIntoRequest . _chunkSteps ( filledView ) ;
return ;
}
if ( controller . _closeRequested ) {
const e = new TypeError ( 'Insufficient bytes to fill elements in the given buffer' ) ;
ReadableByteStreamControllerError ( controller , e ) ;
readIntoRequest . _errorSteps ( e ) ;
return ;
}
}
controller . _pendingPullIntos . push ( pullIntoDescriptor ) ;
ReadableStreamAddReadIntoRequest ( stream , readIntoRequest ) ;
ReadableByteStreamControllerCallPullIfNeeded ( controller ) ;
}
function ReadableByteStreamControllerRespondInClosedState ( controller , firstDescriptor ) {
const stream = controller . _controlledReadableByteStream ;
if ( ReadableStreamHasBYOBReader ( stream ) ) {
while ( ReadableStreamGetNumReadIntoRequests ( stream ) > 0 ) {
const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto ( controller ) ;
ReadableByteStreamControllerCommitPullIntoDescriptor ( stream , pullIntoDescriptor ) ;
}
}
}
function ReadableByteStreamControllerRespondInReadableState ( controller , bytesWritten , pullIntoDescriptor ) {
ReadableByteStreamControllerFillHeadPullIntoDescriptor ( controller , bytesWritten , pullIntoDescriptor ) ;
if ( pullIntoDescriptor . bytesFilled < pullIntoDescriptor . elementSize ) {
return ;
}
ReadableByteStreamControllerShiftPendingPullInto ( controller ) ;
const remainderSize = pullIntoDescriptor . bytesFilled % pullIntoDescriptor . elementSize ;
if ( remainderSize > 0 ) {
const end = pullIntoDescriptor . byteOffset + pullIntoDescriptor . bytesFilled ;
const remainder = ArrayBufferSlice ( pullIntoDescriptor . buffer , end - remainderSize , end ) ;
ReadableByteStreamControllerEnqueueChunkToQueue ( controller , remainder , 0 , remainder . byteLength ) ;
}
pullIntoDescriptor . bytesFilled -= remainderSize ;
ReadableByteStreamControllerCommitPullIntoDescriptor ( controller . _controlledReadableByteStream , pullIntoDescriptor ) ;
ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue ( controller ) ;
}
function ReadableByteStreamControllerRespondInternal ( controller , bytesWritten ) {
const firstDescriptor = controller . _pendingPullIntos . peek ( ) ;
ReadableByteStreamControllerInvalidateBYOBRequest ( controller ) ;
const state = controller . _controlledReadableByteStream . _state ;
if ( state === 'closed' ) {
ReadableByteStreamControllerRespondInClosedState ( controller ) ;
}
else {
ReadableByteStreamControllerRespondInReadableState ( controller , bytesWritten , firstDescriptor ) ;
}
ReadableByteStreamControllerCallPullIfNeeded ( controller ) ;
}
function ReadableByteStreamControllerShiftPendingPullInto ( controller ) {
const descriptor = controller . _pendingPullIntos . shift ( ) ;
return descriptor ;
}
function ReadableByteStreamControllerShouldCallPull ( controller ) {
const stream = controller . _controlledReadableByteStream ;
if ( stream . _state !== 'readable' ) {
return false ;
}
if ( controller . _closeRequested ) {
return false ;
}
if ( ! controller . _started ) {
return false ;
}
if ( ReadableStreamHasDefaultReader ( stream ) && ReadableStreamGetNumReadRequests ( stream ) > 0 ) {
return true ;
}
if ( ReadableStreamHasBYOBReader ( stream ) && ReadableStreamGetNumReadIntoRequests ( stream ) > 0 ) {
return true ;
}
const desiredSize = ReadableByteStreamControllerGetDesiredSize ( controller ) ;
if ( desiredSize > 0 ) {
return true ;
}
return false ;
}
function ReadableByteStreamControllerClearAlgorithms ( controller ) {
controller . _pullAlgorithm = undefined ;
controller . _cancelAlgorithm = undefined ;
}
// A client of ReadableByteStreamController may use these functions directly to bypass the state checks.
function ReadableByteStreamControllerClose ( controller ) {
const stream = controller . _controlledReadableByteStream ;
if ( controller . _closeRequested || stream . _state !== 'readable' ) {
return ;
}
if ( controller . _queueTotalSize > 0 ) {
controller . _closeRequested = true ;
return ;
}
if ( controller . _pendingPullIntos . length > 0 ) {
const firstPendingPullInto = controller . _pendingPullIntos . peek ( ) ;
if ( firstPendingPullInto . bytesFilled > 0 ) {
const e = new TypeError ( 'Insufficient bytes to fill elements in the given buffer' ) ;
ReadableByteStreamControllerError ( controller , e ) ;
throw e ;
}
}
ReadableByteStreamControllerClearAlgorithms ( controller ) ;
ReadableStreamClose ( stream ) ;
}
function ReadableByteStreamControllerEnqueue ( controller , chunk ) {
const stream = controller . _controlledReadableByteStream ;
if ( controller . _closeRequested || stream . _state !== 'readable' ) {
return ;
}
const buffer = chunk . buffer ;
const byteOffset = chunk . byteOffset ;
const byteLength = chunk . byteLength ;
const transferredBuffer = TransferArrayBuffer ( buffer ) ;
if ( controller . _pendingPullIntos . length > 0 ) {
const firstPendingPullInto = controller . _pendingPullIntos . peek ( ) ;
if ( IsDetachedBuffer ( firstPendingPullInto . buffer ) ) ;
firstPendingPullInto . buffer = TransferArrayBuffer ( firstPendingPullInto . buffer ) ;
}
ReadableByteStreamControllerInvalidateBYOBRequest ( controller ) ;
if ( ReadableStreamHasDefaultReader ( stream ) ) {
if ( ReadableStreamGetNumReadRequests ( stream ) === 0 ) {
ReadableByteStreamControllerEnqueueChunkToQueue ( controller , transferredBuffer , byteOffset , byteLength ) ;
}
else {
if ( controller . _pendingPullIntos . length > 0 ) {
ReadableByteStreamControllerShiftPendingPullInto ( controller ) ;
}
const transferredView = new Uint8Array ( transferredBuffer , byteOffset , byteLength ) ;
ReadableStreamFulfillReadRequest ( stream , transferredView , false ) ;
}
}
else if ( ReadableStreamHasBYOBReader ( stream ) ) {
// TODO: Ideally in this branch detaching should happen only if the buffer is not consumed fully.
ReadableByteStreamControllerEnqueueChunkToQueue ( controller , transferredBuffer , byteOffset , byteLength ) ;
ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue ( controller ) ;
}
else {
ReadableByteStreamControllerEnqueueChunkToQueue ( controller , transferredBuffer , byteOffset , byteLength ) ;
}
ReadableByteStreamControllerCallPullIfNeeded ( controller ) ;
}
function ReadableByteStreamControllerError ( controller , e ) {
const stream = controller . _controlledReadableByteStream ;
if ( stream . _state !== 'readable' ) {
return ;
}
ReadableByteStreamControllerClearPendingPullIntos ( controller ) ;
ResetQueue ( controller ) ;
ReadableByteStreamControllerClearAlgorithms ( controller ) ;
ReadableStreamError ( stream , e ) ;
}
function ReadableByteStreamControllerGetBYOBRequest ( controller ) {
if ( controller . _byobRequest === null && controller . _pendingPullIntos . length > 0 ) {
const firstDescriptor = controller . _pendingPullIntos . peek ( ) ;
const view = new Uint8Array ( firstDescriptor . buffer , firstDescriptor . byteOffset + firstDescriptor . bytesFilled , firstDescriptor . byteLength - firstDescriptor . bytesFilled ) ;
const byobRequest = Object . create ( ReadableStreamBYOBRequest . prototype ) ;
SetUpReadableStreamBYOBRequest ( byobRequest , controller , view ) ;
controller . _byobRequest = byobRequest ;
}
return controller . _byobRequest ;
}
function ReadableByteStreamControllerGetDesiredSize ( controller ) {
const state = controller . _controlledReadableByteStream . _state ;
if ( state === 'errored' ) {
return null ;
}
if ( state === 'closed' ) {
return 0 ;
}
return controller . _strategyHWM - controller . _queueTotalSize ;
}
function ReadableByteStreamControllerRespond ( controller , bytesWritten ) {
const firstDescriptor = controller . _pendingPullIntos . peek ( ) ;
const state = controller . _controlledReadableByteStream . _state ;
if ( state === 'closed' ) {
if ( bytesWritten !== 0 ) {
throw new TypeError ( 'bytesWritten must be 0 when calling respond() on a closed stream' ) ;
}
}
else {
if ( bytesWritten === 0 ) {
throw new TypeError ( 'bytesWritten must be greater than 0 when calling respond() on a readable stream' ) ;
}
if ( firstDescriptor . bytesFilled + bytesWritten > firstDescriptor . byteLength ) {
throw new RangeError ( 'bytesWritten out of range' ) ;
}
}
firstDescriptor . buffer = TransferArrayBuffer ( firstDescriptor . buffer ) ;
ReadableByteStreamControllerRespondInternal ( controller , bytesWritten ) ;
}
function ReadableByteStreamControllerRespondWithNewView ( controller , view ) {
const firstDescriptor = controller . _pendingPullIntos . peek ( ) ;
const state = controller . _controlledReadableByteStream . _state ;
if ( state === 'closed' ) {
if ( view . byteLength !== 0 ) {
throw new TypeError ( 'The view\'s length must be 0 when calling respondWithNewView() on a closed stream' ) ;
}
}
else {
if ( view . byteLength === 0 ) {
throw new TypeError ( 'The view\'s length must be greater than 0 when calling respondWithNewView() on a readable stream' ) ;
}
}
if ( firstDescriptor . byteOffset + firstDescriptor . bytesFilled !== view . byteOffset ) {
throw new RangeError ( 'The region specified by view does not match byobRequest' ) ;
}
if ( firstDescriptor . bufferByteLength !== view . buffer . byteLength ) {
throw new RangeError ( 'The buffer of view has different capacity than byobRequest' ) ;
}
if ( firstDescriptor . bytesFilled + view . byteLength > firstDescriptor . byteLength ) {
throw new RangeError ( 'The region specified by view is larger than byobRequest' ) ;
}
const viewByteLength = view . byteLength ;
firstDescriptor . buffer = TransferArrayBuffer ( view . buffer ) ;
ReadableByteStreamControllerRespondInternal ( controller , viewByteLength ) ;
}
function SetUpReadableByteStreamController ( stream , controller , startAlgorithm , pullAlgorithm , cancelAlgorithm , highWaterMark , autoAllocateChunkSize ) {
controller . _controlledReadableByteStream = stream ;
controller . _pullAgain = false ;
controller . _pulling = false ;
controller . _byobRequest = null ;
// Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.
controller . _queue = controller . _queueTotalSize = undefined ;
ResetQueue ( controller ) ;
controller . _closeRequested = false ;
controller . _started = false ;
controller . _strategyHWM = highWaterMark ;
controller . _pullAlgorithm = pullAlgorithm ;
controller . _cancelAlgorithm = cancelAlgorithm ;
controller . _autoAllocateChunkSize = autoAllocateChunkSize ;
controller . _pendingPullIntos = new SimpleQueue ( ) ;
stream . _readableStreamController = controller ;
const startResult = startAlgorithm ( ) ;
uponPromise ( promiseResolvedWith ( startResult ) , ( ) => {
controller . _started = true ;
ReadableByteStreamControllerCallPullIfNeeded ( controller ) ;
} , r => {
ReadableByteStreamControllerError ( controller , r ) ;
} ) ;
}
function SetUpReadableByteStreamControllerFromUnderlyingSource ( stream , underlyingByteSource , highWaterMark ) {
const controller = Object . create ( ReadableByteStreamController . prototype ) ;
let startAlgorithm = ( ) => undefined ;
let pullAlgorithm = ( ) => promiseResolvedWith ( undefined ) ;
let cancelAlgorithm = ( ) => promiseResolvedWith ( undefined ) ;
if ( underlyingByteSource . start !== undefined ) {
startAlgorithm = ( ) => underlyingByteSource . start ( controller ) ;
}
if ( underlyingByteSource . pull !== undefined ) {
pullAlgorithm = ( ) => underlyingByteSource . pull ( controller ) ;
}
if ( underlyingByteSource . cancel !== undefined ) {
cancelAlgorithm = reason => underlyingByteSource . cancel ( reason ) ;
}
const autoAllocateChunkSize = underlyingByteSource . autoAllocateChunkSize ;
if ( autoAllocateChunkSize === 0 ) {
throw new TypeError ( 'autoAllocateChunkSize must be greater than 0' ) ;
}
SetUpReadableByteStreamController ( stream , controller , startAlgorithm , pullAlgorithm , cancelAlgorithm , highWaterMark , autoAllocateChunkSize ) ;
}
function SetUpReadableStreamBYOBRequest ( request , controller , view ) {
request . _associatedReadableByteStreamController = controller ;
request . _view = view ;
}
// Helper functions for the ReadableStreamBYOBRequest.
function byobRequestBrandCheckException ( name ) {
return new TypeError ( ` ReadableStreamBYOBRequest.prototype. ${ name } can only be used on a ReadableStreamBYOBRequest ` ) ;
}
// Helper functions for the ReadableByteStreamController.
function byteStreamControllerBrandCheckException ( name ) {
return new TypeError ( ` ReadableByteStreamController.prototype. ${ name } can only be used on a ReadableByteStreamController ` ) ;
}
// Abstract operations for the ReadableStream.
function AcquireReadableStreamBYOBReader ( stream ) {
return new ReadableStreamBYOBReader ( stream ) ;
}
// ReadableStream API exposed for controllers.
function ReadableStreamAddReadIntoRequest ( stream , readIntoRequest ) {
stream . _reader . _readIntoRequests . push ( readIntoRequest ) ;
}
function ReadableStreamFulfillReadIntoRequest ( stream , chunk , done ) {
const reader = stream . _reader ;
const readIntoRequest = reader . _readIntoRequests . shift ( ) ;
if ( done ) {
readIntoRequest . _closeSteps ( chunk ) ;
}
else {
readIntoRequest . _chunkSteps ( chunk ) ;
}
}
function ReadableStreamGetNumReadIntoRequests ( stream ) {
return stream . _reader . _readIntoRequests . length ;
}
function ReadableStreamHasBYOBReader ( stream ) {
const reader = stream . _reader ;
if ( reader === undefined ) {
return false ;
}
if ( ! IsReadableStreamBYOBReader ( reader ) ) {
return false ;
}
return true ;
}
/ * *
* A BYOB reader vended by a { @ link ReadableStream } .
*
* @ public
* /
class ReadableStreamBYOBReader {
constructor ( stream ) {
assertRequiredArgument ( stream , 1 , 'ReadableStreamBYOBReader' ) ;
assertReadableStream ( stream , 'First parameter' ) ;
if ( IsReadableStreamLocked ( stream ) ) {
throw new TypeError ( 'This stream has already been locked for exclusive reading by another reader' ) ;
}
if ( ! IsReadableByteStreamController ( stream . _readableStreamController ) ) {
throw new TypeError ( 'Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' +
'source' ) ;
}
ReadableStreamReaderGenericInitialize ( this , stream ) ;
this . _readIntoRequests = new SimpleQueue ( ) ;
}
/ * *
* Returns a promise that will be fulfilled when the stream becomes closed , or rejected if the stream ever errors or
* the reader ' s lock is released before the stream finishes closing .
* /
get closed ( ) {
if ( ! IsReadableStreamBYOBReader ( this ) ) {
return promiseRejectedWith ( byobReaderBrandCheckException ( 'closed' ) ) ;
}
return this . _closedPromise ;
}
/ * *
* If the reader is active , behaves the same as { @ link ReadableStream . cancel | stream . cancel ( reason ) } .
* /
cancel ( reason = undefined ) {
if ( ! IsReadableStreamBYOBReader ( this ) ) {
return promiseRejectedWith ( byobReaderBrandCheckException ( 'cancel' ) ) ;
}
if ( this . _ownerReadableStream === undefined ) {
return promiseRejectedWith ( readerLockException ( 'cancel' ) ) ;
}
return ReadableStreamReaderGenericCancel ( this , reason ) ;
}
/ * *
* Attempts to read bytes into view , and returns a promise resolved with the result .
*
* If reading a chunk causes the queue to become empty , more data will be pulled from the underlying source .
* /
read ( view ) {
if ( ! IsReadableStreamBYOBReader ( this ) ) {
return promiseRejectedWith ( byobReaderBrandCheckException ( 'read' ) ) ;
}
if ( ! ArrayBuffer . isView ( view ) ) {
return promiseRejectedWith ( new TypeError ( 'view must be an array buffer view' ) ) ;
}
if ( view . byteLength === 0 ) {
return promiseRejectedWith ( new TypeError ( 'view must have non-zero byteLength' ) ) ;
}
if ( view . buffer . byteLength === 0 ) {
return promiseRejectedWith ( new TypeError ( ` view's buffer must have non-zero byteLength ` ) ) ;
}
if ( IsDetachedBuffer ( view . buffer ) ) ;
if ( this . _ownerReadableStream === undefined ) {
return promiseRejectedWith ( readerLockException ( 'read from' ) ) ;
}
let resolvePromise ;
let rejectPromise ;
const promise = newPromise ( ( resolve , reject ) => {
resolvePromise = resolve ;
rejectPromise = reject ;
} ) ;
const readIntoRequest = {
_chunkSteps : chunk => resolvePromise ( { value : chunk , done : false } ) ,
_closeSteps : chunk => resolvePromise ( { value : chunk , done : true } ) ,
_errorSteps : e => rejectPromise ( e )
} ;
ReadableStreamBYOBReaderRead ( this , view , readIntoRequest ) ;
return promise ;
}
/ * *
* Releases the reader ' s lock on the corresponding stream . After the lock is released , the reader is no longer active .
* If the associated stream is errored when the lock is released , the reader will appear errored in the same way
* from now on ; otherwise , the reader will appear closed .
*
* A reader ' s lock cannot be released while it still has a pending read request , i . e . , if a promise returned by
* the reader ' s { @ link ReadableStreamBYOBReader . read | read ( ) } method has not yet been settled . Attempting to
* do so will throw a ` TypeError ` and leave the reader locked to the stream .
* /
releaseLock ( ) {
if ( ! IsReadableStreamBYOBReader ( this ) ) {
throw byobReaderBrandCheckException ( 'releaseLock' ) ;
}
if ( this . _ownerReadableStream === undefined ) {
return ;
}
if ( this . _readIntoRequests . length > 0 ) {
throw new TypeError ( 'Tried to release a reader lock when that reader has pending read() calls un-settled' ) ;
}
ReadableStreamReaderGenericRelease ( this ) ;
}
}
Object . defineProperties ( ReadableStreamBYOBReader . prototype , {
cancel : { enumerable : true } ,
read : { enumerable : true } ,
releaseLock : { enumerable : true } ,
closed : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( ReadableStreamBYOBReader . prototype , SymbolPolyfill . toStringTag , {
value : 'ReadableStreamBYOBReader' ,
configurable : true
} ) ;
}
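// A minimal usage sketch (not part of the polyfill): assuming the
// ReadableStream class later in this bundle exposes getReader({ mode: 'byob' })
// as in the standard API, a BYOB reader reads into caller-supplied buffers,
// recycling the buffer of each result. `consume` is a hypothetical callback;
// run this from inside an async function:
//
//   const reader = stream.getReader({ mode: 'byob' });
//   let view = new Uint8Array(1024);
//   for (;;) {
//     const { value, done } = await reader.read(view);
//     if (done) break;
//     consume(value);
//     view = new Uint8Array(value.buffer); // reuse the returned buffer
//   }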
// Abstract operations for the readers.
function IsReadableStreamBYOBReader ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_readIntoRequests' ) ) {
return false ;
}
return x instanceof ReadableStreamBYOBReader ;
}
function ReadableStreamBYOBReaderRead ( reader , view , readIntoRequest ) {
const stream = reader . _ownerReadableStream ;
stream . _disturbed = true ;
if ( stream . _state === 'errored' ) {
readIntoRequest . _errorSteps ( stream . _storedError ) ;
}
else {
ReadableByteStreamControllerPullInto ( stream . _readableStreamController , view , readIntoRequest ) ;
}
}
// Helper functions for the ReadableStreamBYOBReader.
function byobReaderBrandCheckException ( name ) {
return new TypeError ( ` ReadableStreamBYOBReader.prototype. ${ name } can only be used on a ReadableStreamBYOBReader ` ) ;
}
function ExtractHighWaterMark ( strategy , defaultHWM ) {
const { highWaterMark } = strategy ;
if ( highWaterMark === undefined ) {
return defaultHWM ;
}
if ( NumberIsNaN ( highWaterMark ) || highWaterMark < 0 ) {
throw new RangeError ( 'Invalid highWaterMark' ) ;
}
return highWaterMark ;
}
function ExtractSizeAlgorithm ( strategy ) {
const { size } = strategy ;
if ( ! size ) {
return ( ) => 1 ;
}
return size ;
}
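// Illustrative behaviour of the strategy helpers above (`chunk` is any
// hypothetical value passed to the size algorithm):
//
//   ExtractHighWaterMark({ highWaterMark: 4 }, 1);   // 4
//   ExtractHighWaterMark({}, 1);                     // 1 (falls back to the default)
//   ExtractHighWaterMark({ highWaterMark: -1 }, 1);  // throws RangeError
//   ExtractSizeAlgorithm({})(chunk);                 // 1 (default size algorithm)
//   ExtractSizeAlgorithm({ size: c => c.byteLength })(chunk); // chunk.byteLength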
function convertQueuingStrategy ( init , context ) {
assertDictionary ( init , context ) ;
const highWaterMark = init === null || init === void 0 ? void 0 : init . highWaterMark ;
const size = init === null || init === void 0 ? void 0 : init . size ;
return {
highWaterMark : highWaterMark === undefined ? undefined : convertUnrestrictedDouble ( highWaterMark ) ,
size : size === undefined ? undefined : convertQueuingStrategySize ( size , ` ${ context } has member 'size' that ` )
} ;
}
function convertQueuingStrategySize ( fn , context ) {
assertFunction ( fn , context ) ;
return chunk => convertUnrestrictedDouble ( fn ( chunk ) ) ;
}
function convertUnderlyingSink ( original , context ) {
assertDictionary ( original , context ) ;
const abort = original === null || original === void 0 ? void 0 : original . abort ;
const close = original === null || original === void 0 ? void 0 : original . close ;
const start = original === null || original === void 0 ? void 0 : original . start ;
const type = original === null || original === void 0 ? void 0 : original . type ;
const write = original === null || original === void 0 ? void 0 : original . write ;
return {
abort : abort === undefined ?
undefined :
convertUnderlyingSinkAbortCallback ( abort , original , ` ${ context } has member 'abort' that ` ) ,
close : close === undefined ?
undefined :
convertUnderlyingSinkCloseCallback ( close , original , ` ${ context } has member 'close' that ` ) ,
start : start === undefined ?
undefined :
convertUnderlyingSinkStartCallback ( start , original , ` ${ context } has member 'start' that ` ) ,
write : write === undefined ?
undefined :
convertUnderlyingSinkWriteCallback ( write , original , ` ${ context } has member 'write' that ` ) ,
type
} ;
}
function convertUnderlyingSinkAbortCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( reason ) => promiseCall ( fn , original , [ reason ] ) ;
}
function convertUnderlyingSinkCloseCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( ) => promiseCall ( fn , original , [ ] ) ;
}
function convertUnderlyingSinkStartCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( controller ) => reflectCall ( fn , original , [ controller ] ) ;
}
function convertUnderlyingSinkWriteCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( chunk , controller ) => promiseCall ( fn , original , [ chunk , controller ] ) ;
}
function assertWritableStream ( x , context ) {
if ( ! IsWritableStream ( x ) ) {
throw new TypeError ( ` ${ context } is not a WritableStream. ` ) ;
}
}
function isAbortSignal ( value ) {
if ( typeof value !== 'object' || value === null ) {
return false ;
}
try {
return typeof value . aborted === 'boolean' ;
}
catch ( _a ) {
// AbortSignal.prototype.aborted throws if its brand check fails
return false ;
}
}
const supportsAbortController = typeof AbortController === 'function' ;
/ * *
* Construct a new AbortController , if supported by the platform .
*
* @ internal
* /
function createAbortController ( ) {
if ( supportsAbortController ) {
return new AbortController ( ) ;
}
return undefined ;
}
/ * *
* A writable stream represents a destination for data , into which you can write .
*
* @ public
* /
class WritableStream {
constructor ( rawUnderlyingSink = { } , rawStrategy = { } ) {
if ( rawUnderlyingSink === undefined ) {
rawUnderlyingSink = null ;
}
else {
assertObject ( rawUnderlyingSink , 'First parameter' ) ;
}
const strategy = convertQueuingStrategy ( rawStrategy , 'Second parameter' ) ;
const underlyingSink = convertUnderlyingSink ( rawUnderlyingSink , 'First parameter' ) ;
InitializeWritableStream ( this ) ;
const type = underlyingSink . type ;
if ( type !== undefined ) {
throw new RangeError ( 'Invalid type is specified' ) ;
}
const sizeAlgorithm = ExtractSizeAlgorithm ( strategy ) ;
const highWaterMark = ExtractHighWaterMark ( strategy , 1 ) ;
SetUpWritableStreamDefaultControllerFromUnderlyingSink ( this , underlyingSink , highWaterMark , sizeAlgorithm ) ;
}
/ * *
* Returns whether or not the writable stream is locked to a writer .
* /
get locked ( ) {
if ( ! IsWritableStream ( this ) ) {
throw streamBrandCheckException$2 ( 'locked' ) ;
}
return IsWritableStreamLocked ( this ) ;
}
/ * *
* Aborts the stream , signaling that the producer can no longer successfully write to the stream and it is to be
* immediately moved to an errored state , with any queued - up writes discarded . This will also execute any abort
* mechanism of the underlying sink .
*
* The returned promise will fulfill if the stream shuts down successfully , or reject if the underlying sink signaled
* that there was an error doing so . Additionally , it will reject with a ` TypeError ` ( without attempting to cancel
* the stream ) if the stream is currently locked .
* /
abort ( reason = undefined ) {
if ( ! IsWritableStream ( this ) ) {
return promiseRejectedWith ( streamBrandCheckException$2 ( 'abort' ) ) ;
}
if ( IsWritableStreamLocked ( this ) ) {
return promiseRejectedWith ( new TypeError ( 'Cannot abort a stream that already has a writer' ) ) ;
}
return WritableStreamAbort ( this , reason ) ;
}
/ * *
* Closes the stream . The underlying sink will finish processing any previously - written chunks , before invoking its
* close behavior . During this time any further attempts to write will fail ( without erroring the stream ) .
*
* The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream
* successfully closes , or rejects if an error is encountered during this process . Additionally , it will reject with
* a ` TypeError ` ( without attempting to cancel the stream ) if the stream is currently locked .
* /
close ( ) {
if ( ! IsWritableStream ( this ) ) {
return promiseRejectedWith ( streamBrandCheckException$2 ( 'close' ) ) ;
}
if ( IsWritableStreamLocked ( this ) ) {
return promiseRejectedWith ( new TypeError ( 'Cannot close a stream that already has a writer' ) ) ;
}
if ( WritableStreamCloseQueuedOrInFlight ( this ) ) {
return promiseRejectedWith ( new TypeError ( 'Cannot close an already-closing stream' ) ) ;
}
return WritableStreamClose ( this ) ;
}
/ * *
* Creates a { @ link WritableStreamDefaultWriter | writer } and locks the stream to the new writer . While the stream
* is locked , no other writer can be acquired until this one is released .
*
* This functionality is especially useful for creating abstractions that desire the ability to write to a stream
* without interruption or interleaving . By getting a writer for the stream , you can ensure nobody else can write at
* the same time , which would cause the resulting written data to be unpredictable and probably useless .
* /
getWriter ( ) {
if ( ! IsWritableStream ( this ) ) {
throw streamBrandCheckException$2 ( 'getWriter' ) ;
}
return AcquireWritableStreamDefaultWriter ( this ) ;
}
}
Object . defineProperties ( WritableStream . prototype , {
abort : { enumerable : true } ,
close : { enumerable : true } ,
getWriter : { enumerable : true } ,
locked : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( WritableStream . prototype , SymbolPolyfill . toStringTag , {
value : 'WritableStream' ,
configurable : true
} ) ;
}
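// A minimal usage sketch (not part of the polyfill): writing through a
// WritableStream from an async function, assuming `send` is a hypothetical
// function that delivers each chunk somewhere and that the
// WritableStreamDefaultWriter below exposes write()/close() as in the
// standard API:
//
//   const writable = new WritableStream({
//     write(chunk) {
//       return send(chunk); // returned promise applies backpressure
//     }
//   });
//   const writer = writable.getWriter();
//   await writer.write('hello');
//   await writer.close();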
// Abstract operations for the WritableStream.
function AcquireWritableStreamDefaultWriter ( stream ) {
return new WritableStreamDefaultWriter ( stream ) ;
}
// Throws if and only if startAlgorithm throws.
function CreateWritableStream ( startAlgorithm , writeAlgorithm , closeAlgorithm , abortAlgorithm , highWaterMark = 1 , sizeAlgorithm = ( ) => 1 ) {
const stream = Object . create ( WritableStream . prototype ) ;
InitializeWritableStream ( stream ) ;
const controller = Object . create ( WritableStreamDefaultController . prototype ) ;
SetUpWritableStreamDefaultController ( stream , controller , startAlgorithm , writeAlgorithm , closeAlgorithm , abortAlgorithm , highWaterMark , sizeAlgorithm ) ;
return stream ;
}
function InitializeWritableStream ( stream ) {
stream . _state = 'writable' ;
// The error that will be reported by new method calls once the state becomes errored. Only set when [[state]] is
// 'erroring' or 'errored'. May be set to an undefined value.
stream . _storedError = undefined ;
stream . _writer = undefined ;
// Initialize to undefined first because the constructor of the controller checks this
// variable to validate the caller.
stream . _writableStreamController = undefined ;
// This queue is placed here instead of the writer class in order to allow for passing a writer to the next data
// producer without waiting for the queued writes to finish.
stream . _writeRequests = new SimpleQueue ( ) ;
// Write requests are removed from _writeRequests when write() is called on the underlying sink. This prevents
// them from being erroneously rejected on error. If a write() call is in-flight, the request is stored here.
stream . _inFlightWriteRequest = undefined ;
// The promise that was returned from writer.close(). Stored here because it may be fulfilled after the writer
// has been detached.
stream . _closeRequest = undefined ;
// Close request is removed from _closeRequest when close() is called on the underlying sink. This prevents it
// from being erroneously rejected on error. If a close() call is in-flight, the request is stored here.
stream . _inFlightCloseRequest = undefined ;
// The promise that was returned from writer.abort(). This may also be fulfilled after the writer has detached.
stream . _pendingAbortRequest = undefined ;
// The backpressure signal set by the controller.
stream . _backpressure = false ;
}
function IsWritableStream ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_writableStreamController' ) ) {
return false ;
}
return x instanceof WritableStream ;
}
function IsWritableStreamLocked ( stream ) {
if ( stream . _writer === undefined ) {
return false ;
}
return true ;
}
function WritableStreamAbort ( stream , reason ) {
var _a ;
if ( stream . _state === 'closed' || stream . _state === 'errored' ) {
return promiseResolvedWith ( undefined ) ;
}
stream . _writableStreamController . _abortReason = reason ;
( _a = stream . _writableStreamController . _abortController ) === null || _a === void 0 ? void 0 : _a . abort ( ) ;
// TypeScript narrows the type of `stream._state` down to 'writable' | 'erroring',
// but it doesn't know that signaling abort runs author code that might have changed the state.
// Widen the type again by casting to WritableStreamState.
const state = stream . _state ;
if ( state === 'closed' || state === 'errored' ) {
return promiseResolvedWith ( undefined ) ;
}
if ( stream . _pendingAbortRequest !== undefined ) {
return stream . _pendingAbortRequest . _promise ;
}
let wasAlreadyErroring = false ;
if ( state === 'erroring' ) {
wasAlreadyErroring = true ;
// reason will not be used, so don't keep a reference to it.
reason = undefined ;
}
const promise = newPromise ( ( resolve , reject ) => {
stream . _pendingAbortRequest = {
_promise : undefined ,
_resolve : resolve ,
_reject : reject ,
_reason : reason ,
_wasAlreadyErroring : wasAlreadyErroring
} ;
} ) ;
stream . _pendingAbortRequest . _promise = promise ;
if ( ! wasAlreadyErroring ) {
WritableStreamStartErroring ( stream , reason ) ;
}
return promise ;
}
function WritableStreamClose ( stream ) {
const state = stream . _state ;
if ( state === 'closed' || state === 'errored' ) {
return promiseRejectedWith ( new TypeError ( ` The stream (in ${ state } state) is not in the writable state and cannot be closed ` ) ) ;
}
const promise = newPromise ( ( resolve , reject ) => {
const closeRequest = {
_resolve : resolve ,
_reject : reject
} ;
stream . _closeRequest = closeRequest ;
} ) ;
const writer = stream . _writer ;
if ( writer !== undefined && stream . _backpressure && state === 'writable' ) {
defaultWriterReadyPromiseResolve ( writer ) ;
}
WritableStreamDefaultControllerClose ( stream . _writableStreamController ) ;
return promise ;
}
// WritableStream API exposed for controllers.
function WritableStreamAddWriteRequest ( stream ) {
const promise = newPromise ( ( resolve , reject ) => {
const writeRequest = {
_resolve : resolve ,
_reject : reject
} ;
stream . _writeRequests . push ( writeRequest ) ;
} ) ;
return promise ;
}
function WritableStreamDealWithRejection ( stream , error ) {
const state = stream . _state ;
if ( state === 'writable' ) {
WritableStreamStartErroring ( stream , error ) ;
return ;
}
WritableStreamFinishErroring ( stream ) ;
}
function WritableStreamStartErroring ( stream , reason ) {
const controller = stream . _writableStreamController ;
stream . _state = 'erroring' ;
stream . _storedError = reason ;
const writer = stream . _writer ;
if ( writer !== undefined ) {
WritableStreamDefaultWriterEnsureReadyPromiseRejected ( writer , reason ) ;
}
if ( ! WritableStreamHasOperationMarkedInFlight ( stream ) && controller . _started ) {
WritableStreamFinishErroring ( stream ) ;
}
}
function WritableStreamFinishErroring ( stream ) {
stream . _state = 'errored' ;
stream . _writableStreamController [ ErrorSteps ] ( ) ;
const storedError = stream . _storedError ;
stream . _writeRequests . forEach ( writeRequest => {
writeRequest . _reject ( storedError ) ;
} ) ;
stream . _writeRequests = new SimpleQueue ( ) ;
if ( stream . _pendingAbortRequest === undefined ) {
WritableStreamRejectCloseAndClosedPromiseIfNeeded ( stream ) ;
return ;
}
const abortRequest = stream . _pendingAbortRequest ;
stream . _pendingAbortRequest = undefined ;
if ( abortRequest . _wasAlreadyErroring ) {
abortRequest . _reject ( storedError ) ;
WritableStreamRejectCloseAndClosedPromiseIfNeeded ( stream ) ;
return ;
}
const promise = stream . _writableStreamController [ AbortSteps ] ( abortRequest . _reason ) ;
uponPromise ( promise , ( ) => {
abortRequest . _resolve ( ) ;
WritableStreamRejectCloseAndClosedPromiseIfNeeded ( stream ) ;
} , ( reason ) => {
abortRequest . _reject ( reason ) ;
WritableStreamRejectCloseAndClosedPromiseIfNeeded ( stream ) ;
} ) ;
}
function WritableStreamFinishInFlightWrite ( stream ) {
stream . _inFlightWriteRequest . _resolve ( undefined ) ;
stream . _inFlightWriteRequest = undefined ;
}
function WritableStreamFinishInFlightWriteWithError ( stream , error ) {
stream . _inFlightWriteRequest . _reject ( error ) ;
stream . _inFlightWriteRequest = undefined ;
WritableStreamDealWithRejection ( stream , error ) ;
}
function WritableStreamFinishInFlightClose ( stream ) {
stream . _inFlightCloseRequest . _resolve ( undefined ) ;
stream . _inFlightCloseRequest = undefined ;
const state = stream . _state ;
if ( state === 'erroring' ) {
// The error was too late to do anything, so it is ignored.
stream . _storedError = undefined ;
if ( stream . _pendingAbortRequest !== undefined ) {
stream . _pendingAbortRequest . _resolve ( ) ;
stream . _pendingAbortRequest = undefined ;
}
}
stream . _state = 'closed' ;
const writer = stream . _writer ;
if ( writer !== undefined ) {
defaultWriterClosedPromiseResolve ( writer ) ;
}
}
function WritableStreamFinishInFlightCloseWithError ( stream , error ) {
stream . _inFlightCloseRequest . _reject ( error ) ;
stream . _inFlightCloseRequest = undefined ;
// Never execute sink abort() after sink close().
if ( stream . _pendingAbortRequest !== undefined ) {
stream . _pendingAbortRequest . _reject ( error ) ;
stream . _pendingAbortRequest = undefined ;
}
WritableStreamDealWithRejection ( stream , error ) ;
}
// TODO(ricea): Fix alphabetical order.
function WritableStreamCloseQueuedOrInFlight ( stream ) {
if ( stream . _closeRequest === undefined && stream . _inFlightCloseRequest === undefined ) {
return false ;
}
return true ;
}
function WritableStreamHasOperationMarkedInFlight ( stream ) {
if ( stream . _inFlightWriteRequest === undefined && stream . _inFlightCloseRequest === undefined ) {
return false ;
}
return true ;
}
function WritableStreamMarkCloseRequestInFlight ( stream ) {
stream . _inFlightCloseRequest = stream . _closeRequest ;
stream . _closeRequest = undefined ;
}
function WritableStreamMarkFirstWriteRequestInFlight ( stream ) {
stream . _inFlightWriteRequest = stream . _writeRequests . shift ( ) ;
}
function WritableStreamRejectCloseAndClosedPromiseIfNeeded ( stream ) {
if ( stream . _closeRequest !== undefined ) {
stream . _closeRequest . _reject ( stream . _storedError ) ;
stream . _closeRequest = undefined ;
}
const writer = stream . _writer ;
if ( writer !== undefined ) {
defaultWriterClosedPromiseReject ( writer , stream . _storedError ) ;
}
}
function WritableStreamUpdateBackpressure ( stream , backpressure ) {
const writer = stream . _writer ;
if ( writer !== undefined && backpressure !== stream . _backpressure ) {
if ( backpressure ) {
defaultWriterReadyPromiseReset ( writer ) ;
}
else {
defaultWriterReadyPromiseResolve ( writer ) ;
}
}
stream . _backpressure = backpressure ;
}
/ * *
* A default writer vended by a { @ link WritableStream } .
*
* @ public
* /
class WritableStreamDefaultWriter {
constructor ( stream ) {
assertRequiredArgument ( stream , 1 , 'WritableStreamDefaultWriter' ) ;
assertWritableStream ( stream , 'First parameter' ) ;
if ( IsWritableStreamLocked ( stream ) ) {
throw new TypeError ( 'This stream has already been locked for exclusive writing by another writer' ) ;
}
this . _ownerWritableStream = stream ;
stream . _writer = this ;
const state = stream . _state ;
if ( state === 'writable' ) {
if ( ! WritableStreamCloseQueuedOrInFlight ( stream ) && stream . _backpressure ) {
defaultWriterReadyPromiseInitialize ( this ) ;
}
else {
defaultWriterReadyPromiseInitializeAsResolved ( this ) ;
}
defaultWriterClosedPromiseInitialize ( this ) ;
}
else if ( state === 'erroring' ) {
defaultWriterReadyPromiseInitializeAsRejected ( this , stream . _storedError ) ;
defaultWriterClosedPromiseInitialize ( this ) ;
}
else if ( state === 'closed' ) {
defaultWriterReadyPromiseInitializeAsResolved ( this ) ;
defaultWriterClosedPromiseInitializeAsResolved ( this ) ;
}
else {
const storedError = stream . _storedError ;
defaultWriterReadyPromiseInitializeAsRejected ( this , storedError ) ;
defaultWriterClosedPromiseInitializeAsRejected ( this , storedError ) ;
}
}
/ * *
* Returns a promise that will be fulfilled when the stream becomes closed , or rejected if the stream ever errors or
* the writer ’ s lock is released before the stream finishes closing .
* /
get closed ( ) {
if ( ! IsWritableStreamDefaultWriter ( this ) ) {
return promiseRejectedWith ( defaultWriterBrandCheckException ( 'closed' ) ) ;
}
return this . _closedPromise ;
}
/ * *
* Returns the desired size to fill the stream ’ s internal queue . It can be negative , if the queue is over - full .
* A producer can use this information to determine the right amount of data to write .
*
* It will be ` null ` if the stream cannot be successfully written to ( due to either being errored , or having an abort
* queued up ) . It will return zero if the stream is closed , and the getter will throw an exception if invoked when
* the writer ’ s lock is released .
* /
get desiredSize ( ) {
if ( ! IsWritableStreamDefaultWriter ( this ) ) {
throw defaultWriterBrandCheckException ( 'desiredSize' ) ;
}
if ( this . _ownerWritableStream === undefined ) {
throw defaultWriterLockException ( 'desiredSize' ) ;
}
return WritableStreamDefaultWriterGetDesiredSize ( this ) ;
}
/ * *
* Returns a promise that will be fulfilled when the desired size to fill the stream ’ s internal queue transitions
* from non - positive to positive , signaling that it is no longer applying backpressure . Once the desired size dips
* back to zero or below , the getter will return a new promise that stays pending until the next transition .
*
* If the stream becomes errored or aborted , or the writer ’ s lock is released , the returned promise will become
* rejected .
* /
get ready ( ) {
if ( ! IsWritableStreamDefaultWriter ( this ) ) {
return promiseRejectedWith ( defaultWriterBrandCheckException ( 'ready' ) ) ;
}
return this . _readyPromise ;
}
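/*
 * Illustrative usage sketch (not part of the polyfill): a producer that respects
 * backpressure awaits `ready` before each write and can consult `desiredSize`
 * to size its batches. Assumes `ws` is a WritableStream obtained elsewhere.
 *
 *   const writer = ws.getWriter();
 *   async function produce(chunks) {
 *     for (const chunk of chunks) {
 *       await writer.ready;        // resolves once desiredSize becomes positive
 *       writer.write(chunk);       // queue the chunk; the returned promise reports per-chunk success
 *     }
 *     await writer.close();
 *   }
 */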
/ * *
* If the writer is active , behaves the same as { @ link WritableStream . abort | stream . abort ( reason ) } .
* /
abort ( reason = undefined ) {
if ( ! IsWritableStreamDefaultWriter ( this ) ) {
return promiseRejectedWith ( defaultWriterBrandCheckException ( 'abort' ) ) ;
}
if ( this . _ownerWritableStream === undefined ) {
return promiseRejectedWith ( defaultWriterLockException ( 'abort' ) ) ;
}
return WritableStreamDefaultWriterAbort ( this , reason ) ;
}
/ * *
* If the writer is active , behaves the same as { @ link WritableStream . close | stream . close ( ) } .
* /
close ( ) {
if ( ! IsWritableStreamDefaultWriter ( this ) ) {
return promiseRejectedWith ( defaultWriterBrandCheckException ( 'close' ) ) ;
}
const stream = this . _ownerWritableStream ;
if ( stream === undefined ) {
return promiseRejectedWith ( defaultWriterLockException ( 'close' ) ) ;
}
if ( WritableStreamCloseQueuedOrInFlight ( stream ) ) {
return promiseRejectedWith ( new TypeError ( 'Cannot close an already-closing stream' ) ) ;
}
return WritableStreamDefaultWriterClose ( this ) ;
}
/ * *
* Releases the writer ’ s lock on the corresponding stream . After the lock is released , the writer is no longer active .
* If the associated stream is errored when the lock is released , the writer will appear errored in the same way from
* now on ; otherwise , the writer will appear closed .
*
* Note that the lock can still be released even if some ongoing writes have not yet finished ( i . e . even if the
* promises returned from previous calls to { @ link WritableStreamDefaultWriter . write | write ( ) } have not yet settled ) .
* It ’ s not necessary to hold the lock on the writer for the duration of the write ; the lock instead simply prevents
* other producers from writing in an interleaved manner .
* /
releaseLock ( ) {
if ( ! IsWritableStreamDefaultWriter ( this ) ) {
throw defaultWriterBrandCheckException ( 'releaseLock' ) ;
}
const stream = this . _ownerWritableStream ;
if ( stream === undefined ) {
return ;
}
WritableStreamDefaultWriterRelease ( this ) ;
}
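/*
 * Illustrative sketch (not part of the polyfill): releasing the lock does not wait
 * for in-flight writes; it only lets another producer acquire a writer. Assumes
 * `ws` is a WritableStream and the code runs inside an async function.
 *
 *   const w1 = ws.getWriter();
 *   const pending = w1.write('first');   // may still be unsettled...
 *   w1.releaseLock();                    // ...when the lock is released
 *   const w2 = ws.getWriter();           // a second producer can now take over
 *   await Promise.all([pending, w2.write('second')]);
 */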
write ( chunk = undefined ) {
if ( ! IsWritableStreamDefaultWriter ( this ) ) {
return promiseRejectedWith ( defaultWriterBrandCheckException ( 'write' ) ) ;
}
if ( this . _ownerWritableStream === undefined ) {
return promiseRejectedWith ( defaultWriterLockException ( 'write to' ) ) ;
}
return WritableStreamDefaultWriterWrite ( this , chunk ) ;
}
}
Object . defineProperties ( WritableStreamDefaultWriter . prototype , {
abort : { enumerable : true } ,
close : { enumerable : true } ,
releaseLock : { enumerable : true } ,
write : { enumerable : true } ,
closed : { enumerable : true } ,
desiredSize : { enumerable : true } ,
ready : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( WritableStreamDefaultWriter . prototype , SymbolPolyfill . toStringTag , {
value : 'WritableStreamDefaultWriter' ,
configurable : true
} ) ;
}
// Abstract operations for the WritableStreamDefaultWriter.
function IsWritableStreamDefaultWriter ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_ownerWritableStream' ) ) {
return false ;
}
return x instanceof WritableStreamDefaultWriter ;
}
// A client of WritableStreamDefaultWriter may use these functions directly to bypass state check.
function WritableStreamDefaultWriterAbort ( writer , reason ) {
const stream = writer . _ownerWritableStream ;
return WritableStreamAbort ( stream , reason ) ;
}
function WritableStreamDefaultWriterClose ( writer ) {
const stream = writer . _ownerWritableStream ;
return WritableStreamClose ( stream ) ;
}
function WritableStreamDefaultWriterCloseWithErrorPropagation ( writer ) {
const stream = writer . _ownerWritableStream ;
const state = stream . _state ;
if ( WritableStreamCloseQueuedOrInFlight ( stream ) || state === 'closed' ) {
return promiseResolvedWith ( undefined ) ;
}
if ( state === 'errored' ) {
return promiseRejectedWith ( stream . _storedError ) ;
}
return WritableStreamDefaultWriterClose ( writer ) ;
}
function WritableStreamDefaultWriterEnsureClosedPromiseRejected ( writer , error ) {
if ( writer . _closedPromiseState === 'pending' ) {
defaultWriterClosedPromiseReject ( writer , error ) ;
}
else {
defaultWriterClosedPromiseResetToRejected ( writer , error ) ;
}
}
function WritableStreamDefaultWriterEnsureReadyPromiseRejected ( writer , error ) {
if ( writer . _readyPromiseState === 'pending' ) {
defaultWriterReadyPromiseReject ( writer , error ) ;
}
else {
defaultWriterReadyPromiseResetToRejected ( writer , error ) ;
}
}
function WritableStreamDefaultWriterGetDesiredSize ( writer ) {
const stream = writer . _ownerWritableStream ;
const state = stream . _state ;
if ( state === 'errored' || state === 'erroring' ) {
return null ;
}
if ( state === 'closed' ) {
return 0 ;
}
return WritableStreamDefaultControllerGetDesiredSize ( stream . _writableStreamController ) ;
}
function WritableStreamDefaultWriterRelease ( writer ) {
const stream = writer . _ownerWritableStream ;
const releasedError = new TypeError ( ` Writer was released and can no longer be used to monitor the stream's closedness ` ) ;
WritableStreamDefaultWriterEnsureReadyPromiseRejected ( writer , releasedError ) ;
// The state transitions to "errored" before the sink abort() method runs, but the writer.closed promise is not
// rejected until afterwards. This means that simply testing state will not work.
WritableStreamDefaultWriterEnsureClosedPromiseRejected ( writer , releasedError ) ;
stream . _writer = undefined ;
writer . _ownerWritableStream = undefined ;
}
function WritableStreamDefaultWriterWrite ( writer , chunk ) {
const stream = writer . _ownerWritableStream ;
const controller = stream . _writableStreamController ;
const chunkSize = WritableStreamDefaultControllerGetChunkSize ( controller , chunk ) ;
if ( stream !== writer . _ownerWritableStream ) {
return promiseRejectedWith ( defaultWriterLockException ( 'write to' ) ) ;
}
const state = stream . _state ;
if ( state === 'errored' ) {
return promiseRejectedWith ( stream . _storedError ) ;
}
if ( WritableStreamCloseQueuedOrInFlight ( stream ) || state === 'closed' ) {
return promiseRejectedWith ( new TypeError ( 'The stream is closing or closed and cannot be written to' ) ) ;
}
if ( state === 'erroring' ) {
return promiseRejectedWith ( stream . _storedError ) ;
}
const promise = WritableStreamAddWriteRequest ( stream ) ;
WritableStreamDefaultControllerWrite ( controller , chunk , chunkSize ) ;
return promise ;
}
const closeSentinel = { } ;
/ * *
* Allows control of a { @ link WritableStream | writable stream } ' s state and internal queue .
*
* @ public
* /
class WritableStreamDefaultController {
constructor ( ) {
throw new TypeError ( 'Illegal constructor' ) ;
}
/ * *
* The reason which was passed to ` WritableStream.abort(reason) ` when the stream was aborted .
*
* @ deprecated
* This property has been removed from the specification , see https : //github.com/whatwg/streams/pull/1177.
* Use { @ link WritableStreamDefaultController . signal } ' s ` reason ` instead .
* /
get abortReason ( ) {
if ( ! IsWritableStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException$2 ( 'abortReason' ) ;
}
return this . _abortReason ;
}
/ * *
* An ` AbortSignal ` that can be used to abort the pending write or close operation when the stream is aborted .
* /
get signal ( ) {
if ( ! IsWritableStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException$2 ( 'signal' ) ;
}
if ( this . _abortController === undefined ) {
// Older browsers or older Node versions may not support `AbortController` or `AbortSignal`.
// We don't want to bundle and ship an `AbortController` polyfill together with our polyfill,
// so instead we only implement support for `signal` if we find a global `AbortController` constructor.
throw new TypeError ( 'WritableStreamDefaultController.prototype.signal is not supported' ) ;
}
return this . _abortController . signal ;
}
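/*
 * Illustrative sketch (not part of the polyfill): an underlying sink can pass
 * `controller.signal` to cancellable work so a pending write stops when the
 * stream is aborted. `sendToServer` is a hypothetical function that accepts an
 * AbortSignal option.
 *
 *   const ws = new WritableStream({
 *     async write(chunk, controller) {
 *       await sendToServer(chunk, { signal: controller.signal });
 *     }
 *   });
 */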
/ * *
* Errors the controlled writable stream , making all future interactions with it fail with the given error ` e ` .
*
* This method is rarely used , since usually it suffices to return a rejected promise from one of the underlying
* sink ' s methods . However , it can be useful for suddenly shutting down a stream in response to an event outside the
* normal lifecycle of interactions with the underlying sink .
* /
error ( e = undefined ) {
if ( ! IsWritableStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException$2 ( 'error' ) ;
}
const state = this . _controlledWritableStream . _state ;
if ( state !== 'writable' ) {
// The stream is closed, errored or will be soon. The sink can't do anything useful if it gets an error here, so
// just treat it as a no-op.
return ;
}
WritableStreamDefaultControllerError ( this , e ) ;
}
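/*
 * Illustrative sketch (not part of the polyfill): keeping a reference to the
 * controller from start() allows the stream to be errored in response to an
 * event outside the normal write/close lifecycle. `socket` is hypothetical.
 *
 *   let ctrl;
 *   const ws = new WritableStream({
 *     start(controller) {
 *       ctrl = controller;
 *       socket.on('close', () => ctrl.error(new Error('connection lost')));
 *     },
 *     write(chunk) {
 *       return socket.send(chunk);
 *     }
 *   });
 */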
/** @internal */
[ AbortSteps ] ( reason ) {
const result = this . _abortAlgorithm ( reason ) ;
WritableStreamDefaultControllerClearAlgorithms ( this ) ;
return result ;
}
/** @internal */
[ ErrorSteps ] ( ) {
ResetQueue ( this ) ;
}
}
Object . defineProperties ( WritableStreamDefaultController . prototype , {
abortReason : { enumerable : true } ,
signal : { enumerable : true } ,
error : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( WritableStreamDefaultController . prototype , SymbolPolyfill . toStringTag , {
value : 'WritableStreamDefaultController' ,
configurable : true
} ) ;
}
// Abstract operations implementing interface required by the WritableStream.
function IsWritableStreamDefaultController ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_controlledWritableStream' ) ) {
return false ;
}
return x instanceof WritableStreamDefaultController ;
}
function SetUpWritableStreamDefaultController ( stream , controller , startAlgorithm , writeAlgorithm , closeAlgorithm , abortAlgorithm , highWaterMark , sizeAlgorithm ) {
controller . _controlledWritableStream = stream ;
stream . _writableStreamController = controller ;
// Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.
controller . _queue = undefined ;
controller . _queueTotalSize = undefined ;
ResetQueue ( controller ) ;
controller . _abortReason = undefined ;
controller . _abortController = createAbortController ( ) ;
controller . _started = false ;
controller . _strategySizeAlgorithm = sizeAlgorithm ;
controller . _strategyHWM = highWaterMark ;
controller . _writeAlgorithm = writeAlgorithm ;
controller . _closeAlgorithm = closeAlgorithm ;
controller . _abortAlgorithm = abortAlgorithm ;
const backpressure = WritableStreamDefaultControllerGetBackpressure ( controller ) ;
WritableStreamUpdateBackpressure ( stream , backpressure ) ;
const startResult = startAlgorithm ( ) ;
const startPromise = promiseResolvedWith ( startResult ) ;
uponPromise ( startPromise , ( ) => {
controller . _started = true ;
WritableStreamDefaultControllerAdvanceQueueIfNeeded ( controller ) ;
} , r => {
controller . _started = true ;
WritableStreamDealWithRejection ( stream , r ) ;
} ) ;
}
function SetUpWritableStreamDefaultControllerFromUnderlyingSink ( stream , underlyingSink , highWaterMark , sizeAlgorithm ) {
const controller = Object . create ( WritableStreamDefaultController . prototype ) ;
let startAlgorithm = ( ) => undefined ;
let writeAlgorithm = ( ) => promiseResolvedWith ( undefined ) ;
let closeAlgorithm = ( ) => promiseResolvedWith ( undefined ) ;
let abortAlgorithm = ( ) => promiseResolvedWith ( undefined ) ;
if ( underlyingSink . start !== undefined ) {
startAlgorithm = ( ) => underlyingSink . start ( controller ) ;
}
if ( underlyingSink . write !== undefined ) {
writeAlgorithm = chunk => underlyingSink . write ( chunk , controller ) ;
}
if ( underlyingSink . close !== undefined ) {
closeAlgorithm = ( ) => underlyingSink . close ( ) ;
}
if ( underlyingSink . abort !== undefined ) {
abortAlgorithm = reason => underlyingSink . abort ( reason ) ;
}
SetUpWritableStreamDefaultController ( stream , controller , startAlgorithm , writeAlgorithm , closeAlgorithm , abortAlgorithm , highWaterMark , sizeAlgorithm ) ;
}
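/*
 * Illustrative sketch (not part of the polyfill): the optional methods of the
 * underlying sink passed to `new WritableStream(...)` become the algorithms
 * wired up above; omitted methods fall back to the no-op defaults. `openResource`,
 * `doWrite`, `flush` and `discard` are hypothetical helpers.
 *
 *   const ws = new WritableStream({
 *     start(controller) { return openResource(); },
 *     write(chunk) { return doWrite(chunk); },
 *     close() { return flush(); },
 *     abort(reason) { return discard(reason); }
 *   }, new CountQueuingStrategy({ highWaterMark: 4 }));
 */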
// ClearAlgorithms may be called twice. Erroring the same stream in multiple ways will often result in redundant calls.
function WritableStreamDefaultControllerClearAlgorithms ( controller ) {
controller . _writeAlgorithm = undefined ;
controller . _closeAlgorithm = undefined ;
controller . _abortAlgorithm = undefined ;
controller . _strategySizeAlgorithm = undefined ;
}
function WritableStreamDefaultControllerClose ( controller ) {
EnqueueValueWithSize ( controller , closeSentinel , 0 ) ;
WritableStreamDefaultControllerAdvanceQueueIfNeeded ( controller ) ;
}
function WritableStreamDefaultControllerGetChunkSize ( controller , chunk ) {
try {
return controller . _strategySizeAlgorithm ( chunk ) ;
}
catch ( chunkSizeE ) {
WritableStreamDefaultControllerErrorIfNeeded ( controller , chunkSizeE ) ;
return 1 ;
}
}
function WritableStreamDefaultControllerGetDesiredSize ( controller ) {
return controller . _strategyHWM - controller . _queueTotalSize ;
}
function WritableStreamDefaultControllerWrite ( controller , chunk , chunkSize ) {
try {
EnqueueValueWithSize ( controller , chunk , chunkSize ) ;
}
catch ( enqueueE ) {
WritableStreamDefaultControllerErrorIfNeeded ( controller , enqueueE ) ;
return ;
}
const stream = controller . _controlledWritableStream ;
if ( ! WritableStreamCloseQueuedOrInFlight ( stream ) && stream . _state === 'writable' ) {
const backpressure = WritableStreamDefaultControllerGetBackpressure ( controller ) ;
WritableStreamUpdateBackpressure ( stream , backpressure ) ;
}
WritableStreamDefaultControllerAdvanceQueueIfNeeded ( controller ) ;
}
// Abstract operations for the WritableStreamDefaultController.
function WritableStreamDefaultControllerAdvanceQueueIfNeeded ( controller ) {
const stream = controller . _controlledWritableStream ;
if ( ! controller . _started ) {
return ;
}
if ( stream . _inFlightWriteRequest !== undefined ) {
return ;
}
const state = stream . _state ;
if ( state === 'erroring' ) {
WritableStreamFinishErroring ( stream ) ;
return ;
}
if ( controller . _queue . length === 0 ) {
return ;
}
const value = PeekQueueValue ( controller ) ;
if ( value === closeSentinel ) {
WritableStreamDefaultControllerProcessClose ( controller ) ;
}
else {
WritableStreamDefaultControllerProcessWrite ( controller , value ) ;
}
}
function WritableStreamDefaultControllerErrorIfNeeded ( controller , error ) {
if ( controller . _controlledWritableStream . _state === 'writable' ) {
WritableStreamDefaultControllerError ( controller , error ) ;
}
}
function WritableStreamDefaultControllerProcessClose ( controller ) {
const stream = controller . _controlledWritableStream ;
WritableStreamMarkCloseRequestInFlight ( stream ) ;
DequeueValue ( controller ) ;
const sinkClosePromise = controller . _closeAlgorithm ( ) ;
WritableStreamDefaultControllerClearAlgorithms ( controller ) ;
uponPromise ( sinkClosePromise , ( ) => {
WritableStreamFinishInFlightClose ( stream ) ;
} , reason => {
WritableStreamFinishInFlightCloseWithError ( stream , reason ) ;
} ) ;
}
function WritableStreamDefaultControllerProcessWrite ( controller , chunk ) {
const stream = controller . _controlledWritableStream ;
WritableStreamMarkFirstWriteRequestInFlight ( stream ) ;
const sinkWritePromise = controller . _writeAlgorithm ( chunk ) ;
uponPromise ( sinkWritePromise , ( ) => {
WritableStreamFinishInFlightWrite ( stream ) ;
const state = stream . _state ;
DequeueValue ( controller ) ;
if ( ! WritableStreamCloseQueuedOrInFlight ( stream ) && state === 'writable' ) {
const backpressure = WritableStreamDefaultControllerGetBackpressure ( controller ) ;
WritableStreamUpdateBackpressure ( stream , backpressure ) ;
}
WritableStreamDefaultControllerAdvanceQueueIfNeeded ( controller ) ;
} , reason => {
if ( stream . _state === 'writable' ) {
WritableStreamDefaultControllerClearAlgorithms ( controller ) ;
}
WritableStreamFinishInFlightWriteWithError ( stream , reason ) ;
} ) ;
}
function WritableStreamDefaultControllerGetBackpressure ( controller ) {
const desiredSize = WritableStreamDefaultControllerGetDesiredSize ( controller ) ;
return desiredSize <= 0 ;
}
// A client of WritableStreamDefaultController may use these functions directly to bypass state check.
function WritableStreamDefaultControllerError ( controller , error ) {
const stream = controller . _controlledWritableStream ;
WritableStreamDefaultControllerClearAlgorithms ( controller ) ;
WritableStreamStartErroring ( stream , error ) ;
}
// Helper functions for the WritableStream.
function streamBrandCheckException$2 ( name ) {
return new TypeError ( ` WritableStream.prototype. ${ name } can only be used on a WritableStream ` ) ;
}
// Helper functions for the WritableStreamDefaultController.
function defaultControllerBrandCheckException$2 ( name ) {
return new TypeError ( ` WritableStreamDefaultController.prototype. ${ name } can only be used on a WritableStreamDefaultController ` ) ;
}
// Helper functions for the WritableStreamDefaultWriter.
function defaultWriterBrandCheckException ( name ) {
return new TypeError ( ` WritableStreamDefaultWriter.prototype. ${ name } can only be used on a WritableStreamDefaultWriter ` ) ;
}
function defaultWriterLockException ( name ) {
return new TypeError ( 'Cannot ' + name + ' a stream using a released writer' ) ;
}
function defaultWriterClosedPromiseInitialize ( writer ) {
writer . _closedPromise = newPromise ( ( resolve , reject ) => {
writer . _closedPromise _resolve = resolve ;
writer . _closedPromise _reject = reject ;
writer . _closedPromiseState = 'pending' ;
} ) ;
}
function defaultWriterClosedPromiseInitializeAsRejected ( writer , reason ) {
defaultWriterClosedPromiseInitialize ( writer ) ;
defaultWriterClosedPromiseReject ( writer , reason ) ;
}
function defaultWriterClosedPromiseInitializeAsResolved ( writer ) {
defaultWriterClosedPromiseInitialize ( writer ) ;
defaultWriterClosedPromiseResolve ( writer ) ;
}
function defaultWriterClosedPromiseReject ( writer , reason ) {
if ( writer . _closedPromise _reject === undefined ) {
return ;
}
setPromiseIsHandledToTrue ( writer . _closedPromise ) ;
writer . _closedPromise _reject ( reason ) ;
writer . _closedPromise _resolve = undefined ;
writer . _closedPromise _reject = undefined ;
writer . _closedPromiseState = 'rejected' ;
}
function defaultWriterClosedPromiseResetToRejected ( writer , reason ) {
defaultWriterClosedPromiseInitializeAsRejected ( writer , reason ) ;
}
function defaultWriterClosedPromiseResolve ( writer ) {
if ( writer . _closedPromise _resolve === undefined ) {
return ;
}
writer . _closedPromise _resolve ( undefined ) ;
writer . _closedPromise _resolve = undefined ;
writer . _closedPromise _reject = undefined ;
writer . _closedPromiseState = 'resolved' ;
}
function defaultWriterReadyPromiseInitialize ( writer ) {
writer . _readyPromise = newPromise ( ( resolve , reject ) => {
writer . _readyPromise _resolve = resolve ;
writer . _readyPromise _reject = reject ;
} ) ;
writer . _readyPromiseState = 'pending' ;
}
function defaultWriterReadyPromiseInitializeAsRejected ( writer , reason ) {
defaultWriterReadyPromiseInitialize ( writer ) ;
defaultWriterReadyPromiseReject ( writer , reason ) ;
}
function defaultWriterReadyPromiseInitializeAsResolved ( writer ) {
defaultWriterReadyPromiseInitialize ( writer ) ;
defaultWriterReadyPromiseResolve ( writer ) ;
}
function defaultWriterReadyPromiseReject ( writer , reason ) {
if ( writer . _readyPromise _reject === undefined ) {
return ;
}
setPromiseIsHandledToTrue ( writer . _readyPromise ) ;
writer . _readyPromise _reject ( reason ) ;
writer . _readyPromise _resolve = undefined ;
writer . _readyPromise _reject = undefined ;
writer . _readyPromiseState = 'rejected' ;
}
function defaultWriterReadyPromiseReset ( writer ) {
defaultWriterReadyPromiseInitialize ( writer ) ;
}
function defaultWriterReadyPromiseResetToRejected ( writer , reason ) {
defaultWriterReadyPromiseInitializeAsRejected ( writer , reason ) ;
}
function defaultWriterReadyPromiseResolve ( writer ) {
if ( writer . _readyPromise _resolve === undefined ) {
return ;
}
writer . _readyPromise _resolve ( undefined ) ;
writer . _readyPromise _resolve = undefined ;
writer . _readyPromise _reject = undefined ;
writer . _readyPromiseState = 'fulfilled' ;
}
/// <reference lib="dom" />
const NativeDOMException = typeof DOMException !== 'undefined' ? DOMException : undefined ;
/// <reference types="node" />
function isDOMExceptionConstructor ( ctor ) {
if ( ! ( typeof ctor === 'function' || typeof ctor === 'object' ) ) {
return false ;
}
try {
new ctor ( ) ;
return true ;
}
catch ( _a ) {
return false ;
}
}
function createDOMExceptionPolyfill ( ) {
// eslint-disable-next-line no-shadow
const ctor = function DOMException ( message , name ) {
this . message = message || '' ;
this . name = name || 'Error' ;
if ( Error . captureStackTrace ) {
Error . captureStackTrace ( this , this . constructor ) ;
}
} ;
ctor . prototype = Object . create ( Error . prototype ) ;
Object . defineProperty ( ctor . prototype , 'constructor' , { value : ctor , writable : true , configurable : true } ) ;
return ctor ;
}
// eslint-disable-next-line no-redeclare
const DOMException$1 = isDOMExceptionConstructor ( NativeDOMException ) ? NativeDOMException : createDOMExceptionPolyfill ( ) ;
function ReadableStreamPipeTo ( source , dest , preventClose , preventAbort , preventCancel , signal ) {
const reader = AcquireReadableStreamDefaultReader ( source ) ;
const writer = AcquireWritableStreamDefaultWriter ( dest ) ;
source . _disturbed = true ;
let shuttingDown = false ;
// This is used to keep track of the spec's requirement that we wait for ongoing writes during shutdown.
let currentWrite = promiseResolvedWith ( undefined ) ;
return newPromise ( ( resolve , reject ) => {
let abortAlgorithm ;
if ( signal !== undefined ) {
abortAlgorithm = ( ) => {
const error = new DOMException$1 ( 'Aborted' , 'AbortError' ) ;
const actions = [ ] ;
if ( ! preventAbort ) {
actions . push ( ( ) => {
if ( dest . _state === 'writable' ) {
return WritableStreamAbort ( dest , error ) ;
}
return promiseResolvedWith ( undefined ) ;
} ) ;
}
if ( ! preventCancel ) {
actions . push ( ( ) => {
if ( source . _state === 'readable' ) {
return ReadableStreamCancel ( source , error ) ;
}
return promiseResolvedWith ( undefined ) ;
} ) ;
}
shutdownWithAction ( ( ) => Promise . all ( actions . map ( action => action ( ) ) ) , true , error ) ;
} ;
if ( signal . aborted ) {
abortAlgorithm ( ) ;
return ;
}
signal . addEventListener ( 'abort' , abortAlgorithm ) ;
}
// Using reader and writer, read all chunks from this and write them to dest
// - Backpressure must be enforced
// - Shutdown must stop all activity
function pipeLoop ( ) {
return newPromise ( ( resolveLoop , rejectLoop ) => {
function next ( done ) {
if ( done ) {
resolveLoop ( ) ;
}
else {
// Use `PerformPromiseThen` instead of `uponPromise` to avoid
// adding unnecessary `.catch(rethrowAssertionErrorRejection)` handlers
PerformPromiseThen ( pipeStep ( ) , next , rejectLoop ) ;
}
}
next ( false ) ;
} ) ;
}
function pipeStep ( ) {
if ( shuttingDown ) {
return promiseResolvedWith ( true ) ;
}
return PerformPromiseThen ( writer . _readyPromise , ( ) => {
return newPromise ( ( resolveRead , rejectRead ) => {
ReadableStreamDefaultReaderRead ( reader , {
_chunkSteps : chunk => {
currentWrite = PerformPromiseThen ( WritableStreamDefaultWriterWrite ( writer , chunk ) , undefined , noop ) ;
resolveRead ( false ) ;
} ,
_closeSteps : ( ) => resolveRead ( true ) ,
_errorSteps : rejectRead
} ) ;
} ) ;
} ) ;
}
// Errors must be propagated forward
isOrBecomesErrored ( source , reader . _closedPromise , storedError => {
if ( ! preventAbort ) {
shutdownWithAction ( ( ) => WritableStreamAbort ( dest , storedError ) , true , storedError ) ;
}
else {
shutdown ( true , storedError ) ;
}
} ) ;
// Errors must be propagated backward
isOrBecomesErrored ( dest , writer . _closedPromise , storedError => {
if ( ! preventCancel ) {
shutdownWithAction ( ( ) => ReadableStreamCancel ( source , storedError ) , true , storedError ) ;
}
else {
shutdown ( true , storedError ) ;
}
} ) ;
// Closing must be propagated forward
isOrBecomesClosed ( source , reader . _closedPromise , ( ) => {
if ( ! preventClose ) {
shutdownWithAction ( ( ) => WritableStreamDefaultWriterCloseWithErrorPropagation ( writer ) ) ;
}
else {
shutdown ( ) ;
}
} ) ;
// Closing must be propagated backward
if ( WritableStreamCloseQueuedOrInFlight ( dest ) || dest . _state === 'closed' ) {
const destClosed = new TypeError ( 'the destination writable stream closed before all data could be piped to it' ) ;
if ( ! preventCancel ) {
shutdownWithAction ( ( ) => ReadableStreamCancel ( source , destClosed ) , true , destClosed ) ;
}
else {
shutdown ( true , destClosed ) ;
}
}
setPromiseIsHandledToTrue ( pipeLoop ( ) ) ;
function waitForWritesToFinish ( ) {
// Another write may have started while we were waiting on this currentWrite, so we have to be sure to wait
// for that too.
const oldCurrentWrite = currentWrite ;
return PerformPromiseThen ( currentWrite , ( ) => oldCurrentWrite !== currentWrite ? waitForWritesToFinish ( ) : undefined ) ;
}
function isOrBecomesErrored ( stream , promise , action ) {
if ( stream . _state === 'errored' ) {
action ( stream . _storedError ) ;
}
else {
uponRejection ( promise , action ) ;
}
}
function isOrBecomesClosed ( stream , promise , action ) {
if ( stream . _state === 'closed' ) {
action ( ) ;
}
else {
uponFulfillment ( promise , action ) ;
}
}
function shutdownWithAction ( action , originalIsError , originalError ) {
if ( shuttingDown ) {
return ;
}
shuttingDown = true ;
if ( dest . _state === 'writable' && ! WritableStreamCloseQueuedOrInFlight ( dest ) ) {
uponFulfillment ( waitForWritesToFinish ( ) , doTheRest ) ;
}
else {
doTheRest ( ) ;
}
function doTheRest ( ) {
uponPromise ( action ( ) , ( ) => finalize ( originalIsError , originalError ) , newError => finalize ( true , newError ) ) ;
}
}
function shutdown ( isError , error ) {
if ( shuttingDown ) {
return ;
}
shuttingDown = true ;
if ( dest . _state === 'writable' && ! WritableStreamCloseQueuedOrInFlight ( dest ) ) {
uponFulfillment ( waitForWritesToFinish ( ) , ( ) => finalize ( isError , error ) ) ;
}
else {
finalize ( isError , error ) ;
}
}
function finalize ( isError , error ) {
WritableStreamDefaultWriterRelease ( writer ) ;
ReadableStreamReaderGenericRelease ( reader ) ;
if ( signal !== undefined ) {
signal . removeEventListener ( 'abort' , abortAlgorithm ) ;
}
if ( isError ) {
reject ( error ) ;
}
else {
resolve ( undefined ) ;
}
}
} ) ;
}
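/*
 * Illustrative sketch (not part of the polyfill): the flags and signal threaded
 * through ReadableStreamPipeTo above correspond to the options of the public
 * pipeTo() API. Assumes `readable` and `writable` streams from this module.
 *
 *   const ac = new AbortController();
 *   readable
 *     .pipeTo(writable, { preventClose: true, signal: ac.signal })
 *     .catch(err => console.error('pipe failed or was aborted:', err));
 *   // calling ac.abort() later rejects the pipe with an 'AbortError' DOMException
 */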
/ * *
* Allows control of a { @ link ReadableStream | readable stream } ' s state and internal queue .
*
* @ public
* /
class ReadableStreamDefaultController {
constructor ( ) {
throw new TypeError ( 'Illegal constructor' ) ;
}
/ * *
* Returns the desired size to fill the controlled stream ' s internal queue . It can be negative , if the queue is
* over - full . An underlying source ought to use this information to determine when and how to apply backpressure .
* /
get desiredSize ( ) {
if ( ! IsReadableStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException$1 ( 'desiredSize' ) ;
}
return ReadableStreamDefaultControllerGetDesiredSize ( this ) ;
}
/ * *
* Closes the controlled readable stream . Consumers will still be able to read any previously - enqueued chunks from
* the stream , but once those are read , the stream will become closed .
* /
close ( ) {
if ( ! IsReadableStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException$1 ( 'close' ) ;
}
if ( ! ReadableStreamDefaultControllerCanCloseOrEnqueue ( this ) ) {
throw new TypeError ( 'The stream is not in a state that permits close' ) ;
}
ReadableStreamDefaultControllerClose ( this ) ;
}
enqueue ( chunk = undefined ) {
if ( ! IsReadableStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException$1 ( 'enqueue' ) ;
}
if ( ! ReadableStreamDefaultControllerCanCloseOrEnqueue ( this ) ) {
throw new TypeError ( 'The stream is not in a state that permits enqueue' ) ;
}
return ReadableStreamDefaultControllerEnqueue ( this , chunk ) ;
}
/ * *
* Errors the controlled readable stream , making all future interactions with it fail with the given error ` e ` .
* /
error ( e = undefined ) {
if ( ! IsReadableStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException$1 ( 'error' ) ;
}
ReadableStreamDefaultControllerError ( this , e ) ;
}
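/*
 * Illustrative sketch (not part of the polyfill): an underlying source drives
 * this controller through enqueue()/close()/error(); pull() is called again
 * whenever desiredSize is positive. `readNext` and `stopProducing` are
 * hypothetical helpers.
 *
 *   const rs = new ReadableStream({
 *     async pull(controller) {
 *       const { value, done } = await readNext();
 *       if (done) {
 *         controller.close();
 *       } else {
 *         controller.enqueue(value);
 *       }
 *     },
 *     cancel(reason) {
 *       return stopProducing(reason);
 *     }
 *   });
 */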
/** @internal */
[ CancelSteps ] ( reason ) {
ResetQueue ( this ) ;
const result = this . _cancelAlgorithm ( reason ) ;
ReadableStreamDefaultControllerClearAlgorithms ( this ) ;
return result ;
}
/** @internal */
[ PullSteps ] ( readRequest ) {
const stream = this . _controlledReadableStream ;
if ( this . _queue . length > 0 ) {
const chunk = DequeueValue ( this ) ;
if ( this . _closeRequested && this . _queue . length === 0 ) {
ReadableStreamDefaultControllerClearAlgorithms ( this ) ;
ReadableStreamClose ( stream ) ;
}
else {
ReadableStreamDefaultControllerCallPullIfNeeded ( this ) ;
}
readRequest . _chunkSteps ( chunk ) ;
}
else {
ReadableStreamAddReadRequest ( stream , readRequest ) ;
ReadableStreamDefaultControllerCallPullIfNeeded ( this ) ;
}
}
}
Object . defineProperties ( ReadableStreamDefaultController . prototype , {
close : { enumerable : true } ,
enqueue : { enumerable : true } ,
error : { enumerable : true } ,
desiredSize : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( ReadableStreamDefaultController . prototype , SymbolPolyfill . toStringTag , {
value : 'ReadableStreamDefaultController' ,
configurable : true
} ) ;
}
// Abstract operations for the ReadableStreamDefaultController.
function IsReadableStreamDefaultController ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_controlledReadableStream' ) ) {
return false ;
}
return x instanceof ReadableStreamDefaultController ;
}
function ReadableStreamDefaultControllerCallPullIfNeeded ( controller ) {
const shouldPull = ReadableStreamDefaultControllerShouldCallPull ( controller ) ;
if ( ! shouldPull ) {
return ;
}
if ( controller . _pulling ) {
controller . _pullAgain = true ;
return ;
}
controller . _pulling = true ;
const pullPromise = controller . _pullAlgorithm ( ) ;
uponPromise ( pullPromise , ( ) => {
controller . _pulling = false ;
if ( controller . _pullAgain ) {
controller . _pullAgain = false ;
ReadableStreamDefaultControllerCallPullIfNeeded ( controller ) ;
}
} , e => {
ReadableStreamDefaultControllerError ( controller , e ) ;
} ) ;
}
function ReadableStreamDefaultControllerShouldCallPull ( controller ) {
const stream = controller . _controlledReadableStream ;
if ( ! ReadableStreamDefaultControllerCanCloseOrEnqueue ( controller ) ) {
return false ;
}
if ( ! controller . _started ) {
return false ;
}
if ( IsReadableStreamLocked ( stream ) && ReadableStreamGetNumReadRequests ( stream ) > 0 ) {
return true ;
}
const desiredSize = ReadableStreamDefaultControllerGetDesiredSize ( controller ) ;
if ( desiredSize > 0 ) {
return true ;
}
return false ;
}
function ReadableStreamDefaultControllerClearAlgorithms ( controller ) {
controller . _pullAlgorithm = undefined ;
controller . _cancelAlgorithm = undefined ;
controller . _strategySizeAlgorithm = undefined ;
}
// A client of ReadableStreamDefaultController may use these functions directly to bypass state check.
function ReadableStreamDefaultControllerClose ( controller ) {
if ( ! ReadableStreamDefaultControllerCanCloseOrEnqueue ( controller ) ) {
return ;
}
const stream = controller . _controlledReadableStream ;
controller . _closeRequested = true ;
if ( controller . _queue . length === 0 ) {
ReadableStreamDefaultControllerClearAlgorithms ( controller ) ;
ReadableStreamClose ( stream ) ;
}
}
function ReadableStreamDefaultControllerEnqueue ( controller , chunk ) {
if ( ! ReadableStreamDefaultControllerCanCloseOrEnqueue ( controller ) ) {
return ;
}
const stream = controller . _controlledReadableStream ;
if ( IsReadableStreamLocked ( stream ) && ReadableStreamGetNumReadRequests ( stream ) > 0 ) {
ReadableStreamFulfillReadRequest ( stream , chunk , false ) ;
}
else {
let chunkSize ;
try {
chunkSize = controller . _strategySizeAlgorithm ( chunk ) ;
}
catch ( chunkSizeE ) {
ReadableStreamDefaultControllerError ( controller , chunkSizeE ) ;
throw chunkSizeE ;
}
try {
EnqueueValueWithSize ( controller , chunk , chunkSize ) ;
}
catch ( enqueueE ) {
ReadableStreamDefaultControllerError ( controller , enqueueE ) ;
throw enqueueE ;
}
}
ReadableStreamDefaultControllerCallPullIfNeeded ( controller ) ;
}
function ReadableStreamDefaultControllerError ( controller , e ) {
const stream = controller . _controlledReadableStream ;
if ( stream . _state !== 'readable' ) {
return ;
}
ResetQueue ( controller ) ;
ReadableStreamDefaultControllerClearAlgorithms ( controller ) ;
ReadableStreamError ( stream , e ) ;
}
function ReadableStreamDefaultControllerGetDesiredSize ( controller ) {
const state = controller . _controlledReadableStream . _state ;
if ( state === 'errored' ) {
return null ;
}
if ( state === 'closed' ) {
return 0 ;
}
return controller . _strategyHWM - controller . _queueTotalSize ;
}
// This is used in the implementation of TransformStream.
function ReadableStreamDefaultControllerHasBackpressure ( controller ) {
if ( ReadableStreamDefaultControllerShouldCallPull ( controller ) ) {
return false ;
}
return true ;
}
function ReadableStreamDefaultControllerCanCloseOrEnqueue ( controller ) {
const state = controller . _controlledReadableStream . _state ;
if ( ! controller . _closeRequested && state === 'readable' ) {
return true ;
}
return false ;
}
function SetUpReadableStreamDefaultController ( stream , controller , startAlgorithm , pullAlgorithm , cancelAlgorithm , highWaterMark , sizeAlgorithm ) {
controller . _controlledReadableStream = stream ;
controller . _queue = undefined ;
controller . _queueTotalSize = undefined ;
ResetQueue ( controller ) ;
controller . _started = false ;
controller . _closeRequested = false ;
controller . _pullAgain = false ;
controller . _pulling = false ;
controller . _strategySizeAlgorithm = sizeAlgorithm ;
controller . _strategyHWM = highWaterMark ;
controller . _pullAlgorithm = pullAlgorithm ;
controller . _cancelAlgorithm = cancelAlgorithm ;
stream . _readableStreamController = controller ;
const startResult = startAlgorithm ( ) ;
uponPromise ( promiseResolvedWith ( startResult ) , ( ) => {
controller . _started = true ;
ReadableStreamDefaultControllerCallPullIfNeeded ( controller ) ;
} , r => {
ReadableStreamDefaultControllerError ( controller , r ) ;
} ) ;
}
function SetUpReadableStreamDefaultControllerFromUnderlyingSource ( stream , underlyingSource , highWaterMark , sizeAlgorithm ) {
const controller = Object . create ( ReadableStreamDefaultController . prototype ) ;
let startAlgorithm = ( ) => undefined ;
let pullAlgorithm = ( ) => promiseResolvedWith ( undefined ) ;
let cancelAlgorithm = ( ) => promiseResolvedWith ( undefined ) ;
if ( underlyingSource . start !== undefined ) {
startAlgorithm = ( ) => underlyingSource . start ( controller ) ;
}
if ( underlyingSource . pull !== undefined ) {
pullAlgorithm = ( ) => underlyingSource . pull ( controller ) ;
}
if ( underlyingSource . cancel !== undefined ) {
cancelAlgorithm = reason => underlyingSource . cancel ( reason ) ;
}
SetUpReadableStreamDefaultController ( stream , controller , startAlgorithm , pullAlgorithm , cancelAlgorithm , highWaterMark , sizeAlgorithm ) ;
}
// Helper functions for the ReadableStreamDefaultController.
function defaultControllerBrandCheckException$1 ( name ) {
return new TypeError ( ` ReadableStreamDefaultController.prototype. ${ name } can only be used on a ReadableStreamDefaultController ` ) ;
}
function ReadableStreamTee ( stream , cloneForBranch2 ) {
if ( IsReadableByteStreamController ( stream . _readableStreamController ) ) {
return ReadableByteStreamTee ( stream ) ;
}
return ReadableStreamDefaultTee ( stream ) ;
}
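/*
 * Illustrative sketch (not part of the polyfill): tee() yields two branches that
 * replay the same chunks; the original stream is only cancelled once both
 * branches have been cancelled. `destinationA` and `destinationB` are
 * hypothetical writable streams.
 *
 *   const [branch1, branch2] = readable.tee();
 *   branch1.pipeTo(destinationA);   // e.g. render the data
 *   branch2.pipeTo(destinationB);   // e.g. cache it in parallel
 */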
function ReadableStreamDefaultTee ( stream , cloneForBranch2 ) {
const reader = AcquireReadableStreamDefaultReader ( stream ) ;
let reading = false ;
let readAgain = false ;
let canceled1 = false ;
let canceled2 = false ;
let reason1 ;
let reason2 ;
let branch1 ;
let branch2 ;
let resolveCancelPromise ;
const cancelPromise = newPromise ( resolve => {
resolveCancelPromise = resolve ;
} ) ;
function pullAlgorithm ( ) {
if ( reading ) {
readAgain = true ;
return promiseResolvedWith ( undefined ) ;
}
reading = true ;
const readRequest = {
_chunkSteps : chunk => {
// This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
// reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
// successful synchronously-available reads get ahead of asynchronously-available errors.
queueMicrotask ( ( ) => {
readAgain = false ;
const chunk1 = chunk ;
const chunk2 = chunk ;
// There is no way to access the cloning code right now in the reference implementation.
// If we add one then we'll need an implementation for serializable objects.
// if (!canceled2 && cloneForBranch2) {
// chunk2 = StructuredDeserialize(StructuredSerialize(chunk2));
// }
if ( ! canceled1 ) {
ReadableStreamDefaultControllerEnqueue ( branch1 . _readableStreamController , chunk1 ) ;
}
if ( ! canceled2 ) {
ReadableStreamDefaultControllerEnqueue ( branch2 . _readableStreamController , chunk2 ) ;
}
reading = false ;
if ( readAgain ) {
pullAlgorithm ( ) ;
}
} ) ;
} ,
_closeSteps : ( ) => {
reading = false ;
if ( ! canceled1 ) {
ReadableStreamDefaultControllerClose ( branch1 . _readableStreamController ) ;
}
if ( ! canceled2 ) {
ReadableStreamDefaultControllerClose ( branch2 . _readableStreamController ) ;
}
if ( ! canceled1 || ! canceled2 ) {
resolveCancelPromise ( undefined ) ;
}
} ,
_errorSteps : ( ) => {
reading = false ;
}
} ;
ReadableStreamDefaultReaderRead ( reader , readRequest ) ;
return promiseResolvedWith ( undefined ) ;
}
function cancel1Algorithm ( reason ) {
canceled1 = true ;
reason1 = reason ;
if ( canceled2 ) {
const compositeReason = CreateArrayFromList ( [ reason1 , reason2 ] ) ;
const cancelResult = ReadableStreamCancel ( stream , compositeReason ) ;
resolveCancelPromise ( cancelResult ) ;
}
return cancelPromise ;
}
function cancel2Algorithm ( reason ) {
canceled2 = true ;
reason2 = reason ;
if ( canceled1 ) {
const compositeReason = CreateArrayFromList ( [ reason1 , reason2 ] ) ;
const cancelResult = ReadableStreamCancel ( stream , compositeReason ) ;
resolveCancelPromise ( cancelResult ) ;
}
return cancelPromise ;
}
function startAlgorithm ( ) {
// do nothing
}
branch1 = CreateReadableStream ( startAlgorithm , pullAlgorithm , cancel1Algorithm ) ;
branch2 = CreateReadableStream ( startAlgorithm , pullAlgorithm , cancel2Algorithm ) ;
uponRejection ( reader . _closedPromise , ( r ) => {
ReadableStreamDefaultControllerError ( branch1 . _readableStreamController , r ) ;
ReadableStreamDefaultControllerError ( branch2 . _readableStreamController , r ) ;
if ( ! canceled1 || ! canceled2 ) {
resolveCancelPromise ( undefined ) ;
}
} ) ;
return [ branch1 , branch2 ] ;
}
function ReadableByteStreamTee ( stream ) {
let reader = AcquireReadableStreamDefaultReader ( stream ) ;
let reading = false ;
let readAgainForBranch1 = false ;
let readAgainForBranch2 = false ;
let canceled1 = false ;
let canceled2 = false ;
let reason1 ;
let reason2 ;
let branch1 ;
let branch2 ;
let resolveCancelPromise ;
const cancelPromise = newPromise ( resolve => {
resolveCancelPromise = resolve ;
} ) ;
function forwardReaderError ( thisReader ) {
uponRejection ( thisReader . _closedPromise , r => {
if ( thisReader !== reader ) {
return ;
}
ReadableByteStreamControllerError ( branch1 . _readableStreamController , r ) ;
ReadableByteStreamControllerError ( branch2 . _readableStreamController , r ) ;
if ( ! canceled1 || ! canceled2 ) {
resolveCancelPromise ( undefined ) ;
}
} ) ;
}
function pullWithDefaultReader ( ) {
if ( IsReadableStreamBYOBReader ( reader ) ) {
ReadableStreamReaderGenericRelease ( reader ) ;
reader = AcquireReadableStreamDefaultReader ( stream ) ;
forwardReaderError ( reader ) ;
}
const readRequest = {
_chunkSteps : chunk => {
// This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
// reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
// successful synchronously-available reads get ahead of asynchronously-available errors.
queueMicrotask ( ( ) => {
readAgainForBranch1 = false ;
readAgainForBranch2 = false ;
const chunk1 = chunk ;
let chunk2 = chunk ;
if ( ! canceled1 && ! canceled2 ) {
try {
chunk2 = CloneAsUint8Array ( chunk ) ;
}
catch ( cloneE ) {
ReadableByteStreamControllerError ( branch1 . _readableStreamController , cloneE ) ;
ReadableByteStreamControllerError ( branch2 . _readableStreamController , cloneE ) ;
resolveCancelPromise ( ReadableStreamCancel ( stream , cloneE ) ) ;
return ;
}
}
if ( ! canceled1 ) {
ReadableByteStreamControllerEnqueue ( branch1 . _readableStreamController , chunk1 ) ;
}
if ( ! canceled2 ) {
ReadableByteStreamControllerEnqueue ( branch2 . _readableStreamController , chunk2 ) ;
}
reading = false ;
if ( readAgainForBranch1 ) {
pull1Algorithm ( ) ;
}
else if ( readAgainForBranch2 ) {
pull2Algorithm ( ) ;
}
} ) ;
} ,
_closeSteps : ( ) => {
reading = false ;
if ( ! canceled1 ) {
ReadableByteStreamControllerClose ( branch1 . _readableStreamController ) ;
}
if ( ! canceled2 ) {
ReadableByteStreamControllerClose ( branch2 . _readableStreamController ) ;
}
if ( branch1 . _readableStreamController . _pendingPullIntos . length > 0 ) {
ReadableByteStreamControllerRespond ( branch1 . _readableStreamController , 0 ) ;
}
if ( branch2 . _readableStreamController . _pendingPullIntos . length > 0 ) {
ReadableByteStreamControllerRespond ( branch2 . _readableStreamController , 0 ) ;
}
if ( ! canceled1 || ! canceled2 ) {
resolveCancelPromise ( undefined ) ;
}
} ,
_errorSteps : ( ) => {
reading = false ;
}
} ;
ReadableStreamDefaultReaderRead ( reader , readRequest ) ;
}
function pullWithBYOBReader ( view , forBranch2 ) {
if ( IsReadableStreamDefaultReader ( reader ) ) {
ReadableStreamReaderGenericRelease ( reader ) ;
reader = AcquireReadableStreamBYOBReader ( stream ) ;
forwardReaderError ( reader ) ;
}
const byobBranch = forBranch2 ? branch2 : branch1 ;
const otherBranch = forBranch2 ? branch1 : branch2 ;
const readIntoRequest = {
_chunkSteps : chunk => {
// This needs to be delayed a microtask because it takes at least a microtask to detect errors (using
// reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let
// successful synchronously-available reads get ahead of asynchronously-available errors.
queueMicrotask ( ( ) => {
readAgainForBranch1 = false ;
readAgainForBranch2 = false ;
const byobCanceled = forBranch2 ? canceled2 : canceled1 ;
const otherCanceled = forBranch2 ? canceled1 : canceled2 ;
if ( ! otherCanceled ) {
let clonedChunk ;
try {
clonedChunk = CloneAsUint8Array ( chunk ) ;
}
catch ( cloneE ) {
ReadableByteStreamControllerError ( byobBranch . _readableStreamController , cloneE ) ;
ReadableByteStreamControllerError ( otherBranch . _readableStreamController , cloneE ) ;
resolveCancelPromise ( ReadableStreamCancel ( stream , cloneE ) ) ;
return ;
}
if ( ! byobCanceled ) {
ReadableByteStreamControllerRespondWithNewView ( byobBranch . _readableStreamController , chunk ) ;
}
ReadableByteStreamControllerEnqueue ( otherBranch . _readableStreamController , clonedChunk ) ;
}
else if ( ! byobCanceled ) {
ReadableByteStreamControllerRespondWithNewView ( byobBranch . _readableStreamController , chunk ) ;
}
reading = false ;
if ( readAgainForBranch1 ) {
pull1Algorithm ( ) ;
}
else if ( readAgainForBranch2 ) {
pull2Algorithm ( ) ;
}
} ) ;
} ,
_closeSteps : chunk => {
reading = false ;
const byobCanceled = forBranch2 ? canceled2 : canceled1 ;
const otherCanceled = forBranch2 ? canceled1 : canceled2 ;
if ( ! byobCanceled ) {
ReadableByteStreamControllerClose ( byobBranch . _readableStreamController ) ;
}
if ( ! otherCanceled ) {
ReadableByteStreamControllerClose ( otherBranch . _readableStreamController ) ;
}
if ( chunk !== undefined ) {
if ( ! byobCanceled ) {
ReadableByteStreamControllerRespondWithNewView ( byobBranch . _readableStreamController , chunk ) ;
}
if ( ! otherCanceled && otherBranch . _readableStreamController . _pendingPullIntos . length > 0 ) {
ReadableByteStreamControllerRespond ( otherBranch . _readableStreamController , 0 ) ;
}
}
if ( ! byobCanceled || ! otherCanceled ) {
resolveCancelPromise ( undefined ) ;
}
} ,
_errorSteps : ( ) => {
reading = false ;
}
} ;
ReadableStreamBYOBReaderRead ( reader , view , readIntoRequest ) ;
}
function pull1Algorithm ( ) {
if ( reading ) {
readAgainForBranch1 = true ;
return promiseResolvedWith ( undefined ) ;
}
reading = true ;
const byobRequest = ReadableByteStreamControllerGetBYOBRequest ( branch1 . _readableStreamController ) ;
if ( byobRequest === null ) {
pullWithDefaultReader ( ) ;
}
else {
pullWithBYOBReader ( byobRequest . _view , false ) ;
}
return promiseResolvedWith ( undefined ) ;
}
function pull2Algorithm ( ) {
if ( reading ) {
readAgainForBranch2 = true ;
return promiseResolvedWith ( undefined ) ;
}
reading = true ;
const byobRequest = ReadableByteStreamControllerGetBYOBRequest ( branch2 . _readableStreamController ) ;
if ( byobRequest === null ) {
pullWithDefaultReader ( ) ;
}
else {
pullWithBYOBReader ( byobRequest . _view , true ) ;
}
return promiseResolvedWith ( undefined ) ;
}
function cancel1Algorithm ( reason ) {
canceled1 = true ;
reason1 = reason ;
if ( canceled2 ) {
const compositeReason = CreateArrayFromList ( [ reason1 , reason2 ] ) ;
const cancelResult = ReadableStreamCancel ( stream , compositeReason ) ;
resolveCancelPromise ( cancelResult ) ;
}
return cancelPromise ;
}
function cancel2Algorithm ( reason ) {
canceled2 = true ;
reason2 = reason ;
if ( canceled1 ) {
const compositeReason = CreateArrayFromList ( [ reason1 , reason2 ] ) ;
const cancelResult = ReadableStreamCancel ( stream , compositeReason ) ;
resolveCancelPromise ( cancelResult ) ;
}
return cancelPromise ;
}
function startAlgorithm ( ) {
return ;
}
branch1 = CreateReadableByteStream ( startAlgorithm , pull1Algorithm , cancel1Algorithm ) ;
branch2 = CreateReadableByteStream ( startAlgorithm , pull2Algorithm , cancel2Algorithm ) ;
forwardReaderError ( reader ) ;
return [ branch1 , branch2 ] ;
}
function convertUnderlyingDefaultOrByteSource ( source , context ) {
assertDictionary ( source , context ) ;
const original = source ;
const autoAllocateChunkSize = original === null || original === void 0 ? void 0 : original . autoAllocateChunkSize ;
const cancel = original === null || original === void 0 ? void 0 : original . cancel ;
const pull = original === null || original === void 0 ? void 0 : original . pull ;
const start = original === null || original === void 0 ? void 0 : original . start ;
const type = original === null || original === void 0 ? void 0 : original . type ;
return {
autoAllocateChunkSize : autoAllocateChunkSize === undefined ?
undefined :
convertUnsignedLongLongWithEnforceRange ( autoAllocateChunkSize , ` ${ context } has member 'autoAllocateChunkSize' that ` ) ,
cancel : cancel === undefined ?
undefined :
convertUnderlyingSourceCancelCallback ( cancel , original , ` ${ context } has member 'cancel' that ` ) ,
pull : pull === undefined ?
undefined :
convertUnderlyingSourcePullCallback ( pull , original , ` ${ context } has member 'pull' that ` ) ,
start : start === undefined ?
undefined :
convertUnderlyingSourceStartCallback ( start , original , ` ${ context } has member 'start' that ` ) ,
type : type === undefined ? undefined : convertReadableStreamType ( type , ` ${ context } has member 'type' that ` )
} ;
}
function convertUnderlyingSourceCancelCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( reason ) => promiseCall ( fn , original , [ reason ] ) ;
}
function convertUnderlyingSourcePullCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( controller ) => promiseCall ( fn , original , [ controller ] ) ;
}
function convertUnderlyingSourceStartCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( controller ) => reflectCall ( fn , original , [ controller ] ) ;
}
function convertReadableStreamType ( type , context ) {
type = ` ${ type } ` ;
if ( type !== 'bytes' ) {
throw new TypeError ( ` ${ context } ' ${ type } ' is not a valid enumeration value for ReadableStreamType ` ) ;
}
return type ;
}
function convertReaderOptions ( options , context ) {
assertDictionary ( options , context ) ;
const mode = options === null || options === void 0 ? void 0 : options . mode ;
return {
mode : mode === undefined ? undefined : convertReadableStreamReaderMode ( mode , ` ${ context } has member 'mode' that ` )
} ;
}
function convertReadableStreamReaderMode ( mode , context ) {
mode = ` ${ mode } ` ;
if ( mode !== 'byob' ) {
throw new TypeError ( ` ${ context } ' ${ mode } ' is not a valid enumeration value for ReadableStreamReaderMode ` ) ;
}
return mode ;
}
function convertIteratorOptions ( options , context ) {
assertDictionary ( options , context ) ;
const preventCancel = options === null || options === void 0 ? void 0 : options . preventCancel ;
return { preventCancel : Boolean ( preventCancel ) } ;
}
function convertPipeOptions ( options , context ) {
assertDictionary ( options , context ) ;
const preventAbort = options === null || options === void 0 ? void 0 : options . preventAbort ;
const preventCancel = options === null || options === void 0 ? void 0 : options . preventCancel ;
const preventClose = options === null || options === void 0 ? void 0 : options . preventClose ;
const signal = options === null || options === void 0 ? void 0 : options . signal ;
if ( signal !== undefined ) {
assertAbortSignal ( signal , ` ${ context } has member 'signal' that ` ) ;
}
return {
preventAbort : Boolean ( preventAbort ) ,
preventCancel : Boolean ( preventCancel ) ,
preventClose : Boolean ( preventClose ) ,
signal
} ;
}
function assertAbortSignal ( signal , context ) {
if ( ! isAbortSignal ( signal ) ) {
throw new TypeError ( ` ${ context } is not an AbortSignal. ` ) ;
}
}
function convertReadableWritablePair ( pair , context ) {
assertDictionary ( pair , context ) ;
const readable = pair === null || pair === void 0 ? void 0 : pair . readable ;
assertRequiredField ( readable , 'readable' , 'ReadableWritablePair' ) ;
assertReadableStream ( readable , ` ${ context } has member 'readable' that ` ) ;
const writable = pair === null || pair === void 0 ? void 0 : pair . writable ;
assertRequiredField ( writable , 'writable' , 'ReadableWritablePair' ) ;
assertWritableStream ( writable , ` ${ context } has member 'writable' that ` ) ;
return { readable , writable } ;
}
/ * *
* A readable stream represents a source of data , from which you can read .
*
* @ public
* /
class ReadableStream {
constructor ( rawUnderlyingSource = { } , rawStrategy = { } ) {
if ( rawUnderlyingSource === undefined ) {
rawUnderlyingSource = null ;
}
else {
assertObject ( rawUnderlyingSource , 'First parameter' ) ;
}
const strategy = convertQueuingStrategy ( rawStrategy , 'Second parameter' ) ;
const underlyingSource = convertUnderlyingDefaultOrByteSource ( rawUnderlyingSource , 'First parameter' ) ;
InitializeReadableStream ( this ) ;
if ( underlyingSource . type === 'bytes' ) {
if ( strategy . size !== undefined ) {
throw new RangeError ( 'The strategy for a byte stream cannot have a size function' ) ;
}
const highWaterMark = ExtractHighWaterMark ( strategy , 0 ) ;
SetUpReadableByteStreamControllerFromUnderlyingSource ( this , underlyingSource , highWaterMark ) ;
}
else {
const sizeAlgorithm = ExtractSizeAlgorithm ( strategy ) ;
const highWaterMark = ExtractHighWaterMark ( strategy , 1 ) ;
SetUpReadableStreamDefaultControllerFromUnderlyingSource ( this , underlyingSource , highWaterMark , sizeAlgorithm ) ;
}
}
/ * *
* Whether or not the readable stream is locked to a { @ link ReadableStreamDefaultReader | reader } .
* /
get locked ( ) {
if ( ! IsReadableStream ( this ) ) {
throw streamBrandCheckException$1 ( 'locked' ) ;
}
return IsReadableStreamLocked ( this ) ;
}
/ * *
* Cancels the stream , signaling a loss of interest in the stream by a consumer .
*
* The supplied ` reason ` argument will be given to the underlying source ' s { @ link UnderlyingSource . cancel | cancel ( ) }
* method , which might or might not use it .
* /
cancel ( reason = undefined ) {
if ( ! IsReadableStream ( this ) ) {
return promiseRejectedWith ( streamBrandCheckException$1 ( 'cancel' ) ) ;
}
if ( IsReadableStreamLocked ( this ) ) {
return promiseRejectedWith ( new TypeError ( 'Cannot cancel a stream that already has a reader' ) ) ;
}
return ReadableStreamCancel ( this , reason ) ;
}
getReader ( rawOptions = undefined ) {
if ( ! IsReadableStream ( this ) ) {
throw streamBrandCheckException$1 ( 'getReader' ) ;
}
const options = convertReaderOptions ( rawOptions , 'First parameter' ) ;
if ( options . mode === undefined ) {
return AcquireReadableStreamDefaultReader ( this ) ;
}
return AcquireReadableStreamBYOBReader ( this ) ;
}
pipeThrough ( rawTransform , rawOptions = { } ) {
if ( ! IsReadableStream ( this ) ) {
throw streamBrandCheckException$1 ( 'pipeThrough' ) ;
}
assertRequiredArgument ( rawTransform , 1 , 'pipeThrough' ) ;
const transform = convertReadableWritablePair ( rawTransform , 'First parameter' ) ;
const options = convertPipeOptions ( rawOptions , 'Second parameter' ) ;
if ( IsReadableStreamLocked ( this ) ) {
throw new TypeError ( 'ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream' ) ;
}
if ( IsWritableStreamLocked ( transform . writable ) ) {
throw new TypeError ( 'ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream' ) ;
}
const promise = ReadableStreamPipeTo ( this , transform . writable , options . preventClose , options . preventAbort , options . preventCancel , options . signal ) ;
setPromiseIsHandledToTrue ( promise ) ;
return transform . readable ;
}
pipeTo ( destination , rawOptions = { } ) {
if ( ! IsReadableStream ( this ) ) {
return promiseRejectedWith ( streamBrandCheckException$1 ( 'pipeTo' ) ) ;
}
if ( destination === undefined ) {
return promiseRejectedWith ( ` Parameter 1 is required in 'pipeTo'. ` ) ;
}
if ( ! IsWritableStream ( destination ) ) {
return promiseRejectedWith ( new TypeError ( ` ReadableStream.prototype.pipeTo's first argument must be a WritableStream ` ) ) ;
}
let options ;
try {
options = convertPipeOptions ( rawOptions , 'Second parameter' ) ;
}
catch ( e ) {
return promiseRejectedWith ( e ) ;
}
if ( IsReadableStreamLocked ( this ) ) {
return promiseRejectedWith ( new TypeError ( 'ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream' ) ) ;
}
if ( IsWritableStreamLocked ( destination ) ) {
return promiseRejectedWith ( new TypeError ( 'ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream' ) ) ;
}
return ReadableStreamPipeTo ( this , destination , options . preventClose , options . preventAbort , options . preventCancel , options . signal ) ;
}
/ * *
* Tees this readable stream , returning a two - element array containing the two resulting branches as
* new { @ link ReadableStream } instances .
*
* Teeing a stream will lock it , preventing any other consumer from acquiring a reader .
* To cancel the stream , cancel both of the resulting branches ; a composite cancellation reason will then be
* propagated to the stream ' s underlying source .
*
* Note that the chunks seen in each branch will be the same object . If the chunks are not immutable ,
* this could allow interference between the two branches .
* /
tee ( ) {
if ( ! IsReadableStream ( this ) ) {
throw streamBrandCheckException$1 ( 'tee' ) ;
}
const branches = ReadableStreamTee ( this ) ;
return CreateArrayFromList ( branches ) ;
}
values ( rawOptions = undefined ) {
if ( ! IsReadableStream ( this ) ) {
throw streamBrandCheckException$1 ( 'values' ) ;
}
const options = convertIteratorOptions ( rawOptions , 'First parameter' ) ;
return AcquireReadableStreamAsyncIterator ( this , options . preventCancel ) ;
}
}
Object . defineProperties ( ReadableStream . prototype , {
cancel : { enumerable : true } ,
getReader : { enumerable : true } ,
pipeThrough : { enumerable : true } ,
pipeTo : { enumerable : true } ,
tee : { enumerable : true } ,
values : { enumerable : true } ,
locked : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( ReadableStream . prototype , SymbolPolyfill . toStringTag , {
value : 'ReadableStream' ,
configurable : true
} ) ;
}
if ( typeof SymbolPolyfill . asyncIterator === 'symbol' ) {
Object . defineProperty ( ReadableStream . prototype , SymbolPolyfill . asyncIterator , {
value : ReadableStream . prototype . values ,
writable : true ,
configurable : true
} ) ;
}
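// Illustrative usage sketch (not from the original source; names are assumptions): the ponyfill
// ReadableStream defined above can be consumed with the async iterator wired up just above.
/*
async function readAll() {
  const stream = new ReadableStream({
    start(controller) {
      controller.enqueue('hello');
      controller.enqueue('world');
      controller.close();
    }
  });
  for await (const chunk of stream) {
    console.log(chunk); // 'hello', then 'world'
  }
}
*/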
// Abstract operations for the ReadableStream.
// Throws if and only if startAlgorithm throws.
function CreateReadableStream ( startAlgorithm , pullAlgorithm , cancelAlgorithm , highWaterMark = 1 , sizeAlgorithm = ( ) => 1 ) {
const stream = Object . create ( ReadableStream . prototype ) ;
InitializeReadableStream ( stream ) ;
const controller = Object . create ( ReadableStreamDefaultController . prototype ) ;
SetUpReadableStreamDefaultController ( stream , controller , startAlgorithm , pullAlgorithm , cancelAlgorithm , highWaterMark , sizeAlgorithm ) ;
return stream ;
}
// Throws if and only if startAlgorithm throws.
function CreateReadableByteStream ( startAlgorithm , pullAlgorithm , cancelAlgorithm ) {
const stream = Object . create ( ReadableStream . prototype ) ;
InitializeReadableStream ( stream ) ;
const controller = Object . create ( ReadableByteStreamController . prototype ) ;
SetUpReadableByteStreamController ( stream , controller , startAlgorithm , pullAlgorithm , cancelAlgorithm , 0 , undefined ) ;
return stream ;
}
function InitializeReadableStream ( stream ) {
stream . _state = 'readable' ;
stream . _reader = undefined ;
stream . _storedError = undefined ;
stream . _disturbed = false ;
}
function IsReadableStream ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_readableStreamController' ) ) {
return false ;
}
return x instanceof ReadableStream ;
}
function IsReadableStreamLocked ( stream ) {
if ( stream . _reader === undefined ) {
return false ;
}
return true ;
}
// ReadableStream API exposed for controllers.
function ReadableStreamCancel ( stream , reason ) {
stream . _disturbed = true ;
if ( stream . _state === 'closed' ) {
return promiseResolvedWith ( undefined ) ;
}
if ( stream . _state === 'errored' ) {
return promiseRejectedWith ( stream . _storedError ) ;
}
ReadableStreamClose ( stream ) ;
const reader = stream . _reader ;
if ( reader !== undefined && IsReadableStreamBYOBReader ( reader ) ) {
reader . _readIntoRequests . forEach ( readIntoRequest => {
readIntoRequest . _closeSteps ( undefined ) ;
} ) ;
reader . _readIntoRequests = new SimpleQueue ( ) ;
}
const sourceCancelPromise = stream . _readableStreamController [ CancelSteps ] ( reason ) ;
return transformPromiseWith ( sourceCancelPromise , noop ) ;
}
function ReadableStreamClose ( stream ) {
stream . _state = 'closed' ;
const reader = stream . _reader ;
if ( reader === undefined ) {
return ;
}
defaultReaderClosedPromiseResolve ( reader ) ;
if ( IsReadableStreamDefaultReader ( reader ) ) {
reader . _readRequests . forEach ( readRequest => {
readRequest . _closeSteps ( ) ;
} ) ;
reader . _readRequests = new SimpleQueue ( ) ;
}
}
function ReadableStreamError ( stream , e ) {
stream . _state = 'errored' ;
stream . _storedError = e ;
const reader = stream . _reader ;
if ( reader === undefined ) {
return ;
}
defaultReaderClosedPromiseReject ( reader , e ) ;
if ( IsReadableStreamDefaultReader ( reader ) ) {
reader . _readRequests . forEach ( readRequest => {
readRequest . _errorSteps ( e ) ;
} ) ;
reader . _readRequests = new SimpleQueue ( ) ;
}
else {
reader . _readIntoRequests . forEach ( readIntoRequest => {
readIntoRequest . _errorSteps ( e ) ;
} ) ;
reader . _readIntoRequests = new SimpleQueue ( ) ;
}
}
// Helper functions for the ReadableStream.
function streamBrandCheckException$1 ( name ) {
return new TypeError ( ` ReadableStream.prototype. ${ name } can only be used on a ReadableStream ` ) ;
}
function convertQueuingStrategyInit ( init , context ) {
assertDictionary ( init , context ) ;
const highWaterMark = init === null || init === void 0 ? void 0 : init . highWaterMark ;
assertRequiredField ( highWaterMark , 'highWaterMark' , 'QueuingStrategyInit' ) ;
return {
highWaterMark : convertUnrestrictedDouble ( highWaterMark )
} ;
}
// The size function must not have a prototype property nor be a constructor
const byteLengthSizeFunction = ( chunk ) => {
return chunk . byteLength ;
} ;
try {
Object . defineProperty ( byteLengthSizeFunction , 'name' , {
value : 'size' ,
configurable : true
} ) ;
}
catch ( _a ) {
// This property is non-configurable in older browsers, so ignore if this throws.
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility
}
/ * *
* A queuing strategy that counts the number of bytes in each chunk .
*
* @ public
* /
class ByteLengthQueuingStrategy {
constructor ( options ) {
assertRequiredArgument ( options , 1 , 'ByteLengthQueuingStrategy' ) ;
options = convertQueuingStrategyInit ( options , 'First parameter' ) ;
this . _byteLengthQueuingStrategyHighWaterMark = options . highWaterMark ;
}
/ * *
* Returns the high water mark provided to the constructor .
* /
get highWaterMark ( ) {
if ( ! IsByteLengthQueuingStrategy ( this ) ) {
throw byteLengthBrandCheckException ( 'highWaterMark' ) ;
}
return this . _byteLengthQueuingStrategyHighWaterMark ;
}
/ * *
* Measures the size of ` chunk ` by returning the value of its ` byteLength ` property .
* /
get size ( ) {
if ( ! IsByteLengthQueuingStrategy ( this ) ) {
throw byteLengthBrandCheckException ( 'size' ) ;
}
return byteLengthSizeFunction ;
}
}
Object . defineProperties ( ByteLengthQueuingStrategy . prototype , {
highWaterMark : { enumerable : true } ,
size : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( ByteLengthQueuingStrategy . prototype , SymbolPolyfill . toStringTag , {
value : 'ByteLengthQueuingStrategy' ,
configurable : true
} ) ;
}
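// Illustrative usage sketch (not from the original source): ByteLengthQueuingStrategy measures queued
// chunks by their byteLength, so the high water mark below is a byte budget rather than a chunk count.
/*
const byteStrategy = new ByteLengthQueuingStrategy({ highWaterMark: 16 * 1024 });
byteStrategy.highWaterMark;              // 16384
byteStrategy.size(new Uint8Array(1024)); // 1024
*/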
// Helper functions for the ByteLengthQueuingStrategy.
function byteLengthBrandCheckException ( name ) {
return new TypeError ( ` ByteLengthQueuingStrategy.prototype. ${ name } can only be used on a ByteLengthQueuingStrategy ` ) ;
}
function IsByteLengthQueuingStrategy ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_byteLengthQueuingStrategyHighWaterMark' ) ) {
return false ;
}
return x instanceof ByteLengthQueuingStrategy ;
}
// The size function must not have a prototype property nor be a constructor
const countSizeFunction = ( ) => {
return 1 ;
} ;
try {
Object . defineProperty ( countSizeFunction , 'name' , {
value : 'size' ,
configurable : true
} ) ;
}
catch ( _a ) {
// This property is non-configurable in older browsers, so ignore if this throws.
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility
}
/ * *
* A queuing strategy that counts the number of chunks .
*
* @ public
* /
class CountQueuingStrategy {
constructor ( options ) {
assertRequiredArgument ( options , 1 , 'CountQueuingStrategy' ) ;
options = convertQueuingStrategyInit ( options , 'First parameter' ) ;
this . _countQueuingStrategyHighWaterMark = options . highWaterMark ;
}
/ * *
* Returns the high water mark provided to the constructor .
* /
get highWaterMark ( ) {
if ( ! IsCountQueuingStrategy ( this ) ) {
throw countBrandCheckException ( 'highWaterMark' ) ;
}
return this . _countQueuingStrategyHighWaterMark ;
}
/ * *
* Measures the size of ` chunk ` by always returning 1.
* This ensures that the total queue size is a count of the number of chunks in the queue .
* /
get size ( ) {
if ( ! IsCountQueuingStrategy ( this ) ) {
throw countBrandCheckException ( 'size' ) ;
}
return countSizeFunction ;
}
}
Object . defineProperties ( CountQueuingStrategy . prototype , {
highWaterMark : { enumerable : true } ,
size : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( CountQueuingStrategy . prototype , SymbolPolyfill . toStringTag , {
value : 'CountQueuingStrategy' ,
configurable : true
} ) ;
}
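// Illustrative usage sketch (not from the original source): CountQueuingStrategy counts chunks, so every
// chunk contributes 1 toward the high water mark regardless of its size.
/*
const countStrategy = new CountQueuingStrategy({ highWaterMark: 4 });
countStrategy.size({ any: 'chunk' }); // always 1
*/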
// Helper functions for the CountQueuingStrategy.
function countBrandCheckException ( name ) {
return new TypeError ( ` CountQueuingStrategy.prototype. ${ name } can only be used on a CountQueuingStrategy ` ) ;
}
function IsCountQueuingStrategy ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_countQueuingStrategyHighWaterMark' ) ) {
return false ;
}
return x instanceof CountQueuingStrategy ;
}
function convertTransformer ( original , context ) {
assertDictionary ( original , context ) ;
const flush = original === null || original === void 0 ? void 0 : original . flush ;
const readableType = original === null || original === void 0 ? void 0 : original . readableType ;
const start = original === null || original === void 0 ? void 0 : original . start ;
const transform = original === null || original === void 0 ? void 0 : original . transform ;
const writableType = original === null || original === void 0 ? void 0 : original . writableType ;
return {
flush : flush === undefined ?
undefined :
convertTransformerFlushCallback ( flush , original , ` ${ context } has member 'flush' that ` ) ,
readableType ,
start : start === undefined ?
undefined :
convertTransformerStartCallback ( start , original , ` ${ context } has member 'start' that ` ) ,
transform : transform === undefined ?
undefined :
convertTransformerTransformCallback ( transform , original , ` ${ context } has member 'transform' that ` ) ,
writableType
} ;
}
function convertTransformerFlushCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( controller ) => promiseCall ( fn , original , [ controller ] ) ;
}
function convertTransformerStartCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( controller ) => reflectCall ( fn , original , [ controller ] ) ;
}
function convertTransformerTransformCallback ( fn , original , context ) {
assertFunction ( fn , context ) ;
return ( chunk , controller ) => promiseCall ( fn , original , [ chunk , controller ] ) ;
}
// Class TransformStream
/ * *
* A transform stream consists of a pair of streams : a { @ link WritableStream | writable stream } ,
* known as its writable side , and a { @ link ReadableStream | readable stream } , known as its readable side .
* In a manner specific to the transform stream in question , writes to the writable side result in new data being
* made available for reading from the readable side .
*
* @ public
* /
class TransformStream {
constructor ( rawTransformer = { } , rawWritableStrategy = { } , rawReadableStrategy = { } ) {
if ( rawTransformer === undefined ) {
rawTransformer = null ;
}
const writableStrategy = convertQueuingStrategy ( rawWritableStrategy , 'Second parameter' ) ;
const readableStrategy = convertQueuingStrategy ( rawReadableStrategy , 'Third parameter' ) ;
const transformer = convertTransformer ( rawTransformer , 'First parameter' ) ;
if ( transformer . readableType !== undefined ) {
throw new RangeError ( 'Invalid readableType specified' ) ;
}
if ( transformer . writableType !== undefined ) {
throw new RangeError ( 'Invalid writableType specified' ) ;
}
const readableHighWaterMark = ExtractHighWaterMark ( readableStrategy , 0 ) ;
const readableSizeAlgorithm = ExtractSizeAlgorithm ( readableStrategy ) ;
const writableHighWaterMark = ExtractHighWaterMark ( writableStrategy , 1 ) ;
const writableSizeAlgorithm = ExtractSizeAlgorithm ( writableStrategy ) ;
let startPromise _resolve ;
const startPromise = newPromise ( resolve => {
startPromise _resolve = resolve ;
} ) ;
InitializeTransformStream ( this , startPromise , writableHighWaterMark , writableSizeAlgorithm , readableHighWaterMark , readableSizeAlgorithm ) ;
SetUpTransformStreamDefaultControllerFromTransformer ( this , transformer ) ;
if ( transformer . start !== undefined ) {
startPromise _resolve ( transformer . start ( this . _transformStreamController ) ) ;
}
else {
startPromise _resolve ( undefined ) ;
}
}
/ * *
* The readable side of the transform stream .
* /
get readable ( ) {
if ( ! IsTransformStream ( this ) ) {
throw streamBrandCheckException ( 'readable' ) ;
}
return this . _readable ;
}
/ * *
* The writable side of the transform stream .
* /
get writable ( ) {
if ( ! IsTransformStream ( this ) ) {
throw streamBrandCheckException ( 'writable' ) ;
}
return this . _writable ;
}
}
Object . defineProperties ( TransformStream . prototype , {
readable : { enumerable : true } ,
writable : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( TransformStream . prototype , SymbolPolyfill . toStringTag , {
value : 'TransformStream' ,
configurable : true
} ) ;
}
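// Illustrative usage sketch (not from the original source): a minimal TransformStream whose transform()
// callback upper-cases each chunk. The write is intentionally not awaited before reading, because the
// chunk is only transformed once the readable side pulls.
/*
const upperCaser = new TransformStream({
  transform(chunk, controller) {
    controller.enqueue(String(chunk).toUpperCase());
  }
});
const writer = upperCaser.writable.getWriter();
const reader = upperCaser.readable.getReader();
writer.write('abc');
reader.read().then(({ value }) => console.log(value)); // 'ABC'
*/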
function InitializeTransformStream ( stream , startPromise , writableHighWaterMark , writableSizeAlgorithm , readableHighWaterMark , readableSizeAlgorithm ) {
function startAlgorithm ( ) {
return startPromise ;
}
function writeAlgorithm ( chunk ) {
return TransformStreamDefaultSinkWriteAlgorithm ( stream , chunk ) ;
}
function abortAlgorithm ( reason ) {
return TransformStreamDefaultSinkAbortAlgorithm ( stream , reason ) ;
}
function closeAlgorithm ( ) {
return TransformStreamDefaultSinkCloseAlgorithm ( stream ) ;
}
stream . _writable = CreateWritableStream ( startAlgorithm , writeAlgorithm , closeAlgorithm , abortAlgorithm , writableHighWaterMark , writableSizeAlgorithm ) ;
function pullAlgorithm ( ) {
return TransformStreamDefaultSourcePullAlgorithm ( stream ) ;
}
function cancelAlgorithm ( reason ) {
TransformStreamErrorWritableAndUnblockWrite ( stream , reason ) ;
return promiseResolvedWith ( undefined ) ;
}
stream . _readable = CreateReadableStream ( startAlgorithm , pullAlgorithm , cancelAlgorithm , readableHighWaterMark , readableSizeAlgorithm ) ;
// The [[backpressure]] slot is set to undefined so that it can be initialised by TransformStreamSetBackpressure.
stream . _backpressure = undefined ;
stream . _backpressureChangePromise = undefined ;
stream . _backpressureChangePromise _resolve = undefined ;
TransformStreamSetBackpressure ( stream , true ) ;
stream . _transformStreamController = undefined ;
}
function IsTransformStream ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_transformStreamController' ) ) {
return false ;
}
return x instanceof TransformStream ;
}
// This is a no-op if both sides are already errored.
function TransformStreamError ( stream , e ) {
ReadableStreamDefaultControllerError ( stream . _readable . _readableStreamController , e ) ;
TransformStreamErrorWritableAndUnblockWrite ( stream , e ) ;
}
function TransformStreamErrorWritableAndUnblockWrite ( stream , e ) {
TransformStreamDefaultControllerClearAlgorithms ( stream . _transformStreamController ) ;
WritableStreamDefaultControllerErrorIfNeeded ( stream . _writable . _writableStreamController , e ) ;
if ( stream . _backpressure ) {
// Pretend that pull() was called to permit any pending write() calls to complete. TransformStreamSetBackpressure()
// cannot be called from enqueue() or pull() once the ReadableStream is errored, so this will be the final time
// _backpressure is set.
TransformStreamSetBackpressure ( stream , false ) ;
}
}
function TransformStreamSetBackpressure ( stream , backpressure ) {
// Passes also when called during construction.
if ( stream . _backpressureChangePromise !== undefined ) {
stream . _backpressureChangePromise _resolve ( ) ;
}
stream . _backpressureChangePromise = newPromise ( resolve => {
stream . _backpressureChangePromise _resolve = resolve ;
} ) ;
stream . _backpressure = backpressure ;
}
// Class TransformStreamDefaultController
/ * *
* Allows control of the { @ link ReadableStream } and { @ link WritableStream } of the associated { @ link TransformStream } .
*
* @ public
* /
class TransformStreamDefaultController {
constructor ( ) {
throw new TypeError ( 'Illegal constructor' ) ;
}
/ * *
* Returns the desired size to fill the readable side ’ s internal queue . It can be negative , if the queue is over - full .
* /
get desiredSize ( ) {
if ( ! IsTransformStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException ( 'desiredSize' ) ;
}
const readableController = this . _controlledTransformStream . _readable . _readableStreamController ;
return ReadableStreamDefaultControllerGetDesiredSize ( readableController ) ;
}
enqueue ( chunk = undefined ) {
if ( ! IsTransformStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException ( 'enqueue' ) ;
}
TransformStreamDefaultControllerEnqueue ( this , chunk ) ;
}
/ * *
* Errors both the readable side and the writable side of the controlled transform stream , making all future
* interactions with it fail with the given error ` e ` . Any chunks queued for transformation will be discarded .
* /
error ( reason = undefined ) {
if ( ! IsTransformStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException ( 'error' ) ;
}
TransformStreamDefaultControllerError ( this , reason ) ;
}
/ * *
* Closes the readable side and errors the writable side of the controlled transform stream . This is useful when the
* transformer only needs to consume a portion of the chunks written to the writable side .
* /
terminate ( ) {
if ( ! IsTransformStreamDefaultController ( this ) ) {
throw defaultControllerBrandCheckException ( 'terminate' ) ;
}
TransformStreamDefaultControllerTerminate ( this ) ;
}
}
Object . defineProperties ( TransformStreamDefaultController . prototype , {
enqueue : { enumerable : true } ,
error : { enumerable : true } ,
terminate : { enumerable : true } ,
desiredSize : { enumerable : true }
} ) ;
if ( typeof SymbolPolyfill . toStringTag === 'symbol' ) {
Object . defineProperty ( TransformStreamDefaultController . prototype , SymbolPolyfill . toStringTag , {
value : 'TransformStreamDefaultController' ,
configurable : true
} ) ;
}
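// Illustrative usage sketch (not from the original source): terminate() lets a transformer stop early,
// closing the readable side and erroring the writable side once enough chunks have been seen.
/*
let seen = 0;
const firstThree = new TransformStream({
  transform(chunk, controller) {
    controller.enqueue(chunk);
    if (++seen === 3) {
      controller.terminate();
    }
  }
});
*/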
// Transform Stream Default Controller Abstract Operations
function IsTransformStreamDefaultController ( x ) {
if ( ! typeIsObject ( x ) ) {
return false ;
}
if ( ! Object . prototype . hasOwnProperty . call ( x , '_controlledTransformStream' ) ) {
return false ;
}
return x instanceof TransformStreamDefaultController ;
}
function SetUpTransformStreamDefaultController ( stream , controller , transformAlgorithm , flushAlgorithm ) {
controller . _controlledTransformStream = stream ;
stream . _transformStreamController = controller ;
controller . _transformAlgorithm = transformAlgorithm ;
controller . _flushAlgorithm = flushAlgorithm ;
}
function SetUpTransformStreamDefaultControllerFromTransformer ( stream , transformer ) {
const controller = Object . create ( TransformStreamDefaultController . prototype ) ;
let transformAlgorithm = ( chunk ) => {
try {
TransformStreamDefaultControllerEnqueue ( controller , chunk ) ;
return promiseResolvedWith ( undefined ) ;
}
catch ( transformResultE ) {
return promiseRejectedWith ( transformResultE ) ;
}
} ;
let flushAlgorithm = ( ) => promiseResolvedWith ( undefined ) ;
if ( transformer . transform !== undefined ) {
transformAlgorithm = chunk => transformer . transform ( chunk , controller ) ;
}
if ( transformer . flush !== undefined ) {
flushAlgorithm = ( ) => transformer . flush ( controller ) ;
}
SetUpTransformStreamDefaultController ( stream , controller , transformAlgorithm , flushAlgorithm ) ;
}
function TransformStreamDefaultControllerClearAlgorithms ( controller ) {
controller . _transformAlgorithm = undefined ;
controller . _flushAlgorithm = undefined ;
}
function TransformStreamDefaultControllerEnqueue ( controller , chunk ) {
const stream = controller . _controlledTransformStream ;
const readableController = stream . _readable . _readableStreamController ;
if ( ! ReadableStreamDefaultControllerCanCloseOrEnqueue ( readableController ) ) {
throw new TypeError ( 'Readable side is not in a state that permits enqueue' ) ;
}
// We throttle transform invocations based on the backpressure of the ReadableStream, but we still
// accept TransformStreamDefaultControllerEnqueue() calls.
try {
ReadableStreamDefaultControllerEnqueue ( readableController , chunk ) ;
}
catch ( e ) {
// This happens when readableStrategy.size() throws.
TransformStreamErrorWritableAndUnblockWrite ( stream , e ) ;
throw stream . _readable . _storedError ;
}
const backpressure = ReadableStreamDefaultControllerHasBackpressure ( readableController ) ;
if ( backpressure !== stream . _backpressure ) {
TransformStreamSetBackpressure ( stream , true ) ;
}
}
function TransformStreamDefaultControllerError ( controller , e ) {
TransformStreamError ( controller . _controlledTransformStream , e ) ;
}
function TransformStreamDefaultControllerPerformTransform ( controller , chunk ) {
const transformPromise = controller . _transformAlgorithm ( chunk ) ;
return transformPromiseWith ( transformPromise , undefined , r => {
TransformStreamError ( controller . _controlledTransformStream , r ) ;
throw r ;
} ) ;
}
function TransformStreamDefaultControllerTerminate ( controller ) {
const stream = controller . _controlledTransformStream ;
const readableController = stream . _readable . _readableStreamController ;
ReadableStreamDefaultControllerClose ( readableController ) ;
const error = new TypeError ( 'TransformStream terminated' ) ;
TransformStreamErrorWritableAndUnblockWrite ( stream , error ) ;
}
// TransformStreamDefaultSink Algorithms
function TransformStreamDefaultSinkWriteAlgorithm ( stream , chunk ) {
const controller = stream . _transformStreamController ;
if ( stream . _backpressure ) {
const backpressureChangePromise = stream . _backpressureChangePromise ;
return transformPromiseWith ( backpressureChangePromise , ( ) => {
const writable = stream . _writable ;
const state = writable . _state ;
if ( state === 'erroring' ) {
throw writable . _storedError ;
}
return TransformStreamDefaultControllerPerformTransform ( controller , chunk ) ;
} ) ;
}
return TransformStreamDefaultControllerPerformTransform ( controller , chunk ) ;
}
function TransformStreamDefaultSinkAbortAlgorithm ( stream , reason ) {
// abort() is not called synchronously, so it is possible for abort() to be called when the stream is already
// errored.
TransformStreamError ( stream , reason ) ;
return promiseResolvedWith ( undefined ) ;
}
function TransformStreamDefaultSinkCloseAlgorithm ( stream ) {
// stream._readable cannot change after construction, so caching it across a call to user code is safe.
const readable = stream . _readable ;
const controller = stream . _transformStreamController ;
const flushPromise = controller . _flushAlgorithm ( ) ;
TransformStreamDefaultControllerClearAlgorithms ( controller ) ;
// Return a promise that is fulfilled with undefined on success.
return transformPromiseWith ( flushPromise , ( ) => {
if ( readable . _state === 'errored' ) {
throw readable . _storedError ;
}
ReadableStreamDefaultControllerClose ( readable . _readableStreamController ) ;
} , r => {
TransformStreamError ( stream , r ) ;
throw readable . _storedError ;
} ) ;
}
// TransformStreamDefaultSource Algorithms
function TransformStreamDefaultSourcePullAlgorithm ( stream ) {
// Invariant. Enforced by the promises returned by start() and pull().
TransformStreamSetBackpressure ( stream , false ) ;
// Prevent the next pull() call until there is backpressure.
return stream . _backpressureChangePromise ;
}
// Helper functions for the TransformStreamDefaultController.
function defaultControllerBrandCheckException ( name ) {
return new TypeError ( ` TransformStreamDefaultController.prototype. ${ name } can only be used on a TransformStreamDefaultController ` ) ;
}
// Helper functions for the TransformStream.
function streamBrandCheckException ( name ) {
return new TypeError ( ` TransformStream.prototype. ${ name } can only be used on a TransformStream ` ) ;
}
exports . ByteLengthQueuingStrategy = ByteLengthQueuingStrategy ;
exports . CountQueuingStrategy = CountQueuingStrategy ;
exports . ReadableByteStreamController = ReadableByteStreamController ;
exports . ReadableStream = ReadableStream ;
exports . ReadableStreamBYOBReader = ReadableStreamBYOBReader ;
exports . ReadableStreamBYOBRequest = ReadableStreamBYOBRequest ;
exports . ReadableStreamDefaultController = ReadableStreamDefaultController ;
exports . ReadableStreamDefaultReader = ReadableStreamDefaultReader ;
exports . TransformStream = TransformStream ;
exports . TransformStreamDefaultController = TransformStreamDefaultController ;
exports . WritableStream = WritableStream ;
exports . WritableStreamDefaultController = WritableStreamDefaultController ;
exports . WritableStreamDefaultWriter = WritableStreamDefaultWriter ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
} ) ) ) ;
//# sourceMappingURL=ponyfill.es2018.js.map
/***/ } ) ,
/***/ 9047 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . http = void 0 ;
const fetch = _ _nccwpck _require _ _ ( 4429 ) ;
var https = _ _nccwpck _require _ _ ( 5687 ) ;
class Http {
make ( url , headers , body , ignoreCertificate ) {
return new Promise ( ( resolve , reject ) => {
fetch ( url , this . getOptions ( 'post' , headers , body , ignoreCertificate ) ) . then ( ( res ) => resolve ( res ) ) . catch ( ( err ) => reject ( err ) ) ;
} ) ;
}
getOptions ( method , headers , body , ignoreCertificate ) {
const options = {
headers : headers ? JSON . parse ( headers ) : { } ,
method
} ;
if ( body ) {
options . body = body ;
}
if ( ignoreCertificate ) {
options . agent = new https . Agent ( { rejectUnauthorized : false } ) ;
}
options . headers [ 'content-type' ] = 'application/json' ;
return options ;
}
}
exports . http = new Http ( ) ;
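// Illustrative usage sketch (not from the original source; URL and header values are hypothetical): the
// exported singleton is consumed by the action's entry module below. headers must be a JSON string
// (it is run through JSON.parse) and body a pre-serialized string.
/*
exports.http.make(
  'https://example.invalid/hook',
  '{"authorization": "Bearer TOKEN"}',
  JSON.stringify({ event: 'deploy' }),
  false // ignoreCertificate
).then(res => console.log(res.status));
*/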
/***/ } ) ,
/***/ 399 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const core = _ _nccwpck _require _ _ ( 2186 ) ;
const http _1 = _ _nccwpck _require _ _ ( 9047 ) ;
function run ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const url = core . getInput ( 'url' )
? core . getInput ( 'url' )
: process . env . WEBHOOK _URL
? process . env . WEBHOOK _URL
: '' ;
const headers = core . getInput ( 'headers' )
? core . getInput ( 'headers' )
: process . env . headers
? process . env . headers
: null ;
const body = core . getInput ( 'body' )
? core . getInput ( 'body' )
: process . env . data
? process . env . data
: null ;
const insecure = core . getInput ( 'insecure' )
? core . getInput ( 'insecure' ) == 'true'
: process . env . insecure
? process . env . insecure == 'true'
: false ;
if ( ! url ) {
core . setFailed ( 'A url is required to run this action.' ) ;
throw new Error ( 'A url is required to run this action.' ) ;
}
core . info ( ` Sending webhook request to ${ url } ` ) ;
http _1 . http
. make ( url , headers , body , insecure )
. then ( res => {
if ( res . status >= 400 ) {
error ( res . status ) ;
return ;
}
} )
. catch ( err => {
core . info ( ` Error: ${ err } ` ) ;
error ( err . status ) ;
return ;
} ) ;
} ) ;
}
function error ( statusCode ) {
core . setFailed ( ` Received status code: ${ statusCode } ` ) ;
throw new Error ( ` Request failed with status code: ${ statusCode } ` ) ;
}
run ( ) ;
/***/ } ) ,
/***/ 9491 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "assert" ) ;
/***/ } ) ,
/***/ 4300 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "buffer" ) ;
/***/ } ) ,
/***/ 6113 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "crypto" ) ;
/***/ } ) ,
/***/ 2361 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 7147 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 3685 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 5687 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 1808 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 7742 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "node:process" ) ;
/***/ } ) ,
/***/ 2477 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "node:stream/web" ) ;
/***/ } ) ,
/***/ 2037 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 1017 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 4404 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "tls" ) ;
/***/ } ) ,
/***/ 3837 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 1267 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "worker_threads" ) ;
/***/ } ) ,
/***/ 8572 :
/***/ ( ( _ _unused _webpack _module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
/* c8 ignore start */
// 64 KiB (the same size Chrome slices its blobs into Uint8Arrays)
const POOL _SIZE = 65536
if ( ! globalThis . ReadableStream ) {
// `node:stream/web` got introduced in v16.5.0 as experimental
// and it's preferred over the polyfilled version. So we also
// suppress the warning that gets emitted by NodeJS for using it.
try {
const process = _ _nccwpck _require _ _ ( 7742 )
const { emitWarning } = process
try {
process . emitWarning = ( ) => { }
Object . assign ( globalThis , _ _nccwpck _require _ _ ( 2477 ) )
process . emitWarning = emitWarning
} catch ( error ) {
process . emitWarning = emitWarning
throw error
}
} catch ( error ) {
// fallback to polyfill implementation
Object . assign ( globalThis , _ _nccwpck _require _ _ ( 1452 ) )
}
}
try {
// Don't use node: prefix for this, require+node: is not supported until node v14.14
// Only `import()` can use prefix in 12.20 and later
const { Blob } = _ _nccwpck _require _ _ ( 4300 )
if ( Blob && ! Blob . prototype . stream ) {
Blob . prototype . stream = function name ( params ) {
let position = 0
const blob = this
return new ReadableStream ( {
type : 'bytes' ,
async pull ( ctrl ) {
const chunk = blob . slice ( position , Math . min ( blob . size , position + POOL _SIZE ) )
const buffer = await chunk . arrayBuffer ( )
position += buffer . byteLength
ctrl . enqueue ( new Uint8Array ( buffer ) )
if ( position === blob . size ) {
ctrl . close ( )
}
}
} )
}
}
} catch ( error ) { }
/* c8 ignore end */
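// Illustrative usage sketch (not from the original source; assumes a Node version that exposes
// buffer.Blob): with the patch above, a Blob gains a stream() method that yields Uint8Array chunks
// in 64 KiB slices.
/*
const { Blob } = require('buffer');
const blob = new Blob(['streamed via the patch above']);
blob.stream().getReader().read().then(({ value }) => {
  console.log(Buffer.from(value).toString()); // 'streamed via the patch above'
});
*/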
/***/ } ) ,
/***/ 3213 :
/***/ ( ( _ _unused _webpack _ _ _webpack _module _ _ , _ _webpack _exports _ _ , _ _nccwpck _require _ _ ) => {
"use strict" ;
/* harmony export */ _ _nccwpck _require _ _ . d ( _ _webpack _exports _ _ , {
/* harmony export */ "Z" : ( ) => ( _ _WEBPACK _DEFAULT _EXPORT _ _ )
/* harmony export */ } ) ;
/* unused harmony export File */
/* harmony import */ var _index _js _ _WEBPACK _IMPORTED _MODULE _0 _ _ = _ _nccwpck _require _ _ ( 1410 ) ;
const _File = class File extends _index _js _ _WEBPACK _IMPORTED _MODULE _0 _ _ /* ["default"] */ . Z {
# lastModified = 0
# name = ''
/ * *
* @ param { * [ ] } fileBits
* @ param { string } fileName
* @ param { { lastModified ? : number , type ? : string } } options
* /// @ts-ignore
constructor ( fileBits , fileName , options = { } ) {
if ( arguments . length < 2 ) {
throw new TypeError ( ` Failed to construct 'File': 2 arguments required, but only ${ arguments . length } present. ` )
}
super ( fileBits , options )
if ( options === null ) options = { }
// Simulate WebIDL type casting for NaN value in lastModified option.
const lastModified = options . lastModified === undefined ? Date . now ( ) : Number ( options . lastModified )
if ( ! Number . isNaN ( lastModified ) ) {
this . # lastModified = lastModified
}
this . # name = String ( fileName )
}
get name ( ) {
return this . # name
}
get lastModified ( ) {
return this . # lastModified
}
get [ Symbol . toStringTag ] ( ) {
return 'File'
}
static [ Symbol . hasInstance ] ( object ) {
return ! ! object && object instanceof _index _js _ _WEBPACK _IMPORTED _MODULE _0 _ _ /* ["default"] */ . Z &&
/^(File)$/ . test ( object [ Symbol . toStringTag ] )
}
}
/** @type {typeof globalThis.File} */ // @ts-ignore
const File = _File
/* harmony default export */ const _ _WEBPACK _DEFAULT _EXPORT _ _ = ( File ) ;
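// Illustrative usage sketch (not from the original source; file name and contents are hypothetical):
// the File class above adds name and lastModified on top of the fetch-blob Blob.
/*
const file = new File(['col1,col2\n1,2\n'], 'data.csv', { type: 'text/csv' });
file.name;         // 'data.csv'
file.size;         // 14 (UTF-8 byte length of the single string part)
file.lastModified; // defaults to Date.now() when not supplied
*/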
/***/ } ) ,
/***/ 2777 :
/***/ ( ( _ _unused _webpack _ _ _webpack _module _ _ , _ _webpack _exports _ _ , _ _nccwpck _require _ _ ) => {
"use strict" ;
// EXPORTS
_ _nccwpck _require _ _ . d ( _ _webpack _exports _ _ , {
"t6" : ( ) => ( /* reexport */ fetch _blob /* default */ . Z ) ,
"$B" : ( ) => ( /* reexport */ file /* default */ . Z ) ,
"xB" : ( ) => ( /* binding */ blobFrom ) ,
"SX" : ( ) => ( /* binding */ blobFromSync ) ,
"e2" : ( ) => ( /* binding */ fileFrom ) ,
"RA" : ( ) => ( /* binding */ fileFromSync )
} ) ;
// UNUSED EXPORTS: default
; // CONCATENATED MODULE: external "node:fs"
const external _node _fs _namespaceObject = require ( "node:fs" ) ;
; // CONCATENATED MODULE: external "node:path"
const external _node _path _namespaceObject = require ( "node:path" ) ;
// EXTERNAL MODULE: ./node_modules/node-domexception/index.js
var node _domexception = _ _nccwpck _require _ _ ( 7760 ) ;
// EXTERNAL MODULE: ./node_modules/fetch-blob/file.js
var file = _ _nccwpck _require _ _ ( 3213 ) ;
// EXTERNAL MODULE: ./node_modules/fetch-blob/index.js
var fetch _blob = _ _nccwpck _require _ _ ( 1410 ) ;
; // CONCATENATED MODULE: ./node_modules/fetch-blob/from.js
const { stat } = external _node _fs _namespaceObject . promises
/ * *
* @ param { string } path filepath on the disk
* @ param { string } [ type ] mimetype to use
* /
const blobFromSync = ( path , type ) => fromBlob ( ( 0 , external _node _fs _namespaceObject . statSync ) ( path ) , path , type )
/ * *
* @ param { string } path filepath on the disk
* @ param { string } [ type ] mimetype to use
* @ returns { Promise < Blob > }
* /
const blobFrom = ( path , type ) => stat ( path ) . then ( stat => fromBlob ( stat , path , type ) )
/ * *
* @ param { string } path filepath on the disk
* @ param { string } [ type ] mimetype to use
* @ returns { Promise < File > }
* /
const fileFrom = ( path , type ) => stat ( path ) . then ( stat => fromFile ( stat , path , type ) )
/ * *
* @ param { string } path filepath on the disk
* @ param { string } [ type ] mimetype to use
* /
const fileFromSync = ( path , type ) => fromFile ( ( 0 , external _node _fs _namespaceObject . statSync ) ( path ) , path , type )
// @ts-ignore
const fromBlob = ( stat , path , type = '' ) => new fetch _blob /* default */ . Z ( [ new BlobDataItem ( {
path ,
size : stat . size ,
lastModified : stat . mtimeMs ,
start : 0
} ) ] , { type } )
// @ts-ignore
const fromFile = ( stat , path , type = '' ) => new file /* default */ . Z ( [ new BlobDataItem ( {
path ,
size : stat . size ,
lastModified : stat . mtimeMs ,
start : 0
} ) ] , ( 0 , external _node _path _namespaceObject . basename ) ( path ) , { type , lastModified : stat . mtimeMs } )
/ * *
* This is a blob backed by a file on disk ,
* with minimal requirements . It is wrapped around a Blob as a blobPart ,
* so you have no direct access to this .
*
* @ private
* /
class BlobDataItem {
# path
# start
constructor ( options ) {
this . # path = options . path
this . # start = options . start
this . size = options . size
this . lastModified = options . lastModified
}
/ * *
* Slicing arguments are first validated and formatted
* so that they are not out of range by Blob . prototype . slice
* /
slice ( start , end ) {
return new BlobDataItem ( {
path : this . # path ,
lastModified : this . lastModified ,
size : end - start ,
start : this . # start + start
} )
}
async * stream ( ) {
const { mtimeMs } = await stat ( this . # path )
if ( mtimeMs > this . lastModified ) {
throw new node _domexception ( 'The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.' , 'NotReadableError' )
}
yield * ( 0 , external _node _fs _namespaceObject . createReadStream ) ( this . # path , {
start : this . # start ,
end : this . # start + this . size - 1
} )
}
get [ Symbol . toStringTag ] ( ) {
return 'Blob'
}
}
/* harmony default export */ const from = ( ( /* unused pure expression or super */ null && ( blobFromSync ) ) ) ;
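// Illustrative usage sketch (not from the original source; the paths are hypothetical): blobFromSync and
// fileFrom only stat the file up front; the bytes are read lazily when the blob is consumed.
/*
const blob = blobFromSync('./package.json', 'application/json');
blob.text().then(json => console.log(JSON.parse(json).name));
fileFrom('./README.md', 'text/markdown').then(file => console.log(file.name)); // 'README.md'
*/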
/***/ } ) ,
/***/ 1410 :
/***/ ( ( _ _unused _webpack _ _ _webpack _module _ _ , _ _webpack _exports _ _ , _ _nccwpck _require _ _ ) => {
"use strict" ;
/* harmony export */ _ _nccwpck _require _ _ . d ( _ _webpack _exports _ _ , {
/* harmony export */ "Z" : ( ) => ( _ _WEBPACK _DEFAULT _EXPORT _ _ )
/* harmony export */ } ) ;
/* unused harmony export Blob */
/* harmony import */ var _streams _cjs _ _WEBPACK _IMPORTED _MODULE _0 _ _ = _ _nccwpck _require _ _ ( 8572 ) ;
/*! fetch-blob. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
// TODO (jimmywarting): in the future use conditional loading with top level await (requires 14.x)
// Node has recently added whatwg stream into core
// 64 KiB (the same size Chrome slices its blobs into Uint8Arrays)
const POOL _SIZE = 65536
/** @param {(Blob | Uint8Array)[]} parts */
async function * toIterator ( parts , clone = true ) {
for ( const part of parts ) {
if ( 'stream' in part ) {
yield * ( /** @type {AsyncIterableIterator<Uint8Array>} */ ( part . stream ( ) ) )
} else if ( ArrayBuffer . isView ( part ) ) {
if ( clone ) {
let position = part . byteOffset
const end = part . byteOffset + part . byteLength
while ( position !== end ) {
const size = Math . min ( end - position , POOL _SIZE )
const chunk = part . buffer . slice ( position , position + size )
position += chunk . byteLength
yield new Uint8Array ( chunk )
}
} else {
yield part
}
/* c8 ignore next 10 */
} else {
// For blobs that have arrayBuffer but no stream method (Node's buffer.Blob)
let position = 0 , b = ( /** @type {Blob} */ ( part ) )
while ( position !== b . size ) {
const chunk = b . slice ( position , Math . min ( b . size , position + POOL _SIZE ) )
const buffer = await chunk . arrayBuffer ( )
position += buffer . byteLength
yield new Uint8Array ( buffer )
}
}
}
}
const _Blob = class Blob {
/** @type {Array.<(Blob|Uint8Array)>} */
# parts = [ ]
# type = ''
# size = 0
# endings = 'transparent'
/ * *
* The Blob ( ) constructor returns a new Blob object . The content
* of the blob consists of the concatenation of the values given
* in the parameter array .
*
* @ param { * } blobParts
* @ param { { type ? : string , endings ? : string } } [ options ]
* /
constructor ( blobParts = [ ] , options = { } ) {
if ( typeof blobParts !== 'object' || blobParts === null ) {
throw new TypeError ( 'Failed to construct \'Blob\': The provided value cannot be converted to a sequence.' )
}
if ( typeof blobParts [ Symbol . iterator ] !== 'function' ) {
throw new TypeError ( 'Failed to construct \'Blob\': The object must have a callable @@iterator property.' )
}
if ( typeof options !== 'object' && typeof options !== 'function' ) {
throw new TypeError ( 'Failed to construct \'Blob\': parameter 2 cannot convert to dictionary.' )
}
if ( options === null ) options = { }
const encoder = new TextEncoder ( )
for ( const element of blobParts ) {
let part
if ( ArrayBuffer . isView ( element ) ) {
part = new Uint8Array ( element . buffer . slice ( element . byteOffset , element . byteOffset + element . byteLength ) )
} else if ( element instanceof ArrayBuffer ) {
part = new Uint8Array ( element . slice ( 0 ) )
} else if ( element instanceof Blob ) {
part = element
} else {
part = encoder . encode ( ` ${ element } ` )
}
this . # size += ArrayBuffer . isView ( part ) ? part . byteLength : part . size
this . # parts . push ( part )
}
this . # endings = ` ${ options . endings === undefined ? 'transparent' : options . endings } `
const type = options . type === undefined ? '' : String ( options . type )
this . # type = /^[\x20-\x7E]*$/ . test ( type ) ? type : ''
}
/ * *
* The Blob interface ' s size property returns the
* size of the Blob in bytes .
* /
get size ( ) {
return this . # size
}
/ * *
* The type property of a Blob object returns the MIME type of the file .
* /
get type ( ) {
return this . # type
}
/ * *
* The text ( ) method in the Blob interface returns a Promise
* that resolves with a string containing the contents of
* the blob , interpreted as UTF - 8.
*
* @ return { Promise < string > }
* /
async text ( ) {
// More optimized than using this.arrayBuffer()
// that requires twice as much ram
const decoder = new TextDecoder ( )
let str = ''
for await ( const part of toIterator ( this . # parts , false ) ) {
str += decoder . decode ( part , { stream : true } )
}
// Remaining
str += decoder . decode ( )
return str
}
/ * *
* The arrayBuffer ( ) method in the Blob interface returns a
* Promise that resolves with the contents of the blob as
* binary data contained in an ArrayBuffer .
*
* @ return { Promise < ArrayBuffer > }
* /
async arrayBuffer ( ) {
// Easier way... just an unnecessary overhead
// const view = new Uint8Array(this.size);
// await this.stream().getReader({mode: 'byob'}).read(view);
// return view.buffer;
const data = new Uint8Array ( this . size )
let offset = 0
for await ( const chunk of toIterator ( this . # parts , false ) ) {
data . set ( chunk , offset )
offset += chunk . length
}
return data . buffer
}
stream ( ) {
const it = toIterator ( this . # parts , true )
return new globalThis . ReadableStream ( {
// @ts-ignore
type : 'bytes' ,
async pull ( ctrl ) {
const chunk = await it . next ( )
chunk . done ? ctrl . close ( ) : ctrl . enqueue ( chunk . value )
} ,
async cancel ( ) {
await it . return ( )
}
} )
}
/ * *
* The Blob interface ' s slice ( ) method creates and returns a
* new Blob object which contains data from a subset of the
* blob on which it ' s called .
*
* @ param { number } [ start ]
* @ param { number } [ end ]
* @ param { string } [ type ]
* /
slice ( start = 0 , end = this . size , type = '' ) {
const { size } = this
let relativeStart = start < 0 ? Math . max ( size + start , 0 ) : Math . min ( start , size )
let relativeEnd = end < 0 ? Math . max ( size + end , 0 ) : Math . min ( end , size )
const span = Math . max ( relativeEnd - relativeStart , 0 )
const parts = this . # parts
const blobParts = [ ]
let added = 0
for ( const part of parts ) {
// don't add the overflow to new blobParts
if ( added >= span ) {
break
}
const size = ArrayBuffer . isView ( part ) ? part . byteLength : part . size
if ( relativeStart && size <= relativeStart ) {
// Skip the beginning and change the relative
// start & end position as we skip the unwanted parts
relativeStart -= size
relativeEnd -= size
} else {
let chunk
if ( ArrayBuffer . isView ( part ) ) {
chunk = part . subarray ( relativeStart , Math . min ( size , relativeEnd ) )
added += chunk . byteLength
} else {
chunk = part . slice ( relativeStart , Math . min ( size , relativeEnd ) )
added += chunk . size
}
relativeEnd -= size
blobParts . push ( chunk )
relativeStart = 0 // All next sequential parts should start at 0
}
}
const blob = new Blob ( [ ] , { type : String ( type ) . toLowerCase ( ) } )
blob . # size = span
blob . # parts = blobParts
return blob
}
get [ Symbol . toStringTag ] ( ) {
return 'Blob'
}
static [ Symbol . hasInstance ] ( object ) {
return (
object &&
typeof object === 'object' &&
typeof object . constructor === 'function' &&
(
typeof object . stream === 'function' ||
typeof object . arrayBuffer === 'function'
) &&
/^(Blob|File)$/ . test ( object [ Symbol . toStringTag ] )
)
}
}
Object . defineProperties ( _Blob . prototype , {
size : { enumerable : true } ,
type : { enumerable : true } ,
slice : { enumerable : true }
} )
/** @type {typeof globalThis.Blob} */
const Blob = _Blob
/* harmony default export */ const _ _WEBPACK _DEFAULT _EXPORT _ _ = ( Blob ) ;
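// Illustrative usage sketch (not from the original source): the fetch-blob Blob above concatenates its
// parts and supports size, slice(), text(), arrayBuffer() and stream().
/*
const blob = new Blob(['hello ', 'world'], { type: 'text/plain' });
blob.size;                                   // 11
blob.slice(0, 5).text().then(console.log);   // 'hello'
blob.arrayBuffer().then(buf => console.log(buf.byteLength)); // 11
*/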
/***/ } ) ,
/***/ 8010 :
/***/ ( ( _ _unused _webpack _ _ _webpack _module _ _ , _ _webpack _exports _ _ , _ _nccwpck _require _ _ ) => {
"use strict" ;
/* harmony export */ _ _nccwpck _require _ _ . d ( _ _webpack _exports _ _ , {
/* harmony export */ "Ct" : ( ) => ( /* binding */ FormData ) ,
/* harmony export */ "au" : ( ) => ( /* binding */ formDataToBlob )
/* harmony export */ } ) ;
/* unused harmony export File */
/* harmony import */ var fetch _blob _ _WEBPACK _IMPORTED _MODULE _0 _ _ = _ _nccwpck _require _ _ ( 1410 ) ;
/* harmony import */ var fetch _blob _file _js _ _WEBPACK _IMPORTED _MODULE _1 _ _ = _ _nccwpck _require _ _ ( 3213 ) ;
/*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
var { toStringTag : t , iterator : i , hasInstance : h } = Symbol ,
r = Math . random ,
m = 'append,set,get,getAll,delete,keys,values,entries,forEach,constructor' . split ( ',' ) ,
f = ( a , b , c ) => ( a += '' , /^(Blob|File)$/ . test ( b && b [ t ] ) ? [ ( c = c !== void 0 ? c + '' : b [ t ] == 'File' ? b . name : 'blob' , a ) , b . name !== c || b [ t ] == 'blob' ? new fetch _blob _file _js _ _WEBPACK _IMPORTED _MODULE _1 _ _ /* ["default"] */ . Z ( [ b ] , c , b ) : b ] : [ a , b + '' ] ) ,
e = ( c , f ) => ( f ? c : c . replace ( /\r?\n|\r/g , '\r\n' ) ) . replace ( /\n/g , '%0A' ) . replace ( /\r/g , '%0D' ) . replace ( /"/g , '%22' ) ,
x = ( n , a , e ) => { if ( a . length < e ) { throw new TypeError ( ` Failed to execute ' ${ n } ' on 'FormData': ${ e } arguments required, but only ${ a . length } present. ` ) } }
const File = ( /* unused pure expression or super */ null && ( F ) )
/** @type {typeof globalThis.FormData} */
const FormData = class FormData {
# d = [ ] ;
constructor ( ... a ) { if ( a . length ) throw new TypeError ( ` Failed to construct 'FormData': parameter 1 is not of type 'HTMLFormElement'. ` ) }
get [ t ] ( ) { return 'FormData' }
[ i ] ( ) { return this . entries ( ) }
static [ h ] ( o ) { return o && typeof o === 'object' && o [ t ] === 'FormData' && ! m . some ( m => typeof o [ m ] != 'function' ) }
append ( ... a ) { x ( 'append' , arguments , 2 ) ; this . # d . push ( f ( ... a ) ) }
delete ( a ) { x ( 'delete' , arguments , 1 ) ; a += '' ; this . # d = this . # d . filter ( ( [ b ] ) => b !== a ) }
get ( a ) { x ( 'get' , arguments , 1 ) ; a += '' ; for ( var b = this . # d , l = b . length , c = 0 ; c < l ; c ++ ) if ( b [ c ] [ 0 ] === a ) return b [ c ] [ 1 ] ; return null }
getAll ( a , b ) { x ( 'getAll' , arguments , 1 ) ; b = [ ] ; a += '' ; this . # d . forEach ( c => c [ 0 ] === a && b . push ( c [ 1 ] ) ) ; return b }
has ( a ) { x ( 'has' , arguments , 1 ) ; a += '' ; return this . # d . some ( b => b [ 0 ] === a ) }
forEach ( a , b ) { x ( 'forEach' , arguments , 1 ) ; for ( var [ c , d ] of this ) a . call ( b , d , c , this ) }
set ( ... a ) { x ( 'set' , arguments , 2 ) ; var b = [ ] , c = ! 0 ; a = f ( ... a ) ; this . # d . forEach ( d => { d [ 0 ] === a [ 0 ] ? c && ( c = ! b . push ( a ) ) : b . push ( d ) } ) ; c && b . push ( a ) ; this . # d = b }
* entries ( ) { yield * this . # d }
* keys ( ) { for ( var [ a ] of this ) yield a }
* values ( ) { for ( var [ , a ] of this ) yield a } }
/** @param {FormData} F */
function formDataToBlob ( F , B = fetch _blob _ _WEBPACK _IMPORTED _MODULE _0 _ _ /* ["default"] */ . Z ) {
var b = ` ${ r ( ) } ${ r ( ) } ` . replace ( /\./g , '' ) . slice ( - 28 ) . padStart ( 32 , '-' ) , c = [ ] , p = ` -- ${ b } \r \n Content-Disposition: form-data; name=" `
F . forEach ( ( v , n ) => typeof v == 'string'
? c . push ( p + e ( n ) + ` " \r \n \r \n ${ v . replace ( /\r(?!\n)|(?<!\r)\n/g , '\r\n' ) } \r \n ` )
: c . push ( p + e ( n ) + ` "; filename=" ${ e ( v . name , 1 ) } " \r \n Content-Type: ${ v . type || "application/octet-stream" } \r \n \r \n ` , v , '\r\n' ) )
c . push ( ` -- ${ b } -- ` )
return new B ( c , { type : "multipart/form-data; boundary=" + b } ) }
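// Illustrative usage sketch (not from the original source; Blob here stands for the bundled fetch-blob
// class or a compatible native Blob): formDataToBlob serializes a FormData instance into a
// multipart/form-data blob with a random boundary, suitable as a fetch request body.
/*
const fd = new FormData();
fd.append('name', 'value');
fd.append('file', new Blob(['payload'], { type: 'text/plain' }), 'payload.txt');
const body = formDataToBlob(fd);
body.type; // 'multipart/form-data; boundary=...' with the generated boundary
*/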
/***/ } ) ,
/***/ 4429 :
/***/ ( ( _ _unused _webpack _ _ _webpack _module _ _ , _ _webpack _exports _ _ , _ _nccwpck _require _ _ ) => {
"use strict" ;
// ESM COMPAT FLAG
_ _nccwpck _require _ _ . r ( _ _webpack _exports _ _ ) ;
// EXPORTS
_ _nccwpck _require _ _ . d ( _ _webpack _exports _ _ , {
"AbortError" : ( ) => ( /* reexport */ AbortError ) ,
"Blob" : ( ) => ( /* reexport */ from /* Blob */ . t6 ) ,
"FetchError" : ( ) => ( /* reexport */ FetchError ) ,
"File" : ( ) => ( /* reexport */ from /* File */ . $B ) ,
"FormData" : ( ) => ( /* reexport */ esm _min /* FormData */ . Ct ) ,
"Headers" : ( ) => ( /* reexport */ Headers ) ,
"Request" : ( ) => ( /* reexport */ Request ) ,
"Response" : ( ) => ( /* reexport */ Response ) ,
"blobFrom" : ( ) => ( /* reexport */ from /* blobFrom */ . xB ) ,
"blobFromSync" : ( ) => ( /* reexport */ from /* blobFromSync */ . SX ) ,
"default" : ( ) => ( /* binding */ fetch ) ,
"fileFrom" : ( ) => ( /* reexport */ from /* fileFrom */ . e2 ) ,
"fileFromSync" : ( ) => ( /* reexport */ from /* fileFromSync */ . RA ) ,
"isRedirect" : ( ) => ( /* reexport */ isRedirect )
} ) ;
; // CONCATENATED MODULE: external "node:http"
const external _node _http _namespaceObject = require ( "node:http" ) ;
; // CONCATENATED MODULE: external "node:https"
const external _node _https _namespaceObject = require ( "node:https" ) ;
; // CONCATENATED MODULE: external "node:zlib"
const external _node _zlib _namespaceObject = require ( "node:zlib" ) ;
; // CONCATENATED MODULE: external "node:stream"
const external _node _stream _namespaceObject = require ( "node:stream" ) ;
; // CONCATENATED MODULE: external "node:buffer"
const external _node _buffer _namespaceObject = require ( "node:buffer" ) ;
; // CONCATENATED MODULE: ./node_modules/data-uri-to-buffer/dist/index.js
/ * *
* Returns a ` Buffer ` instance from the given data URI ` uri ` .
*
* @ param { String } uri Data URI to turn into a Buffer instance
* @ returns { Buffer } Buffer instance from Data URI
* @ api public
* /
function dataUriToBuffer ( uri ) {
if ( ! /^data:/i . test ( uri ) ) {
throw new TypeError ( '`uri` does not appear to be a Data URI (must begin with "data:")' ) ;
}
// strip newlines
uri = uri . replace ( /\r?\n/g , '' ) ;
// split the URI up into the "metadata" and the "data" portions
const firstComma = uri . indexOf ( ',' ) ;
if ( firstComma === - 1 || firstComma <= 4 ) {
throw new TypeError ( 'malformed data: URI' ) ;
}
// remove the "data:" scheme and parse the metadata
const meta = uri . substring ( 5 , firstComma ) . split ( ';' ) ;
let charset = '' ;
let base64 = false ;
const type = meta [ 0 ] || 'text/plain' ;
let typeFull = type ;
for ( let i = 1 ; i < meta . length ; i ++ ) {
if ( meta [ i ] === 'base64' ) {
base64 = true ;
}
else {
typeFull += ` ; ${ meta [ i ] } ` ;
if ( meta [ i ] . indexOf ( 'charset=' ) === 0 ) {
charset = meta [ i ] . substring ( 8 ) ;
}
}
}
// defaults to US-ASCII only if type is not provided
if ( ! meta [ 0 ] && ! charset . length ) {
typeFull += ';charset=US-ASCII' ;
charset = 'US-ASCII' ;
}
// get the encoded data portion and decode URI-encoded chars
const encoding = base64 ? 'base64' : 'ascii' ;
const data = unescape ( uri . substring ( firstComma + 1 ) ) ;
const buffer = Buffer . from ( data , encoding ) ;
// set `.type` and `.typeFull` properties to MIME type
buffer . type = type ;
buffer . typeFull = typeFull ;
// set the `.charset` property
buffer . charset = charset ;
return buffer ;
}
/* harmony default export */ const dist = ( dataUriToBuffer ) ;
//# sourceMappingURL=index.js.map
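// Illustrative usage sketch (editorial comment, not part of the generated bundle): decoding a
// data: URI with dataUriToBuffer() above. The URI below is a hypothetical example.
//   const buf = dataUriToBuffer('data:text/plain;charset=utf-8;base64,SGVsbG8=');
//   buf.toString();  // 'Hello'
//   buf.type;        // 'text/plain'
//   buf.typeFull;    // 'text/plain;charset=utf-8'
//   buf.charset;     // 'utf-8'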
; // CONCATENATED MODULE: external "node:util"
const external _node _util _namespaceObject = require ( "node:util" ) ;
// EXTERNAL MODULE: ./node_modules/fetch-blob/index.js
var fetch _blob = _ _nccwpck _require _ _ ( 1410 ) ;
// EXTERNAL MODULE: ./node_modules/formdata-polyfill/esm.min.js
var esm _min = _ _nccwpck _require _ _ ( 8010 ) ;
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/base.js
class FetchBaseError extends Error {
constructor ( message , type ) {
super ( message ) ;
// Hide custom error implementation details from end-users
Error . captureStackTrace ( this , this . constructor ) ;
this . type = type ;
}
get name ( ) {
return this . constructor . name ;
}
get [ Symbol . toStringTag ] ( ) {
return this . constructor . name ;
}
}
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/fetch-error.js
/ * *
* @ typedef { { address ? : string , code : string , dest ? : string , errno : number , info ? : object , message : string , path ? : string , port ? : number , syscall : string } } SystemError
* /
/ * *
* FetchError interface for operational errors
* /
class FetchError extends FetchBaseError {
/ * *
* @ param { string } message - Error message for human
* @ param { string } [ type ] - Error type for machine
* @ param { SystemError } [ systemError ] - For Node . js system error
* /
constructor ( message , type , systemError ) {
super ( message , type ) ;
// When err.type is `system`, err.erroredSysCall contains system error and err.code contains system error code
if ( systemError ) {
// eslint-disable-next-line no-multi-assign
this . code = this . errno = systemError . code ;
this . erroredSysCall = systemError . syscall ;
}
}
}
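// Illustrative sketch (editorial comment): how FetchError mirrors a Node.js system error.
// The systemError object below is hypothetical, shaped like the SystemError typedef above.
//   const err = new FetchError('request to http://localhost:9999/ failed', 'system',
//     { code: 'ECONNREFUSED', errno: -111, syscall: 'connect', message: 'connect ECONNREFUSED' });
//   err.name;            // 'FetchError'
//   err.type;            // 'system'
//   err.code;            // 'ECONNREFUSED' (also copied onto err.errno)
//   err.erroredSysCall;  // 'connect'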
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/is.js
/ * *
* Is . js
*
* Object type checks .
* /
const NAME = Symbol . toStringTag ;
/ * *
* Check if ` obj ` is a URLSearchParams object
* ref : https : //github.com/node-fetch/node-fetch/issues/296#issuecomment-307598143
* @ param { * } object - Object to check for
* @ return { boolean }
* /
const isURLSearchParameters = object => {
return (
typeof object === 'object' &&
typeof object . append === 'function' &&
typeof object . delete === 'function' &&
typeof object . get === 'function' &&
typeof object . getAll === 'function' &&
typeof object . has === 'function' &&
typeof object . set === 'function' &&
typeof object . sort === 'function' &&
object [ NAME ] === 'URLSearchParams'
) ;
} ;
/ * *
* Check if ` object ` is a W3C ` Blob ` object ( which ` File ` inherits from )
* @ param { * } object - Object to check for
* @ return { boolean }
* /
const isBlob = object => {
return (
object &&
typeof object === 'object' &&
typeof object . arrayBuffer === 'function' &&
typeof object . type === 'string' &&
typeof object . stream === 'function' &&
typeof object . constructor === 'function' &&
/^(Blob|File)$/ . test ( object [ NAME ] )
) ;
} ;
/ * *
* Check if ` obj ` is an instance of AbortSignal .
* @ param { * } object - Object to check for
* @ return { boolean }
* /
const isAbortSignal = object => {
return (
typeof object === 'object' && (
object [ NAME ] === 'AbortSignal' ||
object [ NAME ] === 'EventTarget'
)
) ;
} ;
/ * *
* isDomainOrSubdomain reports whether sub is a subdomain ( or exact match ) of
* the parent domain .
*
* Both domains must already be in canonical form .
* @ param { string | URL } original
* @ param { string | URL } destination
* /
const isDomainOrSubdomain = ( destination , original ) => {
const orig = new URL ( original ) . hostname ;
const dest = new URL ( destination ) . hostname ;
return orig === dest || orig . endsWith ( ` . ${ dest } ` ) ;
} ;
/ * *
* isSameProtocol reports whether the two provided URLs use the same protocol .
*
* Both domains must already be in canonical form .
* @ param { string | URL } original
* @ param { string | URL } destination
* /
const isSameProtocol = ( destination , original ) => {
const orig = new URL ( original ) . protocol ;
const dest = new URL ( destination ) . protocol ;
return orig === dest ;
} ;
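// Illustrative sketch (editorial comment): the checks above rely on duck typing and
// Symbol.toStringTag rather than instanceof, so values created in another realm still match.
// Example inputs are hypothetical.
//   isURLSearchParameters(new URLSearchParams('a=1'));                    // true
//   isAbortSignal(new AbortController().signal);                          // true
//   isDomainOrSubdomain('http://example.com', 'http://api.example.com');  // true (subdomain)
//   isSameProtocol('https://example.com', 'http://example.com');          // false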
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/body.js
/ * *
* Body . js
*
* Body interface provides common methods for Request and Response
* /
const pipeline = ( 0 , external _node _util _namespaceObject . promisify ) ( external _node _stream _namespaceObject . pipeline ) ;
const INTERNALS = Symbol ( 'Body internals' ) ;
/ * *
* Body mixin
*
* Ref : https : //fetch.spec.whatwg.org/#body
*
* @ param Stream body Readable stream
* @ param Object opts Response options
* @ return Void
* /
class Body {
constructor ( body , {
size = 0
} = { } ) {
let boundary = null ;
if ( body === null ) {
// Body is undefined or null
body = null ;
} else if ( isURLSearchParameters ( body ) ) {
// Body is a URLSearchParams
body = external _node _buffer _namespaceObject . Buffer . from ( body . toString ( ) ) ;
} else if ( isBlob ( body ) ) {
// Body is blob
} else if ( external _node _buffer _namespaceObject . Buffer . isBuffer ( body ) ) {
// Body is Buffer
} else if ( external _node _util _namespaceObject . types . isAnyArrayBuffer ( body ) ) {
// Body is ArrayBuffer
body = external _node _buffer _namespaceObject . Buffer . from ( body ) ;
} else if ( ArrayBuffer . isView ( body ) ) {
// Body is ArrayBufferView
body = external _node _buffer _namespaceObject . Buffer . from ( body . buffer , body . byteOffset , body . byteLength ) ;
} else if ( body instanceof external _node _stream _namespaceObject ) {
// Body is stream
} else if ( body instanceof esm _min /* FormData */ . Ct ) {
// Body is FormData
body = ( 0 , esm _min /* formDataToBlob */ . au ) ( body ) ;
boundary = body . type . split ( '=' ) [ 1 ] ;
} else {
// None of the above
// coerce to string then buffer
body = external _node _buffer _namespaceObject . Buffer . from ( String ( body ) ) ;
}
let stream = body ;
if ( external _node _buffer _namespaceObject . Buffer . isBuffer ( body ) ) {
stream = external _node _stream _namespaceObject . Readable . from ( body ) ;
} else if ( isBlob ( body ) ) {
stream = external _node _stream _namespaceObject . Readable . from ( body . stream ( ) ) ;
}
this [ INTERNALS ] = {
body ,
stream ,
boundary ,
disturbed : false ,
error : null
} ;
this . size = size ;
if ( body instanceof external _node _stream _namespaceObject ) {
body . on ( 'error' , error _ => {
const error = error _ instanceof FetchBaseError ?
error _ :
new FetchError ( ` Invalid response body while trying to fetch ${ this . url } : ${ error _ . message } ` , 'system' , error _ ) ;
this [ INTERNALS ] . error = error ;
} ) ;
}
}
get body ( ) {
return this [ INTERNALS ] . stream ;
}
get bodyUsed ( ) {
return this [ INTERNALS ] . disturbed ;
}
/ * *
* Decode response as ArrayBuffer
*
* @ return Promise
* /
async arrayBuffer ( ) {
const { buffer , byteOffset , byteLength } = await consumeBody ( this ) ;
return buffer . slice ( byteOffset , byteOffset + byteLength ) ;
}
async formData ( ) {
const ct = this . headers . get ( 'content-type' ) ;
if ( ct . startsWith ( 'application/x-www-form-urlencoded' ) ) {
const formData = new esm _min /* FormData */ . Ct ( ) ;
const parameters = new URLSearchParams ( await this . text ( ) ) ;
for ( const [ name , value ] of parameters ) {
formData . append ( name , value ) ;
}
return formData ;
}
const { toFormData } = await _ _nccwpck _require _ _ . e ( /* import() */ 37 ) . then ( _ _nccwpck _require _ _ . bind ( _ _nccwpck _require _ _ , 4037 ) ) ;
return toFormData ( this . body , ct ) ;
}
/ * *
* Return raw response as Blob
*
* @ return Promise
* /
async blob ( ) {
const ct = ( this . headers && this . headers . get ( 'content-type' ) ) || ( this [ INTERNALS ] . body && this [ INTERNALS ] . body . type ) || '' ;
const buf = await this . arrayBuffer ( ) ;
return new fetch _blob /* default */ . Z ( [ buf ] , {
type : ct
} ) ;
}
/ * *
* Decode response as json
*
* @ return Promise
* /
async json ( ) {
const text = await this . text ( ) ;
return JSON . parse ( text ) ;
}
/ * *
* Decode response as text
*
* @ return Promise
* /
async text ( ) {
const buffer = await consumeBody ( this ) ;
return new TextDecoder ( ) . decode ( buffer ) ;
}
/ * *
* Decode response as buffer ( non - spec api )
*
* @ return Promise
* /
buffer ( ) {
return consumeBody ( this ) ;
}
}
Body . prototype . buffer = ( 0 , external _node _util _namespaceObject . deprecate ) ( Body . prototype . buffer , 'Please use \'response.arrayBuffer()\' instead of \'response.buffer()\'' , 'node-fetch#buffer' ) ;
// In browsers, all properties are enumerable.
Object . defineProperties ( Body . prototype , {
body : { enumerable : true } ,
bodyUsed : { enumerable : true } ,
arrayBuffer : { enumerable : true } ,
blob : { enumerable : true } ,
json : { enumerable : true } ,
text : { enumerable : true } ,
data : { get : ( 0 , external _node _util _namespaceObject . deprecate ) ( ( ) => { } ,
'data doesn\'t exist, use json(), text(), arrayBuffer(), or body instead' ,
'https://github.com/node-fetch/node-fetch/issues/1000 (response)' ) }
} ) ;
/ * *
* Consume and convert an entire Body to a Buffer .
*
* Ref : https : //fetch.spec.whatwg.org/#concept-body-consume-body
*
* @ return Promise
* /
async function consumeBody ( data ) {
if ( data [ INTERNALS ] . disturbed ) {
throw new TypeError ( ` body used already for: ${ data . url } ` ) ;
}
data [ INTERNALS ] . disturbed = true ;
if ( data [ INTERNALS ] . error ) {
throw data [ INTERNALS ] . error ;
}
const { body } = data ;
// Body is null
if ( body === null ) {
return external _node _buffer _namespaceObject . Buffer . alloc ( 0 ) ;
}
/* c8 ignore next 3 */
if ( ! ( body instanceof external _node _stream _namespaceObject ) ) {
return external _node _buffer _namespaceObject . Buffer . alloc ( 0 ) ;
}
// Body is stream
// get ready to actually consume the body
const accum = [ ] ;
let accumBytes = 0 ;
try {
for await ( const chunk of body ) {
if ( data . size > 0 && accumBytes + chunk . length > data . size ) {
const error = new FetchError ( ` content size at ${ data . url } over limit: ${ data . size } ` , 'max-size' ) ;
body . destroy ( error ) ;
throw error ;
}
accumBytes += chunk . length ;
accum . push ( chunk ) ;
}
} catch ( error ) {
const error _ = error instanceof FetchBaseError ? error : new FetchError ( ` Invalid response body while trying to fetch ${ data . url } : ${ error . message } ` , 'system' , error ) ;
throw error _ ;
}
if ( body . readableEnded === true || body . _readableState . ended === true ) {
try {
if ( accum . every ( c => typeof c === 'string' ) ) {
return external _node _buffer _namespaceObject . Buffer . from ( accum . join ( '' ) ) ;
}
return external _node _buffer _namespaceObject . Buffer . concat ( accum , accumBytes ) ;
} catch ( error ) {
throw new FetchError ( ` Could not create Buffer from response body for ${ data . url } : ${ error . message } ` , 'system' , error ) ;
}
} else {
throw new FetchError ( ` Premature close of server response while trying to fetch ${ data . url } ` ) ;
}
}
/ * *
* Clone body given Res / Req instance
*
* @ param Mixed instance Response or Request instance
* @ param String highWaterMark highWaterMark for both PassThrough body streams
* @ return Mixed
* /
const clone = ( instance , highWaterMark ) => {
let p1 ;
let p2 ;
let { body } = instance [ INTERNALS ] ;
// Don't allow cloning a used body
if ( instance . bodyUsed ) {
throw new Error ( 'cannot clone body after it is used' ) ;
}
// Check that body is a stream and not form-data object
// note: we can't clone the form-data object without having it as a dependency
if ( ( body instanceof external _node _stream _namespaceObject ) && ( typeof body . getBoundary !== 'function' ) ) {
// Tee instance body
p1 = new external _node _stream _namespaceObject . PassThrough ( { highWaterMark } ) ;
p2 = new external _node _stream _namespaceObject . PassThrough ( { highWaterMark } ) ;
body . pipe ( p1 ) ;
body . pipe ( p2 ) ;
// Set instance body to teed body and return the other teed body
instance [ INTERNALS ] . stream = p1 ;
body = p2 ;
}
return body ;
} ;
const getNonSpecFormDataBoundary = ( 0 , external _node _util _namespaceObject . deprecate ) (
body => body . getBoundary ( ) ,
'form-data doesn\'t follow the spec and requires special treatment. Use alternative package' ,
'https://github.com/node-fetch/node-fetch/issues/1167'
) ;
/ * *
* Performs the operation "extract a `Content-Type` value from |object|" as
* specified in the specification :
* https : //fetch.spec.whatwg.org/#concept-bodyinit-extract
*
* This function assumes that instance . body is present .
*
* @ param { any } body Any options . body input
* @ returns { string | null }
* /
const extractContentType = ( body , request ) => {
// Body is null or undefined
if ( body === null ) {
return null ;
}
// Body is string
if ( typeof body === 'string' ) {
return 'text/plain;charset=UTF-8' ;
}
// Body is a URLSearchParams
if ( isURLSearchParameters ( body ) ) {
return 'application/x-www-form-urlencoded;charset=UTF-8' ;
}
// Body is blob
if ( isBlob ( body ) ) {
return body . type || null ;
}
// Body is a Buffer (Buffer, ArrayBuffer or ArrayBufferView)
if ( external _node _buffer _namespaceObject . Buffer . isBuffer ( body ) || external _node _util _namespaceObject . types . isAnyArrayBuffer ( body ) || ArrayBuffer . isView ( body ) ) {
return null ;
}
if ( body instanceof esm _min /* FormData */ . Ct ) {
return ` multipart/form-data; boundary= ${ request [ INTERNALS ] . boundary } ` ;
}
// Detect form data input from form-data module
if ( body && typeof body . getBoundary === 'function' ) {
return ` multipart/form-data;boundary= ${ getNonSpecFormDataBoundary ( body ) } ` ;
}
// Body is stream - can't really do much about this
if ( body instanceof external _node _stream _namespaceObject ) {
return null ;
}
// Body constructor defaults other things to string
return 'text/plain;charset=UTF-8' ;
} ;
/ * *
* The Fetch Standard treats this as if "total bytes" is a property on the body .
* For us , we have to explicitly get it with a function .
*
* ref : https : //fetch.spec.whatwg.org/#concept-body-total-bytes
*
* @ param { any } obj . body Body object from the Body instance .
* @ returns { number | null }
* /
const getTotalBytes = request => {
const { body } = request [ INTERNALS ] ;
// Body is null or undefined
if ( body === null ) {
return 0 ;
}
// Body is Blob
if ( isBlob ( body ) ) {
return body . size ;
}
// Body is Buffer
if ( external _node _buffer _namespaceObject . Buffer . isBuffer ( body ) ) {
return body . length ;
}
// Detect form data input from form-data module
if ( body && typeof body . getLengthSync === 'function' ) {
return body . hasKnownLength && body . hasKnownLength ( ) ? body . getLengthSync ( ) : null ;
}
// Body is stream
return null ;
} ;
/ * *
* Write a Body to a Node . js WritableStream ( e . g . http . Request ) object .
*
* @ param { Stream . Writable } dest The stream to write to .
* @ param obj . body Body object from the Body instance .
* @ returns { Promise < void > }
* /
const writeToStream = async ( dest , { body } ) => {
if ( body === null ) {
// Body is null
dest . end ( ) ;
} else {
// Body is stream
await pipeline ( body , dest ) ;
}
} ;
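// Illustrative sketch (editorial comment): what the Body helpers above infer for a few common
// inputs; values shown are what extractContentType() returns (its second argument is only
// consulted for FormData bodies), plus one getTotalBytes() case using the INTERNALS symbol.
//   extractContentType('hello');                    // 'text/plain;charset=UTF-8'
//   extractContentType(new URLSearchParams('a=1')); // 'application/x-www-form-urlencoded;charset=UTF-8'
//   extractContentType(Buffer.from('hello'));       // null — callers must set Content-Type themselves
//   getTotalBytes({ [INTERNALS]: { body: Buffer.from('hello') } });  // 5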
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/headers.js
/ * *
* Headers . js
*
* Headers class offers convenient helpers
* /
/* c8 ignore next 9 */
const validateHeaderName = typeof external _node _http _namespaceObject . validateHeaderName === 'function' ?
external _node _http _namespaceObject . validateHeaderName :
name => {
if ( ! /^[\^`\-\w!#$%&'*+.|~]+$/ . test ( name ) ) {
const error = new TypeError ( ` Header name must be a valid HTTP token [ ${ name } ] ` ) ;
Object . defineProperty ( error , 'code' , { value : 'ERR_INVALID_HTTP_TOKEN' } ) ;
throw error ;
}
} ;
/* c8 ignore next 9 */
const validateHeaderValue = typeof external _node _http _namespaceObject . validateHeaderValue === 'function' ?
external _node _http _namespaceObject . validateHeaderValue :
( name , value ) => {
if ( /[^\t\u0020-\u007E\u0080-\u00FF]/ . test ( value ) ) {
const error = new TypeError ( ` Invalid character in header content [" ${ name } "] ` ) ;
Object . defineProperty ( error , 'code' , { value : 'ERR_INVALID_CHAR' } ) ;
throw error ;
}
} ;
/ * *
* @ typedef { Headers | Record < string , string > | Iterable < readonly [ string , string ] > | Iterable < Iterable < string >> } HeadersInit
* /
/ * *
* This Fetch API interface allows you to perform various actions on HTTP request and response headers .
* These actions include retrieving , setting , adding to , and removing .
* A Headers object has an associated header list , which is initially empty and consists of zero or more name and value pairs .
* You can add to this using methods like append ( ) ( see Examples . )
* In all methods of this interface , header names are matched by case - insensitive byte sequence .
*
* /
class Headers extends URLSearchParams {
/ * *
* Headers class
*
* @ constructor
* @ param { HeadersInit } [ init ] - Response headers
* /
constructor ( init ) {
// Validate and normalize init object in [name, value(s)][]
/** @type {string[][]} */
let result = [ ] ;
if ( init instanceof Headers ) {
const raw = init . raw ( ) ;
for ( const [ name , values ] of Object . entries ( raw ) ) {
result . push ( ... values . map ( value => [ name , value ] ) ) ;
}
} else if ( init == null ) { // eslint-disable-line no-eq-null, eqeqeq
// No op
} else if ( typeof init === 'object' && ! external _node _util _namespaceObject . types . isBoxedPrimitive ( init ) ) {
const method = init [ Symbol . iterator ] ;
// eslint-disable-next-line no-eq-null, eqeqeq
if ( method == null ) {
// Record<ByteString, ByteString>
result . push ( ... Object . entries ( init ) ) ;
} else {
if ( typeof method !== 'function' ) {
throw new TypeError ( 'Header pairs must be iterable' ) ;
}
// Sequence<sequence<ByteString>>
// Note: per spec we have to first exhaust the lists then process them
result = [ ... init ]
. map ( pair => {
if (
typeof pair !== 'object' || external _node _util _namespaceObject . types . isBoxedPrimitive ( pair )
) {
throw new TypeError ( 'Each header pair must be an iterable object' ) ;
}
return [ ... pair ] ;
} ) . map ( pair => {
if ( pair . length !== 2 ) {
throw new TypeError ( 'Each header pair must be a name/value tuple' ) ;
}
return [ ... pair ] ;
} ) ;
}
} else {
throw new TypeError ( 'Failed to construct \'Headers\': The provided value is not of type \'(sequence<sequence<ByteString>> or record<ByteString, ByteString>)' ) ;
}
// Validate and lowercase
result =
result . length > 0 ?
result . map ( ( [ name , value ] ) => {
validateHeaderName ( name ) ;
validateHeaderValue ( name , String ( value ) ) ;
return [ String ( name ) . toLowerCase ( ) , String ( value ) ] ;
} ) :
undefined ;
super ( result ) ;
// Returning a Proxy that will lowercase key names, validate parameters and sort keys
// eslint-disable-next-line no-constructor-return
return new Proxy ( this , {
get ( target , p , receiver ) {
switch ( p ) {
case 'append' :
case 'set' :
return ( name , value ) => {
validateHeaderName ( name ) ;
validateHeaderValue ( name , String ( value ) ) ;
return URLSearchParams . prototype [ p ] . call (
target ,
String ( name ) . toLowerCase ( ) ,
String ( value )
) ;
} ;
case 'delete' :
case 'has' :
case 'getAll' :
return name => {
validateHeaderName ( name ) ;
return URLSearchParams . prototype [ p ] . call (
target ,
String ( name ) . toLowerCase ( )
) ;
} ;
case 'keys' :
return ( ) => {
target . sort ( ) ;
return new Set ( URLSearchParams . prototype . keys . call ( target ) ) . keys ( ) ;
} ;
default :
return Reflect . get ( target , p , receiver ) ;
}
}
} ) ;
/* c8 ignore next */
}
get [ Symbol . toStringTag ] ( ) {
return this . constructor . name ;
}
toString ( ) {
return Object . prototype . toString . call ( this ) ;
}
get ( name ) {
const values = this . getAll ( name ) ;
if ( values . length === 0 ) {
return null ;
}
let value = values . join ( ', ' ) ;
if ( /^content-encoding$/i . test ( name ) ) {
value = value . toLowerCase ( ) ;
}
return value ;
}
forEach ( callback , thisArg = undefined ) {
for ( const name of this . keys ( ) ) {
Reflect . apply ( callback , thisArg , [ this . get ( name ) , name , this ] ) ;
}
}
* values ( ) {
for ( const name of this . keys ( ) ) {
yield this . get ( name ) ;
}
}
/ * *
* @ type { ( ) => IterableIterator < [ string , string ] > }
* /
* entries ( ) {
for ( const name of this . keys ( ) ) {
yield [ name , this . get ( name ) ] ;
}
}
[ Symbol . iterator ] ( ) {
return this . entries ( ) ;
}
/ * *
* Node - fetch non - spec method
* returning all headers and their values as array
* @ returns { Record < string , string [ ] > }
* /
raw ( ) {
return [ ... this . keys ( ) ] . reduce ( ( result , key ) => {
result [ key ] = this . getAll ( key ) ;
return result ;
} , { } ) ;
}
/ * *
* For better console . log ( headers ) and also to convert Headers into Node . js Request compatible format
* /
[ Symbol . for ( 'nodejs.util.inspect.custom' ) ] ( ) {
return [ ... this . keys ( ) ] . reduce ( ( result , key ) => {
const values = this . getAll ( key ) ;
// Http.request() only supports string as Host header.
// This hack makes specifying custom Host header possible.
if ( key === 'host' ) {
result [ key ] = values [ 0 ] ;
} else {
result [ key ] = values . length > 1 ? values : values [ 0 ] ;
}
return result ;
} , { } ) ;
}
}
/ * *
* Re - shaping object for Web IDL tests
* Only need to do it for overridden methods
* /
Object . defineProperties (
Headers . prototype ,
[ 'get' , 'entries' , 'forEach' , 'values' ] . reduce ( ( result , property ) => {
result [ property ] = { enumerable : true } ;
return result ;
} , { } )
) ;
/ * *
* Create a Headers object from an http . IncomingMessage . rawHeaders , ignoring those that do
* not conform to HTTP grammar productions .
* @ param { import ( 'http' ) . IncomingMessage [ 'rawHeaders' ] } headers
* /
function fromRawHeaders ( headers = [ ] ) {
return new Headers (
headers
// Split into pairs
. reduce ( ( result , value , index , array ) => {
if ( index % 2 === 0 ) {
result . push ( array . slice ( index , index + 2 ) ) ;
}
return result ;
} , [ ] )
. filter ( ( [ name , value ] ) => {
try {
validateHeaderName ( name ) ;
validateHeaderValue ( name , String ( value ) ) ;
return true ;
} catch {
return false ;
}
} )
) ;
}
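// Illustrative sketch (editorial comment): the Headers class above keeps entries in a
// URLSearchParams backing store, lower-cases names and joins duplicate values on get().
// Header names and values below are hypothetical.
//   const h = new Headers({ 'X-Token': 'abc' });
//   h.append('Accept', 'text/html');
//   h.append('Accept', 'application/json');
//   h.get('accept');   // 'text/html, application/json'
//   h.raw();           // { accept: ['text/html', 'application/json'], 'x-token': ['abc'] }
//   fromRawHeaders(['Host', 'example.com', 'Bad Name', 'x']).get('host');  // 'example.com'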
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/is-redirect.js
const redirectStatus = new Set ( [ 301 , 302 , 303 , 307 , 308 ] ) ;
/ * *
* Redirect code matching
*
* @ param { number } code - Status code
* @ return { boolean }
* /
const isRedirect = code => {
return redirectStatus . has ( code ) ;
} ;
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/response.js
/ * *
* Response . js
*
* Response class provides content decoding
* /
const response _INTERNALS = Symbol ( 'Response internals' ) ;
/ * *
* Response class
*
* Ref : https : //fetch.spec.whatwg.org/#response-class
*
* @ param Stream body Readable stream
* @ param Object opts Response options
* @ return Void
* /
class Response extends Body {
constructor ( body = null , options = { } ) {
super ( body , options ) ;
// eslint-disable-next-line no-eq-null, eqeqeq, no-negated-condition
const status = options . status != null ? options . status : 200 ;
const headers = new Headers ( options . headers ) ;
if ( body !== null && ! headers . has ( 'Content-Type' ) ) {
const contentType = extractContentType ( body , this ) ;
if ( contentType ) {
headers . append ( 'Content-Type' , contentType ) ;
}
}
this [ response _INTERNALS ] = {
type : 'default' ,
url : options . url ,
status ,
statusText : options . statusText || '' ,
headers ,
counter : options . counter ,
highWaterMark : options . highWaterMark
} ;
}
get type ( ) {
return this [ response _INTERNALS ] . type ;
}
get url ( ) {
return this [ response _INTERNALS ] . url || '' ;
}
get status ( ) {
return this [ response _INTERNALS ] . status ;
}
/ * *
* Convenience property representing if the request ended normally
* /
get ok ( ) {
return this [ response _INTERNALS ] . status >= 200 && this [ response _INTERNALS ] . status < 300 ;
}
get redirected ( ) {
return this [ response _INTERNALS ] . counter > 0 ;
}
get statusText ( ) {
return this [ response _INTERNALS ] . statusText ;
}
get headers ( ) {
return this [ response _INTERNALS ] . headers ;
}
get highWaterMark ( ) {
return this [ response _INTERNALS ] . highWaterMark ;
}
/ * *
* Clone this response
*
* @ return Response
* /
clone ( ) {
return new Response ( clone ( this , this . highWaterMark ) , {
type : this . type ,
url : this . url ,
status : this . status ,
statusText : this . statusText ,
headers : this . headers ,
ok : this . ok ,
redirected : this . redirected ,
size : this . size ,
highWaterMark : this . highWaterMark
} ) ;
}
/ * *
* @ param { string } url The URL that the new response is to originate from .
* @ param { number } status An optional status code for the response ( e . g . , 302. )
* @ returns { Response } A Response object .
* /
static redirect ( url , status = 302 ) {
if ( ! isRedirect ( status ) ) {
throw new RangeError ( 'Failed to execute "redirect" on "response": Invalid status code' ) ;
}
return new Response ( null , {
headers : {
location : new URL ( url ) . toString ( )
} ,
status
} ) ;
}
static error ( ) {
const response = new Response ( null , { status : 0 , statusText : '' } ) ;
response [ response _INTERNALS ] . type = 'error' ;
return response ;
}
get [ Symbol . toStringTag ] ( ) {
return 'Response' ;
}
}
Object . defineProperties ( Response . prototype , {
type : { enumerable : true } ,
url : { enumerable : true } ,
status : { enumerable : true } ,
ok : { enumerable : true } ,
redirected : { enumerable : true } ,
statusText : { enumerable : true } ,
headers : { enumerable : true } ,
clone : { enumerable : true }
} ) ;
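// Illustrative sketch (editorial comment): constructing and cloning a Response from a plain
// string body; values below are hypothetical.
//   const res = new Response('{"ok":true}', { status: 201, headers: { 'X-Id': '7' } });
//   res.ok;                           // true (status is within 200–299)
//   res.headers.get('content-type');  // 'text/plain;charset=UTF-8', inferred by extractContentType()
//   const copy = res.clone();         // allowed because the body has not been consumed yet
//   copy.json().then(value => value); // resolves to { ok: true }
//   Response.redirect('https://example.com/', 307).status;  // 307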
; // CONCATENATED MODULE: external "node:url"
const external _node _url _namespaceObject = require ( "node:url" ) ;
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/get-search.js
const getSearch = parsedURL => {
if ( parsedURL . search ) {
return parsedURL . search ;
}
const lastOffset = parsedURL . href . length - 1 ;
const hash = parsedURL . hash || ( parsedURL . href [ lastOffset ] === '#' ? '#' : '' ) ;
return parsedURL . href [ lastOffset - hash . length ] === '?' ? '?' : '' ;
} ;
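// Illustrative sketch (editorial comment): getSearch() keeps a bare trailing '?' that
// URL#search would drop (see node-fetch issue #776). Example URLs are hypothetical.
//   getSearch(new URL('https://example.com/path?a=1'));  // '?a=1'
//   getSearch(new URL('https://example.com/path?'));     // '?'
//   getSearch(new URL('https://example.com/path'));      // ''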
; // CONCATENATED MODULE: external "node:net"
const external _node _net _namespaceObject = require ( "node:net" ) ;
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/utils/referrer.js
/ * *
* @ external URL
* @ see { @ link https : //developer.mozilla.org/en-US/docs/Web/API/URL|URL}
* /
/ * *
* @ module utils / referrer
* @ private
* /
/ * *
* @ see { @ link https : //w3c.github.io/webappsec-referrer-policy/#strip-url|Referrer Policy §8.4. Strip url for use as a referrer}
* @ param { string } URL
* @ param { boolean } [ originOnly = false ]
* /
function stripURLForUseAsAReferrer ( url , originOnly = false ) {
// 1. If url is null, return no referrer.
if ( url == null ) { // eslint-disable-line no-eq-null, eqeqeq
return 'no-referrer' ;
}
url = new URL ( url ) ;
// 2. If url's scheme is a local scheme, then return no referrer.
if ( /^(about|blob|data):$/ . test ( url . protocol ) ) {
return 'no-referrer' ;
}
// 3. Set url's username to the empty string.
url . username = '' ;
// 4. Set url's password to null.
// Note: `null` appears to be a mistake as this actually results in the password being `"null"`.
url . password = '' ;
// 5. Set url's fragment to null.
// Note: `null` appears to be a mistake as this actually results in the fragment being `"#null"`.
url . hash = '' ;
// 6. If the origin-only flag is true, then:
if ( originOnly ) {
// 6.1. Set url's path to null.
// Note: `null` appears to be a mistake as this actually results in the path being `"/null"`.
url . pathname = '' ;
// 6.2. Set url's query to null.
// Note: `null` appears to be a mistake as this actually results in the query being `"?null"`.
url . search = '' ;
}
// 7. Return url.
return url ;
}
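// Illustrative sketch (editorial comment): stripping a referrer URL per §8.4 with the function
// above; the URLs are hypothetical.
//   stripURLForUseAsAReferrer('https://user:pw@example.com/a/b?q=1#frag').toString();
//   // → 'https://example.com/a/b?q=1' (credentials and fragment removed)
//   stripURLForUseAsAReferrer('data:text/plain,hi');  // → 'no-referrer' (local scheme)
//   // With originOnly = true the path and query are cleared as well, leaving just the origin.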
/ * *
* @ see { @ link https : //w3c.github.io/webappsec-referrer-policy/#enumdef-referrerpolicy|enum ReferrerPolicy}
* /
const ReferrerPolicy = new Set ( [
'' ,
'no-referrer' ,
'no-referrer-when-downgrade' ,
'same-origin' ,
'origin' ,
'strict-origin' ,
'origin-when-cross-origin' ,
'strict-origin-when-cross-origin' ,
'unsafe-url'
] ) ;
/ * *
* @ see { @ link https : //w3c.github.io/webappsec-referrer-policy/#default-referrer-policy|default referrer policy}
* /
const DEFAULT _REFERRER _POLICY = 'strict-origin-when-cross-origin' ;
/ * *
* @ see { @ link https : //w3c.github.io/webappsec-referrer-policy/#referrer-policies|Referrer Policy §3. Referrer Policies}
* @ param { string } referrerPolicy
* @ returns { string } referrerPolicy
* /
function validateReferrerPolicy ( referrerPolicy ) {
if ( ! ReferrerPolicy . has ( referrerPolicy ) ) {
throw new TypeError ( ` Invalid referrerPolicy: ${ referrerPolicy } ` ) ;
}
return referrerPolicy ;
}
/ * *
* @ see { @ link https : //w3c.github.io/webappsec-secure-contexts/#is-origin-trustworthy|Referrer Policy §3.2. Is origin potentially trustworthy?}
* @ param { external : URL } url
* @ returns ` true ` : "Potentially Trustworthy" , ` false ` : "Not Trustworthy"
* /
function isOriginPotentiallyTrustworthy ( url ) {
// 1. If origin is an opaque origin, return "Not Trustworthy".
// Not applicable
// 2. Assert: origin is a tuple origin.
// Not for implementations
// 3. If origin's scheme is either "https" or "wss", return "Potentially Trustworthy".
if ( /^(http|ws)s:$/ . test ( url . protocol ) ) {
return true ;
}
// 4. If origin's host component matches one of the CIDR notations 127.0.0.0/8 or ::1/128 [RFC4632], return "Potentially Trustworthy".
const hostIp = url . host . replace ( /(^\[)|(]$)/g , '' ) ;
const hostIPVersion = ( 0 , external _node _net _namespaceObject . isIP ) ( hostIp ) ;
if ( hostIPVersion === 4 && /^127\./ . test ( hostIp ) ) {
return true ;
}
if ( hostIPVersion === 6 && /^(((0+:){7})|(::(0+:){0,6}))0*1$/ . test ( hostIp ) ) {
return true ;
}
// 5. If origin's host component is "localhost" or falls within ".localhost", and the user agent conforms to the name resolution rules in [let-localhost-be-localhost], return "Potentially Trustworthy".
// We are returning FALSE here because we cannot ensure conformance to
// let-localhost-be-localhost (https://tools.ietf.org/html/draft-west-let-localhost-be-localhost)
if ( url . host === 'localhost' || url . host . endsWith ( '.localhost' ) ) {
return false ;
}
// 6. If origin's scheme component is file, return "Potentially Trustworthy".
if ( url . protocol === 'file:' ) {
return true ;
}
// 7. If origin's scheme component is one which the user agent considers to be authenticated, return "Potentially Trustworthy".
// Not supported
// 8. If origin has been configured as a trustworthy origin, return "Potentially Trustworthy".
// Not supported
// 9. Return "Not Trustworthy".
return false ;
}
/ * *
* @ see { @ link https : //w3c.github.io/webappsec-secure-contexts/#is-url-trustworthy|Referrer Policy §3.3. Is url potentially trustworthy?}
* @ param { external : URL } url
* @ returns ` true ` : "Potentially Trustworthy" , ` false ` : "Not Trustworthy"
* /
function isUrlPotentiallyTrustworthy ( url ) {
// 1. If url is "about:blank" or "about:srcdoc", return "Potentially Trustworthy".
if ( /^about:(blank|srcdoc)$/ . test ( url ) ) {
return true ;
}
// 2. If url's scheme is "data", return "Potentially Trustworthy".
if ( url . protocol === 'data:' ) {
return true ;
}
// Note: The origin of blob: and filesystem: URLs is the origin of the context in which they were
// created. Therefore, blobs created in a trustworthy origin will themselves be potentially
// trustworthy.
if ( /^(blob|filesystem):$/ . test ( url . protocol ) ) {
return true ;
}
// 3. Return the result of executing §3.2 Is origin potentially trustworthy? on url's origin.
return isOriginPotentiallyTrustworthy ( url ) ;
}
/ * *
* Modifies the referrerURL to enforce any extra security policy considerations .
* @ see { @ link https : //w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7
* @ callback module : utils / referrer ~ referrerURLCallback
* @ param { external : URL } referrerURL
* @ returns { external : URL } modified referrerURL
* /
/ * *
* Modifies the referrerOrigin to enforce any extra security policy considerations .
* @ see { @ link https : //w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}, step 7
* @ callback module : utils / referrer ~ referrerOriginCallback
* @ param { external : URL } referrerOrigin
* @ returns { external : URL } modified referrerOrigin
* /
/ * *
* @ see { @ link https : //w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer|Referrer Policy §8.3. Determine request's Referrer}
* @ param { Request } request
* @ param { object } o
* @ param { module : utils / referrer ~ referrerURLCallback } o . referrerURLCallback
* @ param { module : utils / referrer ~ referrerOriginCallback } o . referrerOriginCallback
* @ returns { external : URL } Request ' s referrer
* /
function determineRequestsReferrer ( request , { referrerURLCallback , referrerOriginCallback } = { } ) {
// There are 2 notes in the specification about invalid pre-conditions. We return null, here, for
// these cases:
// > Note: If request's referrer is "no-referrer", Fetch will not call into this algorithm.
// > Note: If request's referrer policy is the empty string, Fetch will not call into this
// > algorithm.
if ( request . referrer === 'no-referrer' || request . referrerPolicy === '' ) {
return null ;
}
// 1. Let policy be request's associated referrer policy.
const policy = request . referrerPolicy ;
// 2. Let environment be request's client.
// not applicable to node.js
// 3. Switch on request's referrer:
if ( request . referrer === 'about:client' ) {
return 'no-referrer' ;
}
// "a URL": Let referrerSource be request's referrer.
const referrerSource = request . referrer ;
// 4. Let request's referrerURL be the result of stripping referrerSource for use as a referrer.
let referrerURL = stripURLForUseAsAReferrer ( referrerSource ) ;
// 5. Let referrerOrigin be the result of stripping referrerSource for use as a referrer, with the
// origin-only flag set to true.
let referrerOrigin = stripURLForUseAsAReferrer ( referrerSource , true ) ;
// 6. If the result of serializing referrerURL is a string whose length is greater than 4096, set
// referrerURL to referrerOrigin.
if ( referrerURL . toString ( ) . length > 4096 ) {
referrerURL = referrerOrigin ;
}
// 7. The user agent MAY alter referrerURL or referrerOrigin at this point to enforce arbitrary
// policy considerations in the interests of minimizing data leakage. For example, the user
// agent could strip the URL down to an origin, modify its host, replace it with an empty
// string, etc.
if ( referrerURLCallback ) {
referrerURL = referrerURLCallback ( referrerURL ) ;
}
if ( referrerOriginCallback ) {
referrerOrigin = referrerOriginCallback ( referrerOrigin ) ;
}
// 8.Execute the statements corresponding to the value of policy:
const currentURL = new URL ( request . url ) ;
switch ( policy ) {
case 'no-referrer' :
return 'no-referrer' ;
case 'origin' :
return referrerOrigin ;
case 'unsafe-url' :
return referrerURL ;
case 'strict-origin' :
// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a
// potentially trustworthy URL, then return no referrer.
if ( isUrlPotentiallyTrustworthy ( referrerURL ) && ! isUrlPotentiallyTrustworthy ( currentURL ) ) {
return 'no-referrer' ;
}
// 2. Return referrerOrigin.
return referrerOrigin . toString ( ) ;
case 'strict-origin-when-cross-origin' :
// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
// return referrerURL.
if ( referrerURL . origin === currentURL . origin ) {
return referrerURL ;
}
// 2. If referrerURL is a potentially trustworthy URL and request's current URL is not a
// potentially trustworthy URL, then return no referrer.
if ( isUrlPotentiallyTrustworthy ( referrerURL ) && ! isUrlPotentiallyTrustworthy ( currentURL ) ) {
return 'no-referrer' ;
}
// 3. Return referrerOrigin.
return referrerOrigin ;
case 'same-origin' :
// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
// return referrerURL.
if ( referrerURL . origin === currentURL . origin ) {
return referrerURL ;
}
// 2. Return no referrer.
return 'no-referrer' ;
case 'origin-when-cross-origin' :
// 1. If the origin of referrerURL and the origin of request's current URL are the same, then
// return referrerURL.
if ( referrerURL . origin === currentURL . origin ) {
return referrerURL ;
}
// Return referrerOrigin.
return referrerOrigin ;
case 'no-referrer-when-downgrade' :
// 1. If referrerURL is a potentially trustworthy URL and request's current URL is not a
// potentially trustworthy URL, then return no referrer.
if ( isUrlPotentiallyTrustworthy ( referrerURL ) && ! isUrlPotentiallyTrustworthy ( currentURL ) ) {
return 'no-referrer' ;
}
// 2. Return referrerURL.
return referrerURL ;
default :
throw new TypeError ( ` Invalid referrerPolicy: ${ policy } ` ) ;
}
}
/ * *
* @ see { @ link https : //w3c.github.io/webappsec-referrer-policy/#parse-referrer-policy-from-header|Referrer Policy §8.1. Parse a referrer policy from a Referrer-Policy header}
* @ param { Headers } headers Response headers
* @ returns { string } policy
* /
function parseReferrerPolicyFromHeader ( headers ) {
// 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy`
// and response's header list.
const policyTokens = ( headers . get ( 'referrer-policy' ) || '' ) . split ( /[,\s]+/ ) ;
// 2. Let policy be the empty string.
let policy = '' ;
// 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty
// string, then set policy to token.
// Note: This algorithm loops over multiple policy values to allow deployment of new policy
// values with fallbacks for older user agents, as described in § 11.1 Unknown Policy Values.
for ( const token of policyTokens ) {
if ( token && ReferrerPolicy . has ( token ) ) {
policy = token ;
}
}
// 4. Return policy.
return policy ;
}
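// Illustrative sketch (editorial comment): the last recognised token wins, so servers can send
// a legacy policy followed by a modern one. Header values below are hypothetical.
//   parseReferrerPolicyFromHeader(new Headers({
//     'Referrer-Policy': 'unsafe-url, strict-origin-when-cross-origin'
//   }));                                                  // 'strict-origin-when-cross-origin'
//   parseReferrerPolicyFromHeader(new Headers({ 'Referrer-Policy': 'not-a-policy' }));  // ''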
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/request.js
/ * *
* Request . js
*
* Request class contains server only options
*
* All spec algorithm step numbers are based on https : //fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
* /
const request _INTERNALS = Symbol ( 'Request internals' ) ;
/ * *
* Check if ` obj ` is an instance of Request .
*
* @ param { * } object
* @ return { boolean }
* /
const isRequest = object => {
return (
typeof object === 'object' &&
typeof object [ request _INTERNALS ] === 'object'
) ;
} ;
const doBadDataWarn = ( 0 , external _node _util _namespaceObject . deprecate ) ( ( ) => { } ,
'.data is not a valid RequestInit property, use .body instead' ,
'https://github.com/node-fetch/node-fetch/issues/1000 (request)' ) ;
/ * *
* Request class
*
* Ref : https : //fetch.spec.whatwg.org/#request-class
*
* @ param Mixed input Url or Request instance
* @ param Object init Custom options
* @ return Void
* /
class Request extends Body {
constructor ( input , init = { } ) {
let parsedURL ;
// Normalize input and force URL to be encoded as UTF-8 (https://github.com/node-fetch/node-fetch/issues/245)
if ( isRequest ( input ) ) {
parsedURL = new URL ( input . url ) ;
} else {
parsedURL = new URL ( input ) ;
input = { } ;
}
if ( parsedURL . username !== '' || parsedURL . password !== '' ) {
throw new TypeError ( ` ${ parsedURL } is an url with embedded credentials. ` ) ;
}
let method = init . method || input . method || 'GET' ;
if ( /^(delete|get|head|options|post|put)$/i . test ( method ) ) {
method = method . toUpperCase ( ) ;
}
if ( ! isRequest ( init ) && 'data' in init ) {
doBadDataWarn ( ) ;
}
// eslint-disable-next-line no-eq-null, eqeqeq
if ( ( init . body != null || ( isRequest ( input ) && input . body !== null ) ) &&
( method === 'GET' || method === 'HEAD' ) ) {
throw new TypeError ( 'Request with GET/HEAD method cannot have body' ) ;
}
const inputBody = init . body ?
init . body :
( isRequest ( input ) && input . body !== null ?
clone ( input ) :
null ) ;
super ( inputBody , {
size : init . size || input . size || 0
} ) ;
const headers = new Headers ( init . headers || input . headers || { } ) ;
if ( inputBody !== null && ! headers . has ( 'Content-Type' ) ) {
const contentType = extractContentType ( inputBody , this ) ;
if ( contentType ) {
headers . set ( 'Content-Type' , contentType ) ;
}
}
let signal = isRequest ( input ) ?
input . signal :
null ;
if ( 'signal' in init ) {
signal = init . signal ;
}
// eslint-disable-next-line no-eq-null, eqeqeq
if ( signal != null && ! isAbortSignal ( signal ) ) {
throw new TypeError ( 'Expected signal to be an instanceof AbortSignal or EventTarget' ) ;
}
// §5.4, Request constructor steps, step 15.1
// eslint-disable-next-line no-eq-null, eqeqeq
let referrer = init . referrer == null ? input . referrer : init . referrer ;
if ( referrer === '' ) {
// §5.4, Request constructor steps, step 15.2
referrer = 'no-referrer' ;
} else if ( referrer ) {
// §5.4, Request constructor steps, step 15.3.1, 15.3.2
const parsedReferrer = new URL ( referrer ) ;
// §5.4, Request constructor steps, step 15.3.3, 15.3.4
referrer = /^about:(\/\/)?client$/ . test ( parsedReferrer ) ? 'client' : parsedReferrer ;
} else {
referrer = undefined ;
}
this [ request _INTERNALS ] = {
method ,
redirect : init . redirect || input . redirect || 'follow' ,
headers ,
parsedURL ,
signal ,
referrer
} ;
// Node-fetch-only options
this . follow = init . follow === undefined ? ( input . follow === undefined ? 20 : input . follow ) : init . follow ;
this . compress = init . compress === undefined ? ( input . compress === undefined ? true : input . compress ) : init . compress ;
this . counter = init . counter || input . counter || 0 ;
this . agent = init . agent || input . agent ;
this . highWaterMark = init . highWaterMark || input . highWaterMark || 16384 ;
this . insecureHTTPParser = init . insecureHTTPParser || input . insecureHTTPParser || false ;
// §5.4, Request constructor steps, step 16.
// Default is empty string per https://fetch.spec.whatwg.org/#concept-request-referrer-policy
this . referrerPolicy = init . referrerPolicy || input . referrerPolicy || '' ;
}
/** @returns {string} */
get method ( ) {
return this [ request _INTERNALS ] . method ;
}
/** @returns {string} */
get url ( ) {
return ( 0 , external _node _url _namespaceObject . format ) ( this [ request _INTERNALS ] . parsedURL ) ;
}
/** @returns {Headers} */
get headers ( ) {
return this [ request _INTERNALS ] . headers ;
}
get redirect ( ) {
return this [ request _INTERNALS ] . redirect ;
}
/** @returns {AbortSignal} */
get signal ( ) {
return this [ request _INTERNALS ] . signal ;
}
// https://fetch.spec.whatwg.org/#dom-request-referrer
get referrer ( ) {
if ( this [ request _INTERNALS ] . referrer === 'no-referrer' ) {
return '' ;
}
if ( this [ request _INTERNALS ] . referrer === 'client' ) {
return 'about:client' ;
}
if ( this [ request _INTERNALS ] . referrer ) {
return this [ request _INTERNALS ] . referrer . toString ( ) ;
}
return undefined ;
}
get referrerPolicy ( ) {
return this [ request _INTERNALS ] . referrerPolicy ;
}
set referrerPolicy ( referrerPolicy ) {
this [ request _INTERNALS ] . referrerPolicy = validateReferrerPolicy ( referrerPolicy ) ;
}
/ * *
* Clone this request
*
* @ return Request
* /
clone ( ) {
return new Request ( this ) ;
}
get [ Symbol . toStringTag ] ( ) {
return 'Request' ;
}
}
Object . defineProperties ( Request . prototype , {
method : { enumerable : true } ,
url : { enumerable : true } ,
headers : { enumerable : true } ,
redirect : { enumerable : true } ,
clone : { enumerable : true } ,
signal : { enumerable : true } ,
referrer : { enumerable : true } ,
referrerPolicy : { enumerable : true }
} ) ;
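// Illustrative sketch (editorial comment): building a Request and reading the derived state;
// the URL, header and body values below are hypothetical.
//   const req = new Request('https://example.com/items?page=2', {
//     method: 'post',
//     body: JSON.stringify({ name: 'x' }),
//     headers: { 'X-Trace': '1' }
//   });
//   req.method;                       // 'POST' (normalised to upper case)
//   req.headers.get('content-type');  // 'text/plain;charset=UTF-8', inferred from the string body
//   req.referrerPolicy;               // '' until fetch() applies the default policy
//   getNodeRequestOptions(req).options.method;  // 'POST' (see the helper defined below)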
/ * *
* Convert a Request to Node . js http request options .
*
* @ param { Request } request - A Request instance
* @ return The options object to be passed to http . request
* /
const getNodeRequestOptions = request => {
const { parsedURL } = request [ request _INTERNALS ] ;
const headers = new Headers ( request [ request _INTERNALS ] . headers ) ;
// Fetch step 1.3
if ( ! headers . has ( 'Accept' ) ) {
headers . set ( 'Accept' , '*/*' ) ;
}
// HTTP-network-or-cache fetch steps 2.4-2.7
let contentLengthValue = null ;
if ( request . body === null && /^(post|put)$/i . test ( request . method ) ) {
contentLengthValue = '0' ;
}
if ( request . body !== null ) {
const totalBytes = getTotalBytes ( request ) ;
// Set Content-Length if totalBytes is a number (that is not NaN)
if ( typeof totalBytes === 'number' && ! Number . isNaN ( totalBytes ) ) {
contentLengthValue = String ( totalBytes ) ;
}
}
if ( contentLengthValue ) {
headers . set ( 'Content-Length' , contentLengthValue ) ;
}
// 4.1. Main fetch, step 2.6
// > If request's referrer policy is the empty string, then set request's referrer policy to the
// > default referrer policy.
if ( request . referrerPolicy === '' ) {
request . referrerPolicy = DEFAULT _REFERRER _POLICY ;
}
// 4.1. Main fetch, step 2.7
// > If request's referrer is not "no-referrer", set request's referrer to the result of invoking
// > determine request's referrer.
if ( request . referrer && request . referrer !== 'no-referrer' ) {
request [ request _INTERNALS ] . referrer = determineRequestsReferrer ( request ) ;
} else {
request [ request _INTERNALS ] . referrer = 'no-referrer' ;
}
// 4.5. HTTP-network-or-cache fetch, step 6.9
// > If httpRequest's referrer is a URL, then append `Referer`/httpRequest's referrer, serialized
// > and isomorphic encoded, to httpRequest's header list.
if ( request [ request _INTERNALS ] . referrer instanceof URL ) {
headers . set ( 'Referer' , request . referrer ) ;
}
// HTTP-network-or-cache fetch step 2.11
if ( ! headers . has ( 'User-Agent' ) ) {
headers . set ( 'User-Agent' , 'node-fetch' ) ;
}
// HTTP-network-or-cache fetch step 2.15
if ( request . compress && ! headers . has ( 'Accept-Encoding' ) ) {
headers . set ( 'Accept-Encoding' , 'gzip, deflate, br' ) ;
}
let { agent } = request ;
if ( typeof agent === 'function' ) {
agent = agent ( parsedURL ) ;
}
if ( ! headers . has ( 'Connection' ) && ! agent ) {
headers . set ( 'Connection' , 'close' ) ;
}
// HTTP-network fetch step 4.2
// chunked encoding is handled by Node.js
const search = getSearch ( parsedURL ) ;
// Pass the full URL directly to request(), but overwrite the following
// options:
const options = {
// Overwrite search to retain trailing ? (issue #776)
path : parsedURL . pathname + search ,
// The following options are not expressed in the URL
method : request . method ,
headers : headers [ Symbol . for ( 'nodejs.util.inspect.custom' ) ] ( ) ,
insecureHTTPParser : request . insecureHTTPParser ,
agent
} ;
return {
/** @type {URL} */
parsedURL ,
options
} ;
} ;
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/errors/abort-error.js
/ * *
* AbortError interface for cancelled requests
* /
class AbortError extends FetchBaseError {
constructor ( message , type = 'aborted' ) {
super ( message , type ) ;
}
}
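/*
 * Illustrative sketch (comment only): an AbortError is what a caller observes
 * when a request is cancelled through an AbortSignal, e.g.
 *
 *   const controller = new AbortController();
 *   setTimeout(() => controller.abort(), 150);
 *   try {
 *     await fetch('https://example.com/slow', { signal: controller.signal });
 *   } catch (error) {
 *     if (error.name === 'AbortError') {
 *       // the request was cancelled before a response arrived
 *     }
 *   }
 *
 * The 150 ms timeout and the URL are placeholders.
 */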
// EXTERNAL MODULE: ./node_modules/fetch-blob/from.js + 2 modules
var from = _ _nccwpck _require _ _ ( 2777 ) ;
; // CONCATENATED MODULE: ./node_modules/node-fetch/src/index.js
/ * *
* Index . js
*
* a request API compatible with window . fetch
*
* All spec algorithm step numbers are based on https : //fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
* /
const supportedSchemas = new Set ( [ 'data:' , 'http:' , 'https:' ] ) ;
/ * *
* Fetch function
*
* @ param { string | URL | import ( './request' ) . default } url - Absolute url or Request instance
* @ param { * } [ options _ ] - Fetch options
* @ return { Promise < import ( './response' ) . default > }
* /
async function fetch ( url , options _ ) {
return new Promise ( ( resolve , reject ) => {
// Build request object
const request = new Request ( url , options _ ) ;
const { parsedURL , options } = getNodeRequestOptions ( request ) ;
if ( ! supportedSchemas . has ( parsedURL . protocol ) ) {
throw new TypeError ( ` node-fetch cannot load ${ url } . URL scheme " ${ parsedURL . protocol . replace ( /:$/ , '' ) } " is not supported. ` ) ;
}
if ( parsedURL . protocol === 'data:' ) {
const data = dist ( request . url ) ;
const response = new Response ( data , { headers : { 'Content-Type' : data . typeFull } } ) ;
resolve ( response ) ;
return ;
}
// Wrap http.request into fetch
const send = ( parsedURL . protocol === 'https:' ? external _node _https _namespaceObject : external _node _http _namespaceObject ) . request ;
const { signal } = request ;
let response = null ;
const abort = ( ) => {
const error = new AbortError ( 'The operation was aborted.' ) ;
reject ( error ) ;
if ( request . body && request . body instanceof external _node _stream _namespaceObject . Readable ) {
request . body . destroy ( error ) ;
}
if ( ! response || ! response . body ) {
return ;
}
response . body . emit ( 'error' , error ) ;
} ;
if ( signal && signal . aborted ) {
abort ( ) ;
return ;
}
const abortAndFinalize = ( ) => {
abort ( ) ;
finalize ( ) ;
} ;
// Send request
const request _ = send ( parsedURL . toString ( ) , options ) ;
if ( signal ) {
signal . addEventListener ( 'abort' , abortAndFinalize ) ;
}
const finalize = ( ) => {
request _ . abort ( ) ;
if ( signal ) {
signal . removeEventListener ( 'abort' , abortAndFinalize ) ;
}
} ;
request _ . on ( 'error' , error => {
reject ( new FetchError ( ` request to ${ request . url } failed, reason: ${ error . message } ` , 'system' , error ) ) ;
finalize ( ) ;
} ) ;
fixResponseChunkedTransferBadEnding ( request _ , error => {
if ( response && response . body ) {
response . body . destroy ( error ) ;
}
} ) ;
/* c8 ignore next 18 */
if ( process . version < 'v14' ) {
// Before Node.js 14, pipeline() does not fully support async iterators and does not always
// properly handle when the socket close/end events are out of order.
request _ . on ( 'socket' , s => {
let endedWithEventsCount ;
s . prependListener ( 'end' , ( ) => {
endedWithEventsCount = s . _eventsCount ;
} ) ;
s . prependListener ( 'close' , hadError => {
// if end happened before close but the socket didn't emit an error, do it now
if ( response && endedWithEventsCount < s . _eventsCount && ! hadError ) {
const error = new Error ( 'Premature close' ) ;
error . code = 'ERR_STREAM_PREMATURE_CLOSE' ;
response . body . emit ( 'error' , error ) ;
}
} ) ;
} ) ;
}
request _ . on ( 'response' , response _ => {
request _ . setTimeout ( 0 ) ;
const headers = fromRawHeaders ( response _ . rawHeaders ) ;
// HTTP fetch step 5
if ( isRedirect ( response _ . statusCode ) ) {
// HTTP fetch step 5.2
const location = headers . get ( 'Location' ) ;
// HTTP fetch step 5.3
let locationURL = null ;
try {
locationURL = location === null ? null : new URL ( location , request . url ) ;
} catch {
// error here can only be invalid URL in Location: header
// do not throw when options.redirect == manual
// let the user extract the erroneous redirect URL
if ( request . redirect !== 'manual' ) {
reject ( new FetchError ( ` uri requested responds with an invalid redirect URL: ${ location } ` , 'invalid-redirect' ) ) ;
finalize ( ) ;
return ;
}
}
// HTTP fetch step 5.5
switch ( request . redirect ) {
case 'error' :
reject ( new FetchError ( ` uri requested responds with a redirect, redirect mode is set to error: ${ request . url } ` , 'no-redirect' ) ) ;
finalize ( ) ;
return ;
case 'manual' :
// Nothing to do
break ;
case 'follow' : {
// HTTP-redirect fetch step 2
if ( locationURL === null ) {
break ;
}
// HTTP-redirect fetch step 5
if ( request . counter >= request . follow ) {
reject ( new FetchError ( ` maximum redirect reached at: ${ request . url } ` , 'max-redirect' ) ) ;
finalize ( ) ;
return ;
}
// HTTP-redirect fetch step 6 (counter increment)
// Create a new Request object.
const requestOptions = {
headers : new Headers ( request . headers ) ,
follow : request . follow ,
counter : request . counter + 1 ,
agent : request . agent ,
compress : request . compress ,
method : request . method ,
body : clone ( request ) ,
signal : request . signal ,
size : request . size ,
referrer : request . referrer ,
referrerPolicy : request . referrerPolicy
} ;
// when forwarding sensitive headers like "Authorization",
// "WWW-Authenticate", and "Cookie" to untrusted targets,
// headers will be ignored when following a redirect to a domain
// that is not a subdomain match or exact match of the initial domain.
// For example, a redirect from "foo.com" to either "foo.com" or "sub.foo.com"
// will forward the sensitive headers, but a redirect to "bar.com" will not.
// headers will also be ignored when following a redirect to a domain using
// a different protocol. For example, a redirect from "https://foo.com" to "http://foo.com"
// will not forward the sensitive headers
if ( ! isDomainOrSubdomain ( request . url , locationURL ) || ! isSameProtocol ( request . url , locationURL ) ) {
for ( const name of [ 'authorization' , 'www-authenticate' , 'cookie' , 'cookie2' ] ) {
requestOptions . headers . delete ( name ) ;
}
}
// HTTP-redirect fetch step 9
if ( response _ . statusCode !== 303 && request . body && options _ . body instanceof external _node _stream _namespaceObject . Readable ) {
reject ( new FetchError ( 'Cannot follow redirect with body being a readable stream' , 'unsupported-redirect' ) ) ;
finalize ( ) ;
return ;
}
// HTTP-redirect fetch step 11
if ( response _ . statusCode === 303 || ( ( response _ . statusCode === 301 || response _ . statusCode === 302 ) && request . method === 'POST' ) ) {
requestOptions . method = 'GET' ;
requestOptions . body = undefined ;
requestOptions . headers . delete ( 'content-length' ) ;
}
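// Illustrative note (comment only): per the step above, a POST answered with
// 303 (or with 301/302) is retried as a GET without a body, e.g.
// POST /login -> 302 Location: /dashboard is followed as GET /dashboard with
// the content-length header removed.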
// HTTP-redirect fetch step 14
const responseReferrerPolicy = parseReferrerPolicyFromHeader ( headers ) ;
if ( responseReferrerPolicy ) {
requestOptions . referrerPolicy = responseReferrerPolicy ;
}
// HTTP-redirect fetch step 15
resolve ( fetch ( new Request ( locationURL , requestOptions ) ) ) ;
finalize ( ) ;
return ;
}
default :
return reject ( new TypeError ( ` Redirect option ' ${ request . redirect } ' is not a valid value of RequestRedirect ` ) ) ;
}
}
// Prepare response
if ( signal ) {
response _ . once ( 'end' , ( ) => {
signal . removeEventListener ( 'abort' , abortAndFinalize ) ;
} ) ;
}
let body = ( 0 , external _node _stream _namespaceObject . pipeline ) ( response _ , new external _node _stream _namespaceObject . PassThrough ( ) , error => {
if ( error ) {
reject ( error ) ;
}
} ) ;
// see https://github.com/nodejs/node/pull/29376
/* c8 ignore next 3 */
if ( process . version < 'v12.10' ) {
response _ . on ( 'aborted' , abortAndFinalize ) ;
}
const responseOptions = {
url : request . url ,
status : response _ . statusCode ,
statusText : response _ . statusMessage ,
headers ,
size : request . size ,
counter : request . counter ,
highWaterMark : request . highWaterMark
} ;
// HTTP-network fetch step 12.1.1.3
const codings = headers . get ( 'Content-Encoding' ) ;
// HTTP-network fetch step 12.1.1.4: handle content codings
// in the following scenarios we ignore compression support:
// 1. compression support is disabled
// 2. HEAD request
// 3. no Content-Encoding header
// 4. no content response (204)
// 5. content not modified response (304)
if ( ! request . compress || request . method === 'HEAD' || codings === null || response _ . statusCode === 204 || response _ . statusCode === 304 ) {
response = new Response ( body , responseOptions ) ;
resolve ( response ) ;
return ;
}
// For Node v6+
// Be less strict when decoding compressed responses, since sometimes
// servers send slightly invalid responses that are still accepted
// by common browsers.
// Always using Z_SYNC_FLUSH is what cURL does.
const zlibOptions = {
flush : external _node _zlib _namespaceObject . Z _SYNC _FLUSH ,
finishFlush : external _node _zlib _namespaceObject . Z _SYNC _FLUSH
} ;
// For gzip
if ( codings === 'gzip' || codings === 'x-gzip' ) {
body = ( 0 , external _node _stream _namespaceObject . pipeline ) ( body , external _node _zlib _namespaceObject . createGunzip ( zlibOptions ) , error => {
if ( error ) {
reject ( error ) ;
}
} ) ;
response = new Response ( body , responseOptions ) ;
resolve ( response ) ;
return ;
}
// For deflate
if ( codings === 'deflate' || codings === 'x-deflate' ) {
// Handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
const raw = ( 0 , external _node _stream _namespaceObject . pipeline ) ( response _ , new external _node _stream _namespaceObject . PassThrough ( ) , error => {
if ( error ) {
reject ( error ) ;
}
} ) ;
raw . once ( 'data' , chunk => {
// See http://stackoverflow.com/questions/37519828
if ( ( chunk [ 0 ] & 0x0F ) === 0x08 ) {
body = ( 0 , external _node _stream _namespaceObject . pipeline ) ( body , external _node _zlib _namespaceObject . createInflate ( ) , error => {
if ( error ) {
reject ( error ) ;
}
} ) ;
} else {
body = ( 0 , external _node _stream _namespaceObject . pipeline ) ( body , external _node _zlib _namespaceObject . createInflateRaw ( ) , error => {
if ( error ) {
reject ( error ) ;
}
} ) ;
}
response = new Response ( body , responseOptions ) ;
resolve ( response ) ;
} ) ;
raw . once ( 'end' , ( ) => {
// Some old IIS servers return zero-length OK deflate responses, so
// 'data' is never emitted. See https://github.com/node-fetch/node-fetch/pull/903
if ( ! response ) {
response = new Response ( body , responseOptions ) ;
resolve ( response ) ;
}
} ) ;
return ;
}
// For br
if ( codings === 'br' ) {
body = ( 0 , external _node _stream _namespaceObject . pipeline ) ( body , external _node _zlib _namespaceObject . createBrotliDecompress ( ) , error => {
if ( error ) {
reject ( error ) ;
}
} ) ;
response = new Response ( body , responseOptions ) ;
resolve ( response ) ;
return ;
}
// Otherwise, use response as-is
response = new Response ( body , responseOptions ) ;
resolve ( response ) ;
} ) ;
// eslint-disable-next-line promise/prefer-await-to-then
writeToStream ( request _ , request ) . catch ( reject ) ;
} ) ;
}
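/*
 * Illustrative usage sketch (comment only, not part of the bundle's runtime):
 *
 *   const response = await fetch('https://example.com/api');
 *   if (!response.ok) {
 *     throw new Error(`HTTP ${response.status}`);
 *   }
 *   const payload = await response.json();
 *
 *   // data: URLs are resolved without a network round trip
 *   const inline = await fetch('data:text/plain,hello');
 *   console.log(await inline.text()); // "hello"
 *
 * Network failures reject with a FetchError; non-2xx statuses resolve normally
 * and must be checked via `response.ok`. The example.com URL is a placeholder.
 */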
function fixResponseChunkedTransferBadEnding ( request , errorCallback ) {
const LAST _CHUNK = external _node _buffer _namespaceObject . Buffer . from ( '0\r\n\r\n' ) ;
let isChunkedTransfer = false ;
let properLastChunkReceived = false ;
let previousChunk ;
request . on ( 'response' , response => {
const { headers } = response ;
isChunkedTransfer = headers [ 'transfer-encoding' ] === 'chunked' && ! headers [ 'content-length' ] ;
} ) ;
request . on ( 'socket' , socket => {
const onSocketClose = ( ) => {
if ( isChunkedTransfer && ! properLastChunkReceived ) {
const error = new Error ( 'Premature close' ) ;
error . code = 'ERR_STREAM_PREMATURE_CLOSE' ;
errorCallback ( error ) ;
}
} ;
const onData = buf => {
properLastChunkReceived = external _node _buffer _namespaceObject . Buffer . compare ( buf . slice ( - 5 ) , LAST _CHUNK ) === 0 ;
// Sometimes the final 0-length chunk and the end-of-message bytes arrive in separate packets
if ( ! properLastChunkReceived && previousChunk ) {
properLastChunkReceived = (
external _node _buffer _namespaceObject . Buffer . compare ( previousChunk . slice ( - 3 ) , LAST _CHUNK . slice ( 0 , 3 ) ) === 0 &&
external _node _buffer _namespaceObject . Buffer . compare ( buf . slice ( - 2 ) , LAST _CHUNK . slice ( 3 ) ) === 0
) ;
}
previousChunk = buf ;
} ;
socket . prependListener ( 'close' , onSocketClose ) ;
socket . on ( 'data' , onData ) ;
request . on ( 'close' , ( ) => {
socket . removeListener ( 'close' , onSocketClose ) ;
socket . removeListener ( 'data' , onData ) ;
} ) ;
} ) ;
}
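/*
 * Illustrative note (comment only): a well-formed chunked HTTP/1.1 body ends
 * with a zero-length chunk followed by a blank line, i.e. the bytes
 *
 *   0\r\n
 *   \r\n
 *
 * which is exactly the LAST_CHUNK pattern matched above. If the socket closes
 * before that terminator has been observed, the helper surfaces a synthetic
 * "Premature close" error instead of silently truncating the response body.
 */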
/***/ } )
/******/ } ) ;
/************************************************************************/
/******/ // The module cache
/******/ var _ _webpack _module _cache _ _ = { } ;
/******/
/******/ // The require function
/******/ function _ _nccwpck _require _ _ ( moduleId ) {
/******/ // Check if module is in cache
/******/ var cachedModule = _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ if ( cachedModule !== undefined ) {
/******/ return cachedModule . exports ;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = _ _webpack _module _cache _ _ [ moduleId ] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports : { }
/******/ } ;
/******/
/******/ // Execute the module function
/******/ var threw = true ;
/******/ try {
/******/ _ _webpack _modules _ _ [ moduleId ] . call ( module . exports , module , module . exports , _ _nccwpck _require _ _ ) ;
/******/ threw = false ;
/******/ } finally {
/******/ if ( threw ) delete _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module . exports ;
/******/ }
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ _ _nccwpck _require _ _ . m = _ _webpack _modules _ _ ;
/******/
/************************************************************************/
/******/ /* webpack/runtime/define property getters */
/******/ ( ( ) => {
/******/ // define getter functions for harmony exports
/******/ _ _nccwpck _require _ _ . d = ( exports , definition ) => {
/******/ for ( var key in definition ) {
/******/ if ( _ _nccwpck _require _ _ . o ( definition , key ) && ! _ _nccwpck _require _ _ . o ( exports , key ) ) {
/******/ Object . defineProperty ( exports , key , { enumerable : true , get : definition [ key ] } ) ;
/******/ }
/******/ }
/******/ } ;
/******/ } ) ( ) ;
/******/
/******/ /* webpack/runtime/ensure chunk */
/******/ ( ( ) => {
/******/ _ _nccwpck _require _ _ . f = { } ;
/******/ // This file contains only the entry chunk.
/******/ // The chunk loading function for additional chunks
/******/ _ _nccwpck _require _ _ . e = ( chunkId ) => {
/******/ return Promise . all ( Object . keys ( _ _nccwpck _require _ _ . f ) . reduce ( ( promises , key ) => {
/******/ _ _nccwpck _require _ _ . f [ key ] ( chunkId , promises ) ;
/******/ return promises ;
/******/ } , [ ] ) ) ;
/******/ } ;
/******/ } ) ( ) ;
/******/
/******/ /* webpack/runtime/get javascript chunk filename */
/******/ ( ( ) => {
/******/ 	 	// This function allows referencing async chunks
/******/ _ _nccwpck _require _ _ . u = ( chunkId ) => {
/******/ // return url for filenames based on template
/******/ return "" + chunkId + ".index.js" ;
/******/ } ;
/******/ } ) ( ) ;
/******/
/******/ /* webpack/runtime/hasOwnProperty shorthand */
/******/ ( ( ) => {
/******/ _ _nccwpck _require _ _ . o = ( obj , prop ) => ( Object . prototype . hasOwnProperty . call ( obj , prop ) )
/******/ } ) ( ) ;
/******/
/******/ /* webpack/runtime/make namespace object */
/******/ ( ( ) => {
/******/ // define __esModule on exports
/******/ _ _nccwpck _require _ _ . r = ( exports ) => {
/******/ if ( typeof Symbol !== 'undefined' && Symbol . toStringTag ) {
/******/ Object . defineProperty ( exports , Symbol . toStringTag , { value : 'Module' } ) ;
/******/ }
/******/ Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
/******/ } ;
/******/ } ) ( ) ;
/******/
/******/ /* webpack/runtime/compat */
/******/
/******/ if ( typeof _ _nccwpck _require _ _ !== 'undefined' ) _ _nccwpck _require _ _ . ab = _ _dirname + "/" ;
/******/
/******/ /* webpack/runtime/require chunk loading */
/******/ ( ( ) => {
/******/ // no baseURI
/******/
/******/ // object to store loaded chunks
/******/ // "1" means "loaded", otherwise not loaded yet
/******/ var installedChunks = {
/******/ 179 : 1
/******/ } ;
/******/
/******/ // no on chunks loaded
/******/
/******/ var installChunk = ( chunk ) => {
/******/ var moreModules = chunk . modules , chunkIds = chunk . ids , runtime = chunk . runtime ;
/******/ for ( var moduleId in moreModules ) {
/******/ if ( _ _nccwpck _require _ _ . o ( moreModules , moduleId ) ) {
/******/ _ _nccwpck _require _ _ . m [ moduleId ] = moreModules [ moduleId ] ;
/******/ }
/******/ }
/******/ if ( runtime ) runtime ( _ _nccwpck _require _ _ ) ;
/******/ for ( var i = 0 ; i < chunkIds . length ; i ++ )
/******/ installedChunks [ chunkIds [ i ] ] = 1 ;
/******/
/******/ } ;
/******/
/******/ // require() chunk loading for javascript
/******/ _ _nccwpck _require _ _ . f . require = ( chunkId , promises ) => {
/******/ // "1" is the signal for "already loaded"
/******/ if ( ! installedChunks [ chunkId ] ) {
/******/ if ( true ) { // all chunks have JS
/******/ installChunk ( require ( "./" + _ _nccwpck _require _ _ . u ( chunkId ) ) ) ;
/******/ } else installedChunks [ chunkId ] = 1 ;
/******/ }
/******/ } ;
/******/
/******/ // no external install chunk
/******/
/******/ // no HMR
/******/
/******/ // no HMR manifest
/******/ } ) ( ) ;
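/******/ 	// Illustrative note (comment only): with the helpers above, loading an
/******/ 	// additional chunk such as __nccwpck_require__.e(147) amounts to
/******/ 	// require("./" + __nccwpck_require__.u(147)), i.e. require("./147.index.js")
/******/ 	// resolved relative to this bundle. The chunk id 147 is a placeholder.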
/******/
/************************************************************************/
/******/
2020-08-27 10:23:33 +02:00
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
/******/ var _ _webpack _exports _ _ = _ _nccwpck _require _ _ ( 399 ) ;
/******/ module . exports = _ _webpack _exports _ _ ;
/******/
/******/ } ) ( )
;