2021-07-15 07:24:13 +08:00
require ( './sourcemap-register.js' ) ; /******/ ( ( ) => { // webpackBootstrap
2020-12-06 17:56:38 +08:00
/******/ var _ _webpack _modules _ _ = ( {
2020-08-27 20:39:35 +08:00
2022-08-09 17:17:26 +00:00
/***/ 9039 :
2022-12-25 13:58:23 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-08-27 20:39:35 +08:00
"use strict" ;
2022-12-25 13:58:23 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-12-06 17:56:38 +08:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2020-08-27 20:39:35 +08:00
exports . install = exports . runCommand = void 0 ;
2022-08-09 17:17:26 +00:00
const tool _cache _1 = _ _nccwpck _require _ _ ( 7784 ) ;
const exec _1 = _ _nccwpck _require _ _ ( 1514 ) ;
2022-12-25 13:58:23 +08:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const action _1 = _ _nccwpck _require _ _ ( 1231 ) ;
2021-12-21 14:51:24 +08:00
/**
 * Runs a command line and captures its stdout.
 *
 * @param command full command line to execute
 * @returns trimmed stdout produced by the command
 * @throws Error when the command exits with a non-zero code
 */
async function runCommand(command) {
    let output = '';
    // Without ignoreReturnCode, @actions/exec rejects on a non-zero exit code,
    // so the tailored error below was unreachable dead code. With it, exec()
    // resolves with the exit code and we surface our own message.
    const result = await (0, exec_1.exec)(command, [], {
        ignoreReturnCode: true,
        listeners: {
            stdout: (data) => {
                output += data.toString();
            }
        }
    });
    if (result !== 0) {
        throw new Error(`Failed to run command: ${command}`);
    }
    return output.trim();
}
exports.runCommand = runCommand;
2021-12-21 14:51:24 +08:00
/**
 * Downloads, marks executable, and caches the docker-compose binary on Linux.
 *
 * @param version release tag to install (e.g. "v2.2.3" or "1.29.2")
 * @returns path to the cached tool directory
 */
async function installOnLinux(version) {
    // Await both probes together: fail-fast, and no window where a rejected
    // promise sits un-awaited (which would raise an unhandled rejection).
    const [system, hardware] = await Promise.all([
        runCommand('uname -s'),
        runCommand('uname -m')
    ]);
    // Compose v2+ release tags carry a "v" prefix; add it when missing.
    if (!version.startsWith('v') && parseInt(version.split('.')[0], 10) >= 2) {
        version = `v${version}`;
    }
    const url = `https://github.com/docker/compose/releases/download/${version}/docker-compose-${system}-${hardware}`;
    const installerPath = await (0, tool_cache_1.downloadTool)(url);
    // Pass the path as an argument rather than interpolating it into the
    // command line, so paths containing spaces are handled correctly.
    await (0, exec_1.exec)('chmod', ['+x', installerPath]);
    const cachedPath = await (0, tool_cache_1.cacheFile)(installerPath, 'docker-compose', 'docker-compose', version);
    return cachedPath;
}
2022-12-25 13:58:23 +08:00
/**
 * Looks up the tag name of the latest docker/compose GitHub release.
 * @returns the release tag (e.g. "v2.14.0")
 */
async function findLatestVersion() {
    const client = new action_1.Octokit();
    const release = await client.repos.getLatestRelease({
        owner: 'docker',
        repo: 'compose'
    });
    return release.data.tag_name;
}
2021-12-21 14:51:24 +08:00
/**
 * Installs docker-compose for the current platform.
 *
 * @param version release tag, or the special value "latest"
 * @returns path to the cached tool directory
 * @throws Error on platforms other than Linux
 */
async function install(version) {
    if (version === 'latest') {
        version = await findLatestVersion();
        core.info(`Requested to use the latest version: ${version}`);
    }
    // Only Linux runners are supported by this action.
    if (process.platform !== 'linux') {
        throw new Error(`Unsupported platform: ${process.platform}`);
    }
    return installOnLinux(version);
}
exports.install = install;
2020-08-27 11:53:14 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 3109 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-08-27 11:53:14 +08:00
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2022-03-18 18:32:39 +00:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2020-08-27 11:53:14 +08:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2020-12-06 17:56:38 +08:00
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
2020-08-27 11:53:14 +08:00
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-12-06 17:56:38 +08:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-08-09 17:17:26 +00:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const install _1 = _ _nccwpck _require _ _ ( 9039 ) ;
2021-12-21 14:51:24 +08:00
/**
 * Action entry point: reads the "version" input, installs docker-compose,
 * and prepends the installed tool directory to PATH. Any failure marks the
 * action as failed instead of throwing.
 */
async function run() {
    try {
        const version = core.getInput('version', { trimWhitespace: true });
        const commandPath = await (0, install_1.install)(version);
        core.addPath(commandPath);
    }
    catch (error) {
        const message = error instanceof Error ? error.message : 'Unknown error';
        core.setFailed(message);
    }
}
run();
2020-08-27 20:39:35 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 7351 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-12-06 17:56:38 +08:00
"use strict" ;
2020-08-27 20:39:35 +08:00
2021-07-15 07:24:13 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
2020-12-06 17:56:38 +08:00
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2021-07-15 07:24:13 +08:00
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
2020-12-06 17:56:38 +08:00
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2021-07-15 07:24:13 +08:00
exports . issue = exports . issueCommand = void 0 ;
2022-08-09 17:17:26 +00:00
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
2020-12-06 17:56:38 +08:00
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
/** Issues a workflow command with no properties. */
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
/**
 * Represents a single workflow command line of the form
 * `::name key=value,key=value::message`.
 */
class Command {
    constructor(command, properties, message) {
        // An empty command would produce an unparseable line; use a marker.
        this.command = command || 'missing.command';
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    // Skip falsy property values entirely.
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
// Escapes message data for the command payload (after "::").
function escapeData(s) {
    return utils_1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
// Escapes property values; additionally encodes ":" and "," which are
// structural characters in the key=value,key=value section.
function escapeProperty(s) {
    return utils_1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map
2020-08-27 20:39:35 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2186 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-08-27 20:39:35 +08:00
"use strict" ;
2021-07-15 07:24:13 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-08-27 20:39:35 +08:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
2020-12-06 17:56:38 +08:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2021-09-29 17:14:34 +00:00
exports . getIDToken = exports . getState = exports . saveState = exports . group = exports . endGroup = exports . startGroup = exports . info = exports . notice = exports . warning = exports . error = exports . debug = exports . isDebug = exports . setFailed = exports . setCommandEcho = exports . setOutput = exports . getBooleanInput = exports . getMultilineInput = exports . getInput = exports . addPath = exports . setSecret = exports . exportVariable = exports . ExitCode = void 0 ;
2022-08-09 17:17:26 +00:00
const command _1 = _ _nccwpck _require _ _ ( 7351 ) ;
2021-07-15 07:24:13 +08:00
const file _command _1 = _ _nccwpck _require _ _ ( 717 ) ;
2022-08-09 17:17:26 +00:00
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const oidc _utils _1 = _ _nccwpck _require _ _ ( 8041 ) ;
2020-12-06 17:56:38 +08:00
/**
 * The code to exit an action
 */
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job.
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const convertedVal = utils_1.toCommandValue(val);
    process.env[name] = convertedVal;
    // Prefer the GITHUB_ENV file command; fall back to the legacy
    // set-env workflow command when the file is not available.
    const envFile = process.env['GITHUB_ENV'] || '';
    if (envFile) {
        return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
    }
    command_1.issueCommand('set-env', { name }, convertedVal);
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs.
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions).
 * @param inputPath directory to prepend to PATH
 */
function addPath(inputPath) {
    const pathFile = process.env['GITHUB_PATH'] || '';
    if (pathFile) {
        file_command_1.issueFileCommand('PATH', inputPath);
    }
    else {
        // Legacy runners without GITHUB_PATH use the add-path command.
        command_1.issueCommand('add-path', {}, inputPath);
    }
    // Also update PATH for the current process so the tool is usable now.
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input.
 * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
 * Returns an empty string if the value is not defined.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    // Inputs are exposed by the runner as INPUT_<NAME> env vars, with
    // spaces replaced by underscores.
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const val = process.env[envKey] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    if (options && options.trimWhitespace === false) {
        return val;
    }
    return val.trim();
}
exports.getInput = getInput;
/**
 * Gets the values of an multiline input. Each value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string[]
 */
function getMultilineInput(name, options) {
    const lines = getInput(name, options)
        .split('\n')
        .filter(x => x !== '');
    if (options && options.trimWhitespace === false) {
        return lines;
    }
    return lines.map(input => input.trim());
}
exports.getMultilineInput = getMultilineInput;
2021-07-15 07:24:13 +08:00
/ * *
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification .
* Support boolean input list : ` true | True | TRUE | false | False | FALSE ` .
* The return value is also in boolean type .
* ref : https : //yaml.org/spec/1.2/spec.html#id2804923
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns boolean
* /
function getBooleanInput ( name , options ) {
const trueValue = [ 'true' , 'True' , 'TRUE' ] ;
const falseValue = [ 'false' , 'False' , 'FALSE' ] ;
const val = getInput ( name , options ) ;
if ( trueValue . includes ( val ) )
return true ;
if ( falseValue . includes ( val ) )
return false ;
throw new TypeError ( ` Input does not meet YAML 1.2 "Core Schema" specification: ${ name } \n ` +
` Support boolean input list: \` true | True | TRUE | false | False | FALSE \` ` ) ;
}
exports . getBooleanInput = getBooleanInput ;
2020-12-06 17:56:38 +08:00
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput ( name , value ) {
2022-10-10 00:56:45 +00:00
const filePath = process . env [ 'GITHUB_OUTPUT' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'OUTPUT' , file _command _1 . prepareKeyValueMessage ( name , value ) ) ;
}
2021-07-15 07:24:13 +08:00
process . stdout . write ( os . EOL ) ;
2022-10-10 00:56:45 +00:00
command _1 . issueCommand ( 'set-output' , { name } , utils _1 . toCommandValue ( value ) ) ;
2020-12-06 17:56:38 +08:00
}
exports . setOutput = setOutput ;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1.
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Gets whether Actions Step Debug is on or not.
 */
function isDebug() {
    return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/**
 * Writes debug message to user log.
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue.
 * @param message error issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function error(message, properties = {}) {
    const text = message instanceof Error ? message.toString() : message;
    command_1.issueCommand('error', utils_1.toCommandProperties(properties), text);
}
exports.error = error;
/**
 * Adds a warning issue.
 * @param message warning issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function warning(message, properties = {}) {
    const text = message instanceof Error ? message.toString() : message;
    command_1.issueCommand('warning', utils_1.toCommandProperties(properties), text);
}
exports.warning = warning;
2021-08-20 17:05:57 +00:00
/**
 * Adds a notice issue.
 * @param message notice issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function notice(message, properties = {}) {
    const text = message instanceof Error ? message.toString() : message;
    command_1.issueCommand('notice', utils_1.toCommandProperties(properties), text);
}
exports.notice = notice;
2020-12-06 17:56:38 +08:00
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group.
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
async function group(name, fn) {
    startGroup(name);
    try {
        // endGroup runs regardless of whether fn resolves or rejects.
        return await fn();
    }
    finally {
        endGroup();
    }
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    const stateFile = process.env['GITHUB_STATE'] || '';
    if (stateFile) {
        return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
    }
    // Legacy runners without GITHUB_STATE use the save-state command.
    command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
}
exports.saveState = saveState;
/**
 * Gets the value of an state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string
 */
function getState(name) {
    return process.env[`STATE_${name}`] || '';
}
exports.getState = getState;
/**
 * Requests an OIDC ID token for the given audience from the Actions service.
 * @param aud optional audience for the token
 */
async function getIDToken(aud) {
    return await oidc_utils_1.OidcClient.getIDToken(aud);
}
exports.getIDToken = getIDToken;
2022-04-25 17:07:43 +00:00
/**
 * Summary exports
 */
var summary_1 = __nccwpck_require__(1327);
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
/**
 * @deprecated use core.summary
 */
var summary_2 = __nccwpck_require__(1327);
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
/**
 * Path exports
 */
var path_utils_1 = __nccwpck_require__(2981);
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 717 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-12-06 17:56:38 +08:00
"use strict" ;
// For internal use, subject to change.
2021-07-15 07:24:13 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
2020-12-06 17:56:38 +08:00
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2021-07-15 07:24:13 +08:00
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
2020-12-06 17:56:38 +08:00
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-10-10 00:56:45 +00:00
exports . prepareKeyValueMessage = exports . issueFileCommand = void 0 ;
2020-12-06 17:56:38 +08:00
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
2022-08-09 17:17:26 +00:00
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
2022-10-10 00:56:45 +00:00
const uuid _1 = _ _nccwpck _require _ _ ( 8974 ) ;
2022-08-09 17:17:26 +00:00
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
2022-10-10 00:56:45 +00:00
/**
 * Appends a message to the runner-provided file for a file command
 * (e.g. GITHUB_ENV, GITHUB_OUTPUT).
 *
 * @param command file command suffix ("ENV", "OUTPUT", "PATH", "STATE")
 * @param message value to append; converted via toCommandValue
 * @throws Error when the env var is unset or the file does not exist
 */
function issueFileCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
        encoding: 'utf8'
    });
}
exports.issueFileCommand = issueFileCommand;
/**
 * Formats a key/value pair as a heredoc-style message with a random
 * delimiter, suitable for GITHUB_ENV / GITHUB_OUTPUT / GITHUB_STATE files.
 */
function prepareKeyValueMessage(key, value) {
    const delimiter = `ghadelimiter_${uuid_1.v4()}`;
    const convertedValue = utils_1.toCommandValue(value);
    // These should realistically never happen, but just in case someone finds a
    // way to exploit uuid generation let's not allow keys or values that contain
    // the delimiter.
    if (key.includes(delimiter)) {
        throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
    }
    if (convertedValue.includes(delimiter)) {
        throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
    }
    return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
//# sourceMappingURL=file-command.js.map
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 8041 :
2022-05-07 00:33:07 +00:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . OidcClient = void 0 ;
2022-08-09 17:17:26 +00:00
const http _client _1 = _ _nccwpck _require _ _ ( 6255 ) ;
const auth _1 = _ _nccwpck _require _ _ ( 5526 ) ;
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
2022-05-07 00:33:07 +00:00
// Client for requesting OIDC ID tokens from the GitHub Actions service.
// NOTE(review): identifiers below contain stray spaces (e.g. "http _client _1")
// from an earlier text-mangling of this generated bundle; they are preserved
// verbatim here.
class OidcClient {
// Builds an HTTP client authenticated with the runner-provided request token.
static createHttpClient ( allowRetry = true , maxRetry = 10 ) {
const requestOptions = {
allowRetries : allowRetry ,
maxRetries : maxRetry
} ;
return new http _client _1 . HttpClient ( 'actions/oidc-client' , [ new auth _1 . BearerCredentialHandler ( OidcClient . getRequestToken ( ) ) ] , requestOptions ) ;
}
// Reads the token used to authenticate against the OIDC endpoint.
static getRequestToken ( ) {
const token = process . env [ 'ACTIONS_ID_TOKEN_REQUEST_TOKEN' ] ;
if ( ! token ) {
throw new Error ( 'Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable' ) ;
}
return token ;
}
// Reads the runner-provided URL of the OIDC token endpoint.
static getIDTokenUrl ( ) {
const runtimeUrl = process . env [ 'ACTIONS_ID_TOKEN_REQUEST_URL' ] ;
if ( ! runtimeUrl ) {
throw new Error ( 'Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable' ) ;
}
return runtimeUrl ;
}
// Performs the GET request and extracts the `value` field from the JSON body.
static getCall ( id _token _url ) {
var _a ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpclient = OidcClient . createHttpClient ( ) ;
const res = yield httpclient
. getJson ( id _token _url )
. catch ( error => {
throw new Error ( ` Failed to get ID Token. \n
Error Code : $ { error . statusCode } \ n
Error Message : $ { error . result . message } ` );
} ) ;
const id _token = ( _a = res . result ) === null || _a === void 0 ? void 0 : _a . value ;
if ( ! id _token ) {
throw new Error ( 'Response json body do not have ID Token field' ) ;
}
return id _token ;
} ) ;
}
// Requests a new ID token (optionally scoped to an audience), masks it in
// logs via setSecret, and returns it.
static getIDToken ( audience ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
// New ID Token is requested from action service
let id _token _url = OidcClient . getIDTokenUrl ( ) ;
if ( audience ) {
const encodedAudience = encodeURIComponent ( audience ) ;
id _token _url = ` ${ id _token _url } &audience= ${ encodedAudience } ` ;
}
core _1 . debug ( ` ID token url is ${ id _token _url } ` ) ;
const id _token = yield OidcClient . getCall ( id _token _url ) ;
core _1 . setSecret ( id _token ) ;
return id _token ;
}
catch ( error ) {
throw new Error ( ` Error message: ${ error . message } ` ) ;
}
} ) ;
}
}
exports . OidcClient = OidcClient ;
//# sourceMappingURL=oidc-utils.js.map
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2981 :
2022-06-15 17:06:01 +00:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
// TypeScript-emitted interop helpers (tslib equivalents):
// __createBinding re-exposes member `k` of module `m` on namespace object `o`
// (as a live getter when property descriptors are available);
// __setModuleDefault attaches the whole CommonJS module as `default`;
// __importStar emulates `import * as ns from '...'` for non-ESM modules.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . toPlatformPath = exports . toWin32Path = exports . toPosixPath = void 0 ;
2022-08-09 17:17:26 +00:00
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
2022-06-15 17:06:01 +00:00
/**
 * toPosixPath converts the given path to the posix form. On Windows, \\ will be
 * replaced with /.
 *
 * @param pth. Path to transform.
 * @return string Posix path.
 */
function toPosixPath(pth) {
    // Swap every backslash for a forward slash; all other characters pass through.
    return pth.split('\\').join('/');
}
exports . toPosixPath = toPosixPath ;
/**
 * toWin32Path converts the given path to the win32 form. On Linux, / will be
 * replaced with \\.
 *
 * @param pth. Path to transform.
 * @return string Win32 path.
 */
function toWin32Path(pth) {
    // Swap every forward slash for a backslash; all other characters pass through.
    return pth.split('/').join('\\');
}
exports . toWin32Path = toWin32Path ;
/**
 * toPlatformPath converts the given path to a platform-specific path. It does
 * this by replacing instances of / and \ with the platform-specific path
 * separator.
 *
 * @param pth The path to platformize.
 * @return string The platform-specific path.
 */
function toPlatformPath(pth) {
    // Normalize both separator styles to this OS's `path.sep`.
    return pth.split(/[/\\]/).join(path.sep);
}
exports . toPlatformPath = toPlatformPath ;
//# sourceMappingURL=path-utils.js.map
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 1327 :
2022-04-25 17:07:43 +00:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
// TypeScript async/await down-level helper: drives the generator that tsc
// produces for an `async` function body. `adopt` coerces each awaited value
// into the Promise implementation P; `step` advances the generator with the
// resolved value (or throws the rejection back into it) until it completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-05-07 00:33:07 +00:00
exports . summary = exports . markdownSummary = exports . SUMMARY _DOCS _URL = exports . SUMMARY _ENV _VAR = void 0 ;
2022-08-09 17:17:26 +00:00
const os _1 = _ _nccwpck _require _ _ ( 2037 ) ;
const fs _1 = _ _nccwpck _require _ _ ( 7147 ) ;
2022-04-25 17:07:43 +00:00
// Promise-based fs APIs used for the step-summary file I/O below.
const { access, appendFile, writeFile } = fs_1.promises;
// Environment variable the Actions runner sets to the step-summary file path.
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
2022-05-07 00:33:07 +00:00
exports . SUMMARY _DOCS _URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary' ;
/**
 * Builds an HTML job summary in an in-memory buffer and flushes it to the
 * file named by GITHUB_STEP_SUMMARY. All add* methods return `this` so
 * calls can be chained.
 */
class Summary {
    constructor() {
        this._buffer = '';
    }
    /**
     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
     * Also checks r/w permissions.
     *
     * @returns step summary file path
     */
    async filePath() {
        if (this._filePath) {
            return this._filePath;
        }
        const summaryPath = process.env[exports.SUMMARY_ENV_VAR];
        if (!summaryPath) {
            throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
        }
        try {
            // The runner pre-creates this file; confirm we can read and write it.
            await access(summaryPath, fs_1.constants.R_OK | fs_1.constants.W_OK);
        }
        catch (_a) {
            throw new Error(`Unable to access summary file: '${summaryPath}'. Check if the file has correct read/write permissions.`);
        }
        this._filePath = summaryPath;
        return this._filePath;
    }
    /**
     * Wraps content in an HTML tag, adding any HTML attributes
     *
     * @param {string} tag HTML tag to wrap
     * @param {string | null} content content within the tag
     * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
     *
     * @returns {string} content wrapped in HTML element
     */
    wrap(tag, content, attrs = {}) {
        let htmlAttrs = '';
        for (const [name, value] of Object.entries(attrs)) {
            htmlAttrs += ` ${name}="${value}"`;
        }
        // A null/empty content produces a void element like <br> or <img ...>.
        return content
            ? `<${tag}${htmlAttrs}>${content}</${tag}>`
            : `<${tag}${htmlAttrs}>`;
    }
    /**
     * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
     *
     * @param {SummaryWriteOptions} [options] (optional) options for write operation
     *
     * @returns {Promise<Summary>} summary instance
     */
    async write(options) {
        const overwrite = !!(options && options.overwrite);
        const filePath = await this.filePath();
        const writeFunc = overwrite ? writeFile : appendFile;
        await writeFunc(filePath, this._buffer, { encoding: 'utf8' });
        return this.emptyBuffer();
    }
    /**
     * Clears the summary buffer and wipes the summary file
     *
     * @returns {Summary} summary instance
     */
    async clear() {
        return this.emptyBuffer().write({ overwrite: true });
    }
    /**
     * Returns the current summary buffer as a string
     *
     * @returns {string} string of summary buffer
     */
    stringify() {
        return this._buffer;
    }
    /**
     * If the summary buffer is empty
     *
     * @returns {boolean} true if the buffer is empty
     */
    isEmptyBuffer() {
        return this._buffer.length === 0;
    }
    /**
     * Resets the summary buffer without writing to summary file
     *
     * @returns {Summary} summary instance
     */
    emptyBuffer() {
        this._buffer = '';
        return this;
    }
    /**
     * Adds raw text to the summary buffer
     *
     * @param {string} text content to add
     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
     *
     * @returns {Summary} summary instance
     */
    addRaw(text, addEOL = false) {
        this._buffer += text;
        return addEOL ? this.addEOL() : this;
    }
    /**
     * Adds the operating system-specific end-of-line marker to the buffer
     *
     * @returns {Summary} summary instance
     */
    addEOL() {
        return this.addRaw(os_1.EOL);
    }
    /**
     * Adds an HTML codeblock to the summary buffer
     *
     * @param {string} code content to render within fenced code block
     * @param {string} lang (optional) language to syntax highlight code
     *
     * @returns {Summary} summary instance
     */
    addCodeBlock(code, lang) {
        const attrs = lang ? { lang } : {};
        const element = this.wrap('pre', this.wrap('code', code), attrs);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML list to the summary buffer
     *
     * @param {string[]} items list of items to render
     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
     *
     * @returns {Summary} summary instance
     */
    addList(items, ordered = false) {
        const listItems = items.map(item => this.wrap('li', item)).join('');
        const element = this.wrap(ordered ? 'ol' : 'ul', listItems);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML table to the summary buffer
     *
     * @param {SummaryTableCell[]} rows table rows
     *
     * @returns {Summary} summary instance
     */
    addTable(rows) {
        // Cells may be plain strings (rendered as <td>) or objects carrying
        // header/colspan/rowspan metadata.
        const renderCell = cell => {
            if (typeof cell === 'string') {
                return this.wrap('td', cell);
            }
            const { header, data, colspan, rowspan } = cell;
            const attrs = {};
            if (colspan) {
                attrs.colspan = colspan;
            }
            if (rowspan) {
                attrs.rowspan = rowspan;
            }
            return this.wrap(header ? 'th' : 'td', data, attrs);
        };
        const tableBody = rows
            .map(row => this.wrap('tr', row.map(renderCell).join('')))
            .join('');
        const element = this.wrap('table', tableBody);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds a collapsable HTML details element to the summary buffer
     *
     * @param {string} label text for the closed state
     * @param {string} content collapsable content
     *
     * @returns {Summary} summary instance
     */
    addDetails(label, content) {
        const element = this.wrap('details', this.wrap('summary', label) + content);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML image tag to the summary buffer
     *
     * @param {string} src path to the image you to embed
     * @param {string} alt text description of the image
     * @param {SummaryImageOptions} options (optional) addition image attributes
     *
     * @returns {Summary} summary instance
     */
    addImage(src, alt, options) {
        const { width, height } = options || {};
        const attrs = { src, alt };
        if (width) {
            attrs.width = width;
        }
        if (height) {
            attrs.height = height;
        }
        return this.addRaw(this.wrap('img', null, attrs)).addEOL();
    }
    /**
     * Adds an HTML section heading element
     *
     * @param {string} text heading text
     * @param {number | string} [level=1] (optional) the heading level, default: 1
     *
     * @returns {Summary} summary instance
     */
    addHeading(text, level) {
        const tag = `h${level}`;
        // Anything outside h1-h6 (including the undefined default) becomes h1.
        const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
            ? tag
            : 'h1';
        return this.addRaw(this.wrap(allowedTag, text)).addEOL();
    }
    /**
     * Adds an HTML thematic break (<hr>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addSeparator() {
        return this.addRaw(this.wrap('hr', null)).addEOL();
    }
    /**
     * Adds an HTML line break (<br>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addBreak() {
        return this.addRaw(this.wrap('br', null)).addEOL();
    }
    /**
     * Adds an HTML blockquote to the summary buffer
     *
     * @param {string} text quote text
     * @param {string} cite (optional) citation url
     *
     * @returns {Summary} summary instance
     */
    addQuote(text, cite) {
        const attrs = cite ? { cite } : {};
        return this.addRaw(this.wrap('blockquote', text, attrs)).addEOL();
    }
    /**
     * Adds an HTML anchor tag to the summary buffer
     *
     * @param {string} text link text/content
     * @param {string} href hyperlink
     *
     * @returns {Summary} summary instance
     */
    addLink(text, href) {
        return this.addRaw(this.wrap('a', text, { href })).addEOL();
    }
}
2022-05-07 00:33:07 +00:00
// Singleton summary buffer shared by both export names below.
const _summary = new Summary();
/**
 * @deprecated use `core.summary`
 */
exports.markdownSummary = _summary;
exports.summary = _summary;
//# sourceMappingURL=summary.js.map
2021-09-29 17:14:34 +00:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 5278 :
2020-12-06 17:56:38 +08:00
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2021-08-20 17:05:57 +00:00
exports . toCommandProperties = exports . toCommandValue = void 0 ;
2020-12-06 17:56:38 +08:00
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    // null and undefined both serialize to the empty string.
    if (input == null) {
        return '';
    }
    // Strings (including boxed String objects) pass through untouched;
    // everything else is JSON-serialized.
    const isStringLike = typeof input === 'string' || input instanceof String;
    return isStringLike ? input : JSON.stringify(input);
}
exports . toCommandValue = toCommandValue ;
2021-08-20 17:05:57 +00:00
/**
 * Maps AnnotationProperties onto the property names the runner's annotation
 * command expects (startLine -> line, startColumn -> col, etc.).
 *
 * @param annotationProperties
 * @returns The command properties to send with the actual annotation command
 * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
 */
function toCommandProperties(annotationProperties) {
    // An empty options object produces no command properties at all.
    if (Object.keys(annotationProperties).length === 0) {
        return {};
    }
    const { title, file, startLine, endLine, startColumn, endColumn } = annotationProperties;
    return {
        title,
        file,
        line: startLine,
        endLine,
        col: startColumn,
        endColumn
    };
}
exports . toCommandProperties = toCommandProperties ;
2020-12-06 17:56:38 +08:00
//# sourceMappingURL=utils.js.map
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 8974 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// Entry point of the bundled `uuid` package: wires the public API surface
// (v1/v3/v4/v5 generators plus NIL, version, validate, stringify, parse)
// to the individual sub-modules via enumerable getters.
Object.defineProperty(exports, "__esModule", ({
  value: true
}));
Object.defineProperty(exports, "v1", ({
  enumerable: true,
  get: function () {
    return _v.default;
  }
}));
Object.defineProperty(exports, "v3", ({
  enumerable: true,
  get: function () {
    return _v2.default;
  }
}));
Object.defineProperty(exports, "v4", ({
  enumerable: true,
  get: function () {
    return _v3.default;
  }
}));
Object.defineProperty(exports, "v5", ({
  enumerable: true,
  get: function () {
    return _v4.default;
  }
}));
Object.defineProperty(exports, "NIL", ({
  enumerable: true,
  get: function () {
    return _nil.default;
  }
}));
Object.defineProperty(exports, "version", ({
  enumerable: true,
  get: function () {
    return _version.default;
  }
}));
Object.defineProperty(exports, "validate", ({
  enumerable: true,
  get: function () {
    return _validate.default;
  }
}));
Object.defineProperty(exports, "stringify", ({
  enumerable: true,
  get: function () {
    return _stringify.default;
  }
}));
Object.defineProperty(exports, "parse", ({
  enumerable: true,
  get: function () {
    return _parse.default;
  }
}));
// Note: `_v` is uuid.v1, `_v2` is uuid.v3, `_v3` is uuid.v4, `_v4` is uuid.v5
// (Babel's sequential naming, one off from the uuid version numbers).
var _v = _interopRequireDefault(__nccwpck_require__(1595));
var _v2 = _interopRequireDefault(__nccwpck_require__(6993));
var _v3 = _interopRequireDefault(__nccwpck_require__(1472));
var _v4 = _interopRequireDefault(__nccwpck_require__(6217));
var _nil = _interopRequireDefault(__nccwpck_require__(2381));
var _version = _interopRequireDefault(__nccwpck_require__(427));
var _validate = _interopRequireDefault(__nccwpck_require__(2609));
var _stringify = _interopRequireDefault(__nccwpck_require__(1458));
var _parse = _interopRequireDefault(__nccwpck_require__(6385));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/***/ } ) ,
/***/ 5842 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Hashes the given bytes with MD5 (used by uuid.v3 name-based generation).
 * Accepts a plain byte array, a UTF-8 string, or anything Hash#update takes.
 */
function md5(bytes) {
    let input = bytes;
    if (Array.isArray(input)) {
        input = Buffer.from(input);
    }
    else if (typeof input === 'string') {
        input = Buffer.from(input, 'utf8');
    }
    return _crypto.default.createHash('md5').update(input).digest();
}
var _default = md5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 2381 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _default = '00000000-0000-0000-0000-000000000000' ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6385 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2609 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function parse ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
let v ;
const arr = new Uint8Array ( 16 ) ; // Parse ########-....-....-....-............
arr [ 0 ] = ( v = parseInt ( uuid . slice ( 0 , 8 ) , 16 ) ) >>> 24 ;
arr [ 1 ] = v >>> 16 & 0xff ;
arr [ 2 ] = v >>> 8 & 0xff ;
arr [ 3 ] = v & 0xff ; // Parse ........-####-....-....-............
arr [ 4 ] = ( v = parseInt ( uuid . slice ( 9 , 13 ) , 16 ) ) >>> 8 ;
arr [ 5 ] = v & 0xff ; // Parse ........-....-####-....-............
arr [ 6 ] = ( v = parseInt ( uuid . slice ( 14 , 18 ) , 16 ) ) >>> 8 ;
arr [ 7 ] = v & 0xff ; // Parse ........-....-....-####-............
arr [ 8 ] = ( v = parseInt ( uuid . slice ( 19 , 23 ) , 16 ) ) >>> 8 ;
arr [ 9 ] = v & 0xff ; // Parse ........-....-....-....-############
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
arr [ 10 ] = ( v = parseInt ( uuid . slice ( 24 , 36 ) , 16 ) ) / 0x10000000000 & 0xff ;
arr [ 11 ] = v / 0x100000000 & 0xff ;
arr [ 12 ] = v >>> 24 & 0xff ;
arr [ 13 ] = v >>> 16 & 0xff ;
arr [ 14 ] = v >>> 8 & 0xff ;
arr [ 15 ] = v & 0xff ;
return arr ;
}
var _default = parse ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6230 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 9784 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = rng ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const rnds8Pool = new Uint8Array ( 256 ) ; // # of random values to pre-allocate
let poolPtr = rnds8Pool . length ;
function rng ( ) {
if ( poolPtr > rnds8Pool . length - 16 ) {
_crypto . default . randomFillSync ( rnds8Pool ) ;
poolPtr = 0 ;
}
return rnds8Pool . slice ( poolPtr , poolPtr += 16 ) ;
}
/***/ } ) ,
/***/ 8844 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Hashes the given bytes with SHA-1 (used by uuid.v5 name-based generation).
 * Accepts a plain byte array, a UTF-8 string, or anything Hash#update takes.
 */
function sha1(bytes) {
    let input = bytes;
    if (Array.isArray(input)) {
        input = Buffer.from(input);
    }
    else if (typeof input === 'string') {
        input = Buffer.from(input, 'utf8');
    }
    return _crypto.default.createHash('sha1').update(input).digest();
}
var _default = sha1 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 1458 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2609 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Convert array of 16 byte values to UUID string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 */
// Lookup table: byte value -> zero-padded two-digit hex string.
const byteToHex = [];
for (let i = 0; i < 256; ++i) {
  byteToHex.push((i + 0x100).toString(16).substr(1));
}
/**
 * @param arr byte source (at least 16 bytes from `offset`)
 * @param offset starting index into `arr` (default 0)
 * @returns lower-case canonical UUID string
 * @throws TypeError when the resulting string is not a valid UUID
 */
function stringify(arr, offset = 0) {
  // Note: Be careful editing this code! It's been tuned for performance
  // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
  const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID.  If this throws, it's likely due to one
  // of the following:
  // - One or more input array values don't map to a hex octet (leading to
  // "undefined" in the uuid)
  // - Invalid input values for the RFC `version` or `variant` fields
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Stringified UUID is invalid');
  }
  return uuid;
}
var _default = stringify ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 1595 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 9784 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1458 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
let _nodeId ;
let _clockseq ; // Previous uuid creation time
let _lastMSecs = 0 ;
let _lastNSecs = 0 ; // See https://github.com/uuidjs/uuid for API details
function v1 ( options , buf , offset ) {
let i = buf && offset || 0 ;
const b = buf || new Array ( 16 ) ;
options = options || { } ;
let node = options . node || _nodeId ;
let clockseq = options . clockseq !== undefined ? options . clockseq : _clockseq ; // node and clockseq need to be initialized to random values if they're not
// specified. We do this lazily to minimize issues related to insufficient
// system entropy. See #189
if ( node == null || clockseq == null ) {
const seedBytes = options . random || ( options . rng || _rng . default ) ( ) ;
if ( node == null ) {
// Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
node = _nodeId = [ seedBytes [ 0 ] | 0x01 , seedBytes [ 1 ] , seedBytes [ 2 ] , seedBytes [ 3 ] , seedBytes [ 4 ] , seedBytes [ 5 ] ] ;
}
if ( clockseq == null ) {
// Per 4.2.2, randomize (14 bit) clockseq
clockseq = _clockseq = ( seedBytes [ 6 ] << 8 | seedBytes [ 7 ] ) & 0x3fff ;
}
} // UUID timestamps are 100 nano-second units since the Gregorian epoch,
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
let msecs = options . msecs !== undefined ? options . msecs : Date . now ( ) ; // Per 4.2.1.2, use count of uuid's generated during the current clock
// cycle to simulate higher resolution clock
let nsecs = options . nsecs !== undefined ? options . nsecs : _lastNSecs + 1 ; // Time since last uuid creation (in msecs)
const dt = msecs - _lastMSecs + ( nsecs - _lastNSecs ) / 10000 ; // Per 4.2.1.2, Bump clockseq on clock regression
if ( dt < 0 && options . clockseq === undefined ) {
clockseq = clockseq + 1 & 0x3fff ;
} // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
// time interval
if ( ( dt < 0 || msecs > _lastMSecs ) && options . nsecs === undefined ) {
nsecs = 0 ;
} // Per 4.2.1.2 Throw error if too many uuids are requested
if ( nsecs >= 10000 ) {
throw new Error ( "uuid.v1(): Can't create more than 10M uuids/sec" ) ;
}
_lastMSecs = msecs ;
_lastNSecs = nsecs ;
_clockseq = clockseq ; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
msecs += 12219292800000 ; // `time_low`
const tl = ( ( msecs & 0xfffffff ) * 10000 + nsecs ) % 0x100000000 ;
b [ i ++ ] = tl >>> 24 & 0xff ;
b [ i ++ ] = tl >>> 16 & 0xff ;
b [ i ++ ] = tl >>> 8 & 0xff ;
b [ i ++ ] = tl & 0xff ; // `time_mid`
const tmh = msecs / 0x100000000 * 10000 & 0xfffffff ;
b [ i ++ ] = tmh >>> 8 & 0xff ;
b [ i ++ ] = tmh & 0xff ; // `time_high_and_version`
b [ i ++ ] = tmh >>> 24 & 0xf | 0x10 ; // include version
b [ i ++ ] = tmh >>> 16 & 0xff ; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
b [ i ++ ] = clockseq >>> 8 | 0x80 ; // `clock_seq_low`
b [ i ++ ] = clockseq & 0xff ; // `node`
for ( let n = 0 ; n < 6 ; ++ n ) {
b [ i + n ] = node [ n ] ;
}
return buf || ( 0 , _stringify . default ) ( b ) ;
}
var _default = v1 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6993 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5920 ) ) ;
var _md = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5842 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v3 = ( 0 , _v . default ) ( 'v3' , 0x30 , _md . default ) ;
var _default = v3 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 5920 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = _default ;
exports . URL = exports . DNS = void 0 ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1458 ) ) ;
var _parse = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6385 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Encodes a JS string as an array of UTF-8 byte values.
 * The escape/encodeURIComponent round-trip yields one char per UTF-8 byte.
 */
function stringToBytes(str) {
    const utf8 = unescape(encodeURIComponent(str)); // UTF8 escape
    const bytes = [];
    for (const ch of utf8) {
        bytes.push(ch.charCodeAt(0));
    }
    return bytes;
}
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8' ;
exports . DNS = DNS ;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8' ;
exports . URL = URL ;
function _default ( name , version , hashfunc ) {
function generateUUID ( value , namespace , buf , offset ) {
if ( typeof value === 'string' ) {
value = stringToBytes ( value ) ;
}
if ( typeof namespace === 'string' ) {
namespace = ( 0 , _parse . default ) ( namespace ) ;
}
if ( namespace . length !== 16 ) {
throw TypeError ( 'Namespace must be array-like (16 iterable integer values, 0-255)' ) ;
} // Compute hash of namespace and value, Per 4.3
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
// hashfunc([...namespace, ... value])`
let bytes = new Uint8Array ( 16 + value . length ) ;
bytes . set ( namespace ) ;
bytes . set ( value , namespace . length ) ;
bytes = hashfunc ( bytes ) ;
bytes [ 6 ] = bytes [ 6 ] & 0x0f | version ;
bytes [ 8 ] = bytes [ 8 ] & 0x3f | 0x80 ;
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = bytes [ i ] ;
}
return buf ;
}
return ( 0 , _stringify . default ) ( bytes ) ;
} // Function#name is not settable on some platforms (#270)
try {
generateUUID . name = name ; // eslint-disable-next-line no-empty
} catch ( err ) { } // For CommonJS default export support
generateUUID . DNS = DNS ;
generateUUID . URL = URL ;
return generateUUID ;
}
/***/ } ) ,
/***/ 1472 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 9784 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1458 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Generates an RFC 4122 version 4 (random) UUID.
 *
 * @param options optional {random: byte[16]} or {rng: () => byte[16]}
 * @param buf optional output byte array; when given, bytes are written there
 * @param offset starting index into `buf`
 * @returns `buf` when provided, otherwise the UUID as a string
 */
function v4(options, buf, offset) {
    const opts = options || {};
    const rnds = opts.random || (opts.rng || _rng.default)();
    // Per RFC 4122 section 4.4: stamp version (0100) and variant (10xx) bits.
    rnds[6] = (rnds[6] & 0x0f) | 0x40;
    rnds[8] = (rnds[8] & 0x3f) | 0x80;
    // Copy bytes to buffer, if provided
    if (buf) {
        const start = offset || 0;
        for (let i = 0; i < 16; ++i) {
            buf[start + i] = rnds[i];
        }
        return buf;
    }
    return (0, _stringify.default)(rnds);
}
var _default = v4 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6217 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5920 ) ) ;
var _sha = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8844 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v5 = ( 0 , _v . default ) ( 'v5' , 0x50 , _sha . default ) ;
var _default = v5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 2609 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _regex = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6230 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Returns true when `uuid` is a string matching the canonical RFC 4122
 * format (v1-v5, or the NIL UUID); false for any other value or type.
 */
function validate(uuid) {
    if (typeof uuid !== 'string') {
        return false;
    }
    return _regex.default.test(uuid);
}
var _default = validate ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 427 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2609 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function version ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
return parseInt ( uuid . substr ( 14 , 1 ) , 16 ) ;
}
var _default = version ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 1514 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-12-06 17:56:38 +08:00
"use strict" ;
2021-07-15 07:24:13 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-12-06 17:56:38 +08:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2021-07-15 07:24:13 +08:00
exports . getExecOutput = exports . exec = void 0 ;
2022-08-09 17:17:26 +00:00
const string _decoder _1 = _ _nccwpck _require _ _ ( 1576 ) ;
const tr = _ _importStar ( _ _nccwpck _require _ _ ( 8159 ) ) ;
2020-12-06 17:56:38 +08:00
/ * *
* Exec a command .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < number > exit code
* /
function exec ( commandLine , args , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const commandArgs = tr . argStringToArray ( commandLine ) ;
if ( commandArgs . length === 0 ) {
throw new Error ( ` Parameter 'commandLine' cannot be null or empty. ` ) ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
// Path to tool to execute should be first arg
const toolPath = commandArgs [ 0 ] ;
args = commandArgs . slice ( 1 ) . concat ( args || [ ] ) ;
const runner = new tr . ToolRunner ( toolPath , args , options ) ;
return runner . exec ( ) ;
} ) ;
}
exports . exec = exec ;
2021-07-15 07:24:13 +08:00
/ * *
* Exec a command and get the output .
* Output will be streamed to the live console .
* Returns promise with the exit code and collected stdout and stderr
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < ExecOutput > exit code , stdout , and stderr
* /
function getExecOutput ( commandLine , args , options ) {
var _a , _b ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stdout = '' ;
let stderr = '' ;
//Using string decoder covers the case where a mult-byte character is split
const stdoutDecoder = new string _decoder _1 . StringDecoder ( 'utf8' ) ;
const stderrDecoder = new string _decoder _1 . StringDecoder ( 'utf8' ) ;
const originalStdoutListener = ( _a = options === null || options === void 0 ? void 0 : options . listeners ) === null || _a === void 0 ? void 0 : _a . stdout ;
const originalStdErrListener = ( _b = options === null || options === void 0 ? void 0 : options . listeners ) === null || _b === void 0 ? void 0 : _b . stderr ;
const stdErrListener = ( data ) => {
stderr += stderrDecoder . write ( data ) ;
if ( originalStdErrListener ) {
originalStdErrListener ( data ) ;
}
} ;
const stdOutListener = ( data ) => {
stdout += stdoutDecoder . write ( data ) ;
if ( originalStdoutListener ) {
originalStdoutListener ( data ) ;
}
} ;
const listeners = Object . assign ( Object . assign ( { } , options === null || options === void 0 ? void 0 : options . listeners ) , { stdout : stdOutListener , stderr : stdErrListener } ) ;
const exitCode = yield exec ( commandLine , args , Object . assign ( Object . assign ( { } , options ) , { listeners } ) ) ;
//flush any remaining characters
stdout += stdoutDecoder . end ( ) ;
stderr += stderrDecoder . end ( ) ;
return {
exitCode ,
stdout ,
stderr
} ;
} ) ;
}
exports . getExecOutput = getExecOutput ;
2020-12-06 17:56:38 +08:00
//# sourceMappingURL=exec.js.map
/***/ } ) ,
// 2022-08-09 17:17:26 +00:00 — stray VCS timestamp line, commented out so the bundle parses
/***/ 8159 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-12-06 17:56:38 +08:00
"use strict" ;
2021-07-15 07:24:13 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-12-06 17:56:38 +08:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2021-07-15 07:24:13 +08:00
exports . argStringToArray = exports . ToolRunner = void 0 ;
2022-08-09 17:17:26 +00:00
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const events = _ _importStar ( _ _nccwpck _require _ _ ( 2361 ) ) ;
const child = _ _importStar ( _ _nccwpck _require _ _ ( 2081 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const ioUtil = _ _importStar ( _ _nccwpck _require _ _ ( 1962 ) ) ;
const timers _1 = _ _nccwpck _require _ _ ( 9512 ) ;
2020-12-06 17:56:38 +08:00
/* eslint-disable @typescript-eslint/unbound-method */
const IS _WINDOWS = process . platform === 'win32' ;
/ *
* Class for running command line tools . Handles quoting and arg parsing in a platform agnostic way .
* /
// NOTE(review): stray VCS timestamp lines that had been interleaved into this
// generated class (syntax-breaking) were removed; the logic is unchanged.
class ToolRunner extends events.EventEmitter {
    constructor(toolPath, args, options) {
        super();
        if (!toolPath) {
            throw new Error("Parameter 'toolPath' cannot be null or empty.");
        }
        this.toolPath = toolPath;
        this.args = args || [];
        this.options = options || {};
    }
    // Forward a debug message to the caller-supplied debug listener, if any.
    _debug(message) {
        if (this.options.listeners && this.options.listeners.debug) {
            this.options.listeners.debug(message);
        }
    }
    // Build the human-readable command line echoed to the output stream.
    _getCommandString(options, noPrefix) {
        const toolPath = this._getSpawnFileName();
        const args = this._getSpawnArgs(options);
        let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
        if (IS_WINDOWS) {
            // Windows + cmd file
            if (this._isCmdFile()) {
                cmd += toolPath;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows + verbatim
            else if (options.windowsVerbatimArguments) {
                cmd += `"${toolPath}"`;
                for (const a of args) {
                    cmd += ` ${a}`;
                }
            }
            // Windows (regular)
            else {
                cmd += this._windowsQuoteCmdArg(toolPath);
                for (const a of args) {
                    cmd += ` ${this._windowsQuoteCmdArg(a)}`;
                }
            }
        }
        else {
            // OSX/Linux - this can likely be improved with some form of quoting.
            // creating processes on Unix is fundamentally different than Windows.
            // on Unix, execvp() takes an arg array.
            cmd += toolPath;
            for (const a of args) {
                cmd += ` ${a}`;
            }
        }
        return cmd;
    }
    // Split buffered output on os.EOL, invoking onLine per complete line;
    // returns the trailing partial line to be carried into the next chunk.
    _processLineBuffer(data, strBuffer, onLine) {
        try {
            let s = strBuffer + data.toString();
            let n = s.indexOf(os.EOL);
            while (n > -1) {
                const line = s.substring(0, n);
                onLine(line);
                // the rest of the string ...
                s = s.substring(n + os.EOL.length);
                n = s.indexOf(os.EOL);
            }
            return s;
        }
        catch (err) {
            // streaming lines to console is best effort. Don't fail a build.
            this._debug(`error processing line. Failed with error ${err}`);
            return '';
        }
    }
    // .cmd/.bat files must be launched via the command interpreter.
    _getSpawnFileName() {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                return process.env['COMSPEC'] || 'cmd.exe';
            }
        }
        return this.toolPath;
    }
    _getSpawnArgs(options) {
        if (IS_WINDOWS) {
            if (this._isCmdFile()) {
                let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
                for (const a of this.args) {
                    argline += ' ';
                    argline += options.windowsVerbatimArguments
                        ? a
                        : this._windowsQuoteCmdArg(a);
                }
                argline += '"';
                return [argline];
            }
        }
        return this.args;
    }
    _endsWith(str, end) {
        return str.endsWith(end);
    }
    _isCmdFile() {
        const upperToolPath = this.toolPath.toUpperCase();
        return (this._endsWith(upperToolPath, '.CMD') ||
            this._endsWith(upperToolPath, '.BAT'));
    }
    _windowsQuoteCmdArg(arg) {
        // for .exe, apply the normal quoting rules that libuv applies
        if (!this._isCmdFile()) {
            return this._uvQuoteCmdArg(arg);
        }
        // otherwise apply quoting rules specific to the cmd.exe command line parser.
        // the libuv rules are generic and are not designed specifically for cmd.exe
        // command line parser.
        //
        // for a detailed description of the cmd.exe command line parser, refer to
        // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
        // need quotes for empty arg
        if (!arg) {
            return '""';
        }
        // determine whether the arg needs to be quoted
        const cmdSpecialChars = [
            ' ',
            '\t',
            '&',
            '(',
            ')',
            '[',
            ']',
            '{',
            '}',
            '^',
            '=',
            ';',
            '!',
            "'",
            '+',
            ',',
            '`',
            '~',
            '|',
            '<',
            '>',
            '"'
        ];
        let needsQuotes = false;
        for (const char of arg) {
            if (cmdSpecialChars.some(x => x === char)) {
                needsQuotes = true;
                break;
            }
        }
        // short-circuit if quotes not needed
        if (!needsQuotes) {
            return arg;
        }
        // the following quoting rules are very similar to the rules that by libuv applies.
        //
        // 1) wrap the string in quotes
        //
        // 2) double-up quotes - i.e. " => ""
        //
        //    this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
        //    doesn't work well with a cmd.exe command line.
        //
        //    note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
        //    for example, the command line:
        //          foo.exe "myarg:""my val"""
        //    is parsed by a .NET console app into an arg array:
        //          [ "myarg:\"my val\"" ]
        //    which is the same end result when applying libuv quoting rules. although the actual
        //    command line from libuv quoting rules would look like:
        //          foo.exe "myarg:\"my val\""
        //
        // 3) double-up slashes that precede a quote,
        //    e.g.  hello \world    => "hello \world"
        //          hello\"world    => "hello\\""world"
        //          hello\\"world   => "hello\\\\""world"
        //          hello world\    => "hello world\\"
        //
        //    technically this is not required for a cmd.exe command line, or the batch argument parser.
        //    the reasons for including this as a .cmd quoting rule are:
        //
        //    a) this is optimized for the scenario where the argument is passed from the .cmd file to an
        //       external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
        //
        //    b) it's what we've been doing previously (by deferring to node default behavior) and we
        //       haven't heard any complaints about that aspect.
        //
        // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
        // escaped when used on the command line directly - even though within a .cmd file % can be escaped
        // by using %%.
        //
        // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
        // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
        //
        // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
        // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
        // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
        // to an external program.
        //
        // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
        // % can be escaped within a .cmd file.
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\'; // double the slash
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '"'; // double the quote
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    _uvQuoteCmdArg(arg) {
        // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
        // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
        // is used.
        //
        // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
        // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
        // pasting copyright notice from Node within this function:
        //
        //      Copyright Joyent, Inc. and other Node contributors. All rights reserved.
        //
        //      Permission is hereby granted, free of charge, to any person obtaining a copy
        //      of this software and associated documentation files (the "Software"), to
        //      deal in the Software without restriction, including without limitation the
        //      rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
        //      sell copies of the Software, and to permit persons to whom the Software is
        //      furnished to do so, subject to the following conditions:
        //
        //      The above copyright notice and this permission notice shall be included in
        //      all copies or substantial portions of the Software.
        //
        //      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
        //      IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
        //      FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
        //      AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
        //      LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
        //      FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
        //      IN THE SOFTWARE.
        if (!arg) {
            // Need double quotation for empty argument
            return '""';
        }
        if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
            // No quotation needed
            return arg;
        }
        if (!arg.includes('"') && !arg.includes('\\')) {
            // No embedded double quotes or backslashes, so I can just wrap
            // quote marks around the whole thing.
            return `"${arg}"`;
        }
        // Expected input/output:
        //   input : hello"world
        //   output: "hello\"world"
        //   input : hello""world
        //   output: "hello\"\"world"
        //   input : hello\world
        //   output: hello\world
        //   input : hello\\world
        //   output: hello\\world
        //   input : hello\"world
        //   output: "hello\\\"world"
        //   input : hello\\"world
        //   output: "hello\\\\\"world"
        //   input : hello world\
        //   output: "hello world\\" - note the comment in libuv actually reads "hello world\"
        //                             but it appears the comment is wrong, it should be "hello world\\"
        let reverse = '"';
        let quoteHit = true;
        for (let i = arg.length; i > 0; i--) {
            // walk the string in reverse
            reverse += arg[i - 1];
            if (quoteHit && arg[i - 1] === '\\') {
                reverse += '\\';
            }
            else if (arg[i - 1] === '"') {
                quoteHit = true;
                reverse += '\\';
            }
            else {
                quoteHit = false;
            }
        }
        reverse += '"';
        return reverse
            .split('')
            .reverse()
            .join('');
    }
    // Fill in defaults so downstream code can assume every option is present.
    _cloneExecOptions(options) {
        options = options || {};
        const result = {
            cwd: options.cwd || process.cwd(),
            env: options.env || process.env,
            silent: options.silent || false,
            windowsVerbatimArguments: options.windowsVerbatimArguments || false,
            failOnStdErr: options.failOnStdErr || false,
            ignoreReturnCode: options.ignoreReturnCode || false,
            delay: options.delay || 10000
        };
        result.outStream = options.outStream || process.stdout;
        result.errStream = options.errStream || process.stderr;
        return result;
    }
    _getSpawnOptions(options, toolPath) {
        options = options || {};
        const result = {};
        result.cwd = options.cwd;
        result.env = options.env;
        result['windowsVerbatimArguments'] =
            options.windowsVerbatimArguments || this._isCmdFile();
        if (options.windowsVerbatimArguments) {
            result.argv0 = `"${toolPath}"`;
        }
        return result;
    }
    /**
     * Exec a tool.
     * Output will be streamed to the live console.
     * Returns promise with return code
     *
     * @param     tool     path to tool to exec
     * @param     options  optional exec options.  See ExecOptions
     * @returns   number
     */
    exec() {
        return __awaiter(this, void 0, void 0, function* () {
            // root the tool path if it is unrooted and contains relative pathing
            if (!ioUtil.isRooted(this.toolPath) &&
                (this.toolPath.includes('/') ||
                    (IS_WINDOWS && this.toolPath.includes('\\')))) {
                // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
                this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
            }
            // if the tool is only a file name, then resolve it from the PATH
            // otherwise verify it exists (add extension on Windows if necessary)
            this.toolPath = yield io.which(this.toolPath, true);
            return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
                this._debug(`exec tool: ${this.toolPath}`);
                this._debug('arguments:');
                for (const arg of this.args) {
                    this._debug(`   ${arg}`);
                }
                const optionsNonNull = this._cloneExecOptions(this.options);
                if (!optionsNonNull.silent && optionsNonNull.outStream) {
                    optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
                }
                const state = new ExecState(optionsNonNull, this.toolPath);
                state.on('debug', (message) => {
                    this._debug(message);
                });
                if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) {
                    return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`));
                }
                const fileName = this._getSpawnFileName();
                const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
                let stdbuffer = '';
                if (cp.stdout) {
                    cp.stdout.on('data', (data) => {
                        if (this.options.listeners && this.options.listeners.stdout) {
                            this.options.listeners.stdout(data);
                        }
                        if (!optionsNonNull.silent && optionsNonNull.outStream) {
                            optionsNonNull.outStream.write(data);
                        }
                        stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.stdline) {
                                this.options.listeners.stdline(line);
                            }
                        });
                    });
                }
                let errbuffer = '';
                if (cp.stderr) {
                    cp.stderr.on('data', (data) => {
                        state.processStderr = true;
                        if (this.options.listeners && this.options.listeners.stderr) {
                            this.options.listeners.stderr(data);
                        }
                        if (!optionsNonNull.silent &&
                            optionsNonNull.errStream &&
                            optionsNonNull.outStream) {
                            const s = optionsNonNull.failOnStdErr
                                ? optionsNonNull.errStream
                                : optionsNonNull.outStream;
                            s.write(data);
                        }
                        errbuffer = this._processLineBuffer(data, errbuffer, (line) => {
                            if (this.options.listeners && this.options.listeners.errline) {
                                this.options.listeners.errline(line);
                            }
                        });
                    });
                }
                cp.on('error', (err) => {
                    state.processError = err.message;
                    state.processExited = true;
                    state.processClosed = true;
                    state.CheckComplete();
                });
                cp.on('exit', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                cp.on('close', (code) => {
                    state.processExitCode = code;
                    state.processExited = true;
                    state.processClosed = true;
                    this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
                    state.CheckComplete();
                });
                state.on('done', (error, exitCode) => {
                    if (stdbuffer.length > 0) {
                        this.emit('stdline', stdbuffer);
                    }
                    if (errbuffer.length > 0) {
                        this.emit('errline', errbuffer);
                    }
                    cp.removeAllListeners();
                    if (error) {
                        reject(error);
                    }
                    else {
                        resolve(exitCode);
                    }
                });
                if (this.options.input) {
                    if (!cp.stdin) {
                        throw new Error('child process missing stdin');
                    }
                    cp.stdin.end(this.options.input);
                }
            }));
        });
    }
}
exports.ToolRunner = ToolRunner;
/ * *
* Convert an arg string to an array of args . Handles escaping
*
* @ param argString string of arguments
* @ returns string [ ] array of arguments
* /
function argStringToArray(argString) {
    const tokens = [];
    let quoting = false;
    let pendingEscape = false;
    let current = '';
    // Commit a character to the current token; a backslash only escapes a
    // double quote — before anything else it is kept literally.
    const push = (ch) => {
        if (pendingEscape && ch !== '"') {
            current += '\\';
        }
        current += ch;
        pendingEscape = false;
    };
    for (const ch of argString) {
        if (ch === '"') {
            // unescaped quote toggles quoting mode; escaped quote is literal
            if (pendingEscape) {
                push(ch);
            }
            else {
                quoting = !quoting;
            }
        }
        else if (ch === '\\' && pendingEscape) {
            push(ch);
        }
        else if (ch === '\\' && quoting) {
            // backslash inside quotes may escape the next double quote
            pendingEscape = true;
        }
        else if (ch === ' ' && !quoting) {
            // unquoted space ends the current token (runs of spaces collapse)
            if (current.length > 0) {
                tokens.push(current);
                current = '';
            }
        }
        else {
            push(ch);
        }
    }
    if (current.length > 0) {
        tokens.push(current.trim());
    }
    return tokens;
}
exports . argStringToArray = argStringToArray ;
// Tracks the lifecycle of a spawned process and decides when the overall
// exec is "done" (exit observed and stdio closed, or the delay expired).
// NOTE(review): stray VCS timestamp lines interleaved into this generated
// class (syntax-breaking) were removed; the logic is unchanged.
class ExecState extends events.EventEmitter {
    constructor(options, toolPath) {
        super();
        this.processClosed = false; // tracks whether the process has exited and stdio is closed
        this.processError = '';
        this.processExitCode = 0;
        this.processExited = false; // tracks whether the process has exited
        this.processStderr = false; // tracks whether stderr was written to
        this.delay = 10000; // 10 seconds
        this.done = false;
        this.timeout = null;
        if (!toolPath) {
            throw new Error('toolPath must not be empty');
        }
        this.options = options;
        this.toolPath = toolPath;
        if (options.delay) {
            this.delay = options.delay;
        }
    }
    CheckComplete() {
        if (this.done) {
            return;
        }
        if (this.processClosed) {
            this._setResult();
        }
        else if (this.processExited) {
            // exit seen but stdio still open: give inherited streams `delay` ms to close
            this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this);
        }
    }
    _debug(message) {
        this.emit('debug', message);
    }
    _setResult() {
        // determine whether there is an error
        let error;
        if (this.processExited) {
            if (this.processError) {
                error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
            }
            else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
                error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
            }
            else if (this.processStderr && this.options.failOnStdErr) {
                error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
            }
        }
        // clear the timeout
        if (this.timeout) {
            clearTimeout(this.timeout);
            this.timeout = null;
        }
        this.done = true;
        this.emit('done', error, this.processExitCode);
    }
    static HandleTimeout(state) {
        if (state.done) {
            return;
        }
        if (!state.processClosed && state.processExited) {
            const message = `The STDIO streams did not close within ${state.delay /
                1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
            state._debug(message);
        }
        state._setResult();
    }
}
//# sourceMappingURL=toolrunner.js.map
// 2021-09-29 17:14:34 +00:00 — stray VCS timestamp line, commented out so the bundle parses
/***/ } ) ,
// 2022-08-09 17:17:26 +00:00 — stray VCS timestamp line, commented out so the bundle parses
/***/ 5526 :
2022-05-17 03:48:15 +00:00
/***/ ( function ( _ _unused _webpack _module , exports ) {
2021-09-29 17:14:34 +00:00
"use strict" ;
2022-05-17 03:48:15 +00:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
2021-09-29 17:14:34 +00:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-05-17 03:48:15 +00:00
exports . PersonalAccessTokenCredentialHandler = exports . BearerCredentialHandler = exports . BasicCredentialHandler = void 0 ;
2021-09-29 17:14:34 +00:00
class BasicCredentialHandler {
constructor ( username , password ) {
this . username = username ;
this . password = password ;
}
prepareRequest ( options ) {
2022-05-17 03:48:15 +00:00
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Basic ${ Buffer . from ( ` ${ this . username } : ${ this . password } ` ) . toString ( 'base64' ) } ` ;
2021-09-29 17:14:34 +00:00
}
// This handler cannot handle 401
2022-05-17 03:48:15 +00:00
canHandleAuthentication ( ) {
2021-09-29 17:14:34 +00:00
return false ;
}
2022-05-17 03:48:15 +00:00
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
2021-09-29 17:14:34 +00:00
}
}
exports . BasicCredentialHandler = BasicCredentialHandler ;
class BearerCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
2022-05-17 03:48:15 +00:00
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Bearer ${ this . token } ` ;
2021-09-29 17:14:34 +00:00
}
// This handler cannot handle 401
2022-05-17 03:48:15 +00:00
canHandleAuthentication ( ) {
2021-09-29 17:14:34 +00:00
return false ;
}
2022-05-17 03:48:15 +00:00
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
2021-09-29 17:14:34 +00:00
}
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
class PersonalAccessTokenCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
2022-05-17 03:48:15 +00:00
if ( ! options . headers ) {
throw Error ( 'The request has no headers' ) ;
}
options . headers [ 'Authorization' ] = ` Basic ${ Buffer . from ( ` PAT: ${ this . token } ` ) . toString ( 'base64' ) } ` ;
2021-09-29 17:14:34 +00:00
}
// This handler cannot handle 401
2022-05-17 03:48:15 +00:00
canHandleAuthentication ( ) {
2021-09-29 17:14:34 +00:00
return false ;
}
2022-05-17 03:48:15 +00:00
handleAuthentication ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
throw new Error ( 'not implemented' ) ;
} ) ;
2021-09-29 17:14:34 +00:00
}
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
2022-05-17 03:48:15 +00:00
//# sourceMappingURL=auth.js.map
2021-09-29 17:14:34 +00:00
2020-08-27 11:53:14 +08:00
/***/ } ) ,
// 2022-08-09 17:17:26 +00:00 — stray VCS timestamp line, commented out so the bundle parses
/***/ 6255 :
2022-05-17 03:48:15 +00:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-08-27 11:53:14 +08:00
"use strict" ;
2022-05-17 03:48:15 +00:00
/* eslint-disable @typescript-eslint/no-explicit-any */
// TypeScript compiler emit helpers (tslib equivalents) bundled into this module.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
    o["default"] = v;
});
// Emulates `import * as ns from 'mod'`: ES modules pass through unchanged,
// CommonJS exports are copied onto a fresh namespace object with a `default` binding.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Desugared async/await: drives a generator with a Promise chain.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
2020-12-06 17:56:38 +08:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-05-17 03:48:15 +00:00
exports . HttpClient = exports . isHttps = exports . HttpClientResponse = exports . HttpClientError = exports . getProxyUrl = exports . MediaTypes = exports . Headers = exports . HttpCodes = void 0 ;
2022-08-09 17:17:26 +00:00
const http = _ _importStar ( _ _nccwpck _require _ _ ( 3685 ) ) ;
const https = _ _importStar ( _ _nccwpck _require _ _ ( 5687 ) ) ;
const pm = _ _importStar ( _ _nccwpck _require _ _ ( 9835 ) ) ;
const tunnel = _ _importStar ( _ _nccwpck _require _ _ ( 4294 ) ) ;
2020-12-06 17:56:38 +08:00
// Numeric enum in the TypeScript emit style: forward (name -> code) and
// reverse (code -> name) mappings live on the same object.
var HttpCodes;
(function (HttpCodes) {
    const codes = {
        OK: 200,
        MultipleChoices: 300,
        MovedPermanently: 301,
        ResourceMoved: 302,
        SeeOther: 303,
        NotModified: 304,
        UseProxy: 305,
        SwitchProxy: 306,
        TemporaryRedirect: 307,
        PermanentRedirect: 308,
        BadRequest: 400,
        Unauthorized: 401,
        PaymentRequired: 402,
        Forbidden: 403,
        NotFound: 404,
        MethodNotAllowed: 405,
        NotAcceptable: 406,
        ProxyAuthenticationRequired: 407,
        RequestTimeout: 408,
        Conflict: 409,
        Gone: 410,
        TooManyRequests: 429,
        InternalServerError: 500,
        NotImplemented: 501,
        BadGateway: 502,
        ServiceUnavailable: 503,
        GatewayTimeout: 504
    };
    for (const [name, code] of Object.entries(codes)) {
        HttpCodes[(HttpCodes[name] = code)] = name;
    }
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
// String enums: only the forward mapping is generated.
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
2020-08-27 11:53:14 +08:00
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 * @returns the proxy's href, or '' when no proxy applies to this server URL.
 */
function getProxyUrl(serverUrl) {
    const resolved = pm.getProxyUrl(new URL(serverUrl));
    return resolved ? resolved.href : '';
}
exports.getProxyUrl = getProxyUrl;
// 3xx status codes the client follows automatically.
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// Transient 5xx codes that are worth retrying.
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only idempotent verbs are ever retried.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Backoff delay is TimeSlice * 2^retry, with retry capped at Ceiling.
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
/**
 * Error raised for failed (non-2xx) responses; carries the HTTP status code
 * and, when the body parsed as JSON, the parsed result (set by the caller).
 */
class HttpClientError extends Error {
    constructor(message, statusCode) {
        super(message);
        this.name = 'HttpClientError';
        this.statusCode = statusCode;
        // Repair the prototype chain so `instanceof` works after transpilation.
        Object.setPrototypeOf(this, HttpClientError.prototype);
    }
}
2020-12-06 17:56:38 +08:00
exports . HttpClientError = HttpClientError ;
/**
 * Thin wrapper over an incoming http.IncomingMessage that can buffer the
 * whole body into a string.
 */
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    /**
     * Reads the full response body.
     * @returns a Promise resolving to the body decoded with the default (utf8) encoding.
     */
    readBody() {
        return new Promise(resolve => {
            let output = Buffer.alloc(0);
            this.message.on('data', chunk => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
        });
    }
}
2020-12-06 17:56:38 +08:00
exports . HttpClientResponse = HttpClientResponse ;
/**
 * Whether the given request URL uses the https: protocol.
 * @param requestUrl URL string to inspect
 * @returns true when the URL's protocol is https:
 */
function isHttps(requestUrl) {
    return new URL(requestUrl).protocol === 'https:';
}
2020-12-06 17:56:38 +08:00
exports . isHttps = isHttps ;
/**
 * HTTP/HTTPS client with optional proxy tunneling, keep-alive agents,
 * automatic redirect following, and retry of idempotent verbs.
 *
 * NOTE(review): this block was corrupted by interleaved VCS timestamp lines
 * and token-level whitespace mangling; it has been reconstructed as valid JS
 * with the compiled __awaiter wrappers replaced by native async methods
 * (behavior-identical).
 */
class HttpClient {
    /**
     * @param userAgent value sent as the `user-agent` header on every request
     * @param handlers optional auth handlers; each may mutate outgoing request options
     * @param requestOptions client-wide settings (redirects, retries, keep-alive, headers, ...)
     */
    constructor(userAgent, handlers, requestOptions) {
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    async options(requestUrl, additionalHeaders) {
        return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
    }
    async get(requestUrl, additionalHeaders) {
        return this.request('GET', requestUrl, null, additionalHeaders || {});
    }
    async del(requestUrl, additionalHeaders) {
        return this.request('DELETE', requestUrl, null, additionalHeaders || {});
    }
    async post(requestUrl, data, additionalHeaders) {
        return this.request('POST', requestUrl, data, additionalHeaders || {});
    }
    async patch(requestUrl, data, additionalHeaders) {
        return this.request('PATCH', requestUrl, data, additionalHeaders || {});
    }
    async put(requestUrl, data, additionalHeaders) {
        return this.request('PUT', requestUrl, data, additionalHeaders || {});
    }
    async head(requestUrl, additionalHeaders) {
        return this.request('HEAD', requestUrl, null, additionalHeaders || {});
    }
    async sendStream(verb, requestUrl, stream, additionalHeaders) {
        return this.request(verb, requestUrl, stream, additionalHeaders);
    }
    /**
     * Gets a typed object from an endpoint.
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise.
     */
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        const res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async postJson(requestUrl, obj, additionalHeaders = {}) {
        const data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        const res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async putJson(requestUrl, obj, additionalHeaders = {}) {
        const data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        const res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
        const data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        const res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch.
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
        }
        const parsedUrl = new URL(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        do {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (const handler of this.handlers) {
                    if (handler.canHandleAuthentication(response)) {
                        authenticationHandler = handler;
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (response.message.statusCode &&
                HttpRedirectCodes.includes(response.message.statusCode) &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                const parsedRedirectUrl = new URL(redirectUrl);
                if (parsedUrl.protocol === 'https:' &&
                    parsedUrl.protocol !== parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (const header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (!response.message.statusCode ||
                !HttpResponseRetryCodes.includes(response.message.statusCode)) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                // Drain the body so the socket is released, then back off.
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        } while (numTries < maxTries);
        return response;
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info request info produced by _prepareRequest
     * @param data string body or readable stream (or null)
     */
    async requestRaw(info, data) {
        return new Promise((resolve, reject) => {
            function callbackForResult(err, res) {
                if (err) {
                    reject(err);
                }
                else if (!res) {
                    // If `err` is not passed, then `res` must be passed.
                    reject(new Error('Unknown error'));
                }
                else {
                    resolve(res);
                }
            }
            this.requestRawWithCallback(info, data, callbackForResult);
        });
    }
    /**
     * Raw request with callback.
     * @param info request info produced by _prepareRequest
     * @param data string body or readable stream (or null)
     * @param onResult callback invoked exactly once with (err, res)
     */
    requestRawWithCallback(info, data, onResult) {
        if (typeof data === 'string') {
            if (!info.options.headers) {
                info.options.headers = {};
            }
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        // Guard: timeout, 'error' and the response callback can all fire;
        // only the first outcome is reported.
        function handleResult(err, res) {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        }
        const req = info.httpModule.request(info.options, (msg) => {
            const res = new HttpClientResponse(msg);
            handleResult(undefined, res);
        });
        let socket;
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error(`Request timeout: ${info.options.path}`));
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        const parsedUrl = new URL(serverUrl);
        return this._getAgent(parsedUrl);
    }
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            for (const handler of this.handlers) {
                handler.prepareRequest(info.options);
            }
        }
        return info;
    }
    _mergeHeaders(headers) {
        // Per-request headers win over client-wide defaults; all keys lowercased.
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
        }
        return lowercaseKeys(headers || {});
    }
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    _getAgent(parsedUrl) {
        let agent;
        const proxyUrl = pm.getProxyUrl(parsedUrl);
        const useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        // This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
        if (proxyUrl && proxyUrl.hostname) {
            const agentOptions = {
                maxSockets,
                keepAlive: this._keepAlive,
                proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
                    proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
                })), { host: proxyUrl.hostname, port: proxyUrl.port })
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    async _performExponentialBackoff(retryNumber) {
        retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
        const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
        return new Promise(resolve => setTimeout(() => resolve(), ms));
    }
    async _processResponse(res, options) {
        // NOTE: async executor preserved from the compiled original.
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode || 0;
            const response = {
                statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            if (statusCode === HttpCodes.NotFound) {
                resolve(response);
            }
            // get the result from the body
            function dateTimeDeserializer(key, value) {
                if (typeof value === 'string') {
                    const a = new Date(value);
                    if (!isNaN(a.valueOf())) {
                        return a;
                    }
                }
                return value;
            }
            let obj;
            let contents;
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = `Failed request: (${statusCode})`;
                }
                const err = new HttpClientError(msg, statusCode);
                err.result = response.result;
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
}
exports . HttpClient = HttpClient ;
2022-05-17 03:48:15 +00:00
// Returns a shallow copy of obj with every key lowercased (later duplicates win).
const lowercaseKeys = (obj) =>
    Object.fromEntries(Object.entries(obj).map(([key, value]) => [key.toLowerCase(), value]));
//# sourceMappingURL=index.js.map
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 9835 :
2020-12-06 17:56:38 +08:00
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-05-17 03:48:15 +00:00
exports . checkBypass = exports . getProxyUrl = void 0 ;
2020-12-06 17:56:38 +08:00
/**
 * Resolves the proxy URL for a request from the standard proxy environment
 * variables (https_proxy/HTTPS_PROXY or http_proxy/HTTP_PROXY).
 * @param reqUrl URL instance for the outgoing request
 * @returns a URL for the proxy, or undefined when no proxy applies.
 */
function getProxyUrl(reqUrl) {
    const usingSsl = reqUrl.protocol === 'https:';
    // Hosts matched by NO_PROXY never go through a proxy.
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    const proxySetting = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    return proxySetting ? new URL(proxySetting) : undefined;
}
exports . getProxyUrl = getProxyUrl ;
/**
 * Whether the request host matches an entry in no_proxy/NO_PROXY and should
 * bypass the proxy. Matching is case-insensitive and exact, against both the
 * bare hostname and `hostname:port`.
 * @param reqUrl URL instance for the outgoing request
 */
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Work out the effective port so `host:port` entries can match too.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    const candidates = new Set([reqUrl.hostname.toUpperCase()]);
    if (typeof reqPort === 'number') {
        candidates.add(`${reqUrl.hostname.toUpperCase()}:${reqPort}`);
    }
    // Any exact (case-insensitive) match against a NO_PROXY entry bypasses the proxy.
    return noProxy
        .split(',')
        .map(entry => entry.trim().toUpperCase())
        .filter(entry => entry.length > 0)
        .some(entry => candidates.has(entry));
}
exports . checkBypass = checkBypass ;
2022-05-17 03:48:15 +00:00
//# sourceMappingURL=proxy.js.map
2020-08-27 20:39:35 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 1962 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-08-27 20:39:35 +08:00
"use strict" ;
// TypeScript emit helper: desugars async/await by driving a generator with Promises.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
2020-12-06 17:56:38 +08:00
var _a;
Object.defineProperty(exports, "__esModule", ({ value: true }));
const assert_1 = __nccwpck_require__(9491);
const fs = __nccwpck_require__(7147);
const path = __nccwpck_require__(1017);
// Re-export the promise-based fs API under this module's namespace.
_a = fs.promises;
exports.chmod = _a.chmod;
exports.copyFile = _a.copyFile;
exports.lstat = _a.lstat;
exports.mkdir = _a.mkdir;
exports.readdir = _a.readdir;
exports.readlink = _a.readlink;
exports.rename = _a.rename;
exports.rmdir = _a.rmdir;
exports.stat = _a.stat;
exports.symlink = _a.symlink;
exports.unlink = _a.unlink;
exports.IS_WINDOWS = process.platform === 'win32';
/**
 * Whether a path exists on disk.
 * Rethrows any stat error other than ENOENT.
 * @param fsPath path to check
 */
async function exists(fsPath) {
    try {
        await exports.stat(fsPath);
        return true;
    }
    catch (err) {
        if (err.code === 'ENOENT') {
            return false;
        }
        throw err;
    }
}
2020-12-06 17:56:38 +08:00
exports . exists = exists ;
/**
 * Whether the path refers to a directory.
 * @param fsPath path to check
 * @param useStat when true follow symlinks (stat); otherwise inspect the link itself (lstat)
 */
async function isDirectory(fsPath, useStat = false) {
    const stats = useStat ? await exports.stat(fsPath) : await exports.lstat(fsPath);
    return stats.isDirectory();
}
2020-12-06 17:56:38 +08:00
exports . isDirectory = isDirectory ;
/**
 * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
 * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
 * @throws when the normalized path is empty
 */
function isRooted(p) {
    const normalized = normalizeSeparators(p);
    if (!normalized) {
        throw new Error('isRooted() parameter "p" cannot be empty');
    }
    if (exports.IS_WINDOWS) {
        // e.g. \ or \hello or \\hello\share, or a drive root like C: or C:\hello
        return normalized.startsWith('\\') || /^[A-Z]:/i.test(normalized);
    }
    return normalized.startsWith('/');
}
2020-12-06 17:56:38 +08:00
exports . isRooted = isRooted ;
2020-08-27 20:39:35 +08:00
/**
 * Recursively create a directory at `fsPath`.
 *
 * This implementation is optimistic, meaning it attempts to create the full
 * path first, and backs up the path stack from there.
 *
 * @param fsPath The path to create
 * @param maxDepth The maximum recursion depth
 * @param depth The current recursion depth
 */
async function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
    assert_1.ok(fsPath, 'a path argument must be provided');
    fsPath = path.resolve(fsPath);
    // Depth guard: give up on recursion and let mkdir report the failure.
    if (depth >= maxDepth)
        return exports.mkdir(fsPath);
    try {
        await exports.mkdir(fsPath);
        return;
    }
    catch (err) {
        switch (err.code) {
            case 'ENOENT': {
                // Parent is missing: create it first, then retry this level.
                await mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
                await exports.mkdir(fsPath);
                return;
            }
            default: {
                // Tolerate the failure only if the path already exists as a directory.
                let stats;
                try {
                    stats = await exports.stat(fsPath);
                }
                catch (err2) {
                    throw err;
                }
                if (!stats.isDirectory())
                    throw err;
            }
        }
    }
}
2020-12-06 17:56:38 +08:00
exports . mkdirP = mkdirP ;
/**
 * Best effort attempt to determine whether a file exists and is executable.
 * @param filePath file path to check
 * @param extensions additional file extensions to try
 * @return if file exists and is executable, returns the file path. otherwise empty string.
 */
function tryGetExecutablePath(filePath, extensions) {
    return __awaiter(this, void 0, void 0, function* () {
        let stats = undefined;
        try {
            // test file exists
            stats = yield exports.stat(filePath);
        }
        catch (err) {
            if (err.code !== 'ENOENT') {
                // eslint-disable-next-line no-console
                console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
            }
        }
        if (stats && stats.isFile()) {
            if (exports.IS_WINDOWS) {
                // on Windows, test for valid extension
                const upperExt = path.extname(filePath).toUpperCase();
                if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
                    return filePath;
                }
            }
            else {
                if (isUnixExecutable(stats)) {
                    return filePath;
                }
            }
        }
        // try each extension
        const originalFilePath = filePath;
        for (const extension of extensions) {
            filePath = originalFilePath + extension;
            stats = undefined;
            try {
                stats = yield exports.stat(filePath);
            }
            catch (err) {
                if (err.code !== 'ENOENT') {
                    // eslint-disable-next-line no-console
                    console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
                }
            }
            if (stats && stats.isFile()) {
                if (exports.IS_WINDOWS) {
                    // preserve the case of the actual file (since an extension was appended)
                    try {
                        const directory = path.dirname(filePath);
                        const upperName = path.basename(filePath).toUpperCase();
                        for (const actualName of yield exports.readdir(directory)) {
                            if (upperName === actualName.toUpperCase()) {
                                filePath = path.join(directory, actualName);
                                break;
                            }
                        }
                    }
                    catch (err) {
                        // eslint-disable-next-line no-console
                        console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
                    }
                    return filePath;
                }
                else {
                    if (isUnixExecutable(stats)) {
                        return filePath;
                    }
                }
            }
        }
        return '';
    });
}
exports.tryGetExecutablePath = tryGetExecutablePath;
/**
 * Normalizes path separators for the current platform and collapses
 * redundant separators (e.g. `a//b` -> `a/b`, `a\\\\b` -> `a\b` on Windows).
 *
 * @param p path to normalize (null/undefined treated as empty string)
 */
function normalizeSeparators(p) {
    p = p || '';
    if (exports.IS_WINDOWS) {
        // convert slashes on Windows
        p = p.replace(/\//g, '\\');
        // remove redundant slashes
        return p.replace(/\\\\+/g, '\\');
    }
    // remove redundant slashes
    return p.replace(/\/\/+/g, '/');
}
// on Mac/Linux, test the execute bit
//     R   W  X  R  W X R W X
//   256 128 64 32 16 8 4 2 1
/**
 * Returns true when the stats indicate the file is executable by the current
 * process: executable by anyone (mode & 1), by the process's group
 * (mode & 8 with matching gid), or by the owner (mode & 64 with matching uid).
 */
function isUnixExecutable(stats) {
    return ((stats.mode & 1) > 0 ||
        ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
        ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
}
//# sourceMappingURL=io-util.js.map
2020-08-27 20:39:35 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 7436 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-08-27 20:39:35 +08:00
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
2020-12-06 17:56:38 +08:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-08-09 17:17:26 +00:00
const childProcess = _ _nccwpck _require _ _ ( 2081 ) ;
const path = _ _nccwpck _require _ _ ( 1017 ) ;
const util _1 = _ _nccwpck _require _ _ ( 3837 ) ;
const ioUtil = _ _nccwpck _require _ _ ( 1962 ) ;
2020-12-06 17:56:38 +08:00
const exec = util _1 . promisify ( childProcess . exec ) ;
/**
 * Copies a file or folder.
 * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See CopyOptions.
 */
function cp(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        const { force, recursive } = readCopyOptions(options);
        const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
        // Dest is an existing file, but not forcing
        if (destStat && destStat.isFile() && !force) {
            return;
        }
        // If dest is an existing directory, should copy inside.
        const newDest = destStat && destStat.isDirectory()
            ? path.join(dest, path.basename(source))
            : dest;
        if (!(yield ioUtil.exists(source))) {
            throw new Error(`no such file or directory: ${source}`);
        }
        const sourceStat = yield ioUtil.stat(source);
        if (sourceStat.isDirectory()) {
            if (!recursive) {
                throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
            }
            else {
                yield cpDirRecursive(source, newDest, 0, force);
            }
        }
        else {
            if (path.relative(source, newDest) === '') {
                // a file cannot be copied to itself
                throw new Error(`'${newDest}' and '${source}' are the same file`);
            }
            yield copyFile(source, newDest, force);
        }
    });
}
exports.cp = cp;
/**
 * Moves a path.
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See MoveOptions.
 */
function mv(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        if (yield ioUtil.exists(dest)) {
            let destExists = true;
            if (yield ioUtil.isDirectory(dest)) {
                // If dest is directory copy src into dest
                dest = path.join(dest, path.basename(source));
                destExists = yield ioUtil.exists(dest);
            }
            if (destExists) {
                // force defaults to true; only an explicit force:false blocks overwrite
                if (options.force == null || options.force) {
                    yield rmRF(dest);
                }
                else {
                    throw new Error('Destination already exists');
                }
            }
        }
        yield mkdirP(path.dirname(dest));
        yield ioUtil.rename(source, dest);
    });
}
exports.mv = mv;
/**
 * Remove a path recursively with force
 *
 * @param inputPath path to remove
 */
function rmRF(inputPath) {
    return __awaiter(this, void 0, void 0, function* () {
        if (ioUtil.IS_WINDOWS) {
            // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
            // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
            try {
                if (yield ioUtil.isDirectory(inputPath, true)) {
                    yield exec(`rd /s /q "${inputPath}"`);
                }
                else {
                    yield exec(`del /f /a "${inputPath}"`);
                }
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
            // Shelling out fails to remove a symlink folder with missing source, this unlink catches that
            try {
                yield ioUtil.unlink(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
        }
        else {
            let isDir = false;
            try {
                isDir = yield ioUtil.isDirectory(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
                return;
            }
            if (isDir) {
                yield exec(`rm -rf "${inputPath}"`);
            }
            else {
                yield ioUtil.unlink(inputPath);
            }
        }
    });
}
exports.rmRF = rmRF;
/**
 * Make a directory.  Creates the full path with folders in between
 * Will throw if it fails
 *
 * @param   fsPath        path to create
 * @returns Promise<void>
 */
function mkdirP(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // Delegates to io-util's recursive, optimistic mkdir implementation
        yield ioUtil.mkdirP(fsPath);
    });
}
exports.mkdirP = mkdirP;
/**
 * Returns path of a tool had the tool actually been invoked.  Resolves via paths.
 * If you check and the tool does not exist, it will throw.
 *
 * @param     tool              name of the tool
 * @param     check             whether to check if tool exists
 * @returns   Promise<string>   path to tool
 */
function which(tool, check) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!tool) {
            throw new Error("parameter 'tool' is required");
        }
        // recursive when check=true
        if (check) {
            const result = yield which(tool, false);
            if (!result) {
                if (ioUtil.IS_WINDOWS) {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
                }
                else {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
                }
            }
        }
        try {
            // build the list of extensions to try
            const extensions = [];
            if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
                for (const extension of process.env.PATHEXT.split(path.delimiter)) {
                    if (extension) {
                        extensions.push(extension);
                    }
                }
            }
            // if it's rooted, return it if exists. otherwise return empty.
            if (ioUtil.isRooted(tool)) {
                const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
                if (filePath) {
                    return filePath;
                }
                return '';
            }
            // if any path separators, return empty
            if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
                return '';
            }
            // build the list of directories
            //
            // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
            // it feels like we should not do this. Checking the current directory seems like more of a use
            // case of a shell, and the which() function exposed by the toolkit should strive for consistency
            // across platforms.
            const directories = [];
            if (process.env.PATH) {
                for (const p of process.env.PATH.split(path.delimiter)) {
                    if (p) {
                        directories.push(p);
                    }
                }
            }
            // return the first match
            for (const directory of directories) {
                const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);
                if (filePath) {
                    return filePath;
                }
            }
            return '';
        }
        catch (err) {
            throw new Error(`which failed with message ${err.message}`);
        }
    });
}
exports.which = which;
/**
 * Normalizes CopyOptions: `force` defaults to true when null/undefined;
 * `recursive` is coerced to a boolean.
 */
function readCopyOptions(options) {
    const force = options.force == null ? true : options.force;
    const recursive = Boolean(options.recursive);
    return { force, recursive };
}
function cpDirRecursive ( sourceDir , destDir , currentDepth , force ) {
2020-08-27 20:39:35 +08:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2020-12-06 17:56:38 +08:00
// Ensure there is not a run away recursive copy
if ( currentDepth >= 255 )
return ;
currentDepth ++ ;
yield mkdirP ( destDir ) ;
const files = yield ioUtil . readdir ( sourceDir ) ;
for ( const fileName of files ) {
const srcFile = ` ${ sourceDir } / ${ fileName } ` ;
const destFile = ` ${ destDir } / ${ fileName } ` ;
const srcFileStat = yield ioUtil . lstat ( srcFile ) ;
if ( srcFileStat . isDirectory ( ) ) {
// Recurse
yield cpDirRecursive ( srcFile , destFile , currentDepth , force ) ;
}
else {
yield copyFile ( srcFile , destFile , force ) ;
}
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
// Change the mode for the newly created directory
yield ioUtil . chmod ( destDir , ( yield ioUtil . stat ( sourceDir ) ) . mode ) ;
2020-08-27 20:39:35 +08:00
} ) ;
}
// Buffered file copy
/**
 * Copies a single file. Symlinks are re-created (unlink + symlink) rather
 * than dereferenced; regular files are copied only when the destination is
 * missing or `force` is set.
 */
function copyFile(srcFile, destFile, force) {
    return __awaiter(this, void 0, void 0, function* () {
        if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
            // unlink/re-link it
            try {
                yield ioUtil.lstat(destFile);
                yield ioUtil.unlink(destFile);
            }
            catch (e) {
                // Try to override file permission
                if (e.code === 'EPERM') {
                    yield ioUtil.chmod(destFile, '0666');
                    yield ioUtil.unlink(destFile);
                }
                // other errors = it doesn't exist, no work to do
            }
            // Copy over symlink
            const symlinkFull = yield ioUtil.readlink(srcFile);
            yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
        }
        else if (!(yield ioUtil.exists(destFile)) || force) {
            yield ioUtil.copyFile(srcFile, destFile);
        }
    });
}
//# sourceMappingURL=io.js.map
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2473 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( module , exports , _ _nccwpck _require _ _ ) {
2020-12-06 17:56:38 +08:00
"use strict" ;
2021-07-15 07:24:13 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-12-06 17:56:38 +08:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
2020-08-27 20:39:35 +08:00
} ) ;
2020-12-06 17:56:38 +08:00
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2021-07-15 07:24:13 +08:00
exports . _readLinuxVersionFile = exports . _getOsVersion = exports . _findMatch = void 0 ;
2022-08-09 17:17:26 +00:00
const semver = _ _importStar ( _ _nccwpck _require _ _ ( 5911 ) ) ;
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
2020-12-06 17:56:38 +08:00
// needs to be require for core node modules to be mocked
/* eslint @typescript-eslint/no-require-imports: 0 */
2022-08-09 17:17:26 +00:00
const os = _ _nccwpck _require _ _ ( 2037 ) ;
const cp = _ _nccwpck _require _ _ ( 2081 ) ;
const fs = _ _nccwpck _require _ _ ( 7147 ) ;
2020-12-06 17:56:38 +08:00
/**
 * Finds the first manifest candidate whose version satisfies `versionSpec`
 * and which has a file matching the arch/platform (and, when present,
 * platform_version) filters.
 *
 * @param versionSpec  semver range to satisfy
 * @param stable       when truthy, only candidates marked stable match
 * @param candidates   manifest entries, each with version/stable/files
 * @param archFilter   architecture to match (e.g. 'x64')
 * @returns the matched candidate (files narrowed to the single match), or undefined
 */
function _findMatch(versionSpec, stable, candidates, archFilter) {
    return __awaiter(this, void 0, void 0, function* () {
        const platFilter = os.platform();
        let result;
        let match;
        let file;
        for (const candidate of candidates) {
            const version = candidate.version;
            core_1.debug(`check ${version} satisfies ${versionSpec}`);
            if (semver.satisfies(version, versionSpec) &&
                (!stable || candidate.stable === stable)) {
                file = candidate.files.find(item => {
                    core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
                    let chk = item.arch === archFilter && item.platform === platFilter;
                    if (chk && item.platform_version) {
                        // accessed via module.exports so tests can stub _getOsVersion
                        const osVersion = module.exports._getOsVersion();
                        if (osVersion === item.platform_version) {
                            chk = true;
                        }
                        else {
                            chk = semver.satisfies(osVersion, item.platform_version);
                        }
                    }
                    return chk;
                });
                if (file) {
                    core_1.debug(`matched ${candidate.version}`);
                    match = candidate;
                    break;
                }
            }
        }
        if (match && file) {
            // clone since we're mutating the file list to be only the file that matches
            result = Object.assign({}, match);
            result.files = [file];
        }
        return result;
    });
}
exports._findMatch = _findMatch;
/**
 * Returns the OS version string for the current platform, or '' when
 * the platform is not handled.
 */
function _getOsVersion() {
    // TODO: add windows and other linux, arm variants
    // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)
    const plat = os.platform();
    let version = '';
    if (plat === 'darwin') {
        version = cp.execSync('sw_vers -productVersion').toString();
    }
    else if (plat === 'linux') {
        // lsb_release process not in some containers, readfile
        // Run cat /etc/lsb-release
        // DISTRIB_ID=Ubuntu
        // DISTRIB_RELEASE=18.04
        // DISTRIB_CODENAME=bionic
        // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS"
        const lsbContents = module.exports._readLinuxVersionFile();
        if (lsbContents) {
            const lines = lsbContents.split('\n');
            for (const line of lines) {
                const parts = line.split('=');
                if (parts.length === 2 &&
                    (parts[0].trim() === 'VERSION_ID' ||
                        parts[0].trim() === 'DISTRIB_RELEASE')) {
                    // strip surrounding double quotes, e.g. VERSION_ID="18.04"
                    version = parts[1]
                        .trim()
                        .replace(/^"/, '')
                        .replace(/"$/, '');
                    break;
                }
            }
        }
    }
    return version;
}
exports._getOsVersion = _getOsVersion;
/**
 * Reads the Linux release metadata file, preferring /etc/lsb-release and
 * falling back to /etc/os-release. Returns '' when neither exists.
 */
function _readLinuxVersionFile() {
    const lsbReleaseFile = '/etc/lsb-release';
    const osReleaseFile = '/etc/os-release';
    let contents = '';
    if (fs.existsSync(lsbReleaseFile)) {
        contents = fs.readFileSync(lsbReleaseFile).toString();
    }
    else if (fs.existsSync(osReleaseFile)) {
        contents = fs.readFileSync(osReleaseFile).toString();
    }
    return contents;
}
exports._readLinuxVersionFile = _readLinuxVersionFile;
//# sourceMappingURL=manifest.js.map
2020-08-27 20:39:35 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 8279 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-08-27 20:39:35 +08:00
2020-12-06 17:56:38 +08:00
"use strict" ;
2020-08-27 20:39:35 +08:00
2021-07-15 07:24:13 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-12-06 17:56:38 +08:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2021-07-15 07:24:13 +08:00
exports . RetryHelper = void 0 ;
2022-08-09 17:17:26 +00:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
2020-12-06 17:56:38 +08:00
/**
 * Internal class for retries
 */
class RetryHelper {
    /**
     * @param maxAttempts total attempts (>= 1)
     * @param minSeconds  minimum sleep between attempts (floored to int)
     * @param maxSeconds  maximum sleep between attempts (floored to int)
     * @throws Error on invalid bounds
     */
    constructor(maxAttempts, minSeconds, maxSeconds) {
        if (maxAttempts < 1) {
            throw new Error('max attempts should be greater than or equal to 1');
        }
        this.maxAttempts = maxAttempts;
        this.minSeconds = Math.floor(minSeconds);
        this.maxSeconds = Math.floor(maxSeconds);
        if (this.minSeconds > this.maxSeconds) {
            throw new Error('min seconds should be less than or equal to max seconds');
        }
    }
    /**
     * Runs `action`, retrying with a random sleep on failure.
     * @param action      async operation to attempt
     * @param isRetryable optional predicate; when it returns false the error is rethrown immediately
     */
    execute(action, isRetryable) {
        return __awaiter(this, void 0, void 0, function* () {
            let attempt = 1;
            while (attempt < this.maxAttempts) {
                // Try
                try {
                    return yield action();
                }
                catch (err) {
                    if (isRetryable && !isRetryable(err)) {
                        throw err;
                    }
                    core.info(err.message);
                }
                // Sleep
                const seconds = this.getSleepAmount();
                core.info(`Waiting ${seconds} seconds before trying again`);
                yield this.sleep(seconds);
                attempt++;
            }
            // Last attempt: let any error propagate to the caller
            return yield action();
        });
    }
    // Random integer in [minSeconds, maxSeconds]
    getSleepAmount() {
        return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
            this.minSeconds);
    }
    sleep(seconds) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise(resolve => setTimeout(resolve, seconds * 1000));
        });
    }
}
exports.RetryHelper = RetryHelper;
//# sourceMappingURL=retry-helper.js.map
2020-08-27 20:39:35 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 7784 :
2021-07-15 07:24:13 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-08-27 20:39:35 +08:00
2020-12-06 17:56:38 +08:00
"use strict" ;
2020-08-27 20:39:35 +08:00
2021-07-15 07:24:13 +08:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-12-06 17:56:38 +08:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
2020-08-27 20:39:35 +08:00
} ;
2020-12-06 17:56:38 +08:00
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2021-07-15 07:24:13 +08:00
exports . evaluateVersions = exports . isExplicitVersion = exports . findFromManifest = exports . getManifestFromRepo = exports . findAllVersions = exports . find = exports . cacheFile = exports . cacheDir = exports . extractZip = exports . extractXar = exports . extractTar = exports . extract7z = exports . downloadTool = exports . HTTPError = void 0 ;
2022-08-09 17:17:26 +00:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 2940 ) ) ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const mm = _ _importStar ( _ _nccwpck _require _ _ ( 2473 ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const httpm = _ _importStar ( _ _nccwpck _require _ _ ( 6255 ) ) ;
const semver = _ _importStar ( _ _nccwpck _require _ _ ( 5911 ) ) ;
const stream = _ _importStar ( _ _nccwpck _require _ _ ( 2781 ) ) ;
const util = _ _importStar ( _ _nccwpck _require _ _ ( 3837 ) ) ;
const assert _1 = _ _nccwpck _require _ _ ( 9491 ) ;
2021-07-15 07:24:13 +08:00
const v4 _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 824 ) ) ;
2022-08-09 17:17:26 +00:00
const exec _1 = _ _nccwpck _require _ _ ( 1514 ) ;
const retry _helper _1 = _ _nccwpck _require _ _ ( 8279 ) ;
2020-12-06 17:56:38 +08:00
/**
 * Error thrown for non-200 HTTP responses; carries the status code so
 * callers can decide whether to retry.
 */
class HTTPError extends Error {
    constructor(httpStatusCode) {
        super(`Unexpected HTTP response: ${httpStatusCode}`);
        this.httpStatusCode = httpStatusCode;
        // restore prototype chain for ES5-targeted Error subclassing
        Object.setPrototypeOf(this, new.target.prototype);
    }
}
exports.HTTPError = HTTPError;
const IS_WINDOWS = process.platform === 'win32';
const IS_MAC = process.platform === 'darwin';
const userAgent = 'actions/tool-cache';
/**
 * Download a tool from an url and stream it into a file
 *
 * @param url       url of tool to download
 * @param dest      path to download tool
 * @param auth      authorization header
 * @param headers   other headers
 * @returns         path to downloaded tool
 */
function downloadTool(url, dest, auth, headers) {
    return __awaiter(this, void 0, void 0, function* () {
        dest = dest || path.join(_getTempDirectory(), v4_1.default());
        yield io.mkdirP(path.dirname(dest));
        core.debug(`Downloading ${url}`);
        core.debug(`Destination ${dest}`);
        const maxAttempts = 3;
        const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
        const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
        const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
        return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
            return yield downloadToolAttempt(url, dest || '', auth, headers);
        }), (err) => {
            if (err instanceof HTTPError && err.httpStatusCode) {
                // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
                if (err.httpStatusCode < 500 &&
                    err.httpStatusCode !== 408 &&
                    err.httpStatusCode !== 429) {
                    return false;
                }
            }
            // Otherwise retry
            return true;
        });
    });
}
exports.downloadTool = downloadTool;
/**
 * Single download attempt: GETs `url` and streams the body to `dest`.
 * Throws HTTPError on a non-200 status; on any failure the partial `dest`
 * file is removed (best effort) so the next retry starts clean.
 */
function downloadToolAttempt(url, dest, auth, headers) {
    return __awaiter(this, void 0, void 0, function* () {
        if (fs.existsSync(dest)) {
            throw new Error(`Destination file path ${dest} already exists`);
        }
        // Get the response headers
        const http = new httpm.HttpClient(userAgent, [], {
            allowRetries: false
        });
        if (auth) {
            core.debug('set auth');
            if (headers === undefined) {
                headers = {};
            }
            headers.authorization = auth;
        }
        const response = yield http.get(url, headers);
        if (response.message.statusCode !== 200) {
            const err = new HTTPError(response.message.statusCode);
            core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
            throw err;
        }
        // Download the response body
        const pipeline = util.promisify(stream.pipeline);
        const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
        const readStream = responseMessageFactory();
        let succeeded = false;
        try {
            yield pipeline(readStream, fs.createWriteStream(dest));
            core.debug('download complete');
            succeeded = true;
            return dest;
        }
        finally {
            // Error, delete dest before retry
            if (!succeeded) {
                core.debug('download failed');
                try {
                    yield io.rmRF(dest);
                }
                catch (err) {
                    core.debug(`Failed to delete '${dest}'. ${err.message}`);
                }
            }
        }
    });
}
/ * *
* Extract a . 7 z file
*
* @ param file path to the . 7 z file
* @ param dest destination directory . Optional .
* @ param _7zPath path to 7 zr . exe . Optional , for long path support . Most . 7 z archives do not have this
* problem . If your . 7 z archive contains very long paths , you can pass the path to 7 zr . exe which will
* gracefully handle long paths . By default 7 zdec . exe is used because it is a very small program and is
* bundled with the tool lib . However it does not support long paths . 7 zr . exe is the reduced command line
* interface , it is smaller than the full command line interface , and it does support long paths . At the
* time of this writing , it is freely available from the LZMA SDK that is available on the 7 zip website .
* Be sure to check the current license agreement . If 7 zr . exe is bundled with your action , then the path
* to 7 zr . exe can be pass to this function .
* @ returns path to the destination directory
* /
function extract7z ( file , dest , _7zPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( IS _WINDOWS , 'extract7z() not supported on current OS' ) ;
assert _1 . ok ( file , 'parameter "file" is required' ) ;
dest = yield _createExtractFolder ( dest ) ;
const originalCwd = process . cwd ( ) ;
process . chdir ( dest ) ;
if ( _7zPath ) {
try {
const logLevel = core . isDebug ( ) ? '-bb1' : '-bb0' ;
const args = [
'x' ,
logLevel ,
'-bd' ,
'-sccUTF-8' ,
file
] ;
const options = {
silent : true
} ;
yield exec _1 . exec ( ` " ${ _7zPath } " ` , args , options ) ;
}
finally {
process . chdir ( originalCwd ) ;
}
}
else {
const escapedScript = path
. join ( _ _dirname , '..' , 'scripts' , 'Invoke-7zdec.ps1' )
. replace ( /'/g , "''" )
. replace ( /"|\n|\r/g , '' ) ; // double-up single quotes, remove double quotes and newlines
const escapedFile = file . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
const escapedTarget = dest . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
const command = ` & ' ${ escapedScript } ' -Source ' ${ escapedFile } ' -Target ' ${ escapedTarget } ' ` ;
const args = [
'-NoLogo' ,
'-Sta' ,
'-NoProfile' ,
'-NonInteractive' ,
'-ExecutionPolicy' ,
'Unrestricted' ,
'-Command' ,
command
] ;
const options = {
silent : true
} ;
try {
const powershellPath = yield io . which ( 'powershell' , true ) ;
yield exec _1 . exec ( ` " ${ powershellPath } " ` , args , options ) ;
}
finally {
process . chdir ( originalCwd ) ;
}
}
return dest ;
} ) ;
}
exports . extract7z = extract7z ;
/ * *
* Extract a compressed tar archive
*
* @ param file path to the tar
* @ param dest destination directory . Optional .
* @ param flags flags for the tar command to use for extraction . Defaults to 'xz' ( extracting gzipped tars ) . Optional .
* @ returns path to the destination directory
* /
function extractTar ( file , dest , flags = 'xz' ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! file ) {
throw new Error ( "parameter 'file' is required" ) ;
}
// Create dest
dest = yield _createExtractFolder ( dest ) ;
// Determine whether GNU tar
core . debug ( 'Checking tar --version' ) ;
let versionOutput = '' ;
yield exec _1 . exec ( 'tar --version' , [ ] , {
ignoreReturnCode : true ,
silent : true ,
listeners : {
stdout : ( data ) => ( versionOutput += data . toString ( ) ) ,
stderr : ( data ) => ( versionOutput += data . toString ( ) )
}
} ) ;
core . debug ( versionOutput . trim ( ) ) ;
const isGnuTar = versionOutput . toUpperCase ( ) . includes ( 'GNU TAR' ) ;
// Initialize args
let args ;
if ( flags instanceof Array ) {
args = flags ;
}
else {
args = [ flags ] ;
}
if ( core . isDebug ( ) && ! flags . includes ( 'v' ) ) {
args . push ( '-v' ) ;
}
let destArg = dest ;
let fileArg = file ;
if ( IS _WINDOWS && isGnuTar ) {
args . push ( '--force-local' ) ;
destArg = dest . replace ( /\\/g , '/' ) ;
// Technically only the dest needs to have `/` but for aesthetic consistency
// convert slashes in the file arg too.
fileArg = file . replace ( /\\/g , '/' ) ;
}
if ( isGnuTar ) {
// Suppress warnings when using GNU tar to extract archives created by BSD tar
args . push ( '--warning=no-unknown-keyword' ) ;
2021-07-15 07:24:13 +08:00
args . push ( '--overwrite' ) ;
2020-12-06 17:56:38 +08:00
}
args . push ( '-C' , destArg , '-f' , fileArg ) ;
yield exec _1 . exec ( ` tar ` , args ) ;
return dest ;
} ) ;
}
exports . extractTar = extractTar ;
/ * *
* Extract a xar compatible archive
*
* @ param file path to the archive
* @ param dest destination directory . Optional .
* @ param flags flags for the xar . Optional .
* @ returns path to the destination directory
* /
function extractXar ( file , dest , flags = [ ] ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( IS _MAC , 'extractXar() not supported on current OS' ) ;
assert _1 . ok ( file , 'parameter "file" is required' ) ;
dest = yield _createExtractFolder ( dest ) ;
let args ;
if ( flags instanceof Array ) {
args = flags ;
}
else {
args = [ flags ] ;
}
args . push ( '-x' , '-C' , dest , '-f' , file ) ;
if ( core . isDebug ( ) ) {
args . push ( '-v' ) ;
}
const xarPath = yield io . which ( 'xar' , true ) ;
yield exec _1 . exec ( ` " ${ xarPath } " ` , _unique ( args ) ) ;
return dest ;
} ) ;
}
exports . extractXar = extractXar ;
/ * *
* Extract a zip
*
* @ param file path to the zip
* @ param dest destination directory . Optional .
* @ returns path to the destination directory
* /
function extractZip ( file , dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! file ) {
throw new Error ( "parameter 'file' is required" ) ;
}
dest = yield _createExtractFolder ( dest ) ;
if ( IS _WINDOWS ) {
yield extractZipWin ( file , dest ) ;
}
else {
yield extractZipNix ( file , dest ) ;
}
return dest ;
} ) ;
}
exports . extractZip = extractZip ;
function extractZipWin ( file , dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// build the powershell command
const escapedFile = file . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ; // double-up single quotes, remove double quotes and newlines
const escapedDest = dest . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
2021-07-15 07:24:13 +08:00
const pwshPath = yield io . which ( 'pwsh' , false ) ;
//To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory
//and the -Force flag for Expand-Archive as a fallback
if ( pwshPath ) {
//attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive
const pwshCommand = [
` $ ErrorActionPreference = 'Stop' ; ` ,
` try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ; ` ,
` try { [System.IO.Compression.ZipFile]::ExtractToDirectory(' ${ escapedFile } ', ' ${ escapedDest } ', $ true) } ` ,
` catch { if (( $ _.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ( $ _.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath ' ${ escapedFile } ' -DestinationPath ' ${ escapedDest } ' -Force } else { throw $ _ } } ; `
] . join ( ' ' ) ;
const args = [
'-NoLogo' ,
'-NoProfile' ,
'-NonInteractive' ,
'-ExecutionPolicy' ,
'Unrestricted' ,
'-Command' ,
pwshCommand
] ;
core . debug ( ` Using pwsh at path: ${ pwshPath } ` ) ;
yield exec _1 . exec ( ` " ${ pwshPath } " ` , args ) ;
}
else {
const powershellCommand = [
` $ ErrorActionPreference = 'Stop' ; ` ,
` try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; ` ,
` if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath ' ${ escapedFile } ' -DestinationPath ' ${ escapedDest } ' -Force } ` ,
` else {[System.IO.Compression.ZipFile]::ExtractToDirectory(' ${ escapedFile } ', ' ${ escapedDest } ', $ true) } `
] . join ( ' ' ) ;
const args = [
'-NoLogo' ,
'-Sta' ,
'-NoProfile' ,
'-NonInteractive' ,
'-ExecutionPolicy' ,
'Unrestricted' ,
'-Command' ,
powershellCommand
] ;
const powershellPath = yield io . which ( 'powershell' , true ) ;
core . debug ( ` Using powershell at path: ${ powershellPath } ` ) ;
yield exec _1 . exec ( ` " ${ powershellPath } " ` , args ) ;
}
2020-12-06 17:56:38 +08:00
} ) ;
}
function extractZipNix ( file , dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const unzipPath = yield io . which ( 'unzip' , true ) ;
const args = [ file ] ;
if ( ! core . isDebug ( ) ) {
args . unshift ( '-q' ) ;
}
2021-07-15 07:24:13 +08:00
args . unshift ( '-o' ) ; //overwrite with -o, otherwise a prompt is shown which freezes the run
2020-12-06 17:56:38 +08:00
yield exec _1 . exec ( ` " ${ unzipPath } " ` , args , { cwd : dest } ) ;
} ) ;
}
/ * *
* Caches a directory and installs it into the tool cacheDir
*
* @ param sourceDir the directory to cache into tools
* @ param tool tool name
* @ param version version of the tool . semver format
* @ param arch architecture of the tool . Optional . Defaults to machine architecture
* /
function cacheDir ( sourceDir , tool , version , arch ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
version = semver . clean ( version ) || version ;
arch = arch || os . arch ( ) ;
core . debug ( ` Caching tool ${ tool } ${ version } ${ arch } ` ) ;
core . debug ( ` source dir: ${ sourceDir } ` ) ;
if ( ! fs . statSync ( sourceDir ) . isDirectory ( ) ) {
throw new Error ( 'sourceDir is not a directory' ) ;
}
// Create the tool dir
const destPath = yield _createToolPath ( tool , version , arch ) ;
// copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file.
for ( const itemName of fs . readdirSync ( sourceDir ) ) {
const s = path . join ( sourceDir , itemName ) ;
yield io . cp ( s , destPath , { recursive : true } ) ;
}
// write .complete
_completeToolPath ( tool , version , arch ) ;
return destPath ;
} ) ;
}
exports . cacheDir = cacheDir ;
/ * *
* Caches a downloaded file ( GUID ) and installs it
* into the tool cache with a given targetName
*
* @ param sourceFile the file to cache into tools . Typically a result of downloadTool which is a guid .
* @ param targetFile the name of the file name in the tools directory
* @ param tool tool name
* @ param version version of the tool . semver format
* @ param arch architecture of the tool . Optional . Defaults to machine architecture
* /
function cacheFile ( sourceFile , targetFile , tool , version , arch ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
version = semver . clean ( version ) || version ;
arch = arch || os . arch ( ) ;
core . debug ( ` Caching tool ${ tool } ${ version } ${ arch } ` ) ;
core . debug ( ` source file: ${ sourceFile } ` ) ;
if ( ! fs . statSync ( sourceFile ) . isFile ( ) ) {
throw new Error ( 'sourceFile is not a file' ) ;
}
// create the tool dir
const destFolder = yield _createToolPath ( tool , version , arch ) ;
// copy instead of move. move can fail on Windows due to
// anti-virus software having an open handle on a file.
const destPath = path . join ( destFolder , targetFile ) ;
core . debug ( ` destination file ${ destPath } ` ) ;
yield io . cp ( sourceFile , destPath ) ;
// write .complete
_completeToolPath ( tool , version , arch ) ;
return destFolder ;
} ) ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
exports . cacheFile = cacheFile ;
/ * *
* Finds the path to a tool version in the local installed tool cache
*
* @ param toolName name of the tool
* @ param versionSpec version of the tool
* @ param arch optional arch . defaults to arch of computer
* /
function find ( toolName , versionSpec , arch ) {
if ( ! toolName ) {
throw new Error ( 'toolName parameter is required' ) ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
if ( ! versionSpec ) {
throw new Error ( 'versionSpec parameter is required' ) ;
}
arch = arch || os . arch ( ) ;
// attempt to resolve an explicit version
2021-07-15 07:24:13 +08:00
if ( ! isExplicitVersion ( versionSpec ) ) {
2020-12-06 17:56:38 +08:00
const localVersions = findAllVersions ( toolName , arch ) ;
2021-07-15 07:24:13 +08:00
const match = evaluateVersions ( localVersions , versionSpec ) ;
2020-12-06 17:56:38 +08:00
versionSpec = match ;
}
// check for the explicit version in the cache
let toolPath = '' ;
if ( versionSpec ) {
versionSpec = semver . clean ( versionSpec ) || '' ;
const cachePath = path . join ( _getCacheDirectory ( ) , toolName , versionSpec , arch ) ;
core . debug ( ` checking cache: ${ cachePath } ` ) ;
if ( fs . existsSync ( cachePath ) && fs . existsSync ( ` ${ cachePath } .complete ` ) ) {
core . debug ( ` Found tool in cache ${ toolName } ${ versionSpec } ${ arch } ` ) ;
toolPath = cachePath ;
}
else {
core . debug ( 'not found' ) ;
}
}
return toolPath ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
exports . find = find ;
/ * *
* Finds the paths to all versions of a tool that are installed in the local tool cache
*
* @ param toolName name of the tool
* @ param arch optional arch . defaults to arch of computer
* /
function findAllVersions ( toolName , arch ) {
const versions = [ ] ;
arch = arch || os . arch ( ) ;
const toolPath = path . join ( _getCacheDirectory ( ) , toolName ) ;
if ( fs . existsSync ( toolPath ) ) {
const children = fs . readdirSync ( toolPath ) ;
for ( const child of children ) {
2021-07-15 07:24:13 +08:00
if ( isExplicitVersion ( child ) ) {
2020-12-06 17:56:38 +08:00
const fullPath = path . join ( toolPath , child , arch || '' ) ;
if ( fs . existsSync ( fullPath ) && fs . existsSync ( ` ${ fullPath } .complete ` ) ) {
versions . push ( child ) ;
}
}
}
}
return versions ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
exports . findAllVersions = findAllVersions ;
function getManifestFromRepo ( owner , repo , auth , branch = 'master' ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let releases = [ ] ;
const treeUrl = ` https://api.github.com/repos/ ${ owner } / ${ repo } /git/trees/ ${ branch } ` ;
const http = new httpm . HttpClient ( 'tool-cache' ) ;
const headers = { } ;
if ( auth ) {
core . debug ( 'set auth' ) ;
headers . authorization = auth ;
}
const response = yield http . getJson ( treeUrl , headers ) ;
if ( ! response . result ) {
return releases ;
}
let manifestUrl = '' ;
for ( const item of response . result . tree ) {
if ( item . path === 'versions-manifest.json' ) {
manifestUrl = item . url ;
break ;
}
}
headers [ 'accept' ] = 'application/vnd.github.VERSION.raw' ;
let versionsRaw = yield ( yield http . get ( manifestUrl , headers ) ) . readBody ( ) ;
if ( versionsRaw ) {
// shouldn't be needed but protects against invalid json saved with BOM
versionsRaw = versionsRaw . replace ( /^\uFEFF/ , '' ) ;
try {
releases = JSON . parse ( versionsRaw ) ;
}
catch ( _a ) {
core . debug ( 'Invalid json' ) ;
}
}
return releases ;
} ) ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
exports . getManifestFromRepo = getManifestFromRepo ;
function findFromManifest ( versionSpec , stable , manifest , archFilter = os . arch ( ) ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// wrap the internal impl
const match = yield mm . _findMatch ( versionSpec , stable , manifest , archFilter ) ;
return match ;
} ) ;
}
exports . findFromManifest = findFromManifest ;
function _createExtractFolder ( dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! dest ) {
// create a temp dir
dest = path . join ( _getTempDirectory ( ) , v4 _1 . default ( ) ) ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
yield io . mkdirP ( dest ) ;
return dest ;
} ) ;
}
function _createToolPath ( tool , version , arch ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const folderPath = path . join ( _getCacheDirectory ( ) , tool , semver . clean ( version ) || version , arch || '' ) ;
core . debug ( ` destination ${ folderPath } ` ) ;
const markerPath = ` ${ folderPath } .complete ` ;
yield io . rmRF ( folderPath ) ;
yield io . rmRF ( markerPath ) ;
yield io . mkdirP ( folderPath ) ;
return folderPath ;
} ) ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
function _completeToolPath ( tool , version , arch ) {
const folderPath = path . join ( _getCacheDirectory ( ) , tool , semver . clean ( version ) || version , arch || '' ) ;
const markerPath = ` ${ folderPath } .complete ` ;
fs . writeFileSync ( markerPath , '' ) ;
core . debug ( 'finished caching tool' ) ;
2020-08-27 20:39:35 +08:00
}
2021-07-15 07:24:13 +08:00
/ * *
* Check if version string is explicit
*
* @ param versionSpec version string to check
* /
function isExplicitVersion ( versionSpec ) {
2020-12-06 17:56:38 +08:00
const c = semver . clean ( versionSpec ) || '' ;
core . debug ( ` isExplicit: ${ c } ` ) ;
const valid = semver . valid ( c ) != null ;
core . debug ( ` explicit? ${ valid } ` ) ;
return valid ;
2020-08-27 20:39:35 +08:00
}
2021-07-15 07:24:13 +08:00
exports . isExplicitVersion = isExplicitVersion ;
/ * *
* Get the highest satisfiying semantic version in ` versions ` which satisfies ` versionSpec `
*
* @ param versions array of versions to evaluate
* @ param versionSpec semantic version spec to satisfy
* /
function evaluateVersions ( versions , versionSpec ) {
2020-12-06 17:56:38 +08:00
let version = '' ;
core . debug ( ` evaluating ${ versions . length } versions ` ) ;
versions = versions . sort ( ( a , b ) => {
if ( semver . gt ( a , b ) ) {
return 1 ;
}
return - 1 ;
} ) ;
for ( let i = versions . length - 1 ; i >= 0 ; i -- ) {
const potential = versions [ i ] ;
const satisfied = semver . satisfies ( potential , versionSpec ) ;
if ( satisfied ) {
version = potential ;
break ;
}
}
if ( version ) {
core . debug ( ` matched: ${ version } ` ) ;
}
else {
core . debug ( 'match not found' ) ;
}
return version ;
2020-08-27 20:39:35 +08:00
}
2021-07-15 07:24:13 +08:00
exports . evaluateVersions = evaluateVersions ;
2020-12-06 17:56:38 +08:00
/ * *
* Gets RUNNER _TOOL _CACHE
* /
function _getCacheDirectory ( ) {
const cacheDirectory = process . env [ 'RUNNER_TOOL_CACHE' ] || '' ;
assert _1 . ok ( cacheDirectory , 'Expected RUNNER_TOOL_CACHE to be defined' ) ;
return cacheDirectory ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
/ * *
* Gets RUNNER _TEMP
* /
function _getTempDirectory ( ) {
const tempDirectory = process . env [ 'RUNNER_TEMP' ] || '' ;
assert _1 . ok ( tempDirectory , 'Expected RUNNER_TEMP to be defined' ) ;
return tempDirectory ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
/ * *
* Gets a global variable
* /
function _getGlobal ( key , defaultValue ) {
/* eslint-disable @typescript-eslint/no-explicit-any */
const value = global [ key ] ;
/* eslint-enable @typescript-eslint/no-explicit-any */
return value !== undefined ? value : defaultValue ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
/ * *
* Returns an array of unique values .
* @ param values Values to make unique .
* /
function _unique ( values ) {
return Array . from ( new Set ( values ) ) ;
}
//# sourceMappingURL=tool-cache.js.map
2020-08-27 20:39:35 +08:00
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2020-08-27 20:39:35 +08:00
2021-07-16 17:22:55 +08:00
/***/ 71 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _a ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getCmdPath = exports . tryGetExecutablePath = exports . isRooted = exports . isDirectory = exports . exists = exports . IS _WINDOWS = exports . unlink = exports . symlink = exports . stat = exports . rmdir = exports . rename = exports . readlink = exports . readdir = exports . mkdir = exports . lstat = exports . copyFile = exports . chmod = void 0 ;
2022-08-09 17:17:26 +00:00
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
2021-07-16 17:22:55 +08:00
_a = fs . promises , exports . chmod = _a . chmod , exports . copyFile = _a . copyFile , exports . lstat = _a . lstat , exports . mkdir = _a . mkdir , exports . readdir = _a . readdir , exports . readlink = _a . readlink , exports . rename = _a . rename , exports . rmdir = _a . rmdir , exports . stat = _a . stat , exports . symlink = _a . symlink , exports . unlink = _a . unlink ;
exports . IS _WINDOWS = process . platform === 'win32' ;
function exists ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield exports . stat ( fsPath ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
return false ;
}
throw err ;
}
return true ;
} ) ;
}
exports . exists = exists ;
function isDirectory ( fsPath , useStat = false ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const stats = useStat ? yield exports . stat ( fsPath ) : yield exports . lstat ( fsPath ) ;
return stats . isDirectory ( ) ;
} ) ;
}
exports . isDirectory = isDirectory ;
/ * *
* On OSX / Linux , true if path starts with '/' . On Windows , true for paths like :
* \ , \ hello , \ \ hello \ share , C : , and C : \ hello ( and corresponding alternate separator cases ) .
* /
function isRooted ( p ) {
p = normalizeSeparators ( p ) ;
if ( ! p ) {
throw new Error ( 'isRooted() parameter "p" cannot be empty' ) ;
}
if ( exports . IS _WINDOWS ) {
return ( p . startsWith ( '\\' ) || /^[A-Z]:/i . test ( p ) // e.g. \ or \hello or \\hello
) ; // e.g. C: or C:\hello
}
return p . startsWith ( '/' ) ;
}
exports . isRooted = isRooted ;
/ * *
* Best effort attempt to determine whether a file exists and is executable .
* @ param filePath file path to check
* @ param extensions additional file extensions to try
* @ return if file exists and is executable , returns the file path . otherwise empty string .
* /
function tryGetExecutablePath ( filePath , extensions ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stats = undefined ;
try {
// test file exists
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// on Windows, test for valid extension
const upperExt = path . extname ( filePath ) . toUpperCase ( ) ;
if ( extensions . some ( validExt => validExt . toUpperCase ( ) === upperExt ) ) {
return filePath ;
}
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
// try each extension
const originalFilePath = filePath ;
for ( const extension of extensions ) {
filePath = originalFilePath + extension ;
stats = undefined ;
try {
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// preserve the case of the actual file (since an extension was appended)
try {
const directory = path . dirname ( filePath ) ;
const upperName = path . basename ( filePath ) . toUpperCase ( ) ;
for ( const actualName of yield exports . readdir ( directory ) ) {
if ( upperName === actualName . toUpperCase ( ) ) {
filePath = path . join ( directory , actualName ) ;
break ;
}
}
}
catch ( err ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine the actual case of the file ' ${ filePath } ': ${ err } ` ) ;
}
return filePath ;
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
}
return '' ;
} ) ;
}
exports . tryGetExecutablePath = tryGetExecutablePath ;
function normalizeSeparators ( p ) {
p = p || '' ;
if ( exports . IS _WINDOWS ) {
// convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// remove redundant slashes
return p . replace ( /\\\\+/g , '\\' ) ;
}
// remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
function isUnixExecutable ( stats ) {
return ( ( stats . mode & 1 ) > 0 ||
( ( stats . mode & 8 ) > 0 && stats . gid === process . getgid ( ) ) ||
( ( stats . mode & 64 ) > 0 && stats . uid === process . getuid ( ) ) ) ;
}
// Get the path of cmd.exe in windows
function getCmdPath ( ) {
var _a ;
return ( _a = process . env [ 'COMSPEC' ] ) !== null && _a !== void 0 ? _a : ` cmd.exe ` ;
}
exports . getCmdPath = getCmdPath ;
//# sourceMappingURL=io-util.js.map
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2940 :
2021-07-16 17:22:55 +08:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . findInPath = exports . which = exports . mkdirP = exports . rmRF = exports . mv = exports . cp = void 0 ;
2022-08-09 17:17:26 +00:00
const assert _1 = _ _nccwpck _require _ _ ( 9491 ) ;
const childProcess = _ _importStar ( _ _nccwpck _require _ _ ( 2081 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const util _1 = _ _nccwpck _require _ _ ( 3837 ) ;
2021-07-16 17:22:55 +08:00
const ioUtil = _ _importStar ( _ _nccwpck _require _ _ ( 71 ) ) ;
const exec = util _1 . promisify ( childProcess . exec ) ;
const execFile = util _1 . promisify ( childProcess . execFile ) ;
/ * *
* Copies a file or folder .
* Based off of shelljs - https : //github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
*
* @ param source source path
* @ param dest destination path
* @ param options optional . See CopyOptions .
* /
function cp ( source , dest , options = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const { force , recursive , copySourceDirectory } = readCopyOptions ( options ) ;
const destStat = ( yield ioUtil . exists ( dest ) ) ? yield ioUtil . stat ( dest ) : null ;
// Dest is an existing file, but not forcing
if ( destStat && destStat . isFile ( ) && ! force ) {
return ;
}
// If dest is an existing directory, should copy inside.
const newDest = destStat && destStat . isDirectory ( ) && copySourceDirectory
? path . join ( dest , path . basename ( source ) )
: dest ;
if ( ! ( yield ioUtil . exists ( source ) ) ) {
throw new Error ( ` no such file or directory: ${ source } ` ) ;
}
const sourceStat = yield ioUtil . stat ( source ) ;
if ( sourceStat . isDirectory ( ) ) {
if ( ! recursive ) {
throw new Error ( ` Failed to copy. ${ source } is a directory, but tried to copy without recursive flag. ` ) ;
}
else {
yield cpDirRecursive ( source , newDest , 0 , force ) ;
}
}
else {
if ( path . relative ( source , newDest ) === '' ) {
// a file cannot be copied to itself
throw new Error ( ` ' ${ newDest } ' and ' ${ source } ' are the same file ` ) ;
}
yield copyFile ( source , newDest , force ) ;
}
} ) ;
}
exports . cp = cp ;
/ * *
* Moves a path .
*
* @ param source source path
* @ param dest destination path
* @ param options optional . See MoveOptions .
* /
function mv ( source , dest , options = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( yield ioUtil . exists ( dest ) ) {
let destExists = true ;
if ( yield ioUtil . isDirectory ( dest ) ) {
// If dest is directory copy src into dest
dest = path . join ( dest , path . basename ( source ) ) ;
destExists = yield ioUtil . exists ( dest ) ;
}
if ( destExists ) {
if ( options . force == null || options . force ) {
yield rmRF ( dest ) ;
}
else {
throw new Error ( 'Destination already exists' ) ;
}
}
}
yield mkdirP ( path . dirname ( dest ) ) ;
yield ioUtil . rename ( source , dest ) ;
} ) ;
}
exports . mv = mv ;
/ * *
* Remove a path recursively with force
*
* @ param inputPath path to remove
* /
function rmRF ( inputPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ioUtil . IS _WINDOWS ) {
// Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
// program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
// Check for invalid characters
// https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
if ( /[*"<>|]/ . test ( inputPath ) ) {
throw new Error ( 'File path must not contain `*`, `"`, `<`, `>` or `|` on Windows' ) ;
}
try {
const cmdPath = ioUtil . getCmdPath ( ) ;
if ( yield ioUtil . isDirectory ( inputPath , true ) ) {
yield exec ( ` ${ cmdPath } /s /c "rd /s /q "%inputPath%"" ` , {
env : { inputPath }
} ) ;
}
else {
yield exec ( ` ${ cmdPath } /s /c "del /f /a "%inputPath%"" ` , {
env : { inputPath }
} ) ;
}
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
}
// Shelling out fails to remove a symlink folder with missing source, this unlink catches that
try {
yield ioUtil . unlink ( inputPath ) ;
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
}
}
else {
let isDir = false ;
try {
isDir = yield ioUtil . isDirectory ( inputPath ) ;
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
return ;
}
if ( isDir ) {
yield execFile ( ` rm ` , [ ` -rf ` , ` ${ inputPath } ` ] ) ;
}
else {
yield ioUtil . unlink ( inputPath ) ;
}
}
} ) ;
}
exports . rmRF = rmRF ;
/ * *
* Make a directory . Creates the full path with folders in between
* Will throw if it fails
*
* @ param fsPath path to create
* @ returns Promise < void >
* /
function mkdirP ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( fsPath , 'a path argument must be provided' ) ;
yield ioUtil . mkdir ( fsPath , { recursive : true } ) ;
} ) ;
}
exports . mkdirP = mkdirP ;
/ * *
* Returns path of a tool had the tool actually been invoked . Resolves via paths .
* If you check and the tool does not exist , it will throw .
*
* @ param tool name of the tool
* @ param check whether to check if tool exists
* @ returns Promise < string > path to tool
* /
function which ( tool , check ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! tool ) {
throw new Error ( "parameter 'tool' is required" ) ;
}
// recursive when check=true
if ( check ) {
const result = yield which ( tool , false ) ;
if ( ! result ) {
if ( ioUtil . IS _WINDOWS ) {
throw new Error ( ` Unable to locate executable file: ${ tool } . Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file. ` ) ;
}
else {
throw new Error ( ` Unable to locate executable file: ${ tool } . Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable. ` ) ;
}
}
return result ;
}
const matches = yield findInPath ( tool ) ;
if ( matches && matches . length > 0 ) {
return matches [ 0 ] ;
}
return '' ;
} ) ;
}
exports . which = which ;
/ * *
* Returns a list of all occurrences of the given tool on the system path .
*
* @ returns Promise < string [ ] > the paths of the tool
* /
function findInPath ( tool ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! tool ) {
throw new Error ( "parameter 'tool' is required" ) ;
}
// build the list of extensions to try
const extensions = [ ] ;
if ( ioUtil . IS _WINDOWS && process . env [ 'PATHEXT' ] ) {
for ( const extension of process . env [ 'PATHEXT' ] . split ( path . delimiter ) ) {
if ( extension ) {
extensions . push ( extension ) ;
}
}
}
// if it's rooted, return it if exists. otherwise return empty.
if ( ioUtil . isRooted ( tool ) ) {
const filePath = yield ioUtil . tryGetExecutablePath ( tool , extensions ) ;
if ( filePath ) {
return [ filePath ] ;
}
return [ ] ;
}
// if any path separators, return empty
if ( tool . includes ( path . sep ) ) {
return [ ] ;
}
// build the list of directories
//
// Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
// it feels like we should not do this. Checking the current directory seems like more of a use
// case of a shell, and the which() function exposed by the toolkit should strive for consistency
// across platforms.
const directories = [ ] ;
if ( process . env . PATH ) {
for ( const p of process . env . PATH . split ( path . delimiter ) ) {
if ( p ) {
directories . push ( p ) ;
}
}
}
// find all matches
const matches = [ ] ;
for ( const directory of directories ) {
const filePath = yield ioUtil . tryGetExecutablePath ( path . join ( directory , tool ) , extensions ) ;
if ( filePath ) {
matches . push ( filePath ) ;
}
}
return matches ;
} ) ;
}
exports . findInPath = findInPath ;
function readCopyOptions ( options ) {
const force = options . force == null ? true : options . force ;
const recursive = Boolean ( options . recursive ) ;
const copySourceDirectory = options . copySourceDirectory == null
? true
: Boolean ( options . copySourceDirectory ) ;
return { force , recursive , copySourceDirectory } ;
}
function cpDirRecursive ( sourceDir , destDir , currentDepth , force ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Ensure there is not a run away recursive copy
if ( currentDepth >= 255 )
return ;
currentDepth ++ ;
yield mkdirP ( destDir ) ;
const files = yield ioUtil . readdir ( sourceDir ) ;
for ( const fileName of files ) {
const srcFile = ` ${ sourceDir } / ${ fileName } ` ;
const destFile = ` ${ destDir } / ${ fileName } ` ;
const srcFileStat = yield ioUtil . lstat ( srcFile ) ;
if ( srcFileStat . isDirectory ( ) ) {
// Recurse
yield cpDirRecursive ( srcFile , destFile , currentDepth , force ) ;
}
else {
yield copyFile ( srcFile , destFile , force ) ;
}
}
// Change the mode for the newly created directory
yield ioUtil . chmod ( destDir , ( yield ioUtil . stat ( sourceDir ) ) . mode ) ;
} ) ;
}
// Buffered file copy
function copyFile ( srcFile , destFile , force ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ( yield ioUtil . lstat ( srcFile ) ) . isSymbolicLink ( ) ) {
// unlink/re-link it
try {
yield ioUtil . lstat ( destFile ) ;
yield ioUtil . unlink ( destFile ) ;
}
catch ( e ) {
// Try to override file permission
if ( e . code === 'EPERM' ) {
yield ioUtil . chmod ( destFile , '0666' ) ;
yield ioUtil . unlink ( destFile ) ;
}
// other errors = it doesn't exist, no work to do
}
// Copy over symlink
const symlinkFull = yield ioUtil . readlink ( srcFile ) ;
yield ioUtil . symlink ( symlinkFull , destFile , ioUtil . IS _WINDOWS ? 'junction' : null ) ;
}
else if ( ! ( yield ioUtil . exists ( destFile ) ) || force ) {
yield ioUtil . copyFile ( srcFile , destFile ) ;
}
} ) ;
}
//# sourceMappingURL=io.js.map
/***/ } ) ,
2022-12-25 13:58:23 +08:00
/***/ 1231 :
2023-06-05 00:17:20 +00:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-08-27 20:39:35 +08:00
2022-12-25 13:58:23 +08:00
"use strict" ;
2020-08-27 20:39:35 +08:00
2023-06-05 00:17:20 +00:00
var _ _defProp = Object . defineProperty ;
var _ _getOwnPropDesc = Object . getOwnPropertyDescriptor ;
var _ _getOwnPropNames = Object . getOwnPropertyNames ;
var _ _hasOwnProp = Object . prototype . hasOwnProperty ;
var _ _export = ( target , all ) => {
for ( var name in all )
_ _defProp ( target , name , { get : all [ name ] , enumerable : true } ) ;
} ;
var _ _copyProps = ( to , from , except , desc ) => {
if ( from && typeof from === "object" || typeof from === "function" ) {
for ( let key of _ _getOwnPropNames ( from ) )
if ( ! _ _hasOwnProp . call ( to , key ) && key !== except )
_ _defProp ( to , key , { get : ( ) => from [ key ] , enumerable : ! ( desc = _ _getOwnPropDesc ( from , key ) ) || desc . enumerable } ) ;
}
return to ;
} ;
var _ _toCommonJS = ( mod ) => _ _copyProps ( _ _defProp ( { } , "__esModule" , { value : true } ) , mod ) ;
2020-08-27 20:39:35 +08:00
2023-06-05 00:17:20 +00:00
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
2023-07-27 11:01:06 +00:00
Octokit : ( ) => Octokit ,
customFetch : ( ) => customFetch ,
getProxyAgent : ( ) => getProxyAgent
2023-06-05 00:17:20 +00:00
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
var import _core = _ _nccwpck _require _ _ ( 8134 ) ;
var import _auth _action = _ _nccwpck _require _ _ ( 20 ) ;
var import _plugin _paginate _rest = _ _nccwpck _require _ _ ( 9331 ) ;
var import _plugin _rest _endpoint _methods = _ _nccwpck _require _ _ ( 8528 ) ;
2020-08-27 20:39:35 +08:00
2023-06-05 00:17:20 +00:00
// pkg/dist-src/version.js
2023-07-27 11:01:06 +00:00
var VERSION = "6.0.5" ;
2020-08-27 20:39:35 +08:00
2023-06-05 00:17:20 +00:00
// pkg/dist-src/index.js
2023-07-27 11:01:06 +00:00
var import _undici = _ _nccwpck _require _ _ ( 1773 ) ;
2023-06-05 00:17:20 +00:00
var DEFAULTS = {
authStrategy : import _auth _action . createActionAuth ,
2022-12-25 13:58:23 +08:00
baseUrl : getApiBaseUrl ( ) ,
userAgent : ` octokit-action.js/ ${ VERSION } `
} ;
function getProxyAgent ( ) {
const httpProxy = process . env [ "HTTP_PROXY" ] || process . env [ "http_proxy" ] ;
if ( httpProxy ) {
2023-07-27 11:01:06 +00:00
return new import _undici . ProxyAgent ( httpProxy ) ;
2022-12-25 13:58:23 +08:00
}
const httpsProxy = process . env [ "HTTPS_PROXY" ] || process . env [ "https_proxy" ] ;
if ( httpsProxy ) {
2023-07-27 11:01:06 +00:00
return new import _undici . ProxyAgent ( httpsProxy ) ;
2022-12-25 13:58:23 +08:00
}
2023-06-05 00:17:20 +00:00
return void 0 ;
2022-12-25 13:58:23 +08:00
}
2023-07-27 11:01:06 +00:00
var customFetch = async function ( url , opts ) {
return await ( 0 , import _undici . fetch ) ( url , {
dispatcher : getProxyAgent ( ) ,
... opts
} ) ;
} ;
2023-06-05 00:17:20 +00:00
var Octokit = import _core . Octokit . plugin (
import _plugin _paginate _rest . paginateRest ,
import _plugin _rest _endpoint _methods . legacyRestEndpointMethods
) . defaults ( function buildDefaults ( options ) {
2023-01-20 23:52:29 +00:00
return {
... DEFAULTS ,
2022-12-25 13:58:23 +08:00
... options ,
request : {
2023-07-27 11:01:06 +00:00
fetch : customFetch ,
2022-12-25 13:58:23 +08:00
... options . request
}
} ;
} ) ;
function getApiBaseUrl ( ) {
return process . env [ "GITHUB_API_URL" ] || "https://api.github.com" ;
}
2023-06-05 00:17:20 +00:00
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2020-12-06 17:56:38 +08:00
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2020-12-06 17:56:38 +08:00
2022-12-25 13:58:23 +08:00
/***/ 1793 :
2023-06-07 23:17:45 +00:00
/***/ ( ( module ) => {
2020-12-06 17:56:38 +08:00
2022-12-25 13:58:23 +08:00
"use strict" ;
2020-12-06 17:56:38 +08:00
2023-06-07 23:17:45 +00:00
var _ _defProp = Object . defineProperty ;
var _ _getOwnPropDesc = Object . getOwnPropertyDescriptor ;
var _ _getOwnPropNames = Object . getOwnPropertyNames ;
var _ _hasOwnProp = Object . prototype . hasOwnProperty ;
var _ _export = ( target , all ) => {
for ( var name in all )
_ _defProp ( target , name , { get : all [ name ] , enumerable : true } ) ;
} ;
var _ _copyProps = ( to , from , except , desc ) => {
if ( from && typeof from === "object" || typeof from === "function" ) {
for ( let key of _ _getOwnPropNames ( from ) )
if ( ! _ _hasOwnProp . call ( to , key ) && key !== except )
_ _defProp ( to , key , { get : ( ) => from [ key ] , enumerable : ! ( desc = _ _getOwnPropDesc ( from , key ) ) || desc . enumerable } ) ;
}
return to ;
} ;
var _ _toCommonJS = ( mod ) => _ _copyProps ( _ _defProp ( { } , "__esModule" , { value : true } ) , mod ) ;
2020-12-06 17:56:38 +08:00
2023-06-07 23:17:45 +00:00
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
createTokenAuth : ( ) => createTokenAuth
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
2020-12-06 17:56:38 +08:00
2023-06-07 23:17:45 +00:00
// pkg/dist-src/auth.js
var REGEX _IS _INSTALLATION _LEGACY = /^v1\./ ;
var REGEX _IS _INSTALLATION = /^ghs_/ ;
var REGEX _IS _USER _TO _SERVER = /^ghu_/ ;
2022-12-25 13:58:23 +08:00
async function auth ( token ) {
const isApp = token . split ( /\./ ) . length === 3 ;
const isInstallation = REGEX _IS _INSTALLATION _LEGACY . test ( token ) || REGEX _IS _INSTALLATION . test ( token ) ;
const isUserToServer = REGEX _IS _USER _TO _SERVER . test ( token ) ;
const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth" ;
return {
type : "token" ,
2023-06-07 23:17:45 +00:00
token ,
2022-12-25 13:58:23 +08:00
tokenType
} ;
}
2020-12-06 17:56:38 +08:00
2023-06-07 23:17:45 +00:00
// pkg/dist-src/with-authorization-prefix.js
2022-12-25 13:58:23 +08:00
function withAuthorizationPrefix ( token ) {
if ( token . split ( /\./ ) . length === 3 ) {
return ` bearer ${ token } ` ;
}
return ` token ${ token } ` ;
}
2020-12-06 17:56:38 +08:00
2023-06-07 23:17:45 +00:00
// pkg/dist-src/hook.js
2022-12-25 13:58:23 +08:00
async function hook ( token , request , route , parameters ) {
2023-06-07 23:17:45 +00:00
const endpoint = request . endpoint . merge (
route ,
parameters
) ;
2022-12-25 13:58:23 +08:00
endpoint . headers . authorization = withAuthorizationPrefix ( token ) ;
return request ( endpoint ) ;
}
2020-12-06 17:56:38 +08:00
2023-06-07 23:17:45 +00:00
// pkg/dist-src/index.js
var createTokenAuth = function createTokenAuth2 ( token ) {
2022-12-25 13:58:23 +08:00
if ( ! token ) {
throw new Error ( "[@octokit/auth-token] No token passed to createTokenAuth" ) ;
}
if ( typeof token !== "string" ) {
2023-06-07 23:17:45 +00:00
throw new Error (
"[@octokit/auth-token] Token passed to createTokenAuth is not a string"
) ;
2022-12-25 13:58:23 +08:00
}
token = token . replace ( /^(token|bearer) +/i , "" ) ;
return Object . assign ( auth . bind ( null , token ) , {
hook : hook . bind ( null , token )
} ) ;
} ;
2023-06-07 23:17:45 +00:00
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2020-12-06 17:56:38 +08:00
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2020-12-06 17:56:38 +08:00
2022-12-25 13:58:23 +08:00
/***/ 8134 :
2023-05-25 21:58:02 +00:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-12-06 17:56:38 +08:00
2022-12-25 13:58:23 +08:00
"use strict" ;
2020-12-06 17:56:38 +08:00
2023-05-25 21:58:02 +00:00
var _ _defProp = Object . defineProperty ;
var _ _getOwnPropDesc = Object . getOwnPropertyDescriptor ;
var _ _getOwnPropNames = Object . getOwnPropertyNames ;
var _ _hasOwnProp = Object . prototype . hasOwnProperty ;
var _ _export = ( target , all ) => {
for ( var name in all )
_ _defProp ( target , name , { get : all [ name ] , enumerable : true } ) ;
} ;
var _ _copyProps = ( to , from , except , desc ) => {
if ( from && typeof from === "object" || typeof from === "function" ) {
for ( let key of _ _getOwnPropNames ( from ) )
if ( ! _ _hasOwnProp . call ( to , key ) && key !== except )
_ _defProp ( to , key , { get : ( ) => from [ key ] , enumerable : ! ( desc = _ _getOwnPropDesc ( from , key ) ) || desc . enumerable } ) ;
}
return to ;
} ;
var _ _toCommonJS = ( mod ) => _ _copyProps ( _ _defProp ( { } , "__esModule" , { value : true } ) , mod ) ;
2020-12-06 17:56:38 +08:00
2023-05-25 21:58:02 +00:00
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
Octokit : ( ) => Octokit
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
var import _universal _user _agent = _ _nccwpck _require _ _ ( 5030 ) ;
var import _before _after _hook = _ _nccwpck _require _ _ ( 3682 ) ;
var import _request = _ _nccwpck _require _ _ ( 6094 ) ;
var import _graphql = _ _nccwpck _require _ _ ( 3526 ) ;
var import _auth _token = _ _nccwpck _require _ _ ( 1793 ) ;
// pkg/dist-src/version.js
2023-07-27 11:01:06 +00:00
var VERSION = "5.0.0" ;
2023-05-25 21:58:02 +00:00
// pkg/dist-src/index.js
var Octokit = class {
2023-07-27 11:01:06 +00:00
static {
this . VERSION = VERSION ;
}
2023-05-25 21:58:02 +00:00
static defaults ( defaults ) {
const OctokitWithDefaults = class extends this {
constructor ( ... args ) {
const options = args [ 0 ] || { } ;
if ( typeof defaults === "function" ) {
super ( defaults ( options ) ) ;
return ;
}
super (
Object . assign (
{ } ,
defaults ,
options ,
options . userAgent && defaults . userAgent ? {
userAgent : ` ${ options . userAgent } ${ defaults . userAgent } `
} : null
)
) ;
}
} ;
return OctokitWithDefaults ;
}
2023-07-27 11:01:06 +00:00
static {
this . plugins = [ ] ;
}
2023-05-25 21:58:02 +00:00
/ * *
* Attach a plugin ( or many ) to your Octokit instance .
*
* @ example
* const API = Octokit . plugin ( plugin1 , plugin2 , plugin3 , ... )
* /
static plugin ( ... newPlugins ) {
const currentPlugins = this . plugins ;
2023-07-27 11:01:06 +00:00
const NewOctokit = class extends this {
static {
this . plugins = currentPlugins . concat (
newPlugins . filter ( ( plugin ) => ! currentPlugins . includes ( plugin ) )
) ;
}
} ;
2023-05-25 21:58:02 +00:00
return NewOctokit ;
}
2022-12-25 13:58:23 +08:00
constructor ( options = { } ) {
2023-05-25 21:58:02 +00:00
const hook = new import _before _after _hook . Collection ( ) ;
2022-12-25 13:58:23 +08:00
const requestDefaults = {
2023-05-25 21:58:02 +00:00
baseUrl : import _request . request . endpoint . DEFAULTS . baseUrl ,
2022-12-25 13:58:23 +08:00
headers : { } ,
request : Object . assign ( { } , options . request , {
// @ts-ignore internal usage only, no need to type
hook : hook . bind ( null , "request" )
} ) ,
mediaType : {
previews : [ ] ,
format : ""
}
2023-05-25 21:58:02 +00:00
} ;
requestDefaults . headers [ "user-agent" ] = [
options . userAgent ,
` octokit-core.js/ ${ VERSION } ${ ( 0 , import _universal _user _agent . getUserAgent ) ( ) } `
] . filter ( Boolean ) . join ( " " ) ;
2022-12-25 13:58:23 +08:00
if ( options . baseUrl ) {
requestDefaults . baseUrl = options . baseUrl ;
}
if ( options . previews ) {
requestDefaults . mediaType . previews = options . previews ;
}
if ( options . timeZone ) {
requestDefaults . headers [ "time-zone" ] = options . timeZone ;
}
2023-05-25 21:58:02 +00:00
this . request = import _request . request . defaults ( requestDefaults ) ;
this . graphql = ( 0 , import _graphql . withCustomRequest ) ( this . request ) . defaults ( requestDefaults ) ;
this . log = Object . assign (
{
debug : ( ) => {
} ,
info : ( ) => {
} ,
warn : console . warn . bind ( console ) ,
error : console . error . bind ( console )
} ,
options . log
) ;
this . hook = hook ;
2022-12-25 13:58:23 +08:00
if ( ! options . authStrategy ) {
if ( ! options . auth ) {
this . auth = async ( ) => ( {
type : "unauthenticated"
} ) ;
} else {
2023-05-25 21:58:02 +00:00
const auth = ( 0 , import _auth _token . createTokenAuth ) ( options . auth ) ;
2022-12-25 13:58:23 +08:00
hook . wrap ( "request" , auth . hook ) ;
this . auth = auth ;
}
} else {
2023-05-25 21:58:02 +00:00
const { authStrategy , ... otherOptions } = options ;
const auth = authStrategy (
Object . assign (
{
request : this . request ,
log : this . log ,
// we pass the current octokit instance as well as its constructor options
// to allow for authentication strategies that return a new octokit instance
// that shares the same internal state as the current one. The original
// requirement for this was the "event-octokit" authentication strategy
// of https://github.com/probot/octokit-auth-probot.
octokit : this ,
octokitOptions : otherOptions
} ,
options . auth
)
) ;
2022-12-25 13:58:23 +08:00
hook . wrap ( "request" , auth . hook ) ;
this . auth = auth ;
2023-05-25 21:58:02 +00:00
}
2022-12-25 13:58:23 +08:00
const classConstructor = this . constructor ;
2023-05-25 21:58:02 +00:00
classConstructor . plugins . forEach ( ( plugin ) => {
2022-12-25 13:58:23 +08:00
Object . assign ( this , plugin ( this , options ) ) ;
} ) ;
}
2023-05-25 21:58:02 +00:00
} ;
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 3348 :
2023-06-18 08:35:17 +00:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-06-18 08:35:17 +00:00
var _ _defProp = Object . defineProperty ;
var _ _getOwnPropDesc = Object . getOwnPropertyDescriptor ;
var _ _getOwnPropNames = Object . getOwnPropertyNames ;
var _ _hasOwnProp = Object . prototype . hasOwnProperty ;
var _ _export = ( target , all ) => {
for ( var name in all )
_ _defProp ( target , name , { get : all [ name ] , enumerable : true } ) ;
} ;
var _ _copyProps = ( to , from , except , desc ) => {
if ( from && typeof from === "object" || typeof from === "function" ) {
for ( let key of _ _getOwnPropNames ( from ) )
if ( ! _ _hasOwnProp . call ( to , key ) && key !== except )
_ _defProp ( to , key , { get : ( ) => from [ key ] , enumerable : ! ( desc = _ _getOwnPropDesc ( from , key ) ) || desc . enumerable } ) ;
}
return to ;
} ;
var _ _toCommonJS = ( mod ) => _ _copyProps ( _ _defProp ( { } , "__esModule" , { value : true } ) , mod ) ;
2022-12-25 13:58:23 +08:00
2023-06-18 08:35:17 +00:00
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
endpoint : ( ) => endpoint
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// pkg/dist-src/defaults.js
var import _universal _user _agent = _ _nccwpck _require _ _ ( 5030 ) ;
// pkg/dist-src/version.js
var VERSION = "9.0.0" ;
// pkg/dist-src/defaults.js
var userAgent = ` octokit-endpoint.js/ ${ VERSION } ${ ( 0 , import _universal _user _agent . getUserAgent ) ( ) } ` ;
var DEFAULTS = {
method : "GET" ,
baseUrl : "https://api.github.com" ,
headers : {
accept : "application/vnd.github.v3+json" ,
"user-agent" : userAgent
} ,
mediaType : {
format : ""
}
} ;
2023-06-18 08:35:17 +00:00
// pkg/dist-src/util/lowercase-keys.js
2022-12-25 13:58:23 +08:00
function lowercaseKeys ( object ) {
if ( ! object ) {
return { } ;
}
return Object . keys ( object ) . reduce ( ( newObj , key ) => {
newObj [ key . toLowerCase ( ) ] = object [ key ] ;
return newObj ;
} , { } ) ;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/util/merge-deep.js
var import _is _plain _object = _ _nccwpck _require _ _ ( 3287 ) ;
2022-12-25 13:58:23 +08:00
function mergeDeep ( defaults , options ) {
const result = Object . assign ( { } , defaults ) ;
2023-06-18 08:35:17 +00:00
Object . keys ( options ) . forEach ( ( key ) => {
if ( ( 0 , import _is _plain _object . isPlainObject ) ( options [ key ] ) ) {
if ( ! ( key in defaults ) )
Object . assign ( result , { [ key ] : options [ key ] } ) ;
else
result [ key ] = mergeDeep ( defaults [ key ] , options [ key ] ) ;
2022-12-25 13:58:23 +08:00
} else {
2023-06-18 08:35:17 +00:00
Object . assign ( result , { [ key ] : options [ key ] } ) ;
2022-12-25 13:58:23 +08:00
}
} ) ;
return result ;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/util/remove-undefined-properties.js
2022-12-25 13:58:23 +08:00
function removeUndefinedProperties ( obj ) {
for ( const key in obj ) {
2023-06-18 08:35:17 +00:00
if ( obj [ key ] === void 0 ) {
2022-12-25 13:58:23 +08:00
delete obj [ key ] ;
}
}
return obj ;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/merge.js
2022-12-25 13:58:23 +08:00
function merge ( defaults , route , options ) {
if ( typeof route === "string" ) {
let [ method , url ] = route . split ( " " ) ;
2023-06-18 08:35:17 +00:00
options = Object . assign ( url ? { method , url } : { url : method } , options ) ;
2022-12-25 13:58:23 +08:00
} else {
options = Object . assign ( { } , route ) ;
2023-01-20 23:52:29 +00:00
}
options . headers = lowercaseKeys ( options . headers ) ;
2022-12-25 13:58:23 +08:00
removeUndefinedProperties ( options ) ;
removeUndefinedProperties ( options . headers ) ;
2023-01-20 23:52:29 +00:00
const mergedOptions = mergeDeep ( defaults || { } , options ) ;
2023-07-27 11:01:06 +00:00
if ( options . url === "/graphql" ) {
if ( defaults && defaults . mediaType . previews ? . length ) {
mergedOptions . mediaType . previews = defaults . mediaType . previews . filter (
( preview ) => ! mergedOptions . mediaType . previews . includes ( preview )
) . concat ( mergedOptions . mediaType . previews ) ;
}
mergedOptions . mediaType . previews = ( mergedOptions . mediaType . previews || [ ] ) . map ( ( preview ) => preview . replace ( /-preview/ , "" ) ) ;
2022-12-25 13:58:23 +08:00
}
return mergedOptions ;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/util/add-query-parameters.js
2022-12-25 13:58:23 +08:00
function addQueryParameters ( url , parameters ) {
const separator = /\?/ . test ( url ) ? "&" : "?" ;
const names = Object . keys ( parameters ) ;
if ( names . length === 0 ) {
return url ;
}
2023-06-18 08:35:17 +00:00
return url + separator + names . map ( ( name ) => {
2022-12-25 13:58:23 +08:00
if ( name === "q" ) {
return "q=" + parameters . q . split ( "+" ) . map ( encodeURIComponent ) . join ( "+" ) ;
}
return ` ${ name } = ${ encodeURIComponent ( parameters [ name ] ) } ` ;
} ) . join ( "&" ) ;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/util/extract-url-variable-names.js
var urlVariableRegex = /\{[^}]+\}/g ;
2022-12-25 13:58:23 +08:00
function removeNonChars ( variableName ) {
return variableName . replace ( /^\W+|\W+$/g , "" ) . split ( /,/ ) ;
}
function extractUrlVariableNames ( url ) {
const matches = url . match ( urlVariableRegex ) ;
if ( ! matches ) {
return [ ] ;
}
return matches . map ( removeNonChars ) . reduce ( ( a , b ) => a . concat ( b ) , [ ] ) ;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/util/omit.js
2022-12-25 13:58:23 +08:00
function omit ( object , keysToOmit ) {
2023-06-18 08:35:17 +00:00
return Object . keys ( object ) . filter ( ( option ) => ! keysToOmit . includes ( option ) ) . reduce ( ( obj , key ) => {
2022-12-25 13:58:23 +08:00
obj [ key ] = object [ key ] ;
return obj ;
} , { } ) ;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/util/url-template.js
2022-12-25 13:58:23 +08:00
function encodeReserved ( str ) {
2023-06-18 08:35:17 +00:00
return str . split ( /(%[0-9A-Fa-f]{2})/g ) . map ( function ( part ) {
2022-12-25 13:58:23 +08:00
if ( ! /%[0-9A-Fa-f]/ . test ( part ) ) {
part = encodeURI ( part ) . replace ( /%5B/g , "[" ) . replace ( /%5D/g , "]" ) ;
}
return part ;
} ) . join ( "" ) ;
}
function encodeUnreserved ( str ) {
2023-06-18 08:35:17 +00:00
return encodeURIComponent ( str ) . replace ( /[!'()*]/g , function ( c ) {
2022-12-25 13:58:23 +08:00
return "%" + c . charCodeAt ( 0 ) . toString ( 16 ) . toUpperCase ( ) ;
} ) ;
}
function encodeValue ( operator , value , key ) {
value = operator === "+" || operator === "#" ? encodeReserved ( value ) : encodeUnreserved ( value ) ;
if ( key ) {
return encodeUnreserved ( key ) + "=" + value ;
} else {
return value ;
}
}
function isDefined ( value ) {
2023-06-18 08:35:17 +00:00
return value !== void 0 && value !== null ;
2022-12-25 13:58:23 +08:00
}
function isKeyOperator ( operator ) {
return operator === ";" || operator === "&" || operator === "?" ;
}
function getValues ( context , operator , key , modifier ) {
2023-06-18 08:35:17 +00:00
var value = context [ key ] , result = [ ] ;
2022-12-25 13:58:23 +08:00
if ( isDefined ( value ) && value !== "" ) {
if ( typeof value === "string" || typeof value === "number" || typeof value === "boolean" ) {
value = value . toString ( ) ;
if ( modifier && modifier !== "*" ) {
value = value . substring ( 0 , parseInt ( modifier , 10 ) ) ;
}
2023-06-18 08:35:17 +00:00
result . push (
encodeValue ( operator , value , isKeyOperator ( operator ) ? key : "" )
) ;
2022-12-25 13:58:23 +08:00
} else {
if ( modifier === "*" ) {
if ( Array . isArray ( value ) ) {
2023-06-18 08:35:17 +00:00
value . filter ( isDefined ) . forEach ( function ( value2 ) {
result . push (
encodeValue ( operator , value2 , isKeyOperator ( operator ) ? key : "" )
) ;
2022-12-25 13:58:23 +08:00
} ) ;
} else {
2023-06-18 08:35:17 +00:00
Object . keys ( value ) . forEach ( function ( k ) {
2022-12-25 13:58:23 +08:00
if ( isDefined ( value [ k ] ) ) {
result . push ( encodeValue ( operator , value [ k ] , k ) ) ;
}
} ) ;
}
} else {
const tmp = [ ] ;
if ( Array . isArray ( value ) ) {
2023-06-18 08:35:17 +00:00
value . filter ( isDefined ) . forEach ( function ( value2 ) {
tmp . push ( encodeValue ( operator , value2 ) ) ;
2022-12-25 13:58:23 +08:00
} ) ;
} else {
2023-06-18 08:35:17 +00:00
Object . keys ( value ) . forEach ( function ( k ) {
2022-12-25 13:58:23 +08:00
if ( isDefined ( value [ k ] ) ) {
tmp . push ( encodeUnreserved ( k ) ) ;
tmp . push ( encodeValue ( operator , value [ k ] . toString ( ) ) ) ;
}
} ) ;
}
if ( isKeyOperator ( operator ) ) {
result . push ( encodeUnreserved ( key ) + "=" + tmp . join ( "," ) ) ;
} else if ( tmp . length !== 0 ) {
result . push ( tmp . join ( "," ) ) ;
}
}
}
} else {
if ( operator === ";" ) {
if ( isDefined ( value ) ) {
result . push ( encodeUnreserved ( key ) ) ;
}
} else if ( value === "" && ( operator === "&" || operator === "?" ) ) {
result . push ( encodeUnreserved ( key ) + "=" ) ;
} else if ( value === "" ) {
result . push ( "" ) ;
}
}
return result ;
}
function parseUrl ( template ) {
return {
expand : expand . bind ( null , template )
} ;
}
function expand ( template , context ) {
var operators = [ "+" , "#" , "." , "/" , ";" , "?" , "&" ] ;
2023-06-18 08:35:17 +00:00
return template . replace (
/\{([^\{\}]+)\}|([^\{\}]+)/g ,
function ( _ , expression , literal ) {
if ( expression ) {
let operator = "" ;
const values = [ ] ;
if ( operators . indexOf ( expression . charAt ( 0 ) ) !== - 1 ) {
operator = expression . charAt ( 0 ) ;
expression = expression . substr ( 1 ) ;
}
expression . split ( /,/g ) . forEach ( function ( variable ) {
var tmp = /([^:\*]*)(?::(\d+)|(\*))?/ . exec ( variable ) ;
values . push ( getValues ( context , operator , tmp [ 1 ] , tmp [ 2 ] || tmp [ 3 ] ) ) ;
} ) ;
if ( operator && operator !== "+" ) {
var separator = "," ;
if ( operator === "?" ) {
separator = "&" ;
} else if ( operator !== "#" ) {
separator = operator ;
}
return ( values . length !== 0 ? operator : "" ) + values . join ( separator ) ;
} else {
return values . join ( "," ) ;
}
2022-12-25 13:58:23 +08:00
} else {
2023-06-18 08:35:17 +00:00
return encodeReserved ( literal ) ;
2022-12-25 13:58:23 +08:00
}
}
2023-06-18 08:35:17 +00:00
) ;
2022-12-25 13:58:23 +08:00
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/parse.js
// Turn endpoint options (method, url template, headers, mediaType, body
// parameters) into a normalized request object
// { method, url, headers, body?, request? }.
function parse(options) {
  let method = options.method.toUpperCase();
  // Accept express-style ":param" placeholders by rewriting to "{param}".
  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  // Everything that is not request metadata is a candidate URL/body parameter.
  let parameters = omit(options, [
    "method",
    "baseUrl",
    "url",
    "headers",
    "request",
    "mediaType"
  ]);
  // Expand URL-template variables, then prefix baseUrl for relative URLs.
  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);
  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }
  // Parameters consumed by the URL template (plus baseUrl) are done.
  const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
  if (!isBinaryRequest) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json -> application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(
        (format) => format.replace(
          /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/,
          `application/vnd$1$2.${options.mediaType.format}`
        )
      ).join(",");
    }
    if (url.endsWith("/graphql")) {
      // GraphQL endpoint only: merge previews already present in the accept
      // header with the requested ones.
      if (options.mediaType.previews?.length) {
        const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
        headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {
          const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
          return `application/vnd.github.${preview}-preview${format}`;
        }).join(",");
      }
    }
  }
  // GET/HEAD carry remaining parameters in the query string; other verbs
  // send them as the body (or use an explicit `data` value verbatim).
  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      }
    }
  }
  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  }
  // PATCH/PUT requests without a body get an empty-string body.
  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  }
  return Object.assign(
    { method, url, headers },
    typeof body !== "undefined" ? { body } : null,
    options.request ? { request: options.request } : null
  );
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/endpoint-with-defaults.js
// Merge `route`/`options` onto the captured defaults, then parse the result
// into a normalized request object.
function endpointWithDefaults(defaults, route, options) {
  const merged = merge(defaults, route, options);
  return parse(merged);
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/with-defaults.js
// Build a new `endpoint` function whose defaults are the merge of the old
// and new defaults, exposing DEFAULTS / defaults / merge / parse on it.
function withDefaults(oldDefaults, newDefaults) {
  const mergedDefaults = merge(oldDefaults, newDefaults);
  const boundEndpoint = endpointWithDefaults.bind(null, mergedDefaults);
  boundEndpoint.DEFAULTS = mergedDefaults;
  boundEndpoint.defaults = withDefaults.bind(null, mergedDefaults);
  boundEndpoint.merge = merge.bind(null, mergedDefaults);
  boundEndpoint.parse = parse;
  return boundEndpoint;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/index.js
var endpoint = withDefaults ( null , DEFAULTS ) ;
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 3526 :
2023-05-25 21:58:02 +00:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-05-25 21:58:02 +00:00
// esbuild CommonJS interop helpers (generated boilerplate).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy enumerable getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as getters, skipping `except`
// and anything already present on `to`; enumerability mirrors the source.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !desc || desc.enumerable
        });
      }
    }
  }
  return to;
};
// Tag a namespace object as an ES module for CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
GraphqlResponseError : ( ) => GraphqlResponseError ,
graphql : ( ) => graphql2 ,
withCustomRequest : ( ) => withCustomRequest
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
2023-07-27 11:01:06 +00:00
var import _request3 = _ _nccwpck _require _ _ ( 6094 ) ;
2023-05-25 21:58:02 +00:00
var import _universal _user _agent = _ _nccwpck _require _ _ ( 5030 ) ;
2022-12-25 13:58:23 +08:00
2023-05-25 21:58:02 +00:00
// pkg/dist-src/version.js
2023-07-27 11:01:06 +00:00
var VERSION = "7.0.1" ;
// pkg/dist-src/with-defaults.js
var import _request2 = _ _nccwpck _require _ _ ( 6094 ) ;
// pkg/dist-src/graphql.js
var import _request = _ _nccwpck _require _ _ ( 6094 ) ;
2022-12-25 13:58:23 +08:00
2023-05-25 21:58:02 +00:00
// pkg/dist-src/error.js
// Render a human-readable summary of GraphQL response errors,
// one " - <message>" bullet per error.
function _buildMessageForResponseErrors(data) {
  const bullets = data.errors.map(({ message }) => ` - ${message}`);
  return `Request failed due to following response errors:\n${bullets.join("\n")}`;
}
2023-05-25 21:58:02 +00:00
// Error thrown when a GraphQL request succeeds at the HTTP level but the
// response payload contains an `errors` array.
var GraphqlResponseError = class extends Error {
  /**
   * @param {object} request2 - request options that produced the error
   * @param {object} headers - response headers (copied by the caller)
   * @param {object} response - GraphQL response body ({ data, errors })
   */
  constructor(request2, headers, response) {
    super(_buildMessageForResponseErrors(response));
    this.request = request2;
    this.headers = headers;
    this.response = response;
    this.name = "GraphqlResponseError";
    // Expose errors and data directly for convenient destructuring.
    this.errors = response.errors;
    this.data = response.data;
    // Maintain a proper stack trace on V8 (Node.js).
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
};
2022-12-25 13:58:23 +08:00
2023-05-25 21:58:02 +00:00
// pkg/dist-src/graphql.js
// Option keys that are request metadata rather than GraphQL variables.
var NON_VARIABLE_OPTIONS = [
  "method",
  "baseUrl",
  "url",
  "headers",
  "request",
  "query",
  "mediaType"
];
// Option keys that must never be supplied as GraphQL variable names.
var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
// Matches a GitHub Enterprise Server "/api/v3" base-URL suffix.
var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
// Execute a GraphQL query. `query` is either the query string (with
// variables supplied in `options`) or a fully-formed options object that
// contains a `query` key. Returns a promise for `response.data.data`.
function graphql(request2, query, options) {
  if (options) {
    // Reject variable names that would collide with request options.
    if (typeof query === "string" && "query" in options) {
      return Promise.reject(
        new Error(`[@octokit/graphql] "query" cannot be used as variable name`)
      );
    }
    for (const key in options) {
      if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))
        continue;
      return Promise.reject(
        new Error(
          `[@octokit/graphql] "${key}" cannot be used as variable name`
        )
      );
    }
  }
  const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query;
  // Split options into request options vs. GraphQL variables.
  const requestOptions = Object.keys(
    parsedOptions
  ).reduce((result, key) => {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      result[key] = parsedOptions[key];
      return result;
    }
    if (!result.variables) {
      result.variables = {};
    }
    result.variables[key] = parsedOptions[key];
    return result;
  }, {});
  // GHES: the "/api/v3" REST base URL maps to "/api/graphql".
  const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;
  if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
  }
  return request2(requestOptions).then((response) => {
    if (response.data.errors) {
      // Copy headers into a plain object before attaching to the error.
      const headers = {};
      for (const key of Object.keys(response.headers)) {
        headers[key] = response.headers[key];
      }
      throw new GraphqlResponseError(
        requestOptions,
        headers,
        response.data
      );
    }
    return response.data.data;
  });
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/with-defaults.js
// Create a graphql client bound to `request2.defaults(newDefaults)`,
// exposing `defaults` (for further chaining) and the underlying `endpoint`.
function withDefaults(request2, newDefaults) {
  const newRequest = request2.defaults(newDefaults);
  const newApi = (query, options) => graphql(newRequest, query, options);
  newApi.defaults = withDefaults.bind(null, newRequest);
  newApi.endpoint = newRequest.endpoint;
  return newApi;
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/index.js
// Default graphql client: POSTs to /graphql with this package's user agent
// appended to the platform user agent.
var graphql2 = withDefaults(import_request3.request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
  },
  method: "POST",
  url: "/graphql"
});
// Build a graphql API that POSTs to /graphql using the supplied
// custom request function instead of the bundled default.
function withCustomRequest(customRequest) {
  const graphqlDefaults = {
    method: "POST",
    url: "/graphql"
  };
  return withDefaults(customRequest, graphqlDefaults);
}
2023-05-25 21:58:02 +00:00
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 9331 :
2023-05-25 21:58:02 +00:00
/***/ ( ( module ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-05-25 21:58:02 +00:00
// esbuild CommonJS interop helpers (generated boilerplate).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy enumerable getters on `target` for each key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties from `from` to `to` as getters, skipping `except`
// and keys already present on `to`; enumerability mirrors the source.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Tag a namespace object as an ES module for CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
composePaginateRest : ( ) => composePaginateRest ,
isPaginatingEndpoint : ( ) => isPaginatingEndpoint ,
paginateRest : ( ) => paginateRest ,
paginatingEndpoints : ( ) => paginatingEndpoints
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
2022-12-25 13:58:23 +08:00
2023-05-25 21:58:02 +00:00
// pkg/dist-src/version.js
2023-07-27 11:01:06 +00:00
var VERSION = "8.0.0" ;
2022-12-25 13:58:23 +08:00
2023-05-25 21:58:02 +00:00
// pkg/dist-src/normalize-paginated-list-response.js
// Search-style list endpoints wrap results in an envelope
// ({ total_count, incomplete_results?, repository_selection?, <items-key> }).
// Hoist the item array up to `response.data` and re-attach the envelope
// metadata so all paginated responses look uniform.
function normalizePaginatedListResponse(response) {
  if (!response.data) {
    return { ...response, data: [] };
  }
  // Envelope responses have a total_count but no `url` key.
  const needsNormalization = "total_count" in response.data && !("url" in response.data);
  if (!needsNormalization) {
    return response;
  }
  const { incomplete_results, repository_selection, total_count } = response.data;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;
  // The single remaining key holds the item array.
  const namespaceKey = Object.keys(response.data)[0];
  response.data = response.data[namespaceKey];
  if (typeof incomplete_results !== "undefined") {
    response.data.incomplete_results = incomplete_results;
  }
  if (typeof repository_selection !== "undefined") {
    response.data.repository_selection = repository_selection;
  }
  response.data.total_count = total_count;
  return response;
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/iterator.js
// Async-iterate over the pages of a paginated REST endpoint, following the
// `Link: <...>; rel="next"` response header until it is absent.
function iterator(octokit, route, parameters) {
  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const method = options.method;
  const headers = options.headers;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      async next() {
        // An empty/undefined url means the previous page was the last one.
        if (!url)
          return { done: true };
        try {
          const response = await requestMethod({ method, url, headers });
          const normalizedResponse = normalizePaginatedListResponse(response);
          // Advance to the rel="next" link; undefined ends iteration.
          url = ((normalizedResponse.headers.link || "").match(
            /<([^>]+)>;\s*rel="next"/
          ) || [])[1];
          return { value: normalizedResponse };
        } catch (error) {
          // A 409 (e.g. empty repository) yields one synthetic empty page
          // instead of failing, then stops iteration.
          if (error.status !== 409)
            throw error;
          url = "";
          return {
            value: {
              status: 200,
              headers: {},
              data: []
            }
          };
        }
      }
    })
  };
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/paginate.js
// Entry point for octokit.paginate(route[, parameters][, mapFn]).
// `parameters` may be omitted, in which case the second positional
// argument is the map function.
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = void 0;
  }
  const pageIterator = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], pageIterator, mapFn);
}
// Recursively drain `iterator2`, concatenating each page (optionally passed
// through `mapFn`) into `results`; `mapFn` may invoke its `done` callback
// to stop after the current page.
function gather(octokit, results, iterator2, mapFn) {
  return iterator2.next().then((result) => {
    if (result.done) {
      return results;
    }
    let stopRequested = false;
    const done = () => {
      stopRequested = true;
    };
    const pageItems = mapFn ? mapFn(result.value, done) : result.value.data;
    results = results.concat(pageItems);
    if (stopRequested) {
      return results;
    }
    return gather(octokit, results, iterator2, mapFn);
  });
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/compose-paginate.js
// Standalone paginate function (octokit passed explicitly) with the page
// iterator attached, for use outside the octokit plugin system.
var composePaginateRest = Object.assign(paginate, {
  iterator
});
2023-05-25 21:58:02 +00:00
// pkg/dist-src/generated/paginating-endpoints.js
var paginatingEndpoints = [
"GET /app/hook/deliveries" ,
"GET /app/installation-requests" ,
"GET /app/installations" ,
"GET /enterprises/{enterprise}/dependabot/alerts" ,
"GET /enterprises/{enterprise}/secret-scanning/alerts" ,
"GET /events" ,
"GET /gists" ,
"GET /gists/public" ,
"GET /gists/starred" ,
"GET /gists/{gist_id}/comments" ,
"GET /gists/{gist_id}/commits" ,
"GET /gists/{gist_id}/forks" ,
"GET /installation/repositories" ,
"GET /issues" ,
"GET /licenses" ,
"GET /marketplace_listing/plans" ,
"GET /marketplace_listing/plans/{plan_id}/accounts" ,
"GET /marketplace_listing/stubbed/plans" ,
"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" ,
"GET /networks/{owner}/{repo}/events" ,
"GET /notifications" ,
"GET /organizations" ,
"GET /organizations/{org}/personal-access-token-requests" ,
"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories" ,
"GET /organizations/{org}/personal-access-tokens" ,
"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories" ,
"GET /orgs/{org}/actions/cache/usage-by-repository" ,
"GET /orgs/{org}/actions/permissions/repositories" ,
"GET /orgs/{org}/actions/required_workflows" ,
"GET /orgs/{org}/actions/runners" ,
"GET /orgs/{org}/actions/secrets" ,
"GET /orgs/{org}/actions/secrets/{secret_name}/repositories" ,
"GET /orgs/{org}/actions/variables" ,
"GET /orgs/{org}/actions/variables/{name}/repositories" ,
"GET /orgs/{org}/blocks" ,
"GET /orgs/{org}/code-scanning/alerts" ,
"GET /orgs/{org}/codespaces" ,
"GET /orgs/{org}/codespaces/secrets" ,
"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" ,
"GET /orgs/{org}/dependabot/alerts" ,
"GET /orgs/{org}/dependabot/secrets" ,
"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" ,
"GET /orgs/{org}/events" ,
"GET /orgs/{org}/failed_invitations" ,
"GET /orgs/{org}/hooks" ,
"GET /orgs/{org}/hooks/{hook_id}/deliveries" ,
"GET /orgs/{org}/installations" ,
"GET /orgs/{org}/invitations" ,
"GET /orgs/{org}/invitations/{invitation_id}/teams" ,
"GET /orgs/{org}/issues" ,
"GET /orgs/{org}/members" ,
"GET /orgs/{org}/members/{username}/codespaces" ,
"GET /orgs/{org}/migrations" ,
"GET /orgs/{org}/migrations/{migration_id}/repositories" ,
"GET /orgs/{org}/outside_collaborators" ,
"GET /orgs/{org}/packages" ,
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions" ,
"GET /orgs/{org}/projects" ,
"GET /orgs/{org}/public_members" ,
"GET /orgs/{org}/repos" ,
2023-06-18 08:35:17 +00:00
"GET /orgs/{org}/rulesets" ,
2023-05-25 21:58:02 +00:00
"GET /orgs/{org}/secret-scanning/alerts" ,
"GET /orgs/{org}/teams" ,
"GET /orgs/{org}/teams/{team_slug}/discussions" ,
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ,
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" ,
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" ,
"GET /orgs/{org}/teams/{team_slug}/invitations" ,
"GET /orgs/{org}/teams/{team_slug}/members" ,
"GET /orgs/{org}/teams/{team_slug}/projects" ,
"GET /orgs/{org}/teams/{team_slug}/repos" ,
"GET /orgs/{org}/teams/{team_slug}/teams" ,
"GET /projects/columns/{column_id}/cards" ,
"GET /projects/{project_id}/collaborators" ,
"GET /projects/{project_id}/columns" ,
"GET /repos/{org}/{repo}/actions/required_workflows" ,
"GET /repos/{owner}/{repo}/actions/artifacts" ,
"GET /repos/{owner}/{repo}/actions/caches" ,
"GET /repos/{owner}/{repo}/actions/organization-secrets" ,
"GET /repos/{owner}/{repo}/actions/organization-variables" ,
"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs" ,
"GET /repos/{owner}/{repo}/actions/runners" ,
"GET /repos/{owner}/{repo}/actions/runs" ,
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" ,
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" ,
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" ,
"GET /repos/{owner}/{repo}/actions/secrets" ,
"GET /repos/{owner}/{repo}/actions/variables" ,
"GET /repos/{owner}/{repo}/actions/workflows" ,
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" ,
"GET /repos/{owner}/{repo}/assignees" ,
"GET /repos/{owner}/{repo}/branches" ,
"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" ,
"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" ,
"GET /repos/{owner}/{repo}/code-scanning/alerts" ,
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" ,
"GET /repos/{owner}/{repo}/code-scanning/analyses" ,
"GET /repos/{owner}/{repo}/codespaces" ,
"GET /repos/{owner}/{repo}/codespaces/devcontainers" ,
"GET /repos/{owner}/{repo}/codespaces/secrets" ,
"GET /repos/{owner}/{repo}/collaborators" ,
"GET /repos/{owner}/{repo}/comments" ,
"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" ,
"GET /repos/{owner}/{repo}/commits" ,
"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" ,
"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" ,
"GET /repos/{owner}/{repo}/commits/{ref}/check-runs" ,
"GET /repos/{owner}/{repo}/commits/{ref}/check-suites" ,
"GET /repos/{owner}/{repo}/commits/{ref}/status" ,
"GET /repos/{owner}/{repo}/commits/{ref}/statuses" ,
"GET /repos/{owner}/{repo}/contributors" ,
"GET /repos/{owner}/{repo}/dependabot/alerts" ,
"GET /repos/{owner}/{repo}/dependabot/secrets" ,
"GET /repos/{owner}/{repo}/deployments" ,
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ,
"GET /repos/{owner}/{repo}/environments" ,
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" ,
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" ,
"GET /repos/{owner}/{repo}/events" ,
"GET /repos/{owner}/{repo}/forks" ,
"GET /repos/{owner}/{repo}/hooks" ,
"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" ,
"GET /repos/{owner}/{repo}/invitations" ,
"GET /repos/{owner}/{repo}/issues" ,
"GET /repos/{owner}/{repo}/issues/comments" ,
"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" ,
"GET /repos/{owner}/{repo}/issues/events" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/comments" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/events" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/labels" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" ,
"GET /repos/{owner}/{repo}/keys" ,
"GET /repos/{owner}/{repo}/labels" ,
"GET /repos/{owner}/{repo}/milestones" ,
"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" ,
"GET /repos/{owner}/{repo}/notifications" ,
"GET /repos/{owner}/{repo}/pages/builds" ,
"GET /repos/{owner}/{repo}/projects" ,
"GET /repos/{owner}/{repo}/pulls" ,
"GET /repos/{owner}/{repo}/pulls/comments" ,
"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/files" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ,
"GET /repos/{owner}/{repo}/releases" ,
"GET /repos/{owner}/{repo}/releases/{release_id}/assets" ,
"GET /repos/{owner}/{repo}/releases/{release_id}/reactions" ,
2023-06-18 08:35:17 +00:00
"GET /repos/{owner}/{repo}/rules/branches/{branch}" ,
"GET /repos/{owner}/{repo}/rulesets" ,
2023-05-25 21:58:02 +00:00
"GET /repos/{owner}/{repo}/secret-scanning/alerts" ,
"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" ,
"GET /repos/{owner}/{repo}/security-advisories" ,
"GET /repos/{owner}/{repo}/stargazers" ,
"GET /repos/{owner}/{repo}/subscribers" ,
"GET /repos/{owner}/{repo}/tags" ,
"GET /repos/{owner}/{repo}/teams" ,
"GET /repos/{owner}/{repo}/topics" ,
"GET /repositories" ,
"GET /repositories/{repository_id}/environments/{environment_name}/secrets" ,
"GET /repositories/{repository_id}/environments/{environment_name}/variables" ,
"GET /search/code" ,
"GET /search/commits" ,
"GET /search/issues" ,
"GET /search/labels" ,
"GET /search/repositories" ,
"GET /search/topics" ,
"GET /search/users" ,
"GET /teams/{team_id}/discussions" ,
"GET /teams/{team_id}/discussions/{discussion_number}/comments" ,
"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions" ,
"GET /teams/{team_id}/discussions/{discussion_number}/reactions" ,
"GET /teams/{team_id}/invitations" ,
"GET /teams/{team_id}/members" ,
"GET /teams/{team_id}/projects" ,
"GET /teams/{team_id}/repos" ,
"GET /teams/{team_id}/teams" ,
"GET /user/blocks" ,
"GET /user/codespaces" ,
"GET /user/codespaces/secrets" ,
"GET /user/emails" ,
"GET /user/followers" ,
"GET /user/following" ,
"GET /user/gpg_keys" ,
"GET /user/installations" ,
"GET /user/installations/{installation_id}/repositories" ,
"GET /user/issues" ,
"GET /user/keys" ,
"GET /user/marketplace_purchases" ,
"GET /user/marketplace_purchases/stubbed" ,
"GET /user/memberships/orgs" ,
"GET /user/migrations" ,
"GET /user/migrations/{migration_id}/repositories" ,
"GET /user/orgs" ,
"GET /user/packages" ,
"GET /user/packages/{package_type}/{package_name}/versions" ,
"GET /user/public_emails" ,
"GET /user/repos" ,
"GET /user/repository_invitations" ,
"GET /user/social_accounts" ,
"GET /user/ssh_signing_keys" ,
"GET /user/starred" ,
"GET /user/subscriptions" ,
"GET /user/teams" ,
"GET /users" ,
"GET /users/{username}/events" ,
"GET /users/{username}/events/orgs/{org}" ,
"GET /users/{username}/events/public" ,
"GET /users/{username}/followers" ,
"GET /users/{username}/following" ,
"GET /users/{username}/gists" ,
"GET /users/{username}/gpg_keys" ,
"GET /users/{username}/keys" ,
"GET /users/{username}/orgs" ,
"GET /users/{username}/packages" ,
"GET /users/{username}/projects" ,
"GET /users/{username}/received_events" ,
"GET /users/{username}/received_events/public" ,
"GET /users/{username}/repos" ,
"GET /users/{username}/social_accounts" ,
"GET /users/{username}/ssh_signing_keys" ,
"GET /users/{username}/starred" ,
"GET /users/{username}/subscriptions"
] ;
2022-12-25 13:58:23 +08:00
2023-05-25 21:58:02 +00:00
// pkg/dist-src/paginating-endpoints.js
// True only for route strings listed in `paginatingEndpoints`;
// anything that is not a string is never a paginating endpoint.
function isPaginatingEndpoint(arg) {
  return typeof arg === "string" ? paginatingEndpoints.includes(arg) : false;
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/index.js
// Octokit plugin: exposes `octokit.paginate` (with a `.iterator` method)
// bound to the given octokit instance.
function paginateRest(octokit) {
  const boundPaginate = paginate.bind(null, octokit);
  boundPaginate.iterator = iterator.bind(null, octokit);
  return { paginate: boundPaginate };
}
paginateRest.VERSION = VERSION;
2023-05-25 21:58:02 +00:00
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 8528 :
2023-06-07 23:17:45 +00:00
/***/ ( ( module ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-06-07 23:17:45 +00:00
// esbuild CommonJS interop helpers (generated boilerplate).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy enumerable getters on `target` for each key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties from `from` to `to` as getters, skipping `except`
// and keys already present on `to`; enumerability mirrors the source.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Tag a namespace object as an ES module for CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
2022-12-25 13:58:23 +08:00
2023-06-07 23:17:45 +00:00
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
legacyRestEndpointMethods : ( ) => legacyRestEndpointMethods ,
restEndpointMethods : ( ) => restEndpointMethods
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
2022-12-25 13:58:23 +08:00
2023-06-18 08:35:17 +00:00
// pkg/dist-src/version.js
2023-07-27 11:01:06 +00:00
var VERSION = "9.0.0" ;
2023-06-18 08:35:17 +00:00
2023-06-07 23:17:45 +00:00
// pkg/dist-src/generated/endpoints.js
var Endpoints = {
2022-12-25 13:58:23 +08:00
actions : {
2023-06-07 23:17:45 +00:00
addCustomLabelsToSelfHostedRunnerForOrg : [
"POST /orgs/{org}/actions/runners/{runner_id}/labels"
] ,
addCustomLabelsToSelfHostedRunnerForRepo : [
"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
] ,
addSelectedRepoToOrgSecret : [
"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
] ,
addSelectedRepoToOrgVariable : [
"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
] ,
addSelectedRepoToRequiredWorkflow : [
"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}"
] ,
approveWorkflowRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"
] ,
cancelWorkflowRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"
] ,
createEnvironmentVariable : [
"POST /repositories/{repository_id}/environments/{environment_name}/variables"
] ,
createOrUpdateEnvironmentSecret : [
"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
] ,
2022-12-25 13:58:23 +08:00
createOrUpdateOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}" ] ,
2023-06-07 23:17:45 +00:00
createOrUpdateRepoSecret : [
"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"
] ,
2023-01-20 23:52:29 +00:00
createOrgVariable : [ "POST /orgs/{org}/actions/variables" ] ,
2023-06-07 23:17:45 +00:00
createRegistrationTokenForOrg : [
"POST /orgs/{org}/actions/runners/registration-token"
] ,
createRegistrationTokenForRepo : [
"POST /repos/{owner}/{repo}/actions/runners/registration-token"
] ,
2022-12-25 13:58:23 +08:00
createRemoveTokenForOrg : [ "POST /orgs/{org}/actions/runners/remove-token" ] ,
2023-06-07 23:17:45 +00:00
createRemoveTokenForRepo : [
"POST /repos/{owner}/{repo}/actions/runners/remove-token"
] ,
2023-01-20 23:52:29 +00:00
createRepoVariable : [ "POST /repos/{owner}/{repo}/actions/variables" ] ,
createRequiredWorkflow : [ "POST /orgs/{org}/actions/required_workflows" ] ,
2023-06-07 23:17:45 +00:00
createWorkflowDispatch : [
"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"
] ,
deleteActionsCacheById : [
"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"
] ,
deleteActionsCacheByKey : [
"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"
] ,
deleteArtifact : [
"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"
] ,
deleteEnvironmentSecret : [
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
] ,
deleteEnvironmentVariable : [
"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
] ,
2022-12-25 13:58:23 +08:00
deleteOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}" ] ,
2023-01-20 23:52:29 +00:00
deleteOrgVariable : [ "DELETE /orgs/{org}/actions/variables/{name}" ] ,
2023-06-07 23:17:45 +00:00
deleteRepoSecret : [
"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"
] ,
deleteRepoVariable : [
"DELETE /repos/{owner}/{repo}/actions/variables/{name}"
] ,
deleteRequiredWorkflow : [
"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}"
] ,
deleteSelfHostedRunnerFromOrg : [
"DELETE /orgs/{org}/actions/runners/{runner_id}"
] ,
deleteSelfHostedRunnerFromRepo : [
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"
] ,
2022-12-25 13:58:23 +08:00
deleteWorkflowRun : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
2023-06-07 23:17:45 +00:00
deleteWorkflowRunLogs : [
"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
] ,
disableSelectedRepositoryGithubActionsOrganization : [
"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"
] ,
disableWorkflow : [
"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"
] ,
downloadArtifact : [
"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"
] ,
downloadJobLogsForWorkflowRun : [
"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"
] ,
downloadWorkflowRunAttemptLogs : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"
] ,
downloadWorkflowRunLogs : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
] ,
enableSelectedRepositoryGithubActionsOrganization : [
"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"
] ,
enableWorkflow : [
"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"
] ,
2023-06-18 08:35:17 +00:00
generateRunnerJitconfigForOrg : [
"POST /orgs/{org}/actions/runners/generate-jitconfig"
] ,
generateRunnerJitconfigForRepo : [
"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig"
] ,
2022-12-25 13:58:23 +08:00
getActionsCacheList : [ "GET /repos/{owner}/{repo}/actions/caches" ] ,
getActionsCacheUsage : [ "GET /repos/{owner}/{repo}/actions/cache/usage" ] ,
2023-06-07 23:17:45 +00:00
getActionsCacheUsageByRepoForOrg : [
"GET /orgs/{org}/actions/cache/usage-by-repository"
] ,
2022-12-25 13:58:23 +08:00
getActionsCacheUsageForOrg : [ "GET /orgs/{org}/actions/cache/usage" ] ,
2023-06-07 23:17:45 +00:00
getAllowedActionsOrganization : [
"GET /orgs/{org}/actions/permissions/selected-actions"
] ,
getAllowedActionsRepository : [
"GET /repos/{owner}/{repo}/actions/permissions/selected-actions"
] ,
2022-12-25 13:58:23 +08:00
getArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
2023-06-07 23:17:45 +00:00
getEnvironmentPublicKey : [
"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"
] ,
getEnvironmentSecret : [
"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
] ,
getEnvironmentVariable : [
"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
] ,
getGithubActionsDefaultWorkflowPermissionsOrganization : [
"GET /orgs/{org}/actions/permissions/workflow"
] ,
getGithubActionsDefaultWorkflowPermissionsRepository : [
"GET /repos/{owner}/{repo}/actions/permissions/workflow"
] ,
getGithubActionsPermissionsOrganization : [
"GET /orgs/{org}/actions/permissions"
] ,
getGithubActionsPermissionsRepository : [
"GET /repos/{owner}/{repo}/actions/permissions"
] ,
2022-12-25 13:58:23 +08:00
getJobForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/actions/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}" ] ,
2023-01-20 23:52:29 +00:00
getOrgVariable : [ "GET /orgs/{org}/actions/variables/{name}" ] ,
2023-06-07 23:17:45 +00:00
getPendingDeploymentsForRun : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
] ,
getRepoPermissions : [
"GET /repos/{owner}/{repo}/actions/permissions" ,
{ } ,
{ renamed : [ "actions" , "getGithubActionsPermissionsRepository" ] }
] ,
2022-12-25 13:58:23 +08:00
getRepoPublicKey : [ "GET /repos/{owner}/{repo}/actions/secrets/public-key" ] ,
2023-06-07 23:17:45 +00:00
getRepoRequiredWorkflow : [
"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}"
] ,
getRepoRequiredWorkflowUsage : [
"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing"
] ,
2022-12-25 13:58:23 +08:00
getRepoSecret : [ "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
2023-01-20 23:52:29 +00:00
getRepoVariable : [ "GET /repos/{owner}/{repo}/actions/variables/{name}" ] ,
2023-06-07 23:17:45 +00:00
getRequiredWorkflow : [
"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}"
] ,
getReviewsForRun : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"
] ,
2022-12-25 13:58:23 +08:00
getSelfHostedRunnerForOrg : [ "GET /orgs/{org}/actions/runners/{runner_id}" ] ,
2023-06-07 23:17:45 +00:00
getSelfHostedRunnerForRepo : [
"GET /repos/{owner}/{repo}/actions/runners/{runner_id}"
] ,
2022-12-25 13:58:23 +08:00
getWorkflow : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}" ] ,
2023-06-07 23:17:45 +00:00
getWorkflowAccessToRepository : [
"GET /repos/{owner}/{repo}/actions/permissions/access"
] ,
2022-12-25 13:58:23 +08:00
getWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
2023-06-07 23:17:45 +00:00
getWorkflowRunAttempt : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"
] ,
getWorkflowRunUsage : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"
] ,
getWorkflowUsage : [
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"
] ,
2022-12-25 13:58:23 +08:00
listArtifactsForRepo : [ "GET /repos/{owner}/{repo}/actions/artifacts" ] ,
2023-06-07 23:17:45 +00:00
listEnvironmentSecrets : [
"GET /repositories/{repository_id}/environments/{environment_name}/secrets"
] ,
listEnvironmentVariables : [
"GET /repositories/{repository_id}/environments/{environment_name}/variables"
] ,
listJobsForWorkflowRun : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"
] ,
listJobsForWorkflowRunAttempt : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"
] ,
listLabelsForSelfHostedRunnerForOrg : [
"GET /orgs/{org}/actions/runners/{runner_id}/labels"
] ,
listLabelsForSelfHostedRunnerForRepo : [
"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
] ,
2022-12-25 13:58:23 +08:00
listOrgSecrets : [ "GET /orgs/{org}/actions/secrets" ] ,
2023-01-20 23:52:29 +00:00
listOrgVariables : [ "GET /orgs/{org}/actions/variables" ] ,
2023-06-07 23:17:45 +00:00
listRepoOrganizationSecrets : [
"GET /repos/{owner}/{repo}/actions/organization-secrets"
] ,
listRepoOrganizationVariables : [
"GET /repos/{owner}/{repo}/actions/organization-variables"
] ,
listRepoRequiredWorkflows : [
"GET /repos/{org}/{repo}/actions/required_workflows"
] ,
2022-12-25 13:58:23 +08:00
listRepoSecrets : [ "GET /repos/{owner}/{repo}/actions/secrets" ] ,
2023-01-20 23:52:29 +00:00
listRepoVariables : [ "GET /repos/{owner}/{repo}/actions/variables" ] ,
2022-12-25 13:58:23 +08:00
listRepoWorkflows : [ "GET /repos/{owner}/{repo}/actions/workflows" ] ,
2023-06-07 23:17:45 +00:00
listRequiredWorkflowRuns : [
"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs"
] ,
2023-01-20 23:52:29 +00:00
listRequiredWorkflows : [ "GET /orgs/{org}/actions/required_workflows" ] ,
2022-12-25 13:58:23 +08:00
listRunnerApplicationsForOrg : [ "GET /orgs/{org}/actions/runners/downloads" ] ,
2023-06-07 23:17:45 +00:00
listRunnerApplicationsForRepo : [
"GET /repos/{owner}/{repo}/actions/runners/downloads"
] ,
listSelectedReposForOrgSecret : [
"GET /orgs/{org}/actions/secrets/{secret_name}/repositories"
] ,
listSelectedReposForOrgVariable : [
"GET /orgs/{org}/actions/variables/{name}/repositories"
] ,
listSelectedRepositoriesEnabledGithubActionsOrganization : [
"GET /orgs/{org}/actions/permissions/repositories"
] ,
listSelectedRepositoriesRequiredWorkflow : [
"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories"
] ,
2022-12-25 13:58:23 +08:00
listSelfHostedRunnersForOrg : [ "GET /orgs/{org}/actions/runners" ] ,
listSelfHostedRunnersForRepo : [ "GET /repos/{owner}/{repo}/actions/runners" ] ,
2023-06-07 23:17:45 +00:00
listWorkflowRunArtifacts : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"
] ,
listWorkflowRuns : [
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"
] ,
2022-12-25 13:58:23 +08:00
listWorkflowRunsForRepo : [ "GET /repos/{owner}/{repo}/actions/runs" ] ,
2023-06-07 23:17:45 +00:00
reRunJobForWorkflowRun : [
"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"
] ,
2022-12-25 13:58:23 +08:00
reRunWorkflow : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun" ] ,
2023-06-07 23:17:45 +00:00
reRunWorkflowFailedJobs : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"
] ,
removeAllCustomLabelsFromSelfHostedRunnerForOrg : [
"DELETE /orgs/{org}/actions/runners/{runner_id}/labels"
] ,
removeAllCustomLabelsFromSelfHostedRunnerForRepo : [
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
] ,
removeCustomLabelFromSelfHostedRunnerForOrg : [
"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"
] ,
removeCustomLabelFromSelfHostedRunnerForRepo : [
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"
] ,
removeSelectedRepoFromOrgSecret : [
"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
] ,
removeSelectedRepoFromOrgVariable : [
"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
] ,
removeSelectedRepoFromRequiredWorkflow : [
"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}"
] ,
reviewCustomGatesForRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule"
] ,
reviewPendingDeploymentsForRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
] ,
setAllowedActionsOrganization : [
"PUT /orgs/{org}/actions/permissions/selected-actions"
] ,
setAllowedActionsRepository : [
"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"
] ,
setCustomLabelsForSelfHostedRunnerForOrg : [
"PUT /orgs/{org}/actions/runners/{runner_id}/labels"
] ,
setCustomLabelsForSelfHostedRunnerForRepo : [
"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
] ,
setGithubActionsDefaultWorkflowPermissionsOrganization : [
"PUT /orgs/{org}/actions/permissions/workflow"
] ,
setGithubActionsDefaultWorkflowPermissionsRepository : [
"PUT /repos/{owner}/{repo}/actions/permissions/workflow"
] ,
setGithubActionsPermissionsOrganization : [
"PUT /orgs/{org}/actions/permissions"
] ,
setGithubActionsPermissionsRepository : [
"PUT /repos/{owner}/{repo}/actions/permissions"
] ,
setSelectedReposForOrgSecret : [
"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"
] ,
setSelectedReposForOrgVariable : [
"PUT /orgs/{org}/actions/variables/{name}/repositories"
] ,
setSelectedReposToRequiredWorkflow : [
"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories"
] ,
setSelectedRepositoriesEnabledGithubActionsOrganization : [
"PUT /orgs/{org}/actions/permissions/repositories"
] ,
setWorkflowAccessToRepository : [
"PUT /repos/{owner}/{repo}/actions/permissions/access"
] ,
updateEnvironmentVariable : [
"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
] ,
2023-01-20 23:52:29 +00:00
updateOrgVariable : [ "PATCH /orgs/{org}/actions/variables/{name}" ] ,
2023-06-07 23:17:45 +00:00
updateRepoVariable : [
"PATCH /repos/{owner}/{repo}/actions/variables/{name}"
] ,
updateRequiredWorkflow : [
"PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}"
]
2022-12-25 13:58:23 +08:00
} ,
activity : {
checkRepoIsStarredByAuthenticatedUser : [ "GET /user/starred/{owner}/{repo}" ] ,
deleteRepoSubscription : [ "DELETE /repos/{owner}/{repo}/subscription" ] ,
2023-06-07 23:17:45 +00:00
deleteThreadSubscription : [
"DELETE /notifications/threads/{thread_id}/subscription"
] ,
2022-12-25 13:58:23 +08:00
getFeeds : [ "GET /feeds" ] ,
getRepoSubscription : [ "GET /repos/{owner}/{repo}/subscription" ] ,
getThread : [ "GET /notifications/threads/{thread_id}" ] ,
2023-06-07 23:17:45 +00:00
getThreadSubscriptionForAuthenticatedUser : [
"GET /notifications/threads/{thread_id}/subscription"
] ,
2022-12-25 13:58:23 +08:00
listEventsForAuthenticatedUser : [ "GET /users/{username}/events" ] ,
listNotificationsForAuthenticatedUser : [ "GET /notifications" ] ,
2023-06-07 23:17:45 +00:00
listOrgEventsForAuthenticatedUser : [
"GET /users/{username}/events/orgs/{org}"
] ,
2022-12-25 13:58:23 +08:00
listPublicEvents : [ "GET /events" ] ,
listPublicEventsForRepoNetwork : [ "GET /networks/{owner}/{repo}/events" ] ,
listPublicEventsForUser : [ "GET /users/{username}/events/public" ] ,
listPublicOrgEvents : [ "GET /orgs/{org}/events" ] ,
listReceivedEventsForUser : [ "GET /users/{username}/received_events" ] ,
2023-06-07 23:17:45 +00:00
listReceivedPublicEventsForUser : [
"GET /users/{username}/received_events/public"
] ,
2022-12-25 13:58:23 +08:00
listRepoEvents : [ "GET /repos/{owner}/{repo}/events" ] ,
2023-06-07 23:17:45 +00:00
listRepoNotificationsForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/notifications"
] ,
2022-12-25 13:58:23 +08:00
listReposStarredByAuthenticatedUser : [ "GET /user/starred" ] ,
listReposStarredByUser : [ "GET /users/{username}/starred" ] ,
listReposWatchedByUser : [ "GET /users/{username}/subscriptions" ] ,
listStargazersForRepo : [ "GET /repos/{owner}/{repo}/stargazers" ] ,
listWatchedReposForAuthenticatedUser : [ "GET /user/subscriptions" ] ,
listWatchersForRepo : [ "GET /repos/{owner}/{repo}/subscribers" ] ,
markNotificationsAsRead : [ "PUT /notifications" ] ,
markRepoNotificationsAsRead : [ "PUT /repos/{owner}/{repo}/notifications" ] ,
markThreadAsRead : [ "PATCH /notifications/threads/{thread_id}" ] ,
setRepoSubscription : [ "PUT /repos/{owner}/{repo}/subscription" ] ,
2023-06-07 23:17:45 +00:00
setThreadSubscription : [
"PUT /notifications/threads/{thread_id}/subscription"
] ,
2022-12-25 13:58:23 +08:00
starRepoForAuthenticatedUser : [ "PUT /user/starred/{owner}/{repo}" ] ,
unstarRepoForAuthenticatedUser : [ "DELETE /user/starred/{owner}/{repo}" ]
} ,
apps : {
2023-06-07 23:17:45 +00:00
addRepoToInstallation : [
"PUT /user/installations/{installation_id}/repositories/{repository_id}" ,
{ } ,
{ renamed : [ "apps" , "addRepoToInstallationForAuthenticatedUser" ] }
] ,
addRepoToInstallationForAuthenticatedUser : [
"PUT /user/installations/{installation_id}/repositories/{repository_id}"
] ,
2022-12-25 13:58:23 +08:00
checkToken : [ "POST /applications/{client_id}/token" ] ,
createFromManifest : [ "POST /app-manifests/{code}/conversions" ] ,
2023-06-07 23:17:45 +00:00
createInstallationAccessToken : [
"POST /app/installations/{installation_id}/access_tokens"
] ,
2022-12-25 13:58:23 +08:00
deleteAuthorization : [ "DELETE /applications/{client_id}/grant" ] ,
deleteInstallation : [ "DELETE /app/installations/{installation_id}" ] ,
deleteToken : [ "DELETE /applications/{client_id}/token" ] ,
getAuthenticated : [ "GET /app" ] ,
getBySlug : [ "GET /apps/{app_slug}" ] ,
getInstallation : [ "GET /app/installations/{installation_id}" ] ,
getOrgInstallation : [ "GET /orgs/{org}/installation" ] ,
getRepoInstallation : [ "GET /repos/{owner}/{repo}/installation" ] ,
2023-06-07 23:17:45 +00:00
getSubscriptionPlanForAccount : [
"GET /marketplace_listing/accounts/{account_id}"
] ,
getSubscriptionPlanForAccountStubbed : [
"GET /marketplace_listing/stubbed/accounts/{account_id}"
] ,
2022-12-25 13:58:23 +08:00
getUserInstallation : [ "GET /users/{username}/installation" ] ,
getWebhookConfigForApp : [ "GET /app/hook/config" ] ,
getWebhookDelivery : [ "GET /app/hook/deliveries/{delivery_id}" ] ,
listAccountsForPlan : [ "GET /marketplace_listing/plans/{plan_id}/accounts" ] ,
2023-06-07 23:17:45 +00:00
listAccountsForPlanStubbed : [
"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"
] ,
listInstallationReposForAuthenticatedUser : [
"GET /user/installations/{installation_id}/repositories"
] ,
listInstallationRequestsForAuthenticatedApp : [
"GET /app/installation-requests"
] ,
2022-12-25 13:58:23 +08:00
listInstallations : [ "GET /app/installations" ] ,
listInstallationsForAuthenticatedUser : [ "GET /user/installations" ] ,
listPlans : [ "GET /marketplace_listing/plans" ] ,
listPlansStubbed : [ "GET /marketplace_listing/stubbed/plans" ] ,
listReposAccessibleToInstallation : [ "GET /installation/repositories" ] ,
listSubscriptionsForAuthenticatedUser : [ "GET /user/marketplace_purchases" ] ,
2023-06-07 23:17:45 +00:00
listSubscriptionsForAuthenticatedUserStubbed : [
"GET /user/marketplace_purchases/stubbed"
] ,
2022-12-25 13:58:23 +08:00
listWebhookDeliveries : [ "GET /app/hook/deliveries" ] ,
2023-06-07 23:17:45 +00:00
redeliverWebhookDelivery : [
"POST /app/hook/deliveries/{delivery_id}/attempts"
] ,
removeRepoFromInstallation : [
"DELETE /user/installations/{installation_id}/repositories/{repository_id}" ,
{ } ,
{ renamed : [ "apps" , "removeRepoFromInstallationForAuthenticatedUser" ] }
] ,
removeRepoFromInstallationForAuthenticatedUser : [
"DELETE /user/installations/{installation_id}/repositories/{repository_id}"
] ,
2022-12-25 13:58:23 +08:00
resetToken : [ "PATCH /applications/{client_id}/token" ] ,
revokeInstallationAccessToken : [ "DELETE /installation/token" ] ,
scopeToken : [ "POST /applications/{client_id}/token/scoped" ] ,
suspendInstallation : [ "PUT /app/installations/{installation_id}/suspended" ] ,
2023-06-07 23:17:45 +00:00
unsuspendInstallation : [
"DELETE /app/installations/{installation_id}/suspended"
] ,
2022-12-25 13:58:23 +08:00
updateWebhookConfigForApp : [ "PATCH /app/hook/config" ]
} ,
billing : {
getGithubActionsBillingOrg : [ "GET /orgs/{org}/settings/billing/actions" ] ,
2023-06-07 23:17:45 +00:00
getGithubActionsBillingUser : [
"GET /users/{username}/settings/billing/actions"
] ,
2022-12-25 13:58:23 +08:00
getGithubPackagesBillingOrg : [ "GET /orgs/{org}/settings/billing/packages" ] ,
2023-06-07 23:17:45 +00:00
getGithubPackagesBillingUser : [
"GET /users/{username}/settings/billing/packages"
] ,
getSharedStorageBillingOrg : [
"GET /orgs/{org}/settings/billing/shared-storage"
] ,
getSharedStorageBillingUser : [
"GET /users/{username}/settings/billing/shared-storage"
]
2022-12-25 13:58:23 +08:00
} ,
checks : {
create : [ "POST /repos/{owner}/{repo}/check-runs" ] ,
createSuite : [ "POST /repos/{owner}/{repo}/check-suites" ] ,
get : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}" ] ,
getSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}" ] ,
2023-06-07 23:17:45 +00:00
listAnnotations : [
"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"
] ,
2022-12-25 13:58:23 +08:00
listForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs" ] ,
2023-06-07 23:17:45 +00:00
listForSuite : [
"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"
] ,
2022-12-25 13:58:23 +08:00
listSuitesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites" ] ,
2023-06-07 23:17:45 +00:00
rerequestRun : [
"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"
] ,
rerequestSuite : [
"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"
] ,
setSuitesPreferences : [
"PATCH /repos/{owner}/{repo}/check-suites/preferences"
] ,
2022-12-25 13:58:23 +08:00
update : [ "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}" ]
} ,
codeScanning : {
2023-06-07 23:17:45 +00:00
deleteAnalysis : [
"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"
] ,
getAlert : [
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" ,
{ } ,
{ renamedParameters : { alert _id : "alert_number" } }
] ,
getAnalysis : [
"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"
] ,
getCodeqlDatabase : [
"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"
] ,
2023-05-19 08:00:27 +00:00
getDefaultSetup : [ "GET /repos/{owner}/{repo}/code-scanning/default-setup" ] ,
2022-12-25 13:58:23 +08:00
getSarif : [ "GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}" ] ,
2023-06-07 23:17:45 +00:00
listAlertInstances : [
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"
] ,
2022-12-25 13:58:23 +08:00
listAlertsForOrg : [ "GET /orgs/{org}/code-scanning/alerts" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/code-scanning/alerts" ] ,
2023-06-07 23:17:45 +00:00
listAlertsInstances : [
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" ,
{ } ,
{ renamed : [ "codeScanning" , "listAlertInstances" ] }
] ,
listCodeqlDatabases : [
"GET /repos/{owner}/{repo}/code-scanning/codeql/databases"
] ,
2022-12-25 13:58:23 +08:00
listRecentAnalyses : [ "GET /repos/{owner}/{repo}/code-scanning/analyses" ] ,
2023-06-07 23:17:45 +00:00
updateAlert : [
"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"
] ,
updateDefaultSetup : [
"PATCH /repos/{owner}/{repo}/code-scanning/default-setup"
] ,
2022-12-25 13:58:23 +08:00
uploadSarif : [ "POST /repos/{owner}/{repo}/code-scanning/sarifs" ]
} ,
codesOfConduct : {
getAllCodesOfConduct : [ "GET /codes_of_conduct" ] ,
getConductCode : [ "GET /codes_of_conduct/{key}" ]
} ,
codespaces : {
2023-06-07 23:17:45 +00:00
addRepositoryForSecretForAuthenticatedUser : [
"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
] ,
addSelectedRepoToOrgSecret : [
"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
] ,
codespaceMachinesForAuthenticatedUser : [
"GET /user/codespaces/{codespace_name}/machines"
] ,
2022-12-25 13:58:23 +08:00
createForAuthenticatedUser : [ "POST /user/codespaces" ] ,
2023-06-07 23:17:45 +00:00
createOrUpdateOrgSecret : [
"PUT /orgs/{org}/codespaces/secrets/{secret_name}"
] ,
createOrUpdateRepoSecret : [
"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
] ,
createOrUpdateSecretForAuthenticatedUser : [
"PUT /user/codespaces/secrets/{secret_name}"
] ,
createWithPrForAuthenticatedUser : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"
] ,
createWithRepoForAuthenticatedUser : [
"POST /repos/{owner}/{repo}/codespaces"
] ,
deleteCodespacesBillingUsers : [
"DELETE /orgs/{org}/codespaces/billing/selected_users"
] ,
2022-12-25 13:58:23 +08:00
deleteForAuthenticatedUser : [ "DELETE /user/codespaces/{codespace_name}" ] ,
2023-06-07 23:17:45 +00:00
deleteFromOrganization : [
"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"
] ,
2023-01-20 23:52:29 +00:00
deleteOrgSecret : [ "DELETE /orgs/{org}/codespaces/secrets/{secret_name}" ] ,
2023-06-07 23:17:45 +00:00
deleteRepoSecret : [
"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
] ,
deleteSecretForAuthenticatedUser : [
"DELETE /user/codespaces/secrets/{secret_name}"
] ,
exportForAuthenticatedUser : [
"POST /user/codespaces/{codespace_name}/exports"
] ,
getCodespacesForUserInOrg : [
"GET /orgs/{org}/members/{username}/codespaces"
] ,
getExportDetailsForAuthenticatedUser : [
"GET /user/codespaces/{codespace_name}/exports/{export_id}"
] ,
2022-12-25 13:58:23 +08:00
getForAuthenticatedUser : [ "GET /user/codespaces/{codespace_name}" ] ,
2023-01-20 23:52:29 +00:00
getOrgPublicKey : [ "GET /orgs/{org}/codespaces/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/codespaces/secrets/{secret_name}" ] ,
2023-06-07 23:17:45 +00:00
getPublicKeyForAuthenticatedUser : [
"GET /user/codespaces/secrets/public-key"
] ,
getRepoPublicKey : [
"GET /repos/{owner}/{repo}/codespaces/secrets/public-key"
] ,
getRepoSecret : [
"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
] ,
getSecretForAuthenticatedUser : [
"GET /user/codespaces/secrets/{secret_name}"
] ,
listDevcontainersInRepositoryForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/codespaces/devcontainers"
] ,
2022-12-25 13:58:23 +08:00
listForAuthenticatedUser : [ "GET /user/codespaces" ] ,
2023-06-07 23:17:45 +00:00
listInOrganization : [
"GET /orgs/{org}/codespaces" ,
{ } ,
{ renamedParameters : { org _id : "org" } }
] ,
listInRepositoryForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/codespaces"
] ,
2023-01-20 23:52:29 +00:00
listOrgSecrets : [ "GET /orgs/{org}/codespaces/secrets" ] ,
2022-12-25 13:58:23 +08:00
listRepoSecrets : [ "GET /repos/{owner}/{repo}/codespaces/secrets" ] ,
2023-06-07 23:17:45 +00:00
listRepositoriesForSecretForAuthenticatedUser : [
"GET /user/codespaces/secrets/{secret_name}/repositories"
] ,
2022-12-25 13:58:23 +08:00
listSecretsForAuthenticatedUser : [ "GET /user/codespaces/secrets" ] ,
2023-06-07 23:17:45 +00:00
listSelectedReposForOrgSecret : [
"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
] ,
preFlightWithRepoForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/codespaces/new"
] ,
publishForAuthenticatedUser : [
"POST /user/codespaces/{codespace_name}/publish"
] ,
removeRepositoryForSecretForAuthenticatedUser : [
"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
] ,
removeSelectedRepoFromOrgSecret : [
"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
] ,
repoMachinesForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/codespaces/machines"
] ,
2023-01-20 23:52:29 +00:00
setCodespacesBilling : [ "PUT /orgs/{org}/codespaces/billing" ] ,
2023-06-07 23:17:45 +00:00
setCodespacesBillingUsers : [
"POST /orgs/{org}/codespaces/billing/selected_users"
] ,
setRepositoriesForSecretForAuthenticatedUser : [
"PUT /user/codespaces/secrets/{secret_name}/repositories"
] ,
setSelectedReposForOrgSecret : [
"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
] ,
2022-12-25 13:58:23 +08:00
startForAuthenticatedUser : [ "POST /user/codespaces/{codespace_name}/start" ] ,
stopForAuthenticatedUser : [ "POST /user/codespaces/{codespace_name}/stop" ] ,
2023-06-07 23:17:45 +00:00
stopInOrganization : [
"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"
] ,
2022-12-25 13:58:23 +08:00
updateForAuthenticatedUser : [ "PATCH /user/codespaces/{codespace_name}" ]
} ,
dependabot : {
2023-06-07 23:17:45 +00:00
addSelectedRepoToOrgSecret : [
"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
] ,
createOrUpdateOrgSecret : [
"PUT /orgs/{org}/dependabot/secrets/{secret_name}"
] ,
createOrUpdateRepoSecret : [
"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
] ,
2022-12-25 13:58:23 +08:00
deleteOrgSecret : [ "DELETE /orgs/{org}/dependabot/secrets/{secret_name}" ] ,
2023-06-07 23:17:45 +00:00
deleteRepoSecret : [
"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
] ,
2022-12-25 13:58:23 +08:00
getAlert : [ "GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/dependabot/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/dependabot/secrets/{secret_name}" ] ,
2023-06-07 23:17:45 +00:00
getRepoPublicKey : [
"GET /repos/{owner}/{repo}/dependabot/secrets/public-key"
] ,
getRepoSecret : [
"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
] ,
listAlertsForEnterprise : [
"GET /enterprises/{enterprise}/dependabot/alerts"
] ,
2023-01-20 23:52:29 +00:00
listAlertsForOrg : [ "GET /orgs/{org}/dependabot/alerts" ] ,
2022-12-25 13:58:23 +08:00
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/dependabot/alerts" ] ,
listOrgSecrets : [ "GET /orgs/{org}/dependabot/secrets" ] ,
listRepoSecrets : [ "GET /repos/{owner}/{repo}/dependabot/secrets" ] ,
2023-06-07 23:17:45 +00:00
listSelectedReposForOrgSecret : [
"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
] ,
removeSelectedRepoFromOrgSecret : [
"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
] ,
setSelectedReposForOrgSecret : [
"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
] ,
updateAlert : [
"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"
]
2022-12-25 13:58:23 +08:00
} ,
dependencyGraph : {
2023-06-07 23:17:45 +00:00
createRepositorySnapshot : [
"POST /repos/{owner}/{repo}/dependency-graph/snapshots"
] ,
diffRange : [
"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"
] ,
2023-05-19 08:00:27 +00:00
exportSbom : [ "GET /repos/{owner}/{repo}/dependency-graph/sbom" ]
2022-12-25 13:58:23 +08:00
} ,
2023-06-07 23:17:45 +00:00
emojis : { get : [ "GET /emojis" ] } ,
2022-12-25 13:58:23 +08:00
gists : {
checkIsStarred : [ "GET /gists/{gist_id}/star" ] ,
create : [ "POST /gists" ] ,
createComment : [ "POST /gists/{gist_id}/comments" ] ,
delete : [ "DELETE /gists/{gist_id}" ] ,
deleteComment : [ "DELETE /gists/{gist_id}/comments/{comment_id}" ] ,
fork : [ "POST /gists/{gist_id}/forks" ] ,
get : [ "GET /gists/{gist_id}" ] ,
getComment : [ "GET /gists/{gist_id}/comments/{comment_id}" ] ,
getRevision : [ "GET /gists/{gist_id}/{sha}" ] ,
list : [ "GET /gists" ] ,
listComments : [ "GET /gists/{gist_id}/comments" ] ,
listCommits : [ "GET /gists/{gist_id}/commits" ] ,
listForUser : [ "GET /users/{username}/gists" ] ,
listForks : [ "GET /gists/{gist_id}/forks" ] ,
listPublic : [ "GET /gists/public" ] ,
listStarred : [ "GET /gists/starred" ] ,
star : [ "PUT /gists/{gist_id}/star" ] ,
unstar : [ "DELETE /gists/{gist_id}/star" ] ,
update : [ "PATCH /gists/{gist_id}" ] ,
updateComment : [ "PATCH /gists/{gist_id}/comments/{comment_id}" ]
} ,
git : {
createBlob : [ "POST /repos/{owner}/{repo}/git/blobs" ] ,
createCommit : [ "POST /repos/{owner}/{repo}/git/commits" ] ,
createRef : [ "POST /repos/{owner}/{repo}/git/refs" ] ,
createTag : [ "POST /repos/{owner}/{repo}/git/tags" ] ,
createTree : [ "POST /repos/{owner}/{repo}/git/trees" ] ,
deleteRef : [ "DELETE /repos/{owner}/{repo}/git/refs/{ref}" ] ,
getBlob : [ "GET /repos/{owner}/{repo}/git/blobs/{file_sha}" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/git/commits/{commit_sha}" ] ,
getRef : [ "GET /repos/{owner}/{repo}/git/ref/{ref}" ] ,
getTag : [ "GET /repos/{owner}/{repo}/git/tags/{tag_sha}" ] ,
getTree : [ "GET /repos/{owner}/{repo}/git/trees/{tree_sha}" ] ,
listMatchingRefs : [ "GET /repos/{owner}/{repo}/git/matching-refs/{ref}" ] ,
updateRef : [ "PATCH /repos/{owner}/{repo}/git/refs/{ref}" ]
} ,
gitignore : {
getAllTemplates : [ "GET /gitignore/templates" ] ,
getTemplate : [ "GET /gitignore/templates/{name}" ]
} ,
interactions : {
getRestrictionsForAuthenticatedUser : [ "GET /user/interaction-limits" ] ,
getRestrictionsForOrg : [ "GET /orgs/{org}/interaction-limits" ] ,
getRestrictionsForRepo : [ "GET /repos/{owner}/{repo}/interaction-limits" ] ,
2023-06-07 23:17:45 +00:00
getRestrictionsForYourPublicRepos : [
"GET /user/interaction-limits" ,
{ } ,
{ renamed : [ "interactions" , "getRestrictionsForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
removeRestrictionsForAuthenticatedUser : [ "DELETE /user/interaction-limits" ] ,
removeRestrictionsForOrg : [ "DELETE /orgs/{org}/interaction-limits" ] ,
2023-06-07 23:17:45 +00:00
removeRestrictionsForRepo : [
"DELETE /repos/{owner}/{repo}/interaction-limits"
] ,
removeRestrictionsForYourPublicRepos : [
"DELETE /user/interaction-limits" ,
{ } ,
{ renamed : [ "interactions" , "removeRestrictionsForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
setRestrictionsForAuthenticatedUser : [ "PUT /user/interaction-limits" ] ,
setRestrictionsForOrg : [ "PUT /orgs/{org}/interaction-limits" ] ,
setRestrictionsForRepo : [ "PUT /repos/{owner}/{repo}/interaction-limits" ] ,
2023-06-07 23:17:45 +00:00
setRestrictionsForYourPublicRepos : [
"PUT /user/interaction-limits" ,
{ } ,
{ renamed : [ "interactions" , "setRestrictionsForAuthenticatedUser" ] }
]
2022-12-25 13:58:23 +08:00
} ,
issues : {
2023-06-07 23:17:45 +00:00
addAssignees : [
"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"
] ,
2022-12-25 13:58:23 +08:00
addLabels : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
checkUserCanBeAssigned : [ "GET /repos/{owner}/{repo}/assignees/{assignee}" ] ,
2023-06-07 23:17:45 +00:00
checkUserCanBeAssignedToIssue : [
"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}"
] ,
2022-12-25 13:58:23 +08:00
create : [ "POST /repos/{owner}/{repo}/issues" ] ,
2023-06-07 23:17:45 +00:00
createComment : [
"POST /repos/{owner}/{repo}/issues/{issue_number}/comments"
] ,
2022-12-25 13:58:23 +08:00
createLabel : [ "POST /repos/{owner}/{repo}/labels" ] ,
createMilestone : [ "POST /repos/{owner}/{repo}/milestones" ] ,
2023-06-07 23:17:45 +00:00
deleteComment : [
"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"
] ,
2022-12-25 13:58:23 +08:00
deleteLabel : [ "DELETE /repos/{owner}/{repo}/labels/{name}" ] ,
2023-06-07 23:17:45 +00:00
deleteMilestone : [
"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"
] ,
2022-12-25 13:58:23 +08:00
get : [ "GET /repos/{owner}/{repo}/issues/{issue_number}" ] ,
getComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
getEvent : [ "GET /repos/{owner}/{repo}/issues/events/{event_id}" ] ,
getLabel : [ "GET /repos/{owner}/{repo}/labels/{name}" ] ,
getMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
list : [ "GET /issues" ] ,
listAssignees : [ "GET /repos/{owner}/{repo}/assignees" ] ,
listComments : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
listCommentsForRepo : [ "GET /repos/{owner}/{repo}/issues/comments" ] ,
listEvents : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/events" ] ,
listEventsForRepo : [ "GET /repos/{owner}/{repo}/issues/events" ] ,
2023-06-07 23:17:45 +00:00
listEventsForTimeline : [
"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"
] ,
2022-12-25 13:58:23 +08:00
listForAuthenticatedUser : [ "GET /user/issues" ] ,
listForOrg : [ "GET /orgs/{org}/issues" ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/issues" ] ,
2023-06-07 23:17:45 +00:00
listLabelsForMilestone : [
"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"
] ,
2022-12-25 13:58:23 +08:00
listLabelsForRepo : [ "GET /repos/{owner}/{repo}/labels" ] ,
2023-06-07 23:17:45 +00:00
listLabelsOnIssue : [
"GET /repos/{owner}/{repo}/issues/{issue_number}/labels"
] ,
2022-12-25 13:58:23 +08:00
listMilestones : [ "GET /repos/{owner}/{repo}/milestones" ] ,
lock : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
2023-06-07 23:17:45 +00:00
removeAllLabels : [
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"
] ,
removeAssignees : [
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"
] ,
removeLabel : [
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"
] ,
2022-12-25 13:58:23 +08:00
setLabels : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
unlock : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
update : [ "PATCH /repos/{owner}/{repo}/issues/{issue_number}" ] ,
updateComment : [ "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
updateLabel : [ "PATCH /repos/{owner}/{repo}/labels/{name}" ] ,
2023-06-07 23:17:45 +00:00
updateMilestone : [
"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"
]
2022-12-25 13:58:23 +08:00
} ,
licenses : {
get : [ "GET /licenses/{license}" ] ,
getAllCommonlyUsed : [ "GET /licenses" ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/license" ]
} ,
markdown : {
render : [ "POST /markdown" ] ,
2023-06-07 23:17:45 +00:00
renderRaw : [
"POST /markdown/raw" ,
{ headers : { "content-type" : "text/plain; charset=utf-8" } }
]
2022-12-25 13:58:23 +08:00
} ,
meta : {
get : [ "GET /meta" ] ,
2023-01-20 23:52:29 +00:00
getAllVersions : [ "GET /versions" ] ,
2022-12-25 13:58:23 +08:00
getOctocat : [ "GET /octocat" ] ,
getZen : [ "GET /zen" ] ,
root : [ "GET /" ]
} ,
migrations : {
cancelImport : [ "DELETE /repos/{owner}/{repo}/import" ] ,
2023-06-07 23:17:45 +00:00
deleteArchiveForAuthenticatedUser : [
"DELETE /user/migrations/{migration_id}/archive"
] ,
deleteArchiveForOrg : [
"DELETE /orgs/{org}/migrations/{migration_id}/archive"
] ,
downloadArchiveForOrg : [
"GET /orgs/{org}/migrations/{migration_id}/archive"
] ,
getArchiveForAuthenticatedUser : [
"GET /user/migrations/{migration_id}/archive"
] ,
2022-12-25 13:58:23 +08:00
getCommitAuthors : [ "GET /repos/{owner}/{repo}/import/authors" ] ,
getImportStatus : [ "GET /repos/{owner}/{repo}/import" ] ,
getLargeFiles : [ "GET /repos/{owner}/{repo}/import/large_files" ] ,
getStatusForAuthenticatedUser : [ "GET /user/migrations/{migration_id}" ] ,
getStatusForOrg : [ "GET /orgs/{org}/migrations/{migration_id}" ] ,
listForAuthenticatedUser : [ "GET /user/migrations" ] ,
listForOrg : [ "GET /orgs/{org}/migrations" ] ,
2023-06-07 23:17:45 +00:00
listReposForAuthenticatedUser : [
"GET /user/migrations/{migration_id}/repositories"
] ,
2022-12-25 13:58:23 +08:00
listReposForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/repositories" ] ,
2023-06-07 23:17:45 +00:00
listReposForUser : [
"GET /user/migrations/{migration_id}/repositories" ,
{ } ,
{ renamed : [ "migrations" , "listReposForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
mapCommitAuthor : [ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}" ] ,
setLfsPreference : [ "PATCH /repos/{owner}/{repo}/import/lfs" ] ,
startForAuthenticatedUser : [ "POST /user/migrations" ] ,
startForOrg : [ "POST /orgs/{org}/migrations" ] ,
startImport : [ "PUT /repos/{owner}/{repo}/import" ] ,
2023-06-07 23:17:45 +00:00
unlockRepoForAuthenticatedUser : [
"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"
] ,
unlockRepoForOrg : [
"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
] ,
2022-12-25 13:58:23 +08:00
updateImport : [ "PATCH /repos/{owner}/{repo}/import" ]
} ,
orgs : {
2023-06-07 23:17:45 +00:00
addSecurityManagerTeam : [
"PUT /orgs/{org}/security-managers/teams/{team_slug}"
] ,
2022-12-25 13:58:23 +08:00
blockUser : [ "PUT /orgs/{org}/blocks/{username}" ] ,
cancelInvitation : [ "DELETE /orgs/{org}/invitations/{invitation_id}" ] ,
checkBlockedUser : [ "GET /orgs/{org}/blocks/{username}" ] ,
checkMembershipForUser : [ "GET /orgs/{org}/members/{username}" ] ,
checkPublicMembershipForUser : [ "GET /orgs/{org}/public_members/{username}" ] ,
2023-06-07 23:17:45 +00:00
convertMemberToOutsideCollaborator : [
"PUT /orgs/{org}/outside_collaborators/{username}"
] ,
2022-12-25 13:58:23 +08:00
createInvitation : [ "POST /orgs/{org}/invitations" ] ,
createWebhook : [ "POST /orgs/{org}/hooks" ] ,
2023-05-19 08:00:27 +00:00
delete : [ "DELETE /orgs/{org}" ] ,
2022-12-25 13:58:23 +08:00
deleteWebhook : [ "DELETE /orgs/{org}/hooks/{hook_id}" ] ,
2023-06-07 23:17:45 +00:00
enableOrDisableSecurityProductOnAllOrgRepos : [
"POST /orgs/{org}/{security_product}/{enablement}"
] ,
2022-12-25 13:58:23 +08:00
get : [ "GET /orgs/{org}" ] ,
getMembershipForAuthenticatedUser : [ "GET /user/memberships/orgs/{org}" ] ,
getMembershipForUser : [ "GET /orgs/{org}/memberships/{username}" ] ,
getWebhook : [ "GET /orgs/{org}/hooks/{hook_id}" ] ,
getWebhookConfigForOrg : [ "GET /orgs/{org}/hooks/{hook_id}/config" ] ,
2023-06-07 23:17:45 +00:00
getWebhookDelivery : [
"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"
] ,
2022-12-25 13:58:23 +08:00
list : [ "GET /organizations" ] ,
listAppInstallations : [ "GET /orgs/{org}/installations" ] ,
listBlockedUsers : [ "GET /orgs/{org}/blocks" ] ,
listFailedInvitations : [ "GET /orgs/{org}/failed_invitations" ] ,
listForAuthenticatedUser : [ "GET /user/orgs" ] ,
listForUser : [ "GET /users/{username}/orgs" ] ,
listInvitationTeams : [ "GET /orgs/{org}/invitations/{invitation_id}/teams" ] ,
listMembers : [ "GET /orgs/{org}/members" ] ,
listMembershipsForAuthenticatedUser : [ "GET /user/memberships/orgs" ] ,
listOutsideCollaborators : [ "GET /orgs/{org}/outside_collaborators" ] ,
2023-06-07 23:17:45 +00:00
listPatGrantRepositories : [
"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories"
] ,
listPatGrantRequestRepositories : [
"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories"
] ,
listPatGrantRequests : [
"GET /organizations/{org}/personal-access-token-requests"
] ,
2023-05-19 08:00:27 +00:00
listPatGrants : [ "GET /organizations/{org}/personal-access-tokens" ] ,
2022-12-25 13:58:23 +08:00
listPendingInvitations : [ "GET /orgs/{org}/invitations" ] ,
listPublicMembers : [ "GET /orgs/{org}/public_members" ] ,
listSecurityManagerTeams : [ "GET /orgs/{org}/security-managers" ] ,
listWebhookDeliveries : [ "GET /orgs/{org}/hooks/{hook_id}/deliveries" ] ,
listWebhooks : [ "GET /orgs/{org}/hooks" ] ,
pingWebhook : [ "POST /orgs/{org}/hooks/{hook_id}/pings" ] ,
2023-06-07 23:17:45 +00:00
redeliverWebhookDelivery : [
"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
] ,
2022-12-25 13:58:23 +08:00
removeMember : [ "DELETE /orgs/{org}/members/{username}" ] ,
removeMembershipForUser : [ "DELETE /orgs/{org}/memberships/{username}" ] ,
2023-06-07 23:17:45 +00:00
removeOutsideCollaborator : [
"DELETE /orgs/{org}/outside_collaborators/{username}"
] ,
removePublicMembershipForAuthenticatedUser : [
"DELETE /orgs/{org}/public_members/{username}"
] ,
removeSecurityManagerTeam : [
"DELETE /orgs/{org}/security-managers/teams/{team_slug}"
] ,
reviewPatGrantRequest : [
"POST /organizations/{org}/personal-access-token-requests/{pat_request_id}"
] ,
reviewPatGrantRequestsInBulk : [
"POST /organizations/{org}/personal-access-token-requests"
] ,
2022-12-25 13:58:23 +08:00
setMembershipForUser : [ "PUT /orgs/{org}/memberships/{username}" ] ,
2023-06-07 23:17:45 +00:00
setPublicMembershipForAuthenticatedUser : [
"PUT /orgs/{org}/public_members/{username}"
] ,
2022-12-25 13:58:23 +08:00
unblockUser : [ "DELETE /orgs/{org}/blocks/{username}" ] ,
update : [ "PATCH /orgs/{org}" ] ,
2023-06-07 23:17:45 +00:00
updateMembershipForAuthenticatedUser : [
"PATCH /user/memberships/orgs/{org}"
] ,
updatePatAccess : [
"POST /organizations/{org}/personal-access-tokens/{pat_id}"
] ,
2023-05-19 08:00:27 +00:00
updatePatAccesses : [ "POST /organizations/{org}/personal-access-tokens" ] ,
2022-12-25 13:58:23 +08:00
updateWebhook : [ "PATCH /orgs/{org}/hooks/{hook_id}" ] ,
updateWebhookConfigForOrg : [ "PATCH /orgs/{org}/hooks/{hook_id}/config" ]
} ,
packages : {
2023-06-07 23:17:45 +00:00
deletePackageForAuthenticatedUser : [
"DELETE /user/packages/{package_type}/{package_name}"
] ,
deletePackageForOrg : [
"DELETE /orgs/{org}/packages/{package_type}/{package_name}"
] ,
deletePackageForUser : [
"DELETE /users/{username}/packages/{package_type}/{package_name}"
] ,
deletePackageVersionForAuthenticatedUser : [
"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
deletePackageVersionForOrg : [
"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
deletePackageVersionForUser : [
"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
getAllPackageVersionsForAPackageOwnedByAnOrg : [
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions" ,
{ } ,
{ renamed : [ "packages" , "getAllPackageVersionsForPackageOwnedByOrg" ] }
] ,
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser : [
"GET /user/packages/{package_type}/{package_name}/versions" ,
{ } ,
{
renamed : [
"packages" ,
"getAllPackageVersionsForPackageOwnedByAuthenticatedUser"
]
}
] ,
getAllPackageVersionsForPackageOwnedByAuthenticatedUser : [
"GET /user/packages/{package_type}/{package_name}/versions"
] ,
getAllPackageVersionsForPackageOwnedByOrg : [
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions"
] ,
getAllPackageVersionsForPackageOwnedByUser : [
"GET /users/{username}/packages/{package_type}/{package_name}/versions"
] ,
getPackageForAuthenticatedUser : [
"GET /user/packages/{package_type}/{package_name}"
] ,
getPackageForOrganization : [
"GET /orgs/{org}/packages/{package_type}/{package_name}"
] ,
getPackageForUser : [
"GET /users/{username}/packages/{package_type}/{package_name}"
] ,
getPackageVersionForAuthenticatedUser : [
"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
getPackageVersionForOrganization : [
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
getPackageVersionForUser : [
"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
listDockerMigrationConflictingPackagesForAuthenticatedUser : [
"GET /user/docker/conflicts"
] ,
listDockerMigrationConflictingPackagesForOrganization : [
"GET /orgs/{org}/docker/conflicts"
] ,
listDockerMigrationConflictingPackagesForUser : [
"GET /users/{username}/docker/conflicts"
] ,
2022-12-25 13:58:23 +08:00
listPackagesForAuthenticatedUser : [ "GET /user/packages" ] ,
listPackagesForOrganization : [ "GET /orgs/{org}/packages" ] ,
listPackagesForUser : [ "GET /users/{username}/packages" ] ,
2023-06-07 23:17:45 +00:00
restorePackageForAuthenticatedUser : [
"POST /user/packages/{package_type}/{package_name}/restore{?token}"
] ,
restorePackageForOrg : [
"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"
] ,
restorePackageForUser : [
"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"
] ,
restorePackageVersionForAuthenticatedUser : [
"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
] ,
restorePackageVersionForOrg : [
"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
] ,
restorePackageVersionForUser : [
"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
]
2022-12-25 13:58:23 +08:00
} ,
projects : {
addCollaborator : [ "PUT /projects/{project_id}/collaborators/{username}" ] ,
createCard : [ "POST /projects/columns/{column_id}/cards" ] ,
createColumn : [ "POST /projects/{project_id}/columns" ] ,
createForAuthenticatedUser : [ "POST /user/projects" ] ,
createForOrg : [ "POST /orgs/{org}/projects" ] ,
createForRepo : [ "POST /repos/{owner}/{repo}/projects" ] ,
delete : [ "DELETE /projects/{project_id}" ] ,
deleteCard : [ "DELETE /projects/columns/cards/{card_id}" ] ,
deleteColumn : [ "DELETE /projects/columns/{column_id}" ] ,
get : [ "GET /projects/{project_id}" ] ,
getCard : [ "GET /projects/columns/cards/{card_id}" ] ,
getColumn : [ "GET /projects/columns/{column_id}" ] ,
2023-06-07 23:17:45 +00:00
getPermissionForUser : [
"GET /projects/{project_id}/collaborators/{username}/permission"
] ,
2022-12-25 13:58:23 +08:00
listCards : [ "GET /projects/columns/{column_id}/cards" ] ,
listCollaborators : [ "GET /projects/{project_id}/collaborators" ] ,
listColumns : [ "GET /projects/{project_id}/columns" ] ,
listForOrg : [ "GET /orgs/{org}/projects" ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/projects" ] ,
listForUser : [ "GET /users/{username}/projects" ] ,
moveCard : [ "POST /projects/columns/cards/{card_id}/moves" ] ,
moveColumn : [ "POST /projects/columns/{column_id}/moves" ] ,
2023-06-07 23:17:45 +00:00
removeCollaborator : [
"DELETE /projects/{project_id}/collaborators/{username}"
] ,
2022-12-25 13:58:23 +08:00
update : [ "PATCH /projects/{project_id}" ] ,
updateCard : [ "PATCH /projects/columns/cards/{card_id}" ] ,
updateColumn : [ "PATCH /projects/columns/{column_id}" ]
} ,
pulls : {
checkIfMerged : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
create : [ "POST /repos/{owner}/{repo}/pulls" ] ,
2023-06-07 23:17:45 +00:00
createReplyForReviewComment : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"
] ,
2022-12-25 13:58:23 +08:00
createReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
2023-06-07 23:17:45 +00:00
createReviewComment : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"
] ,
deletePendingReview : [
"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
] ,
deleteReviewComment : [
"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"
] ,
dismissReview : [
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"
] ,
2022-12-25 13:58:23 +08:00
get : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
2023-06-07 23:17:45 +00:00
getReview : [
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
] ,
2022-12-25 13:58:23 +08:00
getReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
list : [ "GET /repos/{owner}/{repo}/pulls" ] ,
2023-06-07 23:17:45 +00:00
listCommentsForReview : [
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"
] ,
2022-12-25 13:58:23 +08:00
listCommits : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits" ] ,
listFiles : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files" ] ,
2023-06-07 23:17:45 +00:00
listRequestedReviewers : [
"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
] ,
listReviewComments : [
"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"
] ,
2022-12-25 13:58:23 +08:00
listReviewCommentsForRepo : [ "GET /repos/{owner}/{repo}/pulls/comments" ] ,
listReviews : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
merge : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
2023-06-07 23:17:45 +00:00
removeRequestedReviewers : [
"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
] ,
requestReviewers : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
] ,
submitReview : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"
] ,
2022-12-25 13:58:23 +08:00
update : [ "PATCH /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
2023-06-07 23:17:45 +00:00
updateBranch : [
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"
] ,
updateReview : [
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
] ,
updateReviewComment : [
"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"
]
2022-12-25 13:58:23 +08:00
} ,
2023-06-07 23:17:45 +00:00
rateLimit : { get : [ "GET /rate_limit" ] } ,
2022-12-25 13:58:23 +08:00
reactions : {
2023-06-07 23:17:45 +00:00
createForCommitComment : [
"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"
] ,
createForIssue : [
"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"
] ,
createForIssueComment : [
"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
] ,
createForPullRequestReviewComment : [
"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
] ,
createForRelease : [
"POST /repos/{owner}/{repo}/releases/{release_id}/reactions"
] ,
createForTeamDiscussionCommentInOrg : [
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
] ,
createForTeamDiscussionInOrg : [
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
] ,
deleteForCommitComment : [
"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"
] ,
deleteForIssue : [
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"
] ,
deleteForIssueComment : [
"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"
] ,
deleteForPullRequestComment : [
"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"
] ,
deleteForRelease : [
"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"
] ,
deleteForTeamDiscussion : [
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"
] ,
deleteForTeamDiscussionComment : [
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"
] ,
listForCommitComment : [
"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"
] ,
2022-12-25 13:58:23 +08:00
listForIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions" ] ,
2023-06-07 23:17:45 +00:00
listForIssueComment : [
"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
] ,
listForPullRequestReviewComment : [
"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
] ,
listForRelease : [
"GET /repos/{owner}/{repo}/releases/{release_id}/reactions"
] ,
listForTeamDiscussionCommentInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
] ,
listForTeamDiscussionInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
]
2022-12-25 13:58:23 +08:00
} ,
repos : {
2023-06-07 23:17:45 +00:00
acceptInvitation : [
"PATCH /user/repository_invitations/{invitation_id}" ,
{ } ,
{ renamed : [ "repos" , "acceptInvitationForAuthenticatedUser" ] }
] ,
acceptInvitationForAuthenticatedUser : [
"PATCH /user/repository_invitations/{invitation_id}"
] ,
addAppAccessRestrictions : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ,
{ } ,
{ mapToData : "apps" }
] ,
2022-12-25 13:58:23 +08:00
addCollaborator : [ "PUT /repos/{owner}/{repo}/collaborators/{username}" ] ,
2023-06-07 23:17:45 +00:00
addStatusCheckContexts : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ,
{ } ,
{ mapToData : "contexts" }
] ,
addTeamAccessRestrictions : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ,
{ } ,
{ mapToData : "teams" }
] ,
addUserAccessRestrictions : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ,
{ } ,
{ mapToData : "users" }
] ,
2022-12-25 13:58:23 +08:00
checkCollaborator : [ "GET /repos/{owner}/{repo}/collaborators/{username}" ] ,
2023-06-07 23:17:45 +00:00
checkVulnerabilityAlerts : [
"GET /repos/{owner}/{repo}/vulnerability-alerts"
] ,
2022-12-25 13:58:23 +08:00
codeownersErrors : [ "GET /repos/{owner}/{repo}/codeowners/errors" ] ,
compareCommits : [ "GET /repos/{owner}/{repo}/compare/{base}...{head}" ] ,
2023-06-07 23:17:45 +00:00
compareCommitsWithBasehead : [
"GET /repos/{owner}/{repo}/compare/{basehead}"
] ,
2022-12-25 13:58:23 +08:00
createAutolink : [ "POST /repos/{owner}/{repo}/autolinks" ] ,
2023-06-07 23:17:45 +00:00
createCommitComment : [
"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"
] ,
createCommitSignatureProtection : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
] ,
2022-12-25 13:58:23 +08:00
createCommitStatus : [ "POST /repos/{owner}/{repo}/statuses/{sha}" ] ,
createDeployKey : [ "POST /repos/{owner}/{repo}/keys" ] ,
createDeployment : [ "POST /repos/{owner}/{repo}/deployments" ] ,
2023-06-07 23:17:45 +00:00
createDeploymentBranchPolicy : [
"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
] ,
createDeploymentProtectionRule : [
"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
] ,
createDeploymentStatus : [
"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
] ,
2022-12-25 13:58:23 +08:00
createDispatchEvent : [ "POST /repos/{owner}/{repo}/dispatches" ] ,
createForAuthenticatedUser : [ "POST /user/repos" ] ,
createFork : [ "POST /repos/{owner}/{repo}/forks" ] ,
createInOrg : [ "POST /orgs/{org}/repos" ] ,
2023-06-07 23:17:45 +00:00
createOrUpdateEnvironment : [
"PUT /repos/{owner}/{repo}/environments/{environment_name}"
] ,
2022-12-25 13:58:23 +08:00
createOrUpdateFileContents : [ "PUT /repos/{owner}/{repo}/contents/{path}" ] ,
2023-05-19 08:00:27 +00:00
createOrgRuleset : [ "POST /orgs/{org}/rulesets" ] ,
2022-12-25 13:58:23 +08:00
createPagesDeployment : [ "POST /repos/{owner}/{repo}/pages/deployment" ] ,
createPagesSite : [ "POST /repos/{owner}/{repo}/pages" ] ,
createRelease : [ "POST /repos/{owner}/{repo}/releases" ] ,
2023-05-19 08:00:27 +00:00
createRepoRuleset : [ "POST /repos/{owner}/{repo}/rulesets" ] ,
2022-12-25 13:58:23 +08:00
createTagProtection : [ "POST /repos/{owner}/{repo}/tags/protection" ] ,
2023-06-07 23:17:45 +00:00
createUsingTemplate : [
"POST /repos/{template_owner}/{template_repo}/generate"
] ,
2022-12-25 13:58:23 +08:00
createWebhook : [ "POST /repos/{owner}/{repo}/hooks" ] ,
2023-06-07 23:17:45 +00:00
declineInvitation : [
"DELETE /user/repository_invitations/{invitation_id}" ,
{ } ,
{ renamed : [ "repos" , "declineInvitationForAuthenticatedUser" ] }
] ,
declineInvitationForAuthenticatedUser : [
"DELETE /user/repository_invitations/{invitation_id}"
] ,
2022-12-25 13:58:23 +08:00
delete : [ "DELETE /repos/{owner}/{repo}" ] ,
2023-06-07 23:17:45 +00:00
deleteAccessRestrictions : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
] ,
deleteAdminBranchProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
] ,
deleteAnEnvironment : [
"DELETE /repos/{owner}/{repo}/environments/{environment_name}"
] ,
2022-12-25 13:58:23 +08:00
deleteAutolink : [ "DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}" ] ,
2023-06-07 23:17:45 +00:00
deleteBranchProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection"
] ,
2022-12-25 13:58:23 +08:00
deleteCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}" ] ,
2023-06-07 23:17:45 +00:00
deleteCommitSignatureProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
] ,
2022-12-25 13:58:23 +08:00
deleteDeployKey : [ "DELETE /repos/{owner}/{repo}/keys/{key_id}" ] ,
2023-06-07 23:17:45 +00:00
deleteDeployment : [
"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"
] ,
deleteDeploymentBranchPolicy : [
"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
] ,
2022-12-25 13:58:23 +08:00
deleteFile : [ "DELETE /repos/{owner}/{repo}/contents/{path}" ] ,
2023-06-07 23:17:45 +00:00
deleteInvitation : [
"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"
] ,
2023-05-19 08:00:27 +00:00
deleteOrgRuleset : [ "DELETE /orgs/{org}/rulesets/{ruleset_id}" ] ,
2022-12-25 13:58:23 +08:00
deletePagesSite : [ "DELETE /repos/{owner}/{repo}/pages" ] ,
2023-06-07 23:17:45 +00:00
deletePullRequestReviewProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
] ,
2022-12-25 13:58:23 +08:00
deleteRelease : [ "DELETE /repos/{owner}/{repo}/releases/{release_id}" ] ,
2023-06-07 23:17:45 +00:00
deleteReleaseAsset : [
"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"
] ,
2023-05-19 08:00:27 +00:00
deleteRepoRuleset : [ "DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}" ] ,
2023-06-07 23:17:45 +00:00
deleteTagProtection : [
"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"
] ,
2022-12-25 13:58:23 +08:00
deleteWebhook : [ "DELETE /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
2023-06-07 23:17:45 +00:00
disableAutomatedSecurityFixes : [
"DELETE /repos/{owner}/{repo}/automated-security-fixes"
] ,
disableDeploymentProtectionRule : [
"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
] ,
2022-12-25 13:58:23 +08:00
disableLfsForRepo : [ "DELETE /repos/{owner}/{repo}/lfs" ] ,
2023-06-07 23:17:45 +00:00
disableVulnerabilityAlerts : [
"DELETE /repos/{owner}/{repo}/vulnerability-alerts"
] ,
downloadArchive : [
"GET /repos/{owner}/{repo}/zipball/{ref}" ,
{ } ,
{ renamed : [ "repos" , "downloadZipballArchive" ] }
] ,
2022-12-25 13:58:23 +08:00
downloadTarballArchive : [ "GET /repos/{owner}/{repo}/tarball/{ref}" ] ,
downloadZipballArchive : [ "GET /repos/{owner}/{repo}/zipball/{ref}" ] ,
2023-06-07 23:17:45 +00:00
enableAutomatedSecurityFixes : [
"PUT /repos/{owner}/{repo}/automated-security-fixes"
] ,
2022-12-25 13:58:23 +08:00
enableLfsForRepo : [ "PUT /repos/{owner}/{repo}/lfs" ] ,
2023-06-07 23:17:45 +00:00
enableVulnerabilityAlerts : [
"PUT /repos/{owner}/{repo}/vulnerability-alerts"
] ,
generateReleaseNotes : [
"POST /repos/{owner}/{repo}/releases/generate-notes"
] ,
2022-12-25 13:58:23 +08:00
get : [ "GET /repos/{owner}/{repo}" ] ,
2023-06-07 23:17:45 +00:00
getAccessRestrictions : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
] ,
getAdminBranchProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
] ,
getAllDeploymentProtectionRules : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
] ,
2022-12-25 13:58:23 +08:00
getAllEnvironments : [ "GET /repos/{owner}/{repo}/environments" ] ,
2023-06-07 23:17:45 +00:00
getAllStatusCheckContexts : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"
] ,
2022-12-25 13:58:23 +08:00
getAllTopics : [ "GET /repos/{owner}/{repo}/topics" ] ,
2023-06-07 23:17:45 +00:00
getAppsWithAccessToProtectedBranch : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"
] ,
2022-12-25 13:58:23 +08:00
getAutolink : [ "GET /repos/{owner}/{repo}/autolinks/{autolink_id}" ] ,
getBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}" ] ,
2023-06-07 23:17:45 +00:00
getBranchProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection"
] ,
2023-05-19 08:00:27 +00:00
getBranchRules : [ "GET /repos/{owner}/{repo}/rules/branches/{branch}" ] ,
2022-12-25 13:58:23 +08:00
getClones : [ "GET /repos/{owner}/{repo}/traffic/clones" ] ,
getCodeFrequencyStats : [ "GET /repos/{owner}/{repo}/stats/code_frequency" ] ,
2023-06-07 23:17:45 +00:00
getCollaboratorPermissionLevel : [
"GET /repos/{owner}/{repo}/collaborators/{username}/permission"
] ,
2022-12-25 13:58:23 +08:00
getCombinedStatusForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/status" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/commits/{ref}" ] ,
getCommitActivityStats : [ "GET /repos/{owner}/{repo}/stats/commit_activity" ] ,
getCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}" ] ,
2023-06-07 23:17:45 +00:00
getCommitSignatureProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
] ,
2022-12-25 13:58:23 +08:00
getCommunityProfileMetrics : [ "GET /repos/{owner}/{repo}/community/profile" ] ,
getContent : [ "GET /repos/{owner}/{repo}/contents/{path}" ] ,
getContributorsStats : [ "GET /repos/{owner}/{repo}/stats/contributors" ] ,
2023-06-07 23:17:45 +00:00
getCustomDeploymentProtectionRule : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
] ,
2022-12-25 13:58:23 +08:00
getDeployKey : [ "GET /repos/{owner}/{repo}/keys/{key_id}" ] ,
getDeployment : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
2023-06-07 23:17:45 +00:00
getDeploymentBranchPolicy : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
] ,
getDeploymentStatus : [
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"
] ,
getEnvironment : [
"GET /repos/{owner}/{repo}/environments/{environment_name}"
] ,
2022-12-25 13:58:23 +08:00
getLatestPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/latest" ] ,
getLatestRelease : [ "GET /repos/{owner}/{repo}/releases/latest" ] ,
2023-05-19 08:00:27 +00:00
getOrgRuleset : [ "GET /orgs/{org}/rulesets/{ruleset_id}" ] ,
getOrgRulesets : [ "GET /orgs/{org}/rulesets" ] ,
2022-12-25 13:58:23 +08:00
getPages : [ "GET /repos/{owner}/{repo}/pages" ] ,
getPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/{build_id}" ] ,
getPagesHealthCheck : [ "GET /repos/{owner}/{repo}/pages/health" ] ,
getParticipationStats : [ "GET /repos/{owner}/{repo}/stats/participation" ] ,
2023-06-07 23:17:45 +00:00
getPullRequestReviewProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
] ,
2022-12-25 13:58:23 +08:00
getPunchCardStats : [ "GET /repos/{owner}/{repo}/stats/punch_card" ] ,
getReadme : [ "GET /repos/{owner}/{repo}/readme" ] ,
getReadmeInDirectory : [ "GET /repos/{owner}/{repo}/readme/{dir}" ] ,
getRelease : [ "GET /repos/{owner}/{repo}/releases/{release_id}" ] ,
getReleaseAsset : [ "GET /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
getReleaseByTag : [ "GET /repos/{owner}/{repo}/releases/tags/{tag}" ] ,
2023-05-19 08:00:27 +00:00
getRepoRuleset : [ "GET /repos/{owner}/{repo}/rulesets/{ruleset_id}" ] ,
getRepoRulesets : [ "GET /repos/{owner}/{repo}/rulesets" ] ,
2023-06-07 23:17:45 +00:00
getStatusChecksProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
] ,
getTeamsWithAccessToProtectedBranch : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"
] ,
2022-12-25 13:58:23 +08:00
getTopPaths : [ "GET /repos/{owner}/{repo}/traffic/popular/paths" ] ,
getTopReferrers : [ "GET /repos/{owner}/{repo}/traffic/popular/referrers" ] ,
2023-06-07 23:17:45 +00:00
getUsersWithAccessToProtectedBranch : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"
] ,
2022-12-25 13:58:23 +08:00
getViews : [ "GET /repos/{owner}/{repo}/traffic/views" ] ,
getWebhook : [ "GET /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
2023-06-07 23:17:45 +00:00
getWebhookConfigForRepo : [
"GET /repos/{owner}/{repo}/hooks/{hook_id}/config"
] ,
getWebhookDelivery : [
"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"
] ,
2022-12-25 13:58:23 +08:00
listAutolinks : [ "GET /repos/{owner}/{repo}/autolinks" ] ,
listBranches : [ "GET /repos/{owner}/{repo}/branches" ] ,
2023-06-07 23:17:45 +00:00
listBranchesForHeadCommit : [
"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"
] ,
2022-12-25 13:58:23 +08:00
listCollaborators : [ "GET /repos/{owner}/{repo}/collaborators" ] ,
2023-06-07 23:17:45 +00:00
listCommentsForCommit : [
"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"
] ,
2022-12-25 13:58:23 +08:00
listCommitCommentsForRepo : [ "GET /repos/{owner}/{repo}/comments" ] ,
2023-06-07 23:17:45 +00:00
listCommitStatusesForRef : [
"GET /repos/{owner}/{repo}/commits/{ref}/statuses"
] ,
2022-12-25 13:58:23 +08:00
listCommits : [ "GET /repos/{owner}/{repo}/commits" ] ,
listContributors : [ "GET /repos/{owner}/{repo}/contributors" ] ,
2023-06-07 23:17:45 +00:00
listCustomDeploymentRuleIntegrations : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"
] ,
2022-12-25 13:58:23 +08:00
listDeployKeys : [ "GET /repos/{owner}/{repo}/keys" ] ,
2023-06-07 23:17:45 +00:00
listDeploymentBranchPolicies : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
] ,
listDeploymentStatuses : [
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
] ,
2022-12-25 13:58:23 +08:00
listDeployments : [ "GET /repos/{owner}/{repo}/deployments" ] ,
listForAuthenticatedUser : [ "GET /user/repos" ] ,
listForOrg : [ "GET /orgs/{org}/repos" ] ,
listForUser : [ "GET /users/{username}/repos" ] ,
listForks : [ "GET /repos/{owner}/{repo}/forks" ] ,
listInvitations : [ "GET /repos/{owner}/{repo}/invitations" ] ,
listInvitationsForAuthenticatedUser : [ "GET /user/repository_invitations" ] ,
listLanguages : [ "GET /repos/{owner}/{repo}/languages" ] ,
listPagesBuilds : [ "GET /repos/{owner}/{repo}/pages/builds" ] ,
listPublic : [ "GET /repositories" ] ,
2023-06-07 23:17:45 +00:00
listPullRequestsAssociatedWithCommit : [
"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"
] ,
listReleaseAssets : [
"GET /repos/{owner}/{repo}/releases/{release_id}/assets"
] ,
2022-12-25 13:58:23 +08:00
listReleases : [ "GET /repos/{owner}/{repo}/releases" ] ,
listTagProtection : [ "GET /repos/{owner}/{repo}/tags/protection" ] ,
listTags : [ "GET /repos/{owner}/{repo}/tags" ] ,
listTeams : [ "GET /repos/{owner}/{repo}/teams" ] ,
2023-06-07 23:17:45 +00:00
listWebhookDeliveries : [
"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"
] ,
2022-12-25 13:58:23 +08:00
listWebhooks : [ "GET /repos/{owner}/{repo}/hooks" ] ,
merge : [ "POST /repos/{owner}/{repo}/merges" ] ,
mergeUpstream : [ "POST /repos/{owner}/{repo}/merge-upstream" ] ,
pingWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings" ] ,
2023-06-07 23:17:45 +00:00
redeliverWebhookDelivery : [
"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
] ,
removeAppAccessRestrictions : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ,
{ } ,
{ mapToData : "apps" }
] ,
removeCollaborator : [
"DELETE /repos/{owner}/{repo}/collaborators/{username}"
] ,
removeStatusCheckContexts : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ,
{ } ,
{ mapToData : "contexts" }
] ,
removeStatusCheckProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
] ,
removeTeamAccessRestrictions : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ,
{ } ,
{ mapToData : "teams" }
] ,
removeUserAccessRestrictions : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ,
{ } ,
{ mapToData : "users" }
] ,
2022-12-25 13:58:23 +08:00
renameBranch : [ "POST /repos/{owner}/{repo}/branches/{branch}/rename" ] ,
replaceAllTopics : [ "PUT /repos/{owner}/{repo}/topics" ] ,
requestPagesBuild : [ "POST /repos/{owner}/{repo}/pages/builds" ] ,
2023-06-07 23:17:45 +00:00
setAdminBranchProtection : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
] ,
setAppAccessRestrictions : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ,
{ } ,
{ mapToData : "apps" }
] ,
setStatusCheckContexts : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ,
{ } ,
{ mapToData : "contexts" }
] ,
setTeamAccessRestrictions : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ,
{ } ,
{ mapToData : "teams" }
] ,
setUserAccessRestrictions : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ,
{ } ,
{ mapToData : "users" }
] ,
2022-12-25 13:58:23 +08:00
testPushWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests" ] ,
transfer : [ "POST /repos/{owner}/{repo}/transfer" ] ,
update : [ "PATCH /repos/{owner}/{repo}" ] ,
2023-06-07 23:17:45 +00:00
updateBranchProtection : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection"
] ,
2022-12-25 13:58:23 +08:00
updateCommitComment : [ "PATCH /repos/{owner}/{repo}/comments/{comment_id}" ] ,
2023-06-07 23:17:45 +00:00
updateDeploymentBranchPolicy : [
"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
] ,
2022-12-25 13:58:23 +08:00
updateInformationAboutPagesSite : [ "PUT /repos/{owner}/{repo}/pages" ] ,
2023-06-07 23:17:45 +00:00
updateInvitation : [
"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"
] ,
2023-05-19 08:00:27 +00:00
updateOrgRuleset : [ "PUT /orgs/{org}/rulesets/{ruleset_id}" ] ,
2023-06-07 23:17:45 +00:00
updatePullRequestReviewProtection : [
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
] ,
2022-12-25 13:58:23 +08:00
updateRelease : [ "PATCH /repos/{owner}/{repo}/releases/{release_id}" ] ,
2023-06-07 23:17:45 +00:00
updateReleaseAsset : [
"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"
] ,
2023-05-19 08:00:27 +00:00
updateRepoRuleset : [ "PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}" ] ,
2023-06-07 23:17:45 +00:00
updateStatusCheckPotection : [
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ,
{ } ,
{ renamed : [ "repos" , "updateStatusCheckProtection" ] }
] ,
updateStatusCheckProtection : [
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
] ,
2022-12-25 13:58:23 +08:00
updateWebhook : [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
2023-06-07 23:17:45 +00:00
updateWebhookConfigForRepo : [
"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"
] ,
uploadReleaseAsset : [
"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}" ,
{ baseUrl : "https://uploads.github.com" }
]
2022-12-25 13:58:23 +08:00
} ,
search : {
code : [ "GET /search/code" ] ,
commits : [ "GET /search/commits" ] ,
issuesAndPullRequests : [ "GET /search/issues" ] ,
labels : [ "GET /search/labels" ] ,
repos : [ "GET /search/repositories" ] ,
topics : [ "GET /search/topics" ] ,
users : [ "GET /search/users" ]
} ,
secretScanning : {
2023-06-07 23:17:45 +00:00
getAlert : [
"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
] ,
listAlertsForEnterprise : [
"GET /enterprises/{enterprise}/secret-scanning/alerts"
] ,
2022-12-25 13:58:23 +08:00
listAlertsForOrg : [ "GET /orgs/{org}/secret-scanning/alerts" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/secret-scanning/alerts" ] ,
2023-06-07 23:17:45 +00:00
listLocationsForAlert : [
"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"
] ,
updateAlert : [
"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
]
2022-12-25 13:58:23 +08:00
} ,
2023-05-19 08:00:27 +00:00
securityAdvisories : {
2023-06-07 23:17:45 +00:00
createPrivateVulnerabilityReport : [
"POST /repos/{owner}/{repo}/security-advisories/reports"
] ,
createRepositoryAdvisory : [
"POST /repos/{owner}/{repo}/security-advisories"
] ,
getRepositoryAdvisory : [
"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
] ,
2023-05-19 08:00:27 +00:00
listRepositoryAdvisories : [ "GET /repos/{owner}/{repo}/security-advisories" ] ,
2023-06-07 23:17:45 +00:00
updateRepositoryAdvisory : [
"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
]
2023-05-19 08:00:27 +00:00
} ,
2022-12-25 13:58:23 +08:00
teams : {
2023-06-07 23:17:45 +00:00
addOrUpdateMembershipForUserInOrg : [
"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"
] ,
addOrUpdateProjectPermissionsInOrg : [
"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"
] ,
addOrUpdateRepoPermissionsInOrg : [
"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
] ,
checkPermissionsForProjectInOrg : [
"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"
] ,
checkPermissionsForRepoInOrg : [
"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
] ,
2022-12-25 13:58:23 +08:00
create : [ "POST /orgs/{org}/teams" ] ,
2023-06-07 23:17:45 +00:00
createDiscussionCommentInOrg : [
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
] ,
2022-12-25 13:58:23 +08:00
createDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions" ] ,
2023-06-07 23:17:45 +00:00
deleteDiscussionCommentInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
] ,
deleteDiscussionInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
] ,
2022-12-25 13:58:23 +08:00
deleteInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}" ] ,
getByName : [ "GET /orgs/{org}/teams/{team_slug}" ] ,
2023-06-07 23:17:45 +00:00
getDiscussionCommentInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
] ,
getDiscussionInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
] ,
getMembershipForUserInOrg : [
"GET /orgs/{org}/teams/{team_slug}/memberships/{username}"
] ,
2022-12-25 13:58:23 +08:00
list : [ "GET /orgs/{org}/teams" ] ,
listChildInOrg : [ "GET /orgs/{org}/teams/{team_slug}/teams" ] ,
2023-06-07 23:17:45 +00:00
listDiscussionCommentsInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
] ,
2022-12-25 13:58:23 +08:00
listDiscussionsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions" ] ,
listForAuthenticatedUser : [ "GET /user/teams" ] ,
listMembersInOrg : [ "GET /orgs/{org}/teams/{team_slug}/members" ] ,
2023-06-07 23:17:45 +00:00
listPendingInvitationsInOrg : [
"GET /orgs/{org}/teams/{team_slug}/invitations"
] ,
2022-12-25 13:58:23 +08:00
listProjectsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects" ] ,
listReposInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos" ] ,
2023-06-07 23:17:45 +00:00
removeMembershipForUserInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"
] ,
removeProjectInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"
] ,
removeRepoInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
] ,
updateDiscussionCommentInOrg : [
"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
] ,
updateDiscussionInOrg : [
"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
] ,
2022-12-25 13:58:23 +08:00
updateInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}" ]
} ,
users : {
2023-06-07 23:17:45 +00:00
addEmailForAuthenticated : [
"POST /user/emails" ,
{ } ,
{ renamed : [ "users" , "addEmailForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
addEmailForAuthenticatedUser : [ "POST /user/emails" ] ,
2023-05-19 08:00:27 +00:00
addSocialAccountForAuthenticatedUser : [ "POST /user/social_accounts" ] ,
2022-12-25 13:58:23 +08:00
block : [ "PUT /user/blocks/{username}" ] ,
checkBlocked : [ "GET /user/blocks/{username}" ] ,
checkFollowingForUser : [ "GET /users/{username}/following/{target_user}" ] ,
checkPersonIsFollowedByAuthenticated : [ "GET /user/following/{username}" ] ,
2023-06-07 23:17:45 +00:00
createGpgKeyForAuthenticated : [
"POST /user/gpg_keys" ,
{ } ,
{ renamed : [ "users" , "createGpgKeyForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
createGpgKeyForAuthenticatedUser : [ "POST /user/gpg_keys" ] ,
2023-06-07 23:17:45 +00:00
createPublicSshKeyForAuthenticated : [
"POST /user/keys" ,
{ } ,
{ renamed : [ "users" , "createPublicSshKeyForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
createPublicSshKeyForAuthenticatedUser : [ "POST /user/keys" ] ,
createSshSigningKeyForAuthenticatedUser : [ "POST /user/ssh_signing_keys" ] ,
2023-06-07 23:17:45 +00:00
deleteEmailForAuthenticated : [
"DELETE /user/emails" ,
{ } ,
{ renamed : [ "users" , "deleteEmailForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
deleteEmailForAuthenticatedUser : [ "DELETE /user/emails" ] ,
2023-06-07 23:17:45 +00:00
deleteGpgKeyForAuthenticated : [
"DELETE /user/gpg_keys/{gpg_key_id}" ,
{ } ,
{ renamed : [ "users" , "deleteGpgKeyForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
deleteGpgKeyForAuthenticatedUser : [ "DELETE /user/gpg_keys/{gpg_key_id}" ] ,
2023-06-07 23:17:45 +00:00
deletePublicSshKeyForAuthenticated : [
"DELETE /user/keys/{key_id}" ,
{ } ,
{ renamed : [ "users" , "deletePublicSshKeyForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
deletePublicSshKeyForAuthenticatedUser : [ "DELETE /user/keys/{key_id}" ] ,
2023-05-19 08:00:27 +00:00
deleteSocialAccountForAuthenticatedUser : [ "DELETE /user/social_accounts" ] ,
2023-06-07 23:17:45 +00:00
deleteSshSigningKeyForAuthenticatedUser : [
"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}"
] ,
2022-12-25 13:58:23 +08:00
follow : [ "PUT /user/following/{username}" ] ,
getAuthenticated : [ "GET /user" ] ,
getByUsername : [ "GET /users/{username}" ] ,
getContextForUser : [ "GET /users/{username}/hovercard" ] ,
2023-06-07 23:17:45 +00:00
getGpgKeyForAuthenticated : [
"GET /user/gpg_keys/{gpg_key_id}" ,
{ } ,
{ renamed : [ "users" , "getGpgKeyForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
getGpgKeyForAuthenticatedUser : [ "GET /user/gpg_keys/{gpg_key_id}" ] ,
2023-06-07 23:17:45 +00:00
getPublicSshKeyForAuthenticated : [
"GET /user/keys/{key_id}" ,
{ } ,
{ renamed : [ "users" , "getPublicSshKeyForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
getPublicSshKeyForAuthenticatedUser : [ "GET /user/keys/{key_id}" ] ,
2023-06-07 23:17:45 +00:00
getSshSigningKeyForAuthenticatedUser : [
"GET /user/ssh_signing_keys/{ssh_signing_key_id}"
] ,
2022-12-25 13:58:23 +08:00
list : [ "GET /users" ] ,
2023-06-07 23:17:45 +00:00
listBlockedByAuthenticated : [
"GET /user/blocks" ,
{ } ,
{ renamed : [ "users" , "listBlockedByAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
listBlockedByAuthenticatedUser : [ "GET /user/blocks" ] ,
2023-06-07 23:17:45 +00:00
listEmailsForAuthenticated : [
"GET /user/emails" ,
{ } ,
{ renamed : [ "users" , "listEmailsForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
listEmailsForAuthenticatedUser : [ "GET /user/emails" ] ,
2023-06-07 23:17:45 +00:00
listFollowedByAuthenticated : [
"GET /user/following" ,
{ } ,
{ renamed : [ "users" , "listFollowedByAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
listFollowedByAuthenticatedUser : [ "GET /user/following" ] ,
listFollowersForAuthenticatedUser : [ "GET /user/followers" ] ,
listFollowersForUser : [ "GET /users/{username}/followers" ] ,
listFollowingForUser : [ "GET /users/{username}/following" ] ,
2023-06-07 23:17:45 +00:00
listGpgKeysForAuthenticated : [
"GET /user/gpg_keys" ,
{ } ,
{ renamed : [ "users" , "listGpgKeysForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
listGpgKeysForAuthenticatedUser : [ "GET /user/gpg_keys" ] ,
listGpgKeysForUser : [ "GET /users/{username}/gpg_keys" ] ,
2023-06-07 23:17:45 +00:00
listPublicEmailsForAuthenticated : [
"GET /user/public_emails" ,
{ } ,
{ renamed : [ "users" , "listPublicEmailsForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
listPublicEmailsForAuthenticatedUser : [ "GET /user/public_emails" ] ,
listPublicKeysForUser : [ "GET /users/{username}/keys" ] ,
2023-06-07 23:17:45 +00:00
listPublicSshKeysForAuthenticated : [
"GET /user/keys" ,
{ } ,
{ renamed : [ "users" , "listPublicSshKeysForAuthenticatedUser" ] }
] ,
2022-12-25 13:58:23 +08:00
listPublicSshKeysForAuthenticatedUser : [ "GET /user/keys" ] ,
2023-05-19 08:00:27 +00:00
listSocialAccountsForAuthenticatedUser : [ "GET /user/social_accounts" ] ,
listSocialAccountsForUser : [ "GET /users/{username}/social_accounts" ] ,
2022-12-25 13:58:23 +08:00
listSshSigningKeysForAuthenticatedUser : [ "GET /user/ssh_signing_keys" ] ,
listSshSigningKeysForUser : [ "GET /users/{username}/ssh_signing_keys" ] ,
2023-06-07 23:17:45 +00:00
setPrimaryEmailVisibilityForAuthenticated : [
"PATCH /user/email/visibility" ,
{ } ,
{ renamed : [ "users" , "setPrimaryEmailVisibilityForAuthenticatedUser" ] }
] ,
setPrimaryEmailVisibilityForAuthenticatedUser : [
"PATCH /user/email/visibility"
] ,
2022-12-25 13:58:23 +08:00
unblock : [ "DELETE /user/blocks/{username}" ] ,
unfollow : [ "DELETE /user/following/{username}" ] ,
updateAuthenticated : [ "PATCH /user" ]
}
} ;
2023-06-07 23:17:45 +00:00
// Alias the route table built above under the name the bundle's
// generated code uses for the module's default export.
var endpoints_default = Endpoints;

// pkg/dist-src/endpoints-to-methods.js
// Pre-compute a scope -> methodName -> { endpointDefaults, decorations }
// lookup so the per-scope Proxy handler can resolve methods lazily.
var endpointMethodsMap = /* @__PURE__ */ new Map();
for (const [scope, endpoints] of Object.entries(endpoints_default)) {
  for (const [methodName, endpoint] of Object.entries(endpoints)) {
    // endpoint is [route, defaults?, decorations?]
    const [route, defaults, decorations] = endpoint;
    // Routes are stored as "METHOD /path"; split on the first space.
    const [method, url] = route.split(/ /);
    const endpointDefaults = Object.assign(
      {
        method,
        url
      },
      defaults
    );
    if (!endpointMethodsMap.has(scope)) {
      endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());
    }
    endpointMethodsMap.get(scope).set(methodName, {
      scope,
      methodName,
      endpointDefaults,
      decorations
    });
  }
}
// Proxy handler backing each `octokit.rest.<scope>` namespace: methods
// are materialized on first access and memoized in the per-scope `cache`.
// (Stray VCS timestamp lines that had been interleaved here were removed;
// they were not valid JavaScript.)
var handler = {
  get({ octokit, scope, cache }, methodName) {
    if (cache[methodName]) {
      return cache[methodName];
    }
    const { decorations, endpointDefaults } = endpointMethodsMap.get(scope).get(methodName);
    if (decorations) {
      // Deprecated/renamed/mapToData endpoints need a wrapper function.
      cache[methodName] = decorate(
        octokit,
        scope,
        methodName,
        endpointDefaults,
        decorations
      );
    } else {
      // Plain endpoints are `octokit.request` pre-bound with method/url.
      cache[methodName] = octokit.request.defaults(endpointDefaults);
    }
    return cache[methodName];
  }
};
// Build the `rest` API object: one lazily-populated Proxy per scope
// (repos, users, teams, ...), all sharing the `handler` trap above.
function endpointsToMethods(octokit) {
  const newMethods = {};
  for (const scope of endpointMethodsMap.keys()) {
    newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);
  }
  return newMethods;
}
/**
 * Wrap a pre-bound request with endpoint "decorations":
 * - mapToData: move the named option into the request body (`data`)
 * - renamed: warn that the method moved to a new scope/name
 * - deprecated: emit the stored deprecation warning
 * - renamedParameters: map legacy option names to their new aliases
 * The returned function keeps `requestWithDefaults`'s own properties
 * (notably `.endpoint`) via the trailing Object.assign.
 */
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);
  function withDecorations(...args) {
    let options = requestWithDefaults.endpoint.merge(...args);
    if (decorations.mapToData) {
      // Move the mapped option into `data` and blank the original key.
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: void 0
      });
      return requestWithDefaults(options);
    }
    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(
        `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`
      );
    }
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }
    if (decorations.renamedParameters) {
      // Re-merge so the checks below see the caller's raw option names.
      const options2 = requestWithDefaults.endpoint.merge(...args);
      for (const [name, alias] of Object.entries(
        decorations.renamedParameters
      )) {
        if (name in options2) {
          octokit.log.warn(
            `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`
          );
          if (!(alias in options2)) {
            options2[alias] = options2[name];
          }
          delete options2[name];
        }
      }
      return requestWithDefaults(options2);
    }
    return requestWithDefaults(...args);
  }
  return Object.assign(withDecorations, requestWithDefaults);
}
2023-06-07 23:17:45 +00:00
// pkg/dist-src/index.js
2022-12-25 13:58:23 +08:00
// Plugin entry point: exposes generated endpoint methods under `.rest`.
function restEndpointMethods(octokit) {
  const api = endpointsToMethods(octokit);
  return {
    rest: api
  };
}
restEndpointMethods.VERSION = VERSION;
// Legacy entry point: exposes the methods both at the top level and
// under `.rest` (older plugin consumers spread them onto octokit).
function legacyRestEndpointMethods(octokit) {
  const api = endpointsToMethods(octokit);
  return {
    ...api,
    rest: api
  };
}
legacyRestEndpointMethods.VERSION = VERSION;

// Annotate the CommonJS export names for ESM import in node:
0 && (0);
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 5203 :
2023-07-27 11:01:06 +00:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-07-27 11:01:06 +00:00
// esbuild CommonJS/ESM interop helpers (generated bundler boilerplate).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install live getter-based exports on `target`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters (skipping `except`),
// preserving each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// pkg/dist-src/index.js
// Export wiring: `module.exports` is assigned before RequestError is
// defined below; __export installs getters, so the binding stays live.
var dist_src_exports = {};
__export(dist_src_exports, {
  RequestError: () => RequestError
});
module.exports = __toCommonJS(dist_src_exports);
var import_deprecation = __nccwpck_require__(8932);
var import_once = __toESM(__nccwpck_require__(1223));
// `once`-wrapped loggers: each deprecation warns at most once per process.
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
// Error thrown for failed Octokit requests. Carries the HTTP status, the
// (credential-redacted) request, and the response, plus deprecated
// `code`/`headers` accessors kept for backwards compatibility.
var RequestError = class extends Error {
  constructor(message, statusCode, options) {
    super(message);
    if (Error.captureStackTrace) {
      // Maintain a proper stack trace on V8.
      Error.captureStackTrace(this, this.constructor);
    }
    this.name = "HttpError";
    this.status = statusCode;
    let headers;
    if ("headers" in options && typeof options.headers !== "undefined") {
      headers = options.headers;
    }
    if ("response" in options) {
      this.response = options.response;
      headers = options.response.headers;
    }
    // Redact credentials before exposing the request on the error object.
    const requestCopy = Object.assign({}, options.request);
    if (options.request.headers.authorization) {
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(
          / .*$/,
          " [REDACTED]"
        )
      });
    }
    requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy;
    // Deprecated alias for `status`; warns once per process when read.
    Object.defineProperty(this, "code", {
      get() {
        logOnceCode(
          new import_deprecation.Deprecation(
            "[@octokit/request-error] `error.code` is deprecated, use `error.status`."
          )
        );
        return statusCode;
      }
    });
    // Deprecated alias for `response.headers`; warns once per process.
    Object.defineProperty(this, "headers", {
      get() {
        logOnceHeaders(
          new import_deprecation.Deprecation(
            "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."
          )
        );
        return headers || {};
      }
    });
  }
};
// Annotate the CommonJS export names for ESM import in node:
0 && (0);
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 6094 :
2023-05-25 21:58:02 +00:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-05-25 21:58:02 +00:00
// esbuild CommonJS interop helpers (generated bundler boilerplate).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install live getter-based exports on `target`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters (skipping `except`),
// preserving each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// pkg/dist-src/index.js
// Export wiring for the @octokit/request module; `request` is defined
// later in this module, the getter keeps the binding live.
var dist_src_exports = {};
__export(dist_src_exports, {
  request: () => request
});
module.exports = __toCommonJS(dist_src_exports);
var import_endpoint = __nccwpck_require__(3348);
var import_universal_user_agent = __nccwpck_require__(5030);

// pkg/dist-src/version.js
var VERSION = "8.1.0";

// pkg/dist-src/fetch-wrapper.js
var import_is_plain_object = __nccwpck_require__(3287);
var import_request_error = __nccwpck_require__(5203);
2022-12-25 13:58:23 +08:00
2023-05-25 21:58:02 +00:00
// pkg/dist-src/get-buffer-response.js
// Read the response body as a raw ArrayBuffer (used for binary media types).
function getBufferResponse(response) {
  return response.arrayBuffer();
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/fetch-wrapper.js
2022-12-25 13:58:23 +08:00
// Executes an HTTP request with the global (or caller-injected) fetch API and
// normalizes the outcome to octokit's { status, url, headers, data } shape.
// All failures are surfaced as RequestError instances.
function fetchWrapper(requestOptions) {
  var _a, _b, _c;
  const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
  // Body parsing of successful responses can be disabled via
  // request.parseSuccessResponseBody = false (caller then gets the raw body stream).
  const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;
  if ((0, import_is_plain_object.isPlainObject)(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }
  let headers = {};
  let status;
  let url;
  // Prefer a caller-supplied fetch implementation over the global one.
  let { fetch } = globalThis;
  if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {
    fetch = requestOptions.request.fetch;
  }
  if (!fetch) {
    throw new Error(
      'Global "fetch" not found. Please provide `options.request.fetch` to octokit or upgrade to node@18 or newer.'
    );
  }
  return fetch(requestOptions.url, {
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    signal: (_c = requestOptions.request) == null ? void 0 : _c.signal,
    // duplex must be set if request.body is ReadableStream or Async Iterables.
    // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.
    ...requestOptions.body && { duplex: "half" }
  }).then(async (response) => {
    url = response.url;
    status = response.status;
    // Flatten the Headers iterable into a plain object.
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }
    if ("deprecation" in headers) {
      // Surface API deprecation notices (sunset header + link rel="deprecation").
      const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
      const deprecationLink = matches && matches.pop();
      log.warn(
        `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`
      );
    }
    if (status === 204 || status === 205) {
      // No Content / Reset Content: nothing to parse.
      return;
    }
    if (requestOptions.method === "HEAD") {
      // HEAD responses carry no body; only the status matters.
      if (status < 400) {
        return;
      }
      throw new import_request_error.RequestError(response.statusText, status, {
        response: {
          url,
          status,
          headers,
          data: void 0
        },
        request: requestOptions
      });
    }
    if (status === 304) {
      throw new import_request_error.RequestError("Not modified", status, {
        response: {
          url,
          status,
          headers,
          data: await getResponseData(response)
        },
        request: requestOptions
      });
    }
    if (status >= 400) {
      const data = await getResponseData(response);
      const error = new import_request_error.RequestError(toErrorMessage(data), status, {
        response: {
          url,
          status,
          headers,
          data
        },
        request: requestOptions
      });
      throw error;
    }
    return parseSuccessResponseBody ? await getResponseData(response) : response.body;
  }).then((data) => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch((error) => {
    // Pass RequestError and user aborts through unchanged; wrap everything
    // else (network failures etc.) as a 500 RequestError.
    if (error instanceof import_request_error.RequestError)
      throw error;
    else if (error.name === "AbortError")
      throw error;
    throw new import_request_error.RequestError(error.message, 500, {
      request: requestOptions
    });
  });
}
// Parses a fetch Response body by content-type: JSON -> parsed value,
// text-like (or missing content-type) -> string, anything else -> ArrayBuffer.
async function getResponseData(response) {
  const contentType = response.headers.get("content-type");
  const isJson = /application\/json/.test(contentType);
  if (isJson) {
    return response.json();
  }
  const isTextual = !contentType || /^text\/|charset=utf-8$/.test(contentType);
  if (isTextual) {
    return response.text();
  }
  return getBufferResponse(response);
}
// Builds a human-readable message from an error response body.
// `data` may be a string, an object with { message, errors? }, or any other
// JSON value — including null, since getResponseData() returns whatever
// response.json() yields.
function toErrorMessage(data) {
  if (typeof data === "string")
    return data;
  // Guard before using `in`: `"message" in null` (or in a number/boolean)
  // throws a TypeError, which previously masked the real API error.
  if (data !== null && typeof data === "object" && "message" in data) {
    if (Array.isArray(data.errors)) {
      return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
    }
    return data.message;
  }
  return `Unknown error: ${JSON.stringify(data)}`;
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/with-defaults.js
2022-12-25 13:58:23 +08:00
// pkg/dist-src/with-defaults.js
// Creates a request function bound to `newDefaults` layered on top of
// `oldEndpoint`. The returned function carries .endpoint and .defaults so
// it can be specialized further.
function withDefaults(oldEndpoint, newDefaults) {
  const mergedEndpoint = oldEndpoint.defaults(newDefaults);
  const api = (route, parameters) => {
    const endpointOptions = mergedEndpoint.merge(route, parameters);
    // Fast path: no per-request hook registered.
    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(mergedEndpoint.parse(endpointOptions));
    }
    // Hook path: hand the hook a request function it can invoke or replay.
    const boundRequest = (hookRoute, hookParameters) => fetchWrapper(
      mergedEndpoint.parse(mergedEndpoint.merge(hookRoute, hookParameters))
    );
    Object.assign(boundRequest, {
      endpoint: mergedEndpoint,
      defaults: withDefaults.bind(null, mergedEndpoint)
    });
    return endpointOptions.request.hook(boundRequest, endpointOptions);
  };
  return Object.assign(api, {
    endpoint: mergedEndpoint,
    defaults: withDefaults.bind(null, mergedEndpoint)
  });
}
2023-05-25 21:58:02 +00:00
// pkg/dist-src/index.js
// Module entry point: a request function preconfigured with the library's
// default user-agent header.
var request = withDefaults(import_endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
  }
});

// Annotate the CommonJS export names for ESM import in node:
0 && (0);
2022-12-25 13:58:23 +08:00
2023-05-19 08:00:27 +00:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 20 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-05-19 08:00:27 +00:00
"use strict" ;
2023-07-27 11:01:06 +00:00
// esbuild-generated CommonJS interop helpers.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines lazy getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties from `from` onto `to` (skipping `except`),
// preserving enumerability via getters.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// pkg/dist-src/index.js
// @octokit/auth-action public surface.
var dist_src_exports = {};
__export(dist_src_exports, {
  createActionAuth: () => createActionAuth
});
module.exports = __toCommonJS(dist_src_exports);
var import _auth _token = _ _nccwpck _require _ _ ( 6434 ) ;
// Resolves the token for a GitHub Actions run from the environment and
// returns a token-based auth strategy. Exactly one of env.GITHUB_TOKEN,
// with.GITHUB_TOKEN (INPUT_GITHUB_TOKEN) or with.token (INPUT_TOKEN)
// must be set; anything else throws.
var createActionAuth = function createActionAuth2() {
  if (!process.env.GITHUB_ACTION) {
    throw new Error(
      "[@octokit/auth-action] `GITHUB_ACTION` environment variable is not set. @octokit/auth-action is meant to be used in GitHub Actions only."
    );
  }
  const candidates = [
    process.env.GITHUB_TOKEN,
    process.env.INPUT_GITHUB_TOKEN,
    process.env.INPUT_TOKEN
  ];
  const definitions = candidates.filter(Boolean);
  switch (definitions.length) {
    case 0:
      throw new Error(
        "[@octokit/auth-action] `GITHUB_TOKEN` variable is not set. It must be set on either `env:` or `with:`. See https://github.com/octokit/auth-action.js#createactionauth"
      );
    case 1:
      break;
    default:
      throw new Error(
        "[@octokit/auth-action] The token variable is specified more than once. Use either `with.token`, `with.GITHUB_TOKEN`, or `env.GITHUB_TOKEN`. See https://github.com/octokit/auth-action.js#createactionauth"
      );
  }
  const token = definitions.pop();
  return (0, import_auth_token.createTokenAuth)(token);
};
2023-07-27 11:01:06 +00:00
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2023-05-19 08:00:27 +00:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 6434 :
/***/ ( ( module ) => {
2023-05-19 08:00:27 +00:00
"use strict" ;
2023-07-27 11:01:06 +00:00
// esbuild-generated CommonJS interop helpers (duplicated per bundled module).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines lazy getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties from `from` onto `to` (skipping `except`),
// preserving enumerability via getters.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// pkg/dist-src/index.js
// @octokit/auth-token public surface.
var dist_src_exports = {};
__export(dist_src_exports, {
  createTokenAuth: () => createTokenAuth
});
module.exports = __toCommonJS(dist_src_exports);
2022-12-25 13:58:23 +08:00
2023-06-18 08:35:17 +00:00
// pkg/dist-src/auth.js
// Token classification patterns: legacy installation tokens ("v1."),
// modern installation tokens ("ghs_") and user-to-server tokens ("ghu_").
var REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
var REGEX_IS_INSTALLATION = /^ghs_/;
var REGEX_IS_USER_TO_SERVER = /^ghu_/;
// Classifies `token` and returns the { type, token, tokenType } auth object.
async function auth(token) {
  // Three dot-separated segments means a JWT, i.e. a GitHub App token.
  const isApp = token.split(/\./).length === 3;
  let tokenType = "oauth";
  if (isApp) {
    tokenType = "app";
  } else if (REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token)) {
    tokenType = "installation";
  } else if (REGEX_IS_USER_TO_SERVER.test(token)) {
    tokenType = "user-to-server";
  }
  return {
    type: "token",
    token,
    tokenType
  };
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/with-authorization-prefix.js
2022-12-25 13:58:23 +08:00
// Prefixes a token with its Authorization scheme: JWTs (three dot-separated
// parts) use "bearer", everything else uses "token".
function withAuthorizationPrefix(token) {
  const isJwt = token.split(/\./).length === 3;
  return isJwt ? `bearer ${token}` : `token ${token}`;
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/hook.js
2022-12-25 13:58:23 +08:00
// pkg/dist-src/hook.js
// before-request hook: merges the route/parameters, injects the
// Authorization header, then performs the request.
async function hook(token, request, route, parameters) {
  const endpoint = request.endpoint.merge(route, parameters);
  endpoint.headers.authorization = withAuthorizationPrefix(token);
  return request(endpoint);
}
2023-06-18 08:35:17 +00:00
// pkg/dist-src/index.js
// pkg/dist-src/index.js
// Validates the token, strips any existing "token "/"bearer " scheme
// prefix, and returns an auth function carrying a request hook.
var createTokenAuth = function createTokenAuth2(token) {
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }
  if (typeof token !== "string") {
    throw new Error(
      "[@octokit/auth-token] Token passed to createTokenAuth is not a string"
    );
  }
  const normalized = token.replace(/^(token|bearer) +/i, "");
  return Object.assign(auth.bind(null, normalized), {
    hook: hook.bind(null, normalized)
  });
};
2023-06-18 08:35:17 +00:00
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 3682 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var register = _ _nccwpck _require _ _ ( 4670 )
var addHook = _ _nccwpck _require _ _ ( 5549 )
var removeHook = _ _nccwpck _require _ _ ( 6819 )
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function . bind
var bindable = bind . bind ( bind )
function bindApi ( hook , state , name ) {
var removeHookRef = bindable ( removeHook , null ) . apply ( null , name ? [ state , name ] : [ state ] )
hook . api = { remove : removeHookRef }
hook . remove = removeHookRef
; [ 'before' , 'error' , 'after' , 'wrap' ] . forEach ( function ( kind ) {
var args = name ? [ state , kind , name ] : [ state , kind ]
hook [ kind ] = hook . api [ kind ] = bindable ( addHook , null ) . apply ( null , args )
} )
}
// Creates a singular hook: one anonymous hook channel, internally named 'h'.
function HookSingular () {
  var name = 'h'
  var state = { registry: {} }
  var singularHook = register.bind(null, state, name)
  bindApi(singularHook, state, name)
  return singularHook
}
// Creates a hook collection: named hook channels stored in a shared registry.
function HookCollection () {
  var state = { registry: {} }
  var collectionHook = register.bind(null, state)
  bindApi(collectionHook, state)
  return collectionHook
}
var collectionHookDeprecationMessageDisplayed = false
function Hook ( ) {
if ( ! collectionHookDeprecationMessageDisplayed ) {
console . warn ( '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' )
collectionHookDeprecationMessageDisplayed = true
}
return HookCollection ( )
}
Hook . Singular = HookSingular . bind ( )
Hook . Collection = HookCollection . bind ( )
module . exports = Hook
// expose constructors as a named property for TypeScript
module . exports . Hook = Hook
module . exports . Singular = Hook . Singular
module . exports . Collection = Hook . Collection
/***/ } ) ,
/***/ 5549 :
/***/ ( ( module ) => {
module . exports = addHook ;
// Registers `hook` under `name` in state.registry, wrapping it according to
// `kind` ('before' | 'after' | 'error'); 'wrap' hooks are stored as-is.
// The original function is kept alongside so it can be removed later.
function addHook(state, kind, name, hook) {
  var orig = hook;
  if (!state.registry[name]) {
    state.registry[name] = [];
  }
  if (kind === "before") {
    // Run the hook first, then the wrapped method, sharing `options`.
    hook = function (method, options) {
      return Promise.resolve()
        .then(orig.bind(null, options))
        .then(method.bind(null, options));
    };
  } else if (kind === "after") {
    // Run the method, feed its result to the hook, resolve with the result.
    hook = function (method, options) {
      var result;
      return Promise.resolve()
        .then(method.bind(null, options))
        .then(function (result_) {
          result = result_;
          return orig(result, options);
        })
        .then(function () {
          return result;
        });
    };
  } else if (kind === "error") {
    // Only invoked when the method rejects; the hook may recover or rethrow.
    hook = function (method, options) {
      return Promise.resolve()
        .then(method.bind(null, options))
        .catch(function (error) {
          return orig(error, options);
        });
    };
  }
  state.registry[name].push({
    hook: hook,
    orig: orig,
  });
}
/***/ } ) ,
/***/ 4670 :
/***/ ( ( module ) => {
module . exports = register ;
// Runs `method(options)` through every hook registered under `name`.
// `name` may be an array of names, in which case the hooks compose.
// Always resolves asynchronously; throws synchronously for a non-function
// method.
function register(state, name, method, options) {
  if (typeof method !== "function") {
    throw new Error("method for before hook must be a function");
  }
  if (!options) {
    options = {};
  }
  if (Array.isArray(name)) {
    // Compose right-to-left: register(state, name[0], register(state, name[1], ...)).
    return name.reverse().reduce(function (callback, singleName) {
      return register.bind(null, state, singleName, callback, options);
    }, method)();
  }
  return Promise.resolve().then(function () {
    var registered = state.registry[name];
    if (!registered) {
      return method(options);
    }
    return registered.reduce(function (wrapped, entry) {
      return entry.hook.bind(null, wrapped, options);
    }, method)();
  });
}
/***/ } ) ,
/***/ 6819 :
/***/ ( ( module ) => {
module . exports = removeHook ;
// Unregisters the hook that was originally added as `method` under `name`;
// a no-op when the name or method is unknown.
function removeHook(state, name, method) {
  var entries = state.registry[name];
  if (!entries) {
    return;
  }
  var index = entries
    .map(function (registered) {
      return registered.orig;
    })
    .indexOf(method);
  if (index === -1) {
    return;
  }
  entries.splice(index, 1);
}
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 6472 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const { parseContentType } = _ _nccwpck _require _ _ ( 1305 ) ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Picks the parser type (from TYPES) matching the request's content-type
// header and instantiates it with a normalized configuration object.
function getInstance (cfg) {
  const headers = cfg.headers;
  const conType = parseContentType(headers['content-type']);
  if (!conType)
    throw new Error('Malformed content type');

  for (const type of TYPES) {
    const matched = type.detect(conType);
    if (!matched)
      continue;

    // Normalize so every parser sees the same config shape.
    const instanceCfg = {
      limits: cfg.limits,
      headers,
      conType,
      highWaterMark: undefined,
      fileHwm: undefined,
      defCharset: undefined,
      defParamCharset: undefined,
      preservePath: false,
    };
    if (cfg.highWaterMark)
      instanceCfg.highWaterMark = cfg.highWaterMark;
    if (cfg.fileHwm)
      instanceCfg.fileHwm = cfg.fileHwm;
    instanceCfg.defCharset = cfg.defCharset;
    instanceCfg.defParamCharset = cfg.defParamCharset;
    instanceCfg.preservePath = cfg.preservePath;
    return new type(instanceCfg);
  }

  throw new Error(`Unsupported content type: ${headers['content-type']}`);
}
2023-07-27 11:01:06 +00:00
// Note: types are explicitly listed here for easier bundling
// See: https://github.com/mscdex/busboy/issues/121
// Only modules exposing a static detect() qualify as parser types.
const TYPES = [
  __nccwpck_require__(5634),
  __nccwpck_require__(4041),
].filter(function (typemod) { return typeof typemod.detect === 'function'; });
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Busboy entry point: validates that a content-type header string is
// present, then returns the matching parser instance.
module.exports = (cfg) => {
  if (typeof cfg !== 'object' || cfg === null)
    cfg = {};

  if (typeof cfg.headers !== 'object'
      || cfg.headers === null
      || typeof cfg.headers['content-type'] !== 'string') {
    throw new Error('Missing Content-Type');
  }

  return getInstance(cfg);
};
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ 5634 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const { Readable , Writable } = _ _nccwpck _require _ _ ( 2781 ) ;
const StreamSearch = _ _nccwpck _require _ _ ( 2405 ) ;
const {
basename ,
convertToUTF8 ,
getDecoder ,
parseContentType ,
parseDisposition ,
} = _ _nccwpck _require _ _ ( 1305 ) ;
const BUF _CRLF = Buffer . from ( '\r\n' ) ;
const BUF _CR = Buffer . from ( '\r' ) ;
const BUF _DASH = Buffer . from ( '-' ) ;
function noop ( ) { }
const MAX _HEADER _PAIRS = 2000 ; // From node
const MAX _HEADER _SIZE = 16 * 1024 ; // From node (its default value)
const HPARSER _NAME = 0 ;
const HPARSER _PRE _OWS = 1 ;
const HPARSER _VALUE = 2 ;
// Incremental parser for the header block of one multipart part. Bytes are
// fed via push(); cb(header) fires once the terminating CR LF CR LF is
// seen. Enforces node-like limits (MAX_HEADER_PAIRS, MAX_HEADER_SIZE).
class HeaderParser {
  constructor (cb) {
    this.header = Object.create(null);
    this.pairCount = 0;
    this.byteCount = 0;
    this.state = HPARSER_NAME;
    this.name = '';
    this.value = '';
    this.crlf = 0;
    this.cb = cb;
  }

  // Returns the parser to its initial state, ready for the next part.
  reset () {
    this.header = Object.create(null);
    this.pairCount = 0;
    this.byteCount = 0;
    this.state = HPARSER_NAME;
    this.name = '';
    this.value = '';
    this.crlf = 0;
  }

  // Consumes chunk[pos..end). Returns the new position (=== the index just
  // past the header terminator when complete), or -1 on malformed input or
  // an exceeded size limit.
  push (chunk, pos, end) {
    let start = pos;
    while (pos < end) {
      switch (this.state) {
        case HPARSER_NAME: {
          let done = false;
          for (; pos < end; ++pos) {
            if (this.byteCount === MAX_HEADER_SIZE)
              return -1;
            ++this.byteCount;
            const code = chunk[pos];
            if (TOKEN[code] !== 1) {
              if (code !== 58/* ':' */)
                return -1;
              this.name += chunk.latin1Slice(start, pos);
              if (this.name.length === 0)
                return -1;
              ++pos;
              done = true;
              this.state = HPARSER_PRE_OWS;
              break;
            }
          }
          if (!done) {
            this.name += chunk.latin1Slice(start, pos);
            break;
          }
          // FALLTHROUGH
        }
        case HPARSER_PRE_OWS: {
          // Skip optional whitespace
          let done = false;
          for (; pos < end; ++pos) {
            if (this.byteCount === MAX_HEADER_SIZE)
              return -1;
            ++this.byteCount;
            const code = chunk[pos];
            if (code !== 32/* ' ' */ && code !== 9/* '\t' */) {
              start = pos;
              done = true;
              this.state = HPARSER_VALUE;
              break;
            }
          }
          if (!done)
            break;
          // FALLTHROUGH
        }
        case HPARSER_VALUE:
          switch (this.crlf) {
            case 0: // Nothing yet
              for (; pos < end; ++pos) {
                if (this.byteCount === MAX_HEADER_SIZE)
                  return -1;
                ++this.byteCount;
                const code = chunk[pos];
                if (FIELD_VCHAR[code] !== 1) {
                  if (code !== 13/* '\r' */)
                    return -1;
                  ++this.crlf;
                  break;
                }
              }
              this.value += chunk.latin1Slice(start, pos++);
              break;
            case 1: // Received CR
              if (this.byteCount === MAX_HEADER_SIZE)
                return -1;
              ++this.byteCount;
              if (chunk[pos++] !== 10/* '\n' */)
                return -1;
              ++this.crlf;
              break;
            case 2: { // Received CR LF
              if (this.byteCount === MAX_HEADER_SIZE)
                return -1;
              ++this.byteCount;
              const code = chunk[pos];
              if (code === 32/* ' ' */ || code === 9/* '\t' */) {
                // Folded value
                start = pos;
                this.crlf = 0;
              } else {
                // Commit the completed name/value pair (silently dropped
                // past MAX_HEADER_PAIRS, matching node's behavior).
                if (++this.pairCount < MAX_HEADER_PAIRS) {
                  this.name = this.name.toLowerCase();
                  if (this.header[this.name] === undefined)
                    this.header[this.name] = [this.value];
                  else
                    this.header[this.name].push(this.value);
                }
                if (code === 13/* '\r' */) {
                  ++this.crlf;
                  ++pos;
                } else {
                  // Assume start of next header field name
                  start = pos;
                  this.crlf = 0;
                  this.state = HPARSER_NAME;
                  this.name = '';
                  this.value = '';
                }
              }
              break;
            }
            case 3: { // Received CR LF CR
              if (this.byteCount === MAX_HEADER_SIZE)
                return -1;
              ++this.byteCount;
              if (chunk[pos++] !== 10/* '\n' */)
                return -1;
              // End of header
              const header = this.header;
              this.reset();
              this.cb(header);
              return pos;
            }
          }
          break;
      }
    }

    return pos;
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
class FileStream extends Readable {
constructor ( opts , owner ) {
super ( opts ) ;
this . truncated = false ;
this . _readcb = null ;
this . once ( 'end' , ( ) => {
// We need to make sure that we call any outstanding _writecb() that is
// associated with this file so that processing of the rest of the form
// can continue. This may not happen if the file stream ends right after
// backpressure kicks in, so we force it here.
this . _read ( ) ;
if ( -- owner . _fileEndsLeft === 0 && owner . _finalcb ) {
const cb = owner . _finalcb ;
owner . _finalcb = null ;
// Make sure other 'end' event handlers get a chance to be executed
// before busboy's 'finish' event is emitted
process . nextTick ( cb ) ;
}
} ) ;
}
_read ( n ) {
const cb = this . _readcb ;
if ( cb ) {
this . _readcb = null ;
cb ( ) ;
}
}
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Sink installed once parsing is complete (or aborted): swallows all
// further boundary-parser activity.
const ignoreData = {
  push: function (chunk, pos) {},
  destroy: function () {},
};
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Consumes self._writecb: on error, destroys the stream; otherwise invokes
// the pending write callback (if any).
function callAndUnsetCb(self, err) {
  const pending = self._writecb;
  self._writecb = null;
  if (err) {
    self.destroy(err);
    return;
  }
  if (pending)
    pending();
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Identity decoder, used when no default parameter charset is configured.
function nullDecoder(val, hint) {
  return val;
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Writable parser for multipart/form-data bodies. Splits the incoming byte
// stream on the boundary via StreamSearch, parses per-part headers with
// HeaderParser, and emits 'file' / 'field' events (plus *Limit events) as
// parts complete.
class Multipart extends Writable {
  constructor (cfg) {
    const streamOpts = {
      autoDestroy: true,
      emitClose: true,
      highWaterMark: (typeof cfg.highWaterMark === 'number'
                      ? cfg.highWaterMark
                      : undefined),
    };
    super(streamOpts);

    if (!cfg.conType.params || typeof cfg.conType.params.boundary !== 'string')
      throw new Error('Multipart: Boundary not found');

    const boundary = cfg.conType.params.boundary;
    const paramDecoder = (typeof cfg.defParamCharset === 'string'
                          && cfg.defParamCharset
                          ? getDecoder(cfg.defParamCharset)
                          : nullDecoder);
    const defCharset = (cfg.defCharset || 'utf8');
    const preservePath = cfg.preservePath;
    const fileOpts = {
      autoDestroy: true,
      emitClose: true,
      highWaterMark: (typeof cfg.fileHwm === 'number'
                      ? cfg.fileHwm
                      : undefined),
    };

    // Limits default to busboy's documented values when unset.
    const limits = cfg.limits;
    const fieldSizeLimit = (limits && typeof limits.fieldSize === 'number'
                            ? limits.fieldSize
                            : 1 * 1024 * 1024);
    const fileSizeLimit = (limits && typeof limits.fileSize === 'number'
                           ? limits.fileSize
                           : Infinity);
    const filesLimit = (limits && typeof limits.files === 'number'
                        ? limits.files
                        : Infinity);
    const fieldsLimit = (limits && typeof limits.fields === 'number'
                         ? limits.fields
                         : Infinity);
    const partsLimit = (limits && typeof limits.parts === 'number'
                        ? limits.parts
                        : Infinity);

    let parts = -1; // Account for initial boundary
    let fields = 0;
    let files = 0;
    let skipPart = false;

    this._fileEndsLeft = 0;
    this._fileStream = undefined;
    this._complete = false;
    let fileSize = 0;

    // Per-part state, reset each time a part header completes.
    let field;
    let fieldSize = 0;
    let partCharset;
    let partEncoding;
    let partType;
    let partName;
    let partTruncated = false;

    let hitFilesLimit = false;
    let hitFieldsLimit = false;

    this._hparser = null;
    // Invoked once a complete part header has been parsed; decides whether
    // the part is a file or a plain field (or should be skipped).
    const hparser = new HeaderParser((header) => {
      this._hparser = null;
      skipPart = false;

      partType = 'text/plain';
      partCharset = defCharset;
      partEncoding = '7bit';
      partName = undefined;
      partTruncated = false;

      let filename;
      if (!header['content-disposition']) {
        skipPart = true;
        return;
      }

      const disp = parseDisposition(header['content-disposition'][0],
                                    paramDecoder);
      if (!disp || disp.type !== 'form-data') {
        skipPart = true;
        return;
      }

      if (disp.params) {
        if (disp.params.name)
          partName = disp.params.name;

        if (disp.params['filename*'])
          filename = disp.params['filename*'];
        else if (disp.params.filename)
          filename = disp.params.filename;

        if (filename !== undefined && !preservePath)
          filename = basename(filename);
      }

      if (header['content-type']) {
        const conType = parseContentType(header['content-type'][0]);
        if (conType) {
          partType = `${conType.type}/${conType.subtype}`;
          if (conType.params && typeof conType.params.charset === 'string')
            partCharset = conType.params.charset.toLowerCase();
        }
      }

      if (header['content-transfer-encoding'])
        partEncoding = header['content-transfer-encoding'][0].toLowerCase();

      if (partType === 'application/octet-stream' || filename !== undefined) {
        // File

        if (files === filesLimit) {
          if (!hitFilesLimit) {
            hitFilesLimit = true;
            this.emit('filesLimit');
          }
          skipPart = true;
          return;
        }
        ++files;

        if (this.listenerCount('file') === 0) {
          skipPart = true;
          return;
        }

        fileSize = 0;
        this._fileStream = new FileStream(fileOpts, this);
        ++this._fileEndsLeft;
        this.emit(
          'file',
          partName,
          this._fileStream,
          { filename,
            encoding: partEncoding,
            mimeType: partType }
        );
      } else {
        // Non-file

        if (fields === fieldsLimit) {
          if (!hitFieldsLimit) {
            hitFieldsLimit = true;
            this.emit('fieldsLimit');
          }
          skipPart = true;
          return;
        }
        ++fields;

        if (this.listenerCount('field') === 0) {
          skipPart = true;
          return;
        }

        field = [];
        fieldSize = 0;
      }
    });

    // 0 = not after a boundary; 1 = just matched a boundary;
    // 2 = saw '-' (maybe final '--'); 3 = saw CR (maybe CRLF before header).
    let matchPostBoundary = 0;
    // StreamSearch callback: routes bytes to the header parser, the active
    // file stream, or the field accumulator, and finalizes parts on match.
    const ssCb = (isMatch, data, start, end, isDataSafe) => {
      retrydata:
      while (data) {
        if (this._hparser !== null) {
          const ret = this._hparser.push(data, start, end);
          if (ret === -1) {
            this._hparser = null;
            hparser.reset();
            this.emit('error', new Error('Malformed part header'));
            break;
          }
          start = ret;
        }

        if (start === end)
          break;

        if (matchPostBoundary !== 0) {
          if (matchPostBoundary === 1) {
            switch (data[start]) {
              case 45: // '-'
                // Try matching '--' after boundary
                matchPostBoundary = 2;
                ++start;
                break;
              case 13: // '\r'
                // Try matching CR LF before header
                matchPostBoundary = 3;
                ++start;
                break;
              default:
                matchPostBoundary = 0;
            }
            if (start === end)
              return;
          }

          if (matchPostBoundary === 2) {
            matchPostBoundary = 0;
            if (data[start] === 45/* '-' */) {
              // End of multipart data
              this._complete = true;
              this._bparser = ignoreData;
              return;
            }
            // We saw something other than '-', so put the dash we consumed
            // "back"
            const writecb = this._writecb;
            this._writecb = noop;
            ssCb(false, BUF_DASH, 0, 1, false);
            this._writecb = writecb;
          } else if (matchPostBoundary === 3) {
            matchPostBoundary = 0;
            if (data[start] === 10/* '\n' */) {
              ++start;
              if (parts >= partsLimit)
                break;
              // Prepare the header parser
              this._hparser = hparser;
              if (start === end)
                break;
              // Process the remaining data as a header
              continue retrydata;
            } else {
              // We saw something other than LF, so put the CR we consumed
              // "back"
              const writecb = this._writecb;
              this._writecb = noop;
              ssCb(false, BUF_CR, 0, 1, false);
              this._writecb = writecb;
            }
          }
        }

        if (!skipPart) {
          if (this._fileStream) {
            let chunk;
            const actualLen = Math.min(end - start, fileSizeLimit - fileSize);
            if (!isDataSafe) {
              chunk = Buffer.allocUnsafe(actualLen);
              data.copy(chunk, 0, start, start + actualLen);
            } else {
              chunk = data.slice(start, start + actualLen);
            }

            fileSize += chunk.length;
            if (fileSize === fileSizeLimit) {
              if (chunk.length > 0)
                this._fileStream.push(chunk);
              this._fileStream.emit('limit');
              this._fileStream.truncated = true;
              skipPart = true;
            } else if (!this._fileStream.push(chunk)) {
              // Backpressure: defer the write callback until the consumer reads.
              if (this._writecb)
                this._fileStream._readcb = this._writecb;
              this._writecb = null;
            }
          } else if (field !== undefined) {
            let chunk;
            const actualLen = Math.min(
              end - start,
              fieldSizeLimit - fieldSize
            );
            if (!isDataSafe) {
              chunk = Buffer.allocUnsafe(actualLen);
              data.copy(chunk, 0, start, start + actualLen);
            } else {
              chunk = data.slice(start, start + actualLen);
            }

            fieldSize += actualLen;
            field.push(chunk);
            if (fieldSize === fieldSizeLimit) {
              skipPart = true;
              partTruncated = true;
            }
          }
        }

        break;
      }

      if (isMatch) {
        matchPostBoundary = 1;
        if (this._fileStream) {
          // End the active file stream if the previous part was a file
          this._fileStream.push(null);
          this._fileStream = null;
        } else if (field !== undefined) {
          let data;
          switch (field.length) {
            case 0:
              data = '';
              break;
            case 1:
              data = convertToUTF8(field[0], partCharset, 0);
              break;
            default:
              data = convertToUTF8(
                Buffer.concat(field, fieldSize),
                partCharset,
                0
              );
          }
          field = undefined;
          fieldSize = 0;
          this.emit(
            'field',
            partName,
            data,
            { nameTruncated: false,
              valueTruncated: partTruncated,
              encoding: partEncoding,
              mimeType: partType }
          );
        }

        if (++parts === partsLimit)
          this.emit('partsLimit');
      }
    };
    this._bparser = new StreamSearch(`\r\n--${boundary}`, ssCb);

    this._writecb = null;
    this._finalcb = null;

    // Just in case there is no preamble
    this.write(BUF_CRLF);
  }

  // True when the parsed content-type is multipart/form-data.
  static detect (conType) {
    return (conType.type === 'multipart' && conType.subtype === 'form-data');
  }

  _write (chunk, enc, cb) {
    this._writecb = cb;
    this._bparser.push(chunk, 0);
    // If backpressure did not capture the callback, acknowledge the write now.
    if (this._writecb)
      callAndUnsetCb(this);
  }

  _destroy (err, cb) {
    this._hparser = null;
    this._bparser = ignoreData;
    if (!err)
      err = checkEndState(this);
    const fileStream = this._fileStream;
    if (fileStream) {
      this._fileStream = null;
      fileStream.destroy(err);
    }
    cb(err);
  }

  _final (cb) {
    this._bparser.destroy();
    if (!this._complete)
      return cb(new Error('Unexpected end of form'));
    // Wait for all emitted file streams to end before finishing.
    if (this._fileEndsLeft)
      this._finalcb = finalcb.bind(null, this, cb);
    else
      finalcb(this, cb);
  }
}
2023-07-27 11:01:06 +00:00
/**
 * Shared completion helper for Multipart._final: reports `err` if one was
 * passed in, otherwise reports whatever end-state problem (if any) remains.
 */
function finalcb(self, cb, err) {
  // Only consult the end state when no error was handed to us
  cb(err || checkEndState(self));
}
2023-07-27 11:01:06 +00:00
/**
 * Inspects a Multipart parser's terminal state and returns an Error when the
 * stream ended in a bad place, or undefined when everything completed.
 * Side effect: destroys (and clears) any still-open file stream.
 */
function checkEndState(self) {
  // An active header parser means a part's header block never terminated
  if (self._hparser)
    return new Error('Malformed part header');
  const openFile = self._fileStream;
  if (openFile) {
    self._fileStream = null;
    openFile.destroy(new Error('Unexpected end of file'));
  }
  if (!self._complete)
    return new Error('Unexpected end of form');
}
const TOKEN = [
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 1 , 0 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 1 , 1 , 0 , 1 , 1 , 0 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 1 , 0 , 1 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
] ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Per-byte lookup table: 1 marks bytes allowed inside a header field value
// (HTAB at 0x09, visible ASCII except DEL at 0x7F, and obs-text 0x80-0xFF).
const FIELD_VCHAR = [
  0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
];
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
module . exports = Multipart ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 4041 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-07-27 11:01:06 +00:00
const { Writable } = _ _nccwpck _require _ _ ( 2781 ) ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const { getDecoder } = _ _nccwpck _require _ _ ( 1305 ) ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/**
 * Streaming parser for application/x-www-form-urlencoded request bodies.
 *
 * Implemented as a Writable stream: incoming chunks are scanned with a small
 * state machine ('=', '&', '+', '%'-escapes) and one 'field' event is emitted
 * per key/value pair, carrying truncation flags when the configured
 * fieldNameSize/fieldSize limits were hit. Percent-encoded bytes may be
 * split across chunks; `_byte` carries the pending half-decoded state.
 */
class URLEncoded extends Writable {
  constructor(cfg) {
    const streamOpts = {
      autoDestroy: true,
      emitClose: true,
      highWaterMark: (typeof cfg.highWaterMark === 'number'
                      ? cfg.highWaterMark
                      : undefined),
    };
    super(streamOpts);

    // Charset from the Content-Type parameters wins over the default
    let charset = (cfg.defCharset || 'utf8');
    if (cfg.conType.params && typeof cfg.conType.params.charset === 'string')
      charset = cfg.conType.params.charset;
    this.charset = charset;

    // Limits: max value bytes (default 1 MiB), max number of fields
    // (default unlimited), max key bytes (default 100)
    const limits = cfg.limits;
    this.fieldSizeLimit = (limits && typeof limits.fieldSize === 'number'
                           ? limits.fieldSize
                           : 1 * 1024 * 1024);
    this.fieldsLimit = (limits && typeof limits.fields === 'number'
                        ? limits.fields
                        : Infinity);
    this.fieldNameSizeLimit = (
      limits && typeof limits.fieldNameSize === 'number'
      ? limits.fieldNameSize
      : 100
    );

    this._inKey = true;       // Currently parsing a key (vs. a value)
    this._keyTrunc = false;   // Current key hit fieldNameSizeLimit
    this._valTrunc = false;   // Current value hit fieldSizeLimit
    this._bytesKey = 0;       // Decoded byte count of the current key
    this._bytesVal = 0;       // Decoded byte count of the current value
    this._fields = 0;         // Number of fields emitted so far
    this._key = '';           // Accumulated (still latin1-ish) key text
    this._val = '';           // Accumulated (still latin1-ish) value text
    this._byte = -2;          // Pct-decode state: -2 idle, -1 after '%',
                              // 0..15 = first hex digit seen
    this._lastPos = 0;        // Start of the unconsumed span in the chunk
    this._encode = 0;         // Decode hint: 0 none, 1 ASCII pct, 2 high-bit
    this._decoder = getDecoder(charset);
  }

  // Busboy routes application/x-www-form-urlencoded bodies to this class
  static detect(conType) {
    return (conType.type === 'application'
            && conType.subtype === 'x-www-form-urlencoded');
  }

  _write(chunk, enc, cb) {
    // Past the fields limit: silently swallow the rest of the body
    if (this._fields >= this.fieldsLimit)
      return cb();

    let i = 0;
    const len = chunk.length;
    this._lastPos = 0;

    // Check if we last ended mid-percent-encoded byte
    if (this._byte !== -2) {
      i = readPctEnc(this, chunk, i, len);
      if (i === -1)
        return cb(new Error('Malformed urlencoded form'));
      if (i >= len)
        return cb();
      if (this._inKey)
        ++this._bytesKey;
      else
        ++this._bytesVal;
    }

    main:
    while (i < len) {
      if (this._inKey) {
        // Parsing key
        i = skipKeyBytes(this, chunk, i, len);

        while (i < len) {
          switch (chunk[i]) {
            case 61: // '='
              if (this._lastPos < i)
                this._key += chunk.latin1Slice(this._lastPos, i);
              this._lastPos = ++i;
              this._key = this._decoder(this._key, this._encode);
              this._encode = 0;
              this._inKey = false;
              continue main;
            case 38: // '&'
              // Key with no '=' -> field with an empty value
              if (this._lastPos < i)
                this._key += chunk.latin1Slice(this._lastPos, i);
              this._lastPos = ++i;
              this._key = this._decoder(this._key, this._encode);
              this._encode = 0;
              if (this._bytesKey > 0) {
                this.emit(
                  'field',
                  this._key,
                  '',
                  { nameTruncated: this._keyTrunc,
                    valueTruncated: false,
                    encoding: this.charset,
                    mimeType: 'text/plain' }
                );
              }
              this._key = '';
              this._val = '';
              this._keyTrunc = false;
              this._valTrunc = false;
              this._bytesKey = 0;
              this._bytesVal = 0;
              if (++this._fields >= this.fieldsLimit) {
                this.emit('fieldsLimit');
                return cb();
              }
              continue;
            case 43: // '+'
              // '+' decodes to a space in form encoding
              if (this._lastPos < i)
                this._key += chunk.latin1Slice(this._lastPos, i);
              this._key += ' ';
              this._lastPos = i + 1;
              break;
            case 37: // '%'
              if (this._encode === 0)
                this._encode = 1;
              if (this._lastPos < i)
                this._key += chunk.latin1Slice(this._lastPos, i);
              this._lastPos = i + 1;
              this._byte = -1;
              i = readPctEnc(this, chunk, i + 1, len);
              if (i === -1)
                return cb(new Error('Malformed urlencoded form'));
              if (i >= len)
                return cb();
              ++this._bytesKey;
              i = skipKeyBytes(this, chunk, i, len);
              continue;
          }
          ++i;
          ++this._bytesKey;
          i = skipKeyBytes(this, chunk, i, len);
        }
        if (this._lastPos < i)
          this._key += chunk.latin1Slice(this._lastPos, i);
      } else {
        // Parsing value
        i = skipValBytes(this, chunk, i, len);

        while (i < len) {
          switch (chunk[i]) {
            case 38: // '&'
              // End of pair: decode and emit, then reset for the next key
              if (this._lastPos < i)
                this._val += chunk.latin1Slice(this._lastPos, i);
              this._lastPos = ++i;
              this._inKey = true;
              this._val = this._decoder(this._val, this._encode);
              this._encode = 0;
              if (this._bytesKey > 0 || this._bytesVal > 0) {
                this.emit(
                  'field',
                  this._key,
                  this._val,
                  { nameTruncated: this._keyTrunc,
                    valueTruncated: this._valTrunc,
                    encoding: this.charset,
                    mimeType: 'text/plain' }
                );
              }
              this._key = '';
              this._val = '';
              this._keyTrunc = false;
              this._valTrunc = false;
              this._bytesKey = 0;
              this._bytesVal = 0;
              if (++this._fields >= this.fieldsLimit) {
                this.emit('fieldsLimit');
                return cb();
              }
              continue main;
            case 43: // '+'
              if (this._lastPos < i)
                this._val += chunk.latin1Slice(this._lastPos, i);
              this._val += ' ';
              this._lastPos = i + 1;
              break;
            case 37: // '%'
              if (this._encode === 0)
                this._encode = 1;
              if (this._lastPos < i)
                this._val += chunk.latin1Slice(this._lastPos, i);
              this._lastPos = i + 1;
              this._byte = -1;
              i = readPctEnc(this, chunk, i + 1, len);
              if (i === -1)
                return cb(new Error('Malformed urlencoded form'));
              if (i >= len)
                return cb();
              ++this._bytesVal;
              i = skipValBytes(this, chunk, i, len);
              continue;
          }
          ++i;
          ++this._bytesVal;
          i = skipValBytes(this, chunk, i, len);
        }
        if (this._lastPos < i)
          this._val += chunk.latin1Slice(this._lastPos, i);
      }
    }

    cb();
  }

  _final(cb) {
    // Ending mid-percent-escape is malformed input
    if (this._byte !== -2)
      return cb(new Error('Malformed urlencoded form'));
    // Flush a trailing pair that was not terminated by '&'
    if (!this._inKey || this._bytesKey > 0 || this._bytesVal > 0) {
      if (this._inKey)
        this._key = this._decoder(this._key, this._encode);
      else
        this._val = this._decoder(this._val, this._encode);
      this.emit(
        'field',
        this._key,
        this._val,
        { nameTruncated: this._keyTrunc,
          valueTruncated: this._valTrunc,
          encoding: this.charset,
          mimeType: 'text/plain' }
      );
    }
    cb();
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/**
 * Consumes up to two hex digits of a percent-escape from `chunk` starting at
 * `pos`, resuming from `self._byte` (-1 = '%' just seen, 0..15 = first digit
 * already decoded in a previous chunk). On success the decoded character is
 * appended to the active key/value buffer and `self._byte` resets to -2.
 * Returns the new position, `len` when the chunk was exhausted before any
 * digit, or -1 for an invalid hex digit.
 */
function readPctEnc(self, chunk, pos, len) {
  // Nothing left in this chunk to consume
  if (pos >= len)
    return len;

  let upper = self._byte;
  if (upper === -1) {
    // Fresh escape: decode the first hex digit
    upper = HEX_VALUES[chunk[pos]];
    ++pos;
    if (upper === -1)
      return -1;
    if (upper >= 8)
      self._encode = 2; // High bit set -> resulting byte is non-ASCII
    if (pos >= len) {
      // Second digit lives in the next chunk; stash the first
      self._byte = upper;
      return pos;
    }
  }

  // Decode the second hex digit and append the resulting character
  const lower = HEX_VALUES[chunk[pos]];
  ++pos;
  if (lower === -1)
    return -1;
  const decoded = String.fromCharCode((upper << 4) + lower);
  if (self._inKey)
    self._key += decoded;
  else
    self._val += decoded;
  self._byte = -2;
  self._lastPos = pos;
  return pos;
}
2023-07-27 11:01:06 +00:00
/**
 * Once the current key has exceeded fieldNameSizeLimit, discards key bytes
 * up to the next delimiter ('=' or '&') or the end of the chunk, marking the
 * key as truncated. Returns the (possibly advanced) position.
 */
function skipKeyBytes(self, chunk, pos, len) {
  // Fast path: still within the limit, nothing to skip
  if (self._bytesKey <= self.fieldNameSizeLimit)
    return pos;

  // On first truncation, keep whatever was buffered before the overflow byte
  if (!self._keyTrunc && self._lastPos < pos)
    self._key += chunk.latin1Slice(self._lastPos, pos - 1);
  self._keyTrunc = true;

  // Drop bytes until a delimiter or the chunk ends
  while (pos < len) {
    const b = chunk[pos];
    if (b === 61/* '=' */ || b === 38/* '&' */)
      break;
    ++self._bytesKey;
    ++pos;
  }
  self._lastPos = pos;
  return pos;
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/**
 * Once the current value has exceeded fieldSizeLimit, discards value bytes
 * up to the next '&' or the end of the chunk, marking the value as
 * truncated. Returns the (possibly advanced) position.
 */
function skipValBytes(self, chunk, pos, len) {
  // Fast path: still within the limit, nothing to skip
  if (self._bytesVal <= self.fieldSizeLimit)
    return pos;

  // On first truncation, keep whatever was buffered before the overflow byte
  if (!self._valTrunc && self._lastPos < pos)
    self._val += chunk.latin1Slice(self._lastPos, pos - 1);
  self._valTrunc = true;

  // Drop bytes until the pair terminator or the chunk ends
  while (pos < len) {
    if (chunk[pos] === 38/* '&' */)
      break;
    ++self._bytesVal;
    ++pos;
  }
  self._lastPos = pos;
  return pos;
}
/* eslint-disable no-multi-spaces */
// Maps an ASCII character code to its hex-digit value ('0'-'9', 'A'-'F',
// 'a'-'f'); every other code maps to -1 (invalid hex digit).
const HEX_VALUES = [
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
   0,  1,  2,  3,  4,  5,  6,  7,  8,  9, -1, -1, -1, -1, -1, -1,
  -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
];
/* eslint-enable no-multi-spaces */
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
module . exports = URLEncoded ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 1305 :
/***/ ( function ( module ) {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/**
 * Parses a Content-Type header value into `{ type, subtype, params }`
 * (type/subtype lowercased, params a null-prototype object), or returns
 * undefined for malformed input.
 */
function parseContentType(str) {
  if (str.length === 0)
    return;

  const params = Object.create(null);

  // Consume the top-level type: token characters up to the '/' separator
  let pos = 0;
  while (pos < str.length) {
    const code = str.charCodeAt(pos);
    if (TOKEN[code] !== 1) {
      // Must stop exactly at '/', and the type must be non-empty
      if (code !== 47/* '/' */ || pos === 0)
        return;
      break;
    }
    ++pos;
  }

  // Ran out of input without a '/': type with no subtype (malformed)
  if (pos === str.length)
    return;

  const type = str.slice(0, pos).toLowerCase();

  // Consume the subtype; anything after the first non-token character is
  // handed to the parameter parser
  const subtypeStart = ++pos;
  while (pos < str.length) {
    const code = str.charCodeAt(pos);
    if (TOKEN[code] !== 1) {
      // Make sure we have a subtype
      if (pos === subtypeStart)
        return;
      if (parseContentTypeParams(str, pos, params) === undefined)
        return;
      break;
    }
    ++pos;
  }

  // Make sure we have a subtype
  if (pos === subtypeStart)
    return;

  const subtype = str.slice(subtypeStart, pos).toLowerCase();

  return { type, subtype, params };
}
2023-07-27 11:01:06 +00:00
/**
 * Parses the `; name=value` parameter list of a Content-Type header starting
 * at index `i`, filling `params` (first occurrence of a name wins, names
 * lowercased). Values may be tokens or quoted strings with backslash
 * escapes. Returns `params` on success, undefined on malformed input.
 */
function parseContentTypeParams(str, i, params) {
  while (i < str.length) {
    // Consume whitespace
    for (; i < str.length; ++i) {
      const code = str.charCodeAt(i);
      if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
        break;
    }

    // Ended on whitespace
    if (i === str.length)
      break;

    // Check for malformed parameter
    if (str.charCodeAt(i++) !== 59/* ';' */)
      return;

    // Consume whitespace
    for (; i < str.length; ++i) {
      const code = str.charCodeAt(i);
      if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
        break;
    }

    // Ended on whitespace (malformed)
    if (i === str.length)
      return;

    let name;
    const nameStart = i;
    // Parse parameter name (token characters up to '=')
    for (; i < str.length; ++i) {
      const code = str.charCodeAt(i);
      if (TOKEN[code] !== 1) {
        if (code !== 61/* '=' */)
          return;
        break;
      }
    }

    // No value (malformed)
    if (i === str.length)
      return;

    name = str.slice(nameStart, i);
    ++i; // Skip over '='

    // No value (malformed)
    if (i === str.length)
      return;

    let value = '';
    let valueStart;
    if (str.charCodeAt(i) === 34/* '"' */) {
      valueStart = ++i;
      let escaping = false;
      // Parse quoted value, unescaping backslash sequences as we go
      for (; i < str.length; ++i) {
        const code = str.charCodeAt(i);
        if (code === 92/* '\\' */) {
          if (escaping) {
            // Literal backslash produced by '\\'
            valueStart = i;
            escaping = false;
          } else {
            value += str.slice(valueStart, i);
            escaping = true;
          }
          continue;
        }
        if (code === 34/* '"' */) {
          if (escaping) {
            // Escaped quote: keep it as part of the value
            valueStart = i;
            escaping = false;
            continue;
          }
          value += str.slice(valueStart, i);
          break;
        }
        if (escaping) {
          valueStart = i - 1;
          escaping = false;
        }
        // Invalid unescaped quoted character (malformed)
        if (QDTEXT[code] !== 1)
          return;
      }
      // No end quote (malformed)
      if (i === str.length)
        return;

      ++i; // Skip over double quote
    } else {
      valueStart = i;
      // Parse unquoted value (token characters only)
      for (; i < str.length; ++i) {
        const code = str.charCodeAt(i);
        if (TOKEN[code] !== 1) {
          // No value (malformed)
          if (i === valueStart)
            return;
          break;
        }
      }
      value = str.slice(valueStart, i);
    }

    // First occurrence of a parameter name wins
    name = name.toLowerCase();
    if (params[name] === undefined)
      params[name] = value;
  }

  return params;
}
2023-07-27 11:01:06 +00:00
/**
 * Parses a Content-Disposition header value into `{ type, params }` (type
 * lowercased, params a null-prototype object), or returns undefined for
 * malformed input. `defDecoder` decodes non-extended parameter values.
 */
function parseDisposition(str, defDecoder) {
  if (str.length === 0)
    return;

  const params = Object.create(null);

  // Consume the disposition type (token characters)
  let pos = 0;
  while (pos < str.length && TOKEN[str.charCodeAt(pos)] === 1)
    ++pos;

  // Anything after the type is handed to the parameter parser
  if (pos < str.length
      && parseDispositionParams(str, pos, params, defDecoder) === undefined) {
    return;
  }

  const type = str.slice(0, pos).toLowerCase();

  return { type, params };
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/**
 * Parses the `; name=value` parameter list of a Content-Disposition header
 * starting at index `i`, filling `params` (first occurrence wins, names
 * lowercased). Supports both plain values (token or quoted string, decoded
 * with `defDecoder`) and RFC 5987 extended values (`name*=charset'lang'%xx`,
 * decoded with convertToUTF8). Returns `params` on success, undefined on
 * malformed input.
 */
function parseDispositionParams(str, i, params, defDecoder) {
  while (i < str.length) {
    // Consume whitespace
    for (; i < str.length; ++i) {
      const code = str.charCodeAt(i);
      if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
        break;
    }

    // Ended on whitespace
    if (i === str.length)
      break;

    // Check for malformed parameter
    if (str.charCodeAt(i++) !== 59/* ';' */)
      return;

    // Consume whitespace
    for (; i < str.length; ++i) {
      const code = str.charCodeAt(i);
      if (code !== 32/* ' ' */ && code !== 9/* '\t' */)
        break;
    }

    // Ended on whitespace (malformed)
    if (i === str.length)
      return;

    let name;
    const nameStart = i;
    // Parse parameter name (token characters up to '=')
    for (; i < str.length; ++i) {
      const code = str.charCodeAt(i);
      if (TOKEN[code] !== 1) {
        if (code === 61/* '=' */)
          break;
        return;
      }
    }

    // No value (malformed)
    if (i === str.length)
      return;

    let value = '';
    let valueStart;
    let charset;
    //~ let lang;
    name = str.slice(nameStart, i);
    if (name.charCodeAt(name.length - 1) === 42/* '*' */) {
      // Extended value (RFC 5987): charset'language'percent-encoded-text
      const charsetStart = ++i;
      // Parse charset name
      for (; i < str.length; ++i) {
        const code = str.charCodeAt(i);
        if (CHARSET[code] !== 1) {
          if (code !== 39/* '\'' */)
            return;
          break;
        }
      }

      // Incomplete charset (malformed)
      if (i === str.length)
        return;

      charset = str.slice(charsetStart, i);
      ++i; // Skip over the '\''

      //~ const langStart = ++i;
      // Parse language name (skipped; not used)
      for (; i < str.length; ++i) {
        const code = str.charCodeAt(i);
        if (code === 39/* '\'' */)
          break;
      }

      // Incomplete language (malformed)
      if (i === str.length)
        return;

      //~ lang = str.slice(langStart, i);
      ++i; // Skip over the '\''

      // No value (malformed)
      if (i === str.length)
        return;

      valueStart = i;
      let encode = 0;
      // Parse value, resolving %xx escapes inline
      for (; i < str.length; ++i) {
        const code = str.charCodeAt(i);
        if (EXTENDED_VALUE[code] !== 1) {
          if (code === 37/* '%' */) {
            let hexUpper;
            let hexLower;
            if (i + 2 < str.length
                && (hexUpper = HEX_VALUES[str.charCodeAt(i + 1)]) !== -1
                && (hexLower = HEX_VALUES[str.charCodeAt(i + 2)]) !== -1) {
              const byteVal = (hexUpper << 4) + hexLower;
              value += str.slice(valueStart, i);
              value += String.fromCharCode(byteVal);
              i += 2;
              valueStart = i + 1;
              // Track whether any escape produced a non-ASCII byte,
              // which decides how the charset decoder treats the string
              if (byteVal >= 128)
                encode = 2;
              else if (encode === 0)
                encode = 1;
              continue;
            }
            // '%' disallowed in non-percent encoded contexts (malformed)
            return;
          }
          break;
        }
      }

      value += str.slice(valueStart, i);
      value = convertToUTF8(value, charset, encode);
      if (value === undefined)
        return;
    } else {
      // Non-extended value

      ++i; // Skip over '='

      // No value (malformed)
      if (i === str.length)
        return;

      if (str.charCodeAt(i) === 34/* '"' */) {
        valueStart = ++i;
        let escaping = false;
        // Parse quoted value, unescaping backslash sequences as we go
        for (; i < str.length; ++i) {
          const code = str.charCodeAt(i);
          if (code === 92/* '\\' */) {
            if (escaping) {
              valueStart = i;
              escaping = false;
            } else {
              value += str.slice(valueStart, i);
              escaping = true;
            }
            continue;
          }
          if (code === 34/* '"' */) {
            if (escaping) {
              valueStart = i;
              escaping = false;
              continue;
            }
            value += str.slice(valueStart, i);
            break;
          }
          if (escaping) {
            valueStart = i - 1;
            escaping = false;
          }
          // Invalid unescaped quoted character (malformed)
          if (QDTEXT[code] !== 1)
            return;
        }

        // No end quote (malformed)
        if (i === str.length)
          return;

        ++i; // Skip over double quote
      } else {
        valueStart = i;
        // Parse unquoted value (token characters only)
        for (; i < str.length; ++i) {
          const code = str.charCodeAt(i);
          if (TOKEN[code] !== 1) {
            // No value (malformed)
            if (i === valueStart)
              return;
            break;
          }
        }
        value = str.slice(valueStart, i);
      }

      value = defDecoder(value, 2);
      if (value === undefined)
        return;
    }

    // First occurrence of a parameter name wins
    name = name.toLowerCase();
    if (params[name] === undefined)
      params[name] = value;
  }

  return params;
}
/**
 * Resolves a charset name to a decoder function `(data, hint) => string`.
 * Tries the name as given, then once more lowercased; anything unrecognized
 * falls through to the generic TextDecoder-backed decoder with the charset
 * name bound as `this`.
 */
function getDecoder(charset) {
  let retried = false;
  for (;;) {
    switch (charset) {
      case 'utf-8':
      case 'utf8':
        return decoders.utf8;
      case 'latin1':
      case 'ascii': // TODO: Make these a separate, strict decoder?
      case 'us-ascii':
      case 'iso-8859-1':
      case 'iso8859-1':
      case 'iso88591':
      case 'iso_8859-1':
      case 'windows-1252':
      case 'iso_8859-1:1987':
      case 'cp1252':
      case 'x-cp1252':
        return decoders.latin1;
      case 'utf16le':
      case 'utf-16le':
      case 'ucs2':
      case 'ucs-2':
        return decoders.utf16le;
      case 'base64':
        return decoders.base64;
      default:
        if (!retried) {
          retried = true;
          charset = charset.toLowerCase();
          break; // Re-enter the switch with the normalized name
        }
        return decoders.other.bind(charset);
    }
  }
}
2023-07-27 11:01:06 +00:00
// Charset decoders. Each takes `data` (Buffer, or a latin1-ish string built
// from percent-decoded bytes) plus a `hint`: 0 = no percent-escapes seen,
// 1 = only ASCII escapes, 2 = at least one escape with the high bit set.
const decoders = {
  utf8: (data, hint) => {
    if (data.length === 0)
      return '';
    if (typeof data === 'string') {
      // If `data` never had any percent-encoded bytes or never had any that
      // were outside of the ASCII range, then we can safely just return the
      // input since UTF-8 is ASCII compatible
      if (hint < 2)
        return data;

      data = Buffer.from(data, 'latin1');
    }
    return data.utf8Slice(0, data.length);
  },

  latin1: (data, hint) => {
    if (data.length === 0)
      return '';
    // latin1 is a byte-for-byte mapping, so a string input is already decoded
    if (typeof data === 'string')
      return data;
    return data.latin1Slice(0, data.length);
  },

  utf16le: (data, hint) => {
    if (data.length === 0)
      return '';
    if (typeof data === 'string')
      data = Buffer.from(data, 'latin1');
    return data.ucs2Slice(0, data.length);
  },

  base64: (data, hint) => {
    if (data.length === 0)
      return '';
    if (typeof data === 'string')
      data = Buffer.from(data, 'latin1');
    return data.base64Slice(0, data.length);
  },

  // Fallback for charsets not special-cased above. getDecoder() returns
  // `decoders.other.bind(charset)`, so the charset name arrives as `this`.
  // BUGFIX: this must be a regular function — it used to be an arrow
  // function, and arrow functions ignore `bind()`, so `new TextDecoder(this)`
  // never received the charset and every unrecognized charset was silently
  // decoded as UTF-8 (or yielded undefined via the bare catch).
  other: function (data, hint) {
    if (data.length === 0)
      return '';
    if (typeof data === 'string')
      data = Buffer.from(data, 'latin1');
    try {
      const decoder = new TextDecoder(this);
      return decoder.decode(data);
    } catch {
      // Unknown/unsupported charset label: mirror the original behavior of
      // returning undefined rather than throwing
    }
  },
};
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/**
 * Decodes `data` from `charset` to a UTF-8 JS string using the matching
 * decoder; yields undefined when no decoder applies or decoding fails.
 */
function convertToUTF8(data, charset, hint) {
  const decode = getDecoder(charset);
  return decode ? decode(data, hint) : undefined;
}
2023-07-27 11:01:06 +00:00
/**
 * Extracts the final path component from a (possibly attacker-supplied)
 * filename, honoring both '/' and '\' separators, and maps the traversal
 * names '.' and '..' — as well as non-string input — to ''.
 */
function basename(path) {
  if (typeof path !== 'string')
    return '';
  // Walk backwards to the last path separator, if any
  let i = path.length - 1;
  while (i >= 0) {
    const code = path.charCodeAt(i);
    if (code === 0x2F/* '/' */ || code === 0x5C/* '\' */) {
      path = path.slice(i + 1);
      break;
    }
    --i;
  }
  // Never hand back relative-navigation names
  return (path === '..' || path === '.' ? '' : path);
}
const TOKEN = [
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 1 , 0 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 1 , 1 , 0 , 1 , 1 , 0 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 1 , 0 , 1 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
] ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const QDTEXT = [
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 1 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
1 , 1 , 0 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
] ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const CHARSET = [
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 1 , 0 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 1 , 0 , 1 , 0 , 0 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 ,
1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 1 , 1 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ,
] ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Per-byte lookup table: 1 marks characters allowed to appear literally
// (without %xx encoding) in an RFC 5987 extended parameter value
// (the "attr-char" production).
const EXTENDED_VALUE = [
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
  0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/* eslint-disable no-multi-spaces */
// Maps an ASCII character code to its hex-digit value ('0'-'9', 'A'-'F',
// 'a'-'f'); every other code maps to -1 (invalid hex digit).
const HEX_VALUES = [
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
   0,  1,  2,  3,  4,  5,  6,  7,  8,  9, -1, -1, -1, -1, -1, -1,
  -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
];
/* eslint-enable no-multi-spaces */
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
module . exports = {
basename ,
convertToUTF8 ,
getDecoder ,
parseContentType ,
parseDisposition ,
} ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ 8932 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Error subclass used to signal deprecated API usage (bundled `deprecation`
// package). Carries the standard Error contract; only `name` is customized.
// Fix: removed git-blame timestamp lines that had been interleaved into the
// class body, which made the surrounding code syntactically invalid.
class Deprecation extends Error {
  constructor(message) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      // Hide this constructor frame from the captured stack.
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
exports . Deprecation = Deprecation ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 3287 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-07-27 11:01:06 +00:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/ * !
* is - plain - object < https : //github.com/jonschlinkert/is-plain-object>
*
* Copyright ( c ) 2014 - 2017 , Jon Schlinkert .
* Released under the MIT License .
* /
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Fix: removed git-blame timestamp lines interleaved through both function
// bodies (syntactically invalid residue); logic unchanged from upstream
// is-plain-object.

// True only for values whose [[Class]] is exactly Object — excludes
// arrays, dates, null, functions, etc.
function isObject (o) {
  return Object.prototype.toString.call(o) === '[object Object]'
}

// True for "plain" objects: object literals, `new Object()`, and
// prototype-less objects (Object.create(null), whose `constructor` is
// undefined). Class instances and constructor-modified objects are
// rejected.
function isPlainObject (o) {
  var ctor, prot

  if (isObject(o) === false) return false

  // If has modified constructor
  ctor = o.constructor
  if (ctor === undefined) return true

  // If has modified prototype
  prot = ctor.prototype
  if (isObject(prot) === false) return false

  // If constructor does not have an Object-specific method
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false
  }

  // Most likely a plain Object
  return true
}
2023-07-27 11:01:06 +00:00
exports . isPlainObject = isPlainObject ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ 1223 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Bundled `once` package. wrappy (module 7461, not visible here) presumably
// copies properties from the callback onto the returned wrapper — verify
// against that module. `require('once')` yields the loose wrapper,
// `require('once').strict` the throwing variant.
var wrappy = __nccwpck_require__(7461)
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Fix: removed git-blame timestamp lines interleaved through these
// function bodies (syntactically invalid residue); behavior unchanged.

// Opt-in prototype installer: calling `require('once').proto()` (itself
// wrapped in once, so at most one install) adds `.once()` / `.onceStrict()`
// convenience methods to every function.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  })
})

// Wrap `fn` so it executes at most once; subsequent calls return the
// cached first result. State is exposed on the wrapper as `f.called` /
// `f.value`.
function once (fn) {
  var f = function () {
    if (f.called) return f.value
    f.called = true
    return f.value = fn.apply(this, arguments)
  }
  f.called = false
  return f
}

// Like once(), but a second invocation throws instead of silently
// returning the cached value. The error message uses fn.name when set.
function onceStrict (fn) {
  var f = function () {
    if (f.called)
      throw new Error(f.onceError)
    f.called = true
    return f.value = fn.apply(this, arguments)
  }
  var name = fn.name || 'Function wrapped with `once`'
  f.onceError = name + " shouldn't be called more than once"
  f.called = false
  return f
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ 5911 :
/***/ ( ( module , exports ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
exports = module . exports = SemVer
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Internal trace logger: a no-op unless NODE_DEBUG contains "semver",
// in which case every call is prefixed with 'SEMVER' and sent to
// console.log.
var debug = function () {}
/* istanbul ignore next */
if (typeof process === 'object' &&
    process.env &&
    process.env.NODE_DEBUG &&
    /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
  debug = function () {
    var args = Array.prototype.slice.call(arguments, 0)
    args.unshift('SEMVER')
    console.log.apply(console, args)
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Fix: removed git-blame timestamp lines interleaved between these
// statements (syntactically invalid residue); values unchanged.

// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'

// Hard cap on the length of any version string we attempt to parse.
var MAX_LENGTH = 256
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
  /* istanbul ignore next */ 9007199254740991

// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

// The actual regexps go on exports.re; src holds the regex source strings
// and t maps token names to their shared index in both arrays.
var re = exports.re = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0

// Register a token name, assigning it the next sequential index.
function tok (n) {
  t[n] = R++
}
2023-07-27 11:01:06 +00:00
// Fix: removed git-blame timestamp lines interleaved between these
// statements (syntactically invalid residue); every regex source string is
// reproduced byte-for-byte from the original.

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.
tok('MAINVERSION')
src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
  '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
  '(' + src[t.NUMERICIDENTIFIER] + ')'

tok('MAINVERSIONLOOSE')
src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
  '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
  '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'

// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
tok('PRERELEASEIDENTIFIER')
src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +
  '|' + src[t.NONNUMERICIDENTIFIER] + ')'

tok('PRERELEASEIDENTIFIERLOOSE')
src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +
  '|' + src[t.NONNUMERICIDENTIFIER] + ')'

// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.
tok('PRERELEASE')
src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +
  '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'

tok('PRERELEASELOOSE')
src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
  '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'

// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.
tok('BUILD')
src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] +
  '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))'

// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.

// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.
tok('FULL')
tok('FULLPLAIN')
src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +
  src[t.PRERELEASE] + '?' +
  src[t.BUILD] + '?'

src[t.FULL] = '^' + src[t.FULLPLAIN] + '$'

// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
tok('LOOSEPLAIN')
src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] +
  src[t.PRERELEASELOOSE] + '?' +
  src[t.BUILD] + '?'

tok('LOOSE')
src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'

tok('GTLT')
src[t.GTLT] = '((?:<|>)?=?)'

// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
tok('XRANGEIDENTIFIERLOOSE')
src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
tok('XRANGEIDENTIFIER')
src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*'

tok('XRANGEPLAIN')
src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +
  '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
  '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
  '(?:' + src[t.PRERELEASE] + ')?' +
  src[t.BUILD] + '?' +
  ')?)?'

tok('XRANGEPLAINLOOSE')
src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
  '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
  '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
  '(?:' + src[t.PRERELEASELOOSE] + ')?' +
  src[t.BUILD] + '?' +
  ')?)?'

tok('XRANGE')
src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$'
tok('XRANGELOOSE')
src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$'

// Coercion.
// Extract anything that could conceivably be a part of a valid semver
tok('COERCE')
src[t.COERCE] = '(^|[^\\d])' +
  '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
  '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
  '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
  '(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
tok('LONETILDE')
src[t.LONETILDE] = '(?:~>?)'

tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'
tok('TILDELOOSE')
src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'

// Caret ranges.
// Meaning is "at least and backwards compatible with"
tok('LONECARET')
src[t.LONECARET] = '(?:\\^)'

tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
var caretTrimReplace = '$1^'

tok('CARET')
src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'
tok('CARETLOOSE')
src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'

// A simple gt/lt/eq thing, or just "" to indicate "any version"
tok('COMPARATORLOOSE')
src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'
tok('COMPARATOR')
src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$'

// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
tok('COMPARATORTRIM')
src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
  '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
tok('HYPHENRANGE')
src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' +
  '\\s+-\\s+' +
  '(' + src[t.XRANGEPLAIN] + ')' +
  '\\s*$'

tok('HYPHENRANGELOOSE')
src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +
  '\\s+-\\s+' +
  '(' + src[t.XRANGEPLAINLOOSE] + ')' +
  '\\s*$'

// Star ranges basically just allow anything at all.
tok('STAR')
src[t.STAR] = '(<|>)?=?\\s*\\*'

// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for (var i = 0; i < R; i++) {
  debug(i, src[i])
  if (!re[i]) {
    re[i] = new RegExp(src[i])
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
exports.parse = parse

// Parse `version` into a SemVer instance, or return null — this function
// never throws. Null is returned for non-strings, strings longer than
// MAX_LENGTH, and strings that fail the (strict or loose) grammar.
// Fix: removed git-blame timestamp lines interleaved into the body
// (syntactically invalid residue); logic unchanged.
function parse (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy call shape: a boolean/truthy third arg meant "loose".
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  if (version.length > MAX_LENGTH) {
    return null
  }

  // Cheap regex pre-check so the constructor below should not throw.
  var r = options.loose ? re[t.LOOSE] : re[t.FULL]
  if (!r.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
exports.valid = valid

// Normalize `version` to its canonical string form, or null when it does
// not parse.
function valid (version, options) {
  var parsed = parse(version, options)
  return parsed === null ? null : parsed.version
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
exports.clean = clean

// Like valid(), but first trims whitespace and strips any leading '=' /
// 'v' characters before parsing.
function clean (version, options) {
  var stripped = version.trim().replace(/^[=v]+/, '')
  var parsed = parse(stripped, options)
  return parsed ? parsed.version : null
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
exports.SemVer = SemVer

// Parsed representation of a semver string: numeric major/minor/patch,
// `prerelease` as an array of string/number identifiers, `build` as an
// array of strings, plus `raw` and the canonical `version` (via format()).
// Throws TypeError on non-string/over-long/unparseable input.
// Fix: removed git-blame timestamp lines interleaved into the body
// (syntactically invalid residue); logic unchanged.
function SemVer (version, options) {
  if (!options || typeof options !== 'object') {
    // Legacy call shape: a boolean/truthy second arg meant "loose".
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  if (version instanceof SemVer) {
    if (version.loose === options.loose) {
      // Same parse flags: reuse the existing instance.
      return version
    } else {
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  // Allow calling without `new`.
  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      return id
    })
  }

  this.build = m[5] ? m[5].split('.') : []
  this.format()
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Recompute and return the canonical `version` string from the parsed
// fields. Build metadata is intentionally excluded (it never participates
// in comparison). Fix: removed interleaved git-blame timestamp lines.
SemVer.prototype.format = function () {
  this.version = this.major + '.' + this.minor + '.' + this.patch
  if (this.prerelease.length) {
    this.version += '-' + this.prerelease.join('.')
  }
  return this.version
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
SemVer . prototype . toString = function ( ) {
return this . version
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Total-order comparison per the semver spec: main version first, then
// prerelease identifiers. Returns -1/0/1. Coerces `other` through the
// SemVer constructor (which may throw on invalid input).
// Fix: removed interleaved git-blame timestamp lines.
SemVer.prototype.compare = function (other) {
  debug('SemVer.compare', this.version, this.options, other)
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }
  return this.compareMain(other) || this.comparePre(other)
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Compare only major.minor.patch, left-to-right; -1/0/1.
// Fix: removed interleaved git-blame timestamp lines.
SemVer.prototype.compareMain = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }
  return compareIdentifiers(this.major, other.major) ||
    compareIdentifiers(this.minor, other.minor) ||
    compareIdentifiers(this.patch, other.patch)
}
2023-07-27 11:01:06 +00:00
// Compare prerelease identifier lists per the spec: a release (no
// prerelease) sorts AFTER any prerelease; otherwise identifiers are
// compared pairwise, with the shorter list sorting first.
// Fix: removed interleaved git-blame timestamp lines.
SemVer.prototype.comparePre = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }

  // NOT having a prerelease is > having one
  if (this.prerelease.length && !other.prerelease.length) {
    return -1
  } else if (!this.prerelease.length && other.prerelease.length) {
    return 1
  } else if (!this.prerelease.length && !other.prerelease.length) {
    return 0
  }

  var i = 0
  do {
    var a = this.prerelease[i]
    var b = other.prerelease[i]
    debug('prerelease compare', i, a, b)
    if (a === undefined && b === undefined) {
      return 0
    } else if (b === undefined) {
      return 1
    } else if (a === undefined) {
      return -1
    } else if (a === b) {
      continue
    } else {
      return compareIdentifiers(a, b)
    }
  } while (++i)
}
// Tie-break comparison over the build metadata identifiers, pairwise; a
// missing identifier sorts lower. Used by compareBuild()/sort() for a
// stable total order.
SemVer.prototype.compareBuild = function (other) {
  if (!(other instanceof SemVer)) {
    other = new SemVer(other, this.options)
  }
  for (var idx = 0; ; idx++) {
    var mine = this.build[idx]
    var theirs = other.build[idx]
    // NOTE(review): the debug label says 'prerelease' upstream even though
    // this walks build identifiers; string kept byte-identical.
    debug('prerelease compare', idx, mine, theirs)
    if (mine === undefined && theirs === undefined) {
      return 0
    }
    if (theirs === undefined) {
      return 1
    }
    if (mine === undefined) {
      return -1
    }
    if (mine !== theirs) {
      return compareIdentifiers(mine, theirs)
    }
  }
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
SemVer . prototype . inc = function ( release , identifier ) {
switch ( release ) {
case 'premajor' :
this . prerelease . length = 0
this . patch = 0
this . minor = 0
this . major ++
this . inc ( 'pre' , identifier )
break
case 'preminor' :
this . prerelease . length = 0
this . patch = 0
this . minor ++
this . inc ( 'pre' , identifier )
break
case 'prepatch' :
// If this is already a prerelease, it will bump to the next version
// drop any prereleases that might already exist, since they are not
// relevant at this point.
this . prerelease . length = 0
this . inc ( 'patch' , identifier )
this . inc ( 'pre' , identifier )
break
// If the input is a non-prerelease version, this acts the same as
// prepatch.
case 'prerelease' :
if ( this . prerelease . length === 0 ) {
this . inc ( 'patch' , identifier )
}
this . inc ( 'pre' , identifier )
break
case 'major' :
// If this is a pre-major version, bump up to the same major version.
// Otherwise increment major.
// 1.0.0-5 bumps to 1.0.0
// 1.1.0 bumps to 2.0.0
if ( this . minor !== 0 ||
this . patch !== 0 ||
this . prerelease . length === 0 ) {
this . major ++
}
this . minor = 0
this . patch = 0
this . prerelease = [ ]
break
case 'minor' :
// If this is a pre-minor version, bump up to the same minor version.
// Otherwise increment minor.
// 1.2.0-5 bumps to 1.2.0
// 1.2.1 bumps to 1.3.0
if ( this . patch !== 0 || this . prerelease . length === 0 ) {
this . minor ++
}
this . patch = 0
this . prerelease = [ ]
break
case 'patch' :
// If this is not a pre-release version, it will increment the patch.
// If it is a pre-release it will bump up to the same patch version.
// 1.2.0-5 patches to 1.2.0
// 1.2.0 patches to 1.2.1
if ( this . prerelease . length === 0 ) {
this . patch ++
}
this . prerelease = [ ]
break
// This probably shouldn't be used publicly.
// 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
case 'pre' :
if ( this . prerelease . length === 0 ) {
this . prerelease = [ 0 ]
} else {
var i = this . prerelease . length
while ( -- i >= 0 ) {
if ( typeof this . prerelease [ i ] === 'number' ) {
this . prerelease [ i ] ++
i = - 2
}
}
if ( i === - 1 ) {
// didn't increment anything
this . prerelease . push ( 0 )
}
}
if ( identifier ) {
// 1.2.0-beta.1 bumps to 1.2.0-beta.2,
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
if ( this . prerelease [ 0 ] === identifier ) {
if ( isNaN ( this . prerelease [ 1 ] ) ) {
this . prerelease = [ identifier , 0 ]
}
} else {
this . prerelease = [ identifier , 0 ]
}
}
break
default :
throw new Error ( 'invalid increment argument: ' + release )
}
this . format ( )
this . raw = this . version
return this
}
exports.inc = inc

// Functional wrapper over SemVer.prototype.inc: returns the bumped version
// string, or null when `version` does not parse. Supports the legacy
// (version, release, identifier) call shape where `loose` is omitted.
function inc (version, release, loose, identifier) {
  if (typeof loose === 'string') {
    identifier = loose
    loose = undefined
  }
  var result = null
  try {
    result = new SemVer(version, loose).inc(release, identifier).version
  } catch (er) {
    result = null
  }
  return result
}
exports.diff = diff

// Name the most significant field that differs between two versions:
// 'major'/'minor'/'patch', prefixed with 'pre' when either side has a
// prerelease; 'prerelease' when only the prerelease differs; null when
// equal. May return undefined when sides differ only in ignored fields.
function diff (version1, version2) {
  if (eq(version1, version2)) {
    return null
  }
  var v1 = parse(version1)
  var v2 = parse(version2)
  var prefix = ''
  var defaultResult
  if (v1.prerelease.length || v2.prerelease.length) {
    prefix = 'pre'
    defaultResult = 'prerelease'
  }
  // Check fields in significance order (matches the original for-in walk).
  var fields = ['major', 'minor', 'patch']
  for (var i = 0; i < fields.length; i++) {
    if (v1[fields[i]] !== v2[fields[i]]) {
      return prefix + fields[i]
    }
  }
  return defaultResult // may be undefined
}
exports . compareIdentifiers = compareIdentifiers
var numeric = /^[0-9]+$/

// Spec ordering for a single identifier pair: numeric identifiers compare
// numerically and always sort before alphanumeric ones; otherwise compare
// as strings. Returns -1/0/1.
function compareIdentifiers (a, b) {
  var aIsNumeric = numeric.test(a)
  var bIsNumeric = numeric.test(b)

  if (aIsNumeric && bIsNumeric) {
    a = +a
    b = +b
  }

  if (a === b) return 0
  if (aIsNumeric && !bIsNumeric) return -1
  if (bIsNumeric && !aIsNumeric) return 1
  return a < b ? -1 : 1
}
// --- Simple comparison helpers -------------------------------------------
// Thin wrappers over SemVer.prototype.compare / compareIdentifiers; each
// accepts the usual `loose` flag and is exported on the module object.
// (Declarations precede their export assignments; hoisting makes the two
// orderings equivalent.)

function rcompareIdentifiers (a, b) {
  return compareIdentifiers(b, a)
}
exports.rcompareIdentifiers = rcompareIdentifiers

function major (a, loose) {
  return new SemVer(a, loose).major
}
exports.major = major

function minor (a, loose) {
  return new SemVer(a, loose).minor
}
exports.minor = minor

function patch (a, loose) {
  return new SemVer(a, loose).patch
}
exports.patch = patch

function compare (a, b, loose) {
  return new SemVer(a, loose).compare(new SemVer(b, loose))
}
exports.compare = compare

function compareLoose (a, b) {
  return compare(a, b, true)
}
exports.compareLoose = compareLoose

// Full total order: spec comparison first, build metadata as tie-break.
function compareBuild (a, b, loose) {
  var versionA = new SemVer(a, loose)
  var versionB = new SemVer(b, loose)
  return versionA.compare(versionB) || versionA.compareBuild(versionB)
}
exports.compareBuild = compareBuild

function rcompare (a, b, loose) {
  return compare(b, a, loose)
}
exports.rcompare = rcompare

// In-place sorts (Array.prototype.sort mutates the given list).
function sort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compareBuild(a, b, loose)
  })
}
exports.sort = sort

function rsort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compareBuild(b, a, loose)
  })
}
exports.rsort = rsort

function gt (a, b, loose) {
  return compare(a, b, loose) > 0
}
exports.gt = gt

function lt (a, b, loose) {
  return compare(a, b, loose) < 0
}
exports.lt = lt

function eq (a, b, loose) {
  return compare(a, b, loose) === 0
}
exports.eq = eq

function neq (a, b, loose) {
  return compare(a, b, loose) !== 0
}
exports.neq = neq

function gte (a, b, loose) {
  return compare(a, b, loose) >= 0
}
exports.gte = gte

function lte (a, b, loose) {
  return compare(a, b, loose) <= 0
}
exports.lte = lte

// Apply a comparison operator given as a string. '===' / '!==' compare the
// raw version strings without semver coercion; all other operators go
// through the semver helpers above. Throws TypeError on an unknown op.
function cmp (a, op, b, loose) {
  switch (op) {
    case '===':
      if (typeof a === 'object') a = a.version
      if (typeof b === 'object') b = b.version
      return a === b

    case '!==':
      if (typeof a === 'object') a = a.version
      if (typeof b === 'object') b = b.version
      return a !== b

    case '':
    case '=':
    case '==':
      return eq(a, b, loose)

    case '!=':
      return neq(a, b, loose)

    case '>':
      return gt(a, b, loose)

    case '>=':
      return gte(a, b, loose)

    case '<':
      return lt(a, b, loose)

    case '<=':
      return lte(a, b, loose)

    default:
      throw new TypeError('Invalid operator: ' + op)
  }
}
exports.cmp = cmp
exports . Comparator = Comparator
function Comparator ( comp , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( comp instanceof Comparator ) {
if ( comp . loose === ! ! options . loose ) {
return comp
} else {
comp = comp . value
}
}
if ( ! ( this instanceof Comparator ) ) {
return new Comparator ( comp , options )
}
debug ( 'comparator' , comp , options )
this . options = options
this . loose = ! ! options . loose
this . parse ( comp )
if ( this . semver === ANY ) {
this . value = ''
} else {
this . value = this . operator + this . semver . version
}
debug ( 'comp' , this )
}
var ANY = { }
Comparator . prototype . parse = function ( comp ) {
var r = this . options . loose ? re [ t . COMPARATORLOOSE ] : re [ t . COMPARATOR ]
var m = comp . match ( r )
if ( ! m ) {
throw new TypeError ( 'Invalid comparator: ' + comp )
}
this . operator = m [ 1 ] !== undefined ? m [ 1 ] : ''
if ( this . operator === '=' ) {
this . operator = ''
}
// if it literally is just '>' or '' then allow anything.
if ( ! m [ 2 ] ) {
this . semver = ANY
} else {
this . semver = new SemVer ( m [ 2 ] , this . options . loose )
}
}
Comparator . prototype . toString = function ( ) {
return this . value
}
Comparator . prototype . test = function ( version ) {
debug ( 'Comparator.test' , version , this . options . loose )
if ( this . semver === ANY || version === ANY ) {
return true
}
if ( typeof version === 'string' ) {
try {
version = new SemVer ( version , this . options )
} catch ( er ) {
return false
}
}
return cmp ( version , this . operator , this . semver , this . options )
}
// Do this comparator and `comp` admit at least one common version?
Comparator . prototype . intersects = function ( comp , options ) {
if ( ! ( comp instanceof Comparator ) ) {
throw new TypeError ( 'a Comparator is required' )
}
// Normalize legacy boolean `options` into an options object.
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
var rangeTmp
// An operator-less comparator is either ANY ('' value -> always intersects)
// or an exact version; test the exact version against the other side.
if ( this . operator === '' ) {
if ( this . value === '' ) {
return true
}
rangeTmp = new Range ( comp . value , options )
return satisfies ( this . value , rangeTmp , options )
} else if ( comp . operator === '' ) {
if ( comp . value === '' ) {
return true
}
rangeTmp = new Range ( this . value , options )
return satisfies ( comp . semver , rangeTmp , options )
}
// Both sides have real operators: classify by direction and decide.
var sameDirectionIncreasing =
( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '>=' || comp . operator === '>' )
var sameDirectionDecreasing =
( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '<=' || comp . operator === '<' )
var sameSemVer = this . semver . version === comp . semver . version
var differentDirectionsInclusive =
( this . operator === '>=' || this . operator === '<=' ) &&
( comp . operator === '>=' || comp . operator === '<=' )
var oppositeDirectionsLessThan =
cmp ( this . semver , '<' , comp . semver , options ) &&
( ( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '<=' || comp . operator === '<' ) )
var oppositeDirectionsGreaterThan =
cmp ( this . semver , '>' , comp . semver , options ) &&
( ( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '>=' || comp . operator === '>' ) )
// Same direction always overlaps; opposite directions overlap when the
// bounds leave room (or touch exactly with inclusive operators).
return sameDirectionIncreasing || sameDirectionDecreasing ||
( sameSemVer && differentDirectionsInclusive ) ||
oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
exports . Range = Range
// A semver range: a disjunction ('||') of comparator sets, each set being a
// space-separated conjunction of comparators. Callable with or without `new`.
function Range ( range , options ) {
// Normalize legacy boolean `options` into an options object.
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( range instanceof Range ) {
// Reuse only when both flags match; otherwise re-parse the raw text.
if ( range . loose === ! ! options . loose &&
range . includePrerelease === ! ! options . includePrerelease ) {
return range
} else {
return new Range ( range . raw , options )
}
}
// A single Comparator is promoted to a one-comparator range.
if ( range instanceof Comparator ) {
return new Range ( range . value , options )
}
if ( ! ( this instanceof Range ) ) {
return new Range ( range , options )
}
this . options = options
this . loose = ! ! options . loose
this . includePrerelease = ! ! options . includePrerelease
// First, split based on boolean or ||
this . raw = range
// Note: the map callback shadows `range` and is bound to `this` via the
// second argument so parseRange resolves on this instance.
this . set = range . split ( /\s*\|\|\s*/ ) . map ( function ( range ) {
return this . parseRange ( range . trim ( ) )
} , this ) . filter ( function ( c ) {
// throw out any that are not relevant for whatever reason
return c . length
} )
if ( ! this . set . length ) {
throw new TypeError ( 'Invalid SemVer Range: ' + range )
}
this . format ( )
}
// Re-render this.range from this.set: comparators space-joined within a set,
// sets joined with '||'. Caches and returns the rendered string.
Range.prototype.format = function () {
  const rendered = this.set
    .map(function (comps) { return comps.join(' ').trim() })
    .join('||')
    .trim()
  this.range = rendered
  return rendered
}
// The normalized range string produced by format().
Range . prototype . toString = function ( ) {
return this . range
}
// Parse one '||'-free range fragment into an array of Comparator objects.
// The textual rewrites below must run in this order: hyphen ranges first,
// then trims, then the sugar expansions inside parseComparator.
Range . prototype . parseRange = function ( range ) {
var loose = this . options . loose
range = range . trim ( )
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re [ t . HYPHENRANGELOOSE ] : re [ t . HYPHENRANGE ]
range = range . replace ( hr , hyphenReplace )
debug ( 'hyphen replace' , range )
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range . replace ( re [ t . COMPARATORTRIM ] , comparatorTrimReplace )
debug ( 'comparator trim' , range , re [ t . COMPARATORTRIM ] )
// `~ 1.2.3` => `~1.2.3`
range = range . replace ( re [ t . TILDETRIM ] , tildeTrimReplace )
// `^ 1.2.3` => `^1.2.3`
range = range . replace ( re [ t . CARETTRIM ] , caretTrimReplace )
// normalize spaces
range = range . split ( /\s+/ ) . join ( ' ' )
// At this point, the range is completely trimmed and
// ready to be split into comparators.
var compRe = loose ? re [ t . COMPARATORLOOSE ] : re [ t . COMPARATOR ]
// Expand caret/tilde/x-range/star sugar, then re-split into plain comparators.
var set = range . split ( ' ' ) . map ( function ( comp ) {
return parseComparator ( comp , this . options )
} , this ) . join ( ' ' ) . split ( /\s+/ )
if ( this . options . loose ) {
// in loose mode, throw out any that are not valid comparators
set = set . filter ( function ( comp ) {
return ! ! comp . match ( compRe )
} )
}
set = set . map ( function ( comp ) {
return new Comparator ( comp , this . options )
} , this )
return set
}
// Do the two ranges share at least one version? True when some satisfiable
// comparator set of this range pairwise-intersects some satisfiable set of
// the other range.
Range . prototype . intersects = function ( range , options ) {
if ( ! ( range instanceof Range ) ) {
throw new TypeError ( 'a Range is required' )
}
return this . set . some ( function ( thisComparators ) {
return (
isSatisfiable ( thisComparators , options ) &&
range . set . some ( function ( rangeComparators ) {
return (
isSatisfiable ( rangeComparators , options ) &&
thisComparators . every ( function ( thisComparator ) {
return rangeComparators . every ( function ( rangeComparator ) {
return thisComparator . intersects ( rangeComparator , options )
} )
} )
)
} )
)
} )
}
// take a set of comparators and determine whether there
// exists a version which can satisfy it
function isSatisfiable (comparators, options) {
  // A set is satisfiable when every unordered pair of its comparators has a
  // non-empty intersection. Empty and single-element sets are trivially
  // satisfiable.
  for (let i = comparators.length - 1; i > 0; --i) {
    for (let j = 0; j < i; ++j) {
      if (!comparators[i].intersects(comparators[j], options)) {
        return false
      }
    }
  }
  return true
}
// Mostly just for testing and legacy API reasons
// Flattens a range into arrays of comparator strings, one array per
// '||'-separated comparator set.
function toComparators (range, options) {
  const sets = new Range(range, options).set
  return sets.map(function (comparators) {
    const joined = comparators
      .map(function (c) { return c.value })
      .join(' ')
      .trim()
    return joined.split(' ')
  })
}
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  // Expansion order matters: carets, then tildes, then x-ranges, then stars.
  const steps = [
    ['caret', replaceCarets],
    ['tildes', replaceTildes],
    ['xrange', replaceXRanges],
    ['stars', replaceStars]
  ]
  for (const [label, step] of steps) {
    comp = step(comp, options)
    debug(label, comp)
  }
  return comp
}
// Is `id` a wildcard version component: missing/empty, 'x', 'X', or '*'?
function isX (id) {
  if (!id) {
    return true
  }
  return id === '*' || id.toLowerCase() === 'x'
}
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
function replaceTildes (comp, options) {
  // Expand each whitespace-separated comparator independently.
  const parts = comp.trim().split(/\s+/)
  return parts
    .map(function (part) { return replaceTilde(part, options) })
    .join(' ')
}
// Expand a single tilde comparator (e.g. '~1.2.3') into its '>= <' pair.
// The replace callback receives (match, Major, minor, patch, prerelease).
function replaceTilde ( comp , options ) {
var r = options . loose ? re [ t . TILDELOOSE ] : re [ t . TILDE ]
return comp . replace ( r , function ( _ , M , m , p , pr ) {
debug ( 'tilde' , comp , _ , M , m , p , pr )
var ret
if ( isX ( M ) ) {
// '~' / '~*': no constraint at all.
ret = ''
} else if ( isX ( m ) ) {
// '~2': >=2.0.0 <3.0.0
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( isX ( p ) ) {
// ~1.2 == >=1.2.0 <1.3.0
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
} else if ( pr ) {
debug ( 'replaceTilde pr' , pr )
// Prerelease lower bound is kept; upper bound is still the next minor.
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0'
} else {
// ~1.2.3 == >=1.2.3 <1.3.0
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
debug ( 'tilde return' , ret )
return ret
} )
}
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets (comp, options) {
  // Expand each whitespace-separated comparator independently.
  const parts = comp.trim().split(/\s+/)
  return parts
    .map(function (part) { return replaceCaret(part, options) })
    .join(' ')
}
// Expand a single caret comparator (e.g. '^1.2.3') into its '>= <' pair.
// Caret pins the left-most non-zero component, so 0.x and 0.0.x get
// narrower upper bounds than >=1 versions.
function replaceCaret ( comp , options ) {
debug ( 'caret' , comp , options )
var r = options . loose ? re [ t . CARETLOOSE ] : re [ t . CARET ]
return comp . replace ( r , function ( _ , M , m , p , pr ) {
debug ( 'caret' , comp , _ , M , m , p , pr )
var ret
if ( isX ( M ) ) {
// '^' / '^*': no constraint at all.
ret = ''
} else if ( isX ( m ) ) {
// '^2': >=2.0.0 <3.0.0
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( isX ( p ) ) {
// '^0.2' stays within the minor; '^1.2' spans up to the next major.
if ( M === '0' ) {
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
} else {
ret = '>=' + M + '.' + m + '.0 <' + ( + M + 1 ) + '.0.0'
}
} else if ( pr ) {
debug ( 'replaceCaret pr' , pr )
if ( M === '0' ) {
// 0.0.x pins the patch; 0.x pins the minor.
if ( m === '0' ) {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + m + '.' + ( + p + 1 )
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + ( + M + 1 ) + '.0.0'
}
} else {
debug ( 'no pr' )
if ( M === '0' ) {
// Same shape as the prerelease case, without the '-pr' lower bound.
if ( m === '0' ) {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + m + '.' + ( + p + 1 )
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + ( + M + 1 ) + '.0.0'
}
}
debug ( 'caret return' , ret )
return ret
} )
}
// Expand every space-separated x-range comparator in `comp`. Note: unlike
// the tilde/caret wrappers, this one intentionally does not trim first.
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  const pieces = comp.split(/\s+/)
  return pieces
    .map(function (piece) { return replaceXRange(piece, options) })
    .join(' ')
}
// Expand a single x-range comparator like '1.x', '>=1.2.x', '>1.x' into
// plain comparators. The replace callback receives
// (match, operator, Major, minor, patch, prerelease).
function replaceXRange ( comp , options ) {
comp = comp . trim ( )
var r = options . loose ? re [ t . XRANGELOOSE ] : re [ t . XRANGE ]
return comp . replace ( r , function ( ret , gtlt , M , m , p , pr ) {
debug ( 'xRange' , comp , ret , gtlt , M , m , p , pr )
// xM/xm/xp: whether each component (and everything to its right) is a
// wildcard; anyX: at least one component is a wildcard.
var xM = isX ( M )
var xm = xM || isX ( m )
var xp = xm || isX ( p )
var anyX = xp
// '=1.x' behaves like plain '1.x'.
if ( gtlt === '=' && anyX ) {
gtlt = ''
}
// if we're including prereleases in the match, then we need
// to fix this to -0, the lowest possible prerelease value
pr = options . includePrerelease ? '-0' : ''
if ( xM ) {
if ( gtlt === '>' || gtlt === '<' ) {
// nothing is allowed
ret = '<0.0.0-0'
} else {
// nothing is forbidden
ret = '*'
}
} else if ( gtlt && anyX ) {
// we know patch is an x, because we have any x at all.
// replace X with 0
if ( xm ) {
m = 0
}
p = 0
if ( gtlt === '>' ) {
// >1 => >=2.0.0
// >1.2 => >=1.3.0
// >1.2.3 => >= 1.2.4
gtlt = '>='
if ( xm ) {
M = + M + 1
m = 0
p = 0
} else {
m = + m + 1
p = 0
}
} else if ( gtlt === '<=' ) {
// <=0.7.x is actually <0.8.0, since any 0.7.x should
// pass. Similarly, <=7.x is actually <8.0.0, etc.
gtlt = '<'
if ( xm ) {
M = + M + 1
} else {
m = + m + 1
}
}
ret = gtlt + M + '.' + m + '.' + p + pr
} else if ( xm ) {
// Bare '1' / '1.x': the whole major.
ret = '>=' + M + '.0.0' + pr + ' <' + ( + M + 1 ) + '.0.0' + pr
} else if ( xp ) {
// Bare '1.2' / '1.2.x': the whole minor.
ret = '>=' + M + '.' + m + '.0' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0' + pr
}
debug ( 'xRange return' , ret )
return ret
} )
}
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars ( comp , options ) {
debug ( 'replaceStars' , comp , options )
// Looseness is ignored here. star is always as loose as it gets!
return comp . trim ( ) . replace ( re [ t . STAR ] , '' )
}
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: zero-fill missing components; a fully-wildcarded "from"
  // imposes no lower bound at all.
  let lower
  if (isX(fM)) {
    lower = ''
  } else if (isX(fm)) {
    lower = `>=${fM}.0.0`
  } else if (isX(fp)) {
    lower = `>=${fM}.${fm}.0`
  } else {
    lower = '>=' + from
  }

  // Upper bound: a wildcarded component makes the bound exclusive on the
  // next higher version; a complete version keeps it inclusive.
  let upper
  if (isX(tM)) {
    upper = ''
  } else if (isX(tm)) {
    upper = `<${+tM + 1}.0.0`
  } else if (isX(tp)) {
    upper = `<${tM}.${+tm + 1}.0`
  } else if (tpr) {
    upper = `<=${tM}.${tm}.${tp}-${tpr}`
  } else {
    upper = '<=' + to
  }

  return (lower + ' ' + upper).trim()
}
// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }

  // Coerce strings to SemVer; an unparsable string matches nothing.
  if (typeof version === 'string') {
    try {
      version = new SemVer(version, this.options)
    } catch (er) {
      return false
    }
  }

  const options = this.options
  return this.set.some(function (comparators) {
    return testSet(comparators, version, options)
  })
}
// Does `version` satisfy every comparator in `set`? Prerelease versions only
// match when some comparator in the set explicitly mentions a prerelease of
// the same major.minor.patch (unless options.includePrerelease).
function testSet ( set , version , options ) {
for ( var i = 0 ; i < set . length ; i ++ ) {
if ( ! set [ i ] . test ( version ) ) {
return false
}
}
if ( version . prerelease . length && ! options . includePrerelease ) {
// Find the set of versions that are allowed to have prereleases
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
// That should allow `1.2.3-pr.2` to pass.
// However, `1.2.4-alpha.notready` should NOT be allowed,
// even though it's within the range set by the comparators.
for ( i = 0 ; i < set . length ; i ++ ) {
debug ( set [ i ] . semver )
if ( set [ i ] . semver === ANY ) {
continue
}
if ( set [ i ] . semver . prerelease . length > 0 ) {
var allowed = set [ i ] . semver
if ( allowed . major === version . major &&
allowed . minor === version . minor &&
allowed . patch === version . patch ) {
return true
}
}
}
// Version has a -pre, but it's not one of the ones we like.
return false
}
return true
}
exports.satisfies = satisfies
// Does `version` fall inside `range`? An unparsable range never matches.
function satisfies (version, range, options) {
  let parsed
  try {
    parsed = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsed.test(version)
}
exports.maxSatisfying = maxSatisfying
// Highest version in `versions` that satisfies `range`, or null (also null
// when the range itself is invalid).
function maxSatisfying (versions, range, options) {
  let rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }

  let best = null
  let bestSV = null
  for (const v of versions) {
    if (!rangeObj.test(v)) {
      continue
    }
    // Keep the first match, then anything strictly greater than the current best.
    if (best === null || bestSV.compare(v) === -1) {
      best = v
      bestSV = new SemVer(best, options)
    }
  }
  return best
}
exports.minSatisfying = minSatisfying
// Lowest version in `versions` that satisfies `range`, or null (also null
// when the range itself is invalid).
function minSatisfying (versions, range, options) {
  let rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }

  let best = null
  let bestSV = null
  for (const v of versions) {
    if (!rangeObj.test(v)) {
      continue
    }
    // Keep the first match, then anything strictly lower than the current best.
    if (best === null || bestSV.compare(v) === 1) {
      best = v
      bestSV = new SemVer(best, options)
    }
  }
  return best
}
exports . minVersion = minVersion
// The lowest version that can possibly satisfy `range`, or null. Tries the
// absolute minimums first, then derives a candidate from each lower-bound
// comparator.
function minVersion ( range , loose ) {
range = new Range ( range , loose )
var minver = new SemVer ( '0.0.0' )
if ( range . test ( minver ) ) {
return minver
}
minver = new SemVer ( '0.0.0-0' )
if ( range . test ( minver ) ) {
return minver
}
minver = null
for ( var i = 0 ; i < range . set . length ; ++ i ) {
var comparators = range . set [ i ]
comparators . forEach ( function ( comparator ) {
// Clone to avoid manipulating the comparator's semver object.
var compver = new SemVer ( comparator . semver . version )
switch ( comparator . operator ) {
case '>' :
// '>' excludes the bound itself: bump to the next possible version
// (next patch, or append the lowest prerelease identifier).
if ( compver . prerelease . length === 0 ) {
compver . patch ++
} else {
compver . prerelease . push ( 0 )
}
compver . raw = compver . format ( )
/* fallthrough */
case '' :
case '>=' :
// Track the smallest lower bound seen so far.
if ( ! minver || gt ( minver , compver ) ) {
minver = compver
}
break
case '<' :
case '<=' :
/* Ignore maximum versions */
break
/* istanbul ignore next */
default :
throw new Error ( 'Unexpected operation: ' + comparator . operator )
}
} )
}
// The candidate must itself satisfy the range (upper bounds may exclude it).
if ( minver && range . test ( minver ) ) {
return minver
}
return null
}
exports.validRange = validRange
// The normalized range string, '*' for an empty normalization, or null when
// the range cannot be parsed.
function validRange (range, options) {
  try {
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    const normalized = new Range(range, options).range
    return normalized || '*'
  } catch (er) {
    return null
  }
}
// Determine if version is less than all the versions possible in the range
exports . ltr = ltr
function ltr ( version , range , options ) {
return outside ( version , range , '<' , options )
}
// Determine if version is greater than all the versions possible in the range.
exports . gtr = gtr
function gtr ( version , range , options ) {
return outside ( version , range , '>' , options )
}
exports . outside = outside
// Is `version` outside `range` on the `hilo` side ('>' = above every
// satisfiable version, '<' = below every satisfiable version)?
function outside ( version , range , hilo , options ) {
version = new SemVer ( version , options )
range = new Range ( range , options )
var gtfn , ltefn , ltfn , comp , ecomp
// Select comparison helpers so the loop below can be written once in
// "gtr" terms; for '<' every comparison is mirrored.
switch ( hilo ) {
case '>' :
gtfn = gt
ltefn = lte
ltfn = lt
comp = '>'
ecomp = '>='
break
case '<' :
gtfn = lt
ltefn = gte
ltfn = gt
comp = '<'
ecomp = '<='
break
default :
throw new TypeError ( 'Must provide a hilo val of "<" or ">"' )
}
// If it satisfies the range it is not outside
if ( satisfies ( version , range , options ) ) {
return false
}
// From now on, variable terms are as if we're in "gtr" mode.
// but note that everything is flipped for the "ltr" function.
for ( var i = 0 ; i < range . set . length ; ++ i ) {
var comparators = range . set [ i ]
var high = null
var low = null
// Find the highest and lowest comparator in this set; ANY behaves
// like '>=0.0.0'.
comparators . forEach ( function ( comparator ) {
if ( comparator . semver === ANY ) {
comparator = new Comparator ( '>=0.0.0' )
}
high = high || comparator
low = low || comparator
if ( gtfn ( comparator . semver , high . semver , options ) ) {
high = comparator
} else if ( ltfn ( comparator . semver , low . semver , options ) ) {
low = comparator
}
} )
// If the edge version comparator has a operator then our version
// isn't outside it
if ( high . operator === comp || high . operator === ecomp ) {
return false
}
// If the lowest version comparator has an operator and our version
// is less than it then it isn't higher than the range
if ( ( ! low . operator || low . operator === comp ) &&
ltefn ( version , low . semver ) ) {
return false
} else if ( low . operator === ecomp && ltfn ( version , low . semver ) ) {
return false
}
}
return true
}
exports.prerelease = prerelease
// Prerelease identifiers of `version` (e.g. ['beta', 1]), or null when the
// version is invalid or has no prerelease part.
function prerelease (version, options) {
  const parsed = parse(version, options)
  if (parsed && parsed.prerelease.length) {
    return parsed.prerelease
  }
  return null
}
exports.intersects = intersects
// Do the two ranges admit at least one common version? Throws when either
// range is unparsable. (Note: `options` is deliberately not forwarded to
// Range.prototype.intersects, matching the original behavior.)
function intersects (r1, r2, options) {
  const range1 = new Range(r1, options)
  const range2 = new Range(r2, options)
  return range1.intersects(range2)
}
exports . coerce = coerce
// Coerce a loosely version-like string (or number) into a SemVer, e.g.
// '1.2.3.4' -> 1.2.3 (or 2.3.4 with options.rtl). Returns null when nothing
// coercible is found.
function coerce ( version , options ) {
if ( version instanceof SemVer ) {
return version
}
if ( typeof version === 'number' ) {
version = String ( version )
}
if ( typeof version !== 'string' ) {
return null
}
options = options || { }
var match = null
if ( ! options . rtl ) {
match = version . match ( re [ t . COERCE ] )
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ( ( next = re [ t . COERCERTL ] . exec ( version ) ) &&
( ! match || match . index + match [ 0 ] . length !== version . length )
) {
if ( ! match ||
next . index + next [ 0 ] . length !== match . index + match [ 0 ] . length ) {
match = next
}
// Re-position the stateful /g regex just past the matched major so
// overlapping candidates are considered.
re [ t . COERCERTL ] . lastIndex = next . index + next [ 1 ] . length + next [ 2 ] . length
}
// leave it in a clean state
re [ t . COERCERTL ] . lastIndex = - 1
}
if ( match === null ) {
return null
}
// match[2]=major, match[3]=minor, match[4]=patch; missing parts become 0.
return parse ( match [ 2 ] +
'.' + ( match [ 3 ] || '0' ) +
'.' + ( match [ 4 ] || '0' ) , options )
}
/***/ } ) ,
/***/ 2405 :
/***/ ( ( module ) => {
"use strict" ;
/ *
Based heavily on the Streaming Boyer - Moore - Horspool C ++ implementation
by Hongli Lai at : https : //github.com/FooBarWidget/boyer-moore-horspool
* /
// Byte-wise equality of buf1[pos1 .. pos1+num) and buf2[pos2 .. pos2+num).
// num === 0 is trivially true.
function memcmp (buf1, pos1, buf2, pos2, num) {
  let i = 0
  while (i < num) {
    if (buf1[pos1 + i] !== buf2[pos2 + i]) {
      return false
    }
    ++i
  }
  return true
}
// Streaming Boyer-Moore-Horspool matcher.
// The callback receives (isMatch, data, start, end, isSafeData) for both
// matched needles and flushed non-matching bytes.
class SBMH {
  constructor (needle, cb) {
    if (typeof cb !== 'function')
      throw new Error('Missing match callback')

    if (typeof needle === 'string')
      needle = Buffer.from(needle)
    else if (!Buffer.isBuffer(needle))
      throw new Error(`Expected Buffer for needle, got ${typeof needle}`)

    const needleLen = needle.length

    this.maxMatches = Infinity
    this.matches = 0

    this._cb = cb
    this._lookbehindSize = 0
    this._needle = needle
    this._bufPos = 0

    // Holds up to needleLen - 1 trailing bytes from previous chunks so
    // matches spanning chunk boundaries can be detected.
    this._lookbehind = Buffer.allocUnsafe(needleLen)

    // Bad-character shift table: every byte value defaults to a shift of
    // the full needle length...
    this._occ = new Array(256).fill(needleLen)

    // ...except bytes that occur in the needle (last byte excluded), which
    // shift by their distance from the needle's end.
    if (needleLen > 1) {
      for (let i = 0; i < needleLen - 1; ++i)
        this._occ[needle[i]] = needleLen - 1 - i
    }
  }

  // Forget all partial-match state (e.g. between streams).
  reset () {
    this.matches = 0
    this._lookbehindSize = 0
    this._bufPos = 0
  }

  // Scan a chunk, invoking the callback for matches and safe data; returns
  // the position consumed up to within the chunk.
  push (chunk, pos) {
    if (!Buffer.isBuffer(chunk))
      chunk = Buffer.from(chunk, 'latin1')
    const chunkLen = chunk.length
    this._bufPos = pos || 0
    // Keep feeding until the whole chunk is consumed or the match budget
    // is exhausted.
    let result
    while (result !== chunkLen && this.matches < this.maxMatches)
      result = feed(this, chunk)
    return result
  }

  // Flush whatever is buffered in the lookbehind as non-matching data,
  // then reset.
  destroy () {
    const lbSize = this._lookbehindSize
    if (lbSize)
      this._cb(false, this._lookbehind, 0, lbSize, false)
    this.reset()
  }
}
// One pass of the streaming BMH search over `data`. Emits callbacks for any
// match or safe (needle-free) bytes found, updates self's lookbehind state,
// and returns the position consumed (the caller loops until it equals
// data.length).
function feed ( self , data ) {
const len = data . length ;
const needle = self . _needle ;
const needleLen = needle . length ;
// Positive: points to a position in `data`
// pos == 3 points to data[3]
// Negative: points to a position in the lookbehind buffer
// pos == -2 points to lookbehind[lookbehindSize - 2]
let pos = - self . _lookbehindSize ;
const lastNeedleCharPos = needleLen - 1 ;
const lastNeedleChar = needle [ lastNeedleCharPos ] ;
const end = len - needleLen ;
const occ = self . _occ ;
const lookbehind = self . _lookbehind ;
if ( pos < 0 ) {
// Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
// search with character lookup code that considers both the
// lookbehind buffer and the current round's haystack data.
//
// Loop until
// there is a match.
// or until
// we've moved past the position that requires the
// lookbehind buffer. In this case we switch to the
// optimized loop.
// or until
// the character to look at lies outside the haystack.
while ( pos < 0 && pos <= end ) {
const nextPos = pos + lastNeedleCharPos ;
const ch = ( nextPos < 0
? lookbehind [ self . _lookbehindSize + nextPos ]
: data [ nextPos ] ) ;
if ( ch === lastNeedleChar
&& matchNeedle ( self , data , pos , lastNeedleCharPos ) ) {
// NOTE(review): _lookbehindSize is zeroed before the comparison below,
// so `pos > -self._lookbehindSize` reads as `pos > 0`, which is never
// true here (pos < 0) — the first branch looks unreachable. This
// mirrors the vendored upstream streamsearch source; confirm against
// upstream before changing.
self . _lookbehindSize = 0 ;
++ self . matches ;
if ( pos > - self . _lookbehindSize )
self . _cb ( true , lookbehind , 0 , self . _lookbehindSize + pos , false ) ;
else
self . _cb ( true , undefined , 0 , 0 , true ) ;
return ( self . _bufPos = pos + needleLen ) ;
}
pos += occ [ ch ] ;
}
// No match.
// There's too few data for Boyer-Moore-Horspool to run,
// so let's use a different algorithm to skip as much as
// we can.
// Forward pos until
// the trailing part of lookbehind + data
// looks like the beginning of the needle
// or until
// pos == 0
while ( pos < 0 && ! matchNeedle ( self , data , pos , len - pos ) )
++ pos ;
if ( pos < 0 ) {
// Cut off part of the lookbehind buffer that has
// been processed and append the entire haystack
// into it.
const bytesToCutOff = self . _lookbehindSize + pos ;
if ( bytesToCutOff > 0 ) {
// The cut off data is guaranteed not to contain the needle.
self . _cb ( false , lookbehind , 0 , bytesToCutOff , false ) ;
}
self . _lookbehindSize -= bytesToCutOff ;
lookbehind . copy ( lookbehind , 0 , bytesToCutOff , self . _lookbehindSize ) ;
lookbehind . set ( data , self . _lookbehindSize ) ;
self . _lookbehindSize += len ;
self . _bufPos = len ;
return len ;
}
// Discard lookbehind buffer.
self . _cb ( false , lookbehind , 0 , self . _lookbehindSize , false ) ;
self . _lookbehindSize = 0 ;
}
pos += self . _bufPos ;
const firstNeedleChar = needle [ 0 ] ;
// Lookbehind buffer is now empty. Perform Boyer-Moore-Horspool
// search with optimized character lookup code that only considers
// the current round's haystack data.
while ( pos <= end ) {
const ch = data [ pos + lastNeedleCharPos ] ;
if ( ch === lastNeedleChar
&& data [ pos ] === firstNeedleChar
&& memcmp ( needle , 0 , data , pos , lastNeedleCharPos ) ) {
++ self . matches ;
if ( pos > 0 )
self . _cb ( true , data , self . _bufPos , pos , true ) ;
else
self . _cb ( true , undefined , 0 , 0 , true ) ;
return ( self . _bufPos = pos + needleLen ) ;
}
pos += occ [ ch ] ;
}
// There was no match. If there's trailing haystack data that we cannot
// match yet using the Boyer-Moore-Horspool algorithm (because the trailing
// data is less than the needle size) then match using a modified
// algorithm that starts matching from the beginning instead of the end.
// Whatever trailing data is left after running this algorithm is added to
// the lookbehind buffer.
while ( pos < len ) {
if ( data [ pos ] !== firstNeedleChar
|| ! memcmp ( data , pos , needle , 0 , len - pos ) ) {
++ pos ;
continue ;
}
data . copy ( lookbehind , 0 , pos , len ) ;
self . _lookbehindSize = len - pos ;
break ;
}
// Everything until `pos` is guaranteed not to contain needle data.
if ( pos > 0 )
self . _cb ( false , data , self . _bufPos , pos < len ? pos : len , true ) ;
self . _bufPos = len ;
return len ;
}
// Check whether `len` bytes of the needle match the haystack beginning at
// `pos`. Negative positions address the tail of the lookbehind buffer
// (pos === -2 -> lookbehind[lookbehindSize - 2]).
function matchNeedle (self, data, pos, len) {
  const lb = self._lookbehind
  const lbSize = self._lookbehindSize
  const needle = self._needle

  let i = 0
  let at = pos
  while (i < len) {
    const haystackByte = at < 0 ? lb[lbSize + at] : data[at]
    if (haystackByte !== needle[i]) {
      return false
    }
    ++i
    ++at
  }
  return true
}
module . exports = SBMH ;
/***/ } ) ,
/***/ 4294 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
module . exports = _ _nccwpck _require _ _ ( 4219 ) ;
/***/ } ) ,
/***/ 4219 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
var net = _ _nccwpck _require _ _ ( 1808 ) ;
var tls = _ _nccwpck _require _ _ ( 4404 ) ;
var http = _ _nccwpck _require _ _ ( 3685 ) ;
var https = _ _nccwpck _require _ _ ( 5687 ) ;
var events = _ _nccwpck _require _ _ ( 2361 ) ;
var assert = _ _nccwpck _require _ _ ( 9491 ) ;
var util = _ _nccwpck _require _ _ ( 3837 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
// Agent for plain HTTP requests tunneled through an HTTP proxy.
function httpOverHttp (options) {
  return Object.assign(new TunnelingAgent(options), {
    request: http.request
  })
}
// Agent for HTTPS requests tunneled through an HTTP proxy: the CONNECT
// request is plain HTTP, the tunneled socket is then upgraded to TLS.
function httpsOverHttp (options) {
  return Object.assign(new TunnelingAgent(options), {
    request: http.request,
    createSocket: createSecureSocket,
    defaultPort: 443
  })
}
// Agent for plain HTTP requests tunneled through an HTTPS proxy.
function httpOverHttps (options) {
  return Object.assign(new TunnelingAgent(options), {
    request: https.request
  })
}
// Agent for HTTPS requests tunneled through an HTTPS proxy.
function httpsOverHttps (options) {
  return Object.assign(new TunnelingAgent(options), {
    request: https.request,
    createSocket: createSecureSocket,
    defaultPort: 443
  })
}
// HTTP agent that tunnels requests through a proxy via CONNECT. Inherits
// from EventEmitter (wired up with util.inherits below); freed sockets are
// either handed to a queued request for the same origin or destroyed.
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ; // queued requests waiting for a free socket
self . sockets = [ ] ; // sockets (or placeholders) currently in use
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect the request to connect same origin server,
// reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
// No pending request for this origin: tunneled sockets are not pooled.
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
// Attach a request to the agent: queue it when at the socket limit,
// otherwise open a tunneled socket for it.
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
// On close/removal, detach all listeners so the socket can be GC'd and
// does not re-enter the free pool.
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
/**
 * Establish a CONNECT tunnel through the proxy and invoke `cb(socket)`
 * with the raw tunnel socket once the proxy answers 200.
 *
 * A placeholder object is pushed into the pool immediately so the socket
 * count stays accurate while the handshake is in flight; it is swapped
 * for the real socket on success and removed on failure.
 */
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated (and string/number-ambiguous)
    // `new Buffer(...)` constructor — see Node.js DEP0005.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky. Defer so the 'upgrade' path behaves like 'connect'.
    process.nextTick(function () {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      // Proxy refused the tunnel; surface an ECONNRESET on the request.
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      // A compliant proxy sends no body with a 200 CONNECT response.
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Promote the placeholder to the real socket.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();
    debug('tunneling socket could not be established, cause=%s\n',
      cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
      'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
/**
 * Remove a socket (or handshake placeholder) from the pool and, if a
 * request is waiting, open a replacement tunnel for it.
 */
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var index = this.sockets.indexOf(socket);
  if (index < 0) {
    // Not one of ours — nothing to do.
    return;
  }
  this.sockets.splice(index, 1);

  var queued = this.requests.shift();
  if (!queued) {
    return;
  }
  // If we have pending requests and a socket gets closed a new one
  // needs to be created to take over in the pool for the one that closed.
  this.createSocket(queued, function (newSocket) {
    queued.request.onSocket(newSocket);
  });
};
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
/**
 * Normalize connect arguments into a single options object.
 * Node >= 0.11 style passes an options object as the first argument,
 * which is returned untouched; the legacy (host, port, localAddress)
 * signature (since v0.10) is packed into a fresh object.
 */
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // already an options object (v0.11 or later)
  }
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
/**
 * Shallow-merge every source argument into `target`, left to right.
 * Keys whose value is `undefined` are skipped, so later sources only
 * override keys they actually define. Mutates and returns `target`.
 */
function mergeOptions(target) {
  for (var argIndex = 1; argIndex < arguments.length; ++argIndex) {
    var source = arguments[argIndex];
    if (typeof source !== 'object') {
      continue; // ignore non-object arguments entirely
    }
    var names = Object.keys(source);
    for (var n = 0; n < names.length; ++n) {
      var name = names[n];
      if (source[name] !== undefined) {
        target[name] = source[name];
      }
    }
  }
  return target;
}
// Debug logging for the tunnel module: enabled only when NODE_DEBUG
// contains the word "tunnel", otherwise a no-op. Enabled messages are
// written to stderr with a "TUNNEL:" prefix.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function () {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      // Prefix in-place so printf-style format strings keep working.
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function () {};
}
exports.debug = debug; // for test
/***/ } ) ,
/***/ 1773:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

// webpack module 1773: undici package entry point. Wires together the
// dispatcher implementations (Client/Pool/BalancedPool/Agent/ProxyAgent),
// the callback/promise API helpers (request/stream/pipeline/connect/
// upgrade), mock utilities, and Node-version-gated fetch/cookie/WebSocket
// exports.
const Client = __nccwpck_require__(3598)
const Dispatcher = __nccwpck_require__(412)
const errors = __nccwpck_require__(8045)
const Pool = __nccwpck_require__(4634)
const BalancedPool = __nccwpck_require__(7931)
const Agent = __nccwpck_require__(7890)
const util = __nccwpck_require__(3983)
const { InvalidArgumentError } = errors
const api = __nccwpck_require__(4059)
const buildConnector = __nccwpck_require__(2067)
const MockClient = __nccwpck_require__(8687)
const MockAgent = __nccwpck_require__(6771)
const MockPool = __nccwpck_require__(6193)
const mockErrors = __nccwpck_require__(888)
const ProxyAgent = __nccwpck_require__(7858)
const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(1892)
const DecoratorHandler = __nccwpck_require__(6930)
const RedirectHandler = __nccwpck_require__(2860)
const createRedirectInterceptor = __nccwpck_require__(8861)

// Probe for node:crypto (module id 6113); WebSocket support below is
// gated on its availability.
let hasCrypto
try {
  __nccwpck_require__(6113)
  hasCrypto = true
} catch {
  hasCrypto = false
}

// Mix the high-level API methods (request, stream, ...) onto every
// Dispatcher instance.
Object.assign(Dispatcher.prototype, api)

module.exports.Dispatcher = Dispatcher
module.exports.Client = Client
module.exports.Pool = Pool
module.exports.BalancedPool = BalancedPool
module.exports.Agent = Agent
module.exports.ProxyAgent = ProxyAgent

module.exports.DecoratorHandler = DecoratorHandler
module.exports.RedirectHandler = RedirectHandler
module.exports.createRedirectInterceptor = createRedirectInterceptor

module.exports.buildConnector = buildConnector
module.exports.errors = errors

// Wrap an api function so it can be invoked as (url[, opts][, handler]):
// validates url/opts, resolves the dispatcher (opts.dispatcher or the
// global one), and normalizes origin/path/method before dispatching.
function makeDispatcher(fn) {
  return (url, opts, handler) => {
    if (typeof opts === 'function') {
      handler = opts
      opts = null
    }

    if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {
      throw new InvalidArgumentError('invalid url')
    }

    if (opts != null && typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (opts && opts.path != null) {
      if (typeof opts.path !== 'string') {
        throw new InvalidArgumentError('invalid opts.path')
      }

      let path = opts.path
      if (!opts.path.startsWith('/')) {
        path = `/${path}`
      }

      // opts.path overrides whatever path the url carried.
      url = new URL(util.parseOrigin(url).origin + path)
    } else {
      if (!opts) {
        opts = typeof url === 'object' ? url : {}
      }

      url = util.parseURL(url)
    }

    const { agent, dispatcher = getGlobalDispatcher() } = opts

    if (agent) {
      throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')
    }

    return fn.call(dispatcher, {
      ...opts,
      origin: url.origin,
      path: url.search ? `${url.pathname}${url.search}` : url.pathname,
      // Default method: PUT when a body is present, otherwise GET.
      method: opts.method || (opts.body ? 'PUT' : 'GET')
    }, handler)
  }
}

module.exports.setGlobalDispatcher = setGlobalDispatcher
module.exports.getGlobalDispatcher = getGlobalDispatcher

// fetch and friends require Node >= 16.8.
if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {
  // Lazily load the fetch implementation on first call.
  let fetchImpl = null
  module.exports.fetch = async function fetch(resource) {
    if (!fetchImpl) {
      fetchImpl = (__nccwpck_require__(4881).fetch)
    }

    try {
      return await fetchImpl(...arguments)
    } catch (err) {
      // Re-point the stack at this wrapper before rethrowing.
      Error.captureStackTrace(err, this)
      throw err
    }
  }
  module.exports.Headers = __nccwpck_require__(554).Headers
  module.exports.Response = __nccwpck_require__(7823).Response
  module.exports.Request = __nccwpck_require__(8359).Request
  module.exports.FormData = __nccwpck_require__(2015).FormData
  module.exports.File = __nccwpck_require__(8511).File
  module.exports.FileReader = __nccwpck_require__(1446).FileReader

  const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(1246)

  module.exports.setGlobalOrigin = setGlobalOrigin
  module.exports.getGlobalOrigin = getGlobalOrigin

  const { CacheStorage } = __nccwpck_require__(7907)
  const { kConstruct } = __nccwpck_require__(9174)

  // Cache & CacheStorage are tightly coupled with fetch. Even if it may run
  // in an older version of Node, it doesn't have any use without fetch.
  module.exports.caches = new CacheStorage(kConstruct)
}

// Cookie and MIME-type helpers require Node >= 16.
if (util.nodeMajor >= 16) {
  const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(1724)

  module.exports.deleteCookie = deleteCookie
  module.exports.getCookies = getCookies
  module.exports.getSetCookies = getSetCookies
  module.exports.setCookie = setCookie

  const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685)

  module.exports.parseMIMEType = parseMIMEType
  module.exports.serializeAMimeType = serializeAMimeType
}

// WebSocket requires Node >= 18 and a working crypto module.
if (util.nodeMajor >= 18 && hasCrypto) {
  const { WebSocket } = __nccwpck_require__(4284)

  module.exports.WebSocket = WebSocket
}

module.exports.request = makeDispatcher(api.request)
module.exports.stream = makeDispatcher(api.stream)
module.exports.pipeline = makeDispatcher(api.pipeline)
module.exports.connect = makeDispatcher(api.connect)
module.exports.upgrade = makeDispatcher(api.upgrade)

module.exports.MockClient = MockClient
module.exports.MockPool = MockPool
module.exports.MockAgent = MockAgent
module.exports.mockErrors = mockErrors

/***/ }),
/***/ 7890:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

// webpack module 7890: undici Agent — a dispatcher that lazily creates and
// caches one Client/Pool per origin, holding them via WeakRef so idle
// dispatchers can be garbage-collected.
const { InvalidArgumentError } = __nccwpck_require__(8045)
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(2785)
const DispatcherBase = __nccwpck_require__(4839)
const Pool = __nccwpck_require__(4634)
const Client = __nccwpck_require__(3598)
const util = __nccwpck_require__(3983)
const createRedirectInterceptor = __nccwpck_require__(8861)
const { WeakRef, FinalizationRegistry } = __nccwpck_require__(6436)()

const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kMaxRedirections = Symbol('maxRedirections')
const kOnDrain = Symbol('onDrain')
const kFactory = Symbol('factory')
const kFinalizer = Symbol('finalizer')
const kOptions = Symbol('options')

// Default per-origin dispatcher: a single Client when connections === 1,
// otherwise a Pool.
function defaultFactory(origin, opts) {
  return opts && opts.connections === 1
    ? new Client(origin, opts)
    : new Pool(origin, opts)
}

class Agent extends DispatcherBase {
  constructor({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {
    super()

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    // Copy connect options so later caller mutation cannot leak in.
    if (connect && typeof connect !== 'function') {
      connect = { ...connect }
    }

    // Caller-supplied Agent interceptors win; otherwise install the
    // redirect-following interceptor configured with maxRedirections.
    this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)
      ? options.interceptors.Agent
      : [createRedirectInterceptor({ maxRedirections })]

    this[kOptions] = { ...util.deepClone(options), connect }
    this[kOptions].interceptors = options.interceptors
      ? { ...options.interceptors }
      : undefined
    this[kMaxRedirections] = maxRedirections
    this[kFactory] = factory
    this[kClients] = new Map()
    // Drop map entries whose dispatcher has been garbage-collected.
    this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => {
      const ref = this[kClients].get(key)
      if (ref !== undefined && ref.deref() === undefined) {
        this[kClients].delete(key)
      }
    })

    const agent = this

    // Re-emit per-origin dispatcher events with the agent prepended to the
    // targets chain.
    this[kOnDrain] = (origin, targets) => {
      agent.emit('drain', origin, [agent, ...targets])
    }

    this[kOnConnect] = (origin, targets) => {
      agent.emit('connect', origin, [agent, ...targets])
    }

    this[kOnDisconnect] = (origin, targets, err) => {
      agent.emit('disconnect', origin, [agent, ...targets], err)
    }

    this[kOnConnectionError] = (origin, targets, err) => {
      agent.emit('connectionError', origin, [agent, ...targets], err)
    }
  }

  // Total in-flight requests across all live per-origin dispatchers.
  get [kRunning]() {
    let ret = 0
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore next: gc is undeterministic */
      if (client) {
        ret += client[kRunning]
      }
    }
    return ret
  }

  [kDispatch](opts, handler) {
    let key
    if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
      key = String(opts.origin)
    } else {
      throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
    }

    const ref = this[kClients].get(key)

    let dispatcher = ref ? ref.deref() : null
    if (!dispatcher) {
      // First request for this origin (or its dispatcher was collected):
      // build one, forward its events, and cache it weakly.
      dispatcher = this[kFactory](opts.origin, this[kOptions])
        .on('drain', this[kOnDrain])
        .on('connect', this[kOnConnect])
        .on('disconnect', this[kOnDisconnect])
        .on('connectionError', this[kOnConnectionError])

      this[kClients].set(key, new WeakRef(dispatcher))
      this[kFinalizer].register(dispatcher, key)
    }

    return dispatcher.dispatch(opts, handler)
  }

  async [kClose]() {
    const closePromises = []
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore else: gc is undeterministic */
      if (client) {
        closePromises.push(client.close())
      }
    }

    await Promise.all(closePromises)
  }

  async [kDestroy](err) {
    const destroyPromises = []
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore else: gc is undeterministic */
      if (client) {
        destroyPromises.push(client.destroy(err))
      }
    }

    await Promise.all(destroyPromises)
  }
}

module.exports = Agent

/***/ }),
/***/ 7032 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
const { RequestAbortedError } = _ _nccwpck _require _ _ ( 8045 )
const kListener = Symbol ( 'kListener' )
const kSignal = Symbol ( 'kSignal' )
function abort ( self ) {
if ( self . abort ) {
self . abort ( )
} else {
self . onError ( new RequestAbortedError ( ) )
}
}
function addSignal ( self , signal ) {
self [ kSignal ] = null
self [ kListener ] = null
if ( ! signal ) {
return
}
if ( signal . aborted ) {
abort ( self )
return
}
self [ kSignal ] = signal
self [ kListener ] = ( ) => {
abort ( self )
}
if ( 'addEventListener' in self [ kSignal ] ) {
self [ kSignal ] . addEventListener ( 'abort' , self [ kListener ] )
} else {
self [ kSignal ] . addListener ( 'abort' , self [ kListener ] )
}
}
function removeSignal ( self ) {
if ( ! self [ kSignal ] ) {
return
}
if ( 'removeEventListener' in self [ kSignal ] ) {
self [ kSignal ] . removeEventListener ( 'abort' , self [ kListener ] )
} else {
self [ kSignal ] . removeListener ( 'abort' , self [ kListener ] )
}
self [ kSignal ] = null
self [ kListener ] = null
}
module . exports = {
addSignal ,
removeSignal
}
/***/ } ) ,
/***/ 9744:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

// webpack module 9744: undici `connect` API — issue a CONNECT request and
// hand the upgraded socket plus response headers to a callback (or a
// Promise when no callback is given).
const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045)
const { AsyncResource } = __nccwpck_require__(852)
const util = __nccwpck_require__(3983)
const { addSignal, removeSignal } = __nccwpck_require__(7032)

class ConnectHandler extends AsyncResource {
  constructor(opts, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    const { signal, opaque, responseHeaders } = opts

    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
    }

    super('UNDICI_CONNECT')

    this.opaque = opaque || null
    this.responseHeaders = responseHeaders || null
    this.callback = callback
    this.abort = null

    addSignal(this, signal)
  }

  onConnect(abort, context) {
    // callback is cleared once consumed; a second onConnect means the
    // request was already completed or aborted.
    if (!this.callback) {
      throw new RequestAbortedError()
    }

    this.abort = abort
    this.context = context
  }

  onHeaders() {
    // A CONNECT request must complete via the upgrade path, not a normal
    // response.
    throw new SocketError('bad connect', null)
  }

  onUpgrade(statusCode, rawHeaders, socket) {
    const { callback, opaque, context } = this

    removeSignal(this)

    this.callback = null

    // 'raw' keeps repeated headers as arrays; default folds them into an
    // object.
    const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)

    this.runInAsyncScope(callback, null, null, {
      statusCode,
      headers,
      socket,
      opaque,
      context
    })
  }

  onError(err) {
    const { callback, opaque } = this

    removeSignal(this)

    if (callback) {
      this.callback = null
      queueMicrotask(() => {
        this.runInAsyncScope(callback, null, err, { opaque })
      })
    }
  }
}

// Entry point: promisifies itself when no callback is supplied; dispatch
// errors are delivered asynchronously through the callback.
function connect(opts, callback) {
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      connect.call(this, opts, (err, data) => {
        return err ? reject(err) : resolve(data)
      })
    })
  }

  try {
    const connectHandler = new ConnectHandler(opts, callback)
    this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}

module.exports = connect

/***/ }),
/***/ 8752:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

// webpack module 8752: undici `pipeline` API — exposes a request as a
// Duplex stream: writes become the request body, reads come from the
// handler-transformed response body.
const {
  Readable,
  Duplex,
  PassThrough
} = __nccwpck_require__(2781)
const {
  InvalidArgumentError,
  InvalidReturnValueError,
  RequestAbortedError
} = __nccwpck_require__(8045)
const util = __nccwpck_require__(3983)
const { AsyncResource } = __nccwpck_require__(852)
const { addSignal, removeSignal } = __nccwpck_require__(7032)
const assert = __nccwpck_require__(9491)

const kResume = Symbol('resume')

// Readable side feeding the request body; kResume stores the pending
// write callback used for backpressure.
class PipelineRequest extends Readable {
  constructor() {
    super({ autoDestroy: true })

    this[kResume] = null
  }

  _read() {
    const { [kResume]: resume } = this

    if (resume) {
      this[kResume] = null
      resume()
    }
  }

  _destroy(err, callback) {
    // Release any pending write callback before tearing down.
    this._read()

    callback(err)
  }
}

// Readable wrapping the raw response body; resume re-enables the
// underlying dispatcher when the consumer wants more data.
class PipelineResponse extends Readable {
  constructor(resume) {
    super({ autoDestroy: true })
    this[kResume] = resume
  }

  _read() {
    this[kResume]()
  }

  _destroy(err, callback) {
    // Destruction before 'end' counts as an abort.
    if (!err && !this._readableState.endEmitted) {
      err = new RequestAbortedError()
    }

    callback(err)
  }
}

class PipelineHandler extends AsyncResource {
  constructor(opts, handler) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (typeof handler !== 'function') {
      throw new InvalidArgumentError('invalid handler')
    }

    const { signal, method, opaque, onInfo, responseHeaders } = opts

    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
    }

    if (method === 'CONNECT') {
      throw new InvalidArgumentError('invalid method')
    }

    if (onInfo && typeof onInfo !== 'function') {
      throw new InvalidArgumentError('invalid onInfo callback')
    }

    super('UNDICI_PIPELINE')

    this.opaque = opaque || null
    this.responseHeaders = responseHeaders || null
    this.handler = handler
    this.abort = null
    this.context = null
    this.onInfo = onInfo || null

    this.req = new PipelineRequest().on('error', util.nop)

    // `ret` is the Duplex returned to the caller: its writable side feeds
    // this.req (the request body) and its readable side is fed by the
    // handler's returned body.
    this.ret = new Duplex({
      readableObjectMode: opts.objectMode,
      autoDestroy: true,
      read: () => {
        const { body } = this

        if (body && body.resume) {
          body.resume()
        }
      },
      write: (chunk, encoding, callback) => {
        const { req } = this

        // Backpressure: hold the write callback until _read drains it.
        if (req.push(chunk, encoding) || req._readableState.destroyed) {
          callback()
        } else {
          req[kResume] = callback
        }
      },
      destroy: (err, callback) => {
        const { body, req, res, ret, abort } = this

        if (!err && !ret._readableState.endEmitted) {
          err = new RequestAbortedError()
        }

        if (abort && err) {
          abort()
        }

        // Tear down every stream involved in the pipeline.
        util.destroy(body, err)
        util.destroy(req, err)
        util.destroy(res, err)

        removeSignal(this)

        callback(err)
      }
    }).on('prefinish', () => {
      const { req } = this

      // Node < 15 does not call _final in same tick.
      req.push(null)
    })

    this.res = null

    addSignal(this, signal)
  }

  onConnect(abort, context) {
    const { ret, res } = this

    assert(!res, 'pipeline cannot be retried')

    if (ret.destroyed) {
      throw new RequestAbortedError()
    }

    this.abort = abort
    this.context = context
  }

  onHeaders(statusCode, rawHeaders, resume) {
    const { opaque, handler, context } = this

    // 1xx informational responses only feed onInfo, never the handler.
    if (statusCode < 200) {
      if (this.onInfo) {
        const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
        this.onInfo({ statusCode, headers })
      }
      return
    }

    this.res = new PipelineResponse(resume)

    let body
    try {
      this.handler = null
      const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
      // The user handler transforms the response into the readable body
      // exposed through `ret`.
      body = this.runInAsyncScope(handler, null, {
        statusCode,
        headers,
        opaque,
        body: this.res,
        context
      })
    } catch (err) {
      this.res.on('error', util.nop)
      throw err
    }

    if (!body || typeof body.on !== 'function') {
      throw new InvalidReturnValueError('expected Readable')
    }

    // Pipe the handler's body into `ret`, propagating backpressure and
    // converting premature close into an abort error.
    body
      .on('data', (chunk) => {
        const { ret, body } = this

        if (!ret.push(chunk) && body.pause) {
          body.pause()
        }
      })
      .on('error', (err) => {
        const { ret } = this

        util.destroy(ret, err)
      })
      .on('end', () => {
        const { ret } = this

        ret.push(null)
      })
      .on('close', () => {
        const { ret } = this

        if (!ret._readableState.ended) {
          util.destroy(ret, new RequestAbortedError())
        }
      })

    this.body = body
  }

  onData(chunk) {
    const { res } = this
    return res.push(chunk)
  }

  onComplete(trailers) {
    const { res } = this
    res.push(null)
  }

  onError(err) {
    const { ret } = this
    this.handler = null
    util.destroy(ret, err)
  }
}

// Entry point: returns the Duplex; construction errors surface as an
// already-destroyed PassThrough so the caller still gets a stream.
function pipeline(opts, handler) {
  try {
    const pipelineHandler = new PipelineHandler(opts, handler)
    this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)
    return pipelineHandler.ret
  } catch (err) {
    return new PassThrough().destroy(err)
  }
}

module.exports = pipeline

/***/ }),
/***/ 5448:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

// webpack module 5448: undici `request` API — callback/promise request
// helper that resolves with { statusCode, headers, trailers, body, ... }
// where body is a Readable.
const Readable = __nccwpck_require__(3858)
const {
  InvalidArgumentError,
  RequestAbortedError
} = __nccwpck_require__(8045)
const util = __nccwpck_require__(3983)
const { getResolveErrorBodyCallback } = __nccwpck_require__(7474)
const { AsyncResource } = __nccwpck_require__(852)
const { addSignal, removeSignal } = __nccwpck_require__(7032)

class RequestHandler extends AsyncResource {
  constructor(opts, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts

    // Validation happens inside try/catch so that a stream body passed by
    // the caller is destroyed (not leaked) when an option is invalid.
    try {
      if (typeof callback !== 'function') {
        throw new InvalidArgumentError('invalid callback')
      }

      if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
        throw new InvalidArgumentError('invalid highWaterMark')
      }

      if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
        throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
      }

      if (method === 'CONNECT') {
        throw new InvalidArgumentError('invalid method')
      }

      if (onInfo && typeof onInfo !== 'function') {
        throw new InvalidArgumentError('invalid onInfo callback')
      }

      super('UNDICI_REQUEST')
    } catch (err) {
      if (util.isStream(body)) {
        util.destroy(body.on('error', util.nop), err)
      }
      throw err
    }

    this.responseHeaders = responseHeaders || null
    this.opaque = opaque || null
    this.callback = callback
    this.res = null
    this.abort = null
    this.body = body
    this.trailers = {}
    this.context = null
    this.onInfo = onInfo || null
    this.throwOnError = throwOnError
    this.highWaterMark = highWaterMark

    // Request-body stream errors abort the whole request.
    if (util.isStream(body)) {
      body.on('error', (err) => {
        this.onError(err)
      })
    }

    addSignal(this, signal)
  }

  onConnect(abort, context) {
    if (!this.callback) {
      throw new RequestAbortedError()
    }

    this.abort = abort
    this.context = context
  }

  onHeaders(statusCode, rawHeaders, resume, statusMessage) {
    const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this

    const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)

    // 1xx informational responses only feed onInfo.
    if (statusCode < 200) {
      if (this.onInfo) {
        this.onInfo({ statusCode, headers })
      }
      return
    }

    // content-type is looked up on the object-form headers even when the
    // caller asked for raw headers.
    const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
    const contentType = parsedHeaders['content-type']
    const body = new Readable({ resume, abort, contentType, highWaterMark })

    this.callback = null
    this.res = body
    if (callback !== null) {
      if (this.throwOnError && statusCode >= 400) {
        // Drain the body and deliver an error describing the failed
        // response instead of resolving normally.
        this.runInAsyncScope(getResolveErrorBodyCallback, null,
          { callback, body, contentType, statusCode, statusMessage, headers }
        )
      } else {
        this.runInAsyncScope(callback, null, null, {
          statusCode,
          headers,
          trailers: this.trailers,
          opaque,
          body,
          context
        })
      }
    }
  }

  onData(chunk) {
    const { res } = this
    return res.push(chunk)
  }

  onComplete(trailers) {
    const { res } = this

    removeSignal(this)

    // Fill the trailers object handed to the callback earlier.
    util.parseHeaders(trailers, this.trailers)

    res.push(null)
  }

  onError(err) {
    const { res, callback, body, opaque } = this

    removeSignal(this)

    if (callback) {
      // TODO: Does this need queueMicrotask?
      this.callback = null
      queueMicrotask(() => {
        this.runInAsyncScope(callback, null, err, { opaque })
      })
    }

    if (res) {
      this.res = null
      // Ensure all queued handlers are invoked before destroying res.
      queueMicrotask(() => {
        util.destroy(res, err)
      })
    }

    if (body) {
      this.body = null
      util.destroy(body, err)
    }
  }
}

// Entry point: promisifies itself when no callback is supplied; dispatch
// errors are delivered asynchronously through the callback.
function request(opts, callback) {
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      request.call(this, opts, (err, data) => {
        return err ? reject(err) : resolve(data)
      })
    })
  }

  try {
    this.dispatch(opts, new RequestHandler(opts, callback))
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}

module.exports = request

/***/ }),
/***/ 5395 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { finished , PassThrough } = _ _nccwpck _require _ _ ( 2781 )
const {
InvalidArgumentError ,
InvalidReturnValueError ,
RequestAbortedError
} = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { getResolveErrorBodyCallback } = _ _nccwpck _require _ _ ( 7474 )
const { AsyncResource } = _ _nccwpck _require _ _ ( 852 )
const { addSignal , removeSignal } = _ _nccwpck _require _ _ ( 7032 )
class StreamHandler extends AsyncResource {
constructor ( opts , factory , callback ) {
if ( ! opts || typeof opts !== 'object' ) {
throw new InvalidArgumentError ( 'invalid opts' )
}
const { signal , method , opaque , body , onInfo , responseHeaders , throwOnError } = opts
try {
if ( typeof callback !== 'function' ) {
throw new InvalidArgumentError ( 'invalid callback' )
}
if ( typeof factory !== 'function' ) {
throw new InvalidArgumentError ( 'invalid factory' )
}
if ( signal && typeof signal . on !== 'function' && typeof signal . addEventListener !== 'function' ) {
throw new InvalidArgumentError ( 'signal must be an EventEmitter or EventTarget' )
}
if ( method === 'CONNECT' ) {
throw new InvalidArgumentError ( 'invalid method' )
}
if ( onInfo && typeof onInfo !== 'function' ) {
throw new InvalidArgumentError ( 'invalid onInfo callback' )
}
super ( 'UNDICI_STREAM' )
} catch ( err ) {
if ( util . isStream ( body ) ) {
util . destroy ( body . on ( 'error' , util . nop ) , err )
}
throw err
}
this . responseHeaders = responseHeaders || null
this . opaque = opaque || null
this . factory = factory
this . callback = callback
this . res = null
this . abort = null
this . context = null
this . trailers = null
this . body = body
this . onInfo = onInfo || null
this . throwOnError = throwOnError || false
if ( util . isStream ( body ) ) {
body . on ( 'error' , ( err ) => {
this . onError ( err )
} )
}
addSignal ( this , signal )
}
onConnect ( abort , context ) {
if ( ! this . callback ) {
throw new RequestAbortedError ( )
}
this . abort = abort
this . context = context
}
onHeaders ( statusCode , rawHeaders , resume , statusMessage ) {
const { factory , opaque , context , callback , responseHeaders } = this
const headers = responseHeaders === 'raw' ? util . parseRawHeaders ( rawHeaders ) : util . parseHeaders ( rawHeaders )
if ( statusCode < 200 ) {
if ( this . onInfo ) {
this . onInfo ( { statusCode , headers } )
}
return
}
this . factory = null
let res
if ( this . throwOnError && statusCode >= 400 ) {
const parsedHeaders = responseHeaders === 'raw' ? util . parseHeaders ( rawHeaders ) : headers
const contentType = parsedHeaders [ 'content-type' ]
res = new PassThrough ( )
this . callback = null
this . runInAsyncScope ( getResolveErrorBodyCallback , null ,
{ callback , body : res , contentType , statusCode , statusMessage , headers }
)
} else {
res = this . runInAsyncScope ( factory , null , {
statusCode ,
headers ,
opaque ,
context
} )
if (
! res ||
typeof res . write !== 'function' ||
typeof res . end !== 'function' ||
typeof res . on !== 'function'
) {
throw new InvalidReturnValueError ( 'expected Writable' )
}
// TODO: Avoid finished. It registers an unnecessary amount of listeners.
finished ( res , { readable : false } , ( err ) => {
const { callback , res , opaque , trailers , abort } = this
this . res = null
if ( err || ! res . readable ) {
util . destroy ( res , err )
}
this . callback = null
this . runInAsyncScope ( callback , null , err || null , { opaque , trailers } )
if ( err ) {
abort ( )
}
} )
}
res . on ( 'drain' , resume )
this . res = res
const needDrain = res . writableNeedDrain !== undefined
? res . writableNeedDrain
: res . _writableState && res . _writableState . needDrain
return needDrain !== true
}
onData ( chunk ) {
const { res } = this
return res . write ( chunk )
}
onComplete ( trailers ) {
const { res } = this
removeSignal ( this )
this . trailers = util . parseHeaders ( trailers )
res . end ( )
}
onError ( err ) {
const { res , callback , opaque , body } = this
removeSignal ( this )
this . factory = null
if ( res ) {
this . res = null
util . destroy ( res , err )
} else if ( callback ) {
this . callback = null
queueMicrotask ( ( ) => {
this . runInAsyncScope ( callback , null , err , { opaque } )
} )
}
if ( body ) {
this . body = null
util . destroy ( body , err )
}
}
}
/**
 * Dispatch a streamed request. `factory` must return a Writable that the
 * response body is piped into. When no callback is supplied, a Promise for
 * the completion value is returned instead.
 */
function stream (opts, factory, callback) {
  // Promisified form.
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      stream.call(this, opts, factory, (err, data) => {
        if (err) {
          reject(err)
        } else {
          resolve(data)
        }
      })
    })
  }

  try {
    this.dispatch(opts, new StreamHandler(opts, factory, callback))
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    // Report dispatch failures asynchronously, never synchronously.
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
module . exports = stream
/***/ } ) ,
/***/ 6923 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { InvalidArgumentError , RequestAbortedError , SocketError } = _ _nccwpck _require _ _ ( 8045 )
const { AsyncResource } = _ _nccwpck _require _ _ ( 852 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { addSignal , removeSignal } = _ _nccwpck _require _ _ ( 7032 )
const assert = _ _nccwpck _require _ _ ( 9491 )
// AsyncResource-based handler resolving an HTTP upgrade (e.g. WebSocket)
// request with `{ headers, socket, opaque, context }` via a node-style callback.
class UpgradeHandler extends AsyncResource {
  constructor (opts, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    const { signal, opaque, responseHeaders } = opts

    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
    }

    super('UNDICI_UPGRADE')

    this.responseHeaders = responseHeaders || null
    this.opaque = opaque || null
    this.callback = callback
    this.abort = null
    this.context = null

    addSignal(this, signal)
  }

  onConnect (abort, context) {
    // A cleared callback means the request was already aborted.
    if (!this.callback) {
      throw new RequestAbortedError()
    }

    this.abort = abort
    // Context is intentionally not kept here; it is delivered on upgrade.
    this.context = null
  }

  onHeaders () {
    // A plain response (no 101 switch) is a protocol violation for this API.
    throw new SocketError('bad upgrade', null)
  }

  onUpgrade (statusCode, rawHeaders, socket) {
    const { callback, opaque, context } = this

    assert.strictEqual(statusCode, 101)

    removeSignal(this)

    this.callback = null
    const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
    this.runInAsyncScope(callback, null, null, {
      headers,
      socket,
      opaque,
      context
    })
  }

  onError (err) {
    const { callback, opaque } = this

    removeSignal(this)

    if (callback) {
      this.callback = null
      queueMicrotask(() => {
        this.runInAsyncScope(callback, null, err, { opaque })
      })
    }
  }
}
/**
 * Perform an HTTP upgrade request. Defaults to method GET with the
 * `Websocket` protocol. Returns a Promise when no callback is given.
 */
function upgrade (opts, callback) {
  // Promisified form.
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      upgrade.call(this, opts, (err, data) => {
        if (err) {
          reject(err)
        } else {
          resolve(data)
        }
      })
    })
  }

  try {
    const upgradeHandler = new UpgradeHandler(opts, callback)
    this.dispatch({
      ...opts,
      method: opts.method || 'GET',
      upgrade: opts.protocol || 'Websocket'
    }, upgradeHandler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    // Report dispatch failures asynchronously, never synchronously.
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}
module . exports = upgrade
/***/ } ) ,
/***/ 4059 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// Aggregated export of the high-level request APIs — request, stream,
// pipeline, upgrade and connect — each implemented in its own module.
module . exports . request = _ _nccwpck _require _ _ ( 5448 )
module . exports . stream = _ _nccwpck _require _ _ ( 5395 )
module . exports . pipeline = _ _nccwpck _require _ _ ( 8752 )
module . exports . upgrade = _ _nccwpck _require _ _ ( 6923 )
module . exports . connect = _ _nccwpck _require _ _ ( 9744 )
/***/ } ) ,
/***/ 3858 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// Ported from https://github.com/nodejs/undici/pull/907
const assert = _ _nccwpck _require _ _ ( 9491 )
const { Readable } = _ _nccwpck _require _ _ ( 2781 )
const { RequestAbortedError , NotSupportedError , InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { ReadableStreamFrom , toUSVString } = _ _nccwpck _require _ _ ( 3983 )
// Lazily loaded in consumeEnd() so buffer.Blob is only paid for when blob() is used.
let Blob
// Private per-instance state keys for BodyReadable and its consumer helpers.
const kConsume = Symbol ( 'kConsume' )
const kReading = Symbol ( 'kReading' )
const kBody = Symbol ( 'kBody' )
const kAbort = Symbol ( 'abort' )
const kContentType = Symbol ( 'kContentType' )
module . exports = class BodyReadable extends Readable {
constructor ( {
resume ,
abort ,
contentType = '' ,
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
} ) {
super ( {
autoDestroy : true ,
read : resume ,
highWaterMark
} )
this . _readableState . dataEmitted = false
this [ kAbort ] = abort
this [ kConsume ] = null
this [ kBody ] = null
this [ kContentType ] = contentType
// Is stream being consumed through Readable API?
// This is an optimization so that we avoid checking
// for 'data' and 'readable' listeners in the hot path
// inside push().
this [ kReading ] = false
}
destroy ( err ) {
if ( this . destroyed ) {
// Node < 16
return this
}
if ( ! err && ! this . _readableState . endEmitted ) {
err = new RequestAbortedError ( )
}
if ( err ) {
this [ kAbort ] ( )
}
return super . destroy ( err )
}
emit ( ev , ... args ) {
if ( ev === 'data' ) {
// Node < 16.7
this . _readableState . dataEmitted = true
} else if ( ev === 'error' ) {
// Node < 16
this . _readableState . errorEmitted = true
}
return super . emit ( ev , ... args )
}
on ( ev , ... args ) {
if ( ev === 'data' || ev === 'readable' ) {
this [ kReading ] = true
}
return super . on ( ev , ... args )
}
addListener ( ev , ... args ) {
return this . on ( ev , ... args )
}
off ( ev , ... args ) {
const ret = super . off ( ev , ... args )
if ( ev === 'data' || ev === 'readable' ) {
this [ kReading ] = (
this . listenerCount ( 'data' ) > 0 ||
this . listenerCount ( 'readable' ) > 0
)
}
return ret
}
removeListener ( ev , ... args ) {
return this . off ( ev , ... args )
}
push ( chunk ) {
if ( this [ kConsume ] && chunk !== null && this . readableLength === 0 ) {
consumePush ( this [ kConsume ] , chunk )
return this [ kReading ] ? super . push ( chunk ) : true
}
return super . push ( chunk )
}
// https://fetch.spec.whatwg.org/#dom-body-text
async text ( ) {
return consume ( this , 'text' )
}
// https://fetch.spec.whatwg.org/#dom-body-json
async json ( ) {
return consume ( this , 'json' )
}
// https://fetch.spec.whatwg.org/#dom-body-blob
async blob ( ) {
return consume ( this , 'blob' )
}
// https://fetch.spec.whatwg.org/#dom-body-arraybuffer
async arrayBuffer ( ) {
return consume ( this , 'arrayBuffer' )
}
// https://fetch.spec.whatwg.org/#dom-body-formdata
async formData ( ) {
// TODO: Implement.
throw new NotSupportedError ( )
}
// https://fetch.spec.whatwg.org/#dom-body-bodyused
get bodyUsed ( ) {
return util . isDisturbed ( this )
}
// https://fetch.spec.whatwg.org/#dom-body-body
get body ( ) {
if ( ! this [ kBody ] ) {
this [ kBody ] = ReadableStreamFrom ( this )
if ( this [ kConsume ] ) {
// TODO: Is this the best way to force a lock?
this [ kBody ] . getReader ( ) // Ensure stream is locked.
assert ( this [ kBody ] . locked )
}
}
return this [ kBody ]
}
async dump ( opts ) {
let limit = opts && Number . isFinite ( opts . limit ) ? opts . limit : 262144
const signal = opts && opts . signal
const abortFn = ( ) => {
this . destroy ( )
}
if ( signal ) {
if ( typeof signal !== 'object' || ! ( 'aborted' in signal ) ) {
throw new InvalidArgumentError ( 'signal must be an AbortSignal' )
}
util . throwIfAborted ( signal )
signal . addEventListener ( 'abort' , abortFn , { once : true } )
}
try {
for await ( const chunk of this ) {
util . throwIfAborted ( signal )
limit -= Buffer . byteLength ( chunk )
if ( limit < 0 ) {
return
}
}
} catch {
util . throwIfAborted ( signal )
} finally {
if ( signal ) {
signal . removeEventListener ( 'abort' , abortFn )
}
}
}
}
// https://streams.spec.whatwg.org/#readablestream-locked
// A locked WHATWG body stream counts, and an attached consumer is an
// implicit lock as well.
function isLocked (self) {
  const body = self[kBody]
  if (body && body.locked === true) {
    return true
  }
  return self[kConsume]
}
// https://fetch.spec.whatwg.org/#body-unusable
// A body cannot be consumed twice: disturbed or locked means unusable.
function isUnusable (self) {
  const disturbed = util.isDisturbed(self)
  return disturbed || isLocked(self)
}
// Begin consuming `stream` as `type` ('text' | 'json' | 'blob' |
// 'arrayBuffer'). Only one consumer is allowed per body; a disturbed or
// locked stream rejects with TypeError. Resolution happens via the
// consumeStart/consumeEnd/consumeFinish helpers.
async function consume (stream, type) {
  if (isUnusable(stream)) {
    throw new TypeError('unusable')
  }

  assert(!stream[kConsume])

  return new Promise((resolve, reject) => {
    stream[kConsume] = {
      type,
      stream,
      resolve,
      reject,
      length: 0,
      body: []
    }

    stream
      .on('error', function (err) {
        consumeFinish(this[kConsume], err)
      })
      .on('close', function () {
        // 'close' without a prior settle means the body was cut short.
        if (this[kConsume].body !== null) {
          consumeFinish(this[kConsume], new RequestAbortedError())
        }
      })

    process.nextTick(consumeStart, stream[kConsume])
  })
}
// Kick off a registered consumer: replay anything already buffered in the
// stream, arrange (or immediately perform) end handling, then resume the
// stream and drain it through read().
function consumeStart (consume) {
  if (consume.body === null) {
    // Already settled (errored or aborted) before this tick ran.
    return
  }

  const { _readableState: state } = consume.stream

  // Replay chunks that were buffered before consumption began.
  for (const chunk of state.buffer) {
    consumePush(consume, chunk)
  }

  if (state.endEmitted) {
    // BUG FIX: this branch previously called `consumeEnd(this[kConsume])`,
    // but `this` is undefined here (plain strict-mode call scheduled via
    // process.nextTick), so consuming an already-ended stream crashed with
    // a TypeError. Use the `consume` state passed in instead.
    consumeEnd(consume)
  } else {
    consume.stream.on('end', function () {
      // Inside the listener `this` is the stream, so this[kConsume] is valid.
      consumeEnd(this[kConsume])
    })
  }

  consume.stream.resume()

  // Drain any remaining buffered data through the Readable API.
  while (consume.stream.read() != null) {
    // Loop
  }
}
// Finalize a consumer once the stream has ended: materialize the collected
// chunks into the requested representation, resolve the promise, and clear
// the consumer state. Any conversion failure destroys the stream.
function consumeEnd (consume) {
  const { type, body, resolve, stream, length } = consume

  try {
    if (type === 'text') {
      resolve(toUSVString(Buffer.concat(body)))
    } else if (type === 'json') {
      resolve(JSON.parse(Buffer.concat(body)))
    } else if (type === 'arrayBuffer') {
      // NOTE(review): resolves a Uint8Array view rather than a raw
      // ArrayBuffer; callers appear to rely on this.
      const dst = new Uint8Array(length)

      let pos = 0
      for (const buf of body) {
        dst.set(buf, pos)
        pos += buf.byteLength
      }

      resolve(dst)
    } else if (type === 'blob') {
      if (!Blob) {
        // Lazy-load so buffer.Blob is only paid for when blob() is used.
        Blob = (__nccwpck_require__(4300).Blob)
      }
      resolve(new Blob(body, { type: stream[kContentType] }))
    }

    consumeFinish(consume)
  } catch (err) {
    stream.destroy(err)
  }
}
// Append a chunk to the consumer's pending body, tracking total length.
function consumePush (state, chunk) {
  state.length += chunk.length
  state.body.push(chunk)
}
// Settle the consumer's promise (reject when `err` is given, resolve
// otherwise) and null out its state so it can never fire twice.
function consumeFinish (state, err) {
  if (state.body === null) {
    // Already finished.
    return
  }

  if (err) {
    state.reject(err)
  } else {
    state.resolve()
  }

  state.type = null
  state.stream = null
  state.resolve = null
  state.reject = null
  state.length = 0
  state.body = null
}
/***/ } ) ,
/***/ 7474 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
const assert = _ _nccwpck _require _ _ ( 9491 )
const {
ResponseStatusCodeError
} = _ _nccwpck _require _ _ ( 8045 )
const { toUSVString } = _ _nccwpck _require _ _ ( 3983 )
// Drain the error-response body (up to 128 KiB) and invoke `callback` with a
// ResponseStatusCodeError. When the content type is JSON or text the decoded
// payload is attached to the error; oversized or undecodable bodies produce
// the error without a payload.
async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
  assert(body)

  let chunks = []
  let totalLength = 0

  for await (const chunk of body) {
    chunks.push(chunk)
    totalLength += chunk.length
    if (totalLength > 128 * 1024) {
      // Too large to echo back; drop the collected payload.
      chunks = null
      break
    }
  }

  if (statusCode === 204 || !contentType || !chunks) {
    process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
    return
  }

  try {
    if (contentType.startsWith('application/json')) {
      const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }

    if (contentType.startsWith('text/')) {
      const payload = toUSVString(Buffer.concat(chunks))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }
  } catch (err) {
    // Process in a fallback if error
  }

  process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
}
module . exports = { getResolveErrorBodyCallback }
/***/ } ) ,
/***/ 7931 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
BalancedPoolMissingUpstreamError ,
InvalidArgumentError
} = _ _nccwpck _require _ _ ( 8045 )
const {
PoolBase ,
kClients ,
kNeedDrain ,
kAddClient ,
kRemoveClient ,
kGetDispatcher
} = _ _nccwpck _require _ _ ( 3198 )
const Pool = _ _nccwpck _require _ _ ( 4634 )
const { kUrl , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const { parseOrigin } = _ _nccwpck _require _ _ ( 3983 )
// Private keys for BalancedPool's internal weighted round-robin state.
const kFactory = Symbol ( 'factory' )
const kOptions = Symbol ( 'options' )
const kGreatestCommonDivisor = Symbol ( 'kGreatestCommonDivisor' )
const kCurrentWeight = Symbol ( 'kCurrentWeight' )
const kIndex = Symbol ( 'kIndex' )
const kWeight = Symbol ( 'kWeight' )
const kMaxWeightPerServer = Symbol ( 'kMaxWeightPerServer' )
const kErrorPenalty = Symbol ( 'kErrorPenalty' )
// Euclid's algorithm (iterative form). Used to step the weighted
// round-robin current-weight down by the largest useful decrement.
function getGreatestCommonDivisor (a, b) {
  let x = a
  let y = b
  while (y !== 0) {
    [x, y] = [y, x % y]
  }
  return x
}
// Default upstream factory for BalancedPool: create one Pool per origin.
function defaultFactory ( origin , opts ) {
return new Pool ( origin , opts )
}
// Dispatcher that load-balances requests over several upstream Pools using
// interleaved weighted round-robin. Weights drop on connection errors and
// recover on successful connects.
class BalancedPool extends PoolBase {
  constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
    super()

    this[kOptions] = opts
    this[kIndex] = -1
    this[kCurrentWeight] = 0

    this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
    this[kErrorPenalty] = this[kOptions].errorPenalty || 15

    if (!Array.isArray(upstreams)) {
      upstreams = [upstreams]
    }

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
      ? opts.interceptors.BalancedPool
      : []
    this[kFactory] = factory

    for (const upstream of upstreams) {
      this.addUpstream(upstream)
    }
    this._updateBalancedPoolStats()
  }

  addUpstream (upstream) {
    const upstreamOrigin = parseOrigin(upstream).origin

    // Ignore duplicates that are still usable.
    if (this[kClients].find((pool) => (
      pool[kUrl].origin === upstreamOrigin &&
      pool.closed !== true &&
      pool.destroyed !== true
    ))) {
      return this
    }
    const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))

    this[kAddClient](pool)
    pool.on('connect', () => {
      // Reward a successful connect, capped at the maximum weight.
      pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
    })

    pool.on('connectionError', () => {
      pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
      this._updateBalancedPoolStats()
    })

    pool.on('disconnect', (...args) => {
      const err = args[2]
      if (err && err.code === 'UND_ERR_SOCKET') {
        // decrease the weight of the pool.
        pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
        this._updateBalancedPoolStats()
      }
    })

    // Every client restarts at full weight whenever an upstream is added.
    for (const client of this[kClients]) {
      client[kWeight] = this[kMaxWeightPerServer]
    }

    this._updateBalancedPoolStats()

    return this
  }

  _updateBalancedPoolStats () {
    // The GCD of all weights is the round-robin decrement step.
    this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
  }

  removeUpstream (upstream) {
    const upstreamOrigin = parseOrigin(upstream).origin

    const pool = this[kClients].find((pool) => (
      pool[kUrl].origin === upstreamOrigin &&
      pool.closed !== true &&
      pool.destroyed !== true
    ))

    if (pool) {
      this[kRemoveClient](pool)
    }

    return this
  }

  get upstreams () {
    return this[kClients]
      .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
      .map((p) => p[kUrl].origin)
  }

  [kGetDispatcher] () {
    // We validate that pools is greater than 0,
    // otherwise we would have to wait until an upstream
    // is added, which might never happen.
    if (this[kClients].length === 0) {
      throw new BalancedPoolMissingUpstreamError()
    }

    const dispatcher = this[kClients].find(dispatcher => (
      !dispatcher[kNeedDrain] &&
      dispatcher.closed !== true &&
      dispatcher.destroyed !== true
    ))

    if (!dispatcher) {
      return
    }

    const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)

    if (allClientsBusy) {
      return
    }

    let counter = 0

    // Fallback candidate: the heaviest non-draining pool.
    let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])

    while (counter++ < this[kClients].length) {
      this[kIndex] = (this[kIndex] + 1) % this[kClients].length
      const pool = this[kClients][this[kIndex]]

      // find pool index with the largest weight
      if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
        maxWeightIndex = this[kIndex]
      }

      // decrease the current weight every `this[kClients].length`.
      if (this[kIndex] === 0) {
        // Set the current weight to the next lower weight.
        this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]

        if (this[kCurrentWeight] <= 0) {
          this[kCurrentWeight] = this[kMaxWeightPerServer]
        }
      }
      if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
        return pool
      }
    }

    this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
    this[kIndex] = maxWeightIndex
    return this[kClients][maxWeightIndex]
  }
}

module.exports = BalancedPool
/***/ } ) ,
/***/ 6101 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { kConstruct } = _ _nccwpck _require _ _ ( 9174 )
const { urlEquals , fieldValues : getFieldValues } = _ _nccwpck _require _ _ ( 2396 )
const { kEnumerableProperty , isDisturbed } = _ _nccwpck _require _ _ ( 3983 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { Response , cloneResponse } = _ _nccwpck _require _ _ ( 7823 )
const { Request } = _ _nccwpck _require _ _ ( 8359 )
const { kState , kHeaders , kGuard , kRealm } = _ _nccwpck _require _ _ ( 5861 )
const { fetching } = _ _nccwpck _require _ _ ( 4881 )
const { urlIsHttpHttpsScheme , createDeferredPromise , readAllBytes } = _ _nccwpck _require _ _ ( 2538 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { getGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
/ * *
* @ see https : //w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
* @ typedef { Object } CacheBatchOperation
* @ property { 'delete' | 'put' } type
* @ property { any } request
* @ property { any } response
* @ property { import ( '../../types/cache' ) . CacheQueryOptions } options
* /
/ * *
* @ see https : //w3c.github.io/ServiceWorker/#dfn-request-response-list
* @ typedef { [ any , any ] [ ] } requestResponseList
* /
class Cache {
/ * *
* @ see https : //w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
* @ type { requestResponseList }
* /
# relevantRequestResponseList
constructor ( ) {
if ( arguments [ 0 ] !== kConstruct ) {
webidl . illegalConstructor ( )
}
this . # relevantRequestResponseList = arguments [ 1 ]
}
async match ( request , options = { } ) {
webidl . brandCheck ( this , Cache )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'Cache.match' } )
request = webidl . converters . RequestInfo ( request )
options = webidl . converters . CacheQueryOptions ( options )
const p = await this . matchAll ( request , options )
if ( p . length === 0 ) {
return
}
return p [ 0 ]
}
async matchAll ( request = undefined , options = { } ) {
webidl . brandCheck ( this , Cache )
if ( request !== undefined ) request = webidl . converters . RequestInfo ( request )
options = webidl . converters . CacheQueryOptions ( options )
// 1.
let r = null
// 2.
if ( request !== undefined ) {
if ( request instanceof Request ) {
// 2.1.1
r = request [ kState ]
// 2.1.2
if ( r . method !== 'GET' && ! options . ignoreMethod ) {
return [ ]
}
} else if ( typeof request === 'string' ) {
// 2.2.1
r = new Request ( request ) [ kState ]
}
}
// 5.
// 5.1
const responses = [ ]
// 5.2
if ( request === undefined ) {
// 5.2.1
for ( const requestResponse of this . # relevantRequestResponseList ) {
responses . push ( requestResponse [ 1 ] )
}
} else { // 5.3
// 5.3.1
const requestResponses = this . # queryCache ( r , options )
// 5.3.2
for ( const requestResponse of requestResponses ) {
responses . push ( requestResponse [ 1 ] )
}
}
// 5.4
// We don't implement CORs so we don't need to loop over the responses, yay!
// 5.5.1
const responseList = [ ]
// 5.5.2
for ( const response of responses ) {
// 5.5.2.1
const responseObject = new Response ( response . body ? . source ? ? null )
const body = responseObject [ kState ] . body
responseObject [ kState ] = response
responseObject [ kState ] . body = body
responseObject [ kHeaders ] [ kHeadersList ] = response . headersList
responseObject [ kHeaders ] [ kGuard ] = 'immutable'
responseList . push ( responseObject )
}
// 6.
return Object . freeze ( responseList )
}
async add ( request ) {
webidl . brandCheck ( this , Cache )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'Cache.add' } )
request = webidl . converters . RequestInfo ( request )
// 1.
const requests = [ request ]
// 2.
const responseArrayPromise = this . addAll ( requests )
// 3.
return await responseArrayPromise
}
async addAll ( requests ) {
webidl . brandCheck ( this , Cache )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'Cache.addAll' } )
requests = webidl . converters [ 'sequence<RequestInfo>' ] ( requests )
// 1.
const responsePromises = [ ]
// 2.
const requestList = [ ]
// 3.
for ( const request of requests ) {
if ( typeof request === 'string' ) {
continue
}
// 3.1
const r = request [ kState ]
// 3.2
if ( ! urlIsHttpHttpsScheme ( r . url ) || r . method !== 'GET' ) {
throw webidl . errors . exception ( {
header : 'Cache.addAll' ,
message : 'Expected http/s scheme when method is not GET.'
} )
}
}
// 4.
/** @type {ReturnType<typeof fetching>[]} */
const fetchControllers = [ ]
// 5.
for ( const request of requests ) {
// 5.1
const r = new Request ( request ) [ kState ]
// 5.2
if ( ! urlIsHttpHttpsScheme ( r . url ) ) {
throw webidl . errors . exception ( {
header : 'Cache.addAll' ,
message : 'Expected http/s scheme.'
} )
}
// 5.4
r . initiator = 'fetch'
r . destination = 'subresource'
// 5.5
requestList . push ( r )
// 5.6
const responsePromise = createDeferredPromise ( )
// 5.7
fetchControllers . push ( fetching ( {
request : r ,
dispatcher : getGlobalDispatcher ( ) ,
processResponse ( response ) {
// 1.
if ( response . type === 'error' || response . status === 206 || response . status < 200 || response . status > 299 ) {
responsePromise . reject ( webidl . errors . exception ( {
header : 'Cache.addAll' ,
message : 'Received an invalid status code or the request failed.'
} ) )
} else if ( response . headersList . contains ( 'vary' ) ) { // 2.
// 2.1
const fieldValues = getFieldValues ( response . headersList . get ( 'vary' ) )
// 2.2
for ( const fieldValue of fieldValues ) {
// 2.2.1
if ( fieldValue === '*' ) {
responsePromise . reject ( webidl . errors . exception ( {
header : 'Cache.addAll' ,
message : 'invalid vary field value'
} ) )
for ( const controller of fetchControllers ) {
controller . abort ( )
}
return
}
}
}
} ,
processResponseEndOfBody ( response ) {
// 1.
if ( response . aborted ) {
responsePromise . reject ( new DOMException ( 'aborted' , 'AbortError' ) )
return
}
// 2.
responsePromise . resolve ( response )
}
} ) )
// 5.8
responsePromises . push ( responsePromise . promise )
}
// 6.
const p = Promise . all ( responsePromises )
// 7.
const responses = await p
// 7.1
const operations = [ ]
// 7.2
let index = 0
// 7.3
for ( const response of responses ) {
// 7.3.1
/** @type {CacheBatchOperation} */
const operation = {
type : 'put' , // 7.3.2
request : requestList [ index ] , // 7.3.3
response // 7.3.4
}
operations . push ( operation ) // 7.3.5
index ++ // 7.3.6
}
// 7.5
const cacheJobPromise = createDeferredPromise ( )
// 7.6.1
let errorData = null
// 7.6.2
try {
this . # batchCacheOperations ( operations )
} catch ( e ) {
errorData = e
}
// 7.6.3
queueMicrotask ( ( ) => {
// 7.6.3.1
if ( errorData === null ) {
cacheJobPromise . resolve ( undefined )
} else {
// 7.6.3.2
cacheJobPromise . reject ( errorData )
}
} )
// 7.7
return cacheJobPromise . promise
}
async put ( request , response ) {
webidl . brandCheck ( this , Cache )
webidl . argumentLengthCheck ( arguments , 2 , { header : 'Cache.put' } )
request = webidl . converters . RequestInfo ( request )
response = webidl . converters . Response ( response )
// 1.
let innerRequest = null
// 2.
if ( request instanceof Request ) {
innerRequest = request [ kState ]
} else { // 3.
innerRequest = new Request ( request ) [ kState ]
}
// 4.
if ( ! urlIsHttpHttpsScheme ( innerRequest . url ) || innerRequest . method !== 'GET' ) {
throw webidl . errors . exception ( {
header : 'Cache.put' ,
message : 'Expected an http/s scheme when method is not GET'
} )
}
// 5.
const innerResponse = response [ kState ]
// 6.
if ( innerResponse . status === 206 ) {
throw webidl . errors . exception ( {
header : 'Cache.put' ,
message : 'Got 206 status'
} )
}
// 7.
if ( innerResponse . headersList . contains ( 'vary' ) ) {
// 7.1.
const fieldValues = getFieldValues ( innerResponse . headersList . get ( 'vary' ) )
// 7.2.
for ( const fieldValue of fieldValues ) {
// 7.2.1
if ( fieldValue === '*' ) {
throw webidl . errors . exception ( {
header : 'Cache.put' ,
message : 'Got * vary field value'
} )
}
}
}
// 8.
if ( innerResponse . body && ( isDisturbed ( innerResponse . body . stream ) || innerResponse . body . stream . locked ) ) {
throw webidl . errors . exception ( {
header : 'Cache.put' ,
message : 'Response body is locked or disturbed'
} )
}
// 9.
const clonedResponse = cloneResponse ( innerResponse )
// 10.
const bodyReadPromise = createDeferredPromise ( )
// 11.
if ( innerResponse . body != null ) {
// 11.1
const stream = innerResponse . body . stream
// 11.2
const reader = stream . getReader ( )
// 11.3
readAllBytes (
reader ,
( bytes ) => bodyReadPromise . resolve ( bytes ) ,
( error ) => bodyReadPromise . reject ( error )
)
} else {
bodyReadPromise . resolve ( undefined )
}
// 12.
/** @type {CacheBatchOperation[]} */
const operations = [ ]
// 13.
/** @type {CacheBatchOperation} */
const operation = {
type : 'put' , // 14.
request : innerRequest , // 15.
response : clonedResponse // 16.
}
// 17.
operations . push ( operation )
// 19.
const bytes = await bodyReadPromise . promise
if ( clonedResponse . body != null ) {
clonedResponse . body . source = bytes
}
// 19.1
const cacheJobPromise = createDeferredPromise ( )
// 19.2.1
let errorData = null
// 19.2.2
try {
this . # batchCacheOperations ( operations )
} catch ( e ) {
errorData = e
}
// 19.2.3
queueMicrotask ( ( ) => {
// 19.2.3.1
if ( errorData === null ) {
cacheJobPromise . resolve ( )
} else { // 19.2.3.2
cacheJobPromise . reject ( errorData )
}
} )
return cacheJobPromise . promise
}
async delete ( request , options = { } ) {
webidl . brandCheck ( this , Cache )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'Cache.delete' } )
request = webidl . converters . RequestInfo ( request )
options = webidl . converters . CacheQueryOptions ( options )
/ * *
* @ type { Request }
* /
let r = null
if ( request instanceof Request ) {
r = request [ kState ]
if ( r . method !== 'GET' && ! options . ignoreMethod ) {
return false
}
} else {
assert ( typeof request === 'string' )
r = new Request ( request ) [ kState ]
}
/** @type {CacheBatchOperation[]} */
const operations = [ ]
/** @type {CacheBatchOperation} */
const operation = {
type : 'delete' ,
request : r ,
options
}
operations . push ( operation )
const cacheJobPromise = createDeferredPromise ( )
let errorData = null
let requestResponses
try {
requestResponses = this . # batchCacheOperations ( operations )
} catch ( e ) {
errorData = e
}
queueMicrotask ( ( ) => {
if ( errorData === null ) {
cacheJobPromise . resolve ( ! ! requestResponses ? . length )
} else {
cacheJobPromise . reject ( errorData )
}
} )
return cacheJobPromise . promise
}
/ * *
* @ see https : //w3c.github.io/ServiceWorker/#dom-cache-keys
* @ param { any } request
* @ param { import ( '../../types/cache' ) . CacheQueryOptions } options
* @ returns { readonly Request [ ] }
* /
async keys ( request = undefined , options = { } ) {
webidl . brandCheck ( this , Cache )
if ( request !== undefined ) request = webidl . converters . RequestInfo ( request )
options = webidl . converters . CacheQueryOptions ( options )
// 1.
let r = null
// 2.
if ( request !== undefined ) {
// 2.1
if ( request instanceof Request ) {
// 2.1.1
r = request [ kState ]
// 2.1.2
if ( r . method !== 'GET' && ! options . ignoreMethod ) {
return [ ]
}
} else if ( typeof request === 'string' ) { // 2.2
r = new Request ( request ) [ kState ]
}
}
// 4.
const promise = createDeferredPromise ( )
// 5.
// 5.1
const requests = [ ]
// 5.2
if ( request === undefined ) {
// 5.2.1
for ( const requestResponse of this . # relevantRequestResponseList ) {
// 5.2.1.1
requests . push ( requestResponse [ 0 ] )
}
} else { // 5.3
// 5.3.1
const requestResponses = this . # queryCache ( r , options )
// 5.3.2
for ( const requestResponse of requestResponses ) {
// 5.3.2.1
requests . push ( requestResponse [ 0 ] )
}
}
// 5.4
queueMicrotask ( ( ) => {
// 5.4.1
const requestList = [ ]
// 5.4.2
for ( const request of requests ) {
const requestObject = new Request ( 'https://a' )
requestObject [ kState ] = request
requestObject [ kHeaders ] [ kHeadersList ] = request . headersList
requestObject [ kHeaders ] [ kGuard ] = 'immutable'
requestObject [ kRealm ] = request . client
// 5.4.2.1
requestList . push ( requestObject )
}
// 5.4.3
promise . resolve ( Object . freeze ( requestList ) )
} )
return promise . promise
}
/**
 * Applies a list of "delete"/"put" operations to this cache as an atomic
 * batch: if any operation fails, the original request/response list is
 * restored and the error is rethrown.
 * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
 * @param {CacheBatchOperation[]} operations
 * @returns {requestResponseList} the [request, response] pairs applied
 */
#batchCacheOperations (operations) {
  // 1. The live list; mutated in place below.
  const cache = this.#relevantRequestResponseList

  // 2. Shallow copy kept for rollback in step 5.
  const backupCache = [...cache]

  // 3. Entries added by earlier "put" operations within this same batch;
  //    step 4.2.3 uses it to reject conflicting operations.
  const addedItems = []

  // 4.1
  const resultList = []

  try {
    // 4.2
    for (const operation of operations) {
      // 4.2.1
      if (operation.type !== 'delete' && operation.type !== 'put') {
        throw webidl.errors.exception({
          header: 'Cache.#batchCacheOperations',
          message: 'operation type does not match "delete" or "put"'
        })
      }

      // 4.2.2
      if (operation.type === 'delete' && operation.response != null) {
        throw webidl.errors.exception({
          header: 'Cache.#batchCacheOperations',
          message: 'delete operation should not have an associated response'
        })
      }

      // 4.2.3 Reject if this operation collides with an entry already added
      // by a previous operation of the same batch.
      if (this.#queryCache(operation.request, operation.options, addedItems).length) {
        throw new DOMException('???', 'InvalidStateError')
      }

      // 4.2.4
      let requestResponses

      // 4.2.5
      if (operation.type === 'delete') {
        // 4.2.5.1
        requestResponses = this.#queryCache(operation.request, operation.options)

        // TODO: the spec is wrong, this is needed to pass WPTs
        if (requestResponses.length === 0) {
          return []
        }

        // 4.2.5.2 Remove every matching entry from the live list.
        for (const requestResponse of requestResponses) {
          const idx = cache.indexOf(requestResponse)
          assert(idx !== -1)

          // 4.2.5.2.1
          cache.splice(idx, 1)
        }
      } else if (operation.type === 'put') { // 4.2.6
        // 4.2.6.1
        if (operation.response == null) {
          throw webidl.errors.exception({
            header: 'Cache.#batchCacheOperations',
            message: 'put operation should have an associated response'
          })
        }

        // 4.2.6.2
        const r = operation.request

        // 4.2.6.3
        if (!urlIsHttpHttpsScheme(r.url)) {
          throw webidl.errors.exception({
            header: 'Cache.#batchCacheOperations',
            message: 'expected http or https scheme'
          })
        }

        // 4.2.6.4
        if (r.method !== 'GET') {
          throw webidl.errors.exception({
            header: 'Cache.#batchCacheOperations',
            message: 'not get method'
          })
        }

        // 4.2.6.5
        if (operation.options != null) {
          throw webidl.errors.exception({
            header: 'Cache.#batchCacheOperations',
            message: 'options must not be defined'
          })
        }

        // 4.2.6.6 A put replaces any existing entries for the same request.
        requestResponses = this.#queryCache(operation.request)

        // 4.2.6.7
        for (const requestResponse of requestResponses) {
          const idx = cache.indexOf(requestResponse)
          assert(idx !== -1)

          // 4.2.6.7.1
          cache.splice(idx, 1)
        }

        // 4.2.6.8
        cache.push([operation.request, operation.response])

        // 4.2.6.10
        addedItems.push([operation.request, operation.response])
      }

      // 4.2.7
      resultList.push([operation.request, operation.response])
    }

    // 4.3
    return resultList
  } catch (e) { // 5.
    // 5.1
    this.#relevantRequestResponseList.length = 0

    // 5.2 Restore the backup so a failed batch leaves the cache untouched.
    this.#relevantRequestResponseList = backupCache

    // 5.3
    throw e
  }
}
/ * *
* @ see https : //w3c.github.io/ServiceWorker/#query-cache
* @ param { any } requestQuery
* @ param { import ( '../../types/cache' ) . CacheQueryOptions } options
* @ param { requestResponseList } targetStorage
* @ returns { requestResponseList }
* /
# queryCache ( requestQuery , options , targetStorage ) {
/** @type {requestResponseList} */
const resultList = [ ]
const storage = targetStorage ? ? this . # relevantRequestResponseList
for ( const requestResponse of storage ) {
const [ cachedRequest , cachedResponse ] = requestResponse
if ( this . # requestMatchesCachedItem ( requestQuery , cachedRequest , cachedResponse , options ) ) {
resultList . push ( requestResponse )
}
}
return resultList
}
/ * *
* @ see https : //w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
* @ param { any } requestQuery
* @ param { any } request
* @ param { any | null } response
* @ param { import ( '../../types/cache' ) . CacheQueryOptions | undefined } options
* @ returns { boolean }
* /
# requestMatchesCachedItem ( requestQuery , request , response = null , options ) {
// if (options?.ignoreMethod === false && request.method === 'GET') {
// return false
// }
const queryURL = new URL ( requestQuery . url )
const cachedURL = new URL ( request . url )
if ( options ? . ignoreSearch ) {
cachedURL . search = ''
queryURL . search = ''
}
if ( ! urlEquals ( queryURL , cachedURL , true ) ) {
return false
}
if (
response == null ||
options ? . ignoreVary ||
! response . headersList . contains ( 'vary' )
) {
return true
}
const fieldValues = getFieldValues ( response . headersList . get ( 'vary' ) )
for ( const fieldValue of fieldValues ) {
if ( fieldValue === '*' ) {
return false
}
const requestValue = request . headersList . get ( fieldValue )
const queryValue = requestQuery . headersList . get ( fieldValue )
// If one has the header and the other doesn't, or one has
// a different value than the other, return false
if ( requestValue !== queryValue ) {
return false
}
}
return true
}
}
Object . defineProperties ( Cache . prototype , {
[ Symbol . toStringTag ] : {
value : 'Cache' ,
configurable : true
} ,
match : kEnumerableProperty ,
matchAll : kEnumerableProperty ,
add : kEnumerableProperty ,
addAll : kEnumerableProperty ,
put : kEnumerableProperty ,
delete : kEnumerableProperty ,
keys : kEnumerableProperty
} )
const cacheQueryOptionConverters = [
{
key : 'ignoreSearch' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'ignoreMethod' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'ignoreVary' ,
converter : webidl . converters . boolean ,
defaultValue : false
}
]
webidl . converters . CacheQueryOptions = webidl . dictionaryConverter ( cacheQueryOptionConverters )
webidl . converters . MultiCacheQueryOptions = webidl . dictionaryConverter ( [
... cacheQueryOptionConverters ,
{
key : 'cacheName' ,
converter : webidl . converters . DOMString
}
] )
webidl . converters . Response = webidl . interfaceConverter ( Response )
webidl . converters [ 'sequence<RequestInfo>' ] = webidl . sequenceConverter (
webidl . converters . RequestInfo
)
module . exports = {
Cache
}
/***/ } ) ,
/***/ 7907 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { kConstruct } = _ _nccwpck _require _ _ ( 9174 )
const { Cache } = _ _nccwpck _require _ _ ( 6101 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
class CacheStorage {
  /**
   * Maps a cache name to its backing request/response list.
   * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
   * @type {Map<string, import('./cache').requestResponseList>}
   */
  #caches = new Map()

  constructor () {
    // Only internal callers holding kConstruct may instantiate this class.
    if (arguments[0] !== kConstruct) {
      webidl.illegalConstructor()
    }
  }

  /**
   * Looks for a matching response, either in one named cache or across all
   * caches in insertion order. Resolves with undefined when nothing matches.
   */
  async match (request, options = {}) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.MultiCacheQueryOptions(options)

    // 1. Restrict the search to the named cache when one was given.
    if (options.cacheName != null) {
      const cacheList = this.#caches.get(options.cacheName)
      if (cacheList !== undefined) {
        const cache = new Cache(kConstruct, cacheList)
        return await cache.match(request, options)
      }
      return undefined
    }

    // 2. Otherwise search every cache, returning the first hit.
    for (const cacheList of this.#caches.values()) {
      const cache = new Cache(kConstruct, cacheList)

      const response = await cache.match(request, options)
      if (response !== undefined) {
        return response
      }
    }

    return undefined
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-has
   * @param {string} cacheName
   * @returns {Promise<boolean>}
   */
  async has (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })

    cacheName = webidl.converters.DOMString(cacheName)
    return this.#caches.has(cacheName)
  }

  /**
   * Opens (creating if necessary) the named cache. Every call returns a fresh
   * Cache wrapper over the shared list, so
   * `await caches.open('v1') !== await caches.open('v1')`.
   * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
   * @param {string} cacheName
   * @returns {Promise<Cache>}
   */
  async open (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })

    cacheName = webidl.converters.DOMString(cacheName)

    const existingList = this.#caches.get(cacheName)
    if (existingList !== undefined) {
      return new Cache(kConstruct, existingList)
    }

    const newList = []
    this.#caches.set(cacheName, newList)
    return new Cache(kConstruct, newList)
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
   * @param {string} cacheName
   * @returns {Promise<boolean>} true when a cache with that name existed
   */
  async delete (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })

    cacheName = webidl.converters.DOMString(cacheName)
    return this.#caches.delete(cacheName)
  }

  /**
   * Lists cache names in insertion order (Map iteration order matches the
   * spec's ordered key list).
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
   * @returns {Promise<string[]>}
   */
  async keys () {
    webidl.brandCheck(this, CacheStorage)

    return [...this.#caches.keys()]
  }
}
Object . defineProperties ( CacheStorage . prototype , {
[ Symbol . toStringTag ] : {
value : 'CacheStorage' ,
configurable : true
} ,
match : kEnumerableProperty ,
has : kEnumerableProperty ,
open : kEnumerableProperty ,
delete : kEnumerableProperty ,
keys : kEnumerableProperty
} )
module . exports = {
CacheStorage
}
/***/ } ) ,
/***/ 9174 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = {
kConstruct : Symbol ( 'constructable' )
}
/***/ } ) ,
/***/ 2396 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const assert = _ _nccwpck _require _ _ ( 9491 )
const { URLSerializer } = _ _nccwpck _require _ _ ( 685 )
const { isValidHeaderName } = _ _nccwpck _require _ _ ( 2538 )
/**
 * Compares two URLs by their serialized form.
 * @see https://url.spec.whatwg.org/#concept-url-equals
 * @param {URL} A
 * @param {URL} B
 * @param {boolean | undefined} excludeFragment drop the fragment before comparing
 * @returns {boolean}
 */
function urlEquals (A, B, excludeFragment = false) {
  return URLSerializer(A, excludeFragment) === URLSerializer(B, excludeFragment)
}
/**
 * Splits a comma-separated header value (e.g. Vary) into its individual,
 * valid header names; empty and syntactically invalid entries are dropped.
 * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
 * @param {string} header
 * @returns {string[]}
 */
function fieldValues (header) {
  assert(header !== null)

  return header
    .split(',')
    .map((value) => value.trim())
    .filter((value) => value.length && isValidHeaderName(value))
}
module . exports = {
urlEquals ,
fieldValues
}
/***/ } ) ,
/***/ 3598 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// @ts-check
/* global WebAssembly */
const assert = _ _nccwpck _require _ _ ( 9491 )
const net = _ _nccwpck _require _ _ ( 1808 )
const util = _ _nccwpck _require _ _ ( 3983 )
const timers = _ _nccwpck _require _ _ ( 9459 )
const Request = _ _nccwpck _require _ _ ( 2905 )
const DispatcherBase = _ _nccwpck _require _ _ ( 4839 )
const {
RequestContentLengthMismatchError ,
ResponseContentLengthMismatchError ,
InvalidArgumentError ,
RequestAbortedError ,
HeadersTimeoutError ,
HeadersOverflowError ,
SocketError ,
InformationalError ,
BodyTimeoutError ,
HTTPParserError ,
ResponseExceededMaxSizeError ,
ClientDestroyedError
} = _ _nccwpck _require _ _ ( 8045 )
const buildConnector = _ _nccwpck _require _ _ ( 2067 )
const {
kUrl ,
kReset ,
kServerName ,
kClient ,
kBusy ,
kParser ,
kConnect ,
kBlocking ,
kResuming ,
kRunning ,
kPending ,
kSize ,
kWriting ,
kQueue ,
kConnected ,
kConnecting ,
kNeedDrain ,
kNoRef ,
kKeepAliveDefaultTimeout ,
kHostHeader ,
kPendingIdx ,
kRunningIdx ,
kError ,
kPipelining ,
kSocket ,
kKeepAliveTimeoutValue ,
kMaxHeadersSize ,
kKeepAliveMaxTimeout ,
kKeepAliveTimeoutThreshold ,
kHeadersTimeout ,
kBodyTimeout ,
kStrictContentLength ,
kConnector ,
kMaxRedirections ,
kMaxRequests ,
kCounter ,
kClose ,
kDestroy ,
kDispatch ,
kInterceptors ,
kLocalAddress ,
kMaxResponseSize
} = _ _nccwpck _require _ _ ( 2785 )
// Zero-copy Buffer subclass views over wasm memory.
const FastBuffer = Buffer[Symbol.species]

const kClosedResolve = Symbol('kClosedResolve')

// diagnostics_channel publishers; stubbed with hasSubscribers=false when the
// module is unavailable so call sites can check cheaply before publishing.
const channels = {}

try {
  const diagnosticsChannel = __nccwpck_require__(7643)
  channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders')
  channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect')
  channels.connectError = diagnosticsChannel.channel('undici:client:connectError')
  channels.connected = diagnosticsChannel.channel('undici:client:connected')
} catch {
  channels.sendHeaders = { hasSubscribers: false }
  channels.beforeConnect = { hasSubscribers: false }
  channels.connectError = { hasSubscribers: false }
  channels.connected = { hasSubscribers: false }
}
/**
 * A basic HTTP/1.1 client for a single origin: requests are queued and
 * dispatched over one socket, optionally pipelined.
 * @type {import('../types/client').default}
 */
class Client extends DispatcherBase {
  /**
   * @param {string|URL} url origin to connect to
   * @param {import('../types/client').Client.Options} options
   * @throws {InvalidArgumentError} on removed/renamed legacy options or
   *   out-of-range numeric options
   */
  constructor (url, {
    interceptors,
    maxHeaderSize,
    headersTimeout,
    socketTimeout,
    requestTimeout,
    connectTimeout,
    bodyTimeout,
    idleTimeout,
    keepAlive,
    keepAliveTimeout,
    maxKeepAliveTimeout,
    keepAliveMaxTimeout,
    keepAliveTimeoutThreshold,
    socketPath,
    pipelining,
    tls,
    strictContentLength,
    maxCachedSessions,
    maxRedirections,
    connect,
    maxRequestsPerClient,
    localAddress,
    maxResponseSize,
    autoSelectFamily,
    autoSelectFamilyAttemptTimeout
  } = {}) {
    super()

    // Reject removed/renamed legacy options with pointers to their replacements.
    if (keepAlive !== undefined) {
      throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
    }

    if (socketTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
    }

    if (requestTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
    }

    if (idleTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
    }

    if (maxKeepAliveTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
    }

    // Validate numeric/typed options before touching any state.
    if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {
      throw new InvalidArgumentError('invalid maxHeaderSize')
    }

    if (socketPath != null && typeof socketPath !== 'string') {
      throw new InvalidArgumentError('invalid socketPath')
    }

    if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
      throw new InvalidArgumentError('invalid connectTimeout')
    }

    if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
      throw new InvalidArgumentError('invalid keepAliveTimeout')
    }

    if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
      throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
    }

    if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
      throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
    }

    if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
      throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
    }

    if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
      throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
      throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
    }

    if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
      throw new InvalidArgumentError('localAddress must be valid string IP address')
    }

    if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
      throw new InvalidArgumentError('maxResponseSize must be a positive number')
    }

    if (
      autoSelectFamilyAttemptTimeout != null &&
      (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
    ) {
      throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
    }

    // When no custom connect function is supplied, build the default
    // connector from the tls/socket options.
    if (typeof connect !== 'function') {
      connect = buildConnector({
        ...tls,
        maxCachedSessions,
        socketPath,
        timeout: connectTimeout,
        ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
        ...connect
      })
    }

    this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client)
      ? interceptors.Client
      : [createRedirectInterceptor({ maxRedirections })]
    this[kUrl] = util.parseOrigin(url)
    this[kConnector] = connect
    this[kSocket] = null
    this[kPipelining] = pipelining != null ? pipelining : 1
    this[kMaxHeadersSize] = maxHeaderSize || 16384
    this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
    this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
    this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold
    this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
    this[kServerName] = null
    this[kLocalAddress] = localAddress != null ? localAddress : null
    this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming
    this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming
    this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
    this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
    this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
    this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
    this[kMaxRedirections] = maxRedirections
    this[kMaxRequests] = maxRequestsPerClient
    this[kClosedResolve] = null
    this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1

    // kQueue is built up of 3 sections separated by
    // the kRunningIdx and kPendingIdx indices.
    // |   complete   |   running   |   pending   |
    //                ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
    // kRunningIdx points to the first running element.
    // kPendingIdx points to the first pending element.
    // This implements a fast queue with an amortized
    // time of O(1).
    this[kQueue] = []
    this[kRunningIdx] = 0
    this[kPendingIdx] = 0
  }

  get pipelining () {
    return this[kPipelining]
  }

  set pipelining (value) {
    this[kPipelining] = value
    // Changing pipelining may unblock queued requests.
    resume(this, true)
  }

  // Number of queued-but-not-yet-written requests.
  get [kPending] () {
    return this[kQueue].length - this[kPendingIdx]
  }

  // Number of requests written and awaiting a response.
  get [kRunning] () {
    return this[kPendingIdx] - this[kRunningIdx]
  }

  // Total in-flight requests (running + pending).
  get [kSize] () {
    return this[kQueue].length - this[kRunningIdx]
  }

  get [kConnected] () {
    return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed
  }

  // True when the client cannot accept more work without draining first.
  get [kBusy] () {
    const socket = this[kSocket]
    return (
      (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) ||
      (this[kSize] >= (this[kPipelining] || 1)) ||
      this[kPending] > 0
    )
  }

  /* istanbul ignore: only used for test */
  [kConnect] (cb) {
    connect(this)
    this.once('connect', cb)
  }

  /**
   * Queues a request and kicks the dispatch loop.
   * @returns {boolean} false when the caller should wait for 'drain'
   */
  [kDispatch] (opts, handler) {
    const origin = opts.origin || this[kUrl].origin
    const request = new Request(origin, opts, handler)

    this[kQueue].push(request)
    if (this[kResuming]) {
      // Do nothing.
    } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
      // Wait a tick in case stream/iterator is ended in the same tick.
      this[kResuming] = 1
      process.nextTick(resume, this)
    } else {
      resume(this, true)
    }

    if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
      this[kNeedDrain] = 2
    }

    return this[kNeedDrain] < 2
  }

  // Graceful close: resolves once all queued work has completed.
  async [kClose] () {
    return new Promise((resolve) => {
      if (!this[kSize]) {
        resolve(null)
      } else {
        this[kClosedResolve] = resolve
      }
    })
  }

  // Forced teardown: errors all pending requests and destroys the socket.
  async [kDestroy] (err) {
    return new Promise((resolve) => {
      const requests = this[kQueue].splice(this[kPendingIdx])
      for (let i = 0; i < requests.length; i++) {
        const request = requests[i]
        errorRequest(this, request, err)
      }

      const callback = () => {
        if (this[kClosedResolve]) {
          // TODO (fix): Should we error here with ClientDestroyedError?
          this[kClosedResolve]()
          this[kClosedResolve] = null
        }
        resolve()
      }

      if (!this[kSocket]) {
        queueMicrotask(callback)
      } else {
        util.destroy(this[kSocket].on('close', callback), err)
      }

      resume(this)
    })
  }
}
const constants = __nccwpck_require__(953)
const createRedirectInterceptor = __nccwpck_require__(8861)
// Shared zero-length buffer used to flush the parser without allocating.
const EMPTY_BUF = Buffer.alloc(0)
/**
 * Compiles and instantiates the bundled llhttp WebAssembly module, preferring
 * the SIMD build and falling back to the plain build when compilation fails.
 * The wasm callbacks route into the module-level `currentParser`, which
 * Parser.execute sets for the duration of each llhttp_execute call.
 * @returns {Promise<WebAssembly.Instance>}
 */
async function lazyllhttp () {
  const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(1145) : undefined

  let mod
  try {
    mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(5627), 'base64'))
  } catch (e) {
    /* istanbul ignore next */

    // We could check if the error was caused by the simd option not
    // being enabled, but the occurring of this other error
    // * https://github.com/emscripten-core/emscripten/issues/11495
    // got me to remove that check to avoid breaking Node 12.
    mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(1145), 'base64'))
  }

  return await WebAssembly.instantiate(mod, {
    env: {
      /* eslint-disable camelcase */

      wasm_on_url: (p, at, len) => {
        /* istanbul ignore next */
        return 0
      },
      wasm_on_status: (p, at, len) => {
        assert.strictEqual(currentParser.ptr, p)
        // Translate the wasm pointer back into an offset in the JS chunk.
        const start = at - currentBufferPtr + currentBufferRef.byteOffset
        return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
      },
      wasm_on_message_begin: (p) => {
        assert.strictEqual(currentParser.ptr, p)
        return currentParser.onMessageBegin() || 0
      },
      wasm_on_header_field: (p, at, len) => {
        assert.strictEqual(currentParser.ptr, p)
        const start = at - currentBufferPtr + currentBufferRef.byteOffset
        return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
      },
      wasm_on_header_value: (p, at, len) => {
        assert.strictEqual(currentParser.ptr, p)
        const start = at - currentBufferPtr + currentBufferRef.byteOffset
        return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
      },
      wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => {
        assert.strictEqual(currentParser.ptr, p)
        return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0
      },
      wasm_on_body: (p, at, len) => {
        assert.strictEqual(currentParser.ptr, p)
        const start = at - currentBufferPtr + currentBufferRef.byteOffset
        return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
      },
      wasm_on_message_complete: (p) => {
        assert.strictEqual(currentParser.ptr, p)
        return currentParser.onMessageComplete() || 0
      }

      /* eslint-enable camelcase */
    }
  })
}
// Eagerly start compiling the wasm parser; `llhttpInstance` is populated on
// first connect.
let llhttpInstance = null
let llhttpPromise = lazyllhttp()
// NOTE: `.catch()` with no handler does NOT mark a rejection as handled — it
// only derives another rejected promise, which itself becomes unhandled and
// can crash the process before the lazy retry on connect ever runs. An
// explicit no-op handler actually suppresses the unhandled rejection.
llhttpPromise.catch(() => {})

// Per-execute scratch state shared with the wasm callbacks in lazyllhttp().
let currentParser = null
let currentBufferRef = null
let currentBufferSize = 0
let currentBufferPtr = null

// Parser timeout phases (see Parser.setTimeout).
const TIMEOUT_HEADERS = 1
const TIMEOUT_BODY = 2
const TIMEOUT_IDLE = 3
class Parser {
/**
 * Wraps one wasm llhttp response parser bound to a client/socket pair.
 * `exports` are the instantiated wasm module's exports from lazyllhttp().
 */
constructor (client, socket, { exports }) {
  assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0)

  this.llhttp = exports
  // Pointer into wasm memory for this parser's state; freed in destroy().
  this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE)
  this.client = client
  this.socket = socket
  this.timeout = null
  this.timeoutValue = null
  this.timeoutType = null
  this.statusCode = null
  this.statusText = ''
  this.upgrade = false
  // Flat [name, value, name, value, ...] Buffer list for the current message.
  this.headers = []
  this.headersSize = 0
  this.headersMaxSize = client[kMaxHeadersSize]
  this.shouldKeepAlive = false
  this.paused = false
  // Bound once so it can be handed out as a resume callback.
  this.resume = this.resume.bind(this)

  this.bytesRead = 0

  // Raw accumulated values of headers the client itself inspects.
  this.keepAlive = ''
  this.contentLength = ''
  this.connection = ''
  this.maxResponseSize = client[kMaxResponseSize]
}
/**
 * Arms (or re-arms) the parser deadline. `type` is one of TIMEOUT_HEADERS /
 * TIMEOUT_BODY / TIMEOUT_IDLE. When the duration is unchanged the existing
 * timer is refreshed instead of recreated to avoid churn.
 */
setTimeout (value, type) {
  this.timeoutType = type
  if (value !== this.timeoutValue) {
    timers.clearTimeout(this.timeout)
    if (value) {
      this.timeout = timers.setTimeout(onParserTimeout, value, this)
      // istanbul ignore else: only for jest
      if (this.timeout.unref) {
        this.timeout.unref()
      }
    } else {
      this.timeout = null
    }
    this.timeoutValue = value
  } else if (this.timeout) {
    // istanbul ignore else: only for jest
    if (this.timeout.refresh) {
      this.timeout.refresh()
    }
  }
}
/**
 * Un-pauses the wasm parser after downstream backpressure clears, flushes
 * any data already buffered on the socket, then continues reading.
 */
resume () {
  if (this.socket.destroyed || !this.paused) {
    return
  }

  assert(this.ptr != null)
  assert(currentParser == null)

  this.llhttp.llhttp_resume(this.ptr)

  // Pausing only happens while a body is being consumed.
  assert(this.timeoutType === TIMEOUT_BODY)
  if (this.timeout) {
    // istanbul ignore else: only for jest
    if (this.timeout.refresh) {
      this.timeout.refresh()
    }
  }

  this.paused = false
  this.execute(this.socket.read() || EMPTY_BUF) // Flush parser.
  this.readMore()
}
readMore ( ) {
while ( ! this . paused && this . ptr ) {
const chunk = this . socket . read ( )
if ( chunk === null ) {
break
}
this . execute ( chunk )
}
}
/**
 * Feeds `data` through the wasm llhttp parser. Copies the chunk into wasm
 * memory (growing the shared scratch buffer in 4 KiB steps as needed), runs
 * llhttp_execute, then dispatches on the result: upgrade hand-off, pause
 * (backpressure, unread bytes pushed back), or a protocol error that destroys
 * the socket.
 */
execute (data) {
  assert(this.ptr != null)
  assert(currentParser == null)
  assert(!this.paused)

  const { socket, llhttp } = this

  // Grow the shared wasm input buffer if this chunk does not fit.
  if (data.length > currentBufferSize) {
    if (currentBufferPtr) {
      llhttp.free(currentBufferPtr)
    }
    currentBufferSize = Math.ceil(data.length / 4096) * 4096
    currentBufferPtr = llhttp.malloc(currentBufferSize)
  }

  new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data)

  // Call `execute` on the wasm parser.
  // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data,
  // and finally the length of bytes to parse.
  // The return value is an error code or `constants.ERROR.OK`.
  try {
    let ret

    try {
      // Expose this parser/chunk to the wasm callbacks for the duration of
      // the synchronous execute call.
      currentBufferRef = data
      currentParser = this
      ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length)
      /* eslint-disable-next-line no-useless-catch */
    } catch (err) {
      /* istanbul ignore next: difficult to make a test case for */
      throw err
    } finally {
      currentParser = null
      currentBufferRef = null
    }

    // Offset into `data` where llhttp stopped consuming.
    const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr

    if (ret === constants.ERROR.PAUSED_UPGRADE) {
      this.onUpgrade(data.slice(offset))
    } else if (ret === constants.ERROR.PAUSED) {
      this.paused = true
      socket.unshift(data.slice(offset))
    } else if (ret !== constants.ERROR.OK) {
      const ptr = llhttp.llhttp_get_error_reason(this.ptr)
      let message = ''
      /* istanbul ignore else: difficult to make a test case for */
      if (ptr) {
        // The reason is a NUL-terminated C string in wasm memory.
        const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0)
        message =
          'Response does not match the HTTP/1.1 protocol (' +
          Buffer.from(llhttp.memory.buffer, ptr, len).toString() +
          ')'
      }
      throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset))
    }
  } catch (err) {
    util.destroy(socket, err)
  }
}
/**
 * Frees the wasm parser state and cancels any pending timeout. Must not be
 * called mid-execute (currentParser must be unset).
 */
destroy () {
  assert(this.ptr != null)
  assert(currentParser == null)

  this.llhttp.llhttp_free(this.ptr)
  this.ptr = null

  timers.clearTimeout(this.timeout)
  this.timeout = null
  this.timeoutValue = null
  this.timeoutType = null

  this.paused = false
}
// llhttp callback: captures the response reason-phrase.
onStatus (buf) {
  this.statusText = buf.toString()
}
onMessageBegin ( ) {
const { socket , client } = this
/* istanbul ignore next: difficult to make a test case for */
if ( socket . destroyed ) {
return - 1
}
const request = client [ kQueue ] [ client [ kRunningIdx ] ]
if ( ! request ) {
return - 1
}
}
onHeaderField ( buf ) {
const len = this . headers . length
if ( ( len & 1 ) === 0 ) {
this . headers . push ( buf )
} else {
this . headers [ len - 1 ] = Buffer . concat ( [ this . headers [ len - 1 ] , buf ] )
}
this . trackHeader ( buf . length )
}
/**
 * llhttp callback: a (possibly partial) header value. Values live at odd
 * indices of `headers`; continuation chunks are concatenated onto the value
 * in progress. Keep-Alive, Connection and Content-Length values are also
 * accumulated separately for the client's own bookkeeping.
 */
onHeaderValue (buf) {
  let len = this.headers.length

  if ((len & 1) === 1) {
    // The name was just pushed; this is the value's first chunk.
    this.headers.push(buf)
    len += 1
  } else {
    // Continuation of the previous value chunk.
    this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
  }

  // Length check first avoids stringifying every header name.
  const key = this.headers[len - 2]
  if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') {
    this.keepAlive += buf.toString()
  } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') {
    this.connection += buf.toString()
  } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') {
    this.contentLength += buf.toString()
  }

  this.trackHeader(buf.length)
}
trackHeader ( len ) {
this . headersSize += len
if ( this . headersSize >= this . headersMaxSize ) {
util . destroy ( this . socket , new HeadersOverflowError ( ) )
}
}
/**
 * llhttp signalled a protocol upgrade (or CONNECT tunnel). Detaches the
 * socket from this client and hands it — together with any already-buffered
 * bytes (`head`) — to the request's onUpgrade handler.
 */
onUpgrade (head) {
  const { upgrade, client, socket, headers, statusCode } = this

  assert(upgrade)

  const request = client[kQueue][client[kRunningIdx]]
  assert(request)

  assert(!socket.destroyed)
  assert(socket === client[kSocket])
  assert(!this.paused)
  assert(request.upgrade || request.method === 'CONNECT')

  // Reset per-message parser state before handing off the socket.
  this.statusCode = null
  this.statusText = ''
  this.shouldKeepAlive = null

  assert(this.headers.length % 2 === 0)
  this.headers = []
  this.headersSize = 0

  // Push back bytes llhttp consumed past the end of the headers.
  socket.unshift(head)

  socket[kParser].destroy()
  socket[kParser] = null

  socket[kClient] = null
  socket[kError] = null
  socket
    .removeListener('error', onSocketError)
    .removeListener('readable', onSocketReadable)
    .removeListener('end', onSocketEnd)
    .removeListener('close', onSocketClose)

  client[kSocket] = null
  client[kQueue][client[kRunningIdx]++] = null
  client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade'))

  try {
    request.onUpgrade(statusCode, headers, socket)
  } catch (err) {
    util.destroy(socket, err)
  }

  resume(client)
}
/**
 * llhttp callback: all headers of a response have been parsed. Validates the
 * response against the running request, updates keep-alive state, and hands
 * the headers to the request. Return codes follow llhttp conventions:
 * -1 aborts, 1 means "no body follows", 2 means "upgrade", PAUSED applies
 * backpressure, 0 continues normally.
 */
onHeadersComplete (statusCode, upgrade, shouldKeepAlive) {
  const { client, socket, headers, statusText } = this

  /* istanbul ignore next: difficult to make a test case for */
  if (socket.destroyed) {
    return -1
  }

  const request = client[kQueue][client[kRunningIdx]]

  /* istanbul ignore next: difficult to make a test case for */
  if (!request) {
    return -1
  }

  assert(!this.upgrade)
  assert(this.statusCode < 200)

  if (statusCode === 100) {
    util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket)))
    return -1
  }

  /* this can only happen if server is misbehaving */
  if (upgrade && !request.upgrade) {
    util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket)))
    return -1
  }

  assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS)

  this.statusCode = statusCode
  this.shouldKeepAlive = (
    shouldKeepAlive ||
    // Override llhttp value which does not allow keepAlive for HEAD.
    (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive')
  )

  // Final responses switch the deadline from headers to body; informational
  // responses just refresh the headers deadline.
  if (this.statusCode >= 200) {
    const bodyTimeout = request.bodyTimeout != null
      ? request.bodyTimeout
      : client[kBodyTimeout]
    this.setTimeout(bodyTimeout, TIMEOUT_BODY)
  } else if (this.timeout) {
    // istanbul ignore else: only for jest
    if (this.timeout.refresh) {
      this.timeout.refresh()
    }
  }

  if (request.method === 'CONNECT') {
    assert(client[kRunning] === 1)
    this.upgrade = true
    return 2
  }

  if (upgrade) {
    assert(client[kRunning] === 1)
    this.upgrade = true
    return 2
  }

  assert(this.headers.length % 2 === 0)
  this.headers = []
  this.headersSize = 0

  // Derive the keep-alive timeout from the server's Keep-Alive header,
  // clamped by the client's configured maximum and threshold.
  if (this.shouldKeepAlive && client[kPipelining]) {
    const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null

    if (keepAliveTimeout != null) {
      const timeout = Math.min(
        keepAliveTimeout - client[kKeepAliveTimeoutThreshold],
        client[kKeepAliveMaxTimeout]
      )
      if (timeout <= 0) {
        socket[kReset] = true
      } else {
        client[kKeepAliveTimeoutValue] = timeout
      }
    } else {
      client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout]
    }
  } else {
    // Stop more requests from being dispatched.
    socket[kReset] = true
  }

  let pause
  try {
    pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false
  } catch (err) {
    util.destroy(socket, err)
    return -1
  }

  if (request.method === 'HEAD') {
    return 1
  }

  if (statusCode < 200) {
    return 1
  }

  if (socket[kBlocking]) {
    socket[kBlocking] = false
    resume(client)
  }

  return pause ? constants.ERROR.PAUSED : 0
}
/**
 * llhttp callback: one chunk of response body. Refreshes the body deadline,
 * enforces maxResponseSize, and forwards the chunk to the request handler.
 * Returning PAUSED applies backpressure; -1 aborts parsing.
 */
onBody (buf) {
  const { client, socket, statusCode, maxResponseSize } = this

  if (socket.destroyed) {
    return -1
  }

  const request = client[kQueue][client[kRunningIdx]]
  assert(request)

  assert.strictEqual(this.timeoutType, TIMEOUT_BODY)
  if (this.timeout) {
    // istanbul ignore else: only for jest
    if (this.timeout.refresh) {
      this.timeout.refresh()
    }
  }

  assert(statusCode >= 200)

  if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) {
    util.destroy(socket, new ResponseExceededMaxSizeError())
    return -1
  }

  this.bytesRead += buf.length

  try {
    if (request.onData(buf) === false) {
      return constants.ERROR.PAUSED
    }
  } catch (err) {
    util.destroy(socket, err)
    return -1
  }
}
/**
 * llhttp callback: the current response message is complete.
 * Resets per-message parser state, validates the received length against
 * content-length, notifies the request, and decides whether the socket
 * can be reused for pipelined requests.
 */
onMessageComplete () {
  const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this

  if (socket.destroyed && (!statusCode || shouldKeepAlive)) {
    return -1
  }

  if (upgrade) {
    return
  }

  const request = client[kQueue][client[kRunningIdx]]
  assert(request)

  assert(statusCode >= 100)

  // Reset per-message state for the next response on this socket.
  this.statusCode = null
  this.statusText = ''
  this.bytesRead = 0
  this.contentLength = ''
  this.keepAlive = ''
  this.connection = ''

  assert(this.headers.length % 2 === 0)
  this.headers = []
  this.headersSize = 0

  if (statusCode < 200) {
    // Informational response; the final response is still to come.
    return
  }

  /* istanbul ignore next: should be handled by llhttp? */
  if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) {
    util.destroy(socket, new ResponseContentLengthMismatchError())
    return -1
  }

  try {
    request.onComplete(headers)
  } catch (err) {
    errorRequest(client, request, err)
  }

  // Retire the completed request from the head of the running window.
  client[kQueue][client[kRunningIdx]++] = null

  if (socket[kWriting]) {
    assert.strictEqual(client[kRunning], 0)

    // Response completed before request.
    util.destroy(socket, new InformationalError('reset'))
    return constants.ERROR.PAUSED
  } else if (!shouldKeepAlive) {
    util.destroy(socket, new InformationalError('reset'))
    return constants.ERROR.PAUSED
  } else if (socket[kReset] && client[kRunning] === 0) {
    // Destroy socket once all requests have completed.
    // The request at the tail of the pipeline is the one
    // that requested reset and no further requests should
    // have been queued since then.
    util.destroy(socket, new InformationalError('reset'))
    return constants.ERROR.PAUSED
  } else if (client[kPipelining] === 1) {
    // We must wait a full event loop cycle to reuse this socket to make sure
    // that non-spec compliant servers are not closing the connection even if they
    // said they won't.
    setImmediate(resume, client)
  } else {
    resume(client)
  }
}
}
/**
 * Handles expiry of whichever parser timeout is currently armed
 * (headers, body, or idle) by destroying the socket with a matching error.
 */
function onParserTimeout (parser) {
  const { socket, timeoutType, client } = parser

  /* istanbul ignore else */
  if (timeoutType === TIMEOUT_HEADERS) {
    const stalled = !socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1
    if (stalled) {
      assert(!parser.paused, 'cannot be paused while waiting for headers')
      util.destroy(socket, new HeadersTimeoutError())
    }
    return
  }

  if (timeoutType === TIMEOUT_BODY) {
    if (!parser.paused) {
      util.destroy(socket, new BodyTimeoutError())
    }
    return
  }

  if (timeoutType === TIMEOUT_IDLE) {
    assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue])
    util.destroy(socket, new InformationalError('socket idle timeout'))
  }
}
/** 'readable' handler: pull more data through the llhttp parser. */
function onSocketReadable () {
  const parser = this[kParser]
  parser.readMore()
}
/**
 * 'error' handler for the client socket. An ECONNRESET on a connection
 * that already carried a complete non-keep-alive response is tolerated;
 * anything else is recorded on the socket and surfaced via onError().
 */
function onSocketError (err) {
  const parser = this[kParser]

  assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')

  // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded
  // to the user.
  if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {
    // We treat all incoming data so far as a valid response.
    parser.onMessageComplete()
    return
  }

  this[kError] = err
  onError(this[kClient], err)
}
/**
 * Propagates a socket/connect error to queued requests. Errors that were
 * caused by an in-flight request, or that are recoverable socket/info
 * errors, are left for the normal per-request handling.
 */
function onError (client, err) {
  const recoverable =
    client[kRunning] !== 0 ||
    err.code === 'UND_ERR_INFO' ||
    err.code === 'UND_ERR_SOCKET'

  if (recoverable) {
    return
  }

  // Error is not caused by running request and not a recoverable
  // socket error.
  assert(client[kPendingIdx] === client[kRunningIdx])

  for (const request of client[kQueue].splice(client[kRunningIdx])) {
    errorRequest(client, request, err)
  }
  assert(client[kSize] === 0)
}
/**
 * 'end' handler: the peer finished sending. A completed non-keep-alive
 * response is accepted as-is; otherwise the half-closed socket is an error.
 */
function onSocketEnd () {
  const parser = this[kParser]

  if (parser.statusCode && !parser.shouldKeepAlive) {
    // We treat all incoming data so far as a valid response.
    parser.onMessageComplete()
    return
  }

  util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
}
function onSocketClose ( ) {
const { [ kClient ] : client } = this
if ( ! this [ kError ] && this [ kParser ] . statusCode && ! this [ kParser ] . shouldKeepAlive ) {
// We treat all incoming data so far as a valid response.
this [ kParser ] . onMessageComplete ( )
}
this [ kParser ] . destroy ( )
this [ kParser ] = null
const err = this [ kError ] || new SocketError ( 'closed' , util . getSocketInfo ( this ) )
client [ kSocket ] = null
if ( client . destroyed ) {
assert ( client [ kPending ] === 0 )
// Fail entire queue.
const requests = client [ kQueue ] . splice ( client [ kRunningIdx ] )
for ( let i = 0 ; i < requests . length ; i ++ ) {
const request = requests [ i ]
errorRequest ( client , request , err )
}
} else if ( client [ kRunning ] > 0 && err . code !== 'UND_ERR_INFO' ) {
// Fail head of pipeline.
const request = client [ kQueue ] [ client [ kRunningIdx ] ]
client [ kQueue ] [ client [ kRunningIdx ] ++ ] = null
errorRequest ( client , request , err )
}
client [ kPendingIdx ] = client [ kRunningIdx ]
assert ( client [ kRunning ] === 0 )
client . emit ( 'disconnect' , client [ kUrl ] , [ client ] , err )
resume ( client )
}
async function connect ( client ) {
assert ( ! client [ kConnecting ] )
assert ( ! client [ kSocket ] )
let { host , hostname , protocol , port } = client [ kUrl ]
// Resolve ipv6
if ( hostname [ 0 ] === '[' ) {
const idx = hostname . indexOf ( ']' )
assert ( idx !== - 1 )
const ip = hostname . substr ( 1 , idx - 1 )
assert ( net . isIP ( ip ) )
hostname = ip
}
client [ kConnecting ] = true
if ( channels . beforeConnect . hasSubscribers ) {
channels . beforeConnect . publish ( {
connectParams : {
host ,
hostname ,
protocol ,
port ,
servername : client [ kServerName ] ,
localAddress : client [ kLocalAddress ]
} ,
connector : client [ kConnector ]
} )
}
try {
const socket = await new Promise ( ( resolve , reject ) => {
client [ kConnector ] ( {
host ,
hostname ,
protocol ,
port ,
servername : client [ kServerName ] ,
localAddress : client [ kLocalAddress ]
} , ( err , socket ) => {
if ( err ) {
reject ( err )
} else {
resolve ( socket )
}
} )
} )
if ( client . destroyed ) {
util . destroy ( socket . on ( 'error' , ( ) => { } ) , new ClientDestroyedError ( ) )
return
}
if ( ! llhttpInstance ) {
llhttpInstance = await llhttpPromise
llhttpPromise = null
}
client [ kConnecting ] = false
assert ( socket )
socket [ kNoRef ] = false
socket [ kWriting ] = false
socket [ kReset ] = false
socket [ kBlocking ] = false
socket [ kError ] = null
socket [ kParser ] = new Parser ( client , socket , llhttpInstance )
socket [ kClient ] = client
socket [ kCounter ] = 0
socket [ kMaxRequests ] = client [ kMaxRequests ]
socket
. on ( 'error' , onSocketError )
. on ( 'readable' , onSocketReadable )
. on ( 'end' , onSocketEnd )
. on ( 'close' , onSocketClose )
client [ kSocket ] = socket
if ( channels . connected . hasSubscribers ) {
channels . connected . publish ( {
connectParams : {
host ,
hostname ,
protocol ,
port ,
servername : client [ kServerName ] ,
localAddress : client [ kLocalAddress ]
} ,
connector : client [ kConnector ] ,
socket
} )
}
client . emit ( 'connect' , client [ kUrl ] , [ client ] )
} catch ( err ) {
if ( client . destroyed ) {
return
}
client [ kConnecting ] = false
if ( channels . connectError . hasSubscribers ) {
channels . connectError . publish ( {
connectParams : {
host ,
hostname ,
protocol ,
port ,
servername : client [ kServerName ] ,
localAddress : client [ kLocalAddress ]
} ,
connector : client [ kConnector ] ,
error : err
} )
}
if ( err . code === 'ERR_TLS_CERT_ALTNAME_INVALID' ) {
assert ( client [ kRunning ] === 0 )
while ( client [ kPending ] > 0 && client [ kQueue ] [ client [ kPendingIdx ] ] . servername === client [ kServerName ] ) {
const request = client [ kQueue ] [ client [ kPendingIdx ] ++ ]
errorRequest ( client , request , err )
}
} else {
onError ( client , err )
}
client . emit ( 'connectionError' , client [ kUrl ] , [ client ] , err )
}
resume ( client )
}
function emitDrain ( client ) {
client [ kNeedDrain ] = 0
client . emit ( 'drain' , client [ kUrl ] , [ client ] )
}
function resume ( client , sync ) {
if ( client [ kResuming ] === 2 ) {
return
}
client [ kResuming ] = 2
_resume ( client , sync )
client [ kResuming ] = 0
if ( client [ kRunningIdx ] > 256 ) {
client [ kQueue ] . splice ( 0 , client [ kRunningIdx ] )
client [ kPendingIdx ] -= client [ kRunningIdx ]
client [ kRunningIdx ] = 0
}
}
/**
 * Core dispatch loop. Iterates until no further progress can be made:
 * manages socket ref/unref and parser timeouts, emits 'drain', connects
 * when needed, and writes the next pending request whenever the pipeline,
 * socket and request state allow it.
 */
function _resume (client, sync) {
  while (true) {
    if (client.destroyed) {
      assert(client[kPending] === 0)
      return
    }

    // Resolve a pending close() once the queue has fully drained.
    if (client[kClosedResolve] && !client[kSize]) {
      client[kClosedResolve]()
      client[kClosedResolve] = null
      return
    }

    const socket = client[kSocket]

    if (socket && !socket.destroyed) {
      // Keep the event loop alive only while work is outstanding.
      if (client[kSize] === 0) {
        if (!socket[kNoRef] && socket.unref) {
          socket.unref()
          socket[kNoRef] = true
        }
      } else if (socket[kNoRef] && socket.ref) {
        socket.ref()
        socket[kNoRef] = false
      }

      // Arm the parser timeout appropriate to the socket's current state.
      if (client[kSize] === 0) {
        if (socket[kParser].timeoutType !== TIMEOUT_IDLE) {
          socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE)
        }
      } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) {
        if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) {
          const request = client[kQueue][client[kRunningIdx]]
          const headersTimeout = request.headersTimeout != null
            ? request.headersTimeout
            : client[kHeadersTimeout]
          socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS)
        }
      }
    }

    if (client[kBusy]) {
      client[kNeedDrain] = 2
    } else if (client[kNeedDrain] === 2) {
      if (sync) {
        // Defer 'drain' so it is not emitted while dispatch() is still on
        // the caller's stack.
        client[kNeedDrain] = 1
        process.nextTick(emitDrain, client)
      } else {
        emitDrain(client)
      }
      continue
    }

    if (client[kPending] === 0) {
      return
    }

    if (client[kRunning] >= (client[kPipelining] || 1)) {
      return
    }

    const request = client[kQueue][client[kPendingIdx]]

    // An SNI change requires a fresh TLS connection.
    if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
      if (client[kRunning] > 0) {
        return
      }

      client[kServerName] = request.servername

      if (socket && socket.servername !== request.servername) {
        util.destroy(socket, new InformationalError('servername changed'))
        return
      }
    }

    if (client[kConnecting]) {
      return
    }

    if (!socket) {
      connect(client)
      return
    }

    if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) {
      return
    }

    if (client[kRunning] > 0 && !request.idempotent) {
      // Non-idempotent request cannot be retried.
      // Ensure that no other requests are inflight and
      // could cause failure.
      return
    }

    if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) {
      // Don't dispatch an upgrade until all preceding requests have completed.
      // A misbehaving server might upgrade the connection before all pipelined
      // request has completed.
      return
    }

    // A stream body known to be empty can be dropped entirely; keep its
    // error/end handling but send the request without a body.
    if (util.isStream(request.body) && util.bodyLength(request.body) === 0) {
      request.body
        .on('data', /* istanbul ignore next */ function () {
          /* istanbul ignore next */
          assert(false)
        })
        .on('error', function (err) {
          errorRequest(client, request, err)
        })
        .on('end', function () {
          util.destroy(this)
        })

      request.body = null
    }

    if (client[kRunning] > 0 &&
      (util.isStream(request.body) || util.isAsyncIterable(request.body))) {
      // Request with stream or iterator body can error while other requests
      // are inflight and indirectly error those as well.
      // Ensure this doesn't happen by waiting for inflight
      // to complete before dispatching.

      // Request with stream or iterator body cannot be retried.
      // Ensure that no other requests are inflight and
      // could cause failure.
      return
    }

    if (!request.aborted && write(client, request)) {
      client[kPendingIdx]++
    } else {
      // Aborted or rejected by write(): drop it from the queue.
      client[kQueue].splice(client[kPendingIdx], 1)
    }
  }
}
function write ( client , request ) {
const { body , method , path , host , upgrade , headers , blocking , reset } = request
// https://tools.ietf.org/html/rfc7231#section-4.3.1
// https://tools.ietf.org/html/rfc7231#section-4.3.2
// https://tools.ietf.org/html/rfc7231#section-4.3.5
// Sending a payload body on a request that does not
// expect it can cause undefined behavior on some
// servers and corrupt connection state. Do not
// re-use the connection for further requests.
const expectsPayload = (
method === 'PUT' ||
method === 'POST' ||
method === 'PATCH'
)
if ( body && typeof body . read === 'function' ) {
// Try to read EOF in order to get length.
body . read ( 0 )
}
let contentLength = util . bodyLength ( body )
if ( contentLength === null ) {
contentLength = request . contentLength
}
if ( contentLength === 0 && ! expectsPayload ) {
// https://tools.ietf.org/html/rfc7230#section-3.3.2
// A user agent SHOULD NOT send a Content-Length header field when
// the request message does not contain a payload body and the method
// semantics do not anticipate such a body.
contentLength = null
}
if ( request . contentLength !== null && request . contentLength !== contentLength ) {
if ( client [ kStrictContentLength ] ) {
errorRequest ( client , request , new RequestContentLengthMismatchError ( ) )
return false
}
process . emitWarning ( new RequestContentLengthMismatchError ( ) )
}
const socket = client [ kSocket ]
try {
request . onConnect ( ( err ) => {
if ( request . aborted || request . completed ) {
return
}
errorRequest ( client , request , err || new RequestAbortedError ( ) )
util . destroy ( socket , new InformationalError ( 'aborted' ) )
} )
} catch ( err ) {
errorRequest ( client , request , err )
}
if ( request . aborted ) {
return false
}
if ( method === 'HEAD' ) {
// https://github.com/mcollina/undici/issues/258
// Close after a HEAD request to interop with misbehaving servers
// that may send a body in the response.
socket [ kReset ] = true
}
if ( upgrade || method === 'CONNECT' ) {
// On CONNECT or upgrade, block pipeline from dispatching further
// requests on this connection.
socket [ kReset ] = true
}
if ( reset != null ) {
socket [ kReset ] = reset
}
if ( client [ kMaxRequests ] && socket [ kCounter ] ++ >= client [ kMaxRequests ] ) {
socket [ kReset ] = true
}
if ( blocking ) {
socket [ kBlocking ] = true
}
let header = ` ${ method } ${ path } HTTP/1.1 \r \n `
if ( typeof host === 'string' ) {
header += ` host: ${ host } \r \n `
} else {
header += client [ kHostHeader ]
}
if ( upgrade ) {
header += ` connection: upgrade \r \n upgrade: ${ upgrade } \r \n `
} else if ( client [ kPipelining ] && ! socket [ kReset ] ) {
header += 'connection: keep-alive\r\n'
} else {
header += 'connection: close\r\n'
}
if ( headers ) {
header += headers
}
if ( channels . sendHeaders . hasSubscribers ) {
channels . sendHeaders . publish ( { request , headers : header , socket } )
}
/* istanbul ignore else: assertion */
if ( ! body ) {
if ( contentLength === 0 ) {
socket . write ( ` ${ header } content-length: 0 \r \n \r \n ` , 'latin1' )
} else {
assert ( contentLength === null , 'no body must not have content length' )
socket . write ( ` ${ header } \r \n ` , 'latin1' )
}
request . onRequestSent ( )
} else if ( util . isBuffer ( body ) ) {
assert ( contentLength === body . byteLength , 'buffer body must have content length' )
socket . cork ( )
socket . write ( ` ${ header } content-length: ${ contentLength } \r \n \r \n ` , 'latin1' )
socket . write ( body )
socket . uncork ( )
request . onBodySent ( body )
request . onRequestSent ( )
if ( ! expectsPayload ) {
socket [ kReset ] = true
}
} else if ( util . isBlobLike ( body ) ) {
if ( typeof body . stream === 'function' ) {
writeIterable ( { body : body . stream ( ) , client , request , socket , contentLength , header , expectsPayload } )
} else {
writeBlob ( { body , client , request , socket , contentLength , header , expectsPayload } )
}
} else if ( util . isStream ( body ) ) {
writeStream ( { body , client , request , socket , contentLength , header , expectsPayload } )
} else if ( util . isIterable ( body ) ) {
writeIterable ( { body , client , request , socket , contentLength , header , expectsPayload } )
} else {
assert ( false )
}
return true
}
function writeStream ( { body , client , request , socket , contentLength , header , expectsPayload } ) {
assert ( contentLength !== 0 || client [ kRunning ] === 0 , 'stream body cannot be pipelined' )
let finished = false
const writer = new AsyncWriter ( { socket , request , contentLength , client , expectsPayload , header } )
const onData = function ( chunk ) {
if ( finished ) {
return
}
try {
if ( ! writer . write ( chunk ) && this . pause ) {
this . pause ( )
}
} catch ( err ) {
util . destroy ( this , err )
}
}
const onDrain = function ( ) {
if ( finished ) {
return
}
if ( body . resume ) {
body . resume ( )
}
}
const onAbort = function ( ) {
onFinished ( new RequestAbortedError ( ) )
}
const onFinished = function ( err ) {
if ( finished ) {
return
}
finished = true
assert ( socket . destroyed || ( socket [ kWriting ] && client [ kRunning ] <= 1 ) )
socket
. off ( 'drain' , onDrain )
. off ( 'error' , onFinished )
body
. removeListener ( 'data' , onData )
. removeListener ( 'end' , onFinished )
. removeListener ( 'error' , onFinished )
. removeListener ( 'close' , onAbort )
if ( ! err ) {
try {
writer . end ( )
} catch ( er ) {
err = er
}
}
writer . destroy ( err )
if ( err && ( err . code !== 'UND_ERR_INFO' || err . message !== 'reset' ) ) {
util . destroy ( body , err )
} else {
util . destroy ( body )
}
}
body
. on ( 'data' , onData )
. on ( 'end' , onFinished )
. on ( 'error' , onFinished )
. on ( 'close' , onAbort )
if ( body . resume ) {
body . resume ( )
}
socket
. on ( 'drain' , onDrain )
. on ( 'error' , onFinished )
}
async function writeBlob ( { body , client , request , socket , contentLength , header , expectsPayload } ) {
assert ( contentLength === body . size , 'blob body must have content length' )
try {
if ( contentLength != null && contentLength !== body . size ) {
throw new RequestContentLengthMismatchError ( )
}
const buffer = Buffer . from ( await body . arrayBuffer ( ) )
socket . cork ( )
socket . write ( ` ${ header } content-length: ${ contentLength } \r \n \r \n ` , 'latin1' )
socket . write ( buffer )
socket . uncork ( )
request . onBodySent ( buffer )
request . onRequestSent ( )
if ( ! expectsPayload ) {
socket [ kReset ] = true
}
resume ( client )
} catch ( err ) {
util . destroy ( socket , err )
}
}
async function writeIterable ( { body , client , request , socket , contentLength , header , expectsPayload } ) {
assert ( contentLength !== 0 || client [ kRunning ] === 0 , 'iterator body cannot be pipelined' )
let callback = null
function onDrain ( ) {
if ( callback ) {
const cb = callback
callback = null
cb ( )
}
}
const waitForDrain = ( ) => new Promise ( ( resolve , reject ) => {
assert ( callback === null )
if ( socket [ kError ] ) {
reject ( socket [ kError ] )
} else {
callback = resolve
}
} )
socket
. on ( 'close' , onDrain )
. on ( 'drain' , onDrain )
const writer = new AsyncWriter ( { socket , request , contentLength , client , expectsPayload , header } )
try {
// It's up to the user to somehow abort the async iterable.
for await ( const chunk of body ) {
if ( socket [ kError ] ) {
throw socket [ kError ]
}
if ( ! writer . write ( chunk ) ) {
await waitForDrain ( )
}
}
writer . end ( )
} catch ( err ) {
writer . destroy ( err )
} finally {
socket
. off ( 'close' , onDrain )
. off ( 'drain' , onDrain )
}
}
/**
 * Incrementally writes a request head + body to the socket, choosing
 * between content-length and chunked transfer-encoding framing and
 * enforcing the declared content length.
 */
class AsyncWriter {
  constructor ({ socket, request, contentLength, client, expectsPayload, header }) {
    this.socket = socket
    this.request = request
    this.contentLength = contentLength
    this.client = client
    this.bytesWritten = 0
    this.expectsPayload = expectsPayload
    this.header = header

    // Block further dispatches on this socket while the body is streaming.
    socket[kWriting] = true
  }

  /**
   * Writes one body chunk. Returns the socket.write() result (false means
   * the caller should wait for 'drain'). Throws on a recorded socket error
   * or a strict content-length violation.
   */
  write (chunk) {
    const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this

    if (socket[kError]) {
      throw socket[kError]
    }

    if (socket.destroyed) {
      return false
    }

    const len = Buffer.byteLength(chunk)
    if (!len) {
      return true
    }

    // We should defer writing chunks.
    if (contentLength !== null && bytesWritten + len > contentLength) {
      if (client[kStrictContentLength]) {
        throw new RequestContentLengthMismatchError()
      }

      process.emitWarning(new RequestContentLengthMismatchError())
    }

    socket.cork()

    if (bytesWritten === 0) {
      if (!expectsPayload) {
        socket[kReset] = true
      }

      // First chunk: flush the request head with the chosen framing.
      if (contentLength === null) {
        socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1')
      } else {
        socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
      }
    }

    if (contentLength === null) {
      // Chunked framing: emit the hex chunk-size line before the payload.
      socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1')
    }

    this.bytesWritten += len

    const ret = socket.write(chunk)

    socket.uncork()

    request.onBodySent(chunk)

    if (!ret) {
      // Keep the headers timeout alive while waiting for drain.
      if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
        // istanbul ignore else: only for jest
        if (socket[kParser].timeout.refresh) {
          socket[kParser].timeout.refresh()
        }
      }
    }

    return ret
  }

  /**
   * Finishes the body: writes the terminating framing, validates the total
   * length written and resumes dispatching on the client.
   */
  end () {
    const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this
    request.onRequestSent()

    socket[kWriting] = false

    if (socket[kError]) {
      throw socket[kError]
    }

    if (socket.destroyed) {
      return
    }

    if (bytesWritten === 0) {
      if (expectsPayload) {
        // https://tools.ietf.org/html/rfc7230#section-3.3.2
        // A user agent SHOULD send a Content-Length in a request message when
        // no Transfer-Encoding is sent and the request method defines a meaning
        // for an enclosed payload body.

        socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
      } else {
        socket.write(`${header}\r\n`, 'latin1')
      }
    } else if (contentLength === null) {
      // Terminating zero-length chunk for chunked encoding.
      socket.write('\r\n0\r\n\r\n', 'latin1')
    }

    if (contentLength !== null && bytesWritten !== contentLength) {
      if (client[kStrictContentLength]) {
        throw new RequestContentLengthMismatchError()
      } else {
        process.emitWarning(new RequestContentLengthMismatchError())
      }
    }

    if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
      // istanbul ignore else: only for jest
      if (socket[kParser].timeout.refresh) {
        socket[kParser].timeout.refresh()
      }
    }

    resume(client)
  }

  /**
   * Aborts the body write. With an error, the socket is destroyed since a
   * partially written request cannot be recovered.
   */
  destroy (err) {
    const { socket, client } = this

    socket[kWriting] = false

    if (err) {
      assert(client[kRunning] <= 1, 'pipeline should only contain this request')
      util.destroy(socket, err)
    }
  }
}
function errorRequest ( client , request , err ) {
try {
request . onError ( err )
assert ( request . aborted )
} catch ( err ) {
client . emit ( 'error' , err )
}
}
module . exports = Client
/***/ } ) ,
/***/ 6436 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
/* istanbul ignore file: only for Node 12 */
const { kConnected , kSize } = _ _nccwpck _require _ _ ( 2785 )
/**
 * Minimal WeakRef stand-in for runtimes without native support.
 * Holds a strong reference, but emulates collection by dereferencing to
 * undefined once the dispatcher has no connections and an empty queue.
 */
class CompatWeakRef {
  constructor (value) {
    this.value = value
  }

  deref () {
    const { value } = this
    if (value[kConnected] === 0 && value[kSize] === 0) {
      return undefined
    }
    return value
  }
}
/**
 * Minimal FinalizationRegistry stand-in. Approximates finalization by
 * invoking the callback when a registered dispatcher disconnects with no
 * remaining connections and an empty queue.
 */
class CompatFinalizer {
  constructor (finalizer) {
    this.finalizer = finalizer
  }

  register (dispatcher, key) {
    dispatcher.on('disconnect', () => {
      if (dispatcher[kConnected] !== 0 || dispatcher[kSize] !== 0) {
        return
      }
      this.finalizer(key)
    })
  }
}
module . exports = function ( ) {
return {
WeakRef : global . WeakRef || CompatWeakRef ,
FinalizationRegistry : global . FinalizationRegistry || CompatFinalizer
}
}
/***/ } ) ,
/***/ 663 :
/***/ ( ( module ) => {
"use strict" ;
// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
const maxAttributeValueSize = 1024
// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
const maxNameValuePairSize = 4096
module . exports = {
maxAttributeValueSize ,
maxNameValuePairSize
}
/***/ } ) ,
/***/ 1724 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { parseSetCookie } = _ _nccwpck _require _ _ ( 4408 )
const { stringify , getHeadersList } = _ _nccwpck _require _ _ ( 3121 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { Headers } = _ _nccwpck _require _ _ ( 554 )
/ * *
* @ typedef { Object } Cookie
* @ property { string } name
* @ property { string } value
* @ property { Date | number | undefined } expires
* @ property { number | undefined } maxAge
* @ property { string | undefined } domain
* @ property { string | undefined } path
* @ property { boolean | undefined } secure
* @ property { boolean | undefined } httpOnly
* @ property { 'Strict' | 'Lax' | 'None' } sameSite
* @ property { string [ ] } unparsed
* /
/**
 * Parses the request's `cookie` header into a name → value record.
 * @param {Headers} headers
 * @returns {Record<string, string>}
 */
function getCookies (headers) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })
  webidl.brandCheck(headers, Headers, { strict: false })

  const cookie = headers.get('cookie')
  const out = {}

  if (cookie) {
    for (const piece of cookie.split(';')) {
      // Everything before the first "=" is the name; the remainder
      // (which may itself contain "=") is the value.
      const [name, ...rest] = piece.split('=')
      out[name.trim()] = rest.join('=')
    }
  }

  return out
}
/**
 * Expires a cookie by name via a Set-Cookie header.
 * @param {Headers} headers
 * @param {string} name
 * @param {{ path?: string, domain?: string }|undefined} attributes
 * @returns {void}
 */
function deleteCookie (headers, name, attributes) {
  webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })
  webidl.brandCheck(headers, Headers, { strict: false })

  name = webidl.converters.DOMString(name)
  attributes = webidl.converters.DeleteCookieAttributes(attributes)

  // Deletion is modelled as setting an already-expired empty cookie.
  // Matches behavior of
  // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
  const expired = {
    name,
    value: '',
    expires: new Date(0),
    ...attributes
  }
  setCookie(headers, expired)
}
/**
 * Parses every Set-Cookie header on the response into Cookie objects.
 * @param {Headers} headers
 * @returns {Cookie[]}
 */
function getSetCookies (headers) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })
  webidl.brandCheck(headers, Headers, { strict: false })

  const cookies = getHeadersList(headers).cookies

  if (!cookies) {
    return []
  }

  return cookies.map((pair) => {
    // In older versions of undici each entry is a raw header string
    // rather than a [name, value] tuple.
    const raw = Array.isArray(pair) ? pair[1] : pair
    return parseSetCookie(raw)
  })
}
/**
 * Serializes a cookie and appends it as a Set-Cookie header.
 * @param {Headers} headers
 * @param {Cookie} cookie
 * @returns {void}
 */
function setCookie (headers, cookie) {
  webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })
  webidl.brandCheck(headers, Headers, { strict: false })

  cookie = webidl.converters.Cookie(cookie)

  const str = stringify(cookie)

  if (str) {
    // Reuse the already-computed serialization; the original called
    // stringify(cookie) a second time here for no reason.
    headers.append('Set-Cookie', str)
  }
}
webidl . converters . DeleteCookieAttributes = webidl . dictionaryConverter ( [
{
converter : webidl . nullableConverter ( webidl . converters . DOMString ) ,
key : 'path' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . DOMString ) ,
key : 'domain' ,
defaultValue : null
}
] )
webidl . converters . Cookie = webidl . dictionaryConverter ( [
{
converter : webidl . converters . DOMString ,
key : 'name'
} ,
{
converter : webidl . converters . DOMString ,
key : 'value'
} ,
{
converter : webidl . nullableConverter ( ( value ) => {
if ( typeof value === 'number' ) {
return webidl . converters [ 'unsigned long long' ] ( value )
}
return new Date ( value )
} ) ,
key : 'expires' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters [ 'long long' ] ) ,
key : 'maxAge' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . DOMString ) ,
key : 'domain' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . DOMString ) ,
key : 'path' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . boolean ) ,
key : 'secure' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . boolean ) ,
key : 'httpOnly' ,
defaultValue : null
} ,
{
converter : webidl . converters . USVString ,
key : 'sameSite' ,
allowedValues : [ 'Strict' , 'Lax' , 'None' ]
} ,
{
converter : webidl . sequenceConverter ( webidl . converters . DOMString ) ,
key : 'unparsed' ,
defaultValue : [ ]
}
] )
module . exports = {
getCookies ,
deleteCookie ,
getSetCookies ,
setCookie
}
/***/ } ) ,
/***/ 4408 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { maxNameValuePairSize , maxAttributeValueSize } = _ _nccwpck _require _ _ ( 663 )
const { isCTLExcludingHtab } = _ _nccwpck _require _ _ ( 3121 )
const { collectASequenceOfCodePointsFast } = _ _nccwpck _require _ _ ( 685 )
const assert = _ _nccwpck _require _ _ ( 9491 )
/**
 * @description Parses the field-value attributes of a set-cookie header string.
 * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
 * @param {string} header
 * @returns if the header is invalid, null will be returned
 */
function parseSetCookie (header) {
  // 1. A CTL character other than HTAB (%x00-08 / %x0A-1F / %x7F)
  //    invalidates the entire set-cookie-string.
  if (isCTLExcludingHtab(header)) {
    return null
  }

  // 2. Split off the unparsed attributes at the first ";", keeping the
  //    ";" itself with the attributes; with no ";" the whole string is
  //    the name-value pair.
  const semicolonIndex = header.indexOf(';')
  const nameValuePair = semicolonIndex === -1 ? header : header.slice(0, semicolonIndex)
  const unparsedAttributes = semicolonIndex === -1 ? '' : header.slice(semicolonIndex)

  // 3. Split the pair at the first "="; when there is no "=" the name is
  //    empty and the entire pair is the value.
  let name = ''
  let value = ''
  const equalsIndex = nameValuePair.indexOf('=')
  if (equalsIndex === -1) {
    value = nameValuePair
  } else {
    name = nameValuePair.slice(0, equalsIndex)
    value = nameValuePair.slice(equalsIndex + 1)
  }

  // 4. Remove leading/trailing WSP from both parts.
  name = name.trim()
  value = value.trim()

  // 5. Ignore pairs whose combined length exceeds the spec maximum.
  if (name.length + value.length > maxNameValuePairSize) {
    return null
  }

  // 6. Combine the cookie-name/cookie-value with the parsed attributes.
  return {
    name,
    value,
    ...parseUnparsedAttributes(unparsedAttributes)
  }
}
/**
 * Parses the remaining attributes of a set-cookie header.
 * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
 * @param {string} unparsedAttributes starts with ';' when non-empty
 * @param {Object.<string, unknown>} [cookieAttributeList={}] accumulator, mutated across recursive calls
 * @returns {Object.<string, unknown>} the parsed cookie-attribute-list
 */
function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
  // 1. If the unparsed-attributes string is empty, skip the rest of
  //    these steps.
  if (unparsedAttributes.length === 0) {
    return cookieAttributeList
  }

  // 2. Discard the first character of the unparsed-attributes (which
  //    will be a %x3B (";") character).
  assert(unparsedAttributes[0] === ';')
  unparsedAttributes = unparsedAttributes.slice(1)

  let cookieAv = ''

  // 3. If the remaining unparsed-attributes contains a %x3B (";")
  //    character, consume up to (not including) it; otherwise consume
  //    the remainder.
  if (unparsedAttributes.includes(';')) {
    cookieAv = collectASequenceOfCodePointsFast(
      ';',
      unparsedAttributes,
      { position: 0 }
    )
    unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
  } else {
    cookieAv = unparsedAttributes
    unparsedAttributes = ''
  }

  // Let the cookie-av string be the characters consumed in this step.

  let attributeName = ''
  let attributeValue = ''

  // 4. If the cookie-av string contains a %x3D ("=") character, the
  //    attribute-name is everything before the first "=", the
  //    attribute-value everything after it. Otherwise the whole
  //    cookie-av is the name and the value is empty.
  if (cookieAv.includes('=')) {
    const position = { position: 0 }
    attributeName = collectASequenceOfCodePointsFast(
      '=',
      cookieAv,
      position
    )
    attributeValue = cookieAv.slice(position.position + 1)
  } else {
    attributeName = cookieAv
  }

  // 5. Remove any leading or trailing WSP characters from the attribute-
  //    name string and the attribute-value string.
  attributeName = attributeName.trim()
  attributeValue = attributeValue.trim()

  // 6. If the attribute-value is longer than 1024 octets, ignore the
  //    cookie-av string and return to Step 1 of this algorithm.
  if (attributeValue.length > maxAttributeValueSize) {
    return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
  }

  // 7. Process the attribute-name and attribute-value according to the
  //    requirements in the following subsections. (Notice that
  //    attributes with unrecognized attribute-names are ignored.)
  const attributeNameLowercase = attributeName.toLowerCase()

  if (attributeNameLowercase === 'expires') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
    // 1. Let the expiry-time be the result of parsing the attribute-value
    //    as cookie-date (see Section 5.1.1).
    const expiryTime = new Date(attributeValue)

    // 2. If the attribute-value failed to parse as a cookie date, ignore
    //    the cookie-av.
    // FIX: previously an Invalid Date object was stored instead of
    // ignoring the unparseable value as the spec (and the comment
    // above) requires.
    if (Number.isNaN(expiryTime.getTime())) {
      return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
    }

    cookieAttributeList.expires = expiryTime
  } else if (attributeNameLowercase === 'max-age') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
    // 1. If the first character of the attribute-value is not a DIGIT or a
    //    "-" character, ignore the cookie-av.
    const charCode = attributeValue.charCodeAt(0)

    if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
      return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
    }

    // 2. If the remainder of attribute-value contains a non-DIGIT
    //    character, ignore the cookie-av.
    if (!/^\d+$/.test(attributeValue)) {
      return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
    }

    // 3. Let delta-seconds be the attribute-value converted to an integer.
    const deltaSeconds = Number(attributeValue)

    // 4. Let cookie-age-limit be the maximum age of the cookie (which
    //    SHOULD be 400 days or less, see Section 4.1.2.2).

    // 5. Set delta-seconds to the smaller of its present value and cookie-
    //    age-limit.
    // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)

    // 6. If delta-seconds is less than or equal to zero (0), let expiry-
    //    time be the earliest representable date and time. Otherwise, let
    //    the expiry-time be the current date and time plus delta-seconds
    //    seconds.
    // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds

    // 7. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of Max-Age and an attribute-value of expiry-time.
    cookieAttributeList.maxAge = deltaSeconds
  } else if (attributeNameLowercase === 'domain') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
    // 1. Let cookie-domain be the attribute-value.
    let cookieDomain = attributeValue

    // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
    //    cookie-domain without its leading %x2E (".").
    if (cookieDomain[0] === '.') {
      cookieDomain = cookieDomain.slice(1)
    }

    // 3. Convert the cookie-domain to lower case.
    cookieDomain = cookieDomain.toLowerCase()

    // 4. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of Domain and an attribute-value of cookie-domain.
    cookieAttributeList.domain = cookieDomain
  } else if (attributeNameLowercase === 'path') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
    // 1. If the attribute-value is empty or if the first character of the
    //    attribute-value is not %x2F ("/"), use the default-path;
    //    otherwise use the attribute-value.
    let cookiePath = ''

    if (attributeValue.length === 0 || attributeValue[0] !== '/') {
      cookiePath = '/'
    } else {
      cookiePath = attributeValue
    }

    // 2. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of Path and an attribute-value of cookie-path.
    cookieAttributeList.path = cookiePath
  } else if (attributeNameLowercase === 'secure') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
    cookieAttributeList.secure = true
  } else if (attributeNameLowercase === 'httponly') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
    cookieAttributeList.httpOnly = true
  } else if (attributeNameLowercase === 'samesite') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
    // 1. Let enforcement be "Default".
    let enforcement = 'Default'

    const attributeValueLowercase = attributeValue.toLowerCase()

    // 2-4. Map "none" / "strict" / "lax" (case-insensitively) onto the
    //      corresponding enforcement value.
    if (attributeValueLowercase.includes('none')) {
      enforcement = 'None'
    }

    if (attributeValueLowercase.includes('strict')) {
      enforcement = 'Strict'
    }

    if (attributeValueLowercase.includes('lax')) {
      enforcement = 'Lax'
    }

    // 5. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of "SameSite" and an attribute-value of
    //    enforcement.
    cookieAttributeList.sameSite = enforcement
  } else {
    // Unrecognized attributes are collected verbatim.
    cookieAttributeList.unparsed ??= []

    cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
  }

  // 8. Return to Step 1 of this algorithm.
  return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
}
module . exports = {
parseSetCookie ,
parseUnparsedAttributes
}
/***/ } ) ,
/***/ 3121 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const assert = _ _nccwpck _require _ _ ( 9491 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
/**
 * Returns true when `value` contains a CTL character — 0x00-0x08,
 * 0x0A-0x1F, or 0x7F — i.e. any control character except HTAB (0x09).
 * Returns false otherwise (including for the empty string).
 *
 * FIX: the previous version used `||` where `&&` was intended, making
 * every range check vacuously true (code >= 0x00 always holds), and it
 * returned `false` on a match instead of `true`, so it never reported
 * a CTL character.
 * @param {string} value
 * @returns {boolean}
 */
function isCTLExcludingHtab (value) {
  for (const char of value) {
    const code = char.charCodeAt(0)

    if (
      (code >= 0x00 && code <= 0x08) ||
      (code >= 0x0A && code <= 0x1F) ||
      code === 0x7F
    ) {
      return true
    }
  }

  return false
}
/**
 CHAR           = <any US-ASCII character (octets 0 - 127)>
 token          = 1*<any CHAR except CTLs or separators>
 separators     = "(" | ")" | "<" | ">" | "@"
                | "," | ";" | ":" | "\" | <">
                | "/" | "[" | "]" | "?" | "="
                | "{" | "}" | SP | HT
 * Throws when `name` is not a valid RFC 2616 token.
 * @param {string} name
 */
function validateCookieName (name) {
  // Everything outside printable ASCII plus the RFC 2616 separator set
  // is forbidden in a cookie name.
  const SEPARATORS = '()<>@,;:\\"/[]?={}'

  for (let i = 0; i < name.length; i++) {
    const code = name.charCodeAt(i)

    if (code <= 0x20 || code > 0x7F || SEPARATORS.includes(name[i])) {
      throw new Error('Invalid cookie name')
    }
  }
}
/**
 cookie-value      = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
 cookie-octet      = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
                       ; US-ASCII characters excluding CTLs,
                       ; whitespace DQUOTE, comma, semicolon,
                       ; and backslash
 * Throws when `value` is not a valid cookie-value.
 *
 * FIX: the grammar above explicitly permits a value wrapped in a pair of
 * DQUOTEs, but the previous code rejected any '"'. A properly
 * quote-wrapped value is now accepted; a lone or unterminated quote
 * still throws.
 * @param {string} value
 */
function validateCookieValue (value) {
  let len = value.length
  let i = 0

  // Skip a wrapping DQUOTE pair; it must be complete to be valid.
  if (value[0] === '"') {
    if (len === 1 || value[len - 1] !== '"') {
      throw new Error('Invalid header value')
    }

    --len
    ++i
  }

  while (i < len) {
    const code = value.charCodeAt(i++)

    if (
      code < 0x21 || // exclude CTLs (0-31) and SP
      code === 0x22 ||
      code === 0x2C ||
      code === 0x3B ||
      code === 0x5C ||
      code > 0x7E // non-ascii
    ) {
      throw new Error('Invalid header value')
    }
  }
}
/**
 * path-value = <any CHAR except CTLs or ";">
 * Throws when `path` contains a character below 0x21 or a ';'.
 * @param {string} path
 */
function validateCookiePath (path) {
  for (let i = 0; i < path.length; i++) {
    const code = path.charCodeAt(i)

    // Reject control characters / space (below '!') and the attribute separator.
    if (code < 0x21 || path[i] === ';') {
      throw new Error('Invalid cookie path')
    }
  }
}
/**
 * Rejects domains with a leading '-' or a trailing '.'/'-'.
 * I have no idea why these values aren't allowed to be honest,
 * but Deno tests these. - Khafra
 * @param {string} domain
 */
function validateCookieDomain (domain) {
  // leading '-' or trailing '.'/'-' are invalid
  const malformed = /^-|[.-]$/.test(domain)

  if (malformed) {
    throw new Error('Invalid cookie domain')
  }
}
/**
 * Formats a Date (or epoch milliseconds) as an RFC 7231 IMF-fixdate
 * string, e.g. "Sun, 06 Nov 1994 08:49:37 GMT".
 * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
 * IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT
 *   date1       = day SP month SP year   ; e.g., 02 Jun 1982
 *   time-of-day = hour ":" minute ":" second ; 00:00:00 - 23:59:60
 *   day, hour, minute, second = 2DIGIT ; year = 4DIGIT
 *   day-name / month are the fixed, case-sensitive English
 *   three-letter abbreviations ("Mon".."Sun", "Jan".."Dec").
 * @param {number|Date} date
 * @returns {string}
 */
function toIMFDate (date) {
  const d = typeof date === 'number' ? new Date(date) : date

  const DAY_NAMES = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']
  const MONTH_NAMES = [
    'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
    'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
  ]

  // Two-digit zero padding for the fixed-width fields.
  const pad2 = (n) => n.toString().padStart(2, '0')

  return (
    `${DAY_NAMES[d.getUTCDay()]}, ${pad2(d.getUTCDate())} ` +
    `${MONTH_NAMES[d.getUTCMonth()]} ${d.getUTCFullYear()} ` +
    `${pad2(d.getUTCHours())}:${pad2(d.getUTCMinutes())}:${pad2(d.getUTCSeconds())} GMT`
  )
}
/**
 max-age-av = "Max-Age=" non-zero-digit *DIGIT
              ; In practice, both expires-av and max-age-av
              ; are limited to dates representable by the
              ; user agent.
 * Throws on a negative max-age.
 * @param {number} maxAge
 */
function validateCookieMaxAge (maxAge) {
  const isNegative = maxAge < 0

  if (isNegative) {
    throw new Error('Invalid cookie max-age')
  }
}
/**
 * Serializes a Cookie object into a Set-Cookie header value.
 * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
 * @param {import('./index').Cookie} cookie
 * @returns {string|null} null when the cookie name is empty
 * @throws when the name, value, max-age, domain, path, or an unparsed
 *   attribute fails validation
 */
function stringify (cookie) {
  if (cookie.name.length === 0) {
    return null
  }

  validateCookieName(cookie.name)
  validateCookieValue(cookie.value)

  const out = [`${cookie.name}=${cookie.value}`]

  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
  // NOTE: the prefix rules intentionally mutate the caller's cookie object.
  if (cookie.name.startsWith('__Secure-')) {
    cookie.secure = true
  }

  if (cookie.name.startsWith('__Host-')) {
    cookie.secure = true
    cookie.domain = null
    cookie.path = '/'
  }

  if (cookie.secure) {
    out.push('Secure')
  }

  if (cookie.httpOnly) {
    out.push('HttpOnly')
  }

  if (typeof cookie.maxAge === 'number') {
    validateCookieMaxAge(cookie.maxAge)
    out.push(`Max-Age=${cookie.maxAge}`)
  }

  if (cookie.domain) {
    validateCookieDomain(cookie.domain)
    out.push(`Domain=${cookie.domain}`)
  }

  if (cookie.path) {
    validateCookiePath(cookie.path)
    out.push(`Path=${cookie.path}`)
  }

  if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
    out.push(`Expires=${toIMFDate(cookie.expires)}`)
  }

  if (cookie.sameSite) {
    out.push(`SameSite=${cookie.sameSite}`)
  }

  // FIX: guard against a missing `unparsed` list — iterating undefined
  // previously threw a TypeError for cookies constructed without any
  // unparsed attributes.
  for (const part of cookie.unparsed ?? []) {
    if (!part.includes('=')) {
      throw new Error('Invalid unparsed')
    }

    const [key, ...value] = part.split('=')

    out.push(`${key.trim()}=${value.join('=')}`)
  }

  return out.join('; ')
}
// Cached symbol for the fallback lookup below; resolved at most once.
let kHeadersListNode

/**
 * Returns the internal headers list backing a Headers object — directly
 * via the kHeadersList symbol when present, otherwise by locating the
 * own symbol whose description is 'headers list' on the instance.
 * @throws when neither lookup yields a headers list
 */
function getHeadersList (headers) {
  const direct = headers[kHeadersList]
  if (direct) {
    return direct
  }

  if (!kHeadersListNode) {
    const ownSymbols = Object.getOwnPropertySymbols(headers)
    kHeadersListNode = ownSymbols.find(
      (symbol) => symbol.description === 'headers list'
    )

    assert(kHeadersListNode, 'Headers cannot be parsed')
  }

  const fallback = headers[kHeadersListNode]
  assert(fallback)

  return fallback
}
module . exports = {
isCTLExcludingHtab ,
stringify ,
getHeadersList
}
/***/ } ) ,
/***/ 2067 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const net = _ _nccwpck _require _ _ ( 1808 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { InvalidArgumentError , ConnectTimeoutError } = _ _nccwpck _require _ _ ( 8045 )
let tls // include tls conditionally since it is not always available
// TODO: session re-use does not wait for the first
// connection to resolve the session and might therefore
// resolve the same servername multiple times even when
// re-use is enabled.
let SessionCache
if ( global . FinalizationRegistry ) {
SessionCache = class WeakSessionCache {
constructor ( maxCachedSessions ) {
this . _maxCachedSessions = maxCachedSessions
this . _sessionCache = new Map ( )
this . _sessionRegistry = new global . FinalizationRegistry ( ( key ) => {
if ( this . _sessionCache . size < this . _maxCachedSessions ) {
return
}
const ref = this . _sessionCache . get ( key )
if ( ref !== undefined && ref . deref ( ) === undefined ) {
this . _sessionCache . delete ( key )
}
} )
}
get ( sessionKey ) {
const ref = this . _sessionCache . get ( sessionKey )
return ref ? ref . deref ( ) : null
}
set ( sessionKey , session ) {
if ( this . _maxCachedSessions === 0 ) {
return
}
this . _sessionCache . set ( sessionKey , new WeakRef ( session ) )
this . _sessionRegistry . register ( session , sessionKey )
}
}
} else {
SessionCache = class SimpleSessionCache {
constructor ( maxCachedSessions ) {
this . _maxCachedSessions = maxCachedSessions
this . _sessionCache = new Map ( )
}
get ( sessionKey ) {
return this . _sessionCache . get ( sessionKey )
}
set ( sessionKey , session ) {
if ( this . _maxCachedSessions === 0 ) {
return
}
if ( this . _sessionCache . size >= this . _maxCachedSessions ) {
// remove the oldest session
const { value : oldestKey } = this . _sessionCache . keys ( ) . next ( )
this . _sessionCache . delete ( oldestKey )
}
this . _sessionCache . set ( sessionKey , session )
}
}
}
/**
 * Builds a connector function that creates TCP or TLS sockets for requests.
 * @param {Object} opts
 * @param {number} [opts.maxCachedSessions=100] max TLS sessions kept for resumption
 * @param {string} [opts.socketPath] unix socket path (forwarded as `path`)
 * @param {number} [opts.timeout=10000] connect timeout in milliseconds
 * @returns {Function} connect({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback)
 */
function buildConnector ({ maxCachedSessions, socketPath, timeout, ...opts }) {
  if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
    throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
  }

  const options = { path: socketPath, ...opts }
  const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
  timeout = timeout == null ? 10e3 : timeout

  return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
    let socket
    if (protocol === 'https:') {
      if (!tls) {
        // Lazy-require tls so environments without TLS support can still load this module.
        tls = __nccwpck_require__(4404)
      }
      servername = servername || options.servername || util.getServerName(host) || null

      const sessionKey = servername || hostname
      // Reuse a cached TLS session for this key when available (faster handshakes).
      const session = sessionCache.get(sessionKey) || null

      assert(sessionKey)

      socket = tls.connect({
        highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
        ...options,
        servername,
        session,
        localAddress,
        socket: httpSocket, // upgrade socket connection
        port: port || 443,
        host: hostname
      })

      socket
        .on('session', function (session) {
          // TODO (fix): Can a session become invalid once established? Don't think so?
          sessionCache.set(sessionKey, session)
        })
    } else {
      assert(!httpSocket, 'httpSocket can only be sent on TLS update')
      socket = net.connect({
        highWaterMark: 64 * 1024, // Same as nodejs fs streams.
        ...options,
        localAddress,
        port: port || 80,
        host: hostname
      })
    }

    // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
    if (options.keepAlive == null || options.keepAlive) {
      const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
      socket.setKeepAlive(true, keepAliveInitialDelay)
    }

    const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)

    socket
      .setNoDelay(true)
      .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
        cancelTimeout()

        if (callback) {
          // Null the callback before invoking so it fires at most once
          // (the 'error' handler below shares the same guard).
          const cb = callback
          callback = null
          cb(null, this)
        }
      })
      .on('error', function (err) {
        cancelTimeout()

        if (callback) {
          const cb = callback
          callback = null
          cb(err)
        }
      })

    return socket
  }
}
/**
 * Arms a connect-timeout watchdog.
 * @param {Function} onConnectTimeout invoked when the timeout elapses
 * @param {number} timeout milliseconds; falsy disables the watchdog
 * @returns {Function} cancel function that disarms the timer and any
 *   pending setImmediate callbacks
 */
function setupTimeout (onConnectTimeout, timeout) {
  // No timeout requested: hand back an inert canceller.
  if (!timeout) {
    return () => {}
  }

  let immediate1 = null
  let immediate2 = null

  const timer = setTimeout(() => {
    // setImmediate is added to make sure that we priotorise socket error events over timeouts
    immediate1 = setImmediate(() => {
      if (process.platform !== 'win32') {
        onConnectTimeout()
        return
      }
      // Windows needs an extra setImmediate probably due to implementation differences in the socket logic
      immediate2 = setImmediate(() => onConnectTimeout())
    })
  }, timeout)

  return () => {
    clearTimeout(timer)
    clearImmediate(immediate1)
    clearImmediate(immediate2)
  }
}
function onConnectTimeout ( socket ) {
util . destroy ( socket , new ConnectTimeoutError ( ) )
}
module . exports = buildConnector
/***/ } ) ,
/***/ 8045 :
/***/ ( ( module ) => {
"use strict" ;
// Base class for all undici errors; carries a stable machine-readable `code`.
class UndiciError extends Error {
  constructor (message) {
    super(message)
    this.name = 'UndiciError'
    this.code = 'UND_ERR'
  }
}

// The socket did not (secure-)connect within the configured timeout.
class ConnectTimeoutError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ConnectTimeoutError)
    this.name = 'ConnectTimeoutError'
    this.message = message || 'Connect Timeout Error'
    this.code = 'UND_ERR_CONNECT_TIMEOUT'
  }
}

// Response headers were not received within the headers timeout.
class HeadersTimeoutError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, HeadersTimeoutError)
    this.name = 'HeadersTimeoutError'
    this.message = message || 'Headers Timeout Error'
    this.code = 'UND_ERR_HEADERS_TIMEOUT'
  }
}

// Response headers exceeded the configured maximum size.
class HeadersOverflowError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, HeadersOverflowError)
    this.name = 'HeadersOverflowError'
    this.message = message || 'Headers Overflow Error'
    this.code = 'UND_ERR_HEADERS_OVERFLOW'
  }
}

// The response body was not received within the body timeout.
class BodyTimeoutError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, BodyTimeoutError)
    this.name = 'BodyTimeoutError'
    this.message = message || 'Body Timeout Error'
    this.code = 'UND_ERR_BODY_TIMEOUT'
  }
}

// Raised for error-status responses when throwOnError is enabled; keeps the
// status code, headers, and body for inspection by the caller.
class ResponseStatusCodeError extends UndiciError {
  constructor (message, statusCode, headers, body) {
    super(message)
    Error.captureStackTrace(this, ResponseStatusCodeError)
    this.name = 'ResponseStatusCodeError'
    this.message = message || 'Response Status Code Error'
    this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
    this.body = body
    this.status = statusCode
    this.statusCode = statusCode
    this.headers = headers
  }
}

// A caller-supplied argument failed validation.
class InvalidArgumentError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, InvalidArgumentError)
    this.name = 'InvalidArgumentError'
    this.message = message || 'Invalid Argument Error'
    this.code = 'UND_ERR_INVALID_ARG'
  }
}

// A user-provided handler/factory returned an unusable value.
class InvalidReturnValueError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, InvalidReturnValueError)
    this.name = 'InvalidReturnValueError'
    this.message = message || 'Invalid Return Value Error'
    this.code = 'UND_ERR_INVALID_RETURN_VALUE'
  }
}

// The request was aborted by the caller. Note: name is 'AbortError' to
// match the DOM convention.
class RequestAbortedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, RequestAbortedError)
    this.name = 'AbortError'
    this.message = message || 'Request aborted'
    this.code = 'UND_ERR_ABORTED'
  }
}

// Expected, non-fatal condition surfaced as an error (e.g. 1xx handling).
class InformationalError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, InformationalError)
    this.name = 'InformationalError'
    this.message = message || 'Request information'
    this.code = 'UND_ERR_INFO'
  }
}

// Request body length did not match the content-length header.
class RequestContentLengthMismatchError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, RequestContentLengthMismatchError)
    this.name = 'RequestContentLengthMismatchError'
    this.message = message || 'Request body length does not match content-length header'
    this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
  }
}

// Response body length did not match the content-length header.
class ResponseContentLengthMismatchError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ResponseContentLengthMismatchError)
    this.name = 'ResponseContentLengthMismatchError'
    this.message = message || 'Response body length does not match content-length header'
    this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
  }
}

// Operation attempted on a destroyed client.
class ClientDestroyedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ClientDestroyedError)
    this.name = 'ClientDestroyedError'
    this.message = message || 'The client is destroyed'
    this.code = 'UND_ERR_DESTROYED'
  }
}

// Operation attempted on a closed client.
class ClientClosedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ClientClosedError)
    this.name = 'ClientClosedError'
    this.message = message || 'The client is closed'
    this.code = 'UND_ERR_CLOSED'
  }
}

// Low-level socket failure; retains a reference to the offending socket.
class SocketError extends UndiciError {
  constructor (message, socket) {
    super(message)
    Error.captureStackTrace(this, SocketError)
    this.name = 'SocketError'
    this.message = message || 'Socket error'
    this.code = 'UND_ERR_SOCKET'
    this.socket = socket
  }
}

// Requested feature is not supported by this build/runtime.
class NotSupportedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, NotSupportedError)
    this.name = 'NotSupportedError'
    this.message = message || 'Not supported error'
    this.code = 'UND_ERR_NOT_SUPPORTED'
  }
}

// A BalancedPool was used before any upstream was added.
class BalancedPoolMissingUpstreamError extends UndiciError {
  constructor (message) {
    super(message)
    // FIX: previously passed NotSupportedError (copy-paste), which would
    // trim the wrong frames from the captured stack trace.
    Error.captureStackTrace(this, BalancedPoolMissingUpstreamError)
    this.name = 'MissingUpstreamError'
    this.message = message || 'No upstream has been added to the BalancedPool'
    this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
  }
}

// Raised by the HTTP parser; extends Error directly (not UndiciError) and
// carries the llhttp error code plus the offending data, when available.
class HTTPParserError extends Error {
  constructor (message, code, data) {
    super(message)
    Error.captureStackTrace(this, HTTPParserError)
    this.name = 'HTTPParserError'
    this.code = code ? `HPE_${code}` : undefined
    this.data = data ? data.toString() : undefined
  }
}

// The response body grew beyond the caller-specified maximum size.
class ResponseExceededMaxSizeError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ResponseExceededMaxSizeError)
    this.name = 'ResponseExceededMaxSizeError'
    this.message = message || 'Response content exceeded max size'
    this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
  }
}
module . exports = {
HTTPParserError ,
UndiciError ,
HeadersTimeoutError ,
HeadersOverflowError ,
BodyTimeoutError ,
RequestContentLengthMismatchError ,
ConnectTimeoutError ,
ResponseStatusCodeError ,
InvalidArgumentError ,
InvalidReturnValueError ,
RequestAbortedError ,
ClientDestroyedError ,
ClientClosedError ,
InformationalError ,
SocketError ,
NotSupportedError ,
ResponseContentLengthMismatchError ,
BalancedPoolMissingUpstreamError ,
ResponseExceededMaxSizeError
}
/***/ } ) ,
/***/ 2905 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
InvalidArgumentError ,
NotSupportedError
} = _ _nccwpck _require _ _ ( 8045 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const util = _ _nccwpck _require _ _ ( 3983 )
// tokenRegExp and headerCharRegex have been lifted from
// https://github.com/nodejs/node/blob/main/lib/_http_common.js

/**
 * Verifies that the given val is a valid HTTP token
 * per the rules defined in RFC 7230
 * See https://tools.ietf.org/html/rfc7230#section-3.2.6
 */
const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/

/**
 * Matches if val contains an invalid field-vchar
 *  field-value    = *( field-content / obs-fold )
 *  field-content  = field-vchar [ 1*( SP / HTAB ) field-vchar ]
 *  field-vchar    = VCHAR / obs-text
 */
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/

// Matches any character outside \u0021-\u00ff; used to reject request
// paths containing control characters or whitespace.
const invalidPathRegex = /[^\u0021-\u00ff]/

// Symbol under which the user-supplied handler is stored on Request instances.
const kHandler = Symbol('handler')

// diagnostics_channel publishers for request lifecycle events.
const channels = {}

// Lazily loaded to avoid a circular dependency; resolved on first form-data body.
let extractBody

try {
  const diagnosticsChannel = __nccwpck_require__(7643)
  channels.create = diagnosticsChannel.channel('undici:request:create')
  channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
  channels.headers = diagnosticsChannel.channel('undici:request:headers')
  channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
  channels.error = diagnosticsChannel.channel('undici:request:error')
} catch {
  // diagnostics_channel unavailable: substitute inert stubs so publish
  // sites can still check `hasSubscribers` cheaply.
  channels.create = { hasSubscribers: false }
  channels.bodySent = { hasSubscribers: false }
  channels.headers = { hasSubscribers: false }
  channels.trailers = { hasSubscribers: false }
  channels.error = { hasSubscribers: false }
}
class Request {
constructor ( origin , {
path ,
method ,
body ,
headers ,
query ,
idempotent ,
blocking ,
upgrade ,
headersTimeout ,
bodyTimeout ,
reset ,
throwOnError
} , handler ) {
if ( typeof path !== 'string' ) {
throw new InvalidArgumentError ( 'path must be a string' )
} else if (
path [ 0 ] !== '/' &&
! ( path . startsWith ( 'http://' ) || path . startsWith ( 'https://' ) ) &&
method !== 'CONNECT'
) {
throw new InvalidArgumentError ( 'path must be an absolute URL or start with a slash' )
} else if ( invalidPathRegex . exec ( path ) !== null ) {
throw new InvalidArgumentError ( 'invalid request path' )
}
if ( typeof method !== 'string' ) {
throw new InvalidArgumentError ( 'method must be a string' )
} else if ( tokenRegExp . exec ( method ) === null ) {
throw new InvalidArgumentError ( 'invalid request method' )
}
if ( upgrade && typeof upgrade !== 'string' ) {
throw new InvalidArgumentError ( 'upgrade must be a string' )
}
if ( headersTimeout != null && ( ! Number . isFinite ( headersTimeout ) || headersTimeout < 0 ) ) {
throw new InvalidArgumentError ( 'invalid headersTimeout' )
}
if ( bodyTimeout != null && ( ! Number . isFinite ( bodyTimeout ) || bodyTimeout < 0 ) ) {
throw new InvalidArgumentError ( 'invalid bodyTimeout' )
}
if ( reset != null && typeof reset !== 'boolean' ) {
throw new InvalidArgumentError ( 'invalid reset' )
}
this . headersTimeout = headersTimeout
this . bodyTimeout = bodyTimeout
this . throwOnError = throwOnError === true
this . method = method
if ( body == null ) {
this . body = null
} else if ( util . isStream ( body ) ) {
this . body = body
} else if ( util . isBuffer ( body ) ) {
this . body = body . byteLength ? body : null
} else if ( ArrayBuffer . isView ( body ) ) {
this . body = body . buffer . byteLength ? Buffer . from ( body . buffer , body . byteOffset , body . byteLength ) : null
} else if ( body instanceof ArrayBuffer ) {
this . body = body . byteLength ? Buffer . from ( body ) : null
} else if ( typeof body === 'string' ) {
this . body = body . length ? Buffer . from ( body ) : null
} else if ( util . isFormDataLike ( body ) || util . isIterable ( body ) || util . isBlobLike ( body ) ) {
this . body = body
} else {
throw new InvalidArgumentError ( 'body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable' )
}
this . completed = false
this . aborted = false
this . upgrade = upgrade || null
this . path = query ? util . buildURL ( path , query ) : path
this . origin = origin
this . idempotent = idempotent == null
? method === 'HEAD' || method === 'GET'
: idempotent
this . blocking = blocking == null ? false : blocking
this . reset = reset == null ? null : reset
this . host = null
this . contentLength = null
this . contentType = null
this . headers = ''
if ( Array . isArray ( headers ) ) {
if ( headers . length % 2 !== 0 ) {
throw new InvalidArgumentError ( 'headers array must be even' )
}
for ( let i = 0 ; i < headers . length ; i += 2 ) {
processHeader ( this , headers [ i ] , headers [ i + 1 ] )
}
} else if ( headers && typeof headers === 'object' ) {
const keys = Object . keys ( headers )
for ( let i = 0 ; i < keys . length ; i ++ ) {
const key = keys [ i ]
processHeader ( this , key , headers [ key ] )
}
} else if ( headers != null ) {
throw new InvalidArgumentError ( 'headers must be an object or an array' )
}
if ( util . isFormDataLike ( this . body ) ) {
if ( util . nodeMajor < 16 || ( util . nodeMajor === 16 && util . nodeMinor < 8 ) ) {
throw new InvalidArgumentError ( 'Form-Data bodies are only supported in node v16.8 and newer.' )
}
if ( ! extractBody ) {
extractBody = ( _ _nccwpck _require _ _ ( 9990 ) . extractBody )
}
const [ bodyStream , contentType ] = extractBody ( body )
if ( this . contentType == null ) {
this . contentType = contentType
this . headers += ` content-type: ${ contentType } \r \n `
}
this . body = bodyStream . stream
this . contentLength = bodyStream . length
} else if ( util . isBlobLike ( body ) && this . contentType == null && body . type ) {
this . contentType = body . type
this . headers += ` content-type: ${ body . type } \r \n `
}
util . validateHandler ( handler , method , upgrade )
this . servername = util . getServerName ( this . host )
this [ kHandler ] = handler
if ( channels . create . hasSubscribers ) {
channels . create . publish ( { request : this } )
}
}
onBodySent ( chunk ) {
if ( this [ kHandler ] . onBodySent ) {
try {
this [ kHandler ] . onBodySent ( chunk )
} catch ( err ) {
this . onError ( err )
}
}
}
// Publishes the bodySent diagnostics_channel event for this request.
onRequestSent () {
  if (channels.bodySent.hasSubscribers) {
    channels.bodySent.publish({ request: this })
  }
}
// Forwards the connection event (with the abort callback) to the handler.
// Must not be called on an aborted or completed request.
onConnect (abort) {
  assert(!this.aborted)
  assert(!this.completed)
  return this[kHandler].onConnect(abort)
}
onHeaders ( statusCode , headers , resume , statusText ) {
assert ( ! this . aborted )
assert ( ! this . completed )
if ( channels . headers . hasSubscribers ) {
channels . headers . publish ( { request : this , response : { statusCode , headers , statusText } } )
}
return this [ kHandler ] . onHeaders ( statusCode , headers , resume , statusText )
}
// Forwards a response body chunk to the handler; the handler's return
// value is propagated back to the caller.
onData (chunk) {
  assert(!this.aborted)
  assert(!this.completed)
  return this[kHandler].onData(chunk)
}
// Forwards a protocol upgrade (e.g. CONNECT/Upgrade) to the handler,
// handing over the raw socket.
onUpgrade (statusCode, headers, socket) {
  assert(!this.aborted)
  assert(!this.completed)
  return this[kHandler].onUpgrade(statusCode, headers, socket)
}
// Marks the request complete, publishes the trailers diagnostics event,
// then notifies the handler.
onComplete (trailers) {
  assert(!this.aborted)
  this.completed = true
  if (channels.trailers.hasSubscribers) {
    channels.trailers.publish({ request: this, trailers })
  }
  return this[kHandler].onComplete(trailers)
}
// Publishes the error diagnostics event, then forwards the error to the
// handler exactly once: later errors after abort are dropped.
onError (error) {
  if (channels.error.hasSubscribers) {
    channels.error.publish({ request: this, error })
  }
  if (this.aborted) {
    return
  }
  this.aborted = true
  return this[kHandler].onError(error)
}
// Appends a header after construction; chainable (returns this).
addHeader (key, value) {
  processHeader(this, key, value)
  return this
}
}
/**
 * Serializes a single header line. Rejects object values and values that
 * contain characters illegal in an HTTP header; null/undefined become ''.
 * @param {string} key
 * @param {*} val
 * @returns {string} `key: value\r\n`
 * @throws {InvalidArgumentError}
 */
function processHeaderValue (key, val) {
  if (val && typeof val === 'object') {
    throw new InvalidArgumentError(`invalid ${key} header`)
  }
  const value = val != null ? `${val}` : ''
  if (headerCharRegex.exec(value) !== null) {
    throw new InvalidArgumentError(`invalid ${key} header`)
  }
  return `${key}: ${value}\r\n`
}
function processHeader ( request , key , val ) {
if ( val && ( typeof val === 'object' && ! Array . isArray ( val ) ) ) {
throw new InvalidArgumentError ( ` invalid ${ key } header ` )
} else if ( val === undefined ) {
return
}
if (
request . host === null &&
key . length === 4 &&
key . toLowerCase ( ) === 'host'
) {
if ( headerCharRegex . exec ( val ) !== null ) {
throw new InvalidArgumentError ( ` invalid ${ key } header ` )
}
// Consumed by Client
request . host = val
} else if (
request . contentLength === null &&
key . length === 14 &&
key . toLowerCase ( ) === 'content-length'
) {
request . contentLength = parseInt ( val , 10 )
if ( ! Number . isFinite ( request . contentLength ) ) {
throw new InvalidArgumentError ( 'invalid content-length header' )
}
} else if (
request . contentType === null &&
key . length === 12 &&
key . toLowerCase ( ) === 'content-type'
) {
request . contentType = val
request . headers += processHeaderValue ( key , val )
} else if (
key . length === 17 &&
key . toLowerCase ( ) === 'transfer-encoding'
) {
throw new InvalidArgumentError ( 'invalid transfer-encoding header' )
} else if (
key . length === 10 &&
key . toLowerCase ( ) === 'connection'
) {
const value = typeof val === 'string' ? val . toLowerCase ( ) : null
if ( value !== 'close' && value !== 'keep-alive' ) {
throw new InvalidArgumentError ( 'invalid connection header' )
} else if ( value === 'close' ) {
request . reset = true
}
} else if (
key . length === 10 &&
key . toLowerCase ( ) === 'keep-alive'
) {
throw new InvalidArgumentError ( 'invalid keep-alive header' )
} else if (
key . length === 7 &&
key . toLowerCase ( ) === 'upgrade'
) {
throw new InvalidArgumentError ( 'invalid upgrade header' )
} else if (
key . length === 6 &&
key . toLowerCase ( ) === 'expect'
) {
throw new NotSupportedError ( 'expect header not supported' )
} else if ( tokenRegExp . exec ( key ) === null ) {
throw new InvalidArgumentError ( 'invalid header key' )
} else {
if ( Array . isArray ( val ) ) {
for ( let i = 0 ; i < val . length ; i ++ ) {
request . headers += processHeaderValue ( key , val [ i ] )
}
} else {
request . headers += processHeaderValue ( key , val )
}
}
}
module . exports = Request
/***/ } ) ,
/***/ 2785 :
/***/ ( ( module ) => {
module . exports = {
kClose : Symbol ( 'close' ) ,
kDestroy : Symbol ( 'destroy' ) ,
kDispatch : Symbol ( 'dispatch' ) ,
kUrl : Symbol ( 'url' ) ,
kWriting : Symbol ( 'writing' ) ,
kResuming : Symbol ( 'resuming' ) ,
kQueue : Symbol ( 'queue' ) ,
kConnect : Symbol ( 'connect' ) ,
kConnecting : Symbol ( 'connecting' ) ,
kHeadersList : Symbol ( 'headers list' ) ,
kKeepAliveDefaultTimeout : Symbol ( 'default keep alive timeout' ) ,
kKeepAliveMaxTimeout : Symbol ( 'max keep alive timeout' ) ,
kKeepAliveTimeoutThreshold : Symbol ( 'keep alive timeout threshold' ) ,
kKeepAliveTimeoutValue : Symbol ( 'keep alive timeout' ) ,
kKeepAlive : Symbol ( 'keep alive' ) ,
kHeadersTimeout : Symbol ( 'headers timeout' ) ,
kBodyTimeout : Symbol ( 'body timeout' ) ,
kServerName : Symbol ( 'server name' ) ,
kLocalAddress : Symbol ( 'local address' ) ,
kHost : Symbol ( 'host' ) ,
kNoRef : Symbol ( 'no ref' ) ,
kBodyUsed : Symbol ( 'used' ) ,
kRunning : Symbol ( 'running' ) ,
kBlocking : Symbol ( 'blocking' ) ,
kPending : Symbol ( 'pending' ) ,
kSize : Symbol ( 'size' ) ,
kBusy : Symbol ( 'busy' ) ,
kQueued : Symbol ( 'queued' ) ,
kFree : Symbol ( 'free' ) ,
kConnected : Symbol ( 'connected' ) ,
kClosed : Symbol ( 'closed' ) ,
kNeedDrain : Symbol ( 'need drain' ) ,
kReset : Symbol ( 'reset' ) ,
kDestroyed : Symbol . for ( 'nodejs.stream.destroyed' ) ,
kMaxHeadersSize : Symbol ( 'max headers size' ) ,
kRunningIdx : Symbol ( 'running index' ) ,
kPendingIdx : Symbol ( 'pending index' ) ,
kError : Symbol ( 'error' ) ,
kClients : Symbol ( 'clients' ) ,
kClient : Symbol ( 'client' ) ,
kParser : Symbol ( 'parser' ) ,
kOnDestroyed : Symbol ( 'destroy callbacks' ) ,
kPipelining : Symbol ( 'pipelining' ) ,
kSocket : Symbol ( 'socket' ) ,
kHostHeader : Symbol ( 'host header' ) ,
kConnector : Symbol ( 'connector' ) ,
kStrictContentLength : Symbol ( 'strict content length' ) ,
kMaxRedirections : Symbol ( 'maxRedirections' ) ,
kMaxRequests : Symbol ( 'maxRequestsPerClient' ) ,
kProxy : Symbol ( 'proxy agent options' ) ,
kCounter : Symbol ( 'socket request counter' ) ,
kInterceptors : Symbol ( 'dispatch interceptors' ) ,
kMaxResponseSize : Symbol ( 'max response size' )
}
/***/ } ) ,
/***/ 3983 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const assert = _ _nccwpck _require _ _ ( 9491 )
const { kDestroyed , kBodyUsed } = _ _nccwpck _require _ _ ( 2785 )
const { IncomingMessage } = _ _nccwpck _require _ _ ( 3685 )
const stream = _ _nccwpck _require _ _ ( 2781 )
const net = _ _nccwpck _require _ _ ( 1808 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const { Blob } = _ _nccwpck _require _ _ ( 4300 )
const nodeUtil = _ _nccwpck _require _ _ ( 3837 )
const { stringify } = _ _nccwpck _require _ _ ( 3477 )
const [ nodeMajor , nodeMinor ] = process . versions . node . split ( '.' ) . map ( v => Number ( v ) )
function nop ( ) { }
/**
 * Duck-types a Node stream: any object exposing pipe() and on().
 * Returns a falsy passthrough (not strictly `false`) for non-objects.
 */
function isStream (obj) {
  return obj && typeof obj === 'object' &&
    typeof obj.pipe === 'function' &&
    typeof obj.on === 'function'
}
// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
/**
 * Detects Blob/File instances, including cross-realm and polyfilled ones
 * (via stream()/arrayBuffer() methods plus the Blob/File toStringTag).
 */
function isBlobLike (object) {
  if (Blob && object instanceof Blob) {
    return true
  }
  return (
    object &&
    typeof object === 'object' &&
    (typeof object.stream === 'function' ||
      typeof object.arrayBuffer === 'function') &&
    /^(Blob|File)$/.test(object[Symbol.toStringTag])
  )
}
/**
 * Appends `queryParams` (querystring-serialized) to `url`.
 * @param {string} url must not already contain a query or fragment
 * @param {object} queryParams
 * @returns {string}
 * @throws {Error} when url already contains '?' or '#'
 */
function buildURL (url, queryParams) {
  if (url.includes('?') || url.includes('#')) {
    throw new Error('Query params cannot be passed when url already contains "?" or "#".')
  }
  const qs = stringify(queryParams)
  return qs ? `${url}?${qs}` : url
}
function parseURL ( url ) {
if ( typeof url === 'string' ) {
url = new URL ( url )
if ( ! /^https?:/ . test ( url . origin || url . protocol ) ) {
throw new InvalidArgumentError ( 'Invalid URL protocol: the URL must start with `http:` or `https:`.' )
}
return url
}
if ( ! url || typeof url !== 'object' ) {
throw new InvalidArgumentError ( 'Invalid URL: The URL argument must be a non-null object.' )
}
if ( url . port != null && url . port !== '' && ! Number . isFinite ( parseInt ( url . port ) ) ) {
throw new InvalidArgumentError ( 'Invalid URL: port must be a valid integer or a string representation of an integer.' )
}
if ( url . path != null && typeof url . path !== 'string' ) {
throw new InvalidArgumentError ( 'Invalid URL path: the path must be a string or null/undefined.' )
}
if ( url . pathname != null && typeof url . pathname !== 'string' ) {
throw new InvalidArgumentError ( 'Invalid URL pathname: the pathname must be a string or null/undefined.' )
}
if ( url . hostname != null && typeof url . hostname !== 'string' ) {
throw new InvalidArgumentError ( 'Invalid URL hostname: the hostname must be a string or null/undefined.' )
}
if ( url . origin != null && typeof url . origin !== 'string' ) {
throw new InvalidArgumentError ( 'Invalid URL origin: the origin must be a string or null/undefined.' )
}
if ( ! /^https?:/ . test ( url . origin || url . protocol ) ) {
throw new InvalidArgumentError ( 'Invalid URL protocol: the URL must start with `http:` or `https:`.' )
}
if ( ! ( url instanceof URL ) ) {
const port = url . port != null
? url . port
: ( url . protocol === 'https:' ? 443 : 80 )
let origin = url . origin != null
? url . origin
: ` ${ url . protocol } // ${ url . hostname } : ${ port } `
let path = url . path != null
? url . path
: ` ${ url . pathname || '' } ${ url . search || '' } `
if ( origin . endsWith ( '/' ) ) {
origin = origin . substring ( 0 , origin . length - 1 )
}
if ( path && ! path . startsWith ( '/' ) ) {
path = ` / ${ path } `
}
// new URL(path, origin) is unsafe when `path` contains an absolute URL
// From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
// If first parameter is a relative URL, second param is required, and will be used as the base URL.
// If first parameter is an absolute URL, a given second param will be ignored.
url = new URL ( origin + path )
}
return url
}
/**
 * Parses `url` and additionally requires it to be a bare origin:
 * no path (other than '/'), query, or fragment.
 * @returns {URL}
 * @throws {InvalidArgumentError}
 */
function parseOrigin (url) {
  const parsed = parseURL(url)
  if (parsed.pathname !== '/' || parsed.search || parsed.hash) {
    throw new InvalidArgumentError('invalid url')
  }
  return parsed
}
/**
 * Strips an optional :port suffix from a host string.
 * Handles bracketed IPv6 literals, e.g. "[::1]:8080" -> "::1".
 */
function getHostname (host) {
  if (host[0] === '[') {
    const closing = host.indexOf(']')
    assert(closing !== -1)
    return host.slice(1, closing)
  }
  const colon = host.indexOf(':')
  return colon === -1 ? host : host.slice(0, colon)
}
// IP addresses are not valid server names per RFC6066
// > Currently, the only server names supported are DNS hostnames
/**
 * Derives the TLS SNI server name from a host string.
 * @returns {string|null} '' for IP literals, null for falsy host
 */
function getServerName (host) {
  if (!host) {
    return null
  }
  assert.strictEqual(typeof host, 'string')
  const servername = getHostname(host)
  return net.isIP(servername) ? '' : servername
}
function deepClone ( obj ) {
return JSON . parse ( JSON . stringify ( obj ) )
}
/** True when `obj` implements the async-iterable protocol. */
function isAsyncIterable (obj) {
  if (obj == null) {
    return false
  }
  return typeof obj[Symbol.asyncIterator] === 'function'
}
/** True when `obj` implements the sync OR async iterable protocol. */
function isIterable (obj) {
  if (obj == null) {
    return false
  }
  return typeof obj[Symbol.iterator] === 'function' ||
    typeof obj[Symbol.asyncIterator] === 'function'
}
/**
 * Best-effort byte length of a request body.
 * @returns {number|null} 0 for null/undefined; null when unknown
 */
function bodyLength (body) {
  if (body == null) {
    return 0
  }
  if (isStream(body)) {
    // Only a fully-ended readable has a trustworthy buffered length.
    const state = body._readableState
    return state && state.ended === true && Number.isFinite(state.length)
      ? state.length
      : null
  }
  if (isBlobLike(body)) {
    return body.size != null ? body.size : null
  }
  if (isBuffer(body)) {
    return body.byteLength
  }
  return null
}
// True when `stream` is falsy, already destroyed, or tagged via kDestroyed.
function isDestroyed (stream) {
  return !stream || Boolean(stream.destroyed || stream[kDestroyed])
}
// A readable was aborted when it is destroyed without ever emitting 'end'.
function isReadableAborted (stream) {
  if (!isDestroyed(stream)) {
    return false
  }
  const state = stream && stream._readableState
  return state && !state.endEmitted
}
function destroy ( stream , err ) {
if ( ! isStream ( stream ) || isDestroyed ( stream ) ) {
return
}
if ( typeof stream . destroy === 'function' ) {
if ( Object . getPrototypeOf ( stream ) . constructor === IncomingMessage ) {
// See: https://github.com/nodejs/node/pull/38505/files
stream . socket = null
}
stream . destroy ( err )
} else if ( err ) {
process . nextTick ( ( stream , err ) => {
stream . emit ( 'error' , err )
} , stream , err )
}
if ( stream . destroyed !== true ) {
stream [ kDestroyed ] = true
}
}
const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/

/**
 * Extracts the `timeout` parameter from a Keep-Alive response header.
 * @param {string|Buffer} val
 * @returns {number|null} timeout in milliseconds, or null when absent
 */
function parseKeepAliveTimeout (val) {
  const match = KEEPALIVE_TIMEOUT_EXPR.exec(val.toString())
  return match ? Number.parseInt(match[1], 10) * 1000 : null
}
/**
 * Folds a flat [name, value, name, value, ...] raw-header list into an
 * object with lowercased keys; repeated names collect into arrays.
 * @param {Array<Buffer|string|string[]>} headers
 * @param {object} [obj] optional accumulator to merge into
 */
function parseHeaders (headers, obj = {}) {
  for (let i = 0; i < headers.length; i += 2) {
    const name = headers[i].toString().toLowerCase()
    const rawValue = headers[i + 1]
    const existing = obj[name]
    if (!existing) {
      obj[name] = Array.isArray(rawValue) ? rawValue : rawValue.toString('utf8')
    } else if (Array.isArray(existing)) {
      existing.push(rawValue.toString('utf8'))
    } else {
      obj[name] = [existing, rawValue.toString('utf8')]
    }
  }
  // See https://github.com/nodejs/node/pull/46528
  if ('content-length' in obj && 'content-disposition' in obj) {
    obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
  }
  return obj
}
/**
 * Stringifies a flat raw-header list, keeping the [name, value, ...]
 * shape. When both content-length and content-disposition are present,
 * the disposition value is re-encoded as latin1.
 * See https://github.com/nodejs/node/pull/46528
 * @param {Buffer[]} headers
 * @returns {string[]}
 */
function parseRawHeaders (headers) {
  const ret = []
  let sawContentLength = false
  let dispositionValueIdx = -1
  for (let n = 0; n < headers.length; n += 2) {
    const key = headers[n + 0].toString()
    const val = headers[n + 1].toString('utf8')
    ret.push(key, val)
    if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
      sawContentLength = true
    } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
      dispositionValueIdx = ret.length - 1
    }
  }
  if (sawContentLength && dispositionValueIdx !== -1) {
    ret[dispositionValueIdx] = Buffer.from(ret[dispositionValueIdx]).toString('latin1')
  }
  return ret
}
// See, https://github.com/mcollina/undici/pull/319
// Buffer extends Uint8Array, but cross-realm Buffers can fail the
// instanceof check — hence the explicit Buffer.isBuffer fallback.
function isBuffer (buffer) {
  return Buffer.isBuffer(buffer) || buffer instanceof Uint8Array
}
/**
 * Validates that `handler` implements the dispatch handler contract.
 * onConnect/onError are always required; onBodySent is optional.
 * Upgrade or CONNECT requests need onUpgrade; all others need
 * onHeaders, onData and onComplete.
 * @throws {InvalidArgumentError}
 */
function validateHandler (handler, method, upgrade) {
  if (!handler || typeof handler !== 'object') {
    throw new InvalidArgumentError('handler must be an object')
  }
  if (typeof handler.onConnect !== 'function') {
    throw new InvalidArgumentError('invalid onConnect method')
  }
  if (typeof handler.onError !== 'function') {
    throw new InvalidArgumentError('invalid onError method')
  }
  if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
    throw new InvalidArgumentError('invalid onBodySent method')
  }
  if (upgrade || method === 'CONNECT') {
    if (typeof handler.onUpgrade !== 'function') {
      throw new InvalidArgumentError('invalid onUpgrade method')
    }
  } else {
    for (const name of ['onHeaders', 'onData', 'onComplete']) {
      if (typeof handler[name] !== 'function') {
        throw new InvalidArgumentError(`invalid ${name} method`)
      }
    }
  }
}
// A body is disturbed if it has been read from and it cannot
// be re-used without losing state or data.
function isDisturbed ( body ) {
return ! ! ( body && (
stream . isDisturbed
? stream . isDisturbed ( body ) || body [ kBodyUsed ] // TODO (fix): Why is body[kBodyUsed] needed?
: body [ kBodyUsed ] ||
body . readableDidRead ||
( body . _readableState && body . _readableState . dataEmitted ) ||
isReadableAborted ( body )
) )
}
function isErrored ( body ) {
return ! ! ( body && (
stream . isErrored
? stream . isErrored ( body )
: /state: 'errored'/ . test ( nodeUtil . inspect ( body )
) ) )
}
function isReadable ( body ) {
return ! ! ( body && (
stream . isReadable
? stream . isReadable ( body )
: /state: 'readable'/ . test ( nodeUtil . inspect ( body )
) ) )
}
/**
 * Snapshots the diagnostic-relevant fields of a socket into a plain object.
 */
function getSocketInfo (socket) {
  const {
    localAddress,
    localPort,
    remoteAddress,
    remotePort,
    remoteFamily,
    timeout,
    bytesWritten,
    bytesRead
  } = socket
  return { localAddress, localPort, remoteAddress, remotePort, remoteFamily, timeout, bytesWritten, bytesRead }
}
// Lazily required to avoid loading web streams when they are unused.
let ReadableStream

/**
 * Converts an async iterable into a web ReadableStream of Uint8Array
 * chunks. Prefers the native ReadableStream.from() when available.
 */
function ReadableStreamFrom (iterable) {
  if (!ReadableStream) {
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }
  if (ReadableStream.from) {
    // https://github.com/whatwg/streams/pull/1083
    return ReadableStream.from(iterable)
  }
  let iterator
  return new ReadableStream(
    {
      async start () {
        iterator = iterable[Symbol.asyncIterator]()
      },
      async pull (controller) {
        const { done, value } = await iterator.next()
        if (done) {
          // Close on a microtask so the current read settles first.
          queueMicrotask(() => {
            controller.close()
          })
        } else {
          const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
          controller.enqueue(new Uint8Array(buf))
        }
        return controller.desiredSize > 0
      },
      async cancel (reason) {
        await iterator.return()
      }
    },
    0 // highWaterMark 0: do not read ahead of the consumer
  )
}
// The chunk should be a FormData instance and contains
// all the required methods.
function isFormDataLike (object) {
  const requiredMethods = ['append', 'delete', 'get', 'getAll', 'has', 'set']
  return (
    object &&
    typeof object === 'object' &&
    requiredMethods.every((name) => typeof object[name] === 'function') &&
    object[Symbol.toStringTag] === 'FormData'
  )
}
/**
 * Throws signal's abort reason when it is aborted; no-op otherwise.
 * Uses the native throwIfAborted when present, with a manual
 * AbortError fallback for older runtimes.
 */
function throwIfAborted (signal) {
  if (!signal) {
    return
  }
  if (typeof signal.throwIfAborted === 'function') {
    signal.throwIfAborted()
    return
  }
  if (signal.aborted) {
    // DOMException not available < v17.0.0
    const err = new Error('The operation was aborted')
    err.name = 'AbortError'
    throw err
  }
}
const hasToWellFormed = !!String.prototype.toWellFormed

/**
 * Coerces `val` to a well-formed (USV) string: lone surrogates are
 * replaced with U+FFFD. Prefers String.prototype.toWellFormed, then
 * util.toUSVString, then plain string coercion.
 * @param {string} val
 */
function toUSVString (val) {
  const str = `${val}`
  if (hasToWellFormed) {
    return str.toWellFormed()
  }
  if (nodeUtil.toUSVString) {
    return nodeUtil.toUSVString(val)
  }
  return str
}
const kEnumerableProperty = Object . create ( null )
kEnumerableProperty . enumerable = true
module . exports = {
kEnumerableProperty ,
nop ,
isDisturbed ,
isErrored ,
isReadable ,
toUSVString ,
isReadableAborted ,
isBlobLike ,
parseOrigin ,
parseURL ,
getServerName ,
isStream ,
isIterable ,
isAsyncIterable ,
isDestroyed ,
parseRawHeaders ,
parseHeaders ,
parseKeepAliveTimeout ,
destroy ,
bodyLength ,
deepClone ,
ReadableStreamFrom ,
isBuffer ,
validateHandler ,
getSocketInfo ,
isFormDataLike ,
buildURL ,
throwIfAborted ,
nodeMajor ,
nodeMinor ,
nodeHasAutoSelectFamily : nodeMajor > 18 || ( nodeMajor === 18 && nodeMinor >= 13 )
}
/***/ } ) ,
/***/ 4839 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const Dispatcher = _ _nccwpck _require _ _ ( 412 )
const {
ClientDestroyedError ,
ClientClosedError ,
InvalidArgumentError
} = _ _nccwpck _require _ _ ( 8045 )
const { kDestroy , kClose , kDispatch , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const kDestroyed = Symbol ( 'destroyed' )
const kClosed = Symbol ( 'closed' )
const kOnDestroyed = Symbol ( 'onDestroyed' )
const kOnClosed = Symbol ( 'onClosed' )
const kInterceptedDispatch = Symbol ( 'Intercepted Dispatch' )
/**
 * Base class for concrete dispatchers. Implements the shared
 * close()/destroy() lifecycle state machine (promise or callback style)
 * and lazy composition of dispatch interceptors around [kDispatch].
 */
class DispatcherBase extends Dispatcher {
  constructor () {
    super()
    this[kDestroyed] = false
    this[kOnDestroyed] = null
    this[kClosed] = false
    this[kOnClosed] = []
  }

  get destroyed () {
    return this[kDestroyed]
  }

  get closed () {
    return this[kClosed]
  }

  get interceptors () {
    return this[kInterceptors]
  }

  set interceptors (newInterceptors) {
    if (newInterceptors) {
      for (let i = newInterceptors.length - 1; i >= 0; i--) {
        // Fix: validate the interceptors being assigned. The previous code
        // read `this[kInterceptors][i]`, which checked the already-installed
        // array (and crashed when it was unset) instead of the new one.
        const interceptor = newInterceptors[i]
        if (typeof interceptor !== 'function') {
          throw new InvalidArgumentError('interceptor must be an function')
        }
      }
    }
    this[kInterceptors] = newInterceptors
  }

  /**
   * Gracefully closes the dispatcher. Returns a promise when no callback
   * is supplied. Queued callbacks fire only after [kClose]() and
   * destroy() have both completed.
   */
  close (callback) {
    if (callback === undefined) {
      return new Promise((resolve, reject) => {
        this.close((err, data) => {
          return err ? reject(err) : resolve(data)
        })
      })
    }
    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }
    if (this[kDestroyed]) {
      queueMicrotask(() => callback(new ClientDestroyedError(), null))
      return
    }
    if (this[kClosed]) {
      // Already closing: queue the callback, or fire immediately once the
      // close has fully completed (kOnClosed has been drained to null).
      if (this[kOnClosed]) {
        this[kOnClosed].push(callback)
      } else {
        queueMicrotask(() => callback(null, null))
      }
      return
    }
    this[kClosed] = true
    this[kOnClosed].push(callback)
    const onClosed = () => {
      const callbacks = this[kOnClosed]
      this[kOnClosed] = null
      for (let i = 0; i < callbacks.length; i++) {
        callbacks[i](null, null)
      }
    }
    // Should not error.
    this[kClose]()
      .then(() => this.destroy())
      .then(() => {
        queueMicrotask(onClosed)
      })
  }

  /**
   * Destroys the dispatcher immediately with `err` (defaults to a
   * ClientDestroyedError). Returns a promise when no callback is given.
   */
  destroy (err, callback) {
    if (typeof err === 'function') {
      callback = err
      err = null
    }
    if (callback === undefined) {
      return new Promise((resolve, reject) => {
        this.destroy(err, (err, data) => {
          return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
        })
      })
    }
    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }
    if (this[kDestroyed]) {
      if (this[kOnDestroyed]) {
        this[kOnDestroyed].push(callback)
      } else {
        queueMicrotask(() => callback(null, null))
      }
      return
    }
    if (!err) {
      err = new ClientDestroyedError()
    }
    this[kDestroyed] = true
    this[kOnDestroyed] = this[kOnDestroyed] || []
    this[kOnDestroyed].push(callback)
    const onDestroyed = () => {
      const callbacks = this[kOnDestroyed]
      this[kOnDestroyed] = null
      for (let i = 0; i < callbacks.length; i++) {
        callbacks[i](null, null)
      }
    }
    // Should not error.
    this[kDestroy](err).then(() => {
      queueMicrotask(onDestroyed)
    })
  }

  // Composes the interceptor chain around [kDispatch] on first use and
  // memoizes it, so later dispatches skip recomposition entirely.
  [kInterceptedDispatch] (opts, handler) {
    if (!this[kInterceptors] || this[kInterceptors].length === 0) {
      this[kInterceptedDispatch] = this[kDispatch]
      return this[kDispatch](opts, handler)
    }
    let dispatch = this[kDispatch].bind(this)
    for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
      dispatch = this[kInterceptors][i](dispatch)
    }
    this[kInterceptedDispatch] = dispatch
    return dispatch(opts, handler)
  }

  /**
   * Validates arguments and lifecycle state, then dispatches through the
   * interceptor chain. Errors (other than an invalid handler) are routed
   * to handler.onError and `false` is returned instead of throwing.
   */
  dispatch (opts, handler) {
    if (!handler || typeof handler !== 'object') {
      throw new InvalidArgumentError('handler must be an object')
    }
    try {
      if (!opts || typeof opts !== 'object') {
        throw new InvalidArgumentError('opts must be an object.')
      }
      if (this[kDestroyed] || this[kOnDestroyed]) {
        throw new ClientDestroyedError()
      }
      if (this[kClosed]) {
        throw new ClientClosedError()
      }
      return this[kInterceptedDispatch](opts, handler)
    } catch (err) {
      if (typeof handler.onError !== 'function') {
        throw new InvalidArgumentError('invalid onError method')
      }
      handler.onError(err)
      return false
    }
  }
}

module.exports = DispatcherBase
/***/ } ) ,
/***/ 412 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const EventEmitter = _ _nccwpck _require _ _ ( 2361 )
/**
 * Abstract dispatcher interface. Concrete dispatchers (clients, pools,
 * agents) must override close(), destroy() and dispatch().
 */
class Dispatcher extends EventEmitter {
  close () {
    throw new Error('not implemented')
  }

  destroy () {
    throw new Error('not implemented')
  }

  dispatch () {
    throw new Error('not implemented')
  }
}
module . exports = Dispatcher
/***/ } ) ,
/***/ 9990 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const Busboy = _ _nccwpck _require _ _ ( 6472 )
const util = _ _nccwpck _require _ _ ( 3983 )
const {
ReadableStreamFrom ,
isBlobLike ,
isReadableStreamLike ,
readableStreamClose ,
createDeferredPromise ,
fullyReadBody
} = _ _nccwpck _require _ _ ( 2538 )
const { FormData } = _ _nccwpck _require _ _ ( 2015 )
const { kState } = _ _nccwpck _require _ _ ( 5861 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { DOMException , structuredClone } = _ _nccwpck _require _ _ ( 1037 )
const { Blob , File : NativeFile } = _ _nccwpck _require _ _ ( 4300 )
const { kBodyUsed } = _ _nccwpck _require _ _ ( 2785 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { isErrored } = _ _nccwpck _require _ _ ( 3983 )
const { isUint8Array , isArrayBuffer } = _ _nccwpck _require _ _ ( 9830 )
const { File : UndiciFile } = _ _nccwpck _require _ _ ( 8511 )
const { parseMIMEType , serializeAMimeType } = _ _nccwpck _require _ _ ( 685 )
// Prefer the global web ReadableStream; lazily replaced below when absent.
let ReadableStream = globalThis.ReadableStream

/** @type {globalThis['File']} */
// Prefer the platform-native File, falling back to undici's implementation.
const File = NativeFile ?? UndiciFile
// https://fetch.spec.whatwg.org/#concept-bodyinit-extract
/**
 * Extracts a body ({ stream, source, length }) and a content type from a
 * BodyInit-like `object`, following the Fetch spec algorithm.
 * @param {*} object BodyInit: string, URLSearchParams, BufferSource,
 *   FormData-like, Blob-like, ReadableStream, or (async) iterable
 * @param {boolean} [keepalive] when true, stream bodies are rejected
 * @returns {[{ stream, source, length }, string|null]} body and type
 */
function extractBody (object, keepalive = false) {
  if (!ReadableStream) {
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }
  // 1. Let stream be null.
  let stream = null
  // 2. If object is a ReadableStream object, then set stream to object.
  if (object instanceof ReadableStream) {
    stream = object
  } else if (isBlobLike(object)) {
    // 3. Otherwise, if object is a Blob object, set stream to the
    //    result of running object's get stream.
    stream = object.stream()
  } else {
    // 4. Otherwise, set stream to a new ReadableStream object, and set
    //    up stream.
    // NOTE: pull() references `source`, declared further below. This is
    // safe because pull() only runs after this function has completed
    // and `source` has been assigned.
    stream = new ReadableStream({
      async pull (controller) {
        controller.enqueue(
          typeof source === 'string' ? new TextEncoder().encode(source) : source
        )
        queueMicrotask(() => readableStreamClose(controller))
      },
      start () {},
      type: undefined
    })
  }
  // 5. Assert: stream is a ReadableStream object.
  assert(isReadableStreamLike(stream))
  // 6. Let action be null.
  let action = null
  // 7. Let source be null.
  let source = null
  // 8. Let length be null.
  let length = null
  // 9. Let type be null.
  let type = null
  // 10. Switch on object:
  if (typeof object === 'string') {
    // Set source to the UTF-8 encoding of object.
    // Note: setting source to a Uint8Array here breaks some mocking assumptions.
    source = object
    // Set type to `text/plain;charset=UTF-8`.
    type = 'text/plain;charset=UTF-8'
  } else if (object instanceof URLSearchParams) {
    // URLSearchParams
    // spec says to run application/x-www-form-urlencoded on body.list
    // this is implemented in Node.js as part of an URLSearchParams instance toString method
    // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490
    // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100
    // Set source to the result of running the application/x-www-form-urlencoded serializer with object's list.
    source = object.toString()
    // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
    type = 'application/x-www-form-urlencoded;charset=UTF-8'
  } else if (isArrayBuffer(object)) {
    // BufferSource/ArrayBuffer
    // Set source to a copy of the bytes held by object.
    source = new Uint8Array(object.slice())
  } else if (ArrayBuffer.isView(object)) {
    // BufferSource/ArrayBufferView
    // Set source to a copy of the bytes held by object.
    source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
  } else if (util.isFormDataLike(object)) {
    const boundary = `----formdata-undici-${Math.random()}`.replace('.', '').slice(0, 32)
    const prefix = `--${boundary}\r\nContent-Disposition: form-data`
    /*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
    const escape = (str) =>
      str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
    const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
    // Set action to this step: run the multipart/form-data
    // encoding algorithm, with object's entry list and UTF-8.
    // - This ensures that the body is immutable and can't be changed afterwards
    // - That the content-length is calculated in advance.
    // - And that all parts are pre-encoded and ready to be sent.
    const enc = new TextEncoder()
    const blobParts = []
    const rn = new Uint8Array([13, 10]) // '\r\n'
    length = 0
    let hasUnknownSizeValue = false
    for (const [name, value] of object) {
      if (typeof value === 'string') {
        const chunk = enc.encode(prefix +
          `; name="${escape(normalizeLinefeeds(name))}"` +
          `\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
        blobParts.push(chunk)
        length += chunk.byteLength
      } else {
        const chunk = enc.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` +
          (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' +
          `Content-Type: ${
            value.type || 'application/octet-stream'
          }\r\n\r\n`)
        blobParts.push(chunk, value, rn)
        if (typeof value.size === 'number') {
          length += chunk.byteLength + value.size + rn.byteLength
        } else {
          // A part with unknown size makes the total length unknown.
          hasUnknownSizeValue = true
        }
      }
    }
    const chunk = enc.encode(`--${boundary}--`)
    blobParts.push(chunk)
    length += chunk.byteLength
    if (hasUnknownSizeValue) {
      length = null
    }
    // Set source to object.
    source = object
    action = async function * () {
      for (const part of blobParts) {
        if (part.stream) {
          yield * part.stream()
        } else {
          yield part
        }
      }
    }
    // Set type to `multipart/form-data; boundary=`,
    // followed by the multipart/form-data boundary string generated
    // by the multipart/form-data encoding algorithm.
    type = 'multipart/form-data; boundary=' + boundary
  } else if (isBlobLike(object)) {
    // Blob
    // Set source to object.
    source = object
    // Set length to object's size.
    length = object.size
    // If object's type attribute is not the empty byte sequence, set
    // type to its value.
    if (object.type) {
      type = object.type
    }
  } else if (typeof object[Symbol.asyncIterator] === 'function') {
    // If keepalive is true, then throw a TypeError.
    if (keepalive) {
      throw new TypeError('keepalive')
    }
    // If object is disturbed or locked, then throw a TypeError.
    if (util.isDisturbed(object) || object.locked) {
      throw new TypeError(
        'Response body object should not be disturbed or locked'
      )
    }
    stream =
      object instanceof ReadableStream ? object : ReadableStreamFrom(object)
  }
  // 11. If source is a byte sequence, then set action to a
  // step that returns source and length to source's length.
  if (typeof source === 'string' || util.isBuffer(source)) {
    length = Buffer.byteLength(source)
  }
  // 12. If action is non-null, then run these steps in in parallel:
  if (action != null) {
    // Run action.
    let iterator
    stream = new ReadableStream({
      async start () {
        iterator = action(object)[Symbol.asyncIterator]()
      },
      async pull (controller) {
        const { value, done } = await iterator.next()
        if (done) {
          // When running action is done, close stream.
          queueMicrotask(() => {
            controller.close()
          })
        } else {
          // Whenever one or more bytes are available and stream is not errored,
          // enqueue a Uint8Array wrapping an ArrayBuffer containing the available
          // bytes into stream.
          if (!isErrored(stream)) {
            controller.enqueue(new Uint8Array(value))
          }
        }
        return controller.desiredSize > 0
      },
      async cancel (reason) {
        await iterator.return()
      },
      type: undefined
    })
  }
  // 13. Let body be a body whose stream is stream, source is source,
  // and length is length.
  const body = { stream, source, length }
  // 14. Return (body, type).
  return [body, type]
}
// https://fetch.spec.whatwg.org/#bodyinit-safely-extract
/**
 * Safely extract a body and `Content-Type` from a BodyInit, asserting first
 * that a ReadableStream input has not already been consumed or locked.
 * Delegates the actual extraction to extractBody().
 * @param {*} object - byte sequence or BodyInit object
 * @param {boolean} keepalive - forwarded to extractBody
 */
function safelyExtractBody (object, keepalive = false) {
  if (!ReadableStream) {
    // Lazily load the web-streams implementation on Node versions where
    // ReadableStream is not yet a global (fetch supports >= v16.8).
    // istanbul ignore next
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }

  // To safely extract a body and a `Content-Type` value from
  // a byte sequence or BodyInit object object, run these steps:

  // 1. If object is a ReadableStream object, then:
  if (object instanceof ReadableStream) {
    // Assert: object is neither disturbed nor locked.
    // istanbul ignore next
    assert(!util.isDisturbed(object), 'The body has already been consumed.')
    // istanbul ignore next
    assert(!object.locked, 'The stream is locked.')
  }

  // 2. Return the results of extracting object.
  return extractBody(object, keepalive)
}
function cloneBody ( body ) {
// To clone a body body, run these steps:
// https://fetch.spec.whatwg.org/#concept-body-clone
// 1. Let « out1, out2 » be the result of teeing body’ s stream.
const [ out1 , out2 ] = body . stream . tee ( )
const out2Clone = structuredClone ( out2 , { transfer : [ out2 ] } )
// This, for whatever reasons, unrefs out2Clone which allows
// the process to exit by itself.
const [ , finalClone ] = out2Clone . tee ( )
// 2. Set body’ s stream to out1.
body . stream = out1
// 3. Return a body whose stream is out2 and other members are copied from body.
return {
stream : finalClone ,
length : body . length ,
source : body . source
}
}
async function * consumeBody ( body ) {
if ( body ) {
if ( isUint8Array ( body ) ) {
yield body
} else {
const stream = body . stream
if ( util . isDisturbed ( stream ) ) {
throw new TypeError ( 'The body has already been consumed.' )
}
if ( stream . locked ) {
throw new TypeError ( 'The stream is locked.' )
}
// Compat.
stream [ kBodyUsed ] = true
yield * stream
}
}
}
/**
 * Throw an "AbortError" DOMException when the given request/response state
 * has been aborted; otherwise do nothing.
 * @param {{ aborted: boolean }} state
 */
function throwIfAborted (state) {
  if (!state.aborted) {
    return
  }
  throw new DOMException('The operation was aborted.', 'AbortError')
}
function bodyMixinMethods ( instance ) {
const methods = {
blob ( ) {
// The blob() method steps are to return the result of
// running consume body with this and the following step
// given a byte sequence bytes: return a Blob whose
// contents are bytes and whose type attribute is this’ s
// MIME type.
return specConsumeBody ( this , ( bytes ) => {
let mimeType = bodyMimeType ( this )
if ( mimeType === 'failure' ) {
mimeType = ''
} else if ( mimeType ) {
mimeType = serializeAMimeType ( mimeType )
}
// Return a Blob whose contents are bytes and type attribute
// is mimeType.
return new Blob ( [ bytes ] , { type : mimeType } )
} , instance )
} ,
arrayBuffer ( ) {
// The arrayBuffer() method steps are to return the result
// of running consume body with this and the following step
// given a byte sequence bytes: return a new ArrayBuffer
// whose contents are bytes.
return specConsumeBody ( this , ( bytes ) => {
return new Uint8Array ( bytes ) . buffer
} , instance )
} ,
text ( ) {
// The text() method steps are to return the result of running
// consume body with this and UTF-8 decode.
return specConsumeBody ( this , utf8DecodeBytes , instance )
} ,
json ( ) {
// The json() method steps are to return the result of running
// consume body with this and parse JSON from bytes.
return specConsumeBody ( this , parseJSONFromBytes , instance )
} ,
async formData ( ) {
webidl . brandCheck ( this , instance )
throwIfAborted ( this [ kState ] )
const contentType = this . headers . get ( 'Content-Type' )
// If mimeType’ s essence is "multipart/form-data", then:
if ( /multipart\/form-data/ . test ( contentType ) ) {
const headers = { }
for ( const [ key , value ] of this . headers ) headers [ key . toLowerCase ( ) ] = value
const responseFormData = new FormData ( )
let busboy
try {
busboy = Busboy ( {
headers ,
defParamCharset : 'utf8'
} )
} catch ( err ) {
throw new DOMException ( ` ${ err } ` , 'AbortError' )
}
busboy . on ( 'field' , ( name , value ) => {
responseFormData . append ( name , value )
} )
busboy . on ( 'file' , ( name , value , info ) => {
const { filename , encoding , mimeType } = info
const chunks = [ ]
if ( encoding === 'base64' || encoding . toLowerCase ( ) === 'base64' ) {
let base64chunk = ''
value . on ( 'data' , ( chunk ) => {
base64chunk += chunk . toString ( ) . replace ( /[\r\n]/gm , '' )
const end = base64chunk . length - base64chunk . length % 4
chunks . push ( Buffer . from ( base64chunk . slice ( 0 , end ) , 'base64' ) )
base64chunk = base64chunk . slice ( end )
} )
value . on ( 'end' , ( ) => {
chunks . push ( Buffer . from ( base64chunk , 'base64' ) )
responseFormData . append ( name , new File ( chunks , filename , { type : mimeType } ) )
} )
} else {
value . on ( 'data' , ( chunk ) => {
chunks . push ( chunk )
} )
value . on ( 'end' , ( ) => {
responseFormData . append ( name , new File ( chunks , filename , { type : mimeType } ) )
} )
}
} )
const busboyResolve = new Promise ( ( resolve , reject ) => {
busboy . on ( 'finish' , resolve )
busboy . on ( 'error' , ( err ) => reject ( new TypeError ( err ) ) )
} )
if ( this . body !== null ) for await ( const chunk of consumeBody ( this [ kState ] . body ) ) busboy . write ( chunk )
busboy . end ( )
await busboyResolve
return responseFormData
} else if ( /application\/x-www-form-urlencoded/ . test ( contentType ) ) {
// Otherwise, if mimeType’ s essence is "application/x-www-form-urlencoded", then:
// 1. Let entries be the result of parsing bytes.
let entries
try {
let text = ''
// application/x-www-form-urlencoded parser will keep the BOM.
// https://url.spec.whatwg.org/#concept-urlencoded-parser
const textDecoder = new TextDecoder ( 'utf-8' , { ignoreBOM : true } )
for await ( const chunk of consumeBody ( this [ kState ] . body ) ) {
if ( ! isUint8Array ( chunk ) ) {
throw new TypeError ( 'Expected Uint8Array chunk' )
}
text += textDecoder . decode ( chunk , { stream : true } )
}
text += textDecoder . decode ( )
entries = new URLSearchParams ( text )
} catch ( err ) {
// istanbul ignore next: Unclear when new URLSearchParams can fail on a string.
// 2. If entries is failure, then throw a TypeError.
throw Object . assign ( new TypeError ( ) , { cause : err } )
}
// 3. Return a new FormData object whose entries are entries.
const formData = new FormData ( )
for ( const [ name , value ] of entries ) {
formData . append ( name , value )
}
return formData
} else {
// Wait a tick before checking if the request has been aborted.
// Otherwise, a TypeError can be thrown when an AbortError should.
await Promise . resolve ( )
throwIfAborted ( this [ kState ] )
// Otherwise, throw a TypeError.
throw webidl . errors . exception ( {
header : ` ${ instance . name } .formData ` ,
message : 'Could not parse content as FormData.'
} )
}
}
}
return methods
}
/**
 * Attach the shared body-consumption methods (blob/arrayBuffer/text/json/
 * formData) onto the given class's prototype.
 * @param {Function} prototype - the class to augment
 */
function mixinBody (prototype) {
  const methods = bodyMixinMethods(prototype)
  Object.assign(prototype.prototype, methods)
}
/**
 * @see https://fetch.spec.whatwg.org/#concept-body-consume-body
 * @param {Response|Request} object
 * @param {(value: unknown) => unknown} convertBytesToJSValue
 * @param {Response|Request} instance
 * @returns {Promise<unknown>} resolves with convertBytesToJSValue applied to
 *   the fully-read body bytes (an empty byte sequence when the body is null);
 *   rejects when the body is unusable, aborted, or conversion throws.
 */
async function specConsumeBody (object, convertBytesToJSValue, instance) {
  webidl.brandCheck(object, instance)

  throwIfAborted(object[kState])

  // 1. If object is unusable, then return a promise rejected
  //    with a TypeError.
  if (bodyUnusable(object[kState].body)) {
    throw new TypeError('Body is unusable')
  }

  // 2. Let promise be a new promise.
  const promise = createDeferredPromise()

  // 3. Let errorSteps given error be to reject promise with error.
  const errorSteps = (error) => promise.reject(error)

  // 4. Let successSteps given a byte sequence data be to resolve
  //    promise with the result of running convertBytesToJSValue
  //    with data. If that threw an exception, then run errorSteps
  //    with that exception.
  const successSteps = (data) => {
    try {
      promise.resolve(convertBytesToJSValue(data))
    } catch (e) {
      errorSteps(e)
    }
  }

  // 5. If object's body is null, then run successSteps with an
  //    empty byte sequence.
  if (object[kState].body == null) {
    successSteps(new Uint8Array())
    return promise.promise
  }

  // 6. Otherwise, fully read object's body given successSteps,
  //    errorSteps, and object's relevant global object.
  fullyReadBody(object[kState].body, successSteps, errorSteps)

  // 7. Return promise.
  return promise.promise
}
// https://fetch.spec.whatwg.org/#body-unusable
/**
 * A body is unusable when it exists and its stream has either been locked
 * to a reader or already disturbed (read from).
 * @param {{ stream: ReadableStream }|null} body
 * @returns {boolean}
 */
function bodyUnusable (body) {
  if (body == null) {
    return false
  }
  return util.isDisturbed(body.stream) || body.stream.locked
}
/**
 * UTF-8 decode a byte sequence, stripping a leading byte order mark.
 * @see https://encoding.spec.whatwg.org/#utf-8-decode
 * @param {Uint8Array|Buffer} buffer
 * @returns {string}
 */
function utf8DecodeBytes (buffer) {
  if (buffer.length === 0) {
    return ''
  }

  // Drop a UTF-8 BOM (0xEF 0xBB 0xBF) when the payload starts with one,
  // per the spec's "peek three bytes" step.
  const hasBOM = buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF
  const view = hasBOM ? buffer.subarray(3) : buffer

  // TextDecoder defaults to UTF-8 with "replacement" error handling,
  // matching the spec's decode step.
  return new TextDecoder().decode(view)
}
/**
 * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
 * @param {Uint8Array} bytes
 * @returns {unknown} the parsed JavaScript value
 */
function parseJSONFromBytes (bytes) {
  // UTF-8 decode first (BOM-tolerant), then parse the resulting text.
  const text = utf8DecodeBytes(bytes)
  return JSON.parse(text)
}
/**
 * @see https://fetch.spec.whatwg.org/#concept-body-mime-type
 * @param {import('./response').Response|import('./request').Request} object
 * @returns {object|'failure'} the parsed MIME type record, or 'failure'
 *   when no content-type header is present (or parsing fails).
 */
function bodyMimeType (object) {
  const { headersList } = object[kState]
  const contentType = headersList.get('content-type')

  if (contentType === null) {
    return 'failure'
  }

  // parseMIMEType itself may also return 'failure' for invalid values.
  return parseMIMEType(contentType)
}
module . exports = {
extractBody ,
safelyExtractBody ,
cloneBody ,
mixinBody
}
/***/ } ) ,
/***/ 1037 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { MessageChannel , receiveMessageOnPort } = _ _nccwpck _require _ _ ( 1267 )
const corsSafeListedMethods = [ 'GET' , 'HEAD' , 'POST' ]
const nullBodyStatus = [ 101 , 204 , 205 , 304 ]
const redirectStatus = [ 301 , 302 , 303 , 307 , 308 ]
// https://fetch.spec.whatwg.org/#block-bad-port
const badPorts = [
'1' , '7' , '9' , '11' , '13' , '15' , '17' , '19' , '20' , '21' , '22' , '23' , '25' , '37' , '42' , '43' , '53' , '69' , '77' , '79' ,
'87' , '95' , '101' , '102' , '103' , '104' , '109' , '110' , '111' , '113' , '115' , '117' , '119' , '123' , '135' , '137' ,
'139' , '143' , '161' , '179' , '389' , '427' , '465' , '512' , '513' , '514' , '515' , '526' , '530' , '531' , '532' ,
'540' , '548' , '554' , '556' , '563' , '587' , '601' , '636' , '989' , '990' , '993' , '995' , '1719' , '1720' , '1723' ,
'2049' , '3659' , '4045' , '5060' , '5061' , '6000' , '6566' , '6665' , '6666' , '6667' , '6668' , '6669' , '6697' ,
'10080'
]
// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
const referrerPolicy = [
'' ,
'no-referrer' ,
'no-referrer-when-downgrade' ,
'same-origin' ,
'origin' ,
'strict-origin' ,
'origin-when-cross-origin' ,
'strict-origin-when-cross-origin' ,
'unsafe-url'
]
const requestRedirect = [ 'follow' , 'manual' , 'error' ]
const safeMethods = [ 'GET' , 'HEAD' , 'OPTIONS' , 'TRACE' ]
const requestMode = [ 'navigate' , 'same-origin' , 'no-cors' , 'cors' ]
const requestCredentials = [ 'omit' , 'same-origin' , 'include' ]
const requestCache = [
'default' ,
'no-store' ,
'reload' ,
'no-cache' ,
'force-cache' ,
'only-if-cached'
]
// https://fetch.spec.whatwg.org/#request-body-header-name
const requestBodyHeader = [
'content-encoding' ,
'content-language' ,
'content-location' ,
'content-type' ,
// See https://github.com/nodejs/undici/issues/2021
// 'Content-Length' is a forbidden header name, which is typically
// removed in the Headers implementation. However, undici doesn't
// filter out headers, so we add it here.
'content-length'
]
// https://fetch.spec.whatwg.org/#enumdef-requestduplex
const requestDuplex = [
'half'
]
// http://fetch.spec.whatwg.org/#forbidden-method
const forbiddenMethods = [ 'CONNECT' , 'TRACE' , 'TRACK' ]
const subresource = [
'audio' ,
'audioworklet' ,
'font' ,
'image' ,
'manifest' ,
'paintworklet' ,
'script' ,
'style' ,
'track' ,
'video' ,
'xslt' ,
''
]
/** @type {globalThis['DOMException']} */
const DOMException = globalThis.DOMException ?? (() => {
  // DOMException was only made a global in Node v17.0.0,
  // but fetch supports >= v16.8.
  try {
    atob('~')
  } catch (err) {
    // atob throws a DOMException on invalid input; recover the
    // constructor from the thrown instance's prototype chain.
    return Object.getPrototypeOf(err).constructor
  }
})()
// Lazily-created MessageChannel reused by the structuredClone fallback.
let channel

/** @type {globalThis['structuredClone']} */
const structuredClone =
  globalThis.structuredClone ??
  // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js
  // structuredClone was added in v17.0.0, but fetch supports v16.8
  function structuredClone (value, options = undefined) {
    if (arguments.length === 0) {
      throw new TypeError('missing argument')
    }

    if (!channel) {
      channel = new MessageChannel()
    }
    // Keep both ports unref'd so the cached channel never keeps the
    // process alive on its own.
    channel.port1.unref()
    channel.port2.unref()
    // Synchronously round-trip the value through the port pair; the
    // serialization applied is the structured clone algorithm.
    channel.port1.postMessage(value, options?.transfer)
    return receiveMessageOnPort(channel.port2).message
  }
module . exports = {
DOMException ,
structuredClone ,
subresource ,
forbiddenMethods ,
requestBodyHeader ,
referrerPolicy ,
requestRedirect ,
requestMode ,
requestCredentials ,
requestCache ,
redirectStatus ,
corsSafeListedMethods ,
nullBodyStatus ,
safeMethods ,
badPorts ,
requestDuplex
}
/***/ } ) ,
/***/ 685 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
const assert = _ _nccwpck _require _ _ ( 9491 )
const { atob } = _ _nccwpck _require _ _ ( 4300 )
const { isomorphicDecode } = _ _nccwpck _require _ _ ( 2538 )
const encoder = new TextEncoder()

/**
 * @see https://mimesniff.spec.whatwg.org/#http-token-code-point
 */
// Fix: '-' must be escaped inside the character class. The previous
// unescaped "+-." formed the range U+002B..U+002E, which incorrectly
// admitted ',' (U+002C) as an HTTP token code point.
const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+\-.^_|~A-Za-z0-9]+$/
const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line
/**
 * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
 */
// Fix: anchored with ^...+$ so the regex matches only strings made up
// SOLELY of quoted-string token code points (HTAB, 0x20-0x7E, 0x80-0xFF),
// as parseMIMEType's "solely contains" check requires. The previous
// unanchored single-character class (with stray '|' literals) matched any
// string merely containing one valid character.
const HTTP_QUOTED_STRING_TOKENS = /^[\u0009\u0020-\u007E\u0080-\u00FF]+$/ // eslint-disable-line
// https://fetch.spec.whatwg.org/#data-url-processor
/** @param {URL} dataURL */
function dataURLProcessor ( dataURL ) {
// 1. Assert: dataURL’ s scheme is "data".
assert ( dataURL . protocol === 'data:' )
// 2. Let input be the result of running the URL
// serializer on dataURL with exclude fragment
// set to true.
let input = URLSerializer ( dataURL , true )
// 3. Remove the leading "data:" string from input.
input = input . slice ( 5 )
// 4. Let position point at the start of input.
const position = { position : 0 }
// 5. Let mimeType be the result of collecting a
// sequence of code points that are not equal
// to U+002C (,), given position.
let mimeType = collectASequenceOfCodePointsFast (
',' ,
input ,
position
)
// 6. Strip leading and trailing ASCII whitespace
// from mimeType.
// Undici implementation note: we need to store the
// length because if the mimetype has spaces removed,
// the wrong amount will be sliced from the input in
// step #9
const mimeTypeLength = mimeType . length
mimeType = removeASCIIWhitespace ( mimeType , true , true )
// 7. If position is past the end of input, then
// return failure
if ( position . position >= input . length ) {
return 'failure'
}
// 8. Advance position by 1.
position . position ++
// 9. Let encodedBody be the remainder of input.
const encodedBody = input . slice ( mimeTypeLength + 1 )
// 10. Let body be the percent-decoding of encodedBody.
let body = stringPercentDecode ( encodedBody )
// 11. If mimeType ends with U+003B (;), followed by
// zero or more U+0020 SPACE, followed by an ASCII
// case-insensitive match for "base64", then:
if ( /;(\u0020){0,}base64$/i . test ( mimeType ) ) {
// 1. Let stringBody be the isomorphic decode of body.
const stringBody = isomorphicDecode ( body )
// 2. Set body to the forgiving-base64 decode of
// stringBody.
body = forgivingBase64 ( stringBody )
// 3. If body is failure, then return failure.
if ( body === 'failure' ) {
return 'failure'
}
// 4. Remove the last 6 code points from mimeType.
mimeType = mimeType . slice ( 0 , - 6 )
// 5. Remove trailing U+0020 SPACE code points from mimeType,
// if any.
mimeType = mimeType . replace ( /(\u0020)+$/ , '' )
// 6. Remove the last U+003B (;) code point from mimeType.
mimeType = mimeType . slice ( 0 , - 1 )
}
// 12. If mimeType starts with U+003B (;), then prepend
// "text/plain" to mimeType.
if ( mimeType . startsWith ( ';' ) ) {
mimeType = 'text/plain' + mimeType
}
// 13. Let mimeTypeRecord be the result of parsing
// mimeType.
let mimeTypeRecord = parseMIMEType ( mimeType )
// 14. If mimeTypeRecord is failure, then set
// mimeTypeRecord to text/plain;charset=US-ASCII.
if ( mimeTypeRecord === 'failure' ) {
mimeTypeRecord = parseMIMEType ( 'text/plain;charset=US-ASCII' )
}
// 15. Return a new data: URL struct whose MIME
// type is mimeTypeRecord and body is body.
// https://fetch.spec.whatwg.org/#data-url-struct
return { mimeType : mimeTypeRecord , body }
}
// https://url.spec.whatwg.org/#concept-url-serializer
/**
 * Serialize a URL, optionally dropping its fragment.
 * @param {URL} url
 * @param {boolean} excludeFragment
 * @returns {string}
 */
function URLSerializer (url, excludeFragment = false) {
  const serialized = url.href

  if (excludeFragment) {
    const fragmentStart = serialized.lastIndexOf('#')
    if (fragmentStart !== -1) {
      return serialized.slice(0, fragmentStart)
    }
  }

  return serialized
}
// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
/**
 * Collect the longest prefix (starting at position) whose code points all
 * satisfy the condition, advancing position past the collected span.
 * @param {(char: string) => boolean} condition
 * @param {string} input
 * @param {{ position: number }} position
 * @returns {string} the collected span
 */
function collectASequenceOfCodePoints (condition, input, position) {
  const start = position.position

  // Advance while in bounds and the predicate holds; the result is then
  // just the slice between the start mark and the final position.
  while (position.position < input.length && condition(input[position.position])) {
    position.position++
  }

  return input.slice(start, position.position)
}
/**
 * A faster collectASequenceOfCodePoints that only works when comparing a
 * single delimiter character: scans with indexOf instead of a predicate.
 * @param {string} char
 * @param {string} input
 * @param {{ position: number }} position
 * @returns {string} everything from position up to (not including) char,
 *   or the rest of input when char does not occur.
 */
function collectASequenceOfCodePointsFast (char, input, position) {
  const start = position.position
  const idx = input.indexOf(char, start)

  // When the delimiter never occurs, the remainder of input is consumed.
  position.position = idx === -1 ? input.length : idx

  return input.slice(start, position.position)
}
// https://url.spec.whatwg.org/#string-percent-decode
/**
 * Percent-decode a string: UTF-8 encode it, then percent-decode the bytes.
 * @param {string} input
 * @returns {Uint8Array}
 */
function stringPercentDecode (input) {
  return percentDecode(encoder.encode(input))
}
// https://url.spec.whatwg.org/#percent-decode
/**
 * Percent-decode a byte sequence: each "%XY" hex escape becomes the byte
 * 0xXY; a '%' not followed by two hex digits passes through literally.
 * @param {Uint8Array} input
 * @returns {Uint8Array}
 */
function percentDecode (input) {
  /** @type {number[]} */
  const output = []
  let i = 0

  while (i < input.length) {
    const byte = input[i]

    if (byte !== 0x25) {
      // Non-'%' bytes pass through untouched.
      output.push(byte)
      i += 1
      continue
    }

    // Peek at the two characters after '%'; a malformed escape emits
    // the '%' verbatim and continues with the following byte.
    const hexPair = String.fromCharCode(input[i + 1], input[i + 2])
    if (!/^[0-9A-Fa-f]{2}$/.test(hexPair)) {
      output.push(0x25)
      i += 1
    } else {
      // Valid escape: append the decoded byte and skip the hex digits.
      output.push(Number.parseInt(hexPair, 16))
      i += 3
    }
  }

  return Uint8Array.from(output)
}
// https://mimesniff.spec.whatwg.org/#parse-a-mime-type
/**
 * Parse a MIME type string into a record.
 * @param {string} input
 * @returns {{ type: string, subtype: string, parameters: Map<string,string>,
 *   essence: string }|'failure'}
 */
function parseMIMEType (input) {
  // 1. Remove any leading and trailing HTTP whitespace
  // from input.
  input = removeHTTPWhitespace(input, true, true)

  // 2. Let position be a position variable for input,
  // initially pointing at the start of input.
  const position = { position: 0 }

  // 3. Let type be the result of collecting a sequence
  // of code points that are not U+002F (/) from
  // input, given position.
  const type = collectASequenceOfCodePointsFast(
    '/',
    input,
    position
  )

  // 4. If type is the empty string or does not solely
  // contain HTTP token code points, then return failure.
  // https://mimesniff.spec.whatwg.org/#http-token-code-point
  if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) {
    return 'failure'
  }

  // 5. If position is past the end of input, then return
  // failure
  if (position.position > input.length) {
    return 'failure'
  }

  // 6. Advance position by 1. (This skips past U+002F (/).)
  position.position++

  // 7. Let subtype be the result of collecting a sequence of
  // code points that are not U+003B (;) from input, given
  // position.
  let subtype = collectASequenceOfCodePointsFast(
    ';',
    input,
    position
  )

  // 8. Remove any trailing HTTP whitespace from subtype.
  subtype = removeHTTPWhitespace(subtype, false, true)

  // 9. If subtype is the empty string or does not solely
  // contain HTTP token code points, then return failure.
  if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) {
    return 'failure'
  }

  const typeLowercase = type.toLowerCase()
  const subtypeLowercase = subtype.toLowerCase()

  // 10. Let mimeType be a new MIME type record whose type
  // is type, in ASCII lowercase, and subtype is subtype,
  // in ASCII lowercase.
  // https://mimesniff.spec.whatwg.org/#mime-type
  const mimeType = {
    type: typeLowercase,
    subtype: subtypeLowercase,
    /** @type {Map<string, string>} */
    parameters: new Map(),
    // https://mimesniff.spec.whatwg.org/#mime-type-essence
    essence: `${typeLowercase}/${subtypeLowercase}`
  }

  // 11. While position is not past the end of input:
  while (position.position < input.length) {
    // 1. Advance position by 1. (This skips past U+003B (;).)
    position.position++

    // 2. Collect a sequence of code points that are HTTP
    // whitespace from input given position.
    collectASequenceOfCodePoints(
      // https://fetch.spec.whatwg.org/#http-whitespace
      char => HTTP_WHITESPACE_REGEX.test(char),
      input,
      position
    )

    // 3. Let parameterName be the result of collecting a
    // sequence of code points that are not U+003B (;)
    // or U+003D (=) from input, given position.
    let parameterName = collectASequenceOfCodePoints(
      (char) => char !== ';' && char !== '=',
      input,
      position
    )

    // 4. Set parameterName to parameterName, in ASCII
    // lowercase.
    parameterName = parameterName.toLowerCase()

    // 5. If position is not past the end of input, then:
    if (position.position < input.length) {
      // 1. If the code point at position within input is
      // U+003B (;), then continue (parameter has no value).
      if (input[position.position] === ';') {
        continue
      }

      // 2. Advance position by 1. (This skips past U+003D (=).)
      position.position++
    }

    // 6. If position is past the end of input, then break.
    if (position.position > input.length) {
      break
    }

    // 7. Let parameterValue be null.
    let parameterValue = null

    // 8. If the code point at position within input is
    // U+0022 ("), then:
    if (input[position.position] === '"') {
      // 1. Set parameterValue to the result of collecting
      // an HTTP quoted string from input, given position
      // and the extract-value flag.
      parameterValue = collectAnHTTPQuotedString(input, position, true)

      // 2. Collect a sequence of code points that are not
      // U+003B (;) from input, given position (discard any
      // garbage after the closing quote).
      collectASequenceOfCodePointsFast(
        ';',
        input,
        position
      )

    // 9. Otherwise:
    } else {
      // 1. Set parameterValue to the result of collecting
      // a sequence of code points that are not U+003B (;)
      // from input, given position.
      parameterValue = collectASequenceOfCodePointsFast(
        ';',
        input,
        position
      )

      // 2. Remove any trailing HTTP whitespace from parameterValue.
      parameterValue = removeHTTPWhitespace(parameterValue, false, true)

      // 3. If parameterValue is the empty string, then continue.
      if (parameterValue.length === 0) {
        continue
      }
    }

    // 10. If all of the following are true
    // - parameterName is not the empty string
    // - parameterName solely contains HTTP token code points
    // - parameterValue solely contains HTTP quoted-string token code points
    // - mimeType's parameters[parameterName] does not exist
    // then set mimeType's parameters[parameterName] to parameterValue.
    if (
      parameterName.length !== 0 &&
      HTTP_TOKEN_CODEPOINTS.test(parameterName) &&
      (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&
      !mimeType.parameters.has(parameterName)
    ) {
      mimeType.parameters.set(parameterName, parameterValue)
    }
  }

  // 12. Return mimeType.
  return mimeType
}
// https://infra.spec.whatwg.org/#forgiving-base64-decode
/**
 * Forgiving base64 decode: tolerates ASCII whitespace and up to two '='
 * padding characters, returning the decoded bytes or 'failure'.
 * @param {string} data
 * @returns {Uint8Array|'failure'}
 */
function forgivingBase64 (data) {
  // Strip all ASCII whitespace before validating.
  data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line

  // Trailing '=' padding (at most two) is removed only when the length
  // is already a multiple of four.
  if (data.length % 4 === 0) {
    data = data.replace(/=?=$/, '')
  }

  // A remainder of 1 can never be valid base64.
  if (data.length % 4 === 1) {
    return 'failure'
  }

  // Only '+', '/' and ASCII alphanumerics are allowed at this point.
  if (/[^+/0-9A-Za-z]/.test(data)) {
    return 'failure'
  }

  // Decode to a binary string, then map each char code to a byte.
  return Uint8Array.from(atob(data), (char) => char.charCodeAt(0))
}
// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string
// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string
/**
 * Collect a double-quoted string starting at position (which must point at
 * the opening '"'), handling backslash escapes.
 * @param {string} input
 * @param {{ position: number }} position - advanced past the quoted span
 * @param {boolean?} extractValue - when set, return just the unescaped
 *   value; otherwise return the raw span including the quotes.
 */
function collectAnHTTPQuotedString (input, position, extractValue) {
  // 1. Let positionStart be position.
  const positionStart = position.position

  // 2. Let value be the empty string.
  let value = ''

  // 3. Assert: the code point at position within input
  // is U+0022 (").
  assert(input[position.position] === '"')

  // 4. Advance position by 1.
  position.position++

  // 5. While true:
  while (true) {
    // 1. Append the result of collecting a sequence of code points
    // that are not U+0022 (") or U+005C (\) from input, given
    // position, to value.
    value += collectASequenceOfCodePoints(
      (char) => char !== '"' && char !== '\\',
      input,
      position
    )

    // 2. If position is past the end of input, then break
    // (unterminated quoted string is tolerated).
    if (position.position >= input.length) {
      break
    }

    // 3. Let quoteOrBackslash be the code point at position within
    // input.
    const quoteOrBackslash = input[position.position]

    // 4. Advance position by 1.
    position.position++

    // 5. If quoteOrBackslash is U+005C (\), then:
    if (quoteOrBackslash === '\\') {
      // 1. If position is past the end of input, then append
      // U+005C (\) to value and break (trailing lone backslash).
      if (position.position >= input.length) {
        value += '\\'
        break
      }

      // 2. Append the code point at position within input to value
      // (the escaped character, taken literally).
      value += input[position.position]

      // 3. Advance position by 1.
      position.position++

    // 6. Otherwise:
    } else {
      // 1. Assert: quoteOrBackslash is U+0022 (").
      assert(quoteOrBackslash === '"')

      // 2. Break.
      break
    }
  }

  // 6. If the extract-value flag is set, then return value.
  if (extractValue) {
    return value
  }

  // 7. Return the code points from positionStart to position,
  // inclusive, within input.
  return input.slice(positionStart, position.position)
}
/**
 * Serialize a MIME type record back to a string.
 * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type
 * @param {{ parameters: Map<string,string>, essence: string }} mimeType
 * @returns {string}
 */
function serializeAMimeType (mimeType) {
  assert(mimeType !== 'failure')
  const { parameters, essence } = mimeType

  // Start from "type/subtype" and append each ";name=value" pair.
  const pieces = [essence]

  for (const [name, rawValue] of parameters.entries()) {
    let value = rawValue

    // Values that are not pure HTTP token code points (including the
    // empty string) must be backslash-escaped and double-quoted.
    if (!HTTP_TOKEN_CODEPOINTS.test(value)) {
      value = `"${value.replace(/(\\|")/g, '\\$1')}"`
    }

    pieces.push(`;${name}=${value}`)
  }

  return pieces.join('')
}
/**
 * @see https://fetch.spec.whatwg.org/#http-whitespace
 * @param {string} char
 * @returns {boolean} true for CR, LF, HTAB and SPACE
 */
function isHTTPWhiteSpace (char) {
  switch (char) {
    case '\r':
    case '\n':
    case '\t':
    case ' ':
      return true
    default:
      return false
  }
}
/**
 * Strip leading and/or trailing HTTP whitespace from a string.
 * @see https://fetch.spec.whatwg.org/#http-whitespace
 * @param {string} str
 * @param {boolean} leading
 * @param {boolean} trailing
 * @returns {string}
 */
function removeHTTPWhitespace (str, leading = true, trailing = true) {
  let start = 0
  let end = str.length - 1

  if (leading) {
    while (start < str.length && isHTTPWhiteSpace(str[start])) start++
  }

  if (trailing) {
    while (end > 0 && isHTTPWhiteSpace(str[end])) end--
  }

  return str.slice(start, end + 1)
}
/**
 * @see https://infra.spec.whatwg.org/#ascii-whitespace
 * @param {string} char
 * @returns {boolean} true for CR, LF, HTAB, FF and SPACE
 */
function isASCIIWhitespace (char) {
  switch (char) {
    case '\r':
    case '\n':
    case '\t':
    case '\f':
    case ' ':
      return true
    default:
      return false
  }
}
/**
 * Strip leading and/or trailing ASCII whitespace from str.
 * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace
 * @param {string} str
 * @param {boolean} [leading=true] strip from the front
 * @param {boolean} [trailing=true] strip from the back
 * @returns {string}
 */
function removeASCIIWhitespace (str, leading = true, trailing = true) {
  let start = 0
  let end = str.length - 1

  if (leading) {
    while (start < str.length && isASCIIWhitespace(str[start])) {
      start++
    }
  }

  if (trailing) {
    while (end > 0 && isASCIIWhitespace(str[end])) {
      end--
    }
  }

  return str.slice(start, end + 1)
}
module . exports = {
dataURLProcessor ,
URLSerializer ,
collectASequenceOfCodePoints ,
collectASequenceOfCodePointsFast ,
stringPercentDecode ,
parseMIMEType ,
collectAnHTTPQuotedString ,
serializeAMimeType
}
/***/ } ) ,
/***/ 8511 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { Blob , File : NativeFile } = _ _nccwpck _require _ _ ( 4300 )
const { types } = _ _nccwpck _require _ _ ( 3837 )
const { kState } = _ _nccwpck _require _ _ ( 5861 )
const { isBlobLike } = _ _nccwpck _require _ _ ( 2538 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { parseMIMEType , serializeAMimeType } = _ _nccwpck _require _ _ ( 685 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
class File extends Blob {
constructor ( fileBits , fileName , options = { } ) {
// The File constructor is invoked with two or three parameters, depending
// on whether the optional dictionary parameter is used. When the File()
// constructor is invoked, user agents must run the following steps:
webidl . argumentLengthCheck ( arguments , 2 , { header : 'File constructor' } )
fileBits = webidl . converters [ 'sequence<BlobPart>' ] ( fileBits )
fileName = webidl . converters . USVString ( fileName )
options = webidl . converters . FilePropertyBag ( options )
// 1. Let bytes be the result of processing blob parts given fileBits and
// options.
// Note: Blob handles this for us
// 2. Let n be the fileName argument to the constructor.
const n = fileName
// 3. Process FilePropertyBag dictionary argument by running the following
// substeps:
// 1. If the type member is provided and is not the empty string, let t
// be set to the type dictionary member. If t contains any characters
// outside the range U+0020 to U+007E, then set t to the empty string
// and return from these substeps.
// 2. Convert every character in t to ASCII lowercase.
let t = options . type
let d
// eslint-disable-next-line no-labels
substep : {
if ( t ) {
t = parseMIMEType ( t )
if ( t === 'failure' ) {
t = ''
// eslint-disable-next-line no-labels
break substep
}
t = serializeAMimeType ( t ) . toLowerCase ( )
}
// 3. If the lastModified member is provided, let d be set to the
// lastModified dictionary member. If it is not provided, set d to the
// current date and time represented as the number of milliseconds since
// the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
d = options . lastModified
}
// 4. Return a new File object F such that:
// F refers to the bytes byte sequence.
// F.size is set to the number of total bytes in bytes.
// F.name is set to n.
// F.type is set to t.
// F.lastModified is set to d.
super ( processBlobParts ( fileBits , options ) , { type : t } )
this [ kState ] = {
name : n ,
lastModified : d ,
type : t
}
}
get name ( ) {
webidl . brandCheck ( this , File )
return this [ kState ] . name
}
get lastModified ( ) {
webidl . brandCheck ( this , File )
return this [ kState ] . lastModified
}
get type ( ) {
webidl . brandCheck ( this , File )
return this [ kState ] . type
}
}
/**
 * File-shaped wrapper around an arbitrary blob-like object: data access
 * (stream/arrayBuffer/slice/text/size/type) is delegated to the wrapped
 * object while name/lastModified metadata is stored locally under kState.
 */
class FileLike {
  constructor (blobLike, fileName, options = {}) {
    // TODO: argument idl type check

    // Mirrors the File() constructor steps: keep the file name, the raw
    // (unvalidated) type, and a lastModified timestamp defaulting to now.
    // TODO: validate and lowercase `type` per the FilePropertyBag substeps.
    const lastModified = options.lastModified ?? Date.now()

    this[kState] = {
      blobLike,
      name: fileName,
      type: options.type,
      lastModified
    }
  }

  stream (...args) {
    webidl.brandCheck(this, FileLike)

    const { blobLike } = this[kState]
    return blobLike.stream(...args)
  }

  arrayBuffer (...args) {
    webidl.brandCheck(this, FileLike)

    const { blobLike } = this[kState]
    return blobLike.arrayBuffer(...args)
  }

  slice (...args) {
    webidl.brandCheck(this, FileLike)

    const { blobLike } = this[kState]
    return blobLike.slice(...args)
  }

  text (...args) {
    webidl.brandCheck(this, FileLike)

    const { blobLike } = this[kState]
    return blobLike.text(...args)
  }

  get size () {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.size
  }

  get type () {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.type
  }

  get name () {
    webidl.brandCheck(this, FileLike)

    return this[kState].name
  }

  get lastModified () {
    webidl.brandCheck(this, FileLike)

    return this[kState].lastModified
  }

  get [Symbol.toStringTag] () {
    return 'File'
  }
}
Object . defineProperties ( File . prototype , {
[ Symbol . toStringTag ] : {
value : 'File' ,
configurable : true
} ,
name : kEnumerableProperty ,
lastModified : kEnumerableProperty
} )
webidl . converters . Blob = webidl . interfaceConverter ( Blob )
webidl . converters . BlobPart = function ( V , opts ) {
if ( webidl . util . Type ( V ) === 'Object' ) {
if ( isBlobLike ( V ) ) {
return webidl . converters . Blob ( V , { strict : false } )
}
if (
ArrayBuffer . isView ( V ) ||
types . isAnyArrayBuffer ( V )
) {
return webidl . converters . BufferSource ( V , opts )
}
}
return webidl . converters . USVString ( V , opts )
}
webidl . converters [ 'sequence<BlobPart>' ] = webidl . sequenceConverter (
webidl . converters . BlobPart
)
// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag
webidl . converters . FilePropertyBag = webidl . dictionaryConverter ( [
{
key : 'lastModified' ,
converter : webidl . converters [ 'long long' ] ,
get defaultValue ( ) {
return Date . now ( )
}
} ,
{
key : 'type' ,
converter : webidl . converters . DOMString ,
defaultValue : ''
} ,
{
key : 'endings' ,
converter : ( value ) => {
value = webidl . converters . DOMString ( value )
value = value . toLowerCase ( )
if ( value !== 'native' ) {
value = 'transparent'
}
return value
} ,
defaultValue : 'transparent'
}
] )
/**
 * Convert the parts given to the Blob/File constructor into the sequence
 * of byte sources (TypedArrays and Blobs) the Blob constructor accepts.
 * Unknown element kinds are silently skipped (as before).
 * @see https://www.w3.org/TR/FileAPI/#process-blob-parts
 * @param {(NodeJS.TypedArray|Blob|string)[]} parts
 * @param {{ type: string, endings: string }} options
 * @returns {(NodeJS.TypedArray|Blob)[]}
 */
function processBlobParts (parts, options) {
  // 1. Let bytes be an empty sequence of bytes.
  /** @type {NodeJS.TypedArray[]} */
  const bytes = []

  // Hoisted out of the loop: one encoder serves every string part
  // (previously a new TextEncoder was allocated per string element).
  const encoder = new TextEncoder()

  // 2. For each element in parts:
  for (const element of parts) {
    // 1. If element is a USVString, run the following substeps:
    if (typeof element === 'string') {
      // 1. Let s be element.
      let s = element

      // 2. If the endings member of options is "native", set s
      //    to the result of converting line endings to native
      //    of element.
      if (options.endings === 'native') {
        s = convertLineEndingsNative(s)
      }

      // 3. Append the result of UTF-8 encoding s to bytes.
      bytes.push(encoder.encode(s))
    } else if (
      types.isAnyArrayBuffer(element) ||
      types.isTypedArray(element)
    ) {
      // 2. If element is a BufferSource, get a copy of the
      //    bytes held by the buffer source, and append those
      //    bytes to bytes.
      // NOTE(review): this pushes a *view*, not a copy — the Blob
      // constructor performs the actual copy downstream.
      if (!element.buffer) { // ArrayBuffer has no .buffer property
        bytes.push(new Uint8Array(element))
      } else {
        bytes.push(
          new Uint8Array(element.buffer, element.byteOffset, element.byteLength)
        )
      }
    } else if (isBlobLike(element)) {
      // 3. If element is a Blob, append the bytes it represents
      //    to bytes.
      bytes.push(element)
    }
  }

  // 3. Return bytes.
  return bytes
}
/**
 * Replace every line ending in s (LF or CRLF) with the platform-native one:
 * CRLF on Windows, LF everywhere else.
 * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native
 * @param {string} s
 * @returns {string}
 */
function convertLineEndingsNative (s) {
  const nativeLineEnding = process.platform === 'win32' ? '\r\n' : '\n'
  return s.replace(/\r?\n/g, nativeLineEnding)
}
// If this function is moved to ./util.js, some tools (such as
// rollup) will warn about circular dependencies. See:
// https://github.com/nodejs/undici/issues/1629
function isFileLike ( object ) {
return (
( NativeFile && object instanceof NativeFile ) ||
object instanceof File || (
object &&
( typeof object . stream === 'function' ||
typeof object . arrayBuffer === 'function' ) &&
object [ Symbol . toStringTag ] === 'File'
)
)
}
module . exports = { File , FileLike , isFileLike }
/***/ } ) ,
/***/ 2015 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { isBlobLike , toUSVString , makeIterator } = _ _nccwpck _require _ _ ( 2538 )
const { kState } = _ _nccwpck _require _ _ ( 5861 )
const { File : UndiciFile , FileLike , isFileLike } = _ _nccwpck _require _ _ ( 8511 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { Blob , File : NativeFile } = _ _nccwpck _require _ _ ( 4300 )
/** @type {globalThis['File']} */
// Prefer the runtime's native File (when node:buffer provides one),
// falling back to undici's implementation.
const File = NativeFile ?? UndiciFile
// https://xhr.spec.whatwg.org/#formdata
/**
 * Spec-compliant FormData. The entry list is a plain array of
 * { name, value } records stored under the private kState symbol;
 * entry creation (scalar-value conversion, File wrapping) is delegated
 * to makeEntry.
 */
class FormData {
  constructor (form) {
    // The constructor takes no usable argument; passing one (an HTML form
    // element in browsers) is explicitly rejected.
    if (form !== undefined) {
      throw webidl.errors.conversionFailed({
        prefix: 'FormData constructor',
        argument: 'Argument 1',
        types: ['undefined']
      })
    }

    this[kState] = []
  }

  append (name, value, filename = undefined) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' })

    // A filename is only meaningful for Blob values.
    if (arguments.length === 3 && !isBlobLike(value)) {
      throw new TypeError(
        "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'"
      )
    }

    // 1. Let value be value if given; otherwise blobValue.

    name = webidl.converters.USVString(name)
    value = isBlobLike(value)
      ? webidl.converters.Blob(value, { strict: false })
      : webidl.converters.USVString(value)
    filename = arguments.length === 3
      ? webidl.converters.USVString(filename)
      : undefined

    // 2. Let entry be the result of creating an entry with
    // name, value, and filename if given.
    const entry = makeEntry(name, value, filename)

    // 3. Append entry to this’s entry list.
    this[kState].push(entry)
  }

  delete (name) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' })

    name = webidl.converters.USVString(name)

    // The delete(name) method steps are to remove all entries whose name
    // is name from this’s entry list.
    this[kState] = this[kState].filter(entry => entry.name !== name)
  }

  get (name) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' })

    name = webidl.converters.USVString(name)

    // 1. If there is no entry whose name is name in this’s entry list,
    // then return null.
    const idx = this[kState].findIndex((entry) => entry.name === name)
    if (idx === -1) {
      return null
    }

    // 2. Return the value of the first entry whose name is name from
    // this’s entry list.
    return this[kState][idx].value
  }

  getAll (name) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' })

    name = webidl.converters.USVString(name)

    // 1. If there is no entry whose name is name in this’s entry list,
    // then return the empty list.
    // 2. Return the values of all entries whose name is name, in order,
    // from this’s entry list.
    return this[kState]
      .filter((entry) => entry.name === name)
      .map((entry) => entry.value)
  }

  has (name) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' })

    name = webidl.converters.USVString(name)

    // The has(name) method steps are to return true if there is an entry
    // whose name is name in this’s entry list; otherwise false.
    return this[kState].findIndex((entry) => entry.name === name) !== -1
  }

  set (name, value, filename = undefined) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' })

    if (arguments.length === 3 && !isBlobLike(value)) {
      throw new TypeError(
        "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'"
      )
    }

    // The set(name, value) and set(name, blobValue, filename) method steps
    // are:

    // 1. Let value be value if given; otherwise blobValue.

    name = webidl.converters.USVString(name)
    value = isBlobLike(value)
      ? webidl.converters.Blob(value, { strict: false })
      : webidl.converters.USVString(value)
    // NOTE(review): append() converts filename via
    // webidl.converters.USVString while set() uses toUSVString —
    // presumably equivalent, but worth confirming/unifying.
    filename = arguments.length === 3
      ? toUSVString(filename)
      : undefined

    // 2. Let entry be the result of creating an entry with name, value, and
    // filename if given.
    const entry = makeEntry(name, value, filename)

    // 3. If there are entries in this’s entry list whose name is name, then
    // replace the first such entry with entry and remove the others.
    const idx = this[kState].findIndex((entry) => entry.name === name)
    if (idx !== -1) {
      this[kState] = [
        ...this[kState].slice(0, idx),
        entry,
        ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name)
      ]
    } else {
      // 4. Otherwise, append entry to this’s entry list.
      this[kState].push(entry)
    }
  }

  entries () {
    webidl.brandCheck(this, FormData)

    return makeIterator(
      () => this[kState].map(pair => [pair.name, pair.value]),
      'FormData',
      'key+value'
    )
  }

  keys () {
    webidl.brandCheck(this, FormData)

    // The iterator kind ('key') selects which half of each pair is yielded.
    return makeIterator(
      () => this[kState].map(pair => [pair.name, pair.value]),
      'FormData',
      'key'
    )
  }

  values () {
    webidl.brandCheck(this, FormData)

    return makeIterator(
      () => this[kState].map(pair => [pair.name, pair.value]),
      'FormData',
      'value'
    )
  }

  /**
   * @param {(value: string, key: string, self: FormData) => void} callbackFn
   * @param {unknown} thisArg
   */
  forEach (callbackFn, thisArg = globalThis) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' })

    if (typeof callbackFn !== 'function') {
      throw new TypeError(
        "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'."
      )
    }

    for (const [key, value] of this) {
      callbackFn.apply(thisArg, [value, key, this])
    }
  }
}
FormData . prototype [ Symbol . iterator ] = FormData . prototype . entries
Object . defineProperties ( FormData . prototype , {
[ Symbol . toStringTag ] : {
value : 'FormData' ,
configurable : true
}
} )
/**
 * Create a form-data entry from a name, a value and an optional filename.
 * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry
 * @param {string} name
 * @param {string|Blob} value
 * @param {?string} filename
 * @returns {{ name: string, value: string|File|FileLike }}
 */
function makeEntry (name, value, filename) {
  // Round-tripping through a Buffer replaces lone surrogates with U+FFFD,
  // which is exactly "convert into a scalar value string".
  // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end
  name = Buffer.from(name).toString('utf8')

  // String values get the same scalar-value-string conversion and are
  // returned as-is.
  if (typeof value === 'string') {
    return { name, value: Buffer.from(value).toString('utf8') }
  }

  // Non-string values must end up as a File(-like) object. Plain
  // blob-likes are wrapped in a File named "blob" first.
  if (!isFileLike(value)) {
    value = value instanceof Blob
      ? new File([value], 'blob', { type: value.type })
      : new FileLike(value, 'blob', { type: value.type })
  }

  // An explicit filename forces a re-wrap of the (now File-like) value
  // under that name, preserving its type and lastModified.
  if (filename !== undefined) {
    /** @type {FilePropertyBag} */
    const options = {
      type: value.type,
      lastModified: value.lastModified
    }

    value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile
      ? new File([value], filename, options)
      : new FileLike(value, filename, options)
  }

  return { name, value }
}
module . exports = { FormData }
/***/ } ) ,
/***/ 1246 :
/***/ ( ( module ) => {
"use strict" ;
// In case of breaking changes, increase the version
// number to avoid conflicts.
const globalOrigin = Symbol.for('undici.globalOrigin.1')

/**
 * Read the process-wide origin override, if one has been installed.
 * @returns {URL|undefined}
 */
function getGlobalOrigin () {
  const { [globalOrigin]: origin } = globalThis
  return origin
}
/**
 * Install (or clear) the process-wide origin override. Passing undefined
 * clears it; otherwise newOrigin must be a string or URL that parses to an
 * http: or https: URL.
 * @param {string|URL|undefined} newOrigin
 * @throws {Error} when newOrigin is neither undefined, a string, nor a URL
 * @throws {TypeError} when the URL's scheme is not http/https
 */
function setGlobalOrigin (newOrigin) {
  // Clearing: store undefined under the shared symbol and stop.
  if (newOrigin === undefined) {
    Object.defineProperty(globalThis, globalOrigin, {
      value: undefined,
      writable: true,
      enumerable: false,
      configurable: false
    })

    return
  }

  if (typeof newOrigin !== 'string' && !(newOrigin instanceof URL)) {
    throw new Error('Invalid base url')
  }

  const parsedURL = new URL(newOrigin)

  if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') {
    throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`)
  }

  Object.defineProperty(globalThis, globalOrigin, {
    value: parsedURL,
    writable: true,
    enumerable: false,
    configurable: false
  })
}
module . exports = {
getGlobalOrigin ,
setGlobalOrigin
}
/***/ } ) ,
/***/ 554 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// https://github.com/Ethan-Arrowood/undici-fetch
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
const { kGuard } = _ _nccwpck _require _ _ ( 5861 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
const {
makeIterator ,
isValidHeaderName ,
isValidHeaderValue
} = _ _nccwpck _require _ _ ( 2538 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const kHeadersMap = Symbol ( 'headers map' )
const kHeadersSortedMap = Symbol ( 'headers map sorted' )
/**
 * Remove leading and trailing HTTP whitespace (CR, LF, TAB, SP) bytes
 * from a header value.
 * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize
 * @param {string} potentialValue
 * @returns {string}
 */
function headerValueNormalize (potentialValue) {
  // Scan in from both ends with single-character tests; unlike trimming
  // with an unanchored RegExp over the whole value, this cannot be
  // driven into ReDoS by attacker-controlled input.
  let end = potentialValue.length
  while (end > 0 && /[\r\n\t ]/.test(potentialValue.charAt(end - 1))) {
    end--
  }

  let start = 0
  while (start < end && /[\r\n\t ]/.test(potentialValue.charAt(start))) {
    start++
  }

  return potentialValue.slice(start, end)
}
/**
 * Fill a Headers object from a HeadersInit value: either a sequence of
 * [name, value] pairs or a record of name -> value.
 * @param {Headers} headers target to append into
 * @param {*} object already webidl-converted init value
 * @throws a webidl exception for malformed pairs or unusable init types
 */
function fill (headers, object) {
  // Sequence form: every entry must be an exact [name, value] pair.
  if (Array.isArray(object)) {
    for (const header of object) {
      if (header.length !== 2) {
        throw webidl.errors.exception({
          header: 'Headers constructor',
          message: `expected name/value pair to be length 2, found ${header.length}.`
        })
      }

      headers.append(header[0], header[1])
    }

    return
  }

  // Record form. Note: null must NOT take this branch — it falls through
  // to the conversion failure below.
  if (typeof object === 'object' && object !== null) {
    for (const [key, value] of Object.entries(object)) {
      headers.append(key, value)
    }

    return
  }

  throw webidl.errors.conversionFailed({
    prefix: 'Headers constructor',
    argument: 'Argument 1',
    types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
  })
}
/**
 * Low-level header storage: a Map keyed by lowercased header name holding
 * { name, value } records (original casing preserved in `name`), plus a
 * separate `cookies` array mirroring every Set-Cookie value and a cached
 * sort-and-combined list under kHeadersSortedMap (cleared on mutation).
 */
class HeadersList {
  /** @type {[string, string][]|null} */
  cookies = null

  constructor (init) {
    if (init instanceof HeadersList) {
      // Copy constructor: clone the map, share the (immutable) cache.
      this[kHeadersMap] = new Map(init[kHeadersMap])
      this[kHeadersSortedMap] = init[kHeadersSortedMap]
      this.cookies = init.cookies
    } else {
      this[kHeadersMap] = new Map(init)
      this[kHeadersSortedMap] = null
    }
  }

  // https://fetch.spec.whatwg.org/#header-list-contains
  contains (name) {
    // Byte-case-insensitive: the map is keyed by lowercased names.
    return this[kHeadersMap].has(name.toLowerCase())
  }

  clear () {
    this[kHeadersMap].clear()
    this[kHeadersSortedMap] = null
    this.cookies = null
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-append
  append (name, value) {
    this[kHeadersSortedMap] = null

    const lowercaseName = name.toLowerCase()
    const existing = this[kHeadersMap].get(lowercaseName)

    if (existing) {
      // Combine with the existing entry, keeping its original-cased name.
      // Cookie values are joined with '; ', everything else with ', '.
      const delimiter = lowercaseName === 'cookie' ? '; ' : ', '
      this[kHeadersMap].set(lowercaseName, {
        name: existing.name,
        value: `${existing.value}${delimiter}${value}`
      })
    } else {
      this[kHeadersMap].set(lowercaseName, { name, value })
    }

    if (lowercaseName === 'set-cookie') {
      this.cookies ??= []
      this.cookies.push(value)
    }
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-set
  set (name, value) {
    this[kHeadersSortedMap] = null

    const lowercaseName = name.toLowerCase()

    if (lowercaseName === 'set-cookie') {
      this.cookies = [value]
    }

    // Replaces any existing entry outright (no combining).
    return this[kHeadersMap].set(lowercaseName, { name, value })
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-delete
  delete (name) {
    this[kHeadersSortedMap] = null

    const lowercaseName = name.toLowerCase()

    if (lowercaseName === 'set-cookie') {
      this.cookies = null
    }

    return this[kHeadersMap].delete(lowercaseName)
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-get
  get (name) {
    // Returns the combined value, or null when the list does not
    // contain name.
    const entry = this[kHeadersMap].get(name.toLowerCase())
    return entry === undefined ? null : entry.value ?? null
  }

  * [Symbol.iterator] () {
    // Yields [lowercasedName, combinedValue] pairs in insertion order.
    for (const [name, { value }] of this[kHeadersMap]) {
      yield [name, value]
    }
  }

  get entries () {
    // Plain-object view keyed by the original-cased header names.
    const headers = {}

    for (const { name, value } of this[kHeadersMap].values()) {
      headers[name] = value
    }

    return headers
  }
}
// https://fetch.spec.whatwg.org/#headers-class
/**
 * Spec-compliant Headers. Wraps a HeadersList (under kHeadersList) and
 * layers on webidl argument conversion, header name/value validation, and
 * the guard stored under kGuard ('none' by default; 'immutable' makes
 * every mutation throw).
 */
class Headers {
  constructor (init = undefined) {
    this[kHeadersList] = new HeadersList()

    // The new Headers(init) constructor steps are:

    // 1. Set this’s guard to "none".
    this[kGuard] = 'none'

    // 2. If init is given, then fill this with init.
    if (init !== undefined) {
      init = webidl.converters.HeadersInit(init)
      fill(this, init)
    }
  }

  // https://fetch.spec.whatwg.org/#dom-headers-append
  append (name, value) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' })

    name = webidl.converters.ByteString(name)
    value = webidl.converters.ByteString(value)

    // 1. Normalize value.
    value = headerValueNormalize(value)

    // 2. If name is not a header name or value is not a
    //    header value, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.append',
        value: name,
        type: 'header name'
      })
    } else if (!isValidHeaderValue(value)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.append',
        value,
        type: 'header value'
      })
    }

    // 3. If headers’s guard is "immutable", then throw a TypeError.
    // 4. Otherwise, if headers’s guard is "request" and name is a
    //    forbidden header name, return.
    // Note: undici does not implement forbidden header names
    if (this[kGuard] === 'immutable') {
      throw new TypeError('immutable')
    } else if (this[kGuard] === 'request-no-cors') {
      // 5. Otherwise, if headers’s guard is "request-no-cors":
      // TODO
    }

    // 6. Otherwise, if headers’s guard is "response" and name is a
    //    forbidden response-header name, return.

    // 7. Append (name, value) to headers’s header list.
    // 8. If headers’s guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from headers
    return this[kHeadersList].append(name, value)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-delete
  delete (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.delete',
        value: name,
        type: 'header name'
      })
    }

    // 2. If this’s guard is "immutable", then throw a TypeError.
    // 3. Otherwise, if this’s guard is "request" and name is a
    //    forbidden header name, return.
    // 4. Otherwise, if this’s guard is "request-no-cors", name
    //    is not a no-CORS-safelisted request-header name, and
    //    name is not a privileged no-CORS request-header name,
    //    return.
    // 5. Otherwise, if this’s guard is "response" and name is
    //    a forbidden response-header name, return.
    // Note: undici does not implement forbidden header names
    if (this[kGuard] === 'immutable') {
      throw new TypeError('immutable')
    } else if (this[kGuard] === 'request-no-cors') {
      // TODO
    }

    // 6. If this’s header list does not contain name, then
    //    return.
    if (!this[kHeadersList].contains(name)) {
      return
    }

    // 7. Delete name from this’s header list.
    // 8. If this’s guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from this.
    return this[kHeadersList].delete(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-get
  get (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.get',
        value: name,
        type: 'header name'
      })
    }

    // 2. Return the result of getting name from this’s header
    //    list.
    return this[kHeadersList].get(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-has
  has (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.has',
        value: name,
        type: 'header name'
      })
    }

    // 2. Return true if this’s header list contains name;
    //    otherwise false.
    return this[kHeadersList].contains(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-set
  set (name, value) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' })

    name = webidl.converters.ByteString(name)
    value = webidl.converters.ByteString(value)

    // 1. Normalize value.
    value = headerValueNormalize(value)

    // 2. If name is not a header name or value is not a
    //    header value, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.set',
        value: name,
        type: 'header name'
      })
    } else if (!isValidHeaderValue(value)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.set',
        value,
        type: 'header value'
      })
    }

    // 3. If this’s guard is "immutable", then throw a TypeError.
    // 4. Otherwise, if this’s guard is "request" and name is a
    //    forbidden header name, return.
    // 5. Otherwise, if this’s guard is "request-no-cors" and
    //    name/value is not a no-CORS-safelisted request-header,
    //    return.
    // 6. Otherwise, if this’s guard is "response" and name is a
    //    forbidden response-header name, return.
    // Note: undici does not implement forbidden header names
    if (this[kGuard] === 'immutable') {
      throw new TypeError('immutable')
    } else if (this[kGuard] === 'request-no-cors') {
      // TODO
    }

    // 7. Set (name, value) in this’s header list.
    // 8. If this’s guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from this
    return this[kHeadersList].set(name, value)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
  getSetCookie () {
    webidl.brandCheck(this, Headers)

    // 1. If this’s header list does not contain `Set-Cookie`, then return « ».
    // 2. Return the values of all headers in this’s header list whose name is
    //    a byte-case-insensitive match for `Set-Cookie`, in order.

    // The underlying list already mirrors Set-Cookie values separately;
    // return a defensive copy.
    const list = this[kHeadersList].cookies

    if (list) {
      return [...list]
    }

    return []
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
  // Lazily computes the sorted+combined entry list and caches it on the
  // underlying HeadersList (the list clears the cache on every mutation).
  get [kHeadersSortedMap] () {
    if (this[kHeadersList][kHeadersSortedMap]) {
      return this[kHeadersList][kHeadersSortedMap]
    }

    // 1. Let headers be an empty list of headers with the key being the name
    //    and value the value.
    const headers = []

    // 2. Let names be the result of convert header names to a sorted-lowercase
    //    set with all the names of the headers in list.
    // (Names are already lowercased in the list's iterator.)
    const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)
    const cookies = this[kHeadersList].cookies

    // 3. For each name of names:
    for (const [name, value] of names) {
      // 1. If name is `set-cookie`, then:
      if (name === 'set-cookie') {
        // 1. Let values be a list of all values of headers in list whose name
        //    is a byte-case-insensitive match for name, in order.

        // 2. For each value of values:
        // 1. Append (name, value) to headers.
        for (const value of cookies) {
          headers.push([name, value])
        }
      } else {
        // 2. Otherwise:

        // 1. Let value be the result of getting name from list.

        // 2. Assert: value is non-null.
        assert(value !== null)

        // 3. Append (name, value) to headers.
        headers.push([name, value])
      }
    }

    this[kHeadersList][kHeadersSortedMap] = headers

    // 4. Return headers.
    return headers
  }

  keys () {
    webidl.brandCheck(this, Headers)

    // makeIterator's kind ('key') selects the name half of each pair.
    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'key'
    )
  }

  values () {
    webidl.brandCheck(this, Headers)

    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'value'
    )
  }

  entries () {
    webidl.brandCheck(this, Headers)

    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'key+value'
    )
  }

  /**
   * @param {(value: string, key: string, self: Headers) => void} callbackFn
   * @param {unknown} thisArg
   */
  forEach (callbackFn, thisArg = globalThis) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })

    if (typeof callbackFn !== 'function') {
      throw new TypeError(
        "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'."
      )
    }

    for (const [key, value] of this) {
      callbackFn.apply(thisArg, [value, key, this])
    }
  }

  [Symbol.for('nodejs.util.inspect.custom')] () {
    webidl.brandCheck(this, Headers)

    // Inspecting a Headers object shows the underlying list.
    return this[kHeadersList]
  }
}
// Iterating a Headers instance yields [name, value] entries.
Headers.prototype[Symbol.iterator] = Headers.prototype.entries

// Every public method is enumerable (per WebIDL); the iterator symbol is
// not, and the toStringTag reads "Headers".
const headersDescriptors = {
  [Symbol.iterator]: { enumerable: false },
  [Symbol.toStringTag]: {
    value: 'Headers',
    configurable: true
  }
}
for (const method of [
  'append', 'delete', 'get', 'has', 'set', 'getSetCookie',
  'keys', 'values', 'entries', 'forEach'
]) {
  headersDescriptors[method] = kEnumerableProperty
}
Object.defineProperties(Headers.prototype, headersDescriptors)
// WebIDL converter for HeadersInit: iterables convert as sequences of
// [name, value] pairs, plain objects as string records; anything else
// fails the conversion.
webidl.converters.HeadersInit = function (V) {
  if (webidl.util.Type(V) === 'Object') {
    if (V[Symbol.iterator]) {
      return webidl.converters['sequence<sequence<ByteString>>'](V)
    }

    return webidl.converters['record<ByteString, ByteString>'](V)
  }

  throw webidl.errors.conversionFailed({
    prefix: 'Headers constructor',
    argument: 'Argument 1',
    types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
  })
}
// Public surface of this module.
module.exports = { fill, Headers, HeadersList }
/***/ } ) ,
/***/ 4881 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// https://github.com/Ethan-Arrowood/undici-fetch
const {
Response ,
makeNetworkError ,
makeAppropriateNetworkError ,
filterResponse ,
makeResponse
} = _ _nccwpck _require _ _ ( 7823 )
const { Headers } = _ _nccwpck _require _ _ ( 554 )
const { Request , makeRequest } = _ _nccwpck _require _ _ ( 8359 )
const zlib = _ _nccwpck _require _ _ ( 9796 )
const {
bytesMatch ,
makePolicyContainer ,
clonePolicyContainer ,
requestBadPort ,
TAOCheck ,
appendRequestOriginHeader ,
responseLocationURL ,
requestCurrentURL ,
setRequestReferrerPolicyOnRedirect ,
tryUpgradeRequestToAPotentiallyTrustworthyURL ,
createOpaqueTimingInfo ,
appendFetchMetadata ,
corsCheck ,
crossOriginResourcePolicyCheck ,
determineRequestsReferrer ,
coarsenedSharedCurrentTime ,
createDeferredPromise ,
isBlobLike ,
sameOrigin ,
isCancelled ,
isAborted ,
isErrorLike ,
fullyReadBody ,
readableStreamClose ,
isomorphicEncode ,
urlIsLocal ,
urlIsHttpHttpsScheme ,
urlHasHttpsScheme
} = _ _nccwpck _require _ _ ( 2538 )
const { kState , kHeaders , kGuard , kRealm } = _ _nccwpck _require _ _ ( 5861 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { safelyExtractBody } = _ _nccwpck _require _ _ ( 9990 )
const {
redirectStatus ,
nullBodyStatus ,
safeMethods ,
requestBodyHeader ,
subresource ,
DOMException
} = _ _nccwpck _require _ _ ( 1037 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
const EE = _ _nccwpck _require _ _ ( 2361 )
const { Readable , pipeline } = _ _nccwpck _require _ _ ( 2781 )
const { isErrored , isReadable , nodeMajor , nodeMinor } = _ _nccwpck _require _ _ ( 3983 )
const { dataURLProcessor , serializeAMimeType } = _ _nccwpck _require _ _ ( 685 )
const { TransformStream } = _ _nccwpck _require _ _ ( 5356 )
const { getGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { STATUS _CODES } = _ _nccwpck _require _ _ ( 3685 )
/** @type {import('buffer').resolveObjectURL} */
let resolveObjectURL
let ReadableStream = globalThis . ReadableStream
// Fetch controller: an EventEmitter tracking one fetch's lifecycle state
// ('ongoing' | 'terminated' | 'aborted') and its live connection.
class Fetch extends EE {
  constructor (dispatcher) {
    super()

    this.dispatcher = dispatcher
    this.connection = null
    this.dump = false
    this.state = 'ongoing'
    // 2 terminated listeners get added per request, but only 1 gets
    // removed, so 20 redirects leave 21 listeners registered.
    // See https://github.com/nodejs/undici/issues/1711
    // TODO (fix): Find and fix root cause for leaked listener.
    this.setMaxListeners(21)
  }

  // Transition to "terminated" and tear down the connection.
  terminate (reason) {
    if (this.state !== 'ongoing') {
      return
    }

    this.state = 'terminated'
    this.connection?.destroy(reason)
    this.emit('terminated', reason)
  }

  // https://fetch.spec.whatwg.org/#fetch-controller-abort
  abort (error) {
    if (this.state !== 'ongoing') {
      return
    }

    // 1. Set controller’s state to "aborted".
    this.state = 'aborted'

    // 2.-3. Fall back to an "AbortError" DOMException when no reason was
    // provided.
    if (!error) {
      error = new DOMException('The operation was aborted.', 'AbortError')
    }

    // 4.-5. StructuredSerialize is skipped here; the raw error serves as
    // the serialized abort reason.
    this.serializedAbortReason = error

    this.connection?.destroy(error)
    this.emit('terminated', error)
  }
}
// https://fetch.spec.whatwg.org/#fetch-method
async function fetch (input, init = {}) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' })

  // 1. Let p be a new promise.
  const p = createDeferredPromise()

  // 2. Construct the Request; any constructor exception rejects p.
  let requestObject
  try {
    requestObject = new Request(input, init)
  } catch (e) {
    p.reject(e)
    return p.promise
  }

  // 3. Let request be requestObject’s request.
  const request = requestObject[kState]

  // 4. A signal that is already aborted aborts the whole call up front.
  if (requestObject.signal.aborted) {
    abortFetch(p, request, null, requestObject.signal.reason)
    return p.promise
  }

  // 5.-6. Requests issued from a ServiceWorkerGlobalScope must bypass
  // service workers.
  const globalObject = request.client.globalObject
  if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') {
    request.serviceWorkers = 'none'
  }

  // 7.-10. State shared with the abort listener and response handler.
  let responseObject = null
  const relevantRealm = null
  let locallyAborted = false
  let controller = null

  // 11. Add the following abort steps to requestObject’s signal:
  requestObject.signal.addEventListener(
    'abort',
    () => {
      // Record the local abort, fail p, then abort the controller.
      locallyAborted = true
      abortFetch(p, request, responseObject, requestObject.signal.reason)
      if (controller != null) {
        controller.abort()
      }
    },
    { once: true }
  )

  // 12. Once the body is fully processed, finalize and report timing.
  const handleFetchDone = (response) =>
    finalizeAndReportTiming(response, 'fetch')

  // 13. Process the eventual response by settling p.
  const processResponse = (response) => {
    // A locally-aborted call was already rejected; nothing left to do.
    if (locallyAborted) {
      return
    }

    // Aborted responses propagate the controller's abort reason.
    if (response.aborted) {
      abortFetch(p, request, responseObject, controller.serializedAbortReason)
      return
    }

    // Network errors reject with a TypeError carrying the cause.
    if (response.type === 'error') {
      p.reject(
        Object.assign(new TypeError('fetch failed'), { cause: response.error })
      )
      return
    }

    // Wrap the internal response in an immutable Response and resolve p.
    responseObject = new Response()
    responseObject[kState] = response
    responseObject[kRealm] = relevantRealm
    responseObject[kHeaders][kHeadersList] = response.headersList
    responseObject[kHeaders][kGuard] = 'immutable'
    responseObject[kHeaders][kRealm] = relevantRealm
    p.resolve(responseObject)
  }

  controller = fetching({
    request,
    processResponseEndOfBody: handleFetchDone,
    processResponse,
    dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici
  })

  // 14. Return p.
  return p.promise
}
// https://fetch.spec.whatwg.org/#finalize-and-report-timing
function finalizeAndReportTiming (response, initiatorType = 'other') {
  // 1. Aborted network errors report nothing.
  if (response.type === 'error' && response.aborted) {
    return
  }

  // 2. Nothing to report without a URL list.
  if (!response.urlList?.length) {
    return
  }

  // 3.-5. Original URL, timing info and cache state of the response.
  const originalURL = response.urlList[0]
  let timingInfo = response.timingInfo
  let cacheState = response.cacheState

  // 6. Only HTTP(S) URLs are reported.
  if (!urlIsHttpHttpsScheme(originalURL)) {
    return
  }

  // 7. If timingInfo is null, then return.
  if (timingInfo === null) {
    return
  }

  // 8. Without the timing-allow-passed flag, expose only an opaque timing
  // info and clear the cache state.
  if (!timingInfo.timingAllowPassed) {
    timingInfo = createOpaqueTimingInfo({
      startTime: timingInfo.startTime
    })
    cacheState = ''
  }

  // 9. Stamp the end time with the coarsened shared current time.
  // TODO: given global’s relevant settings object’s cross-origin isolated
  // capability?
  timingInfo.endTime = coarsenedSharedCurrentTime()

  // 10. Set response’s timing info to timingInfo.
  response.timingInfo = timingInfo

  // 11. Mark resource timing for timingInfo, originalURL, initiatorType,
  // global, and cacheState.
  markResourceTiming(
    timingInfo,
    originalURL,
    initiatorType,
    globalThis,
    cacheState
  )
}
// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing
function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {
  // performance.markResourceTiming ships in Node.js 18.2.0 and later; on
  // older runtimes the timing entry is silently dropped.
  const supported = nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)
  if (supported) {
    performance.markResourceTiming(timingInfo, originalURL, initiatorType, globalThis, cacheState)
  }
}
// https://fetch.spec.whatwg.org/#abort-fetch
function abortFetch (p, request, responseObject, error) {
  // AbortSignal.reason landed in Node v17.2.0; older Nodes hand us
  // undefined here. Remove this once Node v16 is no longer supported.
  if (!error) {
    error = new DOMException('The operation was aborted.', 'AbortError')
  }

  // 1. Reject promise with error.
  p.reject(error)

  // Cancels a body stream with `error`, tolerating Node's spurious
  // ERR_INVALID_STATE rejection.
  const cancelBody = (stream) => {
    stream.cancel(error).catch((err) => {
      if (err.code === 'ERR_INVALID_STATE') {
        // Node bug?
        return
      }
      throw err
    })
  }

  // 2. Cancel the request body while it is still readable.
  if (request.body != null && isReadable(request.body?.stream)) {
    cancelBody(request.body.stream)
  }

  // 3. Without a response object there is nothing more to cancel.
  if (responseObject == null) {
    return
  }

  // 4.-5. Cancel the response body while it is still readable.
  const response = responseObject[kState]
  if (response.body != null && isReadable(response.body?.stream)) {
    cancelBody(response.body.stream)
  }
}
// https://fetch.spec.whatwg.org/#fetching
function fetching ({
  request,
  processRequestBodyChunkLength,
  processRequestEndOfBody,
  processResponse,
  processResponseEndOfBody,
  processResponseConsumeBody,
  useParallelQueue = false,
  dispatcher // undici
}) {
  // 1.-3. Derive the task destination and cross-origin isolation
  // capability from the request's client, when one exists.
  let taskDestination = null
  let crossOriginIsolatedCapability = false
  if (request.client != null) {
    taskDestination = request.client.globalObject
    crossOriginIsolatedCapability =
      request.client.crossOriginIsolatedCapability
  }

  // 4. If useParallelQueue is true, then set taskDestination to the
  // result of starting a new parallel queue.
  // TODO

  // 5. Timing info starts (and post-redirect-starts) at the coarsened
  // shared current time.
  const startTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability)
  const timingInfo = createOpaqueTimingInfo({
    startTime
  })

  // 6. Bundle everything into the fetch params record that every later
  // stage of the pipeline receives.
  const fetchParams = {
    controller: new Fetch(dispatcher),
    request,
    timingInfo,
    processRequestBodyChunkLength,
    processRequestEndOfBody,
    processResponse,
    processResponseConsumeBody,
    processResponseEndOfBody,
    taskDestination,
    crossOriginIsolatedCapability
  }

  // 7. fetching() is only ever called from fetch(), so the body has
  // already been extracted into a stream.
  assert(!request.body || request.body.stream)

  // 8. Resolve a "client" window to the client itself (when its global is
  // a Window) or to "no-window".
  if (request.window === 'client') {
    // TODO: What if request.client is null?
    request.window =
      request.client?.globalObject?.constructor?.name === 'Window'
        ? request.client
        : 'no-window'
  }

  // 9. Resolve a "client" origin to the client's origin.
  if (request.origin === 'client') {
    // TODO: What if request.client is null?
    request.origin = request.client?.origin
  }

  // 10. If all of the following conditions are true:
  // TODO

  // 11. Resolve a "client" policy container: clone the client's when one
  // exists, otherwise start from a fresh policy container.
  if (request.policyContainer === 'client') {
    if (request.client != null) {
      request.policyContainer = clonePolicyContainer(
        request.client.policyContainer
      )
    } else {
      request.policyContainer = makePolicyContainer()
    }
  }

  // 12. Default the `Accept` header to `*/*`. Destination-specific values
  // (document/image/style) are TODO.
  if (!request.headersList.contains('accept')) {
    const value = '*/*'
    request.headersList.append('accept', value)
  }

  // 13. Default the `Accept-Language` header.
  if (!request.headersList.contains('accept-language')) {
    request.headersList.append('accept-language', '*')
  }

  // 14. If request’s priority is null, then use request’s initiator and
  // destination appropriately in setting request’s priority to a
  // user-agent-defined object.
  if (request.priority === null) {
    // TODO
  }

  // 15. If request is a subresource request, then:
  if (subresource.includes(request.destination)) {
    // TODO
  }

  // 16. Run main fetch; any failure terminates the controller.
  mainFetch(fetchParams)
    .catch(err => {
      fetchParams.controller.terminate(err)
    })

  // 17. Return fetchParam's controller
  return fetchParams.controller
}
// https://fetch.spec.whatwg.org/#concept-main-fetch
// Orchestrates one fetch: scheme dispatch, response filtering by tainting,
// and integrity checking, before handing off to fetchFinale.
async function mainFetch (fetchParams, recursive = false) {
  // 1. Let request be fetchParams’s request.
  const request = fetchParams.request

  // 2. Let response be null.
  let response = null

  // 3. Local-URLs-only requests may only target local URLs.
  if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) {
    response = makeNetworkError('local URLs only')
  }

  // 4. Run report Content Security Policy violations for request.
  // TODO

  // 5. Upgrade request to a potentially trustworthy URL, if appropriate.
  tryUpgradeRequestToAPotentiallyTrustworthyURL(request)

  // 6. Block requests aimed at bad ports. (Mixed-content and CSP blocking
  // are TODO.)
  if (requestBadPort(request) === 'blocked') {
    response = makeNetworkError('bad port')
  }
  // TODO: should fetching request be blocked as mixed content?
  // TODO: should request be blocked by Content Security Policy?

  // 7. Default the referrer policy from the policy container.
  if (request.referrerPolicy === '') {
    request.referrerPolicy = request.policyContainer.referrerPolicy
  }

  // 8. Resolve the referrer unless it is "no-referrer".
  if (request.referrer !== 'no-referrer') {
    request.referrer = determineRequestsReferrer(request)
  }

  // 9. HSTS-based https upgrade of the current URL.
  // TODO

  // 10. If recursive is false, then run the remaining steps in parallel.
  // TODO

  // 11. With no response yet, fetch one via the first matching statement.
  if (response === null) {
    response = await (async () => {
      const currentURL = requestCurrentURL(request)

      if (
        // - request’s current URL’s origin is same origin with request’s
        //   origin, and request’s response tainting is "basic"
        (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') ||
        // request’s current URL’s scheme is "data"
        (currentURL.protocol === 'data:') ||
        // - request’s mode is "navigate" or "websocket"
        (request.mode === 'navigate' || request.mode === 'websocket')
      ) {
        // Basic tainting, then scheme fetch.
        request.responseTainting = 'basic'
        return await schemeFetch(fetchParams)
      }

      // "same-origin" mode cannot reach this point legitimately.
      if (request.mode === 'same-origin') {
        return makeNetworkError('request mode cannot be "same-origin"')
      }

      // "no-cors" requires redirect mode "follow" and taints opaquely.
      if (request.mode === 'no-cors') {
        if (request.redirect !== 'follow') {
          return makeNetworkError(
            'redirect mode cannot be "follow" for "no-cors" request'
          )
        }

        request.responseTainting = 'opaque'
        return await schemeFetch(fetchParams)
      }

      // Everything beyond this point must be HTTP(S).
      if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) {
        return makeNetworkError('URL scheme must be a HTTP(S) scheme')
      }

      // CORS-preflight fetches are not implemented (TODO); fall through
      // to a CORS-tainted plain HTTP fetch.
      request.responseTainting = 'cors'
      return await httpFetch(fetchParams)
    })()
  }

  // 12. If recursive is true, then return response.
  if (recursive) {
    return response
  }

  // 13. Filter non-network-error, unfiltered responses by tainting.
  if (response.status !== 0 && !response.internalResponse) {
    // CORS-exposed header-name list handling.
    if (request.responseTainting === 'cors') {
      // TODO
    }

    if (request.responseTainting === 'basic') {
      response = filterResponse(response, 'basic')
    } else if (request.responseTainting === 'cors') {
      response = filterResponse(response, 'cors')
    } else if (request.responseTainting === 'opaque') {
      response = filterResponse(response, 'opaque')
    } else {
      assert(false)
    }
  }

  // 14. internalResponse is response itself for network errors, the
  // wrapped internal response otherwise.
  let internalResponse =
    response.status === 0 ? response : response.internalResponse

  // 15. An empty response URL list is replaced by a clone of the
  // request's URL list.
  if (internalResponse.urlList.length === 0) {
    internalResponse.urlList.push(...request.urlList)
  }

  // 16. Propagate the timing-allow flag.
  if (!request.timingAllowFailed) {
    response.timingAllowPassed = true
  }

  // 17. Mixed-content / CSP / MIME-type / nosniff blocking.
  // TODO

  // 18. An opaque, range-requested 206 response to a request without a
  // `Range` header becomes a network error.
  if (
    response.type === 'opaque' &&
    internalResponse.status === 206 &&
    internalResponse.rangeRequested &&
    // Fix: the internal request keeps its headers in `headersList`
    // (`request.headers` is undefined on this object — see the
    // `headersList.contains` calls elsewhere in this module), so the
    // original `request.headers.contains('range')` threw a TypeError
    // whenever this branch was reached.
    !request.headersList.contains('range')
  ) {
    response = internalResponse = makeNetworkError()
  }

  // 19. HEAD/CONNECT responses and null-body statuses never carry a body;
  // drop it and have the controller dump any remaining enqueued data.
  if (
    response.status !== 0 &&
    (request.method === 'HEAD' ||
      request.method === 'CONNECT' ||
      nullBodyStatus.includes(internalResponse.status))
  ) {
    internalResponse.body = null
    fetchParams.controller.dump = true
  }

  // 20. Enforce subresource integrity metadata when present.
  if (request.integrity) {
    // Any integrity problem finalizes the fetch with a network error.
    const processBodyError = (reason) =>
      fetchFinale(fetchParams, makeNetworkError(reason))

    // Opaque tainting or a missing body cannot be integrity-checked.
    if (request.responseTainting === 'opaque' || response.body == null) {
      processBodyError(response.error)
      return
    }

    const processBody = (bytes) => {
      // 1. Bytes must match the integrity metadata. [SRI]
      if (!bytesMatch(bytes, request.integrity)) {
        processBodyError('integrity mismatch')
        return
      }

      // 2.-3. Re-extract the verified bytes as the body and finalize.
      response.body = safelyExtractBody(bytes)[0]
      fetchFinale(fetchParams, response)
    }

    // 4. Fully read response’s body given processBody and processBodyError.
    await fullyReadBody(response.body, processBody, processBodyError)
  } else {
    // 21. Otherwise, run fetch finale given fetchParams and response.
    fetchFinale(fetchParams, response)
  }
}
// https://fetch.spec.whatwg.org/#concept-scheme-fetch
// given a fetch params fetchParams
async function schemeFetch (fetchParams) {
  // Note: the connection is destroyed on redirect, which puts fetchParams
  // in a cancelled state, so only bail out here when no redirect has
  // happened yet. See https://github.com/nodejs/undici/issues/1776
  if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) {
    return makeAppropriateNetworkError(fetchParams)
  }

  const { request } = fetchParams
  const { protocol: scheme } = requestCurrentURL(request)

  // Dispatch on the current URL's scheme.
  switch (scheme) {
    case 'about:': {
      // about:blank is not implemented; every about: URL is an error.
      return makeNetworkError('about scheme is not supported')
    }
    case 'blob:': {
      if (!resolveObjectURL) {
        resolveObjectURL = (__nccwpck_require__(4300).resolveObjectURL)
      }

      // 1. Let blobURLEntry be request’s current URL’s blob URL entry.
      const blobURLEntry = requestCurrentURL(request)

      // Buffer.resolveObjectURL does not ignore URL queries, so blob URLs
      // carrying one must be rejected here.
      // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56
      if (blobURLEntry.search.length !== 0) {
        return makeNetworkError('NetworkError when attempting to fetch resource.')
      }

      const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString())

      // 2. Only `GET` requests resolving to an actual Blob are allowed.
      if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) {
        return makeNetworkError('invalid method')
      }

      // 3.-6. Extract the blob's body, its serialized length, and type.
      const bodyWithType = safelyExtractBody(blobURLEntryObject)
      const body = bodyWithType[0]
      const length = isomorphicEncode(`${body.length}`)
      const type = bodyWithType[1] ?? ''

      // 7. Respond `OK` with Content-Length / Content-Type headers.
      const response = makeResponse({
        statusText: 'OK',
        headersList: [
          ['content-length', { name: 'Content-Length', value: length }],
          ['content-type', { name: 'Content-Type', value: type }]
        ]
      })

      response.body = body
      return response
    }
    case 'data:': {
      // 1.-2. Parse the data: URL; failure is a network error.
      const currentURL = requestCurrentURL(request)
      const dataURLStruct = dataURLProcessor(currentURL)
      if (dataURLStruct === 'failure') {
        return makeNetworkError('failed to fetch the data URL')
      }

      // 3.-4. Respond `OK` with the serialized MIME type and the body.
      const mimeType = serializeAMimeType(dataURLStruct.mimeType)
      return makeResponse({
        statusText: 'OK',
        headersList: [
          ['content-type', { name: 'Content-Type', value: mimeType }]
        ],
        body: safelyExtractBody(dataURLStruct.body)[0]
      })
    }
    case 'file:': {
      // file: URLs are intentionally unsupported.
      return makeNetworkError('not implemented... yet...')
    }
    case 'http:':
    case 'https:': {
      // Delegate to HTTP fetch; surface failures as network errors.
      return await httpFetch(fetchParams)
        .catch((err) => makeNetworkError(err))
    }
    default: {
      return makeNetworkError('unknown scheme')
    }
  }
}
// https://fetch.spec.whatwg.org/#finalize-response
function finalizeResponse (fetchParams, response) {
  // 1. Set fetchParams’s request’s done flag.
  fetchParams.request.done = true

  // 2. When a process-response-done hook was supplied, queue a fetch task
  // (a microtask here) to run it with the response.
  if (fetchParams.processResponseDone != null) {
    queueMicrotask(() => fetchParams.processResponseDone(response))
  }
}
// https://fetch.spec.whatwg.org/#fetch-finale
async function fetchFinale ( fetchParams , response ) {
// 1. If response is a network error, then:
if ( response . type === 'error' ) {
// 1. Set response’ s URL list to « fetchParams’ s request’ s URL list[0] ».
response . urlList = [ fetchParams . request . urlList [ 0 ] ]
// 2. Set response’ s timing info to the result of creating an opaque timing
// info for fetchParams’ s timing info.
response . timingInfo = createOpaqueTimingInfo ( {
startTime : fetchParams . timingInfo . startTime
} )
}
// 2. Let processResponseEndOfBody be the following steps:
const processResponseEndOfBody = ( ) => {
// 1. Set fetchParams’ s request’ s done flag.
fetchParams . request . done = true
// If fetchParams’ s process response end-of-body is not null,
// then queue a fetch task to run fetchParams’ s process response
// end-of-body given response with fetchParams’ s task destination.
if ( fetchParams . processResponseEndOfBody != null ) {
queueMicrotask ( ( ) => fetchParams . processResponseEndOfBody ( response ) )
}
}
// 3. If fetchParams’ s process response is non-null, then queue a fetch task
// to run fetchParams’ s process response given response, with fetchParams’ s
// task destination.
if ( fetchParams . processResponse != null ) {
queueMicrotask ( ( ) => fetchParams . processResponse ( response ) )
}
// 4. If response’ s body is null, then run processResponseEndOfBody.
if ( response . body == null ) {
processResponseEndOfBody ( )
} else {
// 5. Otherwise:
// 1. Let transformStream be a new a TransformStream.
// 2. Let identityTransformAlgorithm be an algorithm which, given chunk,
// enqueues chunk in transformStream.
const identityTransformAlgorithm = ( chunk , controller ) => {
controller . enqueue ( chunk )
}
// 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm
// and flushAlgorithm set to processResponseEndOfBody.
const transformStream = new TransformStream ( {
start ( ) { } ,
transform : identityTransformAlgorithm ,
flush : processResponseEndOfBody
} , {
size ( ) {
return 1
}
} , {
size ( ) {
return 1
}
} )
// 4. Set response’ s body to the result of piping response’ s body through transformStream.
response . body = { stream : response . body . stream . pipeThrough ( transformStream ) }
}
// 6. If fetchParams’ s process response consume body is non-null, then:
if ( fetchParams . processResponseConsumeBody != null ) {
// 1. Let processBody given nullOrBytes be this step: run fetchParams’ s
// process response consume body given response and nullOrBytes.
const processBody = ( nullOrBytes ) => fetchParams . processResponseConsumeBody ( response , nullOrBytes )
// 2. Let processBodyError be this step: run fetchParams’ s process
// response consume body given response and failure.
const processBodyError = ( failure ) => fetchParams . processResponseConsumeBody ( response , failure )
// 3. If response’ s body is null, then queue a fetch task to run processBody
// given null, with fetchParams’ s task destination.
if ( response . body == null ) {
queueMicrotask ( ( ) => processBody ( null ) )
} else {
// 4. Otherwise, fully read response’ s body given processBody, processBodyError,
// and fetchParams’ s task destination.
await fullyReadBody ( response . body , processBody , processBodyError )
}
}
}
// https://fetch.spec.whatwg.org/#http-fetch
/**
 * HTTP fetch: runs HTTP-network-or-cache fetch, applies the CORS, TAO and
 * cross-origin resource policy checks, and follows/handles redirects
 * according to the request's redirect mode.
 * @see https://fetch.spec.whatwg.org/#http-fetch
 */
async function httpFetch (fetchParams) {
  // Steps 1 and 4: the request being fetched and its timing info.
  const { request, timingInfo } = fetchParams

  // Steps 2–3: the (possibly filtered) response and the underlying one.
  let response = null
  let actualResponse = null

  // 5. If request’s service-workers mode is "all", then:
  if (request.serviceWorkers === 'all') {
    // TODO
  }

  // 6. No service-worker-provided response, so go to the network/cache.
  if (response === null) {
    // 6.1. If makeCORSPreflight is true and one of these conditions is true:
    // TODO
    // 6.2. Following redirects disables service workers for this request.
    if (request.redirect === 'follow') {
      request.serviceWorkers = 'none'
    }

    // 6.3. Run HTTP-network-or-cache fetch given fetchParams.
    response = await httpNetworkOrCacheFetch(fetchParams)
    actualResponse = response

    // 6.4. CORS check: fail the whole fetch on failure.
    if (request.responseTainting === 'cors' &&
      corsCheck(request, response) === 'failure') {
      return makeNetworkError('cors failure')
    }

    // 6.5. TAO check failure only taints timing, it does not abort.
    if (TAOCheck(request, response) === 'failure') {
      request.timingAllowFailed = true
    }
  }

  // 7. Cross-origin resource policy check for opaque taintings/responses.
  const isOpaque =
    request.responseTainting === 'opaque' || response.type === 'opaque'
  if (isOpaque &&
    crossOriginResourcePolicyCheck(
      request.origin,
      request.client,
      request.destination,
      actualResponse
    ) === 'blocked') {
    return makeNetworkError('blocked')
  }

  // 8. If actualResponse’s status is a redirect status, then:
  if (redirectStatus.includes(actualResponse.status)) {
    // 8.1. Tear down the connection unless redirects are handled manually.
    // See https://github.com/whatwg/fetch/issues/1288 (HTTP/2 RST_STREAM).
    if (request.redirect !== 'manual') {
      fetchParams.controller.connection.destroy()
    }

    // 8.2. Switch on request’s redirect mode:
    switch (request.redirect) {
      case 'error':
        response = makeNetworkError('unexpected redirect')
        break
      case 'manual':
        // NOTE(spec): on the web this would be an `opaqueredirect` filtered
        // response, but that doesn't make sense server side.
        // See https://github.com/nodejs/undici/issues/1193.
        response = actualResponse
        break
      case 'follow':
        response = await httpRedirectFetch(fetchParams, response)
        break
      default:
        assert(false)
    }
  }

  // 9–10. Attach timing info and return the response.
  response.timingInfo = timingInfo
  return response
}
// https://fetch.spec.whatwg.org/#http-redirect-fetch
/**
 * HTTP-redirect fetch: validates the redirect target, rewrites the request
 * (method, body, headers) as the spec requires, and re-enters main fetch.
 *
 * FIX: on a cross-origin redirect, strip `Proxy-Authorization`, `Cookie` and
 * `Host` in addition to `Authorization`. Previously only `Authorization` was
 * removed, leaking credentials to the redirect target (upstream undici fix,
 * CVE-2024-24758).
 * @see https://fetch.spec.whatwg.org/#http-redirect-fetch
 */
async function httpRedirectFetch (fetchParams, response) {
  // 1. Let request be fetchParams’s request.
  const request = fetchParams.request
  // 2. Let actualResponse be response, if response is not a filtered response,
  // and response’s internal response otherwise.
  const actualResponse = response.internalResponse
    ? response.internalResponse
    : response
  // 3. Let locationURL be actualResponse’s location URL given request’s current
  // URL’s fragment.
  let locationURL
  try {
    locationURL = responseLocationURL(
      actualResponse,
      requestCurrentURL(request).hash
    )
    // 4. If locationURL is null, then return response.
    if (locationURL == null) {
      return response
    }
  } catch (err) {
    // 5. If locationURL is failure, then return a network error.
    return makeNetworkError(err)
  }
  // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network
  // error.
  if (!urlIsHttpHttpsScheme(locationURL)) {
    return makeNetworkError('URL scheme must be a HTTP(S) scheme')
  }
  // 7. If request’s redirect count is 20, then return a network error.
  if (request.redirectCount === 20) {
    return makeNetworkError('redirect count exceeded')
  }
  // 8. Increase request’s redirect count by 1.
  request.redirectCount += 1
  // 9. If request’s mode is "cors", locationURL includes credentials, and
  // request’s origin is not same origin with locationURL’s origin, then return
  // a network error.
  if (
    request.mode === 'cors' &&
    (locationURL.username || locationURL.password) &&
    !sameOrigin(request, locationURL)
  ) {
    return makeNetworkError('cross origin not allowed for request mode "cors"')
  }
  // 10. If request’s response tainting is "cors" and locationURL includes
  // credentials, then return a network error.
  if (
    request.responseTainting === 'cors' &&
    (locationURL.username || locationURL.password)
  ) {
    return makeNetworkError(
      'URL cannot contain credentials for request mode "cors"'
    )
  }
  // 11. If actualResponse’s status is not 303, request’s body is non-null,
  // and request’s body’s source is null, then return a network error.
  if (
    actualResponse.status !== 303 &&
    request.body != null &&
    request.body.source == null
  ) {
    return makeNetworkError()
  }
  // 12. If one of the following is true
  // - actualResponse’s status is 301 or 302 and request’s method is `POST`
  // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD`
  if (
    ([301, 302].includes(actualResponse.status) && request.method === 'POST') ||
    (actualResponse.status === 303 &&
      !['GET', 'HEAD'].includes(request.method))
  ) {
    // then:
    // 1. Set request’s method to `GET` and request’s body to null.
    request.method = 'GET'
    request.body = null
    // 2. For each headerName of request-body-header name, delete headerName from
    // request’s header list.
    for (const headerName of requestBodyHeader) {
      request.headersList.delete(headerName)
    }
  }
  // 13. If request’s current URL’s origin is not same origin with locationURL’s
  // origin, then for each headerName of CORS non-wildcard request-header name,
  // delete headerName from request’s header list.
  if (!sameOrigin(requestCurrentURL(request), locationURL)) {
    // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
    request.headersList.delete('authorization')
    // Credentials must not follow the request to a different origin:
    // https://fetch.spec.whatwg.org/#authentication-entries
    request.headersList.delete('proxy-authorization')
    // `Cookie` and `Host` are scoped to the pre-redirect origin/host.
    request.headersList.delete('cookie')
    request.headersList.delete('host')
  }
  // 14. If request’s body is non-null, then set request’s body to the first return
  // value of safely extracting request’s body’s source.
  if (request.body != null) {
    assert(request.body.source != null)
    request.body = safelyExtractBody(request.body.source)[0]
  }
  // 15. Let timingInfo be fetchParams’s timing info.
  const timingInfo = fetchParams.timingInfo
  // 16. Set timingInfo’s redirect end time and post-redirect start time to the
  // coarsened shared current time given fetchParams’s cross-origin isolated
  // capability.
  timingInfo.redirectEndTime = timingInfo.postRedirectStartTime =
    coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability)
  // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s
  // redirect start time to timingInfo’s start time.
  if (timingInfo.redirectStartTime === 0) {
    timingInfo.redirectStartTime = timingInfo.startTime
  }
  // 18. Append locationURL to request’s URL list.
  request.urlList.push(locationURL)
  // 19. Invoke set request’s referrer policy on redirect on request and
  // actualResponse.
  setRequestReferrerPolicyOnRedirect(request, actualResponse)
  // 20. Return the result of running main fetch given fetchParams and true.
  return mainFetch(fetchParams, true)
}
// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
/**
 * HTTP-network-or-cache fetch: prepares the wire-level request (clone,
 * credentials, Content-Length, Referer, UA, caching headers, …) and runs
 * HTTP-network fetch, retrying once on 421 when safe.
 *
 * FIX: steps 10.1 and 24 previously tested `httpRequest.mode` against the
 * cache-mode values "only-if-cached" / "no-store" / "reload". `mode` never
 * holds those values, so the "only-if-cached" network error was unreachable.
 * The spec mandates httpRequest’s *cache mode*, i.e. `httpRequest.cache`.
 * @see https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
 */
async function httpNetworkOrCacheFetch (
  fetchParams,
  isAuthenticationFetch = false,
  isNewConnectionFetch = false
) {
  // 1. Let request be fetchParams’s request.
  const request = fetchParams.request
  // 2. Let httpFetchParams be null.
  let httpFetchParams = null
  // 3. Let httpRequest be null.
  let httpRequest = null
  // 4. Let response be null.
  let response = null
  // 5. Let storedResponse be null.
  // TODO: cache
  // 6. Let httpCache be null.
  const httpCache = null
  // 7. Let the revalidatingFlag be unset.
  const revalidatingFlag = false
  // 8. Run these steps, but abort when the ongoing fetch is terminated:
  // 1. If request’s window is "no-window" and request’s redirect mode is
  // "error", then set httpFetchParams to fetchParams and httpRequest to
  // request.
  if (request.window === 'no-window' && request.redirect === 'error') {
    httpFetchParams = fetchParams
    httpRequest = request
  } else {
    // Otherwise:
    // 1. Set httpRequest to a clone of request.
    httpRequest = makeRequest(request)
    // 2. Set httpFetchParams to a copy of fetchParams.
    httpFetchParams = { ...fetchParams }
    // 3. Set httpFetchParams’s request to httpRequest.
    httpFetchParams.request = httpRequest
  }
  // 3. Let includeCredentials be true if one of
  const includeCredentials =
    request.credentials === 'include' ||
    (request.credentials === 'same-origin' &&
      request.responseTainting === 'basic')
  // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s
  // body is non-null; otherwise null.
  const contentLength = httpRequest.body ? httpRequest.body.length : null
  // 5. Let contentLengthHeaderValue be null.
  let contentLengthHeaderValue = null
  // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or
  // `PUT`, then set contentLengthHeaderValue to `0`.
  if (
    httpRequest.body == null &&
    ['POST', 'PUT'].includes(httpRequest.method)
  ) {
    contentLengthHeaderValue = '0'
  }
  // 7. If contentLength is non-null, then set contentLengthHeaderValue to
  // contentLength, serialized and isomorphic encoded.
  if (contentLength != null) {
    contentLengthHeaderValue = isomorphicEncode(`${contentLength}`)
  }
  // 8. If contentLengthHeaderValue is non-null, then append
  // `Content-Length`/contentLengthHeaderValue to httpRequest’s header
  // list.
  if (contentLengthHeaderValue != null) {
    httpRequest.headersList.append('content-length', contentLengthHeaderValue)
  }
  // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`,
  // contentLengthHeaderValue) to httpRequest’s header list.
  // 10. If contentLength is non-null and httpRequest’s keepalive is true,
  // then:
  if (contentLength != null && httpRequest.keepalive) {
    // NOTE: keepalive is a noop outside of browser context.
  }
  // 11. If httpRequest’s referrer is a URL, then append
  // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded,
  // to httpRequest’s header list.
  if (httpRequest.referrer instanceof URL) {
    httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href))
  }
  // 12. Append a request `Origin` header for httpRequest.
  appendRequestOriginHeader(httpRequest)
  // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA]
  appendFetchMetadata(httpRequest)
  // 14. If httpRequest’s header list does not contain `User-Agent`, then
  // user agents should append `User-Agent`/default `User-Agent` value to
  // httpRequest’s header list.
  if (!httpRequest.headersList.contains('user-agent')) {
    httpRequest.headersList.append('user-agent', 'undici')
  }
  // 15. If httpRequest’s cache mode is "default" and httpRequest’s header
  // list contains `If-Modified-Since`, `If-None-Match`,
  // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set
  // httpRequest’s cache mode to "no-store".
  if (
    httpRequest.cache === 'default' &&
    (httpRequest.headersList.contains('if-modified-since') ||
      httpRequest.headersList.contains('if-none-match') ||
      httpRequest.headersList.contains('if-unmodified-since') ||
      httpRequest.headersList.contains('if-match') ||
      httpRequest.headersList.contains('if-range'))
  ) {
    httpRequest.cache = 'no-store'
  }
  // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent
  // no-cache cache-control header modification flag is unset, and
  // httpRequest’s header list does not contain `Cache-Control`, then append
  // `Cache-Control`/`max-age=0` to httpRequest’s header list.
  if (
    httpRequest.cache === 'no-cache' &&
    !httpRequest.preventNoCacheCacheControlHeaderModification &&
    !httpRequest.headersList.contains('cache-control')
  ) {
    httpRequest.headersList.append('cache-control', 'max-age=0')
  }
  // 17. If httpRequest’s cache mode is "no-store" or "reload", then:
  if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') {
    // 1. If httpRequest’s header list does not contain `Pragma`, then append
    // `Pragma`/`no-cache` to httpRequest’s header list.
    if (!httpRequest.headersList.contains('pragma')) {
      httpRequest.headersList.append('pragma', 'no-cache')
    }
    // 2. If httpRequest’s header list does not contain `Cache-Control`,
    // then append `Cache-Control`/`no-cache` to httpRequest’s header list.
    if (!httpRequest.headersList.contains('cache-control')) {
      httpRequest.headersList.append('cache-control', 'no-cache')
    }
  }
  // 18. If httpRequest’s header list contains `Range`, then append
  // `Accept-Encoding`/`identity` to httpRequest’s header list.
  if (httpRequest.headersList.contains('range')) {
    httpRequest.headersList.append('accept-encoding', 'identity')
  }
  // 19. Modify httpRequest’s header list per HTTP. Do not append a given
  // header if httpRequest’s header list contains that header’s name.
  // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129
  if (!httpRequest.headersList.contains('accept-encoding')) {
    if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) {
      httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate')
    } else {
      httpRequest.headersList.append('accept-encoding', 'gzip, deflate')
    }
  }
  // 20. If includeCredentials is true, then:
  if (includeCredentials) {
    // 1. If the user agent is not configured to block cookies for httpRequest
    // (see section 7 of [COOKIES]), then:
    // TODO: credentials
    // 2. If httpRequest’s header list does not contain `Authorization`, then:
    // TODO: credentials
  }
  // 21. If there’s a proxy-authentication entry, use it as appropriate.
  // TODO: proxy-authentication
  // 22. Set httpCache to the result of determining the HTTP cache
  // partition, given httpRequest.
  // TODO: cache
  // 23. If httpCache is null, then set httpRequest’s cache mode to
  // "no-store".
  if (httpCache == null) {
    httpRequest.cache = 'no-store'
  }
  // 24. If httpRequest’s cache mode is neither "no-store" nor "reload",
  // then:
  // FIX: compare the cache mode, not the request mode.
  if (httpRequest.cache !== 'no-store' && httpRequest.cache !== 'reload') {
    // TODO: cache
  }
  // 9. If aborted, then return the appropriate network error for fetchParams.
  // TODO
  // 10. If response is null, then:
  if (response == null) {
    // 1. If httpRequest’s cache mode is "only-if-cached", then return a
    // network error.
    // FIX: compare the cache mode, not the request mode ("only-if-cached"
    // is never a valid request mode, so this check was unreachable before).
    if (httpRequest.cache === 'only-if-cached') {
      return makeNetworkError('only if cached')
    }
    // 2. Let forwardResponse be the result of running HTTP-network fetch
    // given httpFetchParams, includeCredentials, and isNewConnectionFetch.
    const forwardResponse = await httpNetworkFetch(
      httpFetchParams,
      includeCredentials,
      isNewConnectionFetch
    )
    // 3. If httpRequest’s method is unsafe and forwardResponse’s status is
    // in the range 200 to 399, inclusive, invalidate appropriate stored
    // responses in httpCache, as per the "Invalidation" chapter of HTTP
    // Caching, and set storedResponse to null. [HTTP-CACHING]
    if (
      !safeMethods.includes(httpRequest.method) &&
      forwardResponse.status >= 200 &&
      forwardResponse.status <= 399
    ) {
      // TODO: cache
    }
    // 4. If the revalidatingFlag is set and forwardResponse’s status is 304,
    // then:
    if (revalidatingFlag && forwardResponse.status === 304) {
      // TODO: cache
    }
    // 5. If response is null, then:
    if (response == null) {
      // 1. Set response to forwardResponse.
      response = forwardResponse
      // 2. Store httpRequest and forwardResponse in httpCache, as per the
      // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING]
      // TODO: cache
    }
  }
  // 11. Set response’s URL list to a clone of httpRequest’s URL list.
  response.urlList = [...httpRequest.urlList]
  // 12. If httpRequest’s header list contains `Range`, then set response’s
  // range-requested flag.
  if (httpRequest.headersList.contains('range')) {
    response.rangeRequested = true
  }
  // 13. Set response’s request-includes-credentials to includeCredentials.
  response.requestIncludesCredentials = includeCredentials
  // 14. If response’s status is 401, httpRequest’s response tainting is not
  // "cors", includeCredentials is true, and request’s window is an environment
  // settings object, then:
  // TODO
  // 15. If response’s status is 407, then:
  if (response.status === 407) {
    // 1. If request’s window is "no-window", then return a network error.
    if (request.window === 'no-window') {
      return makeNetworkError()
    }
    // 2. ???
    // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams.
    if (isCancelled(fetchParams)) {
      return makeAppropriateNetworkError(fetchParams)
    }
    // 4. Prompt the end user as appropriate in request’s window and store
    // the result as a proxy-authentication entry. [HTTP-AUTH]
    // TODO: Invoke some kind of callback?
    // 5. Set response to the result of running HTTP-network-or-cache fetch given
    // fetchParams.
    // TODO
    return makeNetworkError('proxy authentication required')
  }
  // 16. If all of the following are true
  if (
    // response’s status is 421
    response.status === 421 &&
    // isNewConnectionFetch is false
    !isNewConnectionFetch &&
    // request’s body is null, or request’s body is non-null and request’s body’s source is non-null
    (request.body == null || request.body.source != null)
  ) {
    // then:
    // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
    if (isCancelled(fetchParams)) {
      return makeAppropriateNetworkError(fetchParams)
    }
    // 2. Set response to the result of running HTTP-network-or-cache
    // fetch given fetchParams, isAuthenticationFetch, and true.
    // TODO (spec): The spec doesn't specify this but we need to cancel
    // the active response before we can start a new one.
    // https://github.com/whatwg/fetch/issues/1293
    fetchParams.controller.connection.destroy()
    response = await httpNetworkOrCacheFetch(
      fetchParams,
      isAuthenticationFetch,
      true
    )
  }
  // 17. If isAuthenticationFetch is true, then create an authentication entry
  if (isAuthenticationFetch) {
    // TODO
  }
  // 18. Return response.
  return response
}
// https://fetch.spec.whatwg.org/#http-network-fetch
async function httpNetworkFetch (
fetchParams ,
includeCredentials = false ,
forceNewConnection = false
) {
assert ( ! fetchParams . controller . connection || fetchParams . controller . connection . destroyed )
fetchParams . controller . connection = {
abort : null ,
destroyed : false ,
destroy ( err ) {
if ( ! this . destroyed ) {
this . destroyed = true
this . abort ? . ( err ? ? new DOMException ( 'The operation was aborted.' , 'AbortError' ) )
}
}
}
// 1. Let request be fetchParams’ s request.
const request = fetchParams . request
// 2. Let response be null.
let response = null
// 3. Let timingInfo be fetchParams’ s timing info.
const timingInfo = fetchParams . timingInfo
// 4. Let httpCache be the result of determining the HTTP cache partition,
// given request.
// TODO: cache
const httpCache = null
// 5. If httpCache is null, then set request’ s cache mode to "no-store".
if ( httpCache == null ) {
request . cache = 'no-store'
}
// 6. Let networkPartitionKey be the result of determining the network
// partition key given request.
// TODO
// 7. Let newConnection be "yes" if forceNewConnection is true; otherwise
// "no".
const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars
// 8. Switch on request’ s mode:
if ( request . mode === 'websocket' ) {
// Let connection be the result of obtaining a WebSocket connection,
// given request’ s current URL.
// TODO
} else {
// Let connection be the result of obtaining a connection, given
// networkPartitionKey, request’ s current URL’ s origin,
// includeCredentials, and forceNewConnection.
// TODO
}
// 9. Run these steps, but abort when the ongoing fetch is terminated:
// 1. If connection is failure, then return a network error.
// 2. Set timingInfo’ s final connection timing info to the result of
// calling clamp and coarsen connection timing info with connection’ s
// timing info, timingInfo’ s post-redirect start time, and fetchParams’ s
// cross-origin isolated capability.
// 3. If connection is not an HTTP/2 connection, request’ s body is non-null,
// and request’ s body’ s source is null, then append (`Transfer-Encoding`,
// `chunked`) to request’ s header list.
// 4. Set timingInfo’ s final network-request start time to the coarsened
// shared current time given fetchParams’ s cross-origin isolated
// capability.
// 5. Set response to the result of making an HTTP request over connection
// using request with the following caveats:
// - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS]
// [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH]
// - If request’ s body is non-null, and request’ s body’ s source is null,
// then the user agent may have a buffer of up to 64 kibibytes and store
// a part of request’ s body in that buffer. If the user agent reads from
// request’ s body beyond that buffer’ s size and the user agent needs to
// resend request, then instead return a network error.
// - Set timingInfo’ s final network-response start time to the coarsened
// shared current time given fetchParams’ s cross-origin isolated capability,
// immediately after the user agent’ s HTTP parser receives the first byte
// of the response (e.g., frame header bytes for HTTP/2 or response status
// line for HTTP/1.x).
// - Wait until all the headers are transmitted.
// - Any responses whose status is in the range 100 to 199, inclusive,
// and is not 101, are to be ignored, except for the purposes of setting
// timingInfo’ s final network-response start time above.
// - If request’ s header list contains `Transfer-Encoding`/`chunked` and
// response is transferred via HTTP/1.0 or older, then return a network
// error.
// - If the HTTP request results in a TLS client certificate dialog, then:
// 1. If request’ s window is an environment settings object, make the
// dialog available in request’ s window.
// 2. Otherwise, return a network error.
// To transmit request’ s body body, run these steps:
let requestBody = null
// 1. If body is null and fetchParams’ s process request end-of-body is
// non-null, then queue a fetch task given fetchParams’ s process request
// end-of-body and fetchParams’ s task destination.
if ( request . body == null && fetchParams . processRequestEndOfBody ) {
queueMicrotask ( ( ) => fetchParams . processRequestEndOfBody ( ) )
} else if ( request . body != null ) {
// 2. Otherwise, if body is non-null:
// 1. Let processBodyChunk given bytes be these steps:
const processBodyChunk = async function * ( bytes ) {
// 1. If the ongoing fetch is terminated, then abort these steps.
if ( isCancelled ( fetchParams ) ) {
return
}
// 2. Run this step in parallel: transmit bytes.
yield bytes
// 3. If fetchParams’ s process request body is non-null, then run
// fetchParams’ s process request body given bytes’ s length.
fetchParams . processRequestBodyChunkLength ? . ( bytes . byteLength )
}
// 2. Let processEndOfBody be these steps:
const processEndOfBody = ( ) => {
// 1. If fetchParams is canceled, then abort these steps.
if ( isCancelled ( fetchParams ) ) {
return
}
// 2. If fetchParams’ s process request end-of-body is non-null,
// then run fetchParams’ s process request end-of-body.
if ( fetchParams . processRequestEndOfBody ) {
fetchParams . processRequestEndOfBody ( )
}
}
// 3. Let processBodyError given e be these steps:
const processBodyError = ( e ) => {
// 1. If fetchParams is canceled, then abort these steps.
if ( isCancelled ( fetchParams ) ) {
return
}
// 2. If e is an "AbortError" DOMException, then abort fetchParams’ s controller.
if ( e . name === 'AbortError' ) {
fetchParams . controller . abort ( )
} else {
fetchParams . controller . terminate ( e )
}
}
// 4. Incrementally read request’ s body given processBodyChunk, processEndOfBody,
// processBodyError, and fetchParams’ s task destination.
requestBody = ( async function * ( ) {
try {
for await ( const bytes of request . body . stream ) {
yield * processBodyChunk ( bytes )
}
processEndOfBody ( )
} catch ( err ) {
processBodyError ( err )
}
} ) ( )
}
try {
// socket is only provided for websockets
const { body , status , statusText , headersList , socket } = await dispatch ( { body : requestBody } )
if ( socket ) {
response = makeResponse ( { status , statusText , headersList , socket } )
} else {
const iterator = body [ Symbol . asyncIterator ] ( )
fetchParams . controller . next = ( ) => iterator . next ( )
response = makeResponse ( { status , statusText , headersList } )
}
} catch ( err ) {
// 10. If aborted, then:
if ( err . name === 'AbortError' ) {
// 1. If connection uses HTTP/2, then transmit an RST_STREAM frame.
fetchParams . controller . connection . destroy ( )
// 2. Return the appropriate network error for fetchParams.
return makeAppropriateNetworkError ( fetchParams )
}
return makeNetworkError ( err )
}
// 11. Let pullAlgorithm be an action that resumes the ongoing fetch
// if it is suspended.
const pullAlgorithm = ( ) => {
fetchParams . controller . resume ( )
}
// 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’ s
// controller with reason, given reason.
const cancelAlgorithm = ( reason ) => {
fetchParams . controller . abort ( reason )
}
// 13. Let highWaterMark be a non-negative, non-NaN number, chosen by
// the user agent.
// TODO
// 14. Let sizeAlgorithm be an algorithm that accepts a chunk object
// and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent.
// TODO
// 15. Let stream be a new ReadableStream.
// 16. Set up stream with pullAlgorithm set to pullAlgorithm,
// cancelAlgorithm set to cancelAlgorithm, highWaterMark set to
// highWaterMark, and sizeAlgorithm set to sizeAlgorithm.
if ( ! ReadableStream ) {
ReadableStream = ( _ _nccwpck _require _ _ ( 5356 ) . ReadableStream )
}
const stream = new ReadableStream (
{
async start ( controller ) {
fetchParams . controller . controller = controller
} ,
async pull ( controller ) {
await pullAlgorithm ( controller )
} ,
async cancel ( reason ) {
await cancelAlgorithm ( reason )
}
} ,
{
highWaterMark : 0 ,
size ( ) {
return 1
}
}
)
// 17. Run these steps, but abort when the ongoing fetch is terminated:
// 1. Set response’ s body to a new body whose stream is stream.
response . body = { stream }
// 2. If response is not a network error and request’ s cache mode is
// not "no-store", then update response in httpCache for request.
// TODO
// 3. If includeCredentials is true and the user agent is not configured
// to block cookies for request (see section 7 of [COOKIES]), then run the
// "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on
// the value of each header whose name is a byte-case-insensitive match for
// `Set-Cookie` in response’ s header list, if any, and request’ s current URL.
// TODO
// 18. If aborted, then:
// TODO
// 19. Run these steps in parallel:
// 1. Run these steps, but abort when fetchParams is canceled:
fetchParams . controller . on ( 'terminated' , onAborted )
// Body pump: repeatedly pulls the next chunk from the dispatcher via
// fetchParams.controller.next() and enqueues it into `stream`, until the
// body ends, the fetch is aborted/terminated, or the consumer stops pulling.
// Re-invoked by the ReadableStream's pull algorithm when demand returns.
fetchParams.controller.resume = async () => {
  // 1. While true
  while (true) {
    // 1-3. See onData...
    // 4. Set bytes to the result of handling content codings given
    // codings and bytes.
    let bytes
    let isFailure
    try {
      const { done, value } = await fetchParams.controller.next()

      if (isAborted(fetchParams)) {
        break
      }

      bytes = done ? undefined : value
    } catch (err) {
      if (fetchParams.controller.ended && !timingInfo.encodedBodySize) {
        // zlib doesn't like empty streams.
        bytes = undefined
      } else {
        bytes = err

        // err may be propagated from the result of calling readablestream.cancel,
        // which might not be an error. https://github.com/nodejs/undici/issues/2009
        isFailure = true
      }
    }

    if (bytes === undefined) {
      // 2. Otherwise, if the bytes transmission for response's message
      // body is done normally and stream is readable, then close
      // stream, finalize response for fetchParams and response, and
      // abort these in-parallel steps.
      readableStreamClose(fetchParams.controller.controller)
      finalizeResponse(fetchParams, response)
      return
    }

    // 5. Increase timingInfo's decoded body size by bytes's length.
    timingInfo.decodedBodySize += bytes?.byteLength ?? 0

    // 6. If bytes is failure, then terminate fetchParams's controller.
    if (isFailure) {
      fetchParams.controller.terminate(bytes)
      return
    }

    // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes
    // into stream.
    fetchParams.controller.controller.enqueue(new Uint8Array(bytes))

    // 8. If stream is errored, then terminate the ongoing fetch.
    if (isErrored(stream)) {
      fetchParams.controller.terminate()
      return
    }

    // 9. If stream doesn't need more data (desiredSize <= 0), stop pumping;
    // the pull algorithm will call resume again when the consumer reads.
    if (!fetchParams.controller.controller.desiredSize) {
      return
    }
  }
}
// 2. If aborted, then:
function onAborted ( reason ) {
// 2. If fetchParams is aborted, then:
if ( isAborted ( fetchParams ) ) {
// 1. Set response’ s aborted flag.
response . aborted = true
// 2. If stream is readable, then error stream with the result of
// deserialize a serialized abort reason given fetchParams’ s
// controller’ s serialized abort reason and an
// implementation-defined realm.
if ( isReadable ( stream ) ) {
fetchParams . controller . controller . error (
fetchParams . controller . serializedAbortReason
)
}
} else {
// 3. Otherwise, if stream is readable, error stream with a TypeError.
if ( isReadable ( stream ) ) {
fetchParams . controller . controller . error ( new TypeError ( 'terminated' , {
cause : isErrorLike ( reason ) ? reason : undefined
} ) )
}
}
// 4. If connection uses HTTP/2, then transmit an RST_STREAM frame.
// 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so.
fetchParams . controller . connection . destroy ( )
}
// 20. Return response.
return response
/**
 * Hands the request to the undici dispatcher and adapts its callback-style
 * handler API into a Promise that resolves with { status, statusText,
 * headersList, body } (or { ..., socket } for 101 upgrades).
 *
 * The handler object is stateful: `this.body` is the Readable that receives
 * raw chunks and `this.abort` the cancellation hook installed by onConnect.
 *
 * @param {{ body: any }} destructured - the (possibly proxied) request body.
 * @returns {Promise<object>} resolves once response headers (or an upgrade)
 *   arrive; rejects via onError.
 */
async function dispatch ({ body }) {
  const url = requestCurrentURL(request)
  /** @type {import('../..').Agent} */
  const agent = fetchParams.controller.dispatcher

  return new Promise((resolve, reject) => agent.dispatch(
    {
      path: url.pathname + url.search,
      origin: url.origin,
      method: request.method,
      // Mocked dispatchers get the original buffered source; real ones get
      // the streamed body passed in.
      body: fetchParams.controller.dispatcher.isMockActive ? request.body && request.body.source : body,
      headers: request.headersList.entries,
      maxRedirections: 0,
      upgrade: request.mode === 'websocket' ? 'websocket' : undefined
    },
    {
      body: null,
      abort: null,

      onConnect (abort) {
        // TODO (fix): Do we need connection here?
        const { connection } = fetchParams.controller

        if (connection.destroyed) {
          abort(new DOMException('The operation was aborted.', 'AbortError'))
        } else {
          // Wire cancellation both ways: termination aborts the dispatch,
          // and the abort hook is exposed on the handler and the connection.
          fetchParams.controller.on('terminated', abort)
          this.abort = connection.abort = abort
        }
      },

      onHeaders (status, headersList, resume, statusText) {
        // Ignore 1xx informational responses; wait for the final headers.
        if (status < 200) {
          return
        }

        let codings = []
        let location = ''

        const headers = new Headers()

        // headersList is a flat [name, value, name, value, ...] Buffer array.
        for (let n = 0; n < headersList.length; n += 2) {
          const key = headersList[n + 0].toString('latin1')
          const val = headersList[n + 1].toString('latin1')

          if (key.toLowerCase() === 'content-encoding') {
            // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
            // "All content-coding values are case-insensitive..."
            codings = val.toLowerCase().split(',').map((x) => x.trim())
          } else if (key.toLowerCase() === 'location') {
            location = val
          }

          headers.append(key, val)
        }

        this.body = new Readable({ read: resume })

        const decoders = []

        const willFollow = request.redirect === 'follow' &&
          location &&
          redirectStatus.includes(status)

        // Build the decompression pipeline per Content-Encoding, skipping it
        // for bodyless methods/statuses and for responses we will redirect past.
        // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
        if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
          for (const coding of codings) {
            // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
            if (coding === 'x-gzip' || coding === 'gzip') {
              decoders.push(zlib.createGunzip())
            } else if (coding === 'deflate') {
              decoders.push(zlib.createInflate())
            } else if (coding === 'br') {
              decoders.push(zlib.createBrotliDecompress())
            } else {
              // Unknown coding: deliver the body undecoded.
              decoders.length = 0
              break
            }
          }
        }

        resolve({
          status,
          statusText,
          headersList: headers[kHeadersList],
          body: decoders.length
            ? pipeline(this.body, ...decoders, () => {})
            : this.body.on('error', () => {})
        })

        return true
      },

      onData (chunk) {
        if (fetchParams.controller.dump) {
          return
        }

        // 1. If one or more bytes have been transmitted from response's
        // message body, then:

        // 1. Let bytes be the transmitted bytes.
        const bytes = chunk

        // 2. Let codings be the result of extracting header list values
        // given `Content-Encoding` and response's header list.
        // See pullAlgorithm.

        // 3. Increase timingInfo's encoded body size by bytes's length.
        timingInfo.encodedBodySize += bytes.byteLength

        // 4. See pullAlgorithm...
        // Returning push()'s result propagates backpressure to the dispatcher.
        return this.body.push(bytes)
      },

      onComplete () {
        if (this.abort) {
          fetchParams.controller.off('terminated', this.abort)
        }

        fetchParams.controller.ended = true

        // End of body: signal EOF to the Readable.
        this.body.push(null)
      },

      onError (error) {
        if (this.abort) {
          fetchParams.controller.off('terminated', this.abort)
        }

        this.body?.destroy(error)

        fetchParams.controller.terminate(error)

        reject(error)
      },

      onUpgrade (status, headersList, socket) {
        // Only 101 Switching Protocols is handled (websocket mode).
        if (status !== 101) {
          return
        }

        const headers = new Headers()

        for (let n = 0; n < headersList.length; n += 2) {
          const key = headersList[n + 0].toString('latin1')
          const val = headersList[n + 1].toString('latin1')

          headers.append(key, val)
        }

        resolve({
          status,
          statusText: STATUS_CODES[status],
          headersList: headers[kHeadersList],
          socket
        })

        return true
      }
    }
  ))
}
}
module . exports = {
fetch ,
Fetch ,
fetching ,
finalizeAndReportTiming
}
/***/ } ) ,
/***/ 8359 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
/* globals AbortController */
const { extractBody , mixinBody , cloneBody } = _ _nccwpck _require _ _ ( 9990 )
const { Headers , fill : fillHeaders , HeadersList } = _ _nccwpck _require _ _ ( 554 )
const { FinalizationRegistry } = _ _nccwpck _require _ _ ( 6436 ) ( )
const util = _ _nccwpck _require _ _ ( 3983 )
const {
isValidHTTPToken ,
sameOrigin ,
normalizeMethod ,
makePolicyContainer
} = _ _nccwpck _require _ _ ( 2538 )
const {
forbiddenMethods ,
corsSafeListedMethods ,
referrerPolicy ,
requestRedirect ,
requestMode ,
requestCredentials ,
requestCache ,
requestDuplex
} = _ _nccwpck _require _ _ ( 1037 )
const { kEnumerableProperty } = util
const { kHeaders , kSignal , kState , kGuard , kRealm } = _ _nccwpck _require _ _ ( 5861 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { getGlobalOrigin } = _ _nccwpck _require _ _ ( 1246 )
const { URLSerializer } = _ _nccwpck _require _ _ ( 685 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { getMaxListeners , setMaxListeners , getEventListeners , defaultMaxListeners } = _ _nccwpck _require _ _ ( 2361 )
let TransformStream = globalThis . TransformStream
const kInit = Symbol ( 'init' )
const kAbortController = Symbol ( 'abortController' )
const requestFinalizer = new FinalizationRegistry ( ( { signal , abort } ) => {
signal . removeEventListener ( 'abort' , abort )
} )
// https://fetch.spec.whatwg.org/#request-class
//
// Fixes in this revision:
// - `get credentials` was the only accessor missing the WebIDL brand check;
//   it now calls webidl.brandCheck like every other member.
// - The no-cors error message had an unbalanced quote around the method name.
// - Redundant `init.integrity !== undefined && init.integrity != null`
//   collapsed to `init.integrity != null`.
// - Method validation uses the local `method` consistently and `new TypeError`.
class Request {
  // https://fetch.spec.whatwg.org/#dom-request
  constructor (input, init = {}) {
    // Internal construction path (used by clone()): skip all init steps.
    if (input === kInit) {
      return
    }

    webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' })

    input = webidl.converters.RequestInfo(input)
    init = webidl.converters.RequestInit(init)

    // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object
    this[kRealm] = {
      settingsObject: {
        baseUrl: getGlobalOrigin(),
        get origin () {
          return this.baseUrl?.origin
        },
        policyContainer: makePolicyContainer()
      }
    }

    // 1. Let request be null.
    let request = null

    // 2. Let fallbackMode be null.
    let fallbackMode = null

    // 3. Let baseURL be this's relevant settings object's API base URL.
    const baseUrl = this[kRealm].settingsObject.baseUrl

    // 4. Let signal be null.
    let signal = null

    // 5. If input is a string, then:
    if (typeof input === 'string') {
      // 1. Let parsedURL be the result of parsing input with baseURL.
      // 2. If parsedURL is failure, then throw a TypeError.
      let parsedURL
      try {
        parsedURL = new URL(input, baseUrl)
      } catch (err) {
        throw new TypeError('Failed to parse URL from ' + input, { cause: err })
      }

      // 3. If parsedURL includes credentials, then throw a TypeError.
      if (parsedURL.username || parsedURL.password) {
        throw new TypeError(
          'Request cannot be constructed from a URL that includes credentials: ' +
          input
        )
      }

      // 4. Set request to a new request whose URL is parsedURL.
      request = makeRequest({ urlList: [parsedURL] })

      // 5. Set fallbackMode to "cors".
      fallbackMode = 'cors'
    } else {
      // 6. Otherwise:
      // 7. Assert: input is a Request object.
      assert(input instanceof Request)

      // 8. Set request to input's request.
      request = input[kState]

      // 9. Set signal to input's signal.
      signal = input[kSignal]
    }

    // 7. Let origin be this's relevant settings object's origin.
    const origin = this[kRealm].settingsObject.origin

    // 8. Let window be "client".
    let window = 'client'

    // 9. If request's window is an environment settings object and its origin
    // is same origin with origin, then set window to request's window.
    if (
      request.window?.constructor?.name === 'EnvironmentSettingsObject' &&
      sameOrigin(request.window, origin)
    ) {
      window = request.window
    }

    // 10. If init["window"] exists and is non-null, then throw a TypeError.
    if (init.window != null) {
      throw new TypeError(`'window' option '${window}' must be null`)
    }

    // 11. If init["window"] exists, then set window to "no-window".
    if ('window' in init) {
      window = 'no-window'
    }

    // 12. Set request to a new request with the following properties:
    request = makeRequest({
      // URL request's URL.
      // undici implementation note: this is set as the first item in request's urlList in makeRequest
      // method request's method.
      method: request.method,
      // header list A copy of request's header list.
      // undici implementation note: headersList is cloned in makeRequest
      headersList: request.headersList,
      // unsafe-request flag Set.
      unsafeRequest: request.unsafeRequest,
      // client This's relevant settings object.
      client: this[kRealm].settingsObject,
      // window window.
      window,
      // priority request's priority.
      priority: request.priority,
      // origin request's origin. The propagation of the origin is only significant for navigation requests
      // being handled by a service worker. In this scenario a request can have an origin that is different
      // from the current client.
      origin: request.origin,
      // referrer request's referrer.
      referrer: request.referrer,
      // referrer policy request's referrer policy.
      referrerPolicy: request.referrerPolicy,
      // mode request's mode.
      mode: request.mode,
      // credentials mode request's credentials mode.
      credentials: request.credentials,
      // cache mode request's cache mode.
      cache: request.cache,
      // redirect mode request's redirect mode.
      redirect: request.redirect,
      // integrity metadata request's integrity metadata.
      integrity: request.integrity,
      // keepalive request's keepalive.
      keepalive: request.keepalive,
      // reload-navigation flag request's reload-navigation flag.
      reloadNavigation: request.reloadNavigation,
      // history-navigation flag request's history-navigation flag.
      historyNavigation: request.historyNavigation,
      // URL list A clone of request's URL list.
      urlList: [...request.urlList]
    })

    // 13. If init is not empty, then:
    if (Object.keys(init).length > 0) {
      // 1. If request's mode is "navigate", then set it to "same-origin".
      if (request.mode === 'navigate') {
        request.mode = 'same-origin'
      }

      // 2. Unset request's reload-navigation flag.
      request.reloadNavigation = false

      // 3. Unset request's history-navigation flag.
      request.historyNavigation = false

      // 4. Set request's origin to "client".
      request.origin = 'client'

      // 5. Set request's referrer to "client".
      request.referrer = 'client'

      // 6. Set request's referrer policy to the empty string.
      request.referrerPolicy = ''

      // 7. Set request's URL to request's current URL.
      request.url = request.urlList[request.urlList.length - 1]

      // 8. Set request's URL list to « request's URL ».
      request.urlList = [request.url]
    }

    // 14. If init["referrer"] exists, then:
    if (init.referrer !== undefined) {
      // 1. Let referrer be init["referrer"].
      const referrer = init.referrer

      // 2. If referrer is the empty string, then set request's referrer to
      // "no-referrer".
      if (referrer === '') {
        request.referrer = 'no-referrer'
      } else {
        // 1. Let parsedReferrer be the result of parsing referrer with
        // baseURL.
        // 2. If parsedReferrer is failure, then throw a TypeError.
        let parsedReferrer
        try {
          parsedReferrer = new URL(referrer, baseUrl)
        } catch (err) {
          throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err })
        }

        // 3. If one of the following is true
        // - parsedReferrer's cannot-be-a-base-URL is true, scheme is "about",
        //   and path contains a single string "client"
        // - parsedReferrer's origin is not same origin with origin
        // then set request's referrer to "client".
        // TODO

        // 4. Otherwise, set request's referrer to parsedReferrer.
        request.referrer = parsedReferrer
      }
    }

    // 15. If init["referrerPolicy"] exists, then set request's referrer policy
    // to it.
    if (init.referrerPolicy !== undefined) {
      request.referrerPolicy = init.referrerPolicy
    }

    // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise.
    let mode
    if (init.mode !== undefined) {
      mode = init.mode
    } else {
      mode = fallbackMode
    }

    // 17. If mode is "navigate", then throw a TypeError.
    if (mode === 'navigate') {
      throw webidl.errors.exception({
        header: 'Request constructor',
        message: 'invalid request mode navigate.'
      })
    }

    // 18. If mode is non-null, set request's mode to mode.
    if (mode != null) {
      request.mode = mode
    }

    // 19. If init["credentials"] exists, then set request's credentials mode
    // to it.
    if (init.credentials !== undefined) {
      request.credentials = init.credentials
    }

    // 20. If init["cache"] exists, then set request's cache mode to it.
    if (init.cache !== undefined) {
      request.cache = init.cache
    }

    // 21. If request's cache mode is "only-if-cached" and request's mode is
    // not "same-origin", then throw a TypeError.
    if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {
      throw new TypeError(
        "'only-if-cached' can be set only with 'same-origin' mode"
      )
    }

    // 22. If init["redirect"] exists, then set request's redirect mode to it.
    if (init.redirect !== undefined) {
      request.redirect = init.redirect
    }

    // 23. If init["integrity"] exists, then set request's integrity metadata
    // to it. (`!= null` covers both null and undefined.)
    if (init.integrity != null) {
      request.integrity = String(init.integrity)
    }

    // 24. If init["keepalive"] exists, then set request's keepalive to it.
    if (init.keepalive !== undefined) {
      request.keepalive = Boolean(init.keepalive)
    }

    // 25. If init["method"] exists, then:
    if (init.method !== undefined) {
      // 1. Let method be init["method"].
      let method = init.method

      // 2. If method is not a method or method is a forbidden method, then
      // throw a TypeError.
      if (!isValidHTTPToken(method)) {
        throw new TypeError(`'${method}' is not a valid HTTP method.`)
      }

      if (forbiddenMethods.indexOf(method.toUpperCase()) !== -1) {
        throw new TypeError(`'${method}' HTTP method is unsupported.`)
      }

      // 3. Normalize method.
      method = normalizeMethod(method)

      // 4. Set request's method to method.
      request.method = method
    }

    // 26. If init["signal"] exists, then set signal to it.
    if (init.signal !== undefined) {
      signal = init.signal
    }

    // 27. Set this's request to request.
    this[kState] = request

    // 28. Set this's signal to a new AbortSignal object with this's relevant
    // Realm.
    const ac = new AbortController()
    this[kSignal] = ac.signal
    this[kSignal][kRealm] = this[kRealm]

    // 29. If signal is not null, then make this's signal follow signal.
    if (signal != null) {
      if (
        !signal ||
        typeof signal.aborted !== 'boolean' ||
        typeof signal.addEventListener !== 'function'
      ) {
        throw new TypeError(
          "Failed to construct 'Request': member signal is not of type AbortSignal."
        )
      }

      if (signal.aborted) {
        ac.abort(signal.reason)
      } else {
        // Keep a strong ref to ac while request object
        // is alive. This is needed to prevent AbortController
        // from being prematurely garbage collected.
        // See, https://github.com/nodejs/undici/issues/1926.
        this[kAbortController] = ac

        const acRef = new WeakRef(ac)
        const abort = function () {
          const ac = acRef.deref()
          if (ac !== undefined) {
            ac.abort(this.reason)
          }
        }

        // Third-party AbortControllers may not work with these.
        // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619.
        try {
          // If the max amount of listeners is equal to the default, increase it.
          // This is only available in node >= v19.9.0.
          if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) {
            setMaxListeners(100, signal)
          } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) {
            setMaxListeners(100, signal)
          }
        } catch {}

        signal.addEventListener('abort', abort, { once: true })
        requestFinalizer.register(ac, { signal, abort })
      }
    }

    // 30. Set this's headers to a new Headers object with this's relevant
    // Realm, whose header list is request's header list and guard is
    // "request".
    this[kHeaders] = new Headers()
    this[kHeaders][kHeadersList] = request.headersList
    this[kHeaders][kGuard] = 'request'
    this[kHeaders][kRealm] = this[kRealm]

    // 31. If this's request's mode is "no-cors", then:
    if (mode === 'no-cors') {
      // 1. If this's request's method is not a CORS-safelisted method,
      // then throw a TypeError.
      if (!corsSafeListedMethods.includes(request.method)) {
        throw new TypeError(
          `'${request.method}' is unsupported in no-cors mode.`
        )
      }

      // 2. Set this's headers's guard to "request-no-cors".
      this[kHeaders][kGuard] = 'request-no-cors'
    }

    // 32. If init is not empty, then:
    if (Object.keys(init).length !== 0) {
      // 1. Let headers be a copy of this's headers and its associated header
      // list.
      let headers = new Headers(this[kHeaders])

      // 2. If init["headers"] exists, then set headers to init["headers"].
      if (init.headers !== undefined) {
        headers = init.headers
      }

      // 3. Empty this's headers's header list.
      this[kHeaders][kHeadersList].clear()

      // 4. If headers is a Headers object, then for each header in its header
      // list, append header's name/header's value to this's headers.
      if (headers.constructor.name === 'Headers') {
        for (const [key, val] of headers) {
          this[kHeaders].append(key, val)
        }
      } else {
        // 5. Otherwise, fill this's headers with headers.
        fillHeaders(this[kHeaders], headers)
      }
    }

    // 33. Let inputBody be input's request's body if input is a Request
    // object; otherwise null.
    const inputBody = input instanceof Request ? input[kState].body : null

    // 34. If either init["body"] exists and is non-null or inputBody is
    // non-null, and request's method is `GET` or `HEAD`, then throw a
    // TypeError.
    if (
      (init.body != null || inputBody != null) &&
      (request.method === 'GET' || request.method === 'HEAD')
    ) {
      throw new TypeError('Request with GET/HEAD method cannot have body.')
    }

    // 35. Let initBody be null.
    let initBody = null

    // 36. If init["body"] exists and is non-null, then:
    if (init.body != null) {
      // 1. Let Content-Type be null.
      // 2. Set initBody and Content-Type to the result of extracting
      // init["body"], with keepalive set to request's keepalive.
      const [extractedBody, contentType] = extractBody(
        init.body,
        request.keepalive
      )
      initBody = extractedBody

      // 3. If Content-Type is non-null and this's headers's header list does
      // not contain `Content-Type`, then append `Content-Type`/Content-Type to
      // this's headers.
      if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) {
        this[kHeaders].append('content-type', contentType)
      }
    }

    // 37. Let inputOrInitBody be initBody if it is non-null; otherwise
    // inputBody.
    const inputOrInitBody = initBody ?? inputBody

    // 38. If inputOrInitBody is non-null and inputOrInitBody's source is
    // null, then:
    if (inputOrInitBody != null && inputOrInitBody.source == null) {
      // 1. If initBody is non-null and init["duplex"] does not exist,
      // then throw a TypeError.
      if (initBody != null && init.duplex == null) {
        throw new TypeError('RequestInit: duplex option is required when sending a body.')
      }

      // 2. If this's request's mode is neither "same-origin" nor "cors",
      // then throw a TypeError.
      if (request.mode !== 'same-origin' && request.mode !== 'cors') {
        throw new TypeError(
          'If request is made from ReadableStream, mode should be "same-origin" or "cors"'
        )
      }

      // 3. Set this's request's use-CORS-preflight flag.
      request.useCORSPreflightFlag = true
    }

    // 39. Let finalBody be inputOrInitBody.
    let finalBody = inputOrInitBody

    // 40. If initBody is null and inputBody is non-null, then:
    if (initBody == null && inputBody != null) {
      // 1. If input is unusable, then throw a TypeError.
      if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) {
        throw new TypeError(
          'Cannot construct a Request with a Request object that has already been used.'
        )
      }

      // 2. Set finalBody to the result of creating a proxy for inputBody.
      if (!TransformStream) {
        TransformStream = (__nccwpck_require__(5356).TransformStream)
      }

      // https://streams.spec.whatwg.org/#readablestream-create-a-proxy
      const identityTransform = new TransformStream()
      inputBody.stream.pipeThrough(identityTransform)
      finalBody = {
        source: inputBody.source,
        length: inputBody.length,
        stream: identityTransform.readable
      }
    }

    // 41. Set this's request's body to finalBody.
    this[kState].body = finalBody
  }

  // Returns request's HTTP method, which is "GET" by default.
  get method () {
    webidl.brandCheck(this, Request)

    // The method getter steps are to return this's request's method.
    return this[kState].method
  }

  // Returns the URL of request as a string.
  get url () {
    webidl.brandCheck(this, Request)

    // The url getter steps are to return this's request's URL, serialized.
    return URLSerializer(this[kState].url)
  }

  // Returns a Headers object consisting of the headers associated with request.
  // Note that headers added in the network layer by the user agent will not
  // be accounted for in this object, e.g., the "Host" header.
  get headers () {
    webidl.brandCheck(this, Request)

    // The headers getter steps are to return this's headers.
    return this[kHeaders]
  }

  // Returns the kind of resource requested by request, e.g., "document"
  // or "script".
  get destination () {
    webidl.brandCheck(this, Request)

    // The destination getter are to return this's request's destination.
    return this[kState].destination
  }

  // Returns the referrer of request. Its value can be a same-origin URL if
  // explicitly set in init, the empty string to indicate no referrer, and
  // "about:client" when defaulting to the global's default. This is used
  // during fetching to determine the value of the `Referer` header of the
  // request being made.
  get referrer () {
    webidl.brandCheck(this, Request)

    // 1. If this's request's referrer is "no-referrer", then return the
    // empty string.
    if (this[kState].referrer === 'no-referrer') {
      return ''
    }

    // 2. If this's request's referrer is "client", then return
    // "about:client".
    if (this[kState].referrer === 'client') {
      return 'about:client'
    }

    // Return this's request's referrer, serialized.
    return this[kState].referrer.toString()
  }

  // Returns the referrer policy associated with request.
  // This is used during fetching to compute the value of the request's
  // referrer.
  get referrerPolicy () {
    webidl.brandCheck(this, Request)

    // The referrerPolicy getter steps are to return this's request's referrer policy.
    return this[kState].referrerPolicy
  }

  // Returns the mode associated with request, which is a string indicating
  // whether the request will use CORS, or will be restricted to same-origin
  // URLs.
  get mode () {
    webidl.brandCheck(this, Request)

    // The mode getter steps are to return this's request's mode.
    return this[kState].mode
  }

  // Returns the credentials mode associated with request,
  // which is a string indicating whether credentials will be sent with the
  // request always, never, or only when sent to a same-origin URL.
  get credentials () {
    // Fixed: this was the only accessor missing the WebIDL brand check.
    webidl.brandCheck(this, Request)

    // The credentials getter steps are to return this's request's credentials mode.
    return this[kState].credentials
  }

  // Returns the cache mode associated with request,
  // which is a string indicating how the request will
  // interact with the browser's cache when fetching.
  get cache () {
    webidl.brandCheck(this, Request)

    // The cache getter steps are to return this's request's cache mode.
    return this[kState].cache
  }

  // Returns the redirect mode associated with request,
  // which is a string indicating how redirects for the
  // request will be handled during fetching. A request
  // will follow redirects by default.
  get redirect () {
    webidl.brandCheck(this, Request)

    // The redirect getter steps are to return this's request's redirect mode.
    return this[kState].redirect
  }

  // Returns request's subresource integrity metadata, which is a
  // cryptographic hash of the resource being fetched. Its value
  // consists of multiple hashes separated by whitespace. [SRI]
  get integrity () {
    webidl.brandCheck(this, Request)

    // The integrity getter steps are to return this's request's integrity
    // metadata.
    return this[kState].integrity
  }

  // Returns a boolean indicating whether or not request can outlive the
  // global in which it was created.
  get keepalive () {
    webidl.brandCheck(this, Request)

    // The keepalive getter steps are to return this's request's keepalive.
    return this[kState].keepalive
  }

  // Returns a boolean indicating whether or not request is for a reload
  // navigation.
  get isReloadNavigation () {
    webidl.brandCheck(this, Request)

    // The isReloadNavigation getter steps are to return true if this's
    // request's reload-navigation flag is set; otherwise false.
    return this[kState].reloadNavigation
  }

  // Returns a boolean indicating whether or not request is for a history
  // navigation (a.k.a. back-forward navigation).
  get isHistoryNavigation () {
    webidl.brandCheck(this, Request)

    // The isHistoryNavigation getter steps are to return true if this's
    // request's history-navigation flag is set; otherwise false.
    return this[kState].historyNavigation
  }

  // Returns the signal associated with request, which is an AbortSignal
  // object indicating whether or not request has been aborted, and its
  // abort event handler.
  get signal () {
    webidl.brandCheck(this, Request)

    // The signal getter steps are to return this's signal.
    return this[kSignal]
  }

  get body () {
    webidl.brandCheck(this, Request)

    return this[kState].body ? this[kState].body.stream : null
  }

  get bodyUsed () {
    webidl.brandCheck(this, Request)

    return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
  }

  get duplex () {
    webidl.brandCheck(this, Request)

    return 'half'
  }

  // Returns a clone of request.
  clone () {
    webidl.brandCheck(this, Request)

    // 1. If this is unusable, then throw a TypeError.
    if (this.bodyUsed || this.body?.locked) {
      throw new TypeError('unusable')
    }

    // 2. Let clonedRequest be the result of cloning this's request.
    const clonedRequest = cloneRequest(this[kState])

    // 3. Let clonedRequestObject be the result of creating a Request object,
    // given clonedRequest, this's headers's guard, and this's relevant Realm.
    const clonedRequestObject = new Request(kInit)
    clonedRequestObject[kState] = clonedRequest
    clonedRequestObject[kRealm] = this[kRealm]
    clonedRequestObject[kHeaders] = new Headers()
    clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList
    clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]
    clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]

    // 4. Make clonedRequestObject's signal follow this's signal.
    const ac = new AbortController()
    if (this.signal.aborted) {
      ac.abort(this.signal.reason)
    } else {
      this.signal.addEventListener(
        'abort',
        () => {
          ac.abort(this.signal.reason)
        },
        { once: true }
      )
    }
    clonedRequestObject[kSignal] = ac.signal

    // 5. Return clonedRequestObject.
    return clonedRequestObject
  }
}
mixinBody ( Request )
function makeRequest ( init ) {
// https://fetch.spec.whatwg.org/#requests
const request = {
method : 'GET' ,
localURLsOnly : false ,
unsafeRequest : false ,
body : null ,
client : null ,
reservedClient : null ,
replacesClientId : '' ,
window : 'client' ,
keepalive : false ,
serviceWorkers : 'all' ,
initiator : '' ,
destination : '' ,
priority : null ,
origin : 'client' ,
policyContainer : 'client' ,
referrer : 'client' ,
referrerPolicy : '' ,
mode : 'no-cors' ,
useCORSPreflightFlag : false ,
credentials : 'same-origin' ,
useCredentials : false ,
cache : 'default' ,
redirect : 'follow' ,
integrity : '' ,
cryptoGraphicsNonceMetadata : '' ,
parserMetadata : '' ,
reloadNavigation : false ,
historyNavigation : false ,
userActivation : false ,
taintedOrigin : false ,
redirectCount : 0 ,
responseTainting : 'basic' ,
preventNoCacheCacheControlHeaderModification : false ,
done : false ,
timingAllowFailed : false ,
... init ,
headersList : init . headersList
? new HeadersList ( init . headersList )
: new HeadersList ( )
}
request . url = request . urlList [ 0 ]
return request
}
// https://fetch.spec.whatwg.org/#concept-request-clone
// Produces an independent copy of an internal request record. All scalar
// fields are copied via makeRequest; the body (when present) is cloned
// separately so the copy does not share the original's stream.
function cloneRequest (request) {
  // Copy everything except the body (makeRequest also re-clones headersList).
  const newRequest = makeRequest({ ...request, body: null })

  // A bodyless request needs no further work.
  if (request.body == null) {
    return newRequest
  }

  // Otherwise attach a clone of the original body.
  newRequest.body = cloneBody(request.body)
  return newRequest
}
Object . defineProperties ( Request . prototype , {
method : kEnumerableProperty ,
url : kEnumerableProperty ,
headers : kEnumerableProperty ,
redirect : kEnumerableProperty ,
clone : kEnumerableProperty ,
signal : kEnumerableProperty ,
duplex : kEnumerableProperty ,
destination : kEnumerableProperty ,
body : kEnumerableProperty ,
bodyUsed : kEnumerableProperty ,
isHistoryNavigation : kEnumerableProperty ,
isReloadNavigation : kEnumerableProperty ,
keepalive : kEnumerableProperty ,
integrity : kEnumerableProperty ,
cache : kEnumerableProperty ,
credentials : kEnumerableProperty ,
attribute : kEnumerableProperty ,
referrerPolicy : kEnumerableProperty ,
referrer : kEnumerableProperty ,
mode : kEnumerableProperty ,
[ Symbol . toStringTag ] : {
value : 'Request' ,
configurable : true
}
} )
webidl . converters . Request = webidl . interfaceConverter (
Request
)
// https://fetch.spec.whatwg.org/#requestinfo
webidl . converters . RequestInfo = function ( V ) {
if ( typeof V === 'string' ) {
return webidl . converters . USVString ( V )
}
if ( V instanceof Request ) {
return webidl . converters . Request ( V )
}
return webidl . converters . USVString ( V )
}
webidl . converters . AbortSignal = webidl . interfaceConverter (
AbortSignal
)
// https://fetch.spec.whatwg.org/#requestinit
webidl . converters . RequestInit = webidl . dictionaryConverter ( [
{
key : 'method' ,
converter : webidl . converters . ByteString
} ,
{
key : 'headers' ,
converter : webidl . converters . HeadersInit
} ,
{
key : 'body' ,
converter : webidl . nullableConverter (
webidl . converters . BodyInit
)
} ,
{
key : 'referrer' ,
converter : webidl . converters . USVString
} ,
{
key : 'referrerPolicy' ,
converter : webidl . converters . DOMString ,
// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy
allowedValues : referrerPolicy
} ,
{
key : 'mode' ,
converter : webidl . converters . DOMString ,
// https://fetch.spec.whatwg.org/#concept-request-mode
allowedValues : requestMode
} ,
{
key : 'credentials' ,
converter : webidl . converters . DOMString ,
// https://fetch.spec.whatwg.org/#requestcredentials
allowedValues : requestCredentials
} ,
{
key : 'cache' ,
converter : webidl . converters . DOMString ,
// https://fetch.spec.whatwg.org/#requestcache
allowedValues : requestCache
} ,
{
key : 'redirect' ,
converter : webidl . converters . DOMString ,
// https://fetch.spec.whatwg.org/#requestredirect
allowedValues : requestRedirect
} ,
{
key : 'integrity' ,
converter : webidl . converters . DOMString
} ,
{
key : 'keepalive' ,
converter : webidl . converters . boolean
} ,
{
key : 'signal' ,
converter : webidl . nullableConverter (
( signal ) => webidl . converters . AbortSignal (
signal ,
{ strict : false }
)
)
} ,
{
key : 'window' ,
converter : webidl . converters . any
} ,
{
key : 'duplex' ,
converter : webidl . converters . DOMString ,
allowedValues : requestDuplex
}
] )
module . exports = { Request , makeRequest }
/***/ } ) ,
/***/ 7823 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { Headers , HeadersList , fill } = _ _nccwpck _require _ _ ( 554 )
const { extractBody , cloneBody , mixinBody } = _ _nccwpck _require _ _ ( 9990 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { kEnumerableProperty } = util
const {
isValidReasonPhrase ,
isCancelled ,
isAborted ,
isBlobLike ,
serializeJavascriptValueToJSONString ,
isErrorLike ,
isomorphicEncode
} = _ _nccwpck _require _ _ ( 2538 )
const {
redirectStatus ,
nullBodyStatus ,
DOMException
} = _ _nccwpck _require _ _ ( 1037 )
const { kState , kHeaders , kGuard , kRealm } = _ _nccwpck _require _ _ ( 5861 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { FormData } = _ _nccwpck _require _ _ ( 2015 )
const { getGlobalOrigin } = _ _nccwpck _require _ _ ( 1246 )
const { URLSerializer } = _ _nccwpck _require _ _ ( 685 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { types } = _ _nccwpck _require _ _ ( 3837 )
const ReadableStream = globalThis . ReadableStream || ( _ _nccwpck _require _ _ ( 5356 ) . ReadableStream )
// https://fetch.spec.whatwg.org/#response-class
// Wrapper implementing the WHATWG fetch Response interface. The underlying
// spec "response" record is stored under kState; the Headers view is stored
// under kHeaders with its guard under kGuard and realm under kRealm.
class Response {
  // Creates network error Response.
  static error () {
    // TODO
    const relevantRealm = { settingsObject: {} }

    // The static error() method steps are to return the result of creating a
    // Response object, given a new network error, "immutable", and this's
    // relevant Realm.
    const responseObject = new Response()
    responseObject[kState] = makeNetworkError()
    responseObject[kRealm] = relevantRealm
    responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList
    responseObject[kHeaders][kGuard] = 'immutable'
    responseObject[kHeaders][kRealm] = relevantRealm
    return responseObject
  }

  // https://fetch.spec.whatwg.org/#dom-response-json
  static json (data = undefined, init = {}) {
    webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' })

    if (init !== null) {
      init = webidl.converters.ResponseInit(init)
    }

    // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data.
    // (The 'utf-8' argument is ignored by TextEncoder, which is always UTF-8.)
    const bytes = new TextEncoder('utf-8').encode(
      serializeJavascriptValueToJSONString(data)
    )

    // 2. Let body be the result of extracting bytes.
    const body = extractBody(bytes)

    // 3. Let responseObject be the result of creating a Response object, given a new response,
    // "response", and this's relevant Realm.
    const relevantRealm = { settingsObject: {} }
    const responseObject = new Response()
    responseObject[kRealm] = relevantRealm
    responseObject[kHeaders][kGuard] = 'response'
    responseObject[kHeaders][kRealm] = relevantRealm

    // 4. Perform initialize a response given responseObject, init, and (body, "application/json").
    initializeResponse(responseObject, init, { body: body[0], type: 'application/json' })

    // 5. Return responseObject.
    return responseObject
  }

  // Creates a redirect Response that redirects to url with status status.
  static redirect (url, status = 302) {
    const relevantRealm = { settingsObject: {} }

    webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' })

    url = webidl.converters.USVString(url)
    status = webidl.converters['unsigned short'](status)

    // 1. Let parsedURL be the result of parsing url with current settings
    // object's API base URL.
    // 2. If parsedURL is failure, then throw a TypeError.
    // TODO: base-URL?
    let parsedURL
    try {
      parsedURL = new URL(url, getGlobalOrigin())
    } catch (err) {
      throw Object.assign(new TypeError('Failed to parse URL from ' + url), {
        cause: err
      })
    }

    // 3. If status is not a redirect status, then throw a RangeError.
    if (!redirectStatus.includes(status)) {
      throw new RangeError('Invalid status code ' + status)
    }

    // 4. Let responseObject be the result of creating a Response object,
    // given a new response, "immutable", and this's relevant Realm.
    const responseObject = new Response()
    responseObject[kRealm] = relevantRealm
    responseObject[kHeaders][kGuard] = 'immutable'
    responseObject[kHeaders][kRealm] = relevantRealm

    // 5. Set responseObject's response's status to status.
    responseObject[kState].status = status

    // 6. Let value be parsedURL, serialized and isomorphic encoded.
    const value = isomorphicEncode(URLSerializer(parsedURL))

    // 7. Append `Location`/value to responseObject's response's header list.
    responseObject[kState].headersList.append('location', value)

    // 8. Return responseObject.
    return responseObject
  }

  // https://fetch.spec.whatwg.org/#dom-response
  constructor (body = null, init = {}) {
    if (body !== null) {
      body = webidl.converters.BodyInit(body)
    }

    init = webidl.converters.ResponseInit(init)

    // TODO
    this[kRealm] = { settingsObject: {} }

    // 1. Set this's response to a new response.
    this[kState] = makeResponse({})

    // 2. Set this's headers to a new Headers object with this's relevant
    // Realm, whose header list is this's response's header list and guard
    // is "response".
    this[kHeaders] = new Headers()
    this[kHeaders][kGuard] = 'response'
    this[kHeaders][kHeadersList] = this[kState].headersList
    this[kHeaders][kRealm] = this[kRealm]

    // 3. Let bodyWithType be null.
    let bodyWithType = null

    // 4. If body is non-null, then set bodyWithType to the result of extracting body.
    if (body != null) {
      const [extractedBody, type] = extractBody(body)
      bodyWithType = { body: extractedBody, type }
    }

    // 5. Perform initialize a response given this, init, and bodyWithType.
    initializeResponse(this, init, bodyWithType)
  }

  // Returns response's type, e.g., "cors".
  get type () {
    webidl.brandCheck(this, Response)

    // The type getter steps are to return this's response's type.
    return this[kState].type
  }

  // Returns response's URL, if it has one; otherwise the empty string.
  get url () {
    webidl.brandCheck(this, Response)

    const urlList = this[kState].urlList

    // The url getter steps are to return the empty string if this's
    // response's URL is null; otherwise this's response's URL,
    // serialized with exclude fragment set to true.
    const url = urlList[urlList.length - 1] ?? null

    if (url === null) {
      return ''
    }

    return URLSerializer(url, true)
  }

  // Returns whether response was obtained through a redirect.
  get redirected () {
    webidl.brandCheck(this, Response)

    // The redirected getter steps are to return true if this's response's URL
    // list has more than one item; otherwise false.
    return this[kState].urlList.length > 1
  }

  // Returns response's status.
  get status () {
    webidl.brandCheck(this, Response)

    // The status getter steps are to return this's response's status.
    return this[kState].status
  }

  // Returns whether response's status is an ok status (200-299 inclusive).
  get ok () {
    webidl.brandCheck(this, Response)

    return this[kState].status >= 200 && this[kState].status <= 299
  }

  // Returns response's status message.
  get statusText () {
    webidl.brandCheck(this, Response)

    // The statusText getter steps are to return this's response's status
    // message.
    return this[kState].statusText
  }

  // Returns response's headers as Headers.
  get headers () {
    webidl.brandCheck(this, Response)

    // The headers getter steps are to return this's headers.
    return this[kHeaders]
  }

  // The underlying body stream, or null when there is no body.
  get body () {
    webidl.brandCheck(this, Response)

    return this[kState].body ? this[kState].body.stream : null
  }

  // True once the body stream has been read (disturbed).
  get bodyUsed () {
    webidl.brandCheck(this, Response)

    return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
  }

  // Returns a clone of response.
  clone () {
    webidl.brandCheck(this, Response)

    // 1. If this is unusable (body already consumed or locked), then throw
    // a TypeError.
    if (this.bodyUsed || (this.body && this.body.locked)) {
      throw webidl.errors.exception({
        header: 'Response.clone',
        message: 'Body has already been consumed.'
      })
    }

    // 2. Let clonedResponse be the result of cloning this's response.
    const clonedResponse = cloneResponse(this[kState])

    // 3. Return the result of creating a Response object, given
    // clonedResponse, this's headers's guard, and this's relevant Realm.
    const clonedResponseObject = new Response()
    clonedResponseObject[kState] = clonedResponse
    clonedResponseObject[kRealm] = this[kRealm]
    clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList
    clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard]
    clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm]
    return clonedResponseObject
  }
}
mixinBody ( Response )
Object . defineProperties ( Response . prototype , {
type : kEnumerableProperty ,
url : kEnumerableProperty ,
status : kEnumerableProperty ,
ok : kEnumerableProperty ,
redirected : kEnumerableProperty ,
statusText : kEnumerableProperty ,
headers : kEnumerableProperty ,
clone : kEnumerableProperty ,
body : kEnumerableProperty ,
bodyUsed : kEnumerableProperty ,
[ Symbol . toStringTag ] : {
value : 'Response' ,
configurable : true
}
} )
Object . defineProperties ( Response , {
json : kEnumerableProperty ,
redirect : kEnumerableProperty ,
error : kEnumerableProperty
} )
// https://fetch.spec.whatwg.org/#concept-response-clone
function cloneResponse (response) {
  // A filtered response is cloned by cloning its internal response and
  // re-applying the same filter to that clone.
  if (response.internalResponse) {
    return filterResponse(
      cloneResponse(response.internalResponse),
      response.type
    )
  }

  // Otherwise copy every field except the body, then clone the body
  // (if there is one) into the copy.
  const copied = makeResponse({ ...response, body: null })

  if (response.body != null) {
    copied.body = cloneBody(response.body)
  }

  return copied
}
// Builds a spec "response" record: defaults first, caller overrides second,
// and fresh headersList/urlList copies so the record owns its own state.
function makeResponse (init) {
  const defaults = {
    aborted: false,
    rangeRequested: false,
    timingAllowPassed: false,
    requestIncludesCredentials: false,
    type: 'default',
    status: 200,
    timingInfo: null,
    cacheState: '',
    statusText: ''
  }

  return {
    ...defaults,
    ...init,
    headersList: init.headersList
      ? new HeadersList(init.headersList)
      : new HeadersList(),
    urlList: init.urlList ? [...init.urlList] : []
  }
}
// Builds a network-error response (type "error", status 0) carrying the
// given reason as its error; marks it aborted for AbortError reasons.
function makeNetworkError (reason) {
  const errorLike = isErrorLike(reason)

  return makeResponse({
    type: 'error',
    status: 0,
    error: errorLike ? reason : new Error(reason ? String(reason) : reason),
    aborted: reason && reason.name === 'AbortError'
  })
}
// Wraps a response in a Proxy whose reads prefer the filter's override
// fields (plus internalResponse) and whose writes must target the
// untouched fields of the wrapped response.
function makeFilteredResponse (response, state) {
  const overrides = {
    internalResponse: response,
    ...state
  }

  return new Proxy(response, {
    get (target, p) {
      if (p in overrides) {
        return overrides[p]
      }
      return target[p]
    },
    set (target, p, value) {
      // Writing through an override would silently diverge; forbid it.
      assert(!(p in overrides))
      target[p] = value
      return true
    }
  })
}
// https://fetch.spec.whatwg.org/#concept-filtered-response
// Wraps response in the filtered view matching the request's response
// tainting. Unknown types are a programming error.
function filterResponse (response, type) {
  switch (type) {
    case 'basic':
      // Note: undici does not implement forbidden response-header names,
      // so the header list is passed through unchanged.
      return makeFilteredResponse(response, {
        type: 'basic',
        headersList: response.headersList
      })
    case 'cors':
      // Note: undici does not implement CORS-safelisted response-header
      // names, so the header list is passed through unchanged.
      return makeFilteredResponse(response, {
        type: 'cors',
        headersList: response.headersList
      })
    case 'opaque':
      // Opaque: empty URL list, status 0, empty status message, null body.
      return makeFilteredResponse(response, {
        type: 'opaque',
        urlList: Object.freeze([]),
        status: 0,
        statusText: '',
        body: null
      })
    case 'opaqueredirect':
      // Opaque-redirect: status 0, empty status message, empty header
      // list, null body.
      return makeFilteredResponse(response, {
        type: 'opaqueredirect',
        status: 0,
        statusText: '',
        headersList: [],
        body: null
      })
    default:
      assert(false)
  }
}
// https://fetch.spec.whatwg.org/#appropriate-network-error
function makeAppropriateNetworkError (fetchParams) {
  // fetchParams must already be cancelled when this is called.
  assert(isCancelled(fetchParams))

  // Aborted fetches get an AbortError-flavored network error; plain
  // cancellations get a generic one.
  if (isAborted(fetchParams)) {
    return makeNetworkError(new DOMException('The operation was aborted.', 'AbortError'))
  }
  return makeNetworkError('Request was cancelled.')
}
// https://whatpr.org/fetch/1392.html#initialize-a-response
function initializeResponse ( response , init , body ) {
// 1. If init["status"] is not in the range 200 to 599, inclusive, then
// throw a RangeError.
if ( init . status !== null && ( init . status < 200 || init . status > 599 ) ) {
throw new RangeError ( 'init["status"] must be in the range of 200 to 599, inclusive.' )
}
// 2. If init["statusText"] does not match the reason-phrase token production,
// then throw a TypeError.
if ( 'statusText' in init && init . statusText != null ) {
// See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2:
// reason-phrase = *( HTAB / SP / VCHAR / obs-text )
if ( ! isValidReasonPhrase ( String ( init . statusText ) ) ) {
throw new TypeError ( 'Invalid statusText' )
}
}
// 3. Set response’ s response’ s status to init["status"].
if ( 'status' in init && init . status != null ) {
response [ kState ] . status = init . status
}
// 4. Set response’ s response’ s status message to init["statusText"].
if ( 'statusText' in init && init . statusText != null ) {
response [ kState ] . statusText = init . statusText
}
// 5. If init["headers"] exists, then fill response’ s headers with init["headers"].
if ( 'headers' in init && init . headers != null ) {
fill ( response [ kHeaders ] , init . headers )
}
// 6. If body was given, then:
if ( body ) {
// 1. If response's status is a null body status, then throw a TypeError.
if ( nullBodyStatus . includes ( response . status ) ) {
throw webidl . errors . exception ( {
header : 'Response constructor' ,
message : 'Invalid response status code ' + response . status
} )
}
// 2. Set response's body to body's body.
response [ kState ] . body = body . body
// 3. If body's type is non-null and response's header list does not contain
// `Content-Type`, then append (`Content-Type`, body's type) to response's header list.
if ( body . type != null && ! response [ kState ] . headersList . contains ( 'Content-Type' ) ) {
response [ kState ] . headersList . append ( 'content-type' , body . type )
}
}
}
webidl . converters . ReadableStream = webidl . interfaceConverter (
ReadableStream
)
webidl . converters . FormData = webidl . interfaceConverter (
FormData
)
webidl . converters . URLSearchParams = webidl . interfaceConverter (
URLSearchParams
)
// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit
webidl . converters . XMLHttpRequestBodyInit = function ( V ) {
if ( typeof V === 'string' ) {
return webidl . converters . USVString ( V )
}
if ( isBlobLike ( V ) ) {
return webidl . converters . Blob ( V , { strict : false } )
}
if (
types . isAnyArrayBuffer ( V ) ||
types . isTypedArray ( V ) ||
types . isDataView ( V )
) {
return webidl . converters . BufferSource ( V )
}
if ( util . isFormDataLike ( V ) ) {
return webidl . converters . FormData ( V , { strict : false } )
}
if ( V instanceof URLSearchParams ) {
return webidl . converters . URLSearchParams ( V )
}
return webidl . converters . DOMString ( V )
}
// https://fetch.spec.whatwg.org/#bodyinit
// BodyInit = ReadableStream, async iterable (undici extension), or any
// XMLHttpRequestBodyInit value.
webidl.converters.BodyInit = function (V) {
  if (V instanceof ReadableStream) {
    return webidl.converters.ReadableStream(V)
  }

  // Note: the spec doesn't include async iterables,
  // this is an undici extension; such values pass through unconverted.
  if (V?.[Symbol.asyncIterator]) {
    return V
  }

  return webidl.converters.XMLHttpRequestBodyInit(V)
}
webidl . converters . ResponseInit = webidl . dictionaryConverter ( [
{
key : 'status' ,
converter : webidl . converters [ 'unsigned short' ] ,
defaultValue : 200
} ,
{
key : 'statusText' ,
converter : webidl . converters . ByteString ,
defaultValue : ''
} ,
{
key : 'headers' ,
converter : webidl . converters . HeadersInit
}
] )
module . exports = {
makeNetworkError ,
makeResponse ,
makeAppropriateNetworkError ,
filterResponse ,
Response ,
cloneResponse
}
/***/ } ) ,
/***/ 5861 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = {
kUrl : Symbol ( 'url' ) ,
kHeaders : Symbol ( 'headers' ) ,
kSignal : Symbol ( 'signal' ) ,
kState : Symbol ( 'state' ) ,
kGuard : Symbol ( 'guard' ) ,
kRealm : Symbol ( 'realm' )
}
/***/ } ) ,
/***/ 2538 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { redirectStatus , badPorts , referrerPolicy : referrerPolicyTokens } = _ _nccwpck _require _ _ ( 1037 )
const { getGlobalOrigin } = _ _nccwpck _require _ _ ( 1246 )
const { performance } = _ _nccwpck _require _ _ ( 4074 )
const { isBlobLike , toUSVString , ReadableStreamFrom } = _ _nccwpck _require _ _ ( 3983 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { isUint8Array } = _ _nccwpck _require _ _ ( 9830 )
// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')|undefined} */
let crypto

try {
  crypto = __nccwpck_require__(6113)
} catch {
  // Node may be built without crypto support; leave `crypto` undefined and
  // let the call sites degrade gracefully.
}
// https://fetch.spec.whatwg.org/#responses
// A response's URL is the last entry of its URL list (stringified), or
// null when the list is empty.
function responseURL (response) {
  const { urlList } = response

  if (urlList.length === 0) {
    return null
  }

  return urlList[urlList.length - 1].toString()
}
// https://fetch.spec.whatwg.org/#concept-response-location-url
function responseLocationURL ( response , requestFragment ) {
// 1. If response’ s status is not a redirect status, then return null.
if ( ! redirectStatus . includes ( response . status ) ) {
return null
}
// 2. Let location be the result of extracting header list values given
// `Location` and response’ s header list.
let location = response . headersList . get ( 'location' )
// 3. If location is a header value, then set location to the result of
// parsing location with response’ s URL.
if ( location !== null && isValidHeaderValue ( location ) ) {
location = new URL ( location , responseURL ( response ) )
}
// 4. If location is a URL whose fragment is null, then set location’ s
// fragment to requestFragment.
if ( location && ! location . hash ) {
location . hash = requestFragment
}
// 5. Return location.
return location
}
/**
 * The request's current URL: the last entry of its URL list.
 * @returns {URL}
 */
function requestCurrentURL (request) {
  return request.urlList.at(-1)
}
function requestBadPort ( request ) {
// 1. Let url be request’ s current URL.
const url = requestCurrentURL ( request )
// 2. If url’ s scheme is an HTTP(S) scheme and url’ s port is a bad port,
// then return blocked.
if ( urlIsHttpHttpsScheme ( url ) && badPorts . includes ( url . port ) ) {
return 'blocked'
}
// 3. Return allowed.
return 'allowed'
}
// True for real Error instances and for objects whose constructor is named
// Error or DOMException (e.g. errors created in another realm).
function isErrorLike (object) {
  if (object instanceof Error) {
    return true
  }

  const ctorName = object?.constructor?.name
  return ctorName === 'Error' || ctorName === 'DOMException'
}
// Check whether |statusText| is a ByteString and
// matches the Reason-Phrase token production.
// RFC 2616: https://tools.ietf.org/html/rfc2616
// RFC 7230: https://tools.ietf.org/html/rfc7230
// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )"
// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116
function isValidReasonPhrase (statusText) {
  for (let i = 0; i < statusText.length; ++i) {
    const code = statusText.charCodeAt(i)

    const isHTab = code === 0x09
    const isVCharOrSpace = code >= 0x20 && code <= 0x7e
    const isObsText = code >= 0x80 && code <= 0xff

    if (!isHTab && !isVCharOrSpace && !isObsText) {
      return false
    }
  }
  return true
}
// RFC 7230 §3.2.6: tchar = any VCHAR except the delimiters below.
// Callers (e.g. isValidHTTPToken) pass *character codes* from charCodeAt().
// Bug fix: the previous implementation compared the numeric code against
// single-character strings ('(' , ')' , ...), so every delimiter comparison
// was always false and delimiters were wrongly accepted as token characters.
// Compare numeric code points instead.
function isTokenChar (c) {
  // Controls and SP (<= 0x20), DEL and non-ASCII (>= 0x7f) are never tchars.
  if (c >= 0x7f || c <= 0x20) {
    return false
  }

  switch (c) {
    case 0x22: // "
    case 0x28: // (
    case 0x29: // )
    case 0x2c: // ,
    case 0x2f: // /
    case 0x3a: // :
    case 0x3b: // ;
    case 0x3c: // <
    case 0x3d: // =
    case 0x3e: // >
    case 0x3f: // ?
    case 0x40: // @
    case 0x5b: // [
    case 0x5c: // \
    case 0x5d: // ]
    case 0x7b: // {
    case 0x7d: // }
      return false
    default:
      return true
  }
}
// See RFC 7230, Section 3.2.6.
// https://github.com/chromium/chromium/blob/d7da0240cae77824d1eda25745c4022757499131/third_party/blink/renderer/platform/network/http_parsers.cc#L321
// A valid HTTP token is a non-empty string whose UTF-16 units are all
// ASCII token characters.
function isValidHTTPToken (characters) {
  if (typeof characters !== 'string' || characters === '') {
    return false
  }

  for (let i = 0; i < characters.length; ++i) {
    const code = characters.charCodeAt(i)

    if (code > 0x7f || !isTokenChar(code)) {
      return false
    }
  }

  return true
}
// https://fetch.spec.whatwg.org/#header-name
// https://github.com/chromium/chromium/blob/b3d37e6f94f87d59e44662d6078f6a12de845d17/net/http/http_util.cc#L342
// A header name is any non-empty HTTP token.
function isValidHeaderName (potentialValue) {
  return potentialValue.length !== 0 && isValidHTTPToken(potentialValue)
}
/**
 * A header value must have no leading or trailing HTTP tab/space bytes and
 * must contain no NUL or HTTP newline bytes.
 * @see https://fetch.spec.whatwg.org/#header-value
 * @param {string} potentialValue
 */
function isValidHeaderValue (potentialValue) {
  const hasEdgeWhitespace =
    potentialValue.startsWith('\t') ||
    potentialValue.startsWith(' ') ||
    potentialValue.endsWith('\t') ||
    potentialValue.endsWith(' ')

  if (hasEdgeWhitespace) {
    return false
  }

  const hasForbiddenByte =
    potentialValue.includes('\0') ||
    potentialValue.includes('\r') ||
    potentialValue.includes('\n')

  return !hasForbiddenByte
}
// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect
// Parses the Referrer-Policy header of actualResponse and, when it yields
// a valid policy token, installs it as request's referrer policy.
function setRequestReferrerPolicyOnRedirect (request, actualResponse) {
  const rawHeader = actualResponse.headersList.get('referrer-policy') ?? ''

  // The header may carry several comma-separated policies; the right-most
  // valid token wins and earlier tokens act as fallbacks.
  // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy
  const tokens = rawHeader.split(',')

  let policy = ''
  for (let i = tokens.length - 1; i >= 0; i--) {
    const candidate = tokens[i].trim()

    if (referrerPolicyTokens.includes(candidate)) {
      policy = candidate
      break
    }
  }

  // Only overwrite the request's policy when a valid token was found.
  if (policy !== '') {
    request.referrerPolicy = policy
  }
}
// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check
function crossOriginResourcePolicyCheck ( ) {
// TODO
return 'allowed'
}
// https://fetch.spec.whatwg.org/#concept-cors-check
function corsCheck ( ) {
// TODO
return 'success'
}
// https://fetch.spec.whatwg.org/#concept-tao-check
function TAOCheck ( ) {
// TODO
return 'success'
}
// https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header
// Sets the Sec-Fetch-Mode header from the request's mode. The dest, site
// and user metadata headers are not implemented yet (TODO upstream).
function appendFetchMetadata (httpRequest) {
  // Structured-header token whose value is the request's mode.
  const modeHeader = httpRequest.mode

  httpRequest.headersList.set('sec-fetch-mode', modeHeader)
}
// https://fetch.spec.whatwg.org/#append-a-request-origin-header
function appendRequestOriginHeader ( request ) {
// 1. Let serializedOrigin be the result of byte-serializing a request origin with request.
let serializedOrigin = request . origin
// 2. If request’ s response tainting is "cors" or request’ s mode is "websocket", then append (`Origin`, serializedOrigin) to request’ s header list.
if ( request . responseTainting === 'cors' || request . mode === 'websocket' ) {
if ( serializedOrigin ) {
request . headersList . append ( 'origin' , serializedOrigin )
}
// 3. Otherwise, if request’ s method is neither `GET` nor `HEAD`, then:
} else if ( request . method !== 'GET' && request . method !== 'HEAD' ) {
// 1. Switch on request’ s referrer policy:
switch ( request . referrerPolicy ) {
case 'no-referrer' :
// Set serializedOrigin to `null`.
serializedOrigin = null
break
case 'no-referrer-when-downgrade' :
case 'strict-origin' :
case 'strict-origin-when-cross-origin' :
// If request’ s origin is a tuple origin, its scheme is "https", and request’ s current URL’ s scheme is not "https", then set serializedOrigin to `null`.
if ( request . origin && urlHasHttpsScheme ( request . origin ) && ! urlHasHttpsScheme ( requestCurrentURL ( request ) ) ) {
serializedOrigin = null
}
break
case 'same-origin' :
// If request’ s origin is not same origin with request’ s current URL’ s origin, then set serializedOrigin to `null`.
if ( ! sameOrigin ( request , requestCurrentURL ( request ) ) ) {
serializedOrigin = null
}
break
default :
// Do nothing.
}
if ( serializedOrigin ) {
// 2. Append (`Origin`, serializedOrigin) to request’ s header list.
request . headersList . append ( 'origin' , serializedOrigin )
}
}
}
function coarsenedSharedCurrentTime ( crossOriginIsolatedCapability ) {
// TODO
return performance . now ( )
}
// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info
// Builds a fetch timing info record that exposes only the start time
// (defaulting to 0); all other fields are zeroed/nulled out.
function createOpaqueTimingInfo (timingInfo) {
  const start = timingInfo.startTime ?? 0

  return {
    startTime: start,
    redirectStartTime: 0,
    redirectEndTime: 0,
    postRedirectStartTime: start,
    finalServiceWorkerStartTime: 0,
    finalNetworkResponseStartTime: 0,
    finalNetworkRequestStartTime: 0,
    endTime: 0,
    encodedBodySize: 0,
    decodedBodySize: 0,
    finalConnectionTimingInfo: null
  }
}
// https://html.spec.whatwg.org/multipage/origin.html#policy-container
// Only the referrer policy is modelled; embedder policy and the CSP list
// are not used by the fetch implementation.
function makePolicyContainer () {
  return { referrerPolicy: 'strict-origin-when-cross-origin' }
}
// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container
// Produces an independent copy carrying only the referrer policy.
function clonePolicyContainer (policyContainer) {
  const { referrerPolicy } = policyContainer
  return { referrerPolicy }
}
// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
// Computes the value to send as the `Referer` header: a stripped URL, an
// origin-only URL, or the string 'no-referrer'.
function determineRequestsReferrer (request) {
  // 1. Let policy be request's referrer policy.
  const policy = request.referrerPolicy

  // Note: policy cannot (shouldn't) be null or an empty string.
  assert(policy)

  // 2. Let environment be request's client.

  let referrerSource = null

  // 3. Switch on request's referrer:
  if (request.referrer === 'client') {
    // Note: node isn't a browser and doesn't implement document/iframes,
    // so we bypass this step and replace it with our own.

    const globalOrigin = getGlobalOrigin()

    if (!globalOrigin || globalOrigin.origin === 'null') {
      return 'no-referrer'
    }

    // note: we need to clone it as it's mutated
    referrerSource = new URL(globalOrigin)
  } else if (request.referrer instanceof URL) {
    // Let referrerSource be request's referrer.
    referrerSource = request.referrer
  }
  // NOTE(review): if referrer is neither 'client' nor a URL, referrerSource
  // stays null and stripURLForReferrer's assert below will throw —
  // presumably callers guarantee one of the two; confirm.

  // 4. Let request's referrerURL be the result of stripping referrerSource for
  // use as a referrer.
  let referrerURL = stripURLForReferrer(referrerSource)

  // 5. Let referrerOrigin be the result of stripping referrerSource for use as
  // a referrer, with the origin-only flag set to true.
  const referrerOrigin = stripURLForReferrer(referrerSource, true)

  // 6. If the result of serializing referrerURL is a string whose length is
  // greater than 4096, set referrerURL to referrerOrigin.
  if (referrerURL.toString().length > 4096) {
    referrerURL = referrerOrigin
  }

  const areSameOrigin = sameOrigin(request, referrerURL)
  // NOTE(review): despite the name, this flag is true when referrerURL IS
  // potentially trustworthy while the request URL is NOT — i.e. it detects
  // a trust downgrade, which is what the fallback cases below need.
  const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) &&
    !isURLPotentiallyTrustworthy(request.url)

  // 8. Execute the switch statements corresponding to the value of policy:
  switch (policy) {
    case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true)
    case 'unsafe-url': return referrerURL
    case 'same-origin':
      return areSameOrigin ? referrerOrigin : 'no-referrer'
    case 'origin-when-cross-origin':
      return areSameOrigin ? referrerURL : referrerOrigin
    case 'strict-origin-when-cross-origin': {
      const currentURL = requestCurrentURL(request)

      // 1. If the origin of referrerURL and the origin of request's current
      // URL are the same, then return referrerURL.
      if (sameOrigin(referrerURL, currentURL)) {
        return referrerURL
      }

      // 2. If referrerURL is a potentially trustworthy URL and request's
      // current URL is not a potentially trustworthy URL, then return no
      // referrer.
      if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {
        return 'no-referrer'
      }

      // 3. Return referrerOrigin.
      return referrerOrigin
    }
    case 'strict-origin': // eslint-disable-line
      /**
       * Intentional fall-through to default:
       * 1. If referrerURL is a potentially trustworthy URL and
       * request's current URL is not a potentially trustworthy URL,
       * then return no referrer.
       * 2. Return referrerOrigin
       */
    case 'no-referrer-when-downgrade': // eslint-disable-line
      /**
       * Intentional fall-through to default:
       * 1. If referrerURL is a potentially trustworthy URL and
       * request's current URL is not a potentially trustworthy URL,
       * then return no referrer.
       * 2. Return referrerOrigin
       */

    default: // eslint-disable-line
      return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin
  }
}
/**
 * Strips a URL for use as a referrer, per the Referrer Policy spec.
 * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url
 * @param {URL} url
 * @param {boolean|undefined} originOnly when true, also drops path and query
 * @returns {URL|'no-referrer'} a stripped COPY of url, or 'no-referrer'
 */
function stripURLForReferrer (url, originOnly) {
  // 1. Assert: url is a URL.
  assert(url instanceof URL)

  // 2. If url's scheme is a local scheme, then return no referrer.
  // NOTE(review): the spec's local schemes are about/blob/data; this list
  // (file/about/blank) predates that wording — confirm before changing.
  if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') {
    return 'no-referrer'
  }

  // Work on a copy so the caller's URL (e.g. request.referrer) is never
  // mutated. Previously the argument was stripped in place, which meant
  // two consecutive calls (with and without originOnly) returned the same
  // fully-stripped object.
  url = new URL(url)

  // 3. Set url's username to the empty string.
  url.username = ''

  // 4. Set url's password to the empty string.
  url.password = ''

  // 5. Set url's fragment to null.
  url.hash = ''

  // 6. If the origin-only flag is true, then:
  if (originOnly) {
    // 1. Set url's path to « the empty string ».
    url.pathname = ''

    // 2. Set url's query to null.
    url.search = ''
  }

  // 7. Return url.
  return url
}
// Whether a URL may be treated as coming from a trustworthy origin
// (secure scheme, loopback address, or localhost name).
function isURLPotentiallyTrustworthy (url) {
  if (!(url instanceof URL)) {
    return false
  }

  // about:blank / about:srcdoc inherit trust from their embedder.
  if (url.href === 'about:blank' || url.href === 'about:srcdoc') {
    return true
  }

  // data: and file: URLs are treated as trustworthy here.
  if (url.protocol === 'data:' || url.protocol === 'file:') {
    return true
  }

  return isOriginPotentiallyTrustworthy(url.origin)

  /**
   * Whether a serialized origin string is potentially trustworthy.
   * @param {string} origin
   */
  function isOriginPotentiallyTrustworthy (origin) {
    // Opaque ('null') origins are never trustworthy.
    if (origin == null || origin === 'null') {
      return false
    }

    const { protocol, hostname } = new URL(origin)

    // Secure schemes.
    if (protocol === 'https:' || protocol === 'wss:') {
      return true
    }

    // IPv4/IPv6 loopback addresses and localhost name variants.
    const isLoopback = /^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(hostname)
    const isLocalhostName =
      hostname === 'localhost' ||
      hostname.includes('localhost.') ||
      hostname.endsWith('.localhost')

    return isLoopback || isLocalhostName
  }
}
/**
 * Subresource-integrity check: does `bytes` match any digest in
 * `metadataList` (the value of an `integrity` attribute)?
 * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
 * @param {Uint8Array} bytes
 * @param {string} metadataList
 */
function bytesMatch (bytes, metadataList) {
  // Without OpenSSL there is nothing to verify with; the spec allows
  // requests through when integrity metadata cannot be checked.
  /* istanbul ignore if: only if node is built with --without-ssl */
  if (crypto === undefined) {
    return true
  }

  // Unparseable or empty metadata also allows the request.
  const parsedMetadata = parseMetadata(metadataList)
  if (parsedMetadata === 'no metadata' || parsedMetadata.length === 0) {
    return true
  }

  // Keep only the entries using the strongest algorithm present
  // (sha512 > sha384 > sha256 — lexicographic order matches strength here).
  const sorted = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
  const strongest = sorted[0].algo
  const metadata = sorted.filter((item) => item.algo === strongest)

  // The response matches if any entry's expected digest equals the
  // actual base64 digest of the bytes (case-sensitive comparison).
  for (const { algo, hash } of metadata) {
    const actualValue = crypto.createHash(algo).update(bytes).digest('base64')
    if (actualValue === hash) {
      return true
    }
  }

  return false
}
// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
// The first digest character must be base64. The previous class [A-z] also
// matched the non-base64 punctuation between 'Z' and 'a' ([ \ ] ^ _ `).
const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-Za-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
/**
 * Parses an `integrity` attribute value into a list of algo/hash pairs.
 * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
 * @param {string} metadata
 * @returns {'no metadata'|{ algo: string, hash: string }[]}
 */
function parseMetadata (metadata) {
  /** @type {{ algo: string, hash: string }[]} */
  const result = []

  // Tracks whether the attribute contained any tokens at all.
  let empty = true

  const supportedHashes = crypto.getHashes()

  for (const token of metadata.split(' ')) {
    empty = false

    const parsedToken = parseHashWithOptions.exec(token)

    if (parsedToken === null || parsedToken.groups === undefined) {
      // Unparseable tokens are skipped rather than rejected: Chromium
      // blocks the request here, Firefox only warns; ignoring them (and
      // thus not checking integrity) is the spec behavior.
      continue
    }

    // Keep the token only when this build of node recognizes its
    // hash algorithm.
    const algorithm = parsedToken.groups.algo
    if (supportedHashes.includes(algorithm.toLowerCase())) {
      result.push(parsedToken.groups)
    }
  }

  return empty === true ? 'no metadata' : result
}
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL ( request ) {
// TODO
}
/**
 * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin}
 * @param {URL} A
 * @param {URL} B
 */
function sameOrigin (A, B) {
  // 1. Two identical opaque ('null') origins compare equal.
  if (A.origin === 'null' && B.origin === A.origin) {
    return true
  }

  // 2. Tuple origins: scheme, host and port must all be identical.
  const schemesMatch = A.protocol === B.protocol
  const hostsMatch = A.hostname === B.hostname
  const portsMatch = A.port === B.port

  // 3. Anything else is cross-origin.
  return schemesMatch && hostsMatch && portsMatch
}
// Expose a promise together with its settle functions so callers can
// resolve or reject it from outside the executor.
function createDeferredPromise () {
  let resolveFn
  let rejectFn
  const promise = new Promise((resolve, reject) => {
    resolveFn = resolve
    rejectFn = reject
  })
  return { promise, resolve: resolveFn, reject: rejectFn }
}
// True once the fetch controller has entered the 'aborted' state.
function isAborted (fetchParams) {
  const { state } = fetchParams.controller
  return state === 'aborted'
}
// A fetch counts as cancelled when it was either aborted or terminated.
function isCancelled (fetchParams) {
  const { state } = fetchParams.controller
  return state === 'aborted' || state === 'terminated'
}
// https://fetch.spec.whatwg.org/#concept-method-normalize
// Only the six spec-listed methods are byte-uppercased; anything else
// (including PATCH) passes through untouched.
function normalizeMethod (method) {
  const isNormalizable = /^(DELETE|GET|HEAD|OPTIONS|POST|PUT)$/i.test(method)
  return isNormalizable ? method.toUpperCase() : method
}
// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string
function serializeJavascriptValueToJSONString (value) {
  // ? Call(%JSON.stringify%, undefined, « value »)
  const result = JSON.stringify(value)

  // JSON.stringify yields undefined for unserializable values
  // (bare undefined, functions, symbols); the infra spec requires a throw.
  if (result === undefined) {
    throw new TypeError('Value is not JSON serializable')
  }

  assert(typeof result === 'string')
  return result
}
// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object
// The intrinsic %IteratorPrototype%, reached via an array iterator since it
// has no directly accessible name.
const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))

/**
 * Builds a WebIDL default iterator object over the pairs returned by
 * `iterator`. The callback is re-invoked on every next() call, so the
 * iteration observes mutations of the underlying collection.
 * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object
 * @param {() => unknown[]} iterator
 * @param {string} name name of the instance (used in errors/toStringTag)
 * @param {'key'|'value'|'key+value'} kind
 */
function makeIterator (iterator, name, kind) {
  // Per-iterator state shared by all next() calls on this instance.
  const object = {
    index: 0,
    kind,
    target: iterator
  }

  const i = {
    next () {
      // 1. Let interface be the interface for which the iterator prototype object exists.
      // 2. Let thisValue be the this value.
      // 3. Let object be ? ToObject(thisValue).
      // 4. If object is a platform object, then perform a security
      //    check, passing:
      // 5. If object is not a default iterator object for interface,
      //    then throw a TypeError.
      if (Object.getPrototypeOf(this) !== i) {
        throw new TypeError(
          `'next' called on an object that does not implement interface ${name} Iterator.`
        )
      }

      // 6. Let index be object's index.
      // 7. Let kind be object's kind.
      // 8. Let values be object's target's value pairs to iterate over.
      const { index, kind, target } = object
      const values = target()

      // 9. Let len be the length of values.
      const len = values.length

      // 10. If index is greater than or equal to len, then return
      //     CreateIterResultObject(undefined, true).
      if (index >= len) {
        return { value: undefined, done: true }
      }

      // 11. Let pair be the entry in values at index index.
      const pair = values[index]

      // 12. Set object's index to index + 1.
      object.index = index + 1

      // 13. Return the iterator result for pair and kind.
      return iteratorResult(pair, kind)
    },

    // The class string of an iterator prototype object for a given interface is the
    // result of concatenating the identifier of the interface and the string " Iterator".
    [Symbol.toStringTag]: `${name} Iterator`
  }

  // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%.
  Object.setPrototypeOf(i, esIteratorPrototype)

  // esIteratorPrototype needs to be the prototype of i
  // which is the prototype of an empty object. Yes, it's confusing.
  return Object.setPrototypeOf({}, i)
}
// https://webidl.spec.whatwg.org/#iterator-result
// Maps a [key, value] pair onto the shape requested by the iterator kind
// and wraps it in a non-done IteratorResult.
function iteratorResult (pair, kind) {
  let result
  switch (kind) {
    case 'key':
      // The converted key alone.
      result = pair[0]
      break
    case 'value':
      // The converted value alone.
      result = pair[1]
      break
    case 'key+value':
      // The whole [key, value] array.
      result = pair
      break
  }
  // CreateIterResultObject(result, false)
  return { value: result, done: false }
}
/**
 * Drains a body's stream, delivering the concatenated bytes to
 * processBody or the failure to processBodyError (each via a microtask,
 * standing in for the spec's "queue a fetch task").
 * @see https://fetch.spec.whatwg.org/#body-fully-read
 */
function fullyReadBody (body, processBody, processBodyError) {
  const onSuccess = (bytes) => queueMicrotask(() => processBody(bytes))
  const onError = (error) => queueMicrotask(() => processBodyError(error))

  // Getting a reader can throw (e.g. when the stream is already locked);
  // report that through the error path instead of propagating.
  let reader
  try {
    reader = body.stream.getReader()
  } catch (e) {
    onError(e)
    return
  }

  readAllBytes(reader, onSuccess, onError)
}
/** @type {ReadableStream} */
// Lazily resolved ReadableStream constructor: taken from the global when
// present, otherwise loaded from the bundled module below and cached.
let ReadableStream = globalThis.ReadableStream

// Whether `stream` is a ReadableStream, or duck-types as one (matching
// Symbol.toStringTag and a tee method) — e.g. from another realm.
function isReadableStreamLike (stream) {
  if (!ReadableStream) {
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }

  return stream instanceof ReadableStream || (
    stream[Symbol.toStringTag] === 'ReadableStream' &&
    typeof stream.tee === 'function'
  )
}
// Engines cap the number of arguments that may be spread into a call;
// inputs at or above this length take the byte-by-byte path instead.
const MAXIMUM_ARGUMENT_LENGTH = 65535

/**
 * @see https://infra.spec.whatwg.org/#isomorphic-decode
 * @param {number[]|Uint8Array} input
 */
function isomorphicDecode (input) {
  // Each byte maps 1:1 onto the code point with the same value.
  if (input.length < MAXIMUM_ARGUMENT_LENGTH) {
    return String.fromCharCode(...input)
  }

  let output = ''
  for (const byte of input) {
    output += String.fromCharCode(byte)
  }
  return output
}
/**
 * Closes a stream controller, tolerating a double close.
 * @param {ReadableStreamController<Uint8Array>} controller
 */
function readableStreamClose (controller) {
  try {
    controller.close()
    return
  } catch (err) {
    // Closing twice can happen when producer and consumer race to finish;
    // swallow only that specific error and re-raise anything else.
    const alreadyClosed = err.message.includes('Controller is already closed')
    if (alreadyClosed) {
      return
    }
    throw err
  }
}
/**
 * Isomorphic encoding is the identity on strings whose code points all
 * fit in one byte; that precondition is asserted for every position.
 * @see https://infra.spec.whatwg.org/#isomorphic-encode
 * @param {string} input
 */
function isomorphicEncode (input) {
  // 1. Assert: input contains no code points greater than U+00FF.
  for (let position = 0; position < input.length; position++) {
    assert(input.charCodeAt(position) <= 0xFF)
  }

  // 2. The byte sequence has the same values as the code points,
  //    in the same order — i.e. the string itself.
  return input
}
/**
 * Reads every chunk from `reader`, then calls successSteps with the
 * concatenated bytes, or failureSteps on any error.
 * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes
 * @see https://streams.spec.whatwg.org/#read-loop
 * @param {ReadableStreamDefaultReader} reader
 * @param {(bytes: Uint8Array) => void} successSteps
 * @param {(error: Error) => void} failureSteps
 */
async function readAllBytes (reader, successSteps, failureSteps) {
  const chunks = []
  let totalLength = 0

  while (true) {
    let readResult
    try {
      readResult = await reader.read()
    } catch (e) {
      // A failed read terminates the loop through failureSteps.
      failureSteps(e)
      return
    }

    if (readResult.done) {
      // Stream drained: hand over the concatenated bytes.
      successSteps(Buffer.concat(chunks, totalLength))
      return
    }

    // Byte streams must only produce Uint8Array chunks.
    const chunk = readResult.value
    if (!isUint8Array(chunk)) {
      failureSteps(new TypeError('Received non-Uint8Array chunk'))
      return
    }

    chunks.push(chunk)
    totalLength += chunk.length
  }
}
/**
 * @see https://fetch.spec.whatwg.org/#is-local
 * @param {URL} url
 */
function urlIsLocal (url) {
  assert('protocol' in url) // ensure it's a url object

  // Local schemes per the fetch spec: about, blob and data.
  switch (url.protocol) {
    case 'about:':
    case 'blob:':
    case 'data:':
      return true
    default:
      return false
  }
}
/**
 * Whether url uses the https scheme; accepts both strings and URL objects.
 * @param {string|URL} url
 */
function urlHasHttpsScheme (url) {
  // Strings are checked by prefix; URL objects expose the scheme directly.
  return typeof url === 'string'
    ? url.startsWith('https:')
    : url.protocol === 'https:'
}
/**
 * @see https://fetch.spec.whatwg.org/#http-scheme
 * @param {URL} url
 */
function urlIsHttpHttpsScheme (url) {
  assert('protocol' in url) // ensure it's a url object

  const { protocol } = url
  return protocol === 'http:' || protocol === 'https:'
}
/**
 * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0;
 * fall back to the hasOwnProperty idiom on the older runtimes.
 */
const hasOwn = Object.hasOwn ?? ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key))
module . exports = {
isAborted ,
isCancelled ,
createDeferredPromise ,
ReadableStreamFrom ,
toUSVString ,
tryUpgradeRequestToAPotentiallyTrustworthyURL ,
coarsenedSharedCurrentTime ,
determineRequestsReferrer ,
makePolicyContainer ,
clonePolicyContainer ,
appendFetchMetadata ,
appendRequestOriginHeader ,
TAOCheck ,
corsCheck ,
crossOriginResourcePolicyCheck ,
createOpaqueTimingInfo ,
setRequestReferrerPolicyOnRedirect ,
isValidHTTPToken ,
requestBadPort ,
requestCurrentURL ,
responseURL ,
responseLocationURL ,
isBlobLike ,
isURLPotentiallyTrustworthy ,
isValidReasonPhrase ,
sameOrigin ,
normalizeMethod ,
serializeJavascriptValueToJSONString ,
makeIterator ,
isValidHeaderName ,
isValidHeaderValue ,
hasOwn ,
isErrorLike ,
fullyReadBody ,
bytesMatch ,
isReadableStreamLike ,
readableStreamClose ,
isomorphicEncode ,
isomorphicDecode ,
urlIsLocal ,
urlHasHttpsScheme ,
urlIsHttpHttpsScheme ,
readAllBytes
}
/***/ } ) ,
/***/ 1744 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { types } = _ _nccwpck _require _ _ ( 3837 )
const { hasOwn , toUSVString } = _ _nccwpck _require _ _ ( 2538 )
/** @type {import('../../types/webidl').Webidl} */
const webidl = {}
webidl.converters = {}
webidl.util = {}
webidl.errors = {}

// Builds a TypeError whose message is '<header>: <message>'.
webidl.errors.exception = function ({ header, message }) {
  return new TypeError(`${header}: ${message}`)
}

// Error for a value that could not be converted to any of the given types.
webidl.errors.conversionFailed = function (context) {
  const { prefix, argument, types } = context
  const plural = types.length === 1 ? '' : ' one of'
  const message = `${argument} could not be converted to${plural}: ${types.join(', ')}.`

  return webidl.errors.exception({ header: prefix, message })
}

// Error for an argument whose value is not valid for its type.
webidl.errors.invalidArgument = function (context) {
  const { prefix, value, type } = context
  return webidl.errors.exception({
    header: prefix,
    message: `"${value}" is an invalid ${type}.`
  })
}
// https://webidl.spec.whatwg.org/#implements
// NOTE(review): with the default opts (strict !== false) a failed instanceof
// throws 'Illegal invocation'; in non-strict mode the check degrades to a
// Symbol.toStringTag comparison and returns a boolean instead of throwing.
// Callers appear to depend on both behaviors, so the asymmetry is kept.
webidl.brandCheck = function (V, I, opts = undefined) {
  if (opts?.strict !== false && !(V instanceof I)) {
    throw new TypeError('Illegal invocation')
  } else {
    return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag]
  }
}
// Throws when fewer than `min` arguments were supplied; `ctx` provides the
// error header/context fields.
webidl.argumentLengthCheck = function ({ length }, min, ctx) {
  if (length >= min) {
    return
  }

  const argumentNoun = `argument${min !== 1 ? 's' : ''}`
  throw webidl.errors.exception({
    message: `${min} ${argumentNoun} required, but${length ? ' only' : ''} ${length} found.`,
    ...ctx
  })
}
webidl . illegalConstructor = function ( ) {
throw webidl . errors . exception ( {
header : 'TypeError' ,
message : 'Illegal constructor'
} )
}
// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values
// Maps a JS value to the name of its ECMAScript language type.
webidl.util.Type = function (V) {
  const type = typeof V
  if (type === 'undefined') return 'Undefined'
  if (type === 'boolean') return 'Boolean'
  if (type === 'string') return 'String'
  if (type === 'symbol') return 'Symbol'
  if (type === 'number') return 'Number'
  if (type === 'bigint') return 'BigInt'

  // typeof reports 'object' or 'function' for everything else;
  // null is special-cased since typeof null === 'object'.
  if (V === null) return 'Null'
  return 'Object'
}
// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint
// Converts V to an integer of the given bit length and signedness,
// honoring the [EnforceRange]/[Clamp] extended attributes via opts.
webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) {
  let upperBound
  let lowerBound

  // 1. If bitLength is 64, then:
  if (bitLength === 64) {
    // 1. Let upperBound be 2^53 − 1.
    upperBound = Math.pow(2, 53) - 1

    // 2. If signedness is "unsigned", then let lowerBound be 0.
    // 3. Otherwise let lowerBound be −2^53 + 1.
    lowerBound = signedness === 'unsigned' ? 0 : Math.pow(-2, 53) + 1
  } else if (signedness === 'unsigned') {
    // 2. Otherwise, if signedness is "unsigned":
    //    lowerBound = 0, upperBound = 2^bitLength − 1.
    lowerBound = 0
    upperBound = Math.pow(2, bitLength) - 1
  } else {
    // 3. Otherwise (signed, bitLength < 64):
    // 1. Let lowerBound be −2^(bitLength − 1).
    //    (was `Math.pow(-2, bitLength) - 1`, which is a large POSITIVE
    //    number for even bit lengths and violates the spec)
    lowerBound = -Math.pow(2, bitLength - 1)

    // 2. Let upperBound be 2^(bitLength − 1) − 1.
    upperBound = Math.pow(2, bitLength - 1) - 1
  }

  // 4. Let x be ? ToNumber(V).
  let x = Number(V)

  // 5. If x is −0, then set x to +0.
  if (x === 0) {
    x = 0
  }

  // 6. [EnforceRange]: non-finite or out-of-range values throw.
  if (opts.enforceRange === true) {
    // 1. If x is NaN, +∞, or −∞, then throw a TypeError.
    if (
      Number.isNaN(x) ||
      x === Number.POSITIVE_INFINITY ||
      x === Number.NEGATIVE_INFINITY
    ) {
      throw webidl.errors.exception({
        header: 'Integer conversion',
        message: `Could not convert ${V} to an integer.`
      })
    }

    // 2. Set x to IntegerPart(x).
    x = webidl.util.IntegerPart(x)

    // 3. If x < lowerBound or x > upperBound, then throw a TypeError.
    if (x < lowerBound || x > upperBound) {
      throw webidl.errors.exception({
        header: 'Integer conversion',
        message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.`
      })
    }

    // 4. Return x.
    return x
  }

  // 7. [Clamp]: clamp into range, rounding halfway cases to even.
  if (!Number.isNaN(x) && opts.clamp === true) {
    // 1. Set x to min(max(x, lowerBound), upperBound).
    x = Math.min(Math.max(x, lowerBound), upperBound)

    // 2. Round x to the nearest integer, choosing the even integer if it
    //    lies halfway between two, and choosing +0 rather than −0.
    if (Math.floor(x) % 2 === 0) {
      x = Math.floor(x)
    } else {
      x = Math.ceil(x)
    }

    // 3. Return x.
    return x
  }

  // 8. If x is NaN, +0, +∞, or −∞, then return +0.
  if (
    Number.isNaN(x) ||
    (x === 0 && Object.is(0, x)) ||
    x === Number.POSITIVE_INFINITY ||
    x === Number.NEGATIVE_INFINITY
  ) {
    return 0
  }

  // 9. Set x to IntegerPart(x).
  x = webidl.util.IntegerPart(x)

  // 10. Set x to x modulo 2^bitLength. The spec's modulo always yields a
  //     non-negative result, unlike JS %, so fold negatives back in.
  x = x % Math.pow(2, bitLength)
  if (x < 0) {
    x += Math.pow(2, bitLength)
  }

  // 11. If signedness is "signed" and x ≥ 2^(bitLength − 1), then return
  //     x − 2^bitLength. (was compared against `2^bitLength − 1`, which
  //     made signed wrap-around unreachable)
  if (signedness === 'signed' && x >= Math.pow(2, bitLength - 1)) {
    return x - Math.pow(2, bitLength)
  }

  // 12. Otherwise, return x.
  return x
}
// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart
// Truncates toward zero: floor of the magnitude, re-signed.
webidl.util.IntegerPart = function (n) {
  const magnitude = Math.floor(Math.abs(n))
  return n < 0 ? -1 * magnitude : magnitude
}
// https://webidl.spec.whatwg.org/#es-sequence
// Returns a converter that materializes an iterable into an array,
// applying `converter` to every element.
webidl.sequenceConverter = function (converter) {
  return (V) => {
    // 1. Sequences may only be created from objects.
    if (webidl.util.Type(V) !== 'Object') {
      throw webidl.errors.exception({
        header: 'Sequence',
        message: `Value of type ${webidl.util.Type(V)} is not an Object.`
      })
    }

    // 2. GetMethod(V, @@iterator) — obtain an iterator via the protocol.
    /** @type {Generator} */
    const method = V?.[Symbol.iterator]?.()
    const seq = []

    // 3. A missing or malformed iterator is rejected.
    if (
      method === undefined ||
      typeof method.next !== 'function'
    ) {
      throw webidl.errors.exception({
        header: 'Sequence',
        message: 'Object is not an iterator.'
      })
    }

    // https://webidl.spec.whatwg.org/#create-sequence-from-iterable
    for (let step = method.next(); !step.done; step = method.next()) {
      seq.push(converter(step.value))
    }

    return seq
  }
}
// https://webidl.spec.whatwg.org/#es-to-record
// Returns a converter that turns a JS object into a record<K, V>, applying
// keyConverter/valueConverter to each own enumerable string-keyed entry.
webidl.recordConverter = function (keyConverter, valueConverter) {
  return (O) => {
    // 1. If Type(O) is not Object, throw a TypeError.
    if (webidl.util.Type(O) !== 'Object') {
      throw webidl.errors.exception({
        header: 'Record',
        message: `Value of type ${webidl.util.Type(O)} is not an Object.`
      })
    }

    // 2. Let result be a new empty instance of record<K, V>.
    const result = {}

    if (!types.isProxy(O)) {
      // Fast path for ordinary objects: Object.keys only returns
      // enumerable own string keys, so no descriptor checks are needed.
      const keys = Object.keys(O)

      for (const key of keys) {
        // 1. Let typedKey be key converted to an IDL value of type K.
        const typedKey = keyConverter(key)

        // 2. Let value be ? Get(O, key).
        // 3. Let typedValue be value converted to an IDL value of type V.
        const typedValue = valueConverter(O[key])

        // 4. Set result[typedKey] to typedValue.
        result[typedKey] = typedValue
      }

      // 5. Return result.
      return result
    }

    // Proxy path: follow the spec exactly so traps fire in the observable
    // order ([[OwnPropertyKeys]], then [[GetOwnProperty]] per key).
    // 3. Let keys be ? O.[[OwnPropertyKeys]]().
    const keys = Reflect.ownKeys(O)

    // 4. For each key of keys.
    for (const key of keys) {
      // 1. Let desc be ? O.[[GetOwnProperty]](key).
      const desc = Reflect.getOwnPropertyDescriptor(O, key)

      // 2. If desc is not undefined and desc.[[Enumerable]] is true:
      if (desc?.enumerable) {
        // 1. Let typedKey be key converted to an IDL value of type K.
        const typedKey = keyConverter(key)

        // 2. Let value be ? Get(O, key).
        // 3. Let typedValue be value converted to an IDL value of type V.
        const typedValue = valueConverter(O[key])

        // 4. Set result[typedKey] to typedValue.
        result[typedKey] = typedValue
      }
    }

    // 5. Return result.
    return result
  }
}
// Returns a converter that asserts V is an instance of interface `i`
// (unless opts.strict is explicitly false) and passes it through.
webidl.interfaceConverter = function (i) {
  return (V, opts = {}) => {
    const failsBrandCheck = opts.strict !== false && !(V instanceof i)
    if (failsBrandCheck) {
      throw webidl.errors.exception({
        header: i.name,
        message: `Expected ${V} to be an instance of ${i.name}.`
      })
    }
    return V
  }
}
// Returns a converter that validates/converts a dictionary argument using
// the per-key descriptors in `converters` ({ key, converter, required,
// defaultValue, allowedValues }).
webidl.dictionaryConverter = function (converters) {
  return (dictionary) => {
    const type = webidl.util.Type(dictionary)
    const dict = {}

    // Absent dictionaries convert to an empty record.
    if (type === 'Null' || type === 'Undefined') {
      return dict
    } else if (type !== 'Object') {
      throw webidl.errors.exception({
        header: 'Dictionary',
        message: `Expected ${dictionary} to be one of: Null, Undefined, Object.`
      })
    }

    for (const options of converters) {
      const { key, defaultValue, required, converter } = options

      // Required keys must be own properties of the input.
      if (required === true) {
        if (!hasOwn(dictionary, key)) {
          throw webidl.errors.exception({
            header: 'Dictionary',
            message: `Missing required key "${key}".`
          })
        }
      }

      let value = dictionary[key]
      const hasDefault = hasOwn(options, 'defaultValue')

      // Only use defaultValue if value is undefined and
      // a defaultValue options was provided.
      // NOTE(review): the guard tests `value !== null` while the comment
      // (and the ?? below) speak of undefined — as written an explicit
      // null never picks up the default. Confirm this is intended.
      if (hasDefault && value !== null) {
        value = value ?? defaultValue
      }

      // A key can be optional and have no default value.
      // When this happens, do not perform a conversion,
      // and do not assign the key a value.
      if (required || hasDefault || value !== undefined) {
        value = converter(value)

        // Enforce the enumerated allowedValues, when provided.
        if (
          options.allowedValues &&
          !options.allowedValues.includes(value)
        ) {
          throw webidl.errors.exception({
            header: 'Dictionary',
            message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.`
          })
        }

        dict[key] = value
      }
    }

    return dict
  }
}
// Wraps a converter so that null passes through unconverted.
webidl.nullableConverter = function (converter) {
  return (V) => (V === null ? V : converter(V))
}
// https://webidl.spec.whatwg.org/#es-DOMString
webidl.converters.DOMString = function (V, opts = {}) {
  // [LegacyNullToEmptyString]: null maps to '' instead of 'null'.
  if (V === null && opts.legacyNullToEmptyString) {
    return ''
  }

  // ToString(symbol) throws per spec; surface a clearer TypeError.
  if (typeof V === 'symbol') {
    throw new TypeError('Could not convert argument of type symbol to string.')
  }

  // Everything else goes through ordinary ToString.
  return String(V)
}
// https://webidl.spec.whatwg.org/#es-ByteString
webidl.converters.ByteString = function (V) {
  // 1. ToString(V), via the DOMString converter.
  const str = webidl.converters.DOMString(V)

  // 2. Every UTF-16 code unit must fit in a single byte.
  for (let i = 0; i < str.length; i++) {
    const code = str.charCodeAt(i)

    if (code > 255) {
      throw new TypeError(
        'Cannot convert argument to a ByteString because the character at ' +
        `index ${i} has a value of ${code} which is greater than 255.`
      )
    }
  }

  // 3. The validated string is the ByteString.
  return str
}
// https://webidl.spec.whatwg.org/#es-USVString
webidl . converters . USVString = toUSVString
// https://webidl.spec.whatwg.org/#es-boolean
webidl . converters . boolean = function ( V ) {
// 1. Let x be the result of computing ToBoolean(V).
const x = Boolean ( V )
// 2. Return the IDL boolean value that is the one that represents
// the same truth value as the ECMAScript Boolean value x.
return x
}
// https://webidl.spec.whatwg.org/#es-any
webidl . converters . any = function ( V ) {
return V
}
// https://webidl.spec.whatwg.org/#es-long-long
// All four integer converters now forward `opts` to ConvertToInt (matching
// the 'unsigned short' converter) so the [Clamp]/[EnforceRange] extended
// attributes take effect; calling without opts behaves exactly as before.
webidl.converters['long long'] = function (V, opts) {
  // 1. Let x be ? ConvertToInt(V, 64, "signed").
  const x = webidl.util.ConvertToInt(V, 64, 'signed', opts)

  // 2. Return the IDL long long value that represents
  //    the same numeric value as x.
  return x
}

// https://webidl.spec.whatwg.org/#es-unsigned-long-long
webidl.converters['unsigned long long'] = function (V, opts) {
  // 1. Let x be ? ConvertToInt(V, 64, "unsigned").
  const x = webidl.util.ConvertToInt(V, 64, 'unsigned', opts)

  // 2. Return the IDL unsigned long long value that
  //    represents the same numeric value as x.
  return x
}

// https://webidl.spec.whatwg.org/#es-unsigned-long
webidl.converters['unsigned long'] = function (V, opts) {
  // 1. Let x be ? ConvertToInt(V, 32, "unsigned").
  const x = webidl.util.ConvertToInt(V, 32, 'unsigned', opts)

  // 2. Return the IDL unsigned long value that
  //    represents the same numeric value as x.
  return x
}

// https://webidl.spec.whatwg.org/#es-unsigned-short
webidl.converters['unsigned short'] = function (V, opts) {
  // 1. Let x be ? ConvertToInt(V, 16, "unsigned").
  const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts)

  // 2. Return the IDL unsigned short value that represents
  //    the same numeric value as x.
  return x
}
// https://webidl.spec.whatwg.org/#idl-ArrayBuffer
webidl.converters.ArrayBuffer = function (V, opts = {}) {
  // Reject anything that is not an ArrayBuffer or SharedArrayBuffer
  // (i.e. lacks the [[ArrayBufferData]] internal slot).
  // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances
  // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances
  const isBufferLike =
    webidl.util.Type(V) === 'Object' &&
    types.isAnyArrayBuffer(V)
  if (!isBufferLike) {
    throw webidl.errors.conversionFailed({
      prefix: `${V}`,
      argument: `${V}`,
      types: ['ArrayBuffer']
    })
  }
  // Unless the target IDL type carries [AllowShared], a
  // SharedArrayBuffer is an error.
  if (opts.allowShared === false && types.isSharedArrayBuffer(V)) {
    throw webidl.errors.exception({
      header: 'ArrayBuffer',
      message: 'SharedArrayBuffer is not allowed.'
    })
  }
  // [AllowResizable] is not handled here: resizable ArrayBuffers are
  // currently a proposal. Per spec, return the same object as V.
  return V
}
webidl.converters.TypedArray = function (V, T, opts = {}) {
  // T is the typed-array constructor the value must match. Reject
  // non-objects, non-typed-arrays, and typed arrays of a different
  // kind (e.g. an Int8Array where a Uint8Array is expected).
  const matchesT =
    webidl.util.Type(V) === 'Object' &&
    types.isTypedArray(V) &&
    V.constructor.name === T.name
  if (!matchesT) {
    throw webidl.errors.conversionFailed({
      prefix: `${T.name}`,
      argument: `${V}`,
      types: [T.name]
    })
  }
  // Unless the target IDL type carries [AllowShared], a view over a
  // SharedArrayBuffer is an error.
  if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
    throw webidl.errors.exception({
      header: 'ArrayBuffer',
      message: 'SharedArrayBuffer is not allowed.'
    })
  }
  // [AllowResizable] is not handled here: resizable array buffers are
  // currently a proposal. Per spec, return the same object as V.
  return V
}
webidl.converters.DataView = function (V, opts = {}) {
  // Must be an object carrying the [[DataView]] internal slot.
  if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) {
    throw webidl.errors.exception({
      header: 'DataView',
      message: 'Object is not a DataView.'
    })
  }
  // Unless the target IDL type carries [AllowShared], a view over a
  // SharedArrayBuffer is an error.
  if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
    throw webidl.errors.exception({
      header: 'ArrayBuffer',
      message: 'SharedArrayBuffer is not allowed.'
    })
  }
  // [AllowResizable] is not handled here: resizable ArrayBuffers are
  // currently a proposal. Per spec, return the same object as V.
  return V
}
// https://webidl.spec.whatwg.org/#BufferSource
webidl.converters.BufferSource = function (V, opts = {}) {
  if (types.isAnyArrayBuffer(V)) {
    return webidl.converters.ArrayBuffer(V, opts)
  }
  if (types.isTypedArray(V)) {
    // Bug fix: `opts` was previously dropped here, so options such as
    // `allowShared: false` were silently ignored for typed-array
    // views while being enforced for ArrayBuffer and DataView.
    return webidl.converters.TypedArray(V, V.constructor, opts)
  }
  if (types.isDataView(V)) {
    return webidl.converters.DataView(V, opts)
  }
  throw new TypeError(`Could not convert ${V} to a BufferSource.`)
}
// Converter for sequence<ByteString>: each element is run through the
// ByteString converter.
webidl . converters [ 'sequence<ByteString>' ] = webidl . sequenceConverter (
webidl . converters . ByteString
)
// Nested sequence form, e.g. the array-of-pairs shape of HeadersInit.
webidl . converters [ 'sequence<sequence<ByteString>>' ] = webidl . sequenceConverter (
webidl . converters [ 'sequence<ByteString>' ]
)
// record<ByteString, ByteString>, e.g. the plain-object shape of
// HeadersInit: both keys and values go through the ByteString converter.
webidl . converters [ 'record<ByteString, ByteString>' ] = webidl . recordConverter (
webidl . converters . ByteString ,
webidl . converters . ByteString
)
// Public surface of this module.
module . exports = {
webidl
}
/***/ } ) ,
/***/ 4854 :
/***/ ( ( module ) => {
"use strict" ;
/**
 * Maps every label from the WHATWG Encoding Standard's table to its
 * canonical encoding name. Keys are already-lowercased labels; a Map
 * is used (rather than a plain object) so lookups cannot collide with
 * prototype properties like 'constructor'.
 * @see https://encoding.spec.whatwg.org/#names-and-labels
 */
const encodingLabelTable = new Map()
for (const [encoding, labels] of [
  ['UTF-8', ['unicode-1-1-utf-8', 'unicode11utf8', 'unicode20utf8', 'utf-8', 'utf8', 'x-unicode20utf8']],
  ['IBM866', ['866', 'cp866', 'csibm866', 'ibm866']],
  ['ISO-8859-2', ['csisolatin2', 'iso-8859-2', 'iso-ir-101', 'iso8859-2', 'iso88592', 'iso_8859-2', 'iso_8859-2:1987', 'l2', 'latin2']],
  ['ISO-8859-3', ['csisolatin3', 'iso-8859-3', 'iso-ir-109', 'iso8859-3', 'iso88593', 'iso_8859-3', 'iso_8859-3:1988', 'l3', 'latin3']],
  ['ISO-8859-4', ['csisolatin4', 'iso-8859-4', 'iso-ir-110', 'iso8859-4', 'iso88594', 'iso_8859-4', 'iso_8859-4:1988', 'l4', 'latin4']],
  ['ISO-8859-5', ['csisolatincyrillic', 'cyrillic', 'iso-8859-5', 'iso-ir-144', 'iso8859-5', 'iso88595', 'iso_8859-5', 'iso_8859-5:1988']],
  ['ISO-8859-6', ['arabic', 'asmo-708', 'csiso88596e', 'csiso88596i', 'csisolatinarabic', 'ecma-114', 'iso-8859-6', 'iso-8859-6-e', 'iso-8859-6-i', 'iso-ir-127', 'iso8859-6', 'iso88596', 'iso_8859-6', 'iso_8859-6:1987']],
  ['ISO-8859-7', ['csisolatingreek', 'ecma-118', 'elot_928', 'greek', 'greek8', 'iso-8859-7', 'iso-ir-126', 'iso8859-7', 'iso88597', 'iso_8859-7', 'iso_8859-7:1987', 'sun_eu_greek']],
  ['ISO-8859-8', ['csiso88598e', 'csisolatinhebrew', 'hebrew', 'iso-8859-8', 'iso-8859-8-e', 'iso-ir-138', 'iso8859-8', 'iso88598', 'iso_8859-8', 'iso_8859-8:1988', 'visual']],
  ['ISO-8859-8-I', ['csiso88598i', 'iso-8859-8-i', 'logical']],
  ['ISO-8859-10', ['csisolatin6', 'iso-8859-10', 'iso-ir-157', 'iso8859-10', 'iso885910', 'l6', 'latin6']],
  ['ISO-8859-13', ['iso-8859-13', 'iso8859-13', 'iso885913']],
  ['ISO-8859-14', ['iso-8859-14', 'iso8859-14', 'iso885914']],
  ['ISO-8859-15', ['csisolatin9', 'iso-8859-15', 'iso8859-15', 'iso885915', 'iso_8859-15', 'l9']],
  ['ISO-8859-16', ['iso-8859-16']],
  ['KOI8-R', ['cskoi8r', 'koi', 'koi8', 'koi8-r', 'koi8_r']],
  ['KOI8-U', ['koi8-ru', 'koi8-u']],
  ['macintosh', ['csmacintosh', 'mac', 'macintosh', 'x-mac-roman']],
  ['windows-874', ['iso-8859-11', 'iso8859-11', 'iso885911', 'tis-620', 'windows-874']],
  ['windows-1250', ['cp1250', 'windows-1250', 'x-cp1250']],
  ['windows-1251', ['cp1251', 'windows-1251', 'x-cp1251']],
  ['windows-1252', ['ansi_x3.4-1968', 'ascii', 'cp1252', 'cp819', 'csisolatin1', 'ibm819', 'iso-8859-1', 'iso-ir-100', 'iso8859-1', 'iso88591', 'iso_8859-1', 'iso_8859-1:1987', 'l1', 'latin1', 'us-ascii', 'windows-1252', 'x-cp1252']],
  ['windows-1253', ['cp1253', 'windows-1253', 'x-cp1253']],
  ['windows-1254', ['cp1254', 'csisolatin5', 'iso-8859-9', 'iso-ir-148', 'iso8859-9', 'iso88599', 'iso_8859-9', 'iso_8859-9:1989', 'l5', 'latin5', 'windows-1254', 'x-cp1254']],
  ['windows-1255', ['cp1255', 'windows-1255', 'x-cp1255']],
  ['windows-1256', ['cp1256', 'windows-1256', 'x-cp1256']],
  ['windows-1257', ['cp1257', 'windows-1257', 'x-cp1257']],
  ['windows-1258', ['cp1258', 'windows-1258', 'x-cp1258']],
  ['x-mac-cyrillic', ['x-mac-cyrillic', 'x-mac-ukrainian']],
  ['GBK', ['chinese', 'csgb2312', 'csiso58gb231280', 'gb2312', 'gb_2312', 'gb_2312-80', 'gbk', 'iso-ir-58', 'x-gbk']],
  ['gb18030', ['gb18030']],
  ['Big5', ['big5', 'big5-hkscs', 'cn-big5', 'csbig5', 'x-x-big5']],
  ['EUC-JP', ['cseucpkdfmtjapanese', 'euc-jp', 'x-euc-jp']],
  ['ISO-2022-JP', ['csiso2022jp', 'iso-2022-jp']],
  ['Shift_JIS', ['csshiftjis', 'ms932', 'ms_kanji', 'shift-jis', 'shift_jis', 'sjis', 'windows-31j', 'x-sjis']],
  ['EUC-KR', ['cseuckr', 'csksc56011987', 'euc-kr', 'iso-ir-149', 'korean', 'ks_c_5601-1987', 'ks_c_5601-1989', 'ksc5601', 'ksc_5601', 'windows-949']],
  ['replacement', ['csiso2022kr', 'hz-gb-2312', 'iso-2022-cn', 'iso-2022-cn-ext', 'iso-2022-kr', 'replacement']],
  ['UTF-16BE', ['unicodefffe', 'utf-16be']],
  ['UTF-16LE', ['csunicode', 'iso-10646-ucs-2', 'ucs-2', 'unicode', 'unicodefeff', 'utf-16', 'utf-16le']],
  ['x-user-defined', ['x-user-defined']]
]) {
  for (const alias of labels) {
    encodingLabelTable.set(alias, encoding)
  }
}

/**
 * Resolve an encoding label to its canonical encoding name.
 * @see https://encoding.spec.whatwg.org/#concept-encoding-get
 * @param {string|undefined} label
 * @returns {string} the canonical encoding name, or 'failure'
 */
function getEncoding (label) {
  if (!label) {
    return 'failure'
  }
  // 1. Remove leading and trailing whitespace from label.
  // 2. Match the label case-insensitively against the table above;
  //    anything not in the table is 'failure'.
  return encodingLabelTable.get(label.trim().toLowerCase()) ?? 'failure'
}
module . exports = {
getEncoding
}
/***/ } ) ,
/***/ 1446 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
staticPropertyDescriptors ,
readOperation ,
fireAProgressEvent
} = _ _nccwpck _require _ _ ( 7530 )
const {
kState ,
kError ,
kResult ,
kEvents ,
kAborted
} = _ _nccwpck _require _ _ ( 9054 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
class FileReader extends EventTarget {
  constructor () {
    super()
    // Internal slots, per https://w3c.github.io/FileAPI/#filereader-interface
    this[kState] = 'empty'
    this[kResult] = null
    this[kError] = null
    // The currently-installed on<event> handlers, tracked so each
    // setter can detach the previous listener before installing a
    // replacement.
    this[kEvents] = {
      loadend: null,
      error: null,
      abort: null,
      load: null,
      progress: null,
      loadstart: null
    }
  }

  // Swap the stored handler for `type`: detach the old listener (if
  // any), then attach `fn` when it is callable, otherwise clear the
  // slot. Backs every on<event> setter below.
  #setEventHandler (type, fn) {
    const previous = this[kEvents][type]
    if (previous) {
      this.removeEventListener(type, previous)
    }
    if (typeof fn === 'function') {
      this[kEvents][type] = fn
      this.addEventListener(type, fn)
    } else {
      this[kEvents][type] = null
    }
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
   * @param {import('buffer').Blob} blob
   */
  readAsArrayBuffer (blob) {
    webidl.brandCheck(this, FileReader)
    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' })
    blob = webidl.converters.Blob(blob, { strict: false })
    // Initiate a read operation for blob with ArrayBuffer.
    readOperation(this, blob, 'ArrayBuffer')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#readAsBinaryString
   * @param {import('buffer').Blob} blob
   */
  readAsBinaryString (blob) {
    webidl.brandCheck(this, FileReader)
    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' })
    blob = webidl.converters.Blob(blob, { strict: false })
    // Initiate a read operation for blob with BinaryString.
    readOperation(this, blob, 'BinaryString')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#readAsDataText
   * @param {import('buffer').Blob} blob
   * @param {string?} encoding
   */
  readAsText (blob, encoding = undefined) {
    webidl.brandCheck(this, FileReader)
    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' })
    blob = webidl.converters.Blob(blob, { strict: false })
    if (encoding !== undefined) {
      encoding = webidl.converters.DOMString(encoding)
    }
    // Initiate a read operation for blob with Text and encoding.
    readOperation(this, blob, 'Text', encoding)
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL
   * @param {import('buffer').Blob} blob
   */
  readAsDataURL (blob) {
    webidl.brandCheck(this, FileReader)
    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' })
    blob = webidl.converters.Blob(blob, { strict: false })
    // Initiate a read operation for blob with DataURL.
    readOperation(this, blob, 'DataURL')
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dfn-abort
   */
  abort () {
    // "empty" or "done": clear the result and terminate here.
    if (this[kState] === 'empty' || this[kState] === 'done') {
      this[kResult] = null
      return
    }
    // "loading": move to "done" with a null result.
    if (this[kState] === 'loading') {
      this[kState] = 'done'
      this[kResult] = null
    }
    // Tell the in-flight read operation (if any) to stop.
    this[kAborted] = true
    // Fire 'abort'; then, since state is no longer "loading", 'loadend'.
    fireAProgressEvent('abort', this)
    if (this[kState] !== 'loading') {
      fireAProgressEvent('loadend', this)
    }
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate
   */
  get readyState () {
    webidl.brandCheck(this, FileReader)
    switch (this[kState]) {
      case 'empty': return this.EMPTY
      case 'loading': return this.LOADING
      case 'done': return this.DONE
    }
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dom-filereader-result
   */
  get result () {
    webidl.brandCheck(this, FileReader)
    // The result attribute's getter returns this's result.
    return this[kResult]
  }

  /**
   * @see https://w3c.github.io/FileAPI/#dom-filereader-error
   */
  get error () {
    webidl.brandCheck(this, FileReader)
    // The error attribute's getter returns this's error.
    return this[kError]
  }

  get onloadend () {
    webidl.brandCheck(this, FileReader)
    return this[kEvents].loadend
  }

  set onloadend (fn) {
    webidl.brandCheck(this, FileReader)
    this.#setEventHandler('loadend', fn)
  }

  get onerror () {
    webidl.brandCheck(this, FileReader)
    return this[kEvents].error
  }

  set onerror (fn) {
    webidl.brandCheck(this, FileReader)
    this.#setEventHandler('error', fn)
  }

  get onloadstart () {
    webidl.brandCheck(this, FileReader)
    return this[kEvents].loadstart
  }

  set onloadstart (fn) {
    webidl.brandCheck(this, FileReader)
    this.#setEventHandler('loadstart', fn)
  }

  get onprogress () {
    webidl.brandCheck(this, FileReader)
    return this[kEvents].progress
  }

  set onprogress (fn) {
    webidl.brandCheck(this, FileReader)
    this.#setEventHandler('progress', fn)
  }

  get onload () {
    webidl.brandCheck(this, FileReader)
    return this[kEvents].load
  }

  set onload (fn) {
    webidl.brandCheck(this, FileReader)
    this.#setEventHandler('load', fn)
  }

  get onabort () {
    webidl.brandCheck(this, FileReader)
    return this[kEvents].abort
  }

  set onabort (fn) {
    webidl.brandCheck(this, FileReader)
    this.#setEventHandler('abort', fn)
  }
}
// readyState constants are exposed both on the constructor and on the
// prototype (so instances see them too), matching the browser interface.
// https://w3c.github.io/FileAPI/#dom-filereader-empty
FileReader . EMPTY = FileReader . prototype . EMPTY = 0
// https://w3c.github.io/FileAPI/#dom-filereader-loading
FileReader . LOADING = FileReader . prototype . LOADING = 1
// https://w3c.github.io/FileAPI/#dom-filereader-done
FileReader . DONE = FileReader . prototype . DONE = 2
// Re-apply property descriptors: the constants get the descriptors
// from staticPropertyDescriptors, and the public methods/accessors are
// made enumerable via kEnumerableProperty.
Object . defineProperties ( FileReader . prototype , {
EMPTY : staticPropertyDescriptors ,
LOADING : staticPropertyDescriptors ,
DONE : staticPropertyDescriptors ,
readAsArrayBuffer : kEnumerableProperty ,
readAsBinaryString : kEnumerableProperty ,
readAsText : kEnumerableProperty ,
readAsDataURL : kEnumerableProperty ,
abort : kEnumerableProperty ,
readyState : kEnumerableProperty ,
result : kEnumerableProperty ,
error : kEnumerableProperty ,
onloadstart : kEnumerableProperty ,
onprogress : kEnumerableProperty ,
onload : kEnumerableProperty ,
onabort : kEnumerableProperty ,
onerror : kEnumerableProperty ,
onloadend : kEnumerableProperty ,
[ Symbol . toStringTag ] : {
value : 'FileReader' ,
writable : false ,
enumerable : false ,
configurable : true
}
} )
// The constants also live on the constructor itself.
Object . defineProperties ( FileReader , {
EMPTY : staticPropertyDescriptors ,
LOADING : staticPropertyDescriptors ,
DONE : staticPropertyDescriptors
} )
// Public surface of this module.
module . exports = {
FileReader
}
/***/ } ) ,
/***/ 5504 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const kState = Symbol ( 'ProgressEvent state' )
/ * *
* @ see https : //xhr.spec.whatwg.org/#progressevent
* /
class ProgressEvent extends Event {
  constructor (type, eventInitDict = {}) {
    // Run both arguments through their WebIDL converters before
    // handing them to the base Event constructor.
    const eventType = webidl.converters.DOMString(type)
    const init = webidl.converters.ProgressEventInit(eventInitDict ?? {})
    super(eventType, init)
    // Keep the progress-specific members in an internal slot so the
    // getters below can brand-check before exposing them.
    this[kState] = {
      lengthComputable: init.lengthComputable,
      loaded: init.loaded,
      total: init.total
    }
  }

  get lengthComputable () {
    webidl.brandCheck(this, ProgressEvent)
    return this[kState].lengthComputable
  }

  get loaded () {
    webidl.brandCheck(this, ProgressEvent)
    return this[kState].loaded
  }

  get total () {
    webidl.brandCheck(this, ProgressEvent)
    return this[kState].total
  }
}
// Dictionary converter for ProgressEventInit: the three progress
// members (lengthComputable/loaded/total) plus the inherited EventInit
// members, each with its spec-mandated default.
webidl . converters . ProgressEventInit = webidl . dictionaryConverter ( [
{
key : 'lengthComputable' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'loaded' ,
converter : webidl . converters [ 'unsigned long long' ] ,
defaultValue : 0
} ,
{
key : 'total' ,
converter : webidl . converters [ 'unsigned long long' ] ,
defaultValue : 0
} ,
{
key : 'bubbles' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'cancelable' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'composed' ,
converter : webidl . converters . boolean ,
defaultValue : false
}
] )
// Public surface of this module.
module . exports = {
ProgressEvent
}
/***/ } ) ,
/***/ 9054 :
/***/ ( ( module ) => {
"use strict" ;
// Shared symbols used to store the FileReader's internal slots without
// exposing them as ordinary (string-keyed) properties.
module . exports = {
kState : Symbol ( 'FileReader state' ) ,
kResult : Symbol ( 'FileReader result' ) ,
kError : Symbol ( 'FileReader error' ) ,
// Timestamp of the last 'progress' event, used for ~50ms throttling
// inside readOperation.
kLastProgressEventFired : Symbol ( 'FileReader last progress event fired timestamp' ) ,
kEvents : Symbol ( 'FileReader events' ) ,
kAborted : Symbol ( 'FileReader aborted' )
}
/***/ } ) ,
/***/ 7530 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
kState ,
kError ,
kResult ,
kAborted ,
kLastProgressEventFired
} = _ _nccwpck _require _ _ ( 9054 )
const { ProgressEvent } = _ _nccwpck _require _ _ ( 5504 )
const { getEncoding } = _ _nccwpck _require _ _ ( 4854 )
const { DOMException } = _ _nccwpck _require _ _ ( 1037 )
const { serializeAMimeType , parseMIMEType } = _ _nccwpck _require _ _ ( 685 )
const { types } = _ _nccwpck _require _ _ ( 3837 )
const { StringDecoder } = _ _nccwpck _require _ _ ( 1576 )
const { btoa } = _ _nccwpck _require _ _ ( 4300 )
/**
 * Descriptor applied to the EMPTY/LOADING/DONE constants on FileReader
 * and its prototype: visible, but neither writable nor configurable.
 * Note: stray version-control timestamp lines that had leaked into this
 * object literal (breaking the syntax) have been removed.
 * @type {PropertyDescriptor}
 */
const staticPropertyDescriptors = {
  enumerable: true,
  writable: false,
  configurable: false
}
/**
 * Start an asynchronous read of `blob` and reflect its progress and
 * outcome on the given FileReader via state slots and progress events.
 * @see https://w3c.github.io/FileAPI/#readOperation
 * @param {import('./filereader').FileReader} fr
 * @param {import('buffer').Blob} blob
 * @param {string} type one of 'ArrayBuffer'|'BinaryString'|'Text'|'DataURL'
 * @param {string?} encodingName
 * @throws {DOMException} InvalidStateError when a read is already running
 */
function readOperation (fr, blob, type, encodingName) {
  // A reader may only run a single read operation at a time.
  if (fr[kState] === 'loading') {
    throw new DOMException('Invalid state', 'InvalidStateError')
  }
  // Reset the reader's slots for this read.
  fr[kState] = 'loading'
  fr[kResult] = null
  fr[kError] = null

  /** @type {import('stream/web').ReadableStream} */
  const stream = blob.stream()
  const streamReader = stream.getReader()

  /** @type {Uint8Array[]} chunks collected so far */
  const chunks = []
  let pendingRead = streamReader.read()
  let firstChunk = true

  // "In parallel": run the loop without blocking the caller. This is
  // deliberately an IIFE rather than making readOperation itself
  // async, so that a double read throws synchronously instead of
  // rejecting a promise.
  ;(async () => {
    while (!fr[kAborted]) {
      try {
        const { done, value } = await pendingRead
        // The first settled read queues a 'loadstart' event.
        if (firstChunk && !fr[kAborted]) {
          queueMicrotask(() => {
            fireAProgressEvent('loadstart', fr)
          })
        }
        firstChunk = false

        if (!done && types.isUint8Array(value)) {
          // Collect the chunk's bytes.
          chunks.push(value)
          // Throttled 'progress': fire at most one event per ~50ms.
          const lastFired = fr[kLastProgressEventFired]
          if (
            (lastFired === undefined || Date.now() - lastFired >= 50) &&
            !fr[kAborted]
          ) {
            fr[kLastProgressEventFired] = Date.now()
            queueMicrotask(() => {
              fireAProgressEvent('progress', fr)
            })
          }
          // Continue with the next chunk.
          pendingRead = streamReader.read()
        } else if (done) {
          // Stream exhausted: package the data and finish on a task.
          queueMicrotask(() => {
            fr[kState] = 'done'
            try {
              const result = packageData(chunks, type, blob.type, encodingName)
              // An abort that raced the packaging wins; drop the result.
              if (fr[kAborted]) {
                return
              }
              fr[kResult] = result
              fireAProgressEvent('load', fr)
            } catch (error) {
              // Packaging failed: record the error and fire 'error'.
              fr[kError] = error
              fireAProgressEvent('error', fr)
            }
            if (fr[kState] !== 'loading') {
              fireAProgressEvent('loadend', fr)
            }
          })
          break
        }
      } catch (error) {
        if (fr[kAborted]) {
          return
        }
        // The read rejected: report the failure on a task and stop.
        queueMicrotask(() => {
          fr[kState] = 'done'
          fr[kError] = error
          fireAProgressEvent('error', fr)
          if (fr[kState] !== 'loading') {
            fireAProgressEvent('loadend', fr)
          }
        })
        break
      }
    }
  })()
}
/**
 * Dispatch a ProgressEvent named `e` on the reader. Per the File API,
 * progress events neither bubble nor are cancelable.
 * @see https://w3c.github.io/FileAPI/#fire-a-progress-event
 * @see https://dom.spec.whatwg.org/#concept-event-fire
 * @param {string} e The name of the event
 * @param {import('./filereader').FileReader} reader
 */
function fireAProgressEvent (e, reader) {
  const progressEvent = new ProgressEvent(e, {
    bubbles: false,
    cancelable: false
  })
  reader.dispatchEvent(progressEvent)
}
/**
 * Convert the collected chunks into the FileReader result for `type`.
 * @see https://w3c.github.io/FileAPI/#blob-package-data
 * @param {Uint8Array[]} bytes
 * @param {string} type 'DataURL'|'Text'|'ArrayBuffer'|'BinaryString'
 * @param {string?} mimeType the blob's type
 * @param {string?} encodingName caller-requested text encoding label
 */
function packageData (bytes, type, mimeType, encodingName) {
  switch (type) {
    case 'DataURL': {
      // Build "data:[mediatype];base64,<data>" per RFC 2397, falling
      // back to application/octet-stream when the blob's type fails
      // to parse as a MIME type.
      let dataUrl = 'data:'
      const parsedType = parseMIMEType(mimeType || 'application/octet-stream')
      if (parsedType !== 'failure') {
        dataUrl += serializeAMimeType(parsedType)
      }
      dataUrl += ';base64,'
      // latin1 keeps a 1:1 byte-to-code-unit mapping, as btoa expects.
      // NOTE(review): encoding each chunk with btoa separately is only
      // valid when every chunk's length is a multiple of 3 — confirm
      // upstream chunk sizing before relying on this for odd chunks.
      const latin1Decoder = new StringDecoder('latin1')
      for (const chunk of bytes) {
        dataUrl += btoa(latin1Decoder.write(chunk))
      }
      dataUrl += btoa(latin1Decoder.end())
      return dataUrl
    }
    case 'Text': {
      // Encoding selection order: the explicit encodingName, then the
      // charset parameter of mimeType, and finally UTF-8.
      let encoding = encodingName ? getEncoding(encodingName) : 'failure'
      if (encoding === 'failure' && mimeType) {
        const parsedType = parseMIMEType(mimeType)
        if (parsedType !== 'failure') {
          encoding = getEncoding(parsedType.parameters.get('charset'))
        }
      }
      if (encoding === 'failure') {
        encoding = 'UTF-8'
      }
      // Decode bytes with the chosen fallback encoding.
      return decode(bytes, encoding)
    }
    case 'ArrayBuffer': {
      // A single ArrayBuffer whose contents are all of the bytes.
      return combineByteSequences(bytes).buffer
    }
    case 'BinaryString': {
      // One code unit per byte, values in [0..255].
      const latin1Decoder = new StringDecoder('latin1')
      let binaryString = ''
      for (const chunk of bytes) {
        binaryString += latin1Decoder.write(chunk)
      }
      binaryString += latin1Decoder.end()
      return binaryString
    }
  }
}
/**
 * Decode the collected byte chunks as text, honouring any byte-order
 * mark; a BOM both selects the encoding and is stripped from the output.
 * @see https://encoding.spec.whatwg.org/#decode
 * @param {Uint8Array[]} ioQueue
 * @param {string} encoding fallback encoding when no BOM is present
 */
function decode (ioQueue, encoding) {
  const bytes = combineByteSequences(ioQueue)
  // A BOM overrides the caller-supplied encoding.
  const sniffed = BOMSniffing(bytes)
  let bomLength = 0
  if (sniffed !== null) {
    encoding = sniffed
    // The UTF-8 BOM is 3 bytes; the UTF-16 BOMs are 2. Skip it.
    bomLength = sniffed === 'UTF-8' ? 3 : 2
  }
  return new TextDecoder(encoding).decode(bytes.slice(bomLength))
}
/**
 * Inspect the leading bytes of the queue for a byte-order mark.
 * @see https://encoding.spec.whatwg.org/#bom-sniff
 * @param {Uint8Array} ioQueue
 * @returns {'UTF-8'|'UTF-16BE'|'UTF-16LE'|null} the encoding the BOM
 * selects, or null when no BOM is present
 */
function BOMSniffing (ioQueue) {
  // Peek at (up to) the first three bytes; missing bytes come back as
  // undefined and simply fail every comparison below.
  const [first, second, third] = ioQueue
  if (first === 0xEF && second === 0xBB && third === 0xBF) {
    return 'UTF-8'
  }
  if (first === 0xFE && second === 0xFF) {
    return 'UTF-16BE'
  }
  if (first === 0xFF && second === 0xFE) {
    return 'UTF-16LE'
  }
  return null
}
/**
 * Concatenate a list of byte chunks into one contiguous Uint8Array.
 * @param {Uint8Array[]} sequences
 * @returns {Uint8Array}
 */
function combineByteSequences (sequences) {
  // First pass: total size, so the output is allocated exactly once.
  let total = 0
  for (const sequence of sequences) {
    total += sequence.byteLength
  }
  // Second pass: copy each chunk into place.
  const combined = new Uint8Array(total)
  let offset = 0
  for (const sequence of sequences) {
    combined.set(sequence, offset)
    offset += sequence.byteLength
  }
  return combined
}
module . exports = {
staticPropertyDescriptors ,
readOperation ,
fireAProgressEvent
}
/***/ } ) ,
/***/ 1892 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// We include a version number for the Dispatcher API. In case of breaking changes,
// this version number must be increased to avoid conflicts.
const globalDispatcher = Symbol . for ( 'undici.globalDispatcher.1' )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const Agent = _ _nccwpck _require _ _ ( 7890 )
if ( getGlobalDispatcher ( ) === undefined ) {
setGlobalDispatcher ( new Agent ( ) )
}
/**
 * Install `agent` as the process-wide dispatcher, stored on globalThis
 * under a well-known symbol so separate copies of the library share it.
 * @throws {InvalidArgumentError} when agent lacks a dispatch method
 */
function setGlobalDispatcher (agent) {
  const implementsDispatch = agent && typeof agent.dispatch === 'function'
  if (!implementsDispatch) {
    throw new InvalidArgumentError('Argument agent must implement Agent')
  }
  // writable: a later call may replace the dispatcher;
  // enumerable: false keeps it out of globalThis inspection;
  // configurable: false prevents deletion of the slot.
  Object.defineProperty(globalThis, globalDispatcher, {
    value: agent,
    writable: true,
    enumerable: false,
    configurable: false
  })
}
// Read back the dispatcher stored on globalThis by setGlobalDispatcher
// (undefined before the first install).
function getGlobalDispatcher ( ) {
return globalThis [ globalDispatcher ]
}
module . exports = {
setGlobalDispatcher ,
getGlobalDispatcher
}
/***/ } ) ,
/***/ 6930 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = class DecoratorHandler {
constructor ( handler ) {
this . handler = handler
}
onConnect ( ... args ) {
return this . handler . onConnect ( ... args )
}
onError ( ... args ) {
return this . handler . onError ( ... args )
}
onUpgrade ( ... args ) {
return this . handler . onUpgrade ( ... args )
}
onHeaders ( ... args ) {
return this . handler . onHeaders ( ... args )
}
onData ( ... args ) {
return this . handler . onData ( ... args )
}
onComplete ( ... args ) {
return this . handler . onComplete ( ... args )
}
onBodySent ( ... args ) {
return this . handler . onBodySent ( ... args )
}
}
/***/ } ) ,
/***/ 2860 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const util = _ _nccwpck _require _ _ ( 3983 )
const { kBodyUsed } = _ _nccwpck _require _ _ ( 2785 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const EE = _ _nccwpck _require _ _ ( 2361 )
// Status codes that may carry a Location header we are willing to follow.
const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]
// Private key for stashing the wrapped body on BodyAsyncIterable.
const kBody = Symbol('body')
class BodyAsyncIterable {
constructor ( body ) {
this [ kBody ] = body
this [ kBodyUsed ] = false
}
async * [ Symbol . asyncIterator ] ( ) {
assert ( ! this [ kBodyUsed ] , 'disturbed' )
this [ kBodyUsed ] = true
yield * this [ kBody ]
}
}
// Dispatch handler that transparently follows 3xx redirects, re-dispatching
// a copy of the request options for each hop up to maxRedirections, and
// forwarding all other events to the wrapped handler.
class RedirectHandler {
  constructor (dispatch, maxRedirections, opts, handler) {
    if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    util.validateHandler(handler, opts.method, opts.upgrade)

    this.dispatch = dispatch
    this.location = null
    this.abort = null
    this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy
    this.maxRedirections = maxRedirections
    this.handler = handler
    this.history = []

    if (util.isStream(this.opts.body)) {
      // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
      // so that it can be dispatched again?
      // TODO (fix): Do we need 100-expect support to provide a way to do this properly?
      if (util.bodyLength(this.opts.body) === 0) {
        // A zero-length stream must never emit data; assert if it does.
        this.opts.body
          .on('data', function () {
            assert(false)
          })
      }

      if (typeof this.opts.body.readableDidRead !== 'boolean') {
        // Older streams lack readableDidRead; track consumption manually.
        this.opts.body[kBodyUsed] = false
        EE.prototype.on.call(this.opts.body, 'data', function () {
          this[kBodyUsed] = true
        })
      }
    } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {
      // TODO (fix): We can't access ReadableStream internal state
      // to determine whether or not it has been disturbed. This is just
      // a workaround.
      this.opts.body = new BodyAsyncIterable(this.opts.body)
    } else if (
      this.opts.body &&
      typeof this.opts.body !== 'string' &&
      !ArrayBuffer.isView(this.opts.body) &&
      util.isIterable(this.opts.body)
    ) {
      // TODO: Should we allow re-using iterable if !this.opts.idempotent
      // or through some other flag?
      this.opts.body = new BodyAsyncIterable(this.opts.body)
    }
  }

  onConnect (abort) {
    this.abort = abort
    this.handler.onConnect(abort, { history: this.history })
  }

  onUpgrade (statusCode, headers, socket) {
    this.handler.onUpgrade(statusCode, headers, socket)
  }

  onError (error) {
    this.handler.onError(error)
  }

  onHeaders (statusCode, headers, resume, statusText) {
    // Stop following when the redirect budget is spent or the body was
    // already consumed (it cannot be replayed).
    this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)
      ? null
      : parseLocation(statusCode, headers)

    if (this.opts.origin) {
      this.history.push(new URL(this.opts.path, this.opts.origin))
    }

    // No redirect to follow: pass the response through unchanged.
    if (!this.location) {
      return this.handler.onHeaders(statusCode, headers, resume, statusText)
    }

    // Resolve the (possibly relative) Location against the current request URL.
    const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
    const path = search ? `${pathname}${search}` : pathname

    // Remove headers referring to the original URL.
    // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers.
    // https://tools.ietf.org/html/rfc7231#section-6.4
    this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)
    this.opts.path = path
    this.opts.origin = origin
    this.opts.maxRedirections = 0
    this.opts.query = null

    // https://tools.ietf.org/html/rfc7231#section-6.4.4
    // In case of HTTP 303, always replace method to be either HEAD or GET
    if (statusCode === 303 && this.opts.method !== 'HEAD') {
      this.opts.method = 'GET'
      this.opts.body = null
    }
  }

  onData (chunk) {
    if (this.location) {
      /*
      https://tools.ietf.org/html/rfc7231#section-6.4

      TLDR: undici always ignores 3xx response bodies.

      Redirection is used to serve the requested resource from another URL, so it is assumes that
      no body is generated (and thus can be ignored). Even though generating a body is not prohibited.

      For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually
      (which means it's optional and not mandated) contain just an hyperlink to the value of
      the Location response header, so the body can be ignored safely.

      For status 300, which is "Multiple Choices", the spec mentions both generating a Location
      response header AND a response body with the other possible location to follow.
      Since the spec explicitily chooses not to specify a format for such body and leave it to
      servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it.
      */
    } else {
      return this.handler.onData(chunk)
    }
  }

  onComplete (trailers) {
    if (this.location) {
      /*
      https://tools.ietf.org/html/rfc7231#section-6.4

      TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections
      and neither are useful if present.

      See comment on onData method above for more detailed informations.
      */

      this.location = null
      this.abort = null

      // Re-dispatch the mutated request options, with ourselves as handler,
      // to follow the redirect.
      this.dispatch(this.opts, this)
    } else {
      this.handler.onComplete(trailers)
    }
  }

  onBodySent (chunk) {
    // onBodySent is an optional hook on the wrapped handler.
    if (this.handler.onBodySent) {
      this.handler.onBodySent(chunk)
    }
  }
}
// Returns the Location header value for a redirectable status code,
// null for non-redirect statuses, and undefined when the header is absent.
function parseLocation (statusCode, headers) {
  if (!redirectableStatusCodes.includes(statusCode)) {
    return null
  }

  // Headers arrive as a flat [name, value, name, value, ...] array.
  for (let idx = 0; idx < headers.length; idx += 2) {
    if (headers[idx].toString().toLowerCase() === 'location') {
      return headers[idx + 1]
    }
  }
}
// https://tools.ietf.org/html/rfc7231#section-6.4.4
// Decides whether a request header must be dropped before following a
// redirect: Host always; Content-* when removeContent (303); credentials
// (Authorization, Cookie) when the redirect crosses origins.
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
  const name = header.toString().toLowerCase()
  if (header.length === 4 && name === 'host') {
    return true
  }
  if (removeContent && name.startsWith('content-')) {
    return true
  }
  if (unknownOrigin && header.length === 13 && name === 'authorization') {
    return true
  }
  if (unknownOrigin && header.length === 6 && name === 'cookie') {
    return true
  }
  return false
}
// https://tools.ietf.org/html/rfc7231#section-6.4
// Builds a flat [name, value, ...] header list with redirect-sensitive
// entries (per shouldRemoveHeader) filtered out. Accepts a flat array,
// a plain object, or null/undefined.
function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
  const ret = []
  if (Array.isArray(headers)) {
    // Flat [name, value, name, value, ...] form.
    for (let idx = 0; idx < headers.length; idx += 2) {
      if (!shouldRemoveHeader(headers[idx], removeContent, unknownOrigin)) {
        ret.push(headers[idx], headers[idx + 1])
      }
    }
  } else if (headers && typeof headers === 'object') {
    // Plain object form.
    for (const [key, value] of Object.entries(headers)) {
      if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {
        ret.push(key, value)
      }
    }
  } else {
    assert(headers == null, 'headers must be an object or an array')
  }
  return ret
}

module.exports = RedirectHandler
/***/ } ) ,
/***/ 8861 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
const RedirectHandler = _ _nccwpck _require _ _ ( 2860 )
function createRedirectInterceptor ( { maxRedirections : defaultMaxRedirections } ) {
return ( dispatch ) => {
return function Intercept ( opts , handler ) {
const { maxRedirections = defaultMaxRedirections } = opts
if ( ! maxRedirections ) {
return dispatch ( opts , handler )
}
const redirectHandler = new RedirectHandler ( dispatch , maxRedirections , opts , handler )
opts = { ... opts , maxRedirections : 0 } // Stop sub dispatcher from also redirecting.
return dispatch ( opts , redirectHandler )
}
2022-12-25 13:58:23 +08:00
}
2023-07-27 11:01:06 +00:00
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
module . exports = createRedirectInterceptor
/***/ } ) ,
/***/ 953 :
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

// Constants compiled from llhttp's TypeScript sources. Numeric enums are
// emitted in the standard TS shape: forward (name -> value) plus reverse
// (value -> name) entries on the same object.
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
const utils_1 = __nccwpck_require__(1891);
// C headers
// Parser error/pause codes reported by llhttp.
var ERROR;
(function (ERROR) {
    ERROR[ERROR["OK"] = 0] = "OK";
    ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL";
    ERROR[ERROR["STRICT"] = 2] = "STRICT";
    ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED";
    ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH";
    ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION";
    ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD";
    ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL";
    ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT";
    ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION";
    ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN";
    ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH";
    ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE";
    ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS";
    ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE";
    ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING";
    ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN";
    ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE";
    ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE";
    ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER";
    ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE";
    ERROR[ERROR["PAUSED"] = 21] = "PAUSED";
    ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE";
    ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE";
    ERROR[ERROR["USER"] = 24] = "USER";
})(ERROR = exports.ERROR || (exports.ERROR = {}));
// Parser mode: request-only, response-only, or autodetect.
var TYPE;
(function (TYPE) {
    TYPE[TYPE["BOTH"] = 0] = "BOTH";
    TYPE[TYPE["REQUEST"] = 1] = "REQUEST";
    TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE";
})(TYPE = exports.TYPE || (exports.TYPE = {}));
// Bit flags describing message framing discovered while parsing headers.
var FLAGS;
(function (FLAGS) {
    FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE";
    FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE";
    FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE";
    FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED";
    FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE";
    FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH";
    FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY";
    FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING";
    // 1 << 8 is unused
    FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING";
})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));
// Bit flags selecting which spec violations the parser tolerates.
var LENIENT_FLAGS;
(function (LENIENT_FLAGS) {
    LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS";
    LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH";
    LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE";
})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));
// Every request method the parser recognizes (HTTP, WebDAV, RTSP, ...).
var METHODS;
(function (METHODS) {
    METHODS[METHODS["DELETE"] = 0] = "DELETE";
    METHODS[METHODS["GET"] = 1] = "GET";
    METHODS[METHODS["HEAD"] = 2] = "HEAD";
    METHODS[METHODS["POST"] = 3] = "POST";
    METHODS[METHODS["PUT"] = 4] = "PUT";
    /* pathological */
    METHODS[METHODS["CONNECT"] = 5] = "CONNECT";
    METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS";
    METHODS[METHODS["TRACE"] = 7] = "TRACE";
    /* WebDAV */
    METHODS[METHODS["COPY"] = 8] = "COPY";
    METHODS[METHODS["LOCK"] = 9] = "LOCK";
    METHODS[METHODS["MKCOL"] = 10] = "MKCOL";
    METHODS[METHODS["MOVE"] = 11] = "MOVE";
    METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND";
    METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH";
    METHODS[METHODS["SEARCH"] = 14] = "SEARCH";
    METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK";
    METHODS[METHODS["BIND"] = 16] = "BIND";
    METHODS[METHODS["REBIND"] = 17] = "REBIND";
    METHODS[METHODS["UNBIND"] = 18] = "UNBIND";
    METHODS[METHODS["ACL"] = 19] = "ACL";
    /* subversion */
    METHODS[METHODS["REPORT"] = 20] = "REPORT";
    METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY";
    METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT";
    METHODS[METHODS["MERGE"] = 23] = "MERGE";
    /* upnp */
    METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH";
    METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY";
    METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE";
    METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE";
    /* RFC-5789 */
    METHODS[METHODS["PATCH"] = 28] = "PATCH";
    METHODS[METHODS["PURGE"] = 29] = "PURGE";
    /* CalDAV */
    METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR";
    /* RFC-2068, section 19.6.1.2 */
    METHODS[METHODS["LINK"] = 31] = "LINK";
    METHODS[METHODS["UNLINK"] = 32] = "UNLINK";
    /* icecast */
    METHODS[METHODS["SOURCE"] = 33] = "SOURCE";
    /* RFC-7540, section 11.6 */
    METHODS[METHODS["PRI"] = 34] = "PRI";
    /* RFC-2326 RTSP */
    METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE";
    METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE";
    METHODS[METHODS["SETUP"] = 37] = "SETUP";
    METHODS[METHODS["PLAY"] = 38] = "PLAY";
    METHODS[METHODS["PAUSE"] = 39] = "PAUSE";
    METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN";
    METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER";
    METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER";
    METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT";
    METHODS[METHODS["RECORD"] = 44] = "RECORD";
    /* RAOP */
    METHODS[METHODS["FLUSH"] = 45] = "FLUSH";
})(METHODS = exports.METHODS || (exports.METHODS = {}));
// Methods accepted when parsing plain HTTP.
exports.METHODS_HTTP = [
    METHODS.DELETE,
    METHODS.GET,
    METHODS.HEAD,
    METHODS.POST,
    METHODS.PUT,
    METHODS.CONNECT,
    METHODS.OPTIONS,
    METHODS.TRACE,
    METHODS.COPY,
    METHODS.LOCK,
    METHODS.MKCOL,
    METHODS.MOVE,
    METHODS.PROPFIND,
    METHODS.PROPPATCH,
    METHODS.SEARCH,
    METHODS.UNLOCK,
    METHODS.BIND,
    METHODS.REBIND,
    METHODS.UNBIND,
    METHODS.ACL,
    METHODS.REPORT,
    METHODS.MKACTIVITY,
    METHODS.CHECKOUT,
    METHODS.MERGE,
    METHODS['M-SEARCH'],
    METHODS.NOTIFY,
    METHODS.SUBSCRIBE,
    METHODS.UNSUBSCRIBE,
    METHODS.PATCH,
    METHODS.PURGE,
    METHODS.MKCALENDAR,
    METHODS.LINK,
    METHODS.UNLINK,
    METHODS.PRI,
    // TODO(indutny): should we allow it with HTTP?
    METHODS.SOURCE,
];
// Methods accepted when parsing icecast.
exports.METHODS_ICE = [
    METHODS.SOURCE,
];
// Methods accepted when parsing RTSP.
exports.METHODS_RTSP = [
    METHODS.OPTIONS,
    METHODS.DESCRIBE,
    METHODS.ANNOUNCE,
    METHODS.SETUP,
    METHODS.PLAY,
    METHODS.PAUSE,
    METHODS.TEARDOWN,
    METHODS.GET_PARAMETER,
    METHODS.SET_PARAMETER,
    METHODS.REDIRECT,
    METHODS.RECORD,
    METHODS.FLUSH,
    // For AirPlay
    METHODS.GET,
    METHODS.POST,
];
// Forward-only (name -> value) view of the METHODS enum.
exports.METHOD_MAP = utils_1.enumToMap(METHODS);
// Subset of METHOD_MAP restricted to method names starting with "H"
// (i.e. HEAD among the entries above).
exports.H_METHOD_MAP = {};
Object.keys(exports.METHOD_MAP).forEach((key) => {
    if (/^H/.test(key)) {
        exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];
    }
});
// Whether it is safe to finish (EOF) the message from the current state.
var FINISH;
(function (FINISH) {
    FINISH[FINISH["SAFE"] = 0] = "SAFE";
    FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB";
    FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE";
})(FINISH = exports.FINISH || (exports.FINISH = {}));
// ASCII letters, upper- and lower-case interleaved (A, a, B, b, ...).
exports.ALPHA = [];
for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
    // Upper case
    exports.ALPHA.push(String.fromCharCode(i));
    // Lower case
    exports.ALPHA.push(String.fromCharCode(i + 0x20));
}
exports.NUM_MAP = {
    0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
    5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
};
exports.HEX_MAP = {
    0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
    5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
    A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,
    a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,
};
exports.NUM = [
    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
];
exports.ALPHANUM = exports.ALPHA.concat(exports.NUM);
exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')'];
exports.USERINFO_CHARS = exports.ALPHANUM
    .concat(exports.MARK)
    .concat(['%', ';', ':', '&', '=', '+', '$', ',']);
// TODO(indutny): use RFC
exports.STRICT_URL_CHAR = [
    '!', '"', '$', '%', '&', '\'',
    '(', ')', '*', '+', ',', '-', '.', '/',
    ':', ';', '<', '=', '>',
    '@', '[', '\\', ']', '^', '_',
    '`',
    '{', '|', '}', '~',
].concat(exports.ALPHANUM);
exports.URL_CHAR = exports.STRICT_URL_CHAR
    .concat(['\t', '\f']);
// All characters with 0x80 bit set to 1
// NOTE: these are pushed as numeric char codes, unlike the string entries above.
for (let i = 0x80; i <= 0xff; i++) {
    exports.URL_CHAR.push(i);
}
exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
/* Tokens as defined by rfc 2616. Also lowercases them.
 *        token       = 1*<any CHAR except CTLs or separators>
 *     separators     = "(" | ")" | "<" | ">" | "@"
 *                    | "," | ";" | ":" | "\" | <">
 *                    | "/" | "[" | "]" | "?" | "="
 *                    | "{" | "}" | SP | HT
 */
exports.STRICT_TOKEN = [
    '!', '#', '$', '%', '&', '\'',
    '*', '+', '-', '.',
    '^', '_', '`',
    '|', '~',
].concat(exports.ALPHANUM);
exports.TOKEN = exports.STRICT_TOKEN.concat([' ']);
/*
 * Verify that a char is a valid visible (printable) US-ASCII
 * character or %x80-FF
 */
exports.HEADER_CHARS = ['\t'];
for (let i = 32; i <= 255; i++) {
    if (i !== 127) {
        exports.HEADER_CHARS.push(i);
    }
}
// ',' = \x2c (decimal 44); excluded because it separates connection tokens.
exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);
exports.MAJOR = exports.NUM_MAP;
exports.MINOR = exports.MAJOR;
// Per-header parser states for the headers llhttp treats specially.
var HEADER_STATE;
(function (HEADER_STATE) {
    HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL";
    HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION";
    HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH";
    HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING";
    HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE";
    HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE";
    HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE";
    HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE";
    HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED";
})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {}));
// Lowercased header names mapped to their special parser state.
exports.SPECIAL_HEADERS = {
    'connection': HEADER_STATE.CONNECTION,
    'content-length': HEADER_STATE.CONTENT_LENGTH,
    'proxy-connection': HEADER_STATE.CONNECTION,
    'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING,
    'upgrade': HEADER_STATE.UPGRADE,
};
//# sourceMappingURL=constants.js.map
/***/ }),
/***/ 1145 :
/***/ ( ( module ) => {
module . exports = ' AGFzbQEAAAABMAhgAX8Bf2ADf39 / AX9gBH9 / f38Bf2AAAGADf39 / AGABfwBgAn9 / AGAGf39 / f39 / AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAAMBBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCtnkAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQy4CAgAAACyAAQcO
WgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDLgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMuAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0 + P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H + sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75 + 
AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja + AgAAPC0HqoYCAAA8LQbStgIAADwtB0q + AgAAPC0HfsoCAAA8LQdKygIAAD
/***/ } ) ,
/***/ 5627 :
/***/ ( ( module ) => {
module . exports = ' AGFzbQEAAAABMAhgAX8Bf2ADf39 / AX9gBH9 / f38Bf2AAAGADf39 / AGABfwBgAn9 / AGAGf39 / f39 / AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAAMBBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsnkAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQy4CAgAAACyAAQcO
WgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDLgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMuAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0 + P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H + sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75 + 
AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja + AgAAPC0HqoYCAAA8LQbStgIAADwtB0q + AgAAPC0HfsoCAAA8LQdKygIAAD
/***/ } ) ,
/***/ 1891 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . enumToMap = void 0 ;
/**
 * Copies only the numeric-valued entries of a TS reverse-mapped enum object,
 * dropping the value -> name reverse keys.
 * @param {object} obj - compiled enum object.
 * @returns {object} name -> numeric value map.
 */
function enumToMap(obj) {
    const res = {};
    for (const key of Object.keys(obj)) {
        const value = obj[key];
        if (typeof value === 'number') {
            res[key] = value;
        }
    }
    return res;
}
exports . enumToMap = enumToMap ;
//# sourceMappingURL=utils.js.map
/***/ } ) ,
/***/ 6771 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { kClients } = _ _nccwpck _require _ _ ( 2785 )
const Agent = _ _nccwpck _require _ _ ( 7890 )
const {
kAgent ,
kMockAgentSet ,
kMockAgentGet ,
kDispatches ,
kIsMockActive ,
kNetConnect ,
kGetNetConnect ,
kOptions ,
kFactory
} = _ _nccwpck _require _ _ ( 4347 )
const MockClient = _ _nccwpck _require _ _ ( 8687 )
const MockPool = _ _nccwpck _require _ _ ( 6193 )
const { matchValue , buildMockOptions } = _ _nccwpck _require _ _ ( 9323 )
const { InvalidArgumentError , UndiciError } = _ _nccwpck _require _ _ ( 8045 )
const Dispatcher = _ _nccwpck _require _ _ ( 412 )
const Pluralizer = _ _nccwpck _require _ _ ( 8891 )
const PendingInterceptorsFormatter = _ _nccwpck _require _ _ ( 6823 )
// Strong-reference stand-in mirroring the WeakRef deref() API, used where a
// real WeakRef is unavailable or undesired.
class FakeWeakRef {
  constructor (referent) {
    this.value = referent
  }

  deref () {
    return this.value
  }
}
// Test-oriented Dispatcher facade: routes dispatches through a real Agent
// while letting callers register MockClient/MockPool interceptors per origin
// and control whether real network connections are permitted.
class MockAgent extends Dispatcher {
  constructor (opts) {
    super(opts)

    this[kNetConnect] = true
    this[kIsMockActive] = true

    // Instantiate Agent and encapsulate
    if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) {
      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
    }
    const agent = opts && opts.agent ? opts.agent : new Agent(opts)
    this[kAgent] = agent

    // Share the agent's client map so mock dispatchers replace real ones.
    this[kClients] = agent[kClients]
    this[kOptions] = buildMockOptions(opts)
  }

  // Returns (creating on demand) the mock dispatcher registered for origin.
  get (origin) {
    let dispatcher = this[kMockAgentGet](origin)

    if (!dispatcher) {
      dispatcher = this[kFactory](origin)
      this[kMockAgentSet](origin, dispatcher)
    }
    return dispatcher
  }

  dispatch (opts, handler) {
    // Call MockAgent.get to perform additional setup before dispatching as normal
    this.get(opts.origin)
    return this[kAgent].dispatch(opts, handler)
  }

  async close () {
    await this[kAgent].close()
    this[kClients].clear()
  }

  deactivate () {
    this[kIsMockActive] = false
  }

  activate () {
    this[kIsMockActive] = true
  }

  // Permit real network connections, optionally restricted to hosts matching
  // the given string/function/RegExp matcher (matchers accumulate).
  enableNetConnect (matcher) {
    if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {
      if (Array.isArray(this[kNetConnect])) {
        this[kNetConnect].push(matcher)
      } else {
        this[kNetConnect] = [matcher]
      }
    } else if (typeof matcher === 'undefined') {
      // No matcher: allow all real connections.
      this[kNetConnect] = true
    } else {
      throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')
    }
  }

  disableNetConnect () {
    this[kNetConnect] = false
  }

  // This is required to bypass issues caused by using global symbols - see:
  // https://github.com/nodejs/undici/issues/1447
  get isMockActive () {
    return this[kIsMockActive]
  }

  [kMockAgentSet] (origin, dispatcher) {
    // Wrapped in FakeWeakRef so lookups can uniformly call deref().
    this[kClients].set(origin, new FakeWeakRef(dispatcher))
  }

  // Builds a MockClient (connections === 1) or MockPool for the origin.
  [kFactory] (origin) {
    const mockOptions = Object.assign({ agent: this }, this[kOptions])
    return this[kOptions] && this[kOptions].connections === 1
      ? new MockClient(origin, mockOptions)
      : new MockPool(origin, mockOptions)
  }

  [kMockAgentGet] (origin) {
    // First check if we can immediately find it
    const ref = this[kClients].get(origin)
    if (ref) {
      return ref.deref()
    }

    // If the origin is not a string create a dummy parent pool and return to user
    if (typeof origin !== 'string') {
      const dispatcher = this[kFactory]('http://localhost:9999')
      this[kMockAgentSet](origin, dispatcher)
      return dispatcher
    }

    // If we match, create a pool and assign the same dispatches
    for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) {
      const nonExplicitDispatcher = nonExplicitRef.deref()
      if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
        const dispatcher = this[kFactory](origin)
        this[kMockAgentSet](origin, dispatcher)
        dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches]
        return dispatcher
      }
    }
  }

  [kGetNetConnect] () {
    return this[kNetConnect]
  }

  // Lists registered interceptors that have not yet been consumed.
  pendingInterceptors () {
    const mockAgentClients = this[kClients]

    return Array.from(mockAgentClients.entries())
      .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin })))
      .filter(({ pending }) => pending)
  }

  // Throws (with a formatted report) when any interceptor is still pending.
  assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {
    const pending = this.pendingInterceptors()

    if (pending.length === 0) {
      return
    }

    const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length)

    throw new UndiciError(`
${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending:

${pendingInterceptorsFormatter.format(pending)}
`.trim())
  }
}

module.exports = MockAgent

/***/ }),
/***/ 8687 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { promisify } = _ _nccwpck _require _ _ ( 3837 )
const Client = _ _nccwpck _require _ _ ( 3598 )
const { buildMockDispatch } = _ _nccwpck _require _ _ ( 9323 )
const {
kDispatches ,
kMockAgent ,
kClose ,
kOriginalClose ,
kOrigin ,
kOriginalDispatch ,
kConnected
} = _ _nccwpck _require _ _ ( 4347 )
const { MockInterceptor } = _ _nccwpck _require _ _ ( 410 )
const Symbols = _ _nccwpck _require _ _ ( 2785 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
/**
 * MockClient provides an API that extends the Client to influence the mockDispatches.
 */
class MockClient extends Client {
  constructor (origin, opts) {
    super(origin, opts)

    if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
    }

    this[kMockAgent] = opts.agent
    this[kOrigin] = origin
    this[kDispatches] = []
    this[kConnected] = 1
    // Keep the real implementations around so the mock hooks can delegate
    // to them and clean up on close.
    this[kOriginalDispatch] = this.dispatch
    this[kOriginalClose] = this.close.bind(this)

    this.dispatch = buildMockDispatch.call(this)
    this.close = this[kClose]
  }

  get [Symbols.kConnected] () {
    return this[kConnected]
  }

  /**
   * Sets up the base interceptor for mocking replies from undici.
   */
  intercept (opts) {
    return new MockInterceptor(opts, this[kDispatches])
  }

  async [kClose] () {
    // Close the underlying client, mark as disconnected, and deregister
    // from the owning MockAgent.
    await promisify(this[kOriginalClose])()
    this[kConnected] = 0
    this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
  }
}
module . exports = MockClient
/***/ } ) ,
/***/ 888 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { UndiciError } = _ _nccwpck _require _ _ ( 8045 )
// Raised when a dispatched request matches no registered mock dispatch.
class MockNotMatchedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, MockNotMatchedError)
    this.name = 'MockNotMatchedError'
    this.message = message || 'The request does not match any registered mock dispatches'
    this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
  }
}
module . exports = {
MockNotMatchedError
}
/***/ } ) ,
/***/ 410 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { getResponseData , buildKey , addMockDispatch } = _ _nccwpck _require _ _ ( 9323 )
const {
kDispatches ,
kDispatchKey ,
kDefaultHeaders ,
kDefaultTrailers ,
kContentLength ,
kMockDispatch
} = _ _nccwpck _require _ _ ( 4347 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const { buildURL } = _ _nccwpck _require _ _ ( 3983 )
/**
 * Defines the scope API for an interceptor reply.
 */
class MockScope {
  constructor (mockDispatch) {
    this[kMockDispatch] = mockDispatch
  }

  /**
   * Delay a reply by a set amount in ms.
   */
  delay (waitInMs) {
    const valid = typeof waitInMs === 'number' && Number.isInteger(waitInMs) && waitInMs > 0
    if (!valid) {
      throw new InvalidArgumentError('waitInMs must be a valid integer > 0')
    }
    this[kMockDispatch].delay = waitInMs
    return this
  }

  /**
   * For a defined reply, never mark as consumed.
   */
  persist () {
    this[kMockDispatch].persist = true
    return this
  }

  /**
   * Allow one to define a reply for a set amount of matching requests.
   */
  times (repeatTimes) {
    const valid = typeof repeatTimes === 'number' && Number.isInteger(repeatTimes) && repeatTimes > 0
    if (!valid) {
      throw new InvalidArgumentError('repeatTimes must be a valid integer > 0')
    }
    this[kMockDispatch].times = repeatTimes
    return this
  }
}
/**
 * Defines an interceptor for a Mock.
 */
class MockInterceptor {
  constructor (opts, mockDispatches) {
    if (typeof opts !== 'object') {
      throw new InvalidArgumentError('opts must be an object')
    }
    if (typeof opts.path === 'undefined') {
      throw new InvalidArgumentError('opts.path must be defined')
    }
    if (typeof opts.method === 'undefined') {
      opts.method = 'GET'
    }
    // See https://github.com/nodejs/undici/issues/1245
    // As per RFC 3986, clients are not supposed to send URI
    // fragments to servers when they retrieve a document,
    if (typeof opts.path === 'string') {
      if (opts.query) {
        opts.path = buildURL(opts.path, opts.query)
      } else {
        // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811
        const parsedURL = new URL(opts.path, 'data://')
        opts.path = parsedURL.pathname + parsedURL.search
      }
    }
    if (typeof opts.method === 'string') {
      opts.method = opts.method.toUpperCase()
    }

    this[kDispatchKey] = buildKey(opts)
    this[kDispatches] = mockDispatches
    this[kDefaultHeaders] = {}
    this[kDefaultTrailers] = {}
    this[kContentLength] = false
  }

  // Builds the { statusCode, data, headers, trailers } record stored for a
  // reply, merging interceptor defaults with per-reply response options.
  createMockScopeDispatchData (statusCode, data, responseOptions = {}) {
    const responseData = getResponseData(data)
    // content-length is only attached when replyContentLength() was called.
    const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {}
    const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }
    const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }
    return { statusCode, data, headers, trailers }
  }

  // Validates the (statusCode, data, responseOptions) triple of a reply.
  validateReplyParameters (statusCode, data, responseOptions) {
    if (typeof statusCode === 'undefined') {
      throw new InvalidArgumentError('statusCode must be defined')
    }
    if (typeof data === 'undefined') {
      throw new InvalidArgumentError('data must be defined')
    }
    if (typeof responseOptions !== 'object') {
      throw new InvalidArgumentError('responseOptions must be an object')
    }
  }

  /**
   * Mock an undici request with a defined reply.
   */
  reply (replyData) {
    // Callback form: resolving status/data/options is deferred until the
    // reply callback is actually invoked at dispatch time.
    if (typeof replyData === 'function') {
      const wrappedDefaultsCallback = (opts) => {
        const resolvedData = replyData(opts)
        if (typeof resolvedData !== 'object') {
          throw new InvalidArgumentError('reply options callback must return an object')
        }
        const { statusCode, data = '', responseOptions = {} } = resolvedData
        this.validateReplyParameters(statusCode, data, responseOptions)
        return {
          ...this.createMockScopeDispatchData(statusCode, data, responseOptions)
        }
      }
      const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback)
      return new MockScope(newMockDispatch)
    }

    // Positional form: reply(statusCode[, data[, responseOptions]]).
    const [statusCode, data = '', responseOptions = {}] = [...arguments]
    this.validateReplyParameters(statusCode, data, responseOptions)

    const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions)
    const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)
    return new MockScope(newMockDispatch)
  }

  /**
   * Mock an undici request with a defined error.
   */
  replyWithError (error) {
    if (typeof error === 'undefined') {
      throw new InvalidArgumentError('error must be defined')
    }
    const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error })
    return new MockScope(newMockDispatch)
  }

  /**
   * Set default reply headers on the interceptor for subsequent replies.
   */
  defaultReplyHeaders (headers) {
    if (typeof headers === 'undefined') {
      throw new InvalidArgumentError('headers must be defined')
    }
    this[kDefaultHeaders] = headers
    return this
  }

  /**
   * Set default reply trailers on the interceptor for subsequent replies.
   */
  defaultReplyTrailers (trailers) {
    if (typeof trailers === 'undefined') {
      throw new InvalidArgumentError('trailers must be defined')
    }
    this[kDefaultTrailers] = trailers
    return this
  }

  /**
   * Set reply content length header for replies on the interceptor.
   */
  replyContentLength () {
    this[kContentLength] = true
    return this
  }
}
Object.assign(module.exports, { MockInterceptor, MockScope })
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ 6193 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const { promisify } = _ _nccwpck _require _ _ ( 3837 )
const Pool = _ _nccwpck _require _ _ ( 4634 )
const { buildMockDispatch } = _ _nccwpck _require _ _ ( 9323 )
const {
kDispatches ,
kMockAgent ,
kClose ,
kOriginalClose ,
kOrigin ,
kOriginalDispatch ,
kConnected
} = _ _nccwpck _require _ _ ( 4347 )
const { MockInterceptor } = _ _nccwpck _require _ _ ( 410 )
const Symbols = _ _nccwpck _require _ _ ( 2785 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* MockPool provides an API that extends the Pool to influence the mockDispatches .
* /
class MockPool extends Pool {
constructor ( origin , opts ) {
super ( origin , opts )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
if ( ! opts || ! opts . agent || typeof opts . agent . dispatch !== 'function' ) {
throw new InvalidArgumentError ( 'Argument opts.agent must implement Agent' )
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
this [ kMockAgent ] = opts . agent
this [ kOrigin ] = origin
this [ kDispatches ] = [ ]
this [ kConnected ] = 1
this [ kOriginalDispatch ] = this . dispatch
this [ kOriginalClose ] = this . close . bind ( this )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
this . dispatch = buildMockDispatch . call ( this )
this . close = this [ kClose ]
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
get [ Symbols . kConnected ] ( ) {
return this [ kConnected ]
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* Sets up the base interceptor for mocking replies from undici .
* /
intercept ( opts ) {
return new MockInterceptor ( opts , this [ kDispatches ] )
2022-12-25 13:58:23 +08:00
}
2023-07-27 11:01:06 +00:00
async [ kClose ] ( ) {
await promisify ( this [ kOriginalClose ] ) ( )
this [ kConnected ] = 0
this [ kMockAgent ] [ Symbols . kClients ] . delete ( this [ kOrigin ] )
}
}
module . exports = MockPool
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 4347 :
/***/ ( ( module ) => {
2022-12-25 13:58:23 +08:00
"use strict" ;
2023-07-27 11:01:06 +00:00
module . exports = {
kAgent : Symbol ( 'agent' ) ,
kOptions : Symbol ( 'options' ) ,
kFactory : Symbol ( 'factory' ) ,
kDispatches : Symbol ( 'dispatches' ) ,
kDispatchKey : Symbol ( 'dispatch key' ) ,
kDefaultHeaders : Symbol ( 'default headers' ) ,
kDefaultTrailers : Symbol ( 'default trailers' ) ,
kContentLength : Symbol ( 'content length' ) ,
kMockAgent : Symbol ( 'mock agent' ) ,
kMockAgentSet : Symbol ( 'mock agent set' ) ,
kMockAgentGet : Symbol ( 'mock agent get' ) ,
kMockDispatch : Symbol ( 'mock dispatch' ) ,
kClose : Symbol ( 'close' ) ,
kOriginalClose : Symbol ( 'original agent close' ) ,
kOrigin : Symbol ( 'origin' ) ,
kIsMockActive : Symbol ( 'is mock active' ) ,
kNetConnect : Symbol ( 'net connect' ) ,
kGetNetConnect : Symbol ( 'get net connect' ) ,
kConnected : Symbol ( 'connected' )
}
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 9323 :
2022-12-25 13:58:23 +08:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
2023-07-27 11:01:06 +00:00
const { MockNotMatchedError } = _ _nccwpck _require _ _ ( 888 )
const {
kDispatches ,
kMockAgent ,
kOriginalDispatch ,
kOrigin ,
kGetNetConnect
} = _ _nccwpck _require _ _ ( 4347 )
const { buildURL , nop } = _ _nccwpck _require _ _ ( 3983 )
const { STATUS _CODES } = _ _nccwpck _require _ _ ( 3685 )
const {
types : {
isPromise
}
} = _ _nccwpck _require _ _ ( 3837 )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// A matcher may be an exact string, a RegExp, or a predicate function
// (which must return exactly `true` to match). Anything else never matches.
function matchValue (match, value) {
  if (typeof match === 'string') return match === value
  if (match instanceof RegExp) return match.test(value)
  if (typeof match === 'function') return match(value) === true
  return false
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Returns a copy of `headers` with every key lower-cased.
function lowerCaseEntries (headers) {
  const result = {}
  for (const [headerName, headerValue] of Object.entries(headers)) {
    result[headerName.toLocaleLowerCase()] = headerValue
  }
  return result
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* @ param { import ( '../../index' ) . Headers | string [ ] | Record < string , string > } headers
* @ param { string } key
* /
function getHeaderByName ( headers , key ) {
if ( Array . isArray ( headers ) ) {
for ( let i = 0 ; i < headers . length ; i += 2 ) {
if ( headers [ i ] . toLocaleLowerCase ( ) === key . toLocaleLowerCase ( ) ) {
return headers [ i + 1 ]
}
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
return undefined
} else if ( typeof headers . get === 'function' ) {
return headers . get ( key )
} else {
return lowerCaseEntries ( headers ) [ key . toLocaleLowerCase ( ) ]
2022-12-25 13:58:23 +08:00
}
2023-07-27 11:01:06 +00:00
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/** @param {string[]} headers */
function buildHeadersFromArray (headers) { // fetch HeadersList
  // Pair up the flat [name, value, name, value, ...] list into an object;
  // later duplicates overwrite earlier ones.
  const clone = headers.slice()
  const result = {}
  for (let index = 0; index < clone.length; index += 2) {
    result[clone[index]] = clone[index + 1]
  }
  return result
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Decides whether the request `headers` satisfy the mock's header
// expectations (a predicate function, an object of matchers, or absent).
function matchHeaders (mockDispatch, headers) {
  // Function matcher receives the lower-cased header entries.
  if (typeof mockDispatch.headers === 'function') {
    if (Array.isArray(headers)) { // fetch HeadersList
      headers = buildHeadersFromArray(headers)
    }
    return mockDispatch.headers(headers ? lowerCaseEntries(headers) : {})
  }
  // No expectations: always match.
  if (typeof mockDispatch.headers === 'undefined') {
    return true
  }
  if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') {
    return false
  }

  // Every expected header must be present and match its matcher.
  for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) {
    const headerValue = getHeaderByName(headers, matchHeaderName)
    if (!matchValue(matchHeaderValue, headerValue)) {
      return false
    }
  }
  return true
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Normalizes a path's query string by sorting its parameters, so two paths
// with the same params in different order compare equal. Non-strings and
// paths without exactly one '?' are returned untouched.
function safeUrl (path) {
  if (typeof path !== 'string') {
    return path
  }

  const segments = path.split('?')
  if (segments.length !== 2) {
    return path
  }

  const params = new URLSearchParams(segments.pop())
  params.sort()
  return [...segments, params.toString()].join('?')
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// True when a mock dispatch matches the request key on path, method,
// body (only if the mock specified one), and headers. All four checks are
// evaluated unconditionally, as user-supplied matchers may rely on being
// called.
function matchKey (mockDispatch, { path, method, body, headers }) {
  const pathOk = matchValue(mockDispatch.path, path)
  const methodOk = matchValue(mockDispatch.method, method)
  const bodyOk = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true
  const headersOk = matchHeaders(mockDispatch, headers)
  return pathOk && methodOk && bodyOk && headersOk
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Coerces reply data to something writable: Buffers pass through,
// objects are JSON-stringified, everything else is stringified.
function getResponseData (data) {
  if (Buffer.isBuffer(data)) {
    return data
  }
  return typeof data === 'object' ? JSON.stringify(data) : data.toString()
}
// Finds the first unconsumed mock dispatch matching `key`. Candidates are
// narrowed criterion by criterion (path, method, body, headers) so the
// thrown MockNotMatchedError names the first criterion that eliminated
// every remaining mock.
function getMockDispatch (mockDispatches, key) {
  const basePath = key.query ? buildURL(key.path, key.query) : key.path
  const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath

  let matched = mockDispatches
    .filter(({ consumed }) => !consumed)
    .filter(({ path }) => matchValue(safeUrl(path), resolvedPath))
  if (matched.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
  }

  matched = matched.filter(({ method }) => matchValue(method, key.method))
  if (matched.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)
  }

  matched = matched.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
  if (matched.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)
  }

  matched = matched.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
  if (matched.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)
  }

  return matched[0]
}
// Registers a new mock dispatch built from the request `key` and reply
// `data` (a callback is stored under data.callback for lazy resolution).
function addMockDispatch (mockDispatches, key, data) {
  const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false }
  const replyData = typeof data === 'function' ? { callback: data } : { ...data }
  const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } }
  mockDispatches.push(newMockDispatch)
  return newMockDispatch
}
// Removes the first consumed dispatch that matches `key`, if any.
function deleteMockDispatch (mockDispatches, key) {
  const index = mockDispatches.findIndex(
    (dispatch) => dispatch.consumed && matchKey(dispatch, key)
  )
  if (index !== -1) {
    mockDispatches.splice(index, 1)
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Extracts the matching-relevant fields of a dispatch's options.
function buildKey (opts) {
  const { path, method, body, headers, query } = opts
  return { path, method, body, headers, query }
}
2023-07-27 11:01:06 +00:00
// Flattens { name: value } into [Buffer(name), Buffer(value), ...] as
// undici's header callbacks expect; array values become arrays of Buffers.
// Uses flatMap instead of the previous reduce-with-spread, which rebuilt the
// accumulator array on every entry (accidentally O(n^2)).
function generateKeyValues (data) {
  return Object.entries(data).flatMap(([key, value]) => [
    Buffer.from(`${key}`),
    Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)
  ])
}
/**
 * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
 * @param {number} statusCode
 */
function getStatusText (statusCode) {
  // Fall back to 'unknown' for codes Node's http module doesn't name.
  return STATUS_CODES[statusCode] || 'unknown'
}
// Drains an async-iterable body and returns its contents as a UTF-8 string.
async function getResponse (body) {
  const chunks = []
  for await (const chunk of body) {
    chunks.push(chunk)
  }
  return Buffer.concat(chunks).toString('utf8')
}
2023-07-27 11:01:06 +00:00
/ * *
* Mock dispatch function used to simulate undici dispatches
* /
function mockDispatch ( opts , handler ) {
// Get mock dispatch from built key
const key = buildKey ( opts )
const mockDispatch = getMockDispatch ( this [ kDispatches ] , key )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
mockDispatch . timesInvoked ++
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Here's where we resolve a callback if a callback is present for the dispatch data.
if ( mockDispatch . data . callback ) {
mockDispatch . data = { ... mockDispatch . data , ... mockDispatch . data . callback ( opts ) }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Parse mockDispatch data
const { data : { statusCode , data , headers , trailers , error } , delay , persist } = mockDispatch
const { timesInvoked , times } = mockDispatch
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// If it's used up and not persistent, mark as consumed
mockDispatch . consumed = ! persist && timesInvoked >= times
mockDispatch . pending = timesInvoked < times
// If specified, trigger dispatch error
if ( error !== null ) {
deleteMockDispatch ( this [ kDispatches ] , key )
handler . onError ( error )
return true
2022-12-25 13:58:23 +08:00
}
2023-07-27 11:01:06 +00:00
// Handle the request with a delay if necessary
if ( typeof delay === 'number' && delay > 0 ) {
setTimeout ( ( ) => {
handleReply ( this [ kDispatches ] )
} , delay )
} else {
handleReply ( this [ kDispatches ] )
}
function handleReply ( mockDispatches , _data = data ) {
// fetch's HeadersList is a 1D string array
const optsHeaders = Array . isArray ( opts . headers )
? buildHeadersFromArray ( opts . headers )
: opts . headers
const body = typeof _data === 'function'
? _data ( { ... opts , headers : optsHeaders } )
: _data
// util.types.isPromise is likely needed for jest.
if ( isPromise ( body ) ) {
// If handleReply is asynchronous, throwing an error
// in the callback will reject the promise, rather than
// synchronously throw the error, which breaks some tests.
// Rather, we wait for the callback to resolve if it is a
// promise, and then re-run handleReply with the new body.
body . then ( ( newData ) => handleReply ( mockDispatches , newData ) )
return
}
const responseData = getResponseData ( body )
const responseHeaders = generateKeyValues ( headers )
const responseTrailers = generateKeyValues ( trailers )
handler . abort = nop
handler . onHeaders ( statusCode , responseHeaders , resume , getStatusText ( statusCode ) )
handler . onData ( Buffer . from ( responseData ) )
handler . onComplete ( responseTrailers )
deleteMockDispatch ( mockDispatches , key )
}
function resume ( ) { }
return true
2022-12-25 13:58:23 +08:00
}
2023-07-27 11:01:06 +00:00
// Returns the dispatch wrapper installed on MockClient/MockPool: when mocks
// are active it tries mockDispatch first, falling through to the real
// dispatcher only if net connect is permitted for the origin.
function buildMockDispatch () {
  const agent = this[kMockAgent]
  const origin = this[kOrigin]
  const originalDispatch = this[kOriginalDispatch]

  return function dispatch (opts, handler) {
    if (!agent.isMockActive) {
      originalDispatch.call(this, opts, handler)
      return
    }
    try {
      mockDispatch.call(this, opts, handler)
    } catch (error) {
      if (!(error instanceof MockNotMatchedError)) {
        throw error
      }
      const netConnect = agent[kGetNetConnect]()
      if (netConnect === false) {
        throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)
      }
      if (checkNetConnect(netConnect, origin)) {
        originalDispatch.call(this, opts, handler)
      } else {
        throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)
      }
    }
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// True when real connections to `origin` are permitted: either globally
// enabled (true) or the origin's host matches one of the registered matchers.
function checkNetConnect (netConnect, origin) {
  const url = new URL(origin)
  if (netConnect === true) {
    return true
  }
  return Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Returns a copy of `opts` without the (self-referential) agent, or
// undefined when no options were given.
function buildMockOptions (opts) {
  if (!opts) {
    return undefined
  }
  const { agent, ...mockOptions } = opts
  return mockOptions
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
module . exports = {
getResponseData ,
getMockDispatch ,
addMockDispatch ,
deleteMockDispatch ,
buildKey ,
generateKeyValues ,
matchValue ,
getResponse ,
getStatusText ,
mockDispatch ,
buildMockDispatch ,
checkNetConnect ,
buildMockOptions ,
getHeaderByName
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
/***/ 6823 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { Transform } = _ _nccwpck _require _ _ ( 2781 )
const { Console } = _ _nccwpck _require _ _ ( 6206 )
/ * *
* Gets the output of ` console.table(…) ` as a string .
* /
module . exports = class PendingInterceptorsFormatter {
constructor ( { disableColors } = { } ) {
this . transform = new Transform ( {
transform ( chunk , _enc , cb ) {
cb ( null , chunk )
}
} )
this . logger = new Console ( {
stdout : this . transform ,
inspectOptions : {
colors : ! disableColors && ! process . env . CI
}
} )
}
format ( pendingInterceptors ) {
const withPrettyHeaders = pendingInterceptors . map (
( { method , path , data : { statusCode } , persist , times , timesInvoked , origin } ) => ( {
Method : method ,
Origin : origin ,
Path : path ,
'Status code' : statusCode ,
Persistent : persist ? '✅' : '❌' ,
Invocations : timesInvoked ,
Remaining : persist ? Infinity : times - timesInvoked
} ) )
this . logger . table ( withPrettyHeaders )
return this . transform . read ( ) . toString ( )
}
2022-12-25 13:58:23 +08:00
}
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ 8891 :
/***/ ( ( module ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const singulars = {
pronoun : 'it' ,
is : 'is' ,
was : 'was' ,
this : 'this'
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const plurals = {
pronoun : 'they' ,
is : 'are' ,
was : 'were' ,
this : 'these'
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
module . exports = class Pluralizer {
constructor ( singular , plural ) {
this . singular = singular
this . plural = plural
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
pluralize ( count ) {
const one = count === 1
const keys = one ? singulars : plurals
const noun = one ? this . singular : this . plural
return { ... keys , count , noun }
}
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ 8266 :
/***/ ( ( module ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
/* eslint-disable */
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Extracted from node/lib/internal/fixed_queue.js
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.
const kSize = 2048;
const kMask = kSize - 1;

// Extracted from node/lib/internal/fixed_queue.js.
//
// FixedQueue is a singly-linked list of these fixed-size circular buffers.
// `top` is where the next push lands and `bottom` is the next slot to shift;
// both wrap around kSize via bit-masking. `top === bottom` means empty and
// `(top + 1) & kMask === bottom` means full — one slot is deliberately
// sacrificed so both checks stay a single comparison.
class FixedCircularBuffer {
  constructor () {
    this.bottom = 0;
    this.top = 0;
    this.list = new Array(kSize);
    this.next = null;
  }

  isEmpty () {
    return this.top === this.bottom;
  }

  isFull () {
    return ((this.top + 1) & kMask) === this.bottom;
  }

  push (data) {
    this.list[this.top] = data;
    this.top = (this.top + 1) & kMask;
  }

  shift () {
    const nextItem = this.list[this.bottom];
    if (nextItem === undefined) {
      return null;
    }
    // Clear the slot so it can be detected as empty later.
    this.list[this.bottom] = undefined;
    this.bottom = (this.bottom + 1) & kMask;
    return nextItem;
  }
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
module . exports = class FixedQueue {
constructor ( ) {
this . head = this . tail = new FixedCircularBuffer ( ) ;
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
isEmpty ( ) {
return this . head . isEmpty ( ) ;
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
push ( data ) {
if ( this . head . isFull ( ) ) {
// Head is full: Creates a new queue, sets the old queue's `.next` to it,
// and sets it as the new main queue.
this . head = this . head . next = new FixedCircularBuffer ( ) ;
}
this . head . push ( data ) ;
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
shift ( ) {
const tail = this . tail ;
const next = tail . shift ( ) ;
if ( tail . isEmpty ( ) && tail . next !== null ) {
// If there is another queue, it forms the new tail.
this . tail = tail . next ;
}
return next ;
}
} ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
/***/ 3198 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const DispatcherBase = _ _nccwpck _require _ _ ( 4839 )
const FixedQueue = _ _nccwpck _require _ _ ( 8266 )
const { kConnected , kSize , kRunning , kPending , kQueued , kBusy , kFree , kUrl , kClose , kDestroy , kDispatch } = _ _nccwpck _require _ _ ( 2785 )
const PoolStats = _ _nccwpck _require _ _ ( 9689 )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const kClients = Symbol ( 'clients' )
const kNeedDrain = Symbol ( 'needDrain' )
const kQueue = Symbol ( 'queue' )
const kClosedResolve = Symbol ( 'closed resolve' )
const kOnDrain = Symbol ( 'onDrain' )
const kOnConnect = Symbol ( 'onConnect' )
const kOnDisconnect = Symbol ( 'onDisconnect' )
const kOnConnectionError = Symbol ( 'onConnectionError' )
const kGetDispatcher = Symbol ( 'get dispatcher' )
const kAddClient = Symbol ( 'add client' )
const kRemoveClient = Symbol ( 'remove client' )
const kStats = Symbol ( 'stats' )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
class PoolBase extends DispatcherBase {
constructor ( ) {
super ( )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
this [ kQueue ] = new FixedQueue ( )
this [ kClients ] = [ ]
this [ kQueued ] = 0
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
const pool = this
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
this [ kOnDrain ] = function onDrain ( origin , targets ) {
const queue = pool [ kQueue ]
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
let needDrain = false
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
while ( ! needDrain ) {
const item = queue . shift ( )
if ( ! item ) {
break
}
pool [ kQueued ] --
needDrain = ! this . dispatch ( item . opts , item . handler )
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
this [ kNeedDrain ] = needDrain
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
if ( ! this [ kNeedDrain ] && pool [ kNeedDrain ] ) {
pool [ kNeedDrain ] = false
pool . emit ( 'drain' , origin , [ pool , ... targets ] )
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
if ( pool [ kClosedResolve ] && queue . isEmpty ( ) ) {
Promise
. all ( pool [ kClients ] . map ( c => c . close ( ) ) )
. then ( pool [ kClosedResolve ] )
}
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
this [ kOnConnect ] = ( origin , targets ) => {
pool . emit ( 'connect' , origin , [ pool , ... targets ] )
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
this [ kOnDisconnect ] = ( origin , targets , err ) => {
pool . emit ( 'disconnect' , origin , [ pool , ... targets ] , err )
}
this [ kOnConnectionError ] = ( origin , targets , err ) => {
pool . emit ( 'connectionError' , origin , [ pool , ... targets ] , err )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
this [ kStats ] = new PoolStats ( this )
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get [ kBusy ] ( ) {
return this [ kNeedDrain ]
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get [ kConnected ] ( ) {
return this [ kClients ] . filter ( client => client [ kConnected ] ) . length
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get [ kFree ] ( ) {
return this [ kClients ] . filter ( client => client [ kConnected ] && ! client [ kNeedDrain ] ) . length
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get [ kPending ] ( ) {
let ret = this [ kQueued ]
for ( const { [ kPending ] : pending } of this [ kClients ] ) {
ret += pending
}
return ret
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get [ kRunning ] ( ) {
let ret = 0
for ( const { [ kRunning ] : running } of this [ kClients ] ) {
ret += running
}
return ret
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get [ kSize ] ( ) {
let ret = this [ kQueued ]
for ( const { [ kSize ] : size } of this [ kClients ] ) {
ret += size
}
return ret
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get stats ( ) {
return this [ kStats ]
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
async [ kClose ] ( ) {
if ( this [ kQueue ] . isEmpty ( ) ) {
return Promise . all ( this [ kClients ] . map ( c => c . close ( ) ) )
} else {
return new Promise ( ( resolve ) => {
this [ kClosedResolve ] = resolve
} )
}
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
async [ kDestroy ] ( err ) {
while ( true ) {
const item = this [ kQueue ] . shift ( )
if ( ! item ) {
break
}
item . handler . onError ( err )
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
return Promise . all ( this [ kClients ] . map ( c => c . destroy ( err ) ) )
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
[ kDispatch ] ( opts , handler ) {
const dispatcher = this [ kGetDispatcher ] ( )
if ( ! dispatcher ) {
this [ kNeedDrain ] = true
this [ kQueue ] . push ( { opts , handler } )
this [ kQueued ] ++
} else if ( ! dispatcher . dispatch ( opts , handler ) ) {
dispatcher [ kNeedDrain ] = true
this [ kNeedDrain ] = ! this [ kGetDispatcher ] ( )
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
return ! this [ kNeedDrain ]
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
[ kAddClient ] ( client ) {
client
. on ( 'drain' , this [ kOnDrain ] )
. on ( 'connect' , this [ kOnConnect ] )
. on ( 'disconnect' , this [ kOnDisconnect ] )
. on ( 'connectionError' , this [ kOnConnectionError ] )
this [ kClients ] . push ( client )
if ( this [ kNeedDrain ] ) {
process . nextTick ( ( ) => {
if ( this [ kNeedDrain ] ) {
this [ kOnDrain ] ( client [ kUrl ] , [ this , client ] )
}
} )
}
return this
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
[ kRemoveClient ] ( client ) {
client . close ( ( ) => {
const idx = this [ kClients ] . indexOf ( client )
if ( idx !== - 1 ) {
this [ kClients ] . splice ( idx , 1 )
}
} )
this [ kNeedDrain ] = this [ kClients ] . some ( dispatcher => (
! dispatcher [ kNeedDrain ] &&
dispatcher . closed !== true &&
dispatcher . destroyed !== true
) )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
module . exports = {
PoolBase ,
kClients ,
kNeedDrain ,
kAddClient ,
kRemoveClient ,
kGetDispatcher
}
2020-08-27 20:39:35 +08:00
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/***/ 9689 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const { kFree , kConnected , kPending , kQueued , kRunning , kSize } = _ _nccwpck _require _ _ ( 2785 )
const kPool = Symbol ( 'pool' )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
class PoolStats {
constructor ( pool ) {
this [ kPool ] = pool
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
get connected ( ) {
return this [ kPool ] [ kConnected ]
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
get free ( ) {
return this [ kPool ] [ kFree ]
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
get pending ( ) {
return this [ kPool ] [ kPending ]
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
get queued ( ) {
return this [ kPool ] [ kQueued ]
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get running ( ) {
return this [ kPool ] [ kRunning ]
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
get size ( ) {
return this [ kPool ] [ kSize ]
}
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
module . exports = PoolStats
/***/ } ) ,
/***/ 4634 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
PoolBase ,
kClients ,
kNeedDrain ,
kAddClient ,
kGetDispatcher
} = _ _nccwpck _require _ _ ( 3198 )
const Client = _ _nccwpck _require _ _ ( 3598 )
const {
InvalidArgumentError
} = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { kUrl , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const buildConnector = _ _nccwpck _require _ _ ( 2067 )
const kOptions = Symbol ( 'options' )
const kConnections = Symbol ( 'connections' )
const kFactory = Symbol ( 'factory' )
function defaultFactory ( origin , opts ) {
return new Client ( origin , opts )
}
class Pool extends PoolBase {
constructor ( origin , {
connections ,
factory = defaultFactory ,
connect ,
connectTimeout ,
tls ,
maxCachedSessions ,
socketPath ,
autoSelectFamily ,
autoSelectFamilyAttemptTimeout ,
... options
} = { } ) {
super ( )
if ( connections != null && ( ! Number . isFinite ( connections ) || connections < 0 ) ) {
throw new InvalidArgumentError ( 'invalid connections' )
}
if ( typeof factory !== 'function' ) {
throw new InvalidArgumentError ( 'factory must be a function.' )
}
if ( connect != null && typeof connect !== 'function' && typeof connect !== 'object' ) {
throw new InvalidArgumentError ( 'connect must be a function or an object' )
}
if ( typeof connect !== 'function' ) {
connect = buildConnector ( {
... tls ,
maxCachedSessions ,
socketPath ,
timeout : connectTimeout == null ? 10e3 : connectTimeout ,
... ( util . nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily , autoSelectFamilyAttemptTimeout } : undefined ) ,
... connect
} )
}
this [ kInterceptors ] = options . interceptors && options . interceptors . Pool && Array . isArray ( options . interceptors . Pool )
? options . interceptors . Pool
: [ ]
this [ kConnections ] = connections || null
this [ kUrl ] = util . parseOrigin ( origin )
this [ kOptions ] = { ... util . deepClone ( options ) , connect }
this [ kOptions ] . interceptors = options . interceptors
? { ... options . interceptors }
: undefined
this [ kFactory ] = factory
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
[ kGetDispatcher ] ( ) {
let dispatcher = this [ kClients ] . find ( dispatcher => ! dispatcher [ kNeedDrain ] )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( dispatcher ) {
return dispatcher
}
if ( ! this [ kConnections ] || this [ kClients ] . length < this [ kConnections ] ) {
dispatcher = this [ kFactory ] ( this [ kUrl ] , this [ kOptions ] )
this [ kAddClient ] ( dispatcher )
}
return dispatcher
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
module . exports = Pool
/***/ } ) ,
/***/ 7858 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { kProxy , kClose , kDestroy , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const { URL } = _ _nccwpck _require _ _ ( 7310 )
const Agent = _ _nccwpck _require _ _ ( 7890 )
const Pool = _ _nccwpck _require _ _ ( 4634 )
const DispatcherBase = _ _nccwpck _require _ _ ( 4839 )
const { InvalidArgumentError , RequestAbortedError } = _ _nccwpck _require _ _ ( 8045 )
const buildConnector = _ _nccwpck _require _ _ ( 2067 )
const kAgent = Symbol ( 'proxy agent' )
const kClient = Symbol ( 'proxy client' )
const kProxyHeaders = Symbol ( 'proxy headers' )
const kRequestTls = Symbol ( 'request tls settings' )
const kProxyTls = Symbol ( 'proxy tls settings' )
const kConnectEndpoint = Symbol ( 'connect endpoint function' )
function defaultProtocolPort ( protocol ) {
return protocol === 'https:' ? 443 : 80
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
function buildProxyOptions ( opts ) {
if ( typeof opts === 'string' ) {
opts = { uri : opts }
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
if ( ! opts || ! opts . uri ) {
throw new InvalidArgumentError ( 'Proxy opts.uri is mandatory' )
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
return {
uri : opts . uri ,
protocol : opts . protocol || 'https'
}
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
function defaultFactory ( origin , opts ) {
return new Pool ( origin , opts )
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
class ProxyAgent extends DispatcherBase {
constructor ( opts ) {
super ( opts )
this [ kProxy ] = buildProxyOptions ( opts )
this [ kAgent ] = new Agent ( opts )
this [ kInterceptors ] = opts . interceptors && opts . interceptors . ProxyAgent && Array . isArray ( opts . interceptors . ProxyAgent )
? opts . interceptors . ProxyAgent
: [ ]
if ( typeof opts === 'string' ) {
opts = { uri : opts }
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( ! opts || ! opts . uri ) {
throw new InvalidArgumentError ( 'Proxy opts.uri is mandatory' )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
const { clientFactory = defaultFactory } = opts
if ( typeof clientFactory !== 'function' ) {
throw new InvalidArgumentError ( 'Proxy opts.clientFactory must be a function.' )
}
this [ kRequestTls ] = opts . requestTls
this [ kProxyTls ] = opts . proxyTls
this [ kProxyHeaders ] = opts . headers || { }
if ( opts . auth && opts . token ) {
throw new InvalidArgumentError ( 'opts.auth cannot be used in combination with opts.token' )
} else if ( opts . auth ) {
/* @deprecated in favour of opts.token */
this [ kProxyHeaders ] [ 'proxy-authorization' ] = ` Basic ${ opts . auth } `
} else if ( opts . token ) {
this [ kProxyHeaders ] [ 'proxy-authorization' ] = opts . token
}
const resolvedUrl = new URL ( opts . uri )
const { origin , port , host } = resolvedUrl
const connect = buildConnector ( { ... opts . proxyTls } )
this [ kConnectEndpoint ] = buildConnector ( { ... opts . requestTls } )
this [ kClient ] = clientFactory ( resolvedUrl , { connect } )
this [ kAgent ] = new Agent ( {
... opts ,
connect : async ( opts , callback ) => {
let requestedHost = opts . host
if ( ! opts . port ) {
requestedHost += ` : ${ defaultProtocolPort ( opts . protocol ) } `
}
try {
const { socket , statusCode } = await this [ kClient ] . connect ( {
origin ,
port ,
path : requestedHost ,
signal : opts . signal ,
headers : {
... this [ kProxyHeaders ] ,
host
}
} )
if ( statusCode !== 200 ) {
socket . on ( 'error' , ( ) => { } ) . destroy ( )
callback ( new RequestAbortedError ( 'Proxy response !== 200 when HTTP Tunneling' ) )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
if ( opts . protocol !== 'https:' ) {
callback ( null , socket )
return
}
let servername
if ( this [ kRequestTls ] ) {
servername = this [ kRequestTls ] . servername
} else {
servername = opts . servername
}
this [ kConnectEndpoint ] ( { ... opts , servername , httpSocket : socket } , callback )
} catch ( err ) {
callback ( err )
2020-12-06 17:56:38 +08:00
}
}
2023-07-27 11:01:06 +00:00
} )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
dispatch ( opts , handler ) {
const { host } = new URL ( opts . origin )
const headers = buildHeaders ( opts . headers )
throwIfProxyAuthIsSent ( headers )
return this [ kAgent ] . dispatch (
{
... opts ,
headers : {
... headers ,
host
}
} ,
handler
)
}
async [ kClose ] ( ) {
await this [ kAgent ] . close ( )
await this [ kClient ] . close ( )
}
async [ kDestroy ] ( ) {
await this [ kAgent ] . destroy ( )
await this [ kClient ] . destroy ( )
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* @ param { string [ ] | Record < string , string > } headers
* @ returns { Record < string , string > }
* /
function buildHeaders ( headers ) {
// When using undici.fetch, the headers list is stored
// as an array.
if ( Array . isArray ( headers ) ) {
/** @type {Record<string, string>} */
const headersPair = { }
for ( let i = 0 ; i < headers . length ; i += 2 ) {
headersPair [ headers [ i ] ] = headers [ i + 1 ]
}
return headersPair
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
return headers
}
/ * *
* @ param { Record < string , string > } headers
*
* Previous versions of ProxyAgent suggests the Proxy - Authorization in request headers
* Nevertheless , it was changed and to avoid a security vulnerability by end users
* this check was created .
* It should be removed in the next major version for performance reasons
* /
function throwIfProxyAuthIsSent ( headers ) {
const existProxyAuth = headers && Object . keys ( headers )
. find ( ( key ) => key . toLowerCase ( ) === 'proxy-authorization' )
if ( existProxyAuth ) {
throw new InvalidArgumentError ( 'Proxy-Authorization should be sent in ProxyAgent constructor' )
2020-12-06 17:56:38 +08:00
}
}
2023-07-27 11:01:06 +00:00
module . exports = ProxyAgent
/***/ } ) ,
/***/ 9459 :
/***/ ( ( module ) => {
"use strict" ;
let fastNow = Date . now ( )
let fastNowTimeout
const fastTimers = [ ]
function onTimeout ( ) {
fastNow = Date . now ( )
let len = fastTimers . length
let idx = 0
while ( idx < len ) {
const timer = fastTimers [ idx ]
if ( timer . state === 0 ) {
timer . state = fastNow + timer . delay
} else if ( timer . state > 0 && fastNow >= timer . state ) {
timer . state = - 1
timer . callback ( timer . opaque )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
if ( timer . state === - 1 ) {
timer . state = - 2
if ( idx !== len - 1 ) {
fastTimers [ idx ] = fastTimers . pop ( )
} else {
fastTimers . pop ( )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
len -= 1
} else {
idx += 1
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
}
if ( fastTimers . length > 0 ) {
refreshTimeout ( )
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
function refreshTimeout ( ) {
if ( fastNowTimeout && fastNowTimeout . refresh ) {
fastNowTimeout . refresh ( )
} else {
clearTimeout ( fastNowTimeout )
fastNowTimeout = setTimeout ( onTimeout , 1e3 )
if ( fastNowTimeout . unref ) {
fastNowTimeout . unref ( )
}
}
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
class Timeout {
constructor ( callback , delay , opaque ) {
this . callback = callback
this . delay = delay
this . opaque = opaque
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
// -2 not in timer list
// -1 in timer list but inactive
// 0 in timer list waiting for time
// > 0 in timer list waiting for time to expire
this . state = - 2
this . refresh ( )
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
refresh ( ) {
if ( this . state === - 2 ) {
fastTimers . push ( this )
if ( ! fastNowTimeout || fastTimers . length === 1 ) {
refreshTimeout ( )
}
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
this . state = 0
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
clear ( ) {
this . state = - 1
}
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
module . exports = {
setTimeout ( callback , delay , opaque ) {
return delay < 1e3
? setTimeout ( callback , delay , opaque )
: new Timeout ( callback , delay , opaque )
} ,
clearTimeout ( timeout ) {
if ( timeout instanceof Timeout ) {
timeout . clear ( )
} else {
clearTimeout ( timeout )
}
}
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/***/ 5354 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
const { randomBytes , createHash } = _ _nccwpck _require _ _ ( 6113 )
const diagnosticsChannel = _ _nccwpck _require _ _ ( 7643 )
const { uid , states } = _ _nccwpck _require _ _ ( 9188 )
const {
kReadyState ,
kSentClose ,
kByteParser ,
kReceivedClose
} = _ _nccwpck _require _ _ ( 7578 )
const { fireEvent , failWebsocketConnection } = _ _nccwpck _require _ _ ( 5515 )
const { CloseEvent } = _ _nccwpck _require _ _ ( 2611 )
const { makeRequest } = _ _nccwpck _require _ _ ( 8359 )
const { fetching } = _ _nccwpck _require _ _ ( 4881 )
const { Headers } = _ _nccwpck _require _ _ ( 554 )
const { getGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
const channels = { }
channels . open = diagnosticsChannel . channel ( 'undici:websocket:open' )
channels . close = diagnosticsChannel . channel ( 'undici:websocket:close' )
channels . socketError = diagnosticsChannel . channel ( 'undici:websocket:socket_error' )
/ * *
* @ see https : //websockets.spec.whatwg.org/#concept-websocket-establish
* @ param { URL } url
* @ param { string | string [ ] } protocols
* @ param { import ( './websocket' ) . WebSocket } ws
* @ param { ( response : any ) => void } onEstablish
* @ param { Partial < import ( '../../types/websocket' ) . WebSocketInit > } options
* /
function establishWebSocketConnection ( url , protocols , ws , onEstablish , options ) {
// 1. Let requestURL be a copy of url, with its scheme set to "http", if url’ s
// scheme is "ws", and to "https" otherwise.
const requestURL = url
requestURL . protocol = url . protocol === 'ws:' ? 'http:' : 'https:'
// 2. Let request be a new request, whose URL is requestURL, client is client,
// service-workers mode is "none", referrer is "no-referrer", mode is
// "websocket", credentials mode is "include", cache mode is "no-store" ,
// and redirect mode is "error".
const request = makeRequest ( {
urlList : [ requestURL ] ,
serviceWorkers : 'none' ,
referrer : 'no-referrer' ,
mode : 'websocket' ,
credentials : 'include' ,
cache : 'no-store' ,
redirect : 'error'
} )
// Note: undici extension, allow setting custom headers.
if ( options . headers ) {
const headersList = new Headers ( options . headers ) [ kHeadersList ]
request . headersList = headersList
}
// 3. Append (`Upgrade`, `websocket`) to request’ s header list.
// 4. Append (`Connection`, `Upgrade`) to request’ s header list.
// Note: both of these are handled by undici currently.
// https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397
// 5. Let keyValue be a nonce consisting of a randomly selected
// 16-byte value that has been forgiving-base64-encoded and
// isomorphic encoded.
const keyValue = randomBytes ( 16 ) . toString ( 'base64' )
// 6. Append (`Sec-WebSocket-Key`, keyValue) to request’ s
// header list.
request . headersList . append ( 'sec-websocket-key' , keyValue )
// 7. Append (`Sec-WebSocket-Version`, `13`) to request’ s
// header list.
request . headersList . append ( 'sec-websocket-version' , '13' )
// 8. For each protocol in protocols, combine
// (`Sec-WebSocket-Protocol`, protocol) in request’ s header
// list.
for ( const protocol of protocols ) {
request . headersList . append ( 'sec-websocket-protocol' , protocol )
}
// 9. Let permessageDeflate be a user-agent defined
// "permessage-deflate" extension header value.
// https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673
// TODO: enable once permessage-deflate is supported
const permessageDeflate = '' // 'permessage-deflate; 15'
// 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to
// request’ s header list.
// request.headersList.append('sec-websocket-extensions', permessageDeflate)
// 11. Fetch request with useParallelQueue set to true, and
// processResponse given response being these steps:
const controller = fetching ( {
request ,
useParallelQueue : true ,
dispatcher : options . dispatcher ? ? getGlobalDispatcher ( ) ,
processResponse ( response ) {
// 1. If response is a network error or its status is not 101,
// fail the WebSocket connection.
if ( response . type === 'error' || response . status !== 101 ) {
failWebsocketConnection ( ws , 'Received network error or non-101 status code.' )
return
}
// 2. If protocols is not the empty list and extracting header
// list values given `Sec-WebSocket-Protocol` and response’ s
// header list results in null, failure, or the empty byte
// sequence, then fail the WebSocket connection.
if ( protocols . length !== 0 && ! response . headersList . get ( 'Sec-WebSocket-Protocol' ) ) {
failWebsocketConnection ( ws , 'Server did not respond with sent protocols.' )
return
}
// 3. Follow the requirements stated step 2 to step 6, inclusive,
// of the last set of steps in section 4.1 of The WebSocket
// Protocol to validate response. This either results in fail
// the WebSocket connection or the WebSocket connection is
// established.
// 2. If the response lacks an |Upgrade| header field or the |Upgrade|
// header field contains a value that is not an ASCII case-
// insensitive match for the value "websocket", the client MUST
// _Fail the WebSocket Connection_.
if ( response . headersList . get ( 'Upgrade' ) ? . toLowerCase ( ) !== 'websocket' ) {
failWebsocketConnection ( ws , 'Server did not set Upgrade header to "websocket".' )
return
}
// 3. If the response lacks a |Connection| header field or the
// |Connection| header field doesn't contain a token that is an
// ASCII case-insensitive match for the value "Upgrade", the client
// MUST _Fail the WebSocket Connection_.
if ( response . headersList . get ( 'Connection' ) ? . toLowerCase ( ) !== 'upgrade' ) {
failWebsocketConnection ( ws , 'Server did not set Connection header to "upgrade".' )
return
}
// 4. If the response lacks a |Sec-WebSocket-Accept| header field or
// the |Sec-WebSocket-Accept| contains a value other than the
// base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket-
// Key| (as a string, not base64-decoded) with the string "258EAFA5-
// E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and
// trailing whitespace, the client MUST _Fail the WebSocket
// Connection_.
const secWSAccept = response . headersList . get ( 'Sec-WebSocket-Accept' )
const digest = createHash ( 'sha1' ) . update ( keyValue + uid ) . digest ( 'base64' )
if ( secWSAccept !== digest ) {
failWebsocketConnection ( ws , 'Incorrect hash received in Sec-WebSocket-Accept header.' )
return
}
// 5. If the response includes a |Sec-WebSocket-Extensions| header
// field and this header field indicates the use of an extension
// that was not present in the client's handshake (the server has
// indicated an extension not requested by the client), the client
// MUST _Fail the WebSocket Connection_. (The parsing of this
// header field to determine which extensions are requested is
// discussed in Section 9.1.)
const secExtension = response . headersList . get ( 'Sec-WebSocket-Extensions' )
if ( secExtension !== null && secExtension !== permessageDeflate ) {
failWebsocketConnection ( ws , 'Received different permessage-deflate than the one set.' )
return
}
// 6. If the response includes a |Sec-WebSocket-Protocol| header field
// and this header field indicates the use of a subprotocol that was
// not present in the client's handshake (the server has indicated a
// subprotocol not requested by the client), the client MUST _Fail
// the WebSocket Connection_.
const secProtocol = response . headersList . get ( 'Sec-WebSocket-Protocol' )
if ( secProtocol !== null && secProtocol !== request . headersList . get ( 'Sec-WebSocket-Protocol' ) ) {
failWebsocketConnection ( ws , 'Protocol was not set in the opening handshake.' )
return
}
response . socket . on ( 'data' , onSocketData )
response . socket . on ( 'close' , onSocketClose )
response . socket . on ( 'error' , onSocketError )
if ( channels . open . hasSubscribers ) {
channels . open . publish ( {
address : response . socket . address ( ) ,
protocol : secProtocol ,
extensions : secExtension
} )
}
onEstablish ( response )
}
} )
return controller
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* @ param { Buffer } chunk
* /
function onSocketData ( chunk ) {
if ( ! this . ws [ kByteParser ] . write ( chunk ) ) {
this . pause ( )
}
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* @ see https : //websockets.spec.whatwg.org/#feedback-from-the-protocol
* @ see https : //datatracker.ietf.org/doc/html/rfc6455#section-7.1.4
* /
function onSocketClose ( ) {
const { ws } = this
// If the TCP connection was closed after the
// WebSocket closing handshake was completed, the WebSocket connection
// is said to have been closed _cleanly_.
const wasClean = ws [ kSentClose ] && ws [ kReceivedClose ]
let code = 1005
let reason = ''
const result = ws [ kByteParser ] . closingInfo
if ( result ) {
code = result . code ? ? 1005
reason = result . reason
} else if ( ! ws [ kSentClose ] ) {
// If _The WebSocket
// Connection is Closed_ and no Close control frame was received by the
// endpoint (such as could occur if the underlying transport connection
// is lost), _The WebSocket Connection Close Code_ is considered to be
// 1006.
code = 1006
}
// 1. Change the ready state to CLOSED (3).
ws [ kReadyState ] = states . CLOSED
// 2. If the user agent was required to fail the WebSocket
// connection, or if the WebSocket connection was closed
// after being flagged as full, fire an event named error
// at the WebSocket object.
// TODO
// 3. Fire an event named close at the WebSocket object,
// using CloseEvent, with the wasClean attribute
// initialized to true if the connection closed cleanly
// and false otherwise, the code attribute initialized to
// the WebSocket connection close code, and the reason
// attribute initialized to the result of applying UTF-8
// decode without BOM to the WebSocket connection close
// reason.
fireEvent ( 'close' , ws , CloseEvent , {
wasClean , code , reason
2020-08-27 20:39:35 +08:00
} )
2023-07-27 11:01:06 +00:00
if ( channels . close . hasSubscribers ) {
channels . close . publish ( {
websocket : ws ,
code ,
reason
} )
}
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
function onSocketError ( error ) {
const { ws } = this
ws [ kReadyState ] = states . CLOSING
if ( channels . socketError . hasSubscribers ) {
channels . socketError . publish ( error )
}
this . destroy ( )
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
module . exports = {
establishWebSocketConnection
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
/***/ 9188 :
/***/ ( ( module ) => {
"use strict" ;
// This is a Globally Unique Identifier unique used
// to validate that the endpoint accepts websocket
// connections.
// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3
const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
/** @type {PropertyDescriptor} */
const staticPropertyDescriptors = {
enumerable : true ,
writable : false ,
configurable : false
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
const states = {
CONNECTING : 0 ,
OPEN : 1 ,
CLOSING : 2 ,
CLOSED : 3
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
const opcodes = {
CONTINUATION : 0x0 ,
TEXT : 0x1 ,
BINARY : 0x2 ,
CLOSE : 0x8 ,
PING : 0x9 ,
PONG : 0xA
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
const maxUnsigned16Bit = 2 * * 16 - 1 // 65535
const parserStates = {
INFO : 0 ,
PAYLOADLENGTH _16 : 2 ,
PAYLOADLENGTH _64 : 3 ,
READ _DATA : 4
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
const emptyBuffer = Buffer . allocUnsafe ( 0 )
module . exports = {
uid ,
staticPropertyDescriptors ,
states ,
opcodes ,
maxUnsigned16Bit ,
parserStates ,
emptyBuffer
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/***/ 2611 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
const { MessagePort } = _ _nccwpck _require _ _ ( 1267 )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* @ see https : //html.spec.whatwg.org/multipage/comms.html#messageevent
* /
class MessageEvent extends Event {
# eventInit
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
constructor ( type , eventInitDict = { } ) {
webidl . argumentLengthCheck ( arguments , 1 , { header : 'MessageEvent constructor' } )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
type = webidl . converters . DOMString ( type )
eventInitDict = webidl . converters . MessageEventInit ( eventInitDict )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
super ( type , eventInitDict )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
this . # eventInit = eventInitDict
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get data ( ) {
webidl . brandCheck ( this , MessageEvent )
return this . # eventInit . data
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get origin ( ) {
webidl . brandCheck ( this , MessageEvent )
return this . # eventInit . origin
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get lastEventId ( ) {
webidl . brandCheck ( this , MessageEvent )
return this . # eventInit . lastEventId
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get source ( ) {
webidl . brandCheck ( this , MessageEvent )
return this . # eventInit . source
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get ports ( ) {
webidl . brandCheck ( this , MessageEvent )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( ! Object . isFrozen ( this . # eventInit . ports ) ) {
Object . freeze ( this . # eventInit . ports )
}
return this . # eventInit . ports
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
initMessageEvent (
type ,
bubbles = false ,
cancelable = false ,
data = null ,
origin = '' ,
lastEventId = '' ,
source = null ,
ports = [ ]
) {
webidl . brandCheck ( this , MessageEvent )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'MessageEvent.initMessageEvent' } )
return new MessageEvent ( type , {
bubbles , cancelable , data , origin , lastEventId , source , ports
} )
}
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* @ see https : //websockets.spec.whatwg.org/#the-closeevent-interface
* /
class CloseEvent extends Event {
# eventInit
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
constructor ( type , eventInitDict = { } ) {
webidl . argumentLengthCheck ( arguments , 1 , { header : 'CloseEvent constructor' } )
type = webidl . converters . DOMString ( type )
eventInitDict = webidl . converters . CloseEventInit ( eventInitDict )
super ( type , eventInitDict )
this . # eventInit = eventInitDict
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get wasClean ( ) {
webidl . brandCheck ( this , CloseEvent )
return this . # eventInit . wasClean
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get code ( ) {
webidl . brandCheck ( this , CloseEvent )
return this . # eventInit . code
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get reason ( ) {
webidl . brandCheck ( this , CloseEvent )
return this . # eventInit . reason
}
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface
class ErrorEvent extends Event {
# eventInit
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
constructor ( type , eventInitDict ) {
webidl . argumentLengthCheck ( arguments , 1 , { header : 'ErrorEvent constructor' } )
super ( type , eventInitDict )
type = webidl . converters . DOMString ( type )
eventInitDict = webidl . converters . ErrorEventInit ( eventInitDict ? ? { } )
this . # eventInit = eventInitDict
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get message ( ) {
webidl . brandCheck ( this , ErrorEvent )
return this . # eventInit . message
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
get filename ( ) {
webidl . brandCheck ( this , ErrorEvent )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
return this . # eventInit . filename
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
get lineno ( ) {
webidl . brandCheck ( this , ErrorEvent )
return this . # eventInit . lineno
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
get colno ( ) {
webidl . brandCheck ( this , ErrorEvent )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
return this . # eventInit . colno
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
get error ( ) {
webidl . brandCheck ( this , ErrorEvent )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
return this . # eventInit . error
}
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
Object . defineProperties ( MessageEvent . prototype , {
[ Symbol . toStringTag ] : {
value : 'MessageEvent' ,
configurable : true
} ,
data : kEnumerableProperty ,
origin : kEnumerableProperty ,
lastEventId : kEnumerableProperty ,
source : kEnumerableProperty ,
ports : kEnumerableProperty ,
initMessageEvent : kEnumerableProperty
} )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
Object . defineProperties ( CloseEvent . prototype , {
[ Symbol . toStringTag ] : {
value : 'CloseEvent' ,
configurable : true
} ,
reason : kEnumerableProperty ,
code : kEnumerableProperty ,
wasClean : kEnumerableProperty
} )
Object . defineProperties ( ErrorEvent . prototype , {
[ Symbol . toStringTag ] : {
value : 'ErrorEvent' ,
configurable : true
} ,
message : kEnumerableProperty ,
filename : kEnumerableProperty ,
lineno : kEnumerableProperty ,
colno : kEnumerableProperty ,
error : kEnumerableProperty
} )
webidl . converters . MessagePort = webidl . interfaceConverter ( MessagePort )
webidl . converters [ 'sequence<MessagePort>' ] = webidl . sequenceConverter (
webidl . converters . MessagePort
)
const eventInit = [
{
key : 'bubbles' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'cancelable' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'composed' ,
converter : webidl . converters . boolean ,
defaultValue : false
}
]
webidl . converters . MessageEventInit = webidl . dictionaryConverter ( [
... eventInit ,
{
key : 'data' ,
converter : webidl . converters . any ,
defaultValue : null
} ,
{
key : 'origin' ,
converter : webidl . converters . USVString ,
defaultValue : ''
} ,
{
key : 'lastEventId' ,
converter : webidl . converters . DOMString ,
defaultValue : ''
} ,
{
key : 'source' ,
// Node doesn't implement WindowProxy or ServiceWorker, so the only
// valid value for source is a MessagePort.
converter : webidl . nullableConverter ( webidl . converters . MessagePort ) ,
defaultValue : null
} ,
{
key : 'ports' ,
converter : webidl . converters [ 'sequence<MessagePort>' ] ,
get defaultValue ( ) {
return [ ]
2020-08-27 20:39:35 +08:00
}
}
2023-07-27 11:01:06 +00:00
] )
webidl . converters . CloseEventInit = webidl . dictionaryConverter ( [
... eventInit ,
{
key : 'wasClean' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'code' ,
converter : webidl . converters [ 'unsigned short' ] ,
defaultValue : 0
} ,
{
key : 'reason' ,
converter : webidl . converters . USVString ,
defaultValue : ''
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
] )
webidl . converters . ErrorEventInit = webidl . dictionaryConverter ( [
... eventInit ,
{
key : 'message' ,
converter : webidl . converters . DOMString ,
defaultValue : ''
} ,
{
key : 'filename' ,
converter : webidl . converters . USVString ,
defaultValue : ''
} ,
{
key : 'lineno' ,
converter : webidl . converters [ 'unsigned long' ] ,
defaultValue : 0
} ,
{
key : 'colno' ,
converter : webidl . converters [ 'unsigned long' ] ,
defaultValue : 0
} ,
{
key : 'error' ,
converter : webidl . converters . any
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
] )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
module . exports = {
MessageEvent ,
CloseEvent ,
ErrorEvent
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/***/ } ) ,
/***/ 5444 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { randomBytes } = _ _nccwpck _require _ _ ( 6113 )
const { maxUnsigned16Bit } = _ _nccwpck _require _ _ ( 9188 )
class WebsocketFrameSend {
/ * *
* @ param { Buffer | undefined } data
* /
constructor ( data ) {
this . frameData = data
this . maskKey = randomBytes ( 4 )
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
createFrame ( opcode ) {
const bodyLength = this . frameData ? . byteLength ? ? 0
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/** @type {number} */
let payloadLength = bodyLength // 0-125
let offset = 6
if ( bodyLength > maxUnsigned16Bit ) {
offset += 8 // payload length is next 8 bytes
payloadLength = 127
} else if ( bodyLength > 125 ) {
offset += 2 // payload length is next 2 bytes
payloadLength = 126
}
const buffer = Buffer . allocUnsafe ( bodyLength + offset )
// Clear first 2 bytes, everything else is overwritten
buffer [ 0 ] = buffer [ 1 ] = 0
buffer [ 0 ] |= 0x80 // FIN
buffer [ 0 ] = ( buffer [ 0 ] & 0xF0 ) + opcode // opcode
/*! ws. MIT License. Einar Otto Stangvik <einaros@gmail.com> */
buffer [ offset - 4 ] = this . maskKey [ 0 ]
buffer [ offset - 3 ] = this . maskKey [ 1 ]
buffer [ offset - 2 ] = this . maskKey [ 2 ]
buffer [ offset - 1 ] = this . maskKey [ 3 ]
buffer [ 1 ] = payloadLength
if ( payloadLength === 126 ) {
buffer . writeUInt16BE ( bodyLength , 2 )
} else if ( payloadLength === 127 ) {
// Clear extended payload length
buffer [ 2 ] = buffer [ 3 ] = 0
buffer . writeUIntBE ( bodyLength , 4 , 6 )
}
buffer [ 1 ] |= 0x80 // MASK
// mask body
for ( let i = 0 ; i < bodyLength ; i ++ ) {
buffer [ offset + i ] = this . frameData [ i ] ^ this . maskKey [ i % 4 ]
}
return buffer
}
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
module . exports = {
WebsocketFrameSend
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
/***/ 1688 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const { Writable } = _ _nccwpck _require _ _ ( 2781 )
const diagnosticsChannel = _ _nccwpck _require _ _ ( 7643 )
const { parserStates , opcodes , states , emptyBuffer } = _ _nccwpck _require _ _ ( 9188 )
const { kReadyState , kSentClose , kResponse , kReceivedClose } = _ _nccwpck _require _ _ ( 7578 )
const { isValidStatusCode , failWebsocketConnection , websocketMessageReceived } = _ _nccwpck _require _ _ ( 5515 )
const { WebsocketFrameSend } = _ _nccwpck _require _ _ ( 5444 )
// This code was influenced by ws released under the MIT license.
// Copyright (c) 2011 Einar Otto Stangvik <einaros@gmail.com>
// Copyright (c) 2013 Arnout Kazemier and contributors
// Copyright (c) 2016 Luigi Pinca and contributors
const channels = { }
channels . ping = diagnosticsChannel . channel ( 'undici:websocket:ping' )
channels . pong = diagnosticsChannel . channel ( 'undici:websocket:pong' )
class ByteParser extends Writable {
# buffers = [ ]
# byteOffset = 0
# state = parserStates . INFO
# info = { }
# fragments = [ ]
constructor ( ws ) {
super ( )
this . ws = ws
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* @ param { Buffer } chunk
* @ param { ( ) => void } callback
* /
_write ( chunk , _ , callback ) {
this . # buffers . push ( chunk )
this . # byteOffset += chunk . length
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
this . run ( callback )
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* Runs whenever a new chunk is received .
* Callback is called whenever there are no more chunks buffering ,
* or not enough bytes are buffered to parse .
* /
run ( callback ) {
while ( true ) {
if ( this . # state === parserStates . INFO ) {
// If there aren't enough bytes to parse the payload length, etc.
if ( this . # byteOffset < 2 ) {
return callback ( )
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const buffer = this . consume ( 2 )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
this . # info . fin = ( buffer [ 0 ] & 0x80 ) !== 0
this . # info . opcode = buffer [ 0 ] & 0x0F
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// If we receive a fragmented message, we use the type of the first
// frame to parse the full message as binary/text, when it's terminated
this . # info . originalOpcode ? ? = this . # info . opcode
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
this . # info . fragmented = ! this . # info . fin && this . # info . opcode !== opcodes . CONTINUATION
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
if ( this . # info . fragmented && this . # info . opcode !== opcodes . BINARY && this . # info . opcode !== opcodes . TEXT ) {
// Only text and binary frames can be fragmented
failWebsocketConnection ( this . ws , 'Invalid frame type was fragmented.' )
return
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const payloadLength = buffer [ 1 ] & 0x7F
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
if ( payloadLength <= 125 ) {
this . # info . payloadLength = payloadLength
this . # state = parserStates . READ _DATA
} else if ( payloadLength === 126 ) {
this . # state = parserStates . PAYLOADLENGTH _16
} else if ( payloadLength === 127 ) {
this . # state = parserStates . PAYLOADLENGTH _64
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
if ( this . # info . fragmented && payloadLength > 125 ) {
// A fragmented frame can't be fragmented itself
failWebsocketConnection ( this . ws , 'Fragmented frame exceeded 125 bytes.' )
return
} else if (
( this . # info . opcode === opcodes . PING ||
this . # info . opcode === opcodes . PONG ||
this . # info . opcode === opcodes . CLOSE ) &&
payloadLength > 125
) {
// Control frames can have a payload length of 125 bytes MAX
failWebsocketConnection ( this . ws , 'Payload length for control frame exceeded 125 bytes.' )
return
} else if ( this . # info . opcode === opcodes . CLOSE ) {
if ( payloadLength === 1 ) {
failWebsocketConnection ( this . ws , 'Received close frame with a 1-byte body.' )
return
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const body = this . consume ( payloadLength )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
this . # info . closeInfo = this . parseCloseBody ( false , body )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
if ( ! this . ws [ kSentClose ] ) {
// If an endpoint receives a Close frame and did not previously send a
// Close frame, the endpoint MUST send a Close frame in response. (When
// sending a Close frame in response, the endpoint typically echos the
// status code it received.)
const body = Buffer . allocUnsafe ( 2 )
body . writeUInt16BE ( this . # info . closeInfo . code , 0 )
const closeFrame = new WebsocketFrameSend ( body )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
this . ws [ kResponse ] . socket . write (
closeFrame . createFrame ( opcodes . CLOSE ) ,
( err ) => {
if ( ! err ) {
this . ws [ kSentClose ] = true
}
}
)
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
// Upon either sending or receiving a Close control frame, it is said
// that _The WebSocket Closing Handshake is Started_ and that the
// WebSocket connection is in the CLOSING state.
this . ws [ kReadyState ] = states . CLOSING
this . ws [ kReceivedClose ] = true
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
this . end ( )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
return
} else if ( this . # info . opcode === opcodes . PING ) {
// Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in
// response, unless it already received a Close frame.
// A Pong frame sent in response to a Ping frame must have identical
// "Application data"
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const body = this . consume ( payloadLength )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
if ( ! this . ws [ kReceivedClose ] ) {
const frame = new WebsocketFrameSend ( body )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
this . ws [ kResponse ] . socket . write ( frame . createFrame ( opcodes . PONG ) )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( channels . ping . hasSubscribers ) {
channels . ping . publish ( {
payload : body
} )
}
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
this . # state = parserStates . INFO
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( this . # byteOffset > 0 ) {
continue
} else {
callback ( )
return
}
} else if ( this . # info . opcode === opcodes . PONG ) {
// A Pong frame MAY be sent unsolicited. This serves as a
// unidirectional heartbeat. A response to an unsolicited Pong frame is
// not expected.
const body = this . consume ( payloadLength )
if ( channels . pong . hasSubscribers ) {
channels . pong . publish ( {
payload : body
} )
}
if ( this . # byteOffset > 0 ) {
continue
} else {
callback ( )
return
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
} else if ( this . # state === parserStates . PAYLOADLENGTH _16 ) {
if ( this . # byteOffset < 2 ) {
return callback ( )
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const buffer = this . consume ( 2 )
this . # info . payloadLength = buffer . readUInt16BE ( 0 )
this . # state = parserStates . READ _DATA
} else if ( this . # state === parserStates . PAYLOADLENGTH _64 ) {
if ( this . # byteOffset < 8 ) {
return callback ( )
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const buffer = this . consume ( 8 )
const upper = buffer . readUInt32BE ( 0 )
// 2^31 is the maxinimum bytes an arraybuffer can contain
// on 32-bit systems. Although, on 64-bit systems, this is
// 2^53-1 bytes.
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length
// https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275
// https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e
if ( upper > 2 * * 31 - 1 ) {
failWebsocketConnection ( this . ws , 'Received payload length > 2^31 bytes.' )
return
}
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const lower = buffer . readUInt32BE ( 4 )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
this . # info . payloadLength = ( upper << 8 ) + lower
this . # state = parserStates . READ _DATA
} else if ( this . # state === parserStates . READ _DATA ) {
if ( this . # byteOffset < this . # info . payloadLength ) {
// If there is still more data in this chunk that needs to be read
return callback ( )
} else if ( this . # byteOffset >= this . # info . payloadLength ) {
// If the server sent multiple frames in a single chunk
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
const body = this . consume ( this . # info . payloadLength )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
this . # fragments . push ( body )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
// If the frame is unfragmented, or a fragmented frame was terminated,
// a message was received
if ( ! this . # info . fragmented || ( this . # info . fin && this . # info . opcode === opcodes . CONTINUATION ) ) {
const fullMessage = Buffer . concat ( this . # fragments )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
websocketMessageReceived ( this . ws , this . # info . originalOpcode , fullMessage )
2020-08-27 20:39:35 +08:00
2023-07-27 11:01:06 +00:00
this . # info = { }
this . # fragments . length = 0
}
this . # state = parserStates . INFO
}
}
if ( this . # byteOffset > 0 ) {
continue
} else {
callback ( )
break
}
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* Take n bytes from the buffered Buffers
* @ param { number } n
* @ returns { Buffer | null }
* /
consume ( n ) {
if ( n > this . # byteOffset ) {
return null
} else if ( n === 0 ) {
return emptyBuffer
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( this . # buffers [ 0 ] . length === n ) {
this . # byteOffset -= this . # buffers [ 0 ] . length
return this . # buffers . shift ( )
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
const buffer = Buffer . allocUnsafe ( n )
let offset = 0
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
while ( offset !== n ) {
const next = this . # buffers [ 0 ]
const { length } = next
if ( length + offset === n ) {
buffer . set ( this . # buffers . shift ( ) , offset )
break
} else if ( length + offset > n ) {
buffer . set ( next . subarray ( 0 , n - offset ) , offset )
this . # buffers [ 0 ] = next . subarray ( n - offset )
break
} else {
buffer . set ( this . # buffers . shift ( ) , offset )
offset += next . length
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
this . # byteOffset -= n
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
return buffer
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
parseCloseBody ( onlyCode , data ) {
// https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5
/** @type {number|undefined} */
let code
if ( data . length >= 2 ) {
// _The WebSocket Connection Close Code_ is
// defined as the status code (Section 7.4) contained in the first Close
// control frame received by the application
code = data . readUInt16BE ( 0 )
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( onlyCode ) {
if ( ! isValidStatusCode ( code ) ) {
return null
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
return { code }
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6
/** @type {Buffer} */
let reason = data . subarray ( 2 )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// Remove BOM
if ( reason [ 0 ] === 0xEF && reason [ 1 ] === 0xBB && reason [ 2 ] === 0xBF ) {
reason = reason . subarray ( 3 )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( code !== undefined && ! isValidStatusCode ( code ) ) {
return null
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
try {
// TODO: optimize this
reason = new TextDecoder ( 'utf-8' , { fatal : true } ) . decode ( reason )
} catch {
return null
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
return { code , reason }
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
get closingInfo ( ) {
return this . # info . closeInfo
2020-12-06 17:56:38 +08:00
}
}
2023-07-27 11:01:06 +00:00
module . exports = {
ByteParser
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
/***/ } ) ,
/***/ 7578 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = {
kWebSocketURL : Symbol ( 'url' ) ,
kReadyState : Symbol ( 'ready state' ) ,
kController : Symbol ( 'controller' ) ,
kResponse : Symbol ( 'response' ) ,
kBinaryType : Symbol ( 'binary type' ) ,
kSentClose : Symbol ( 'sent close' ) ,
kReceivedClose : Symbol ( 'received close' ) ,
kByteParser : Symbol ( 'byte parser' )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
/***/ } ) ,
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
/***/ 5515 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
const { kReadyState , kController , kResponse , kBinaryType , kWebSocketURL } = _ _nccwpck _require _ _ ( 7578 )
const { states , opcodes } = _ _nccwpck _require _ _ ( 9188 )
const { MessageEvent , ErrorEvent } = _ _nccwpck _require _ _ ( 2611 )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
/* globals Blob */
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* @ param { import ( './websocket' ) . WebSocket } ws
* /
function isEstablished ( ws ) {
// If the server's response is validated as provided for above, it is
// said that _The WebSocket Connection is Established_ and that the
// WebSocket Connection is in the OPEN state.
return ws [ kReadyState ] === states . OPEN
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* @ param { import ( './websocket' ) . WebSocket } ws
* /
function isClosing ( ws ) {
// Upon either sending or receiving a Close control frame, it is said
// that _The WebSocket Closing Handshake is Started_ and that the
// WebSocket connection is in the CLOSING state.
return ws [ kReadyState ] === states . CLOSING
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* @ param { import ( './websocket' ) . WebSocket } ws
* /
function isClosed ( ws ) {
return ws [ kReadyState ] === states . CLOSED
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* @ see https : //dom.spec.whatwg.org/#concept-event-fire
* @ param { string } e
* @ param { EventTarget } target
* @ param { EventInit | undefined } eventInitDict
* /
function fireEvent ( e , target , eventConstructor = Event , eventInitDict ) {
// 1. If eventConstructor is not given, then let eventConstructor be Event.
// 2. Let event be the result of creating an event given eventConstructor,
// in the relevant realm of target.
// 3. Initialize event’ s type attribute to e.
const event = new eventConstructor ( e , eventInitDict ) // eslint-disable-line new-cap
// 4. Initialize any other IDL attributes of event as described in the
// invocation of this algorithm.
// 5. Return the result of dispatching event at target, with legacy target
// override flag set if set.
target . dispatchEvent ( event )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* @ see https : //websockets.spec.whatwg.org/#feedback-from-the-protocol
* @ param { import ( './websocket' ) . WebSocket } ws
* @ param { number } type Opcode
* @ param { Buffer } data application data
* /
function websocketMessageReceived ( ws , type , data ) {
// 1. If ready state is not OPEN (1), then return.
if ( ws [ kReadyState ] !== states . OPEN ) {
return
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
// 2. Let dataForEvent be determined by switching on type and binary type:
let dataForEvent
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( type === opcodes . TEXT ) {
// -> type indicates that the data is Text
// a new DOMString containing data
try {
dataForEvent = new TextDecoder ( 'utf-8' , { fatal : true } ) . decode ( data )
} catch {
failWebsocketConnection ( ws , 'Received invalid UTF-8 in text frame.' )
return
}
} else if ( type === opcodes . BINARY ) {
if ( ws [ kBinaryType ] === 'blob' ) {
// -> type indicates that the data is Binary and binary type is "blob"
// a new Blob object, created in the relevant Realm of the WebSocket
// object, that represents data as its raw data
dataForEvent = new Blob ( [ data ] )
} else {
// -> type indicates that the data is Binary and binary type is "arraybuffer"
// a new ArrayBuffer object, created in the relevant Realm of the
// WebSocket object, whose contents are data
dataForEvent = new Uint8Array ( data ) . buffer
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
// 3. Fire an event named message at the WebSocket object, using MessageEvent,
// with the origin attribute initialized to the serialization of the WebSocket
// object’ s url's origin, and the data attribute initialized to dataForEvent.
fireEvent ( 'message' , ws , MessageEvent , {
origin : ws [ kWebSocketURL ] . origin ,
data : dataForEvent
} )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* @ see https : //datatracker.ietf.org/doc/html/rfc6455
* @ see https : //datatracker.ietf.org/doc/html/rfc2616
* @ see https : //bugs.chromium.org/p/chromium/issues/detail?id=398407
* @ param { string } protocol
* /
function isValidSubprotocol ( protocol ) {
// If present, this value indicates one
// or more comma-separated subprotocol the client wishes to speak,
// ordered by preference. The elements that comprise this value
// MUST be non-empty strings with characters in the range U+0021 to
// U+007E not including separator characters as defined in
// [RFC2616] and MUST all be unique strings.
if ( protocol . length === 0 ) {
return false
}
for ( const char of protocol ) {
const code = char . charCodeAt ( 0 )
if (
code < 0x21 ||
code > 0x7E ||
char === '(' ||
char === ')' ||
char === '<' ||
char === '>' ||
char === '@' ||
char === ',' ||
char === ';' ||
char === ':' ||
char === '\\' ||
char === '"' ||
char === '/' ||
char === '[' ||
char === ']' ||
char === '?' ||
char === '=' ||
char === '{' ||
char === '}' ||
code === 32 || // SP
code === 9 // HT
2020-12-06 17:56:38 +08:00
) {
2023-07-27 11:01:06 +00:00
return false
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
return true
}
/ * *
* @ see https : //datatracker.ietf.org/doc/html/rfc6455#section-7-4
* @ param { number } code
* /
function isValidStatusCode ( code ) {
if ( code >= 1000 && code < 1015 ) {
return (
code !== 1004 && // reserved
code !== 1005 && // "MUST NOT be set as a status code"
code !== 1006 // "MUST NOT be set as a status code"
)
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
return code >= 3000 && code <= 4999
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* @ param { import ( './websocket' ) . WebSocket } ws
* @ param { string | undefined } reason
* /
function failWebsocketConnection ( ws , reason ) {
const { [ kController ] : controller , [ kResponse ] : response } = ws
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
controller . abort ( )
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
if ( response ? . socket && ! response . socket . destroyed ) {
response . socket . destroy ( )
}
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
if ( reason ) {
fireEvent ( 'error' , ws , ErrorEvent , {
error : new Error ( reason )
} )
}
2022-12-25 13:58:23 +08:00
}
module . exports = {
2023-07-27 11:01:06 +00:00
isEstablished ,
isClosing ,
isClosed ,
fireEvent ,
isValidSubprotocol ,
isValidStatusCode ,
failWebsocketConnection ,
websocketMessageReceived
}
2022-12-25 13:58:23 +08:00
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 4284 :
2021-07-15 07:24:13 +08:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { DOMException } = _ _nccwpck _require _ _ ( 1037 )
const { URLSerializer } = _ _nccwpck _require _ _ ( 685 )
const { staticPropertyDescriptors , states , opcodes , emptyBuffer } = _ _nccwpck _require _ _ ( 9188 )
const {
kWebSocketURL ,
kReadyState ,
kController ,
kBinaryType ,
kResponse ,
kSentClose ,
kByteParser
} = _ _nccwpck _require _ _ ( 7578 )
const { isEstablished , isClosing , isValidSubprotocol , failWebsocketConnection , fireEvent } = _ _nccwpck _require _ _ ( 5515 )
const { establishWebSocketConnection } = _ _nccwpck _require _ _ ( 5354 )
const { WebsocketFrameSend } = _ _nccwpck _require _ _ ( 5444 )
const { ByteParser } = _ _nccwpck _require _ _ ( 1688 )
const { kEnumerableProperty , isBlobLike } = _ _nccwpck _require _ _ ( 3983 )
const { getGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
const { types } = _ _nccwpck _require _ _ ( 3837 )
let experimentalWarned = false
// https://websockets.spec.whatwg.org/#interface-definition
class WebSocket extends EventTarget {
# events = {
open : null ,
error : null ,
close : null ,
message : null
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
# bufferedAmount = 0
# protocol = ''
# extensions = ''
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
/ * *
* @ param { string } url
* @ param { string | string [ ] } protocols
* /
constructor ( url , protocols = [ ] ) {
super ( )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
webidl . argumentLengthCheck ( arguments , 1 , { header : 'WebSocket constructor' } )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( ! experimentalWarned ) {
experimentalWarned = true
process . emitWarning ( 'WebSockets are experimental, expect them to change at any time.' , {
code : 'UNDICI-WS'
} )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
const options = webidl . converters [ 'DOMString or sequence<DOMString> or WebSocketInit' ] ( protocols )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
url = webidl . converters . USVString ( url )
protocols = options . protocols
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// 1. Let urlRecord be the result of applying the URL parser to url.
let urlRecord
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
try {
urlRecord = new URL ( url )
} catch ( e ) {
// 2. If urlRecord is failure, then throw a "SyntaxError" DOMException.
throw new DOMException ( e , 'SyntaxError' )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// 3. If urlRecord’ s scheme is not "ws" or "wss", then throw a
// "SyntaxError" DOMException.
if ( urlRecord . protocol !== 'ws:' && urlRecord . protocol !== 'wss:' ) {
throw new DOMException (
` Expected a ws: or wss: protocol, got ${ urlRecord . protocol } ` ,
'SyntaxError'
)
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// 4. If urlRecord’ s fragment is non-null, then throw a "SyntaxError"
// DOMException.
if ( urlRecord . hash ) {
throw new DOMException ( 'Got fragment' , 'SyntaxError' )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// 5. If protocols is a string, set protocols to a sequence consisting
// of just that string.
if ( typeof protocols === 'string' ) {
protocols = [ protocols ]
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// 6. If any of the values in protocols occur more than once or otherwise
// fail to match the requirements for elements that comprise the value
// of `Sec-WebSocket-Protocol` fields as defined by The WebSocket
// protocol, then throw a "SyntaxError" DOMException.
if ( protocols . length !== new Set ( protocols . map ( p => p . toLowerCase ( ) ) ) . size ) {
throw new DOMException ( 'Invalid Sec-WebSocket-Protocol value' , 'SyntaxError' )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( protocols . length > 0 && ! protocols . every ( p => isValidSubprotocol ( p ) ) ) {
throw new DOMException ( 'Invalid Sec-WebSocket-Protocol value' , 'SyntaxError' )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// 7. Set this's url to urlRecord.
this [ kWebSocketURL ] = urlRecord
// 8. Let client be this's relevant settings object.
// 9. Run this step in parallel:
// 1. Establish a WebSocket connection given urlRecord, protocols,
// and client.
this [ kController ] = establishWebSocketConnection (
urlRecord ,
protocols ,
this ,
( response ) => this . # onConnectionEstablished ( response ) ,
options
)
// Each WebSocket object has an associated ready state, which is a
// number representing the state of the connection. Initially it must
// be CONNECTING (0).
this [ kReadyState ] = WebSocket . CONNECTING
// The extensions attribute must initially return the empty string.
// The protocol attribute must initially return the empty string.
// Each WebSocket object has an associated binary type, which is a
// BinaryType. Initially it must be "blob".
this [ kBinaryType ] = 'blob'
}
/ * *
* @ see https : //websockets.spec.whatwg.org/#dom-websocket-close
* @ param { number | undefined } code
* @ param { string | undefined } reason
* /
close ( code = undefined , reason = undefined ) {
webidl . brandCheck ( this , WebSocket )
if ( code !== undefined ) {
code = webidl . converters [ 'unsigned short' ] ( code , { clamp : true } )
}
if ( reason !== undefined ) {
reason = webidl . converters . USVString ( reason )
}
// 1. If code is present, but is neither an integer equal to 1000 nor an
// integer in the range 3000 to 4999, inclusive, throw an
// "InvalidAccessError" DOMException.
if ( code !== undefined ) {
if ( code !== 1000 && ( code < 3000 || code > 4999 ) ) {
throw new DOMException ( 'invalid code' , 'InvalidAccessError' )
2020-12-06 17:56:38 +08:00
}
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
let reasonByteLength = 0
// 2. If reason is present, then run these substeps:
if ( reason !== undefined ) {
// 1. Let reasonBytes be the result of encoding reason.
// 2. If reasonBytes is longer than 123 bytes, then throw a
// "SyntaxError" DOMException.
reasonByteLength = Buffer . byteLength ( reason )
if ( reasonByteLength > 123 ) {
throw new DOMException (
` Reason must be less than 123 bytes; received ${ reasonByteLength } ` ,
'SyntaxError'
)
}
}
// 3. Run the first matching steps from the following list:
if ( this [ kReadyState ] === WebSocket . CLOSING || this [ kReadyState ] === WebSocket . CLOSED ) {
// If this's ready state is CLOSING (2) or CLOSED (3)
// Do nothing.
} else if ( ! isEstablished ( this ) ) {
// If the WebSocket connection is not yet established
// Fail the WebSocket connection and set this's ready state
// to CLOSING (2).
failWebsocketConnection ( this , 'Connection was closed before it was established.' )
this [ kReadyState ] = WebSocket . CLOSING
} else if ( ! isClosing ( this ) ) {
// If the WebSocket closing handshake has not yet been started
// Start the WebSocket closing handshake and set this's ready
// state to CLOSING (2).
// - If neither code nor reason is present, the WebSocket Close
// message must not have a body.
// - If code is present, then the status code to use in the
// WebSocket Close message must be the integer given by code.
// - If reason is also present, then reasonBytes must be
// provided in the Close message after the status code.
const frame = new WebsocketFrameSend ( )
// If neither code nor reason is present, the WebSocket Close
// message must not have a body.
// If code is present, then the status code to use in the
// WebSocket Close message must be the integer given by code.
if ( code !== undefined && reason === undefined ) {
frame . frameData = Buffer . allocUnsafe ( 2 )
frame . frameData . writeUInt16BE ( code , 0 )
} else if ( code !== undefined && reason !== undefined ) {
// If reason is also present, then reasonBytes must be
// provided in the Close message after the status code.
frame . frameData = Buffer . allocUnsafe ( 2 + reasonByteLength )
frame . frameData . writeUInt16BE ( code , 0 )
// the body MAY contain UTF-8-encoded data with value /reason/
frame . frameData . write ( reason , 2 , 'utf-8' )
} else {
frame . frameData = emptyBuffer
}
/** @type {import('stream').Duplex} */
const socket = this [ kResponse ] . socket
socket . write ( frame . createFrame ( opcodes . CLOSE ) , ( err ) => {
if ( ! err ) {
this [ kSentClose ] = true
}
} )
// Upon either sending or receiving a Close control frame, it is said
// that _The WebSocket Closing Handshake is Started_ and that the
// WebSocket connection is in the CLOSING state.
this [ kReadyState ] = states . CLOSING
} else {
// Otherwise
// Set this's ready state to CLOSING (2).
this [ kReadyState ] = WebSocket . CLOSING
}
}
/ * *
* @ see https : //websockets.spec.whatwg.org/#dom-websocket-send
* @ param { NodeJS . TypedArray | ArrayBuffer | Blob | string } data
* /
send ( data ) {
webidl . brandCheck ( this , WebSocket )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'WebSocket.send' } )
data = webidl . converters . WebSocketSendData ( data )
// 1. If this's ready state is CONNECTING, then throw an
// "InvalidStateError" DOMException.
if ( this [ kReadyState ] === WebSocket . CONNECTING ) {
throw new DOMException ( 'Sent before connected.' , 'InvalidStateError' )
}
// 2. Run the appropriate set of steps from the following list:
// https://datatracker.ietf.org/doc/html/rfc6455#section-6.1
// https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
if ( ! isEstablished ( this ) || isClosing ( this ) ) {
return
}
/** @type {import('stream').Duplex} */
const socket = this [ kResponse ] . socket
// If data is a string
if ( typeof data === 'string' ) {
// If the WebSocket connection is established and the WebSocket
// closing handshake has not yet started, then the user agent
// must send a WebSocket Message comprised of the data argument
// using a text frame opcode; if the data cannot be sent, e.g.
// because it would need to be buffered but the buffer is full,
// the user agent must flag the WebSocket as full and then close
// the WebSocket connection. Any invocation of this method with a
// string argument that does not throw an exception must increase
// the bufferedAmount attribute by the number of bytes needed to
// express the argument as UTF-8.
const value = Buffer . from ( data )
const frame = new WebsocketFrameSend ( value )
const buffer = frame . createFrame ( opcodes . TEXT )
this . # bufferedAmount += value . byteLength
socket . write ( buffer , ( ) => {
this . # bufferedAmount -= value . byteLength
} )
} else if ( types . isArrayBuffer ( data ) ) {
// If the WebSocket connection is established, and the WebSocket
// closing handshake has not yet started, then the user agent must
// send a WebSocket Message comprised of data using a binary frame
// opcode; if the data cannot be sent, e.g. because it would need
// to be buffered but the buffer is full, the user agent must flag
// the WebSocket as full and then close the WebSocket connection.
// The data to be sent is the data stored in the buffer described
// by the ArrayBuffer object. Any invocation of this method with an
// ArrayBuffer argument that does not throw an exception must
// increase the bufferedAmount attribute by the length of the
// ArrayBuffer in bytes.
const value = Buffer . from ( data )
const frame = new WebsocketFrameSend ( value )
const buffer = frame . createFrame ( opcodes . BINARY )
this . # bufferedAmount += value . byteLength
socket . write ( buffer , ( ) => {
this . # bufferedAmount -= value . byteLength
} )
} else if ( ArrayBuffer . isView ( data ) ) {
// If the WebSocket connection is established, and the WebSocket
// closing handshake has not yet started, then the user agent must
// send a WebSocket Message comprised of data using a binary frame
// opcode; if the data cannot be sent, e.g. because it would need to
// be buffered but the buffer is full, the user agent must flag the
// WebSocket as full and then close the WebSocket connection. The
// data to be sent is the data stored in the section of the buffer
// described by the ArrayBuffer object that data references. Any
// invocation of this method with this kind of argument that does
// not throw an exception must increase the bufferedAmount attribute
// by the length of data’ s buffer in bytes.
const ab = Buffer . from ( data , data . byteOffset , data . byteLength )
const frame = new WebsocketFrameSend ( ab )
const buffer = frame . createFrame ( opcodes . BINARY )
this . # bufferedAmount += ab . byteLength
socket . write ( buffer , ( ) => {
this . # bufferedAmount -= ab . byteLength
} )
} else if ( isBlobLike ( data ) ) {
// If the WebSocket connection is established, and the WebSocket
// closing handshake has not yet started, then the user agent must
// send a WebSocket Message comprised of data using a binary frame
// opcode; if the data cannot be sent, e.g. because it would need to
// be buffered but the buffer is full, the user agent must flag the
// WebSocket as full and then close the WebSocket connection. The data
// to be sent is the raw data represented by the Blob object. Any
// invocation of this method with a Blob argument that does not throw
// an exception must increase the bufferedAmount attribute by the size
// of the Blob object’ s raw data, in bytes.
const frame = new WebsocketFrameSend ( )
data . arrayBuffer ( ) . then ( ( ab ) => {
const value = Buffer . from ( ab )
frame . frameData = value
const buffer = frame . createFrame ( opcodes . BINARY )
this . # bufferedAmount += value . byteLength
socket . write ( buffer , ( ) => {
this . # bufferedAmount -= value . byteLength
} )
} )
}
}
get readyState ( ) {
webidl . brandCheck ( this , WebSocket )
// The readyState getter steps are to return this's ready state.
return this [ kReadyState ]
}
get bufferedAmount ( ) {
webidl . brandCheck ( this , WebSocket )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
return this . # bufferedAmount
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
get url ( ) {
webidl . brandCheck ( this , WebSocket )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
// The url getter steps are to return this's url, serialized.
return URLSerializer ( this [ kWebSocketURL ] )
}
get extensions ( ) {
webidl . brandCheck ( this , WebSocket )
return this . # extensions
}
get protocol ( ) {
webidl . brandCheck ( this , WebSocket )
return this . # protocol
}
get onopen ( ) {
webidl . brandCheck ( this , WebSocket )
return this . # events . open
}
set onopen ( fn ) {
webidl . brandCheck ( this , WebSocket )
if ( this . # events . open ) {
this . removeEventListener ( 'open' , this . # events . open )
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( typeof fn === 'function' ) {
this . # events . open = fn
this . addEventListener ( 'open' , fn )
} else {
this . # events . open = null
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
get onerror ( ) {
webidl . brandCheck ( this , WebSocket )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
return this . # events . error
}
set onerror ( fn ) {
webidl . brandCheck ( this , WebSocket )
if ( this . # events . error ) {
this . removeEventListener ( 'error' , this . # events . error )
}
if ( typeof fn === 'function' ) {
this . # events . error = fn
this . addEventListener ( 'error' , fn )
} else {
this . # events . error = null
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
get onclose ( ) {
webidl . brandCheck ( this , WebSocket )
return this . # events . close
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
set onclose ( fn ) {
webidl . brandCheck ( this , WebSocket )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( this . # events . close ) {
this . removeEventListener ( 'close' , this . # events . close )
}
if ( typeof fn === 'function' ) {
this . # events . close = fn
this . addEventListener ( 'close' , fn )
} else {
this . # events . close = null
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
get onmessage ( ) {
webidl . brandCheck ( this , WebSocket )
return this . # events . message
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
set onmessage ( fn ) {
webidl . brandCheck ( this , WebSocket )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( this . # events . message ) {
this . removeEventListener ( 'message' , this . # events . message )
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
if ( typeof fn === 'function' ) {
this . # events . message = fn
this . addEventListener ( 'message' , fn )
} else {
this . # events . message = null
2020-12-06 17:56:38 +08:00
}
}
2023-07-27 11:01:06 +00:00
get binaryType ( ) {
webidl . brandCheck ( this , WebSocket )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
return this [ kBinaryType ]
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
set binaryType ( type ) {
webidl . brandCheck ( this , WebSocket )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( type !== 'blob' && type !== 'arraybuffer' ) {
this [ kBinaryType ] = 'blob'
} else {
this [ kBinaryType ] = type
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
/ * *
* @ see https : //websockets.spec.whatwg.org/#feedback-from-the-protocol
* /
# onConnectionEstablished ( response ) {
// processResponse is called when the "response’ s header list has been received and initialized."
// once this happens, the connection is open
this [ kResponse ] = response
const parser = new ByteParser ( this )
parser . on ( 'drain' , function onParserDrain ( ) {
this . ws [ kResponse ] . socket . resume ( )
} )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
response . socket . ws = this
this [ kByteParser ] = parser
// 1. Change the ready state to OPEN (1).
this [ kReadyState ] = states . OPEN
// 2. Change the extensions attribute’ s value to the extensions in use, if
// it is not the null value.
// https://datatracker.ietf.org/doc/html/rfc6455#section-9.1
const extensions = response . headersList . get ( 'sec-websocket-extensions' )
if ( extensions !== null ) {
this . # extensions = extensions
}
// 3. Change the protocol attribute’ s value to the subprotocol in use, if
// it is not the null value.
// https://datatracker.ietf.org/doc/html/rfc6455#section-1.9
const protocol = response . headersList . get ( 'sec-websocket-protocol' )
if ( protocol !== null ) {
this . # protocol = protocol
}
// 4. Fire an event named open at the WebSocket object.
fireEvent ( 'open' , this )
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
// https://websockets.spec.whatwg.org/#dom-websocket-connecting
WebSocket . CONNECTING = WebSocket . prototype . CONNECTING = states . CONNECTING
// https://websockets.spec.whatwg.org/#dom-websocket-open
WebSocket . OPEN = WebSocket . prototype . OPEN = states . OPEN
// https://websockets.spec.whatwg.org/#dom-websocket-closing
WebSocket . CLOSING = WebSocket . prototype . CLOSING = states . CLOSING
// https://websockets.spec.whatwg.org/#dom-websocket-closed
WebSocket . CLOSED = WebSocket . prototype . CLOSED = states . CLOSED
Object . defineProperties ( WebSocket . prototype , {
CONNECTING : staticPropertyDescriptors ,
OPEN : staticPropertyDescriptors ,
CLOSING : staticPropertyDescriptors ,
CLOSED : staticPropertyDescriptors ,
url : kEnumerableProperty ,
readyState : kEnumerableProperty ,
bufferedAmount : kEnumerableProperty ,
onopen : kEnumerableProperty ,
onerror : kEnumerableProperty ,
onclose : kEnumerableProperty ,
close : kEnumerableProperty ,
onmessage : kEnumerableProperty ,
binaryType : kEnumerableProperty ,
send : kEnumerableProperty ,
extensions : kEnumerableProperty ,
protocol : kEnumerableProperty ,
[ Symbol . toStringTag ] : {
value : 'WebSocket' ,
writable : false ,
enumerable : false ,
configurable : true
}
} )
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
Object . defineProperties ( WebSocket , {
CONNECTING : staticPropertyDescriptors ,
OPEN : staticPropertyDescriptors ,
CLOSING : staticPropertyDescriptors ,
CLOSED : staticPropertyDescriptors
} )
webidl . converters [ 'sequence<DOMString>' ] = webidl . sequenceConverter (
webidl . converters . DOMString
)
webidl . converters [ 'DOMString or sequence<DOMString>' ] = function ( V ) {
if ( webidl . util . Type ( V ) === 'Object' && Symbol . iterator in V ) {
return webidl . converters [ 'sequence<DOMString>' ] ( V )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
return webidl . converters . DOMString ( V )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
// This implements the propsal made in https://github.com/whatwg/websockets/issues/42
webidl . converters . WebSocketInit = webidl . dictionaryConverter ( [
{
key : 'protocols' ,
converter : webidl . converters [ 'DOMString or sequence<DOMString>' ] ,
get defaultValue ( ) {
return [ ]
}
} ,
{
key : 'dispatcher' ,
converter : ( V ) => V ,
get defaultValue ( ) {
return getGlobalDispatcher ( )
2020-08-27 20:39:35 +08:00
}
2023-07-27 11:01:06 +00:00
} ,
{
key : 'headers' ,
converter : webidl . nullableConverter ( webidl . converters . HeadersInit )
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
] )
webidl . converters [ 'DOMString or sequence<DOMString> or WebSocketInit' ] = function ( V ) {
if ( webidl . util . Type ( V ) === 'Object' && ! ( Symbol . iterator in V ) ) {
return webidl . converters . WebSocketInit ( V )
}
return { protocols : webidl . converters [ 'DOMString or sequence<DOMString>' ] ( V ) }
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
webidl . converters . WebSocketSendData = function ( V ) {
if ( webidl . util . Type ( V ) === 'Object' ) {
if ( isBlobLike ( V ) ) {
return webidl . converters . Blob ( V , { strict : false } )
}
2020-12-06 17:56:38 +08:00
2023-07-27 11:01:06 +00:00
if ( ArrayBuffer . isView ( V ) || types . isAnyArrayBuffer ( V ) ) {
return webidl . converters . BufferSource ( V )
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
}
2023-07-27 11:01:06 +00:00
return webidl . converters . USVString ( V )
}
module . exports = {
WebSocket
2020-12-06 17:56:38 +08:00
}
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 5030 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
function getUserAgent ( ) {
if ( typeof navigator === "object" && "userAgent" in navigator ) {
return navigator . userAgent ;
}
if ( typeof process === "object" && "version" in process ) {
return ` Node.js/ ${ process . version . substr ( 1 ) } ( ${ process . platform } ; ${ process . arch } ) ` ;
}
return "<environment undetectable>" ;
}
exports . getUserAgent = getUserAgent ;
//# sourceMappingURL=index.js.map
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2707 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
/ * *
* Convert array of 16 byte values to UUID string format of the form :
* XXXXXXXX - XXXX - XXXX - XXXX - XXXXXXXXXXXX
* /
var byteToHex = [ ] ;
for ( var i = 0 ; i < 256 ; ++ i ) {
byteToHex [ i ] = ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ;
}
function bytesToUuid ( buf , offset ) {
var i = offset || 0 ;
var bth = byteToHex ;
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
2021-07-16 17:22:55 +08:00
return ( [
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ]
] ) . join ( '' ) ;
2020-12-06 17:56:38 +08:00
}
module . exports = bytesToUuid ;
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 5859 :
2021-07-15 07:24:13 +08:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-12-06 17:56:38 +08:00
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
2022-08-09 17:17:26 +00:00
var crypto = _ _nccwpck _require _ _ ( 6113 ) ;
2020-12-06 17:56:38 +08:00
module . exports = function nodeRNG ( ) {
return crypto . randomBytes ( 16 ) ;
} ;
/***/ } ) ,
/***/ 824 :
2021-07-15 07:24:13 +08:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-12-06 17:56:38 +08:00
2022-08-09 17:17:26 +00:00
var rng = _ _nccwpck _require _ _ ( 5859 ) ;
var bytesToUuid = _ _nccwpck _require _ _ ( 2707 ) ;
2020-12-06 17:56:38 +08:00
function v4 ( options , buf , offset ) {
var i = buf && offset || 0 ;
if ( typeof ( options ) == 'string' ) {
buf = options === 'binary' ? new Array ( 16 ) : null ;
options = null ;
}
options = options || { } ;
var rnds = options . random || ( options . rng || rng ) ( ) ;
// Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = ( rnds [ 6 ] & 0x0f ) | 0x40 ;
rnds [ 8 ] = ( rnds [ 8 ] & 0x3f ) | 0x80 ;
// Copy bytes to buffer, if provided
if ( buf ) {
for ( var ii = 0 ; ii < 16 ; ++ ii ) {
buf [ i + ii ] = rnds [ ii ] ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
}
return buf || bytesToUuid ( rnds ) ;
2020-08-27 20:39:35 +08:00
}
2020-12-06 17:56:38 +08:00
module . exports = v4 ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
/***/ 7461 :
/***/ ( ( module ) => {
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module . exports = wrappy
function wrappy ( fn , cb ) {
if ( fn && cb ) return wrappy ( fn ) ( cb )
if ( typeof fn !== 'function' )
throw new TypeError ( 'need wrapper function' )
Object . keys ( fn ) . forEach ( function ( k ) {
wrapper [ k ] = fn [ k ]
} )
return wrapper
function wrapper ( ) {
var args = new Array ( arguments . length )
for ( var i = 0 ; i < args . length ; i ++ ) {
args [ i ] = arguments [ i ]
}
var ret = fn . apply ( this , args )
var cb = args [ args . length - 1 ]
if ( typeof ret === 'function' && ret !== cb ) {
Object . keys ( cb ) . forEach ( function ( k ) {
ret [ k ] = cb [ k ]
} )
}
return ret
}
}
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 9491 :
2022-12-25 13:58:23 +08:00
/***/ ( ( module ) => {
2023-07-27 11:01:06 +00:00
"use strict" ;
module . exports = require ( "assert" ) ;
/***/ } ) ,
/***/ 852 :
/***/ ( ( module ) => {
2022-12-25 13:58:23 +08:00
2023-07-27 11:01:06 +00:00
"use strict" ;
module . exports = require ( "async_hooks" ) ;
2022-12-25 13:58:23 +08:00
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 4300 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2023-07-27 11:01:06 +00:00
module . exports = require ( "buffer" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2081 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "child_process" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 6206 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "console" ) ;
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 6113 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "crypto" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 7643 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "diagnostics_channel" ) ;
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2361 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "events" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 7147 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "fs" ) ;
2020-08-27 20:39:35 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 3685 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
2020-08-27 20:39:35 +08:00
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "http" ) ;
2020-08-27 20:39:35 +08:00
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 5687 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "https" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 1808 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "net" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2037 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "os" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 1017 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "path" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 4074 :
2022-12-25 13:58:23 +08:00
/***/ ( ( module ) => {
"use strict" ;
2023-07-27 11:01:06 +00:00
module . exports = require ( "perf_hooks" ) ;
/***/ } ) ,
/***/ 3477 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "querystring" ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 2781 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "stream" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 5356 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "stream/web" ) ;
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 1576 :
2021-07-15 07:24:13 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "string_decoder" ) ;
2021-07-15 07:24:13 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 9512 :
2021-07-15 07:24:13 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "timers" ) ;
2021-07-15 07:24:13 +08:00
/***/ } ) ,
2022-08-09 17:17:26 +00:00
/***/ 4404 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2021-08-20 17:05:57 +00:00
module . exports = require ( "tls" ) ;
2020-12-06 17:56:38 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 7310 :
2022-12-25 13:58:23 +08:00
/***/ ( ( module ) => {
"use strict" ;
2023-07-27 11:01:06 +00:00
module . exports = require ( "url" ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 3837 :
2022-12-25 13:58:23 +08:00
/***/ ( ( module ) => {
"use strict" ;
2023-07-27 11:01:06 +00:00
module . exports = require ( "util" ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 9830 :
2020-12-06 17:56:38 +08:00
/***/ ( ( module ) => {
"use strict" ;
2023-07-27 11:01:06 +00:00
module . exports = require ( "util/types" ) ;
2020-08-27 11:53:14 +08:00
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 1267 :
2022-12-25 13:58:23 +08:00
/***/ ( ( module ) => {
"use strict" ;
2023-07-27 11:01:06 +00:00
module . exports = require ( "worker_threads" ) ;
2022-12-25 13:58:23 +08:00
/***/ } ) ,
2023-07-27 11:01:06 +00:00
/***/ 9796 :
2022-12-25 13:58:23 +08:00
/***/ ( ( module ) => {
"use strict" ;
2023-07-27 11:01:06 +00:00
module . exports = require ( "zlib" ) ;
2022-12-25 13:58:23 +08:00
2020-08-27 11:53:14 +08:00
/***/ } )
2020-12-06 17:56:38 +08:00
/******/ } ) ;
/************************************************************************/
/******/ // The module cache
/******/ var _ _webpack _module _cache _ _ = { } ;
/******/
/******/ // The require function
2021-07-15 07:24:13 +08:00
/******/ function _ _nccwpck _require _ _ ( moduleId ) {
2020-12-06 17:56:38 +08:00
/******/ // Check if module is in cache
2021-07-15 07:24:13 +08:00
/******/ var cachedModule = _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ if ( cachedModule !== undefined ) {
/******/ return cachedModule . exports ;
2020-12-06 17:56:38 +08:00
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = _ _webpack _module _cache _ _ [ moduleId ] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports : { }
/******/ } ;
/******/
/******/ // Execute the module function
/******/ var threw = true ;
/******/ try {
2021-07-15 07:24:13 +08:00
/******/ _ _webpack _modules _ _ [ moduleId ] . call ( module . exports , module , module . exports , _ _nccwpck _require _ _ ) ;
2020-12-06 17:56:38 +08:00
/******/ threw = false ;
/******/ } finally {
/******/ if ( threw ) delete _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module . exports ;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
/******/
2021-08-20 17:05:57 +00:00
/******/ if ( typeof _ _nccwpck _require _ _ !== 'undefined' ) _ _nccwpck _require _ _ . ab = _ _dirname + "/" ;
/******/
/************************************************************************/
2021-07-15 07:24:13 +08:00
/******/
2020-12-06 17:56:38 +08:00
/******/ // startup
/******/ // Load entry module and return exports
2021-07-15 07:24:13 +08:00
/******/ // This entry module is referenced by other modules so it can't be inlined
2022-08-09 17:17:26 +00:00
/******/ var _ _webpack _exports _ _ = _ _nccwpck _require _ _ ( 3109 ) ;
2021-07-15 07:24:13 +08:00
/******/ module . exports = _ _webpack _exports _ _ ;
/******/
2020-12-06 17:56:38 +08:00
/******/ } ) ( )
;
2020-08-27 11:53:14 +08:00
//# sourceMappingURL=index.js.map