2023-03-09 17:42:29 +01:00
/******/ ( ( ) => { // webpackBootstrap
/******/ var _ _webpack _modules _ _ = ( {
2020-05-07 12:11:11 -04:00
2023-03-09 17:42:29 +01:00
/***/ 7219 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-05-07 12:11:11 -04:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2020-05-07 12:11:11 -04:00
2023-03-09 17:42:29 +01:00
// TypeScript emit helpers (tslib-style), inlined by the compiler.
// __createBinding re-exports property `k` of module `m` onto `o` under `k2`
// (defaulting to `k`), using a getter so live bindings are preserved.
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        var desc = Object.getOwnPropertyDescriptor(m, k);
        // Re-wrap as an accessor unless the source already provides a suitable getter.
        if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
            desc = { enumerable: true, get: function () { return m[k]; } };
        }
        Object.defineProperty(o, k2, desc);
    }
    : function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        o[k2] = m[k];
    });
// __setModuleDefault attaches the CommonJS export object as the namespace `default`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create
    ? function (o, v) {
        Object.defineProperty(o, "default", { enumerable: true, value: v });
    }
    : function (o, v) {
        o["default"] = v;
    });
// __importStar emulates `import * as ns from 'mod'` for a CommonJS module:
// ES modules are returned as-is; CJS exports are copied onto a fresh namespace.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . directoryExistsSync = directoryExistsSync ;
exports . existsSync = existsSync ;
exports . fileExistsSync = fileExistsSync ;
2023-03-09 17:42:29 +01:00
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
/**
 * Checks whether `path` exists and is a directory.
 * @param path Path to inspect; must be non-empty.
 * @param required When truthy, a missing path or a non-directory throws
 *                 instead of returning false.
 * @returns true when the path is an existing directory.
 * @throws When `path` is empty, when `required` and no directory exists,
 *         or when stat fails for any reason other than ENOENT.
 */
function directoryExistsSync(path, required) {
    if (!path) {
        throw new Error("Arg 'path' must not be empty");
    }
    let stats;
    try {
        stats = fs.statSync(path);
    }
    catch (error) {
        if (error?.code === 'ENOENT') {
            if (required) {
                throw new Error(`Directory '${path}' does not exist`);
            }
            return false;
        }
        // Any other stat failure (permissions, I/O, ...) is always fatal.
        throw new Error(`Encountered an error when checking whether path '${path}' exists: ${error?.message ?? error}`);
    }
    if (stats.isDirectory()) {
        return true;
    }
    if (required) {
        throw new Error(`Directory '${path}' does not exist`);
    }
    return false;
}
/**
 * Checks whether `path` exists (file, directory, or anything stat-able).
 * @param path Path to inspect; must be non-empty.
 * @returns true when stat succeeds, false on ENOENT.
 * @throws When `path` is empty or stat fails for a reason other than ENOENT.
 */
function existsSync(path) {
    if (!path) {
        throw new Error("Arg 'path' must not be empty");
    }
    try {
        fs.statSync(path);
        return true;
    }
    catch (error) {
        if (error?.code === 'ENOENT') {
            return false;
        }
        throw new Error(`Encountered an error when checking whether path '${path}' exists: ${error?.message ?? error}`);
    }
}
/**
 * Checks whether `path` exists and is NOT a directory (i.e. a file-like entry).
 * @param path Path to inspect; must be non-empty.
 * @returns true for an existing non-directory, false on ENOENT or a directory.
 * @throws When `path` is empty or stat fails for a reason other than ENOENT.
 */
function fileExistsSync(path) {
    if (!path) {
        throw new Error("Arg 'path' must not be empty");
    }
    let stats;
    try {
        stats = fs.statSync(path);
    }
    catch (error) {
        if (error?.code === 'ENOENT') {
            return false;
        }
        throw new Error(`Encountered an error when checking whether path '${path}' exists: ${error?.message ?? error}`);
    }
    return !stats.isDirectory();
}
2019-12-03 10:28:59 -05:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 2565 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2019-12-03 10:28:59 -05:00
"use strict" ;
2022-12-12 13:44:24 -05:00
// TypeScript emit helpers (tslib-style), inlined by the compiler.
// __createBinding re-exports property `k` of module `m` onto `o` under `k2`
// (defaulting to `k`), using a getter so live bindings are preserved.
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        var desc = Object.getOwnPropertyDescriptor(m, k);
        // Re-wrap as an accessor unless the source already provides a suitable getter.
        if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
            desc = { enumerable: true, get: function () { return m[k]; } };
        }
        Object.defineProperty(o, k2, desc);
    }
    : function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        o[k2] = m[k];
    });
// __setModuleDefault attaches the CommonJS export object as the namespace `default`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create
    ? function (o, v) {
        Object.defineProperty(o, "default", { enumerable: true, value: v });
    }
    : function (o, v) {
        o["default"] = v;
    });
// __importStar emulates `import * as ns from 'mod'` for a CommonJS module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __awaiter is TypeScript's async/await down-level helper: it drives a generator,
// resolving each yielded value before resuming, and settles the returned
// promise with the generator's final return value (or first thrown error).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yields so `.then` is always available.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function onFulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function onRejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(onFulfilled, onRejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
2023-03-09 17:42:29 +01:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . createAuthHelper = createAuthHelper ;
2023-03-09 17:42:29 +01:00
const assert = _ _importStar ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const exec = _ _importStar ( _ _nccwpck _require _ _ ( 1514 ) ) ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const regexpHelper = _ _importStar ( _ _nccwpck _require _ _ ( 3120 ) ) ;
2024-04-24 12:04:10 -04:00
const stateHelper = _ _importStar ( _ _nccwpck _require _ _ ( 4866 ) ) ;
2023-03-09 17:42:29 +01:00
const urlHelper = _ _importStar ( _ _nccwpck _require _ _ ( 9437 ) ) ;
2024-04-24 12:04:10 -04:00
const uuid _1 = _ _nccwpck _require _ _ ( 5840 ) ;
2023-03-09 17:42:29 +01:00
const IS _WINDOWS = process . platform === 'win32' ;
const SSH _COMMAND _KEY = 'core.sshCommand' ;
/**
 * Factory for the git auth helper.
 * @param git Git command manager used to run config commands.
 * @param settings Optional source settings (auth token, SSH key, submodule options).
 * @returns A new GitAuthHelper bound to `git` and `settings`.
 */
function createAuthHelper(git, settings) {
    return new GitAuthHelper(git, settings);
}
/**
 * Manages git authentication state for checkout: the basic-auth `extraheader`
 * for HTTPS, an optional SSH key/known_hosts pair, and the `insteadOf`
 * rewrites that route SSH remotes through HTTPS when no SSH key is supplied.
 */
class GitAuthHelper {
    constructor(gitCommandManager, gitSourceSettings) {
        this.insteadOfValues = [];
        this.sshCommand = '';
        this.sshKeyPath = '';
        this.sshKnownHostsPath = '';
        this.temporaryHomePath = '';
        this.git = gitCommandManager;
        this.settings = gitSourceSettings || {};
        // Token auth header
        const serverUrl = urlHelper.getServerUrl(this.settings.githubServerUrl);
        this.tokenConfigKey = `http.${serverUrl.origin}/.extraheader`; // "origin" is SCHEME://HOSTNAME[:PORT]
        const basicCredential = Buffer.from(`x-access-token:${this.settings.authToken}`, 'utf8').toString('base64');
        core.setSecret(basicCredential);
        this.tokenPlaceholderConfigValue = `AUTHORIZATION: basic ***`;
        this.tokenConfigValue = `AUTHORIZATION: basic ${basicCredential}`;
        // Instead of SSH URL
        this.insteadOfKey = `url.${serverUrl.origin}/.insteadOf`; // "origin" is SCHEME://HOSTNAME[:PORT]
        this.insteadOfValues.push(`git@${serverUrl.hostname}:`);
        if (this.settings.workflowOrganizationId) {
            this.insteadOfValues.push(`org-${this.settings.workflowOrganizationId}@github.com:`);
        }
    }
    /** Removes any previous auth, then configures SSH and token auth fresh. */
    async configureAuth() {
        // Remove possible previous values
        await this.removeAuth();
        // Configure new values
        await this.configureSsh();
        await this.configureToken();
    }
    /**
     * Creates a temp HOME with a copy of the user's global git config and
     * points git at it. No-ops (returning the existing path) when already set.
     * @returns Path of the temporary `.gitconfig`.
     */
    async configureTempGlobalConfig() {
        // Already setup global config
        if (this.temporaryHomePath?.length > 0) {
            return path.join(this.temporaryHomePath, '.gitconfig');
        }
        // Create a temp home directory
        const runnerTemp = process.env['RUNNER_TEMP'] || '';
        assert.ok(runnerTemp, 'RUNNER_TEMP is not defined');
        const uniqueId = uuid_1.v4();
        this.temporaryHomePath = path.join(runnerTemp, uniqueId);
        await fs.promises.mkdir(this.temporaryHomePath, { recursive: true });
        // Copy the global git config
        const gitConfigPath = path.join(process.env['HOME'] || os.homedir(), '.gitconfig');
        const newGitConfigPath = path.join(this.temporaryHomePath, '.gitconfig');
        let configExists = false;
        try {
            await fs.promises.stat(gitConfigPath);
            configExists = true;
        }
        catch (err) {
            // A missing global config is fine; anything else is fatal.
            if (err?.code !== 'ENOENT') {
                throw err;
            }
        }
        if (configExists) {
            core.info(`Copying '${gitConfigPath}' to '${newGitConfigPath}'`);
            await io.cp(gitConfigPath, newGitConfigPath);
        }
        else {
            await fs.promises.writeFile(newGitConfigPath, '');
        }
        // Override HOME
        core.info(`Temporarily overriding HOME='${this.temporaryHomePath}' before making global git config changes`);
        this.git.setEnvironmentVariable('HOME', this.temporaryHomePath);
        return newGitConfigPath;
    }
    /** Configures token auth (and HTTPS-instead-of-SSH) in the temp global config. */
    async configureGlobalAuth() {
        // 'configureTempGlobalConfig' noops if already set, just returns the path
        const newGitConfigPath = await this.configureTempGlobalConfig();
        try {
            // Configure the token
            await this.configureToken(newGitConfigPath, true);
            // Configure HTTPS instead of SSH
            await this.git.tryConfigUnset(this.insteadOfKey, true);
            if (!this.settings.sshKey) {
                for (const insteadOfValue of this.insteadOfValues) {
                    await this.git.config(this.insteadOfKey, insteadOfValue, true, true);
                }
            }
        }
        catch (err) {
            // Unset in case somehow written to the real global config
            core.info('Encountered an error when attempting to configure token. Attempting unconfigure.');
            await this.git.tryConfigUnset(this.tokenConfigKey, true);
            throw err;
        }
    }
    /** Propagates auth configuration (token or SSH) into each submodule's local config. */
    async configureSubmoduleAuth() {
        // Remove possible previous HTTPS instead of SSH
        await this.removeGitConfig(this.insteadOfKey, true);
        if (this.settings.persistCredentials) {
            // Configure a placeholder value. This approach avoids the credential being captured
            // by process creation audit events, which are commonly logged. For more information,
            // refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
            const output = await this.git.submoduleForeach(
            // wrap the pipeline in quotes to make sure it's handled properly by submoduleForeach, rather than just the first part of the pipeline
            `sh -c "git config --local '${this.tokenConfigKey}' '${this.tokenPlaceholderConfigValue}' && git config --local --show-origin --name-only --get-regexp remote.origin.url"`, this.settings.nestedSubmodules);
            // Replace the placeholder
            const configPaths = output.match(/(?<=(^|\n)file:)[^\t]+(?=\tremote\.origin\.url)/g) || [];
            for (const configPath of configPaths) {
                core.debug(`Replacing token placeholder in '${configPath}'`);
                await this.replaceTokenPlaceholder(configPath);
            }
            if (this.settings.sshKey) {
                // Configure core.sshCommand
                await this.git.submoduleForeach(`git config --local '${SSH_COMMAND_KEY}' '${this.sshCommand}'`, this.settings.nestedSubmodules);
            }
            else {
                // Configure HTTPS instead of SSH
                for (const insteadOfValue of this.insteadOfValues) {
                    await this.git.submoduleForeach(`git config --local --add '${this.insteadOfKey}' '${insteadOfValue}'`, this.settings.nestedSubmodules);
                }
            }
        }
    }
    /** Removes SSH and token auth configuration. */
    async removeAuth() {
        await this.removeSsh();
        await this.removeToken();
    }
    /** Restores HOME and deletes the temp home directory, if one was created. */
    async removeGlobalConfig() {
        if (this.temporaryHomePath?.length > 0) {
            core.debug(`Unsetting HOME override`);
            this.git.removeEnvironmentVariable('HOME');
            await io.rmRF(this.temporaryHomePath);
        }
    }
    /** Writes the SSH key and known_hosts files and wires up GIT_SSH_COMMAND. */
    async configureSsh() {
        if (!this.settings.sshKey) {
            return;
        }
        // Write key
        const runnerTemp = process.env['RUNNER_TEMP'] || '';
        assert.ok(runnerTemp, 'RUNNER_TEMP is not defined');
        const uniqueId = uuid_1.v4();
        this.sshKeyPath = path.join(runnerTemp, uniqueId);
        stateHelper.setSshKeyPath(this.sshKeyPath);
        await fs.promises.mkdir(runnerTemp, { recursive: true });
        await fs.promises.writeFile(this.sshKeyPath, this.settings.sshKey.trim() + '\n', { mode: 0o600 });
        // Remove inherited permissions on Windows
        if (IS_WINDOWS) {
            const icacls = await io.which('icacls.exe');
            await exec.exec(`"${icacls}" "${this.sshKeyPath}" /grant:r "${process.env['USERDOMAIN']}\\${process.env['USERNAME']}:F"`);
            await exec.exec(`"${icacls}" "${this.sshKeyPath}" /inheritance:r`);
        }
        // Write known hosts
        const userKnownHostsPath = path.join(os.homedir(), '.ssh', 'known_hosts');
        let userKnownHosts = '';
        try {
            userKnownHosts = (await fs.promises.readFile(userKnownHostsPath)).toString();
        }
        catch (err) {
            if (err?.code !== 'ENOENT') {
                throw err;
            }
        }
        let knownHosts = '';
        if (userKnownHosts) {
            knownHosts += `# Begin from ${userKnownHostsPath}\n${userKnownHosts}\n# End from ${userKnownHostsPath}\n`;
        }
        if (this.settings.sshKnownHosts) {
            knownHosts += `# Begin from input known hosts\n${this.settings.sshKnownHosts}\n# end from input known hosts\n`;
        }
        knownHosts += `# Begin implicitly added github.com\ngithub.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=\n# End implicitly added github.com\n`;
        this.sshKnownHostsPath = path.join(runnerTemp, `${uniqueId}_known_hosts`);
        stateHelper.setSshKnownHostsPath(this.sshKnownHostsPath);
        await fs.promises.writeFile(this.sshKnownHostsPath, knownHosts);
        // Configure GIT_SSH_COMMAND
        const sshPath = await io.which('ssh', true);
        this.sshCommand = `"${sshPath}" -i "$RUNNER_TEMP/${path.basename(this.sshKeyPath)}"`;
        if (this.settings.sshStrict) {
            this.sshCommand += ' -o StrictHostKeyChecking=yes -o CheckHostIP=no';
        }
        this.sshCommand += ` -o "UserKnownHostsFile=$RUNNER_TEMP/${path.basename(this.sshKnownHostsPath)}"`;
        core.info(`Temporarily overriding GIT_SSH_COMMAND=${this.sshCommand}`);
        this.git.setEnvironmentVariable('GIT_SSH_COMMAND', this.sshCommand);
        // Configure core.sshCommand
        if (this.settings.persistCredentials) {
            await this.git.config(SSH_COMMAND_KEY, this.sshCommand);
        }
    }
    /**
     * Writes the token extraheader into the given config (or the repo-local
     * config when no path/global flag is supplied).
     */
    async configureToken(configPath, globalConfig) {
        // Validate args
        assert.ok((configPath && globalConfig) || (!configPath && !globalConfig), 'Unexpected configureToken parameter combinations');
        // Default config path
        if (!configPath && !globalConfig) {
            configPath = path.join(this.git.getWorkingDirectory(), '.git', 'config');
        }
        // Configure a placeholder value. This approach avoids the credential being captured
        // by process creation audit events, which are commonly logged. For more information,
        // refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
        await this.git.config(this.tokenConfigKey, this.tokenPlaceholderConfigValue, globalConfig);
        // Replace the placeholder
        await this.replaceTokenPlaceholder(configPath || '');
    }
    /** Swaps the placeholder written by configureToken for the real credential. */
    async replaceTokenPlaceholder(configPath) {
        assert.ok(configPath, 'configPath is not defined');
        let content = (await fs.promises.readFile(configPath)).toString();
        const placeholderIndex = content.indexOf(this.tokenPlaceholderConfigValue);
        // Exactly one occurrence expected; anything else means the file was tampered with.
        if (placeholderIndex < 0 ||
            placeholderIndex != content.lastIndexOf(this.tokenPlaceholderConfigValue)) {
            throw new Error(`Unable to replace auth placeholder in ${configPath}`);
        }
        assert.ok(this.tokenConfigValue, 'tokenConfigValue is not defined');
        content = content.replace(this.tokenPlaceholderConfigValue, this.tokenConfigValue);
        await fs.promises.writeFile(configPath, content);
    }
    /** Best-effort removal of SSH key, known_hosts file, and core.sshCommand. */
    async removeSsh() {
        // SSH key
        const keyPath = this.sshKeyPath || stateHelper.SshKeyPath;
        if (keyPath) {
            try {
                await io.rmRF(keyPath);
            }
            catch (err) {
                core.debug(`${err?.message ?? err}`);
                core.warning(`Failed to remove SSH key '${keyPath}'`);
            }
        }
        // SSH known hosts
        const knownHostsPath = this.sshKnownHostsPath || stateHelper.SshKnownHostsPath;
        if (knownHostsPath) {
            try {
                await io.rmRF(knownHostsPath);
            }
            catch (_removeError) {
                // Intentionally empty
            }
        }
        // SSH command
        await this.removeGitConfig(SSH_COMMAND_KEY);
    }
    /** Removes the token extraheader from the local config and submodules. */
    async removeToken() {
        // HTTP extra header
        await this.removeGitConfig(this.tokenConfigKey);
    }
    /**
     * Unsets `configKey` from the local config (unless submoduleOnly) and from
     * every submodule's local config.
     */
    async removeGitConfig(configKey, submoduleOnly = false) {
        if (!submoduleOnly) {
            if ((await this.git.configExists(configKey)) &&
                !(await this.git.tryConfigUnset(configKey))) {
                // Load the config contents
                core.warning(`Failed to remove '${configKey}' from the git config`);
            }
        }
        const pattern = regexpHelper.escape(configKey);
        await this.git.submoduleForeach(
        // wrap the pipeline in quotes to make sure it's handled properly by submoduleForeach, rather than just the first part of the pipeline
        `sh -c "git config --local --name-only --get-regexp '${pattern}' && git config --local --unset-all '${configKey}' || :"`, true);
    }
}
2022-10-03 18:04:49 +01:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 738 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2022-10-03 18:04:49 +01:00
"use strict" ;
2023-03-09 17:42:29 +01:00
// TypeScript emit helpers (tslib-style), inlined by the compiler for this module.
// __createBinding re-exports property `k` of module `m` onto `o` under `k2`
// (defaulting to `k`), using a getter so live bindings are preserved.
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        var desc = Object.getOwnPropertyDescriptor(m, k);
        // Re-wrap as an accessor unless the source already provides a suitable getter.
        if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
            desc = { enumerable: true, get: function () { return m[k]; } };
        }
        Object.defineProperty(o, k2, desc);
    }
    : function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        o[k2] = m[k];
    });
// __setModuleDefault attaches the CommonJS export object as the namespace `default`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create
    ? function (o, v) {
        Object.defineProperty(o, "default", { enumerable: true, value: v });
    }
    : function (o, v) {
        o["default"] = v;
    });
// __importStar emulates `import * as ns from 'mod'` for a CommonJS module.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __awaiter is TypeScript's async/await down-level helper: it drives a generator,
// resolving each yielded value before resuming, and settles the returned
// promise with the generator's final return value (or first thrown error).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yields so `.then` is always available.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function onFulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function onRejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(onFulfilled, onRejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
2023-03-09 17:42:29 +01:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . MinimumGitSparseCheckoutVersion = exports . MinimumGitVersion = void 0 ;
exports . createCommandManager = createCommandManager ;
2023-03-09 17:42:29 +01:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const exec = _ _importStar ( _ _nccwpck _require _ _ ( 1514 ) ) ;
2023-06-09 15:08:21 +02:00
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
2023-03-09 17:42:29 +01:00
const fshelper = _ _importStar ( _ _nccwpck _require _ _ ( 7219 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const refHelper = _ _importStar ( _ _nccwpck _require _ _ ( 8601 ) ) ;
const regexpHelper = _ _importStar ( _ _nccwpck _require _ _ ( 3120 ) ) ;
const retryHelper = _ _importStar ( _ _nccwpck _require _ _ ( 2155 ) ) ;
const git _version _1 = _ _nccwpck _require _ _ ( 3142 ) ;
// Auth header not supported before 2.9
// Wire protocol v2 not supported before 2.18
2024-03-14 15:40:14 +01:00
// sparse-checkout not [well-]supported before 2.28 (see https://github.com/actions/checkout/issues/1386)
2023-03-09 17:42:29 +01:00
exports . MinimumGitVersion = new git _version _1 . GitVersion ( '2.18' ) ;
2024-03-14 15:40:14 +01:00
exports . MinimumGitSparseCheckoutVersion = new git _version _1 . GitVersion ( '2.28' ) ;
2023-06-09 15:08:21 +02:00
/**
 * Creates and initializes a GitCommandManager.
 * @param workingDirectory Directory the git commands run in.
 * @param lfs Whether git-lfs support is required.
 * @param doSparseCheckout Whether sparse-checkout will be used.
 * @returns The initialized command manager.
 */
async function createCommandManager(workingDirectory, lfs, doSparseCheckout) {
    return await GitCommandManager.createCommandManager(workingDirectory, lfs, doSparseCheckout);
}
class GitCommandManager {
// Private constructor; use createCommandManager()
constructor ( ) {
this . gitEnv = {
2024-04-24 12:04:10 -04:00
GIT _TERMINAL _PROMPT : '0' , // Disable git prompt
2023-03-09 17:42:29 +01:00
GCM _INTERACTIVE : 'Never' // Disable prompting for git credential manager
} ;
this . gitPath = '' ;
this . lfs = false ;
2023-06-09 15:08:21 +02:00
this . doSparseCheckout = false ;
2023-03-09 17:42:29 +01:00
this . workingDirectory = '' ;
2024-03-14 15:40:14 +01:00
this . gitVersion = new git _version _1 . GitVersion ( ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
branchDelete ( remote , branch ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'branch' , '--delete' , '--force' ] ;
if ( remote ) {
args . push ( '--remote' ) ;
}
args . push ( branch ) ;
yield this . execGit ( args ) ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
branchExists ( remote , pattern ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'branch' , '--list' ] ;
if ( remote ) {
args . push ( '--remote' ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
args . push ( pattern ) ;
const output = yield this . execGit ( args ) ;
return ! ! output . stdout . trim ( ) ;
} ) ;
}
branchList ( remote ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const result = [ ] ;
// Note, this implementation uses "rev-parse --symbolic-full-name" because the output from
// "branch --list" is more difficult when in a detached HEAD state.
// TODO(https://github.com/actions/checkout/issues/786): this implementation uses
// "rev-parse --symbolic-full-name" because there is a bug
// in Git 2.18 that causes "rev-parse --symbolic" to output symbolic full names. When
// 2.18 is no longer supported, we can switch back to --symbolic.
const args = [ 'rev-parse' , '--symbolic-full-name' ] ;
if ( remote ) {
args . push ( '--remotes=origin' ) ;
2019-12-03 10:28:59 -05:00
}
else {
2023-03-09 17:42:29 +01:00
args . push ( '--branches' ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
const stderr = [ ] ;
const errline = [ ] ;
const stdout = [ ] ;
const stdline = [ ] ;
const listeners = {
stderr : ( data ) => {
stderr . push ( data . toString ( ) ) ;
} ,
errline : ( data ) => {
errline . push ( data . toString ( ) ) ;
} ,
stdout : ( data ) => {
stdout . push ( data . toString ( ) ) ;
} ,
stdline : ( data ) => {
stdline . push ( data . toString ( ) ) ;
}
} ;
// Suppress the output in order to avoid flooding annotations with innocuous errors.
yield this . execGit ( args , false , true , listeners ) ;
core . debug ( ` stderr callback is: ${ stderr } ` ) ;
core . debug ( ` errline callback is: ${ errline } ` ) ;
core . debug ( ` stdout callback is: ${ stdout } ` ) ;
core . debug ( ` stdline callback is: ${ stdline } ` ) ;
for ( let branch of stdline ) {
branch = branch . trim ( ) ;
if ( ! branch ) {
continue ;
}
if ( branch . startsWith ( 'refs/heads/' ) ) {
branch = branch . substring ( 'refs/heads/' . length ) ;
}
else if ( branch . startsWith ( 'refs/remotes/' ) ) {
branch = branch . substring ( 'refs/remotes/' . length ) ;
}
result . push ( branch ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
return result ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2024-02-21 13:56:19 +01:00
disableSparseCheckout ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield this . execGit ( [ 'sparse-checkout' , 'disable' ] ) ;
2024-04-23 22:07:43 +02:00
// Disabling 'sparse-checkout` leaves behind an undesirable side-effect in config (even in a pristine environment).
yield this . tryConfigUnset ( 'extensions.worktreeConfig' , false ) ;
2024-02-21 13:56:19 +01:00
} ) ;
}
2023-06-09 15:08:21 +02:00
sparseCheckout ( sparseCheckout ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield this . execGit ( [ 'sparse-checkout' , 'set' , ... sparseCheckout ] ) ;
} ) ;
}
sparseCheckoutNonConeMode ( sparseCheckout ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield this . execGit ( [ 'config' , 'core.sparseCheckout' , 'true' ] ) ;
const output = yield this . execGit ( [
'rev-parse' ,
'--git-path' ,
'info/sparse-checkout'
] ) ;
const sparseCheckoutPath = path . join ( this . workingDirectory , output . stdout . trimRight ( ) ) ;
yield fs . promises . appendFile ( sparseCheckoutPath , ` \n ${ sparseCheckout . join ( '\n' ) } \n ` ) ;
} ) ;
}
2023-03-09 17:42:29 +01:00
checkout ( ref , startPoint ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'checkout' , '--progress' , '--force' ] ;
if ( startPoint ) {
args . push ( '-B' , ref , startPoint ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
else {
args . push ( ref ) ;
}
yield this . execGit ( args ) ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
checkoutDetach ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'checkout' , '--detach' ] ;
yield this . execGit ( args ) ;
} ) ;
}
config ( configKey , configValue , globalConfig , add ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'config' , globalConfig ? '--global' : '--local' ] ;
if ( add ) {
args . push ( '--add' ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
args . push ( ... [ configKey , configValue ] ) ;
yield this . execGit ( args ) ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
configExists ( configKey , globalConfig ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const pattern = regexpHelper . escape ( configKey ) ;
const output = yield this . execGit ( [
'config' ,
globalConfig ? '--global' : '--local' ,
'--name-only' ,
'--get-regexp' ,
pattern
] , true ) ;
return output . exitCode === 0 ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-06-09 15:08:21 +02:00
fetch ( refSpec , options ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ '-c' , 'protocol.version=2' , 'fetch' ] ;
2023-08-16 22:34:54 +02:00
if ( ! refSpec . some ( x => x === refHelper . tagsRefSpec ) && ! options . fetchTags ) {
2023-03-09 17:42:29 +01:00
args . push ( '--no-tags' ) ;
2019-12-03 10:28:59 -05:00
}
2023-09-01 14:19:18 -04:00
args . push ( '--prune' , '--no-recurse-submodules' ) ;
if ( options . showProgress ) {
args . push ( '--progress' ) ;
}
2023-06-09 15:08:21 +02:00
if ( options . filter ) {
args . push ( ` --filter= ${ options . filter } ` ) ;
}
if ( options . fetchDepth && options . fetchDepth > 0 ) {
args . push ( ` --depth= ${ options . fetchDepth } ` ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
else if ( fshelper . fileExistsSync ( path . join ( this . workingDirectory , '.git' , 'shallow' ) ) ) {
args . push ( '--unshallow' ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
args . push ( 'origin' ) ;
for ( const arg of refSpec ) {
args . push ( arg ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
const that = this ;
yield retryHelper . execute ( ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield that . execGit ( args ) ;
} ) ) ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
getDefaultBranch ( repositoryUrl ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let output ;
yield retryHelper . execute ( ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
output = yield this . execGit ( [
'ls-remote' ,
'--quiet' ,
'--exit-code' ,
'--symref' ,
repositoryUrl ,
'HEAD'
] ) ;
} ) ) ;
if ( output ) {
// Satisfy compiler, will always be set
for ( let line of output . stdout . trim ( ) . split ( '\n' ) ) {
line = line . trim ( ) ;
if ( line . startsWith ( 'ref:' ) || line . endsWith ( 'HEAD' ) ) {
return line
. substr ( 'ref:' . length , line . length - 'ref:' . length - 'HEAD' . length )
. trim ( ) ;
}
}
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
throw new Error ( 'Unexpected output when retrieving default branch' ) ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
getWorkingDirectory ( ) {
return this . workingDirectory ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
init ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield this . execGit ( [ 'init' , this . workingDirectory ] ) ;
} ) ;
}
isDetached ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Note, "branch --show-current" would be simpler but isn't available until Git 2.22
const output = yield this . execGit ( [ 'rev-parse' , '--symbolic-full-name' , '--verify' , '--quiet' , 'HEAD' ] , true ) ;
return ! output . stdout . trim ( ) . startsWith ( 'refs/heads/' ) ;
} ) ;
}
lfsFetch ( ref ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'lfs' , 'fetch' , 'origin' , ref ] ;
const that = this ;
yield retryHelper . execute ( ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield that . execGit ( args ) ;
} ) ) ;
} ) ;
}
lfsInstall ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield this . execGit ( [ 'lfs' , 'install' , '--local' ] ) ;
} ) ;
}
log1 ( format ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-08-16 22:34:54 +02:00
const args = format ? [ 'log' , '-1' , format ] : [ 'log' , '-1' ] ;
const silent = format ? false : true ;
2023-03-09 17:42:29 +01:00
const output = yield this . execGit ( args , false , silent ) ;
return output . stdout ;
} ) ;
}
remoteAdd ( remoteName , remoteUrl ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield this . execGit ( [ 'remote' , 'add' , remoteName , remoteUrl ] ) ;
} ) ;
}
removeEnvironmentVariable ( name ) {
delete this . gitEnv [ name ] ;
2019-12-03 10:28:59 -05:00
}
/ * *
2023-03-09 17:42:29 +01:00
* Resolves a ref to a SHA . For a branch or lightweight tag , the commit SHA is returned .
* For an annotated tag , the tag SHA is returned .
* @ param { string } ref For example : 'refs/heads/main' or '/refs/tags/v1'
* @ returns { Promise < string > }
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
revParse ( ref ) {
2019-12-03 10:28:59 -05:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-03-09 17:42:29 +01:00
const output = yield this . execGit ( [ 'rev-parse' , ref ] ) ;
return output . stdout . trim ( ) ;
2019-12-03 10:28:59 -05:00
} ) ;
}
2023-03-09 17:42:29 +01:00
setEnvironmentVariable ( name , value ) {
this . gitEnv [ name ] = value ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
shaExists ( sha ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'rev-parse' , '--verify' , '--quiet' , ` ${ sha } ^{object} ` ] ;
const output = yield this . execGit ( args , true ) ;
return output . exitCode === 0 ;
} ) ;
}
submoduleForeach ( command , recursive ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'submodule' , 'foreach' ] ;
if ( recursive ) {
args . push ( '--recursive' ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
args . push ( command ) ;
const output = yield this . execGit ( args ) ;
return output . stdout ;
} ) ;
}
submoduleSync ( recursive ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ 'submodule' , 'sync' ] ;
if ( recursive ) {
args . push ( '--recursive' ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
yield this . execGit ( args ) ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
submoduleUpdate ( fetchDepth , recursive ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const args = [ '-c' , 'protocol.version=2' ] ;
args . push ( 'submodule' , 'update' , '--init' , '--force' ) ;
if ( fetchDepth > 0 ) {
args . push ( ` --depth= ${ fetchDepth } ` ) ;
}
if ( recursive ) {
args . push ( '--recursive' ) ;
}
yield this . execGit ( args ) ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-04-14 03:26:47 -07:00
submoduleStatus ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const output = yield this . execGit ( [ 'submodule' , 'status' ] , true ) ;
core . debug ( output . stdout ) ;
return output . exitCode === 0 ;
} ) ;
}
2023-03-09 17:42:29 +01:00
tagExists ( pattern ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const output = yield this . execGit ( [ 'tag' , '--list' , pattern ] ) ;
return ! ! output . stdout . trim ( ) ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
tryClean ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const output = yield this . execGit ( [ 'clean' , '-ffdx' ] , true ) ;
return output . exitCode === 0 ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
tryConfigUnset ( configKey , globalConfig ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const output = yield this . execGit ( [
'config' ,
globalConfig ? '--global' : '--local' ,
'--unset-all' ,
configKey
] , true ) ;
return output . exitCode === 0 ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
tryDisableAutomaticGarbageCollection ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const output = yield this . execGit ( [ 'config' , '--local' , 'gc.auto' , '0' ] , true ) ;
return output . exitCode === 0 ;
} ) ;
}
tryGetFetchUrl ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const output = yield this . execGit ( [ 'config' , '--local' , '--get' , 'remote.origin.url' ] , true ) ;
if ( output . exitCode !== 0 ) {
return '' ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
const stdout = output . stdout . trim ( ) ;
if ( stdout . includes ( '\n' ) ) {
return '' ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
return stdout ;
} ) ;
}
tryReset ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const output = yield this . execGit ( [ 'reset' , '--hard' , 'HEAD' ] , true ) ;
return output . exitCode === 0 ;
} ) ;
}
2024-03-14 15:40:14 +01:00
version ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return this . gitVersion ;
} ) ;
}
2023-06-09 15:08:21 +02:00
static createCommandManager ( workingDirectory , lfs , doSparseCheckout ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const result = new GitCommandManager ( ) ;
2023-06-09 15:08:21 +02:00
yield result . initializeCommandManager ( workingDirectory , lfs , doSparseCheckout ) ;
2023-03-09 17:42:29 +01:00
return result ;
} ) ;
}
2024-04-24 12:04:10 -04:00
execGit ( args _1 ) {
return _ _awaiter ( this , arguments , void 0 , function * ( args , allowAllExitCodes = false , silent = false , customListeners = { } ) {
2023-03-09 17:42:29 +01:00
fshelper . directoryExistsSync ( this . workingDirectory , true ) ;
const result = new GitOutput ( ) ;
const env = { } ;
for ( const key of Object . keys ( process . env ) ) {
env [ key ] = process . env [ key ] ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
for ( const key of Object . keys ( this . gitEnv ) ) {
env [ key ] = this . gitEnv [ key ] ;
}
const defaultListener = {
stdout : ( data ) => {
stdout . push ( data . toString ( ) ) ;
}
} ;
const mergedListeners = Object . assign ( Object . assign ( { } , defaultListener ) , customListeners ) ;
const stdout = [ ] ;
const options = {
cwd : this . workingDirectory ,
env ,
silent ,
ignoreReturnCode : allowAllExitCodes ,
listeners : mergedListeners
} ;
result . exitCode = yield exec . exec ( ` " ${ this . gitPath } " ` , args , options ) ;
result . stdout = stdout . join ( '' ) ;
core . debug ( result . exitCode . toString ( ) ) ;
core . debug ( result . stdout ) ;
return result ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-06-09 15:08:21 +02:00
initializeCommandManager ( workingDirectory , lfs , doSparseCheckout ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
this . workingDirectory = workingDirectory ;
// Git-lfs will try to pull down assets if any of the local/user/system setting exist.
// If the user didn't enable `LFS` in their pipeline definition, disable LFS fetch/checkout.
this . lfs = lfs ;
if ( ! this . lfs ) {
this . gitEnv [ 'GIT_LFS_SKIP_SMUDGE' ] = '1' ;
}
this . gitPath = yield io . which ( 'git' , true ) ;
// Git version
core . debug ( 'Getting git version' ) ;
2024-03-14 15:40:14 +01:00
this . gitVersion = new git _version _1 . GitVersion ( ) ;
2023-03-09 17:42:29 +01:00
let gitOutput = yield this . execGit ( [ 'version' ] ) ;
let stdout = gitOutput . stdout . trim ( ) ;
if ( ! stdout . includes ( '\n' ) ) {
const match = stdout . match ( /\d+\.\d+(\.\d+)?/ ) ;
if ( match ) {
2024-03-14 15:40:14 +01:00
this . gitVersion = new git _version _1 . GitVersion ( match [ 0 ] ) ;
2023-03-09 17:42:29 +01:00
}
}
2024-03-14 15:40:14 +01:00
if ( ! this . gitVersion . isValid ( ) ) {
2023-03-09 17:42:29 +01:00
throw new Error ( 'Unable to determine git version' ) ;
}
// Minimum git version
2024-03-14 15:40:14 +01:00
if ( ! this . gitVersion . checkMinimum ( exports . MinimumGitVersion ) ) {
throw new Error ( ` Minimum required git version is ${ exports . MinimumGitVersion } . Your git (' ${ this . gitPath } ') is ${ this . gitVersion } ` ) ;
2023-03-09 17:42:29 +01:00
}
if ( this . lfs ) {
// Git-lfs version
core . debug ( 'Getting git-lfs version' ) ;
let gitLfsVersion = new git _version _1 . GitVersion ( ) ;
const gitLfsPath = yield io . which ( 'git-lfs' , true ) ;
gitOutput = yield this . execGit ( [ 'lfs' , 'version' ] ) ;
stdout = gitOutput . stdout . trim ( ) ;
if ( ! stdout . includes ( '\n' ) ) {
const match = stdout . match ( /\d+\.\d+(\.\d+)?/ ) ;
if ( match ) {
gitLfsVersion = new git _version _1 . GitVersion ( match [ 0 ] ) ;
}
}
if ( ! gitLfsVersion . isValid ( ) ) {
throw new Error ( 'Unable to determine git-lfs version' ) ;
}
// Minimum git-lfs version
// Note:
// - Auth header not supported before 2.1
const minimumGitLfsVersion = new git _version _1 . GitVersion ( '2.1' ) ;
if ( ! gitLfsVersion . checkMinimum ( minimumGitLfsVersion ) ) {
throw new Error ( ` Minimum required git-lfs version is ${ minimumGitLfsVersion } . Your git-lfs (' ${ gitLfsPath } ') is ${ gitLfsVersion } ` ) ;
}
}
2023-06-09 15:08:21 +02:00
this . doSparseCheckout = doSparseCheckout ;
if ( this . doSparseCheckout ) {
2024-03-14 15:40:14 +01:00
if ( ! this . gitVersion . checkMinimum ( exports . MinimumGitSparseCheckoutVersion ) ) {
throw new Error ( ` Minimum Git version required for sparse checkout is ${ exports . MinimumGitSparseCheckoutVersion } . Your git (' ${ this . gitPath } ') is ${ this . gitVersion } ` ) ;
2023-06-09 15:08:21 +02:00
}
}
2023-03-09 17:42:29 +01:00
// Set the user agent
2024-03-14 15:40:14 +01:00
const gitHttpUserAgent = ` git/ ${ this . gitVersion } (github-actions-checkout) ` ;
2023-03-09 17:42:29 +01:00
core . debug ( ` Set git useragent to: ${ gitHttpUserAgent } ` ) ;
this . gitEnv [ 'GIT_HTTP_USER_AGENT' ] = gitHttpUserAgent ;
} ) ;
2019-12-03 10:28:59 -05:00
}
}
2023-03-09 17:42:29 +01:00
// Result holder for execGit: the captured stdout and the process exit code.
class GitOutput {
    constructor() {
        this.stdout = '';
        this.exitCode = 0;
    }
}
2021-10-19 10:05:28 -05:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 8609 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2021-10-19 10:05:28 -05:00
"use strict" ;
2023-03-09 17:42:29 +01:00
// tsc-emitted CommonJS/ES-module interop helpers for this bundled module.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CommonJS module so `import * as ns` semantics work.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Down-leveled async/await runtime helper.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareExistingDirectory = prepareExistingDirectory;
const assert = _ _importStar ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const fsHelper = _ _importStar ( _ _nccwpck _require _ _ ( 7219 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
// Prepares an existing directory for checkout: reuses the repository when the
// fetch URL matches and the working tree can be cleaned/reset; otherwise marks
// the directory contents for deletion so the repository is recreated.
function prepareExistingDirectory(git, repositoryPath, repositoryUrl, clean, ref) {
    return __awaiter(this, void 0, void 0, function* () {
        var _a;
        assert.ok(repositoryPath, 'Expected repositoryPath to be defined');
        assert.ok(repositoryUrl, 'Expected repositoryUrl to be defined');
        // Indicates whether to delete the directory contents
        let remove = false;
        // Check whether using git or REST API
        if (!git) {
            remove = true;
        }
        // Fetch URL does not match
        else if (!fsHelper.directoryExistsSync(path.join(repositoryPath, '.git')) ||
            repositoryUrl !== (yield git.tryGetFetchUrl())) {
            remove = true;
        }
        else {
            // Delete any index.lock and shallow.lock left by a previously canceled run or crashed git process
            const lockPaths = [
                path.join(repositoryPath, '.git', 'index.lock'),
                path.join(repositoryPath, '.git', 'shallow.lock')
            ];
            for (const lockPath of lockPaths) {
                try {
                    yield io.rmRF(lockPath);
                }
                catch (error) {
                    // Best effort: a leftover lock that cannot be deleted is logged, not fatal.
                    core.debug(`Unable to delete '${lockPath}'. ${(_a = error === null || error === void 0 ? void 0 : error.message) !== null && _a !== void 0 ? _a : error}`);
                }
            }
            try {
                core.startGroup('Removing previously created refs, to avoid conflicts');
                // Checkout detached HEAD
                if (!(yield git.isDetached())) {
                    yield git.checkoutDetach();
                }
                // Remove all refs/heads/*
                let branches = yield git.branchList(false);
                for (const branch of branches) {
                    yield git.branchDelete(false, branch);
                }
                // Remove any conflicting refs/remotes/origin/*
                // Example 1: Consider ref is refs/heads/foo and previously fetched refs/remotes/origin/foo/bar
                // Example 2: Consider ref is refs/heads/foo/bar and previously fetched refs/remotes/origin/foo
                if (ref) {
                    ref = ref.startsWith('refs/') ? ref : `refs/heads/${ref}`;
                    if (ref.startsWith('refs/heads/')) {
                        const upperName1 = ref.toUpperCase().substr('REFS/HEADS/'.length);
                        const upperName1Slash = `${upperName1}/`;
                        branches = yield git.branchList(true);
                        for (const branch of branches) {
                            const upperName2 = branch.substr('origin/'.length).toUpperCase();
                            const upperName2Slash = `${upperName2}/`;
                            if (upperName1.startsWith(upperName2Slash) ||
                                upperName2.startsWith(upperName1Slash)) {
                                yield git.branchDelete(true, branch);
                            }
                        }
                    }
                }
                core.endGroup();
                // Check for submodules and delete any existing files if submodules are present
                if (!(yield git.submoduleStatus())) {
                    remove = true;
                    core.info('Bad Submodules found, removing existing files');
                }
                // Clean
                if (clean) {
                    core.startGroup('Cleaning the repository');
                    if (!(yield git.tryClean())) {
                        core.debug(`The clean command failed. This might be caused by: 1) path too long, 2) permission issue, or 3) file in use. For further investigation, manually run 'git clean -ffdx' on the directory '${repositoryPath}'.`);
                        remove = true;
                    }
                    else if (!(yield git.tryReset())) {
                        remove = true;
                    }
                    core.endGroup();
                    if (remove) {
                        core.warning(`Unable to clean or reset the repository. The repository will be recreated instead.`);
                    }
                }
            }
            catch (error) {
                core.warning(`Unable to prepare the existing repository. The repository will be recreated instead.`);
                remove = true;
            }
        }
        if (remove) {
            // Delete the contents of the directory. Don't delete the directory itself
            // since it might be the current working directory.
            core.info(`Deleting the contents of '${repositoryPath}'`);
            for (const file of yield fs.promises.readdir(repositoryPath)) {
                yield io.rmRF(path.join(repositoryPath, file));
            }
        }
    });
}
2022-10-03 18:04:49 +01:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 9210 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2022-10-03 18:04:49 +01:00
"use strict" ;
2023-03-09 17:42:29 +01:00
// tsc-emitted CommonJS/ES-module interop helpers for this bundled module.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CommonJS module so `import * as ns` semantics work.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Down-leveled async/await runtime helper.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getSource = getSource;
exports.cleanup = cleanup;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const fsHelper = _ _importStar ( _ _nccwpck _require _ _ ( 7219 ) ) ;
const gitAuthHelper = _ _importStar ( _ _nccwpck _require _ _ ( 2565 ) ) ;
const gitCommandManager = _ _importStar ( _ _nccwpck _require _ _ ( 738 ) ) ;
const gitDirectoryHelper = _ _importStar ( _ _nccwpck _require _ _ ( 8609 ) ) ;
const githubApiHelper = _ _importStar ( _ _nccwpck _require _ _ ( 138 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const refHelper = _ _importStar ( _ _nccwpck _require _ _ ( 8601 ) ) ;
2024-04-24 12:04:10 -04:00
const stateHelper = _ _importStar ( _ _nccwpck _require _ _ ( 4866 ) ) ;
2023-03-09 17:42:29 +01:00
const urlHelper = _ _importStar ( _ _nccwpck _require _ _ ( 9437 ) ) ;
2024-03-14 15:40:14 +01:00
const git _command _manager _1 = _ _nccwpck _require _ _ ( 738 ) ;
2023-03-09 17:42:29 +01:00
// Orchestrates syncing the repository to disk per the supplied settings:
// prepares/creates the directory, configures auth, fetches (falling back to
// the GitHub REST API when git is unavailable), sets up sparse checkout and
// LFS when requested, checks out the ref, and handles submodules. Auth is
// removed in the finally block unless persistCredentials is set.
function getSource(settings) {
    return __awaiter(this, void 0, void 0, function* () {
        // Repository URL
        core.info(`Syncing repository: ${settings.repositoryOwner}/${settings.repositoryName}`);
        const repositoryUrl = urlHelper.getFetchUrl(settings);
        // Remove conflicting file path
        if (fsHelper.fileExistsSync(settings.repositoryPath)) {
            yield io.rmRF(settings.repositoryPath);
        }
        // Create directory
        let isExisting = true;
        if (!fsHelper.directoryExistsSync(settings.repositoryPath)) {
            isExisting = false;
            yield io.mkdirP(settings.repositoryPath);
        }
        // Git command manager
        core.startGroup('Getting Git version info');
        const git = yield getGitCommandManager(settings);
        core.endGroup();
        let authHelper = null;
        try {
            if (git) {
                authHelper = gitAuthHelper.createAuthHelper(git, settings);
                if (settings.setSafeDirectory) {
                    // Setup the repository path as a safe directory, so if we pass this into a container job with a different user it doesn't fail
                    // Otherwise all git commands we run in a container fail
                    yield authHelper.configureTempGlobalConfig();
                    core.info(`Adding repository directory to the temporary git global config as a safe directory`);
                    yield git
                        .config('safe.directory', settings.repositoryPath, true, true)
                        .catch(error => {
                        core.info(`Failed to initialize safe directory with error: ${error}`);
                    });
                    stateHelper.setSafeDirectory();
                }
            }
            // Prepare existing directory, otherwise recreate
            if (isExisting) {
                yield gitDirectoryHelper.prepareExistingDirectory(git, settings.repositoryPath, repositoryUrl, settings.clean, settings.ref);
            }
            if (!git) {
                // Downloading using REST API
                core.info(`The repository will be downloaded using the GitHub REST API`);
                core.info(`To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH`);
                if (settings.submodules) {
                    throw new Error(`Input 'submodules' not supported when falling back to download using the GitHub REST API. To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH.`);
                }
                else if (settings.sshKey) {
                    throw new Error(`Input 'ssh-key' not supported when falling back to download using the GitHub REST API. To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH.`);
                }
                yield githubApiHelper.downloadRepository(settings.authToken, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.repositoryPath, settings.githubServerUrl);
                return;
            }
            // Save state for POST action
            stateHelper.setRepositoryPath(settings.repositoryPath);
            // Initialize the repository
            if (!fsHelper.directoryExistsSync(path.join(settings.repositoryPath, '.git'))) {
                core.startGroup('Initializing the repository');
                yield git.init();
                yield git.remoteAdd('origin', repositoryUrl);
                core.endGroup();
            }
            // Disable automatic garbage collection
            core.startGroup('Disabling automatic garbage collection');
            if (!(yield git.tryDisableAutomaticGarbageCollection())) {
                core.warning(`Unable to turn off git automatic garbage collection. The git fetch operation may trigger garbage collection and cause a delay.`);
            }
            core.endGroup();
            // If we didn't initialize it above, do it now
            if (!authHelper) {
                authHelper = gitAuthHelper.createAuthHelper(git, settings);
            }
            // Configure auth
            core.startGroup('Setting up auth');
            yield authHelper.configureAuth();
            core.endGroup();
            // Determine the default branch
            if (!settings.ref && !settings.commit) {
                core.startGroup('Determining the default branch');
                if (settings.sshKey) {
                    settings.ref = yield git.getDefaultBranch(repositoryUrl);
                }
                else {
                    settings.ref = yield githubApiHelper.getDefaultBranch(settings.authToken, settings.repositoryOwner, settings.repositoryName, settings.githubServerUrl);
                }
                core.endGroup();
            }
            // LFS install
            if (settings.lfs) {
                yield git.lfsInstall();
            }
            // Fetch
            core.startGroup('Fetching the repository');
            const fetchOptions = {};
            if (settings.filter) {
                fetchOptions.filter = settings.filter;
            }
            else if (settings.sparseCheckout) {
                // Partial clone (no blobs) keeps sparse checkouts small.
                fetchOptions.filter = 'blob:none';
            }
            if (settings.fetchDepth <= 0) {
                // Fetch all branches and tags
                let refSpec = refHelper.getRefSpecForAllHistory(settings.ref, settings.commit);
                yield git.fetch(refSpec, fetchOptions);
                // When all history is fetched, the ref we're interested in may have moved to a different
                // commit (push or force push). If so, fetch again with a targeted refspec.
                if (!(yield refHelper.testRef(git, settings.ref, settings.commit))) {
                    refSpec = refHelper.getRefSpec(settings.ref, settings.commit);
                    yield git.fetch(refSpec, fetchOptions);
                }
            }
            else {
                fetchOptions.fetchDepth = settings.fetchDepth;
                fetchOptions.fetchTags = settings.fetchTags;
                const refSpec = refHelper.getRefSpec(settings.ref, settings.commit);
                yield git.fetch(refSpec, fetchOptions);
            }
            core.endGroup();
            // Checkout info
            core.startGroup('Determining the checkout info');
            const checkoutInfo = yield refHelper.getCheckoutInfo(git, settings.ref, settings.commit);
            core.endGroup();
            // LFS fetch
            // Explicit lfs-fetch to avoid slow checkout (fetches one lfs object at a time).
            // Explicit lfs fetch will fetch lfs objects in parallel.
            // For sparse checkouts, let `checkout` fetch the needed objects lazily.
            if (settings.lfs && !settings.sparseCheckout) {
                core.startGroup('Fetching LFS objects');
                yield git.lfsFetch(checkoutInfo.startPoint || checkoutInfo.ref);
                core.endGroup();
            }
            // Sparse checkout
            if (!settings.sparseCheckout) {
                let gitVersion = yield git.version();
                // no need to disable sparse-checkout if the installed git runtime doesn't even support it.
                if (gitVersion.checkMinimum(git_command_manager_1.MinimumGitSparseCheckoutVersion)) {
                    yield git.disableSparseCheckout();
                }
            }
            else {
                core.startGroup('Setting up sparse checkout');
                if (settings.sparseCheckoutConeMode) {
                    yield git.sparseCheckout(settings.sparseCheckout);
                }
                else {
                    yield git.sparseCheckoutNonConeMode(settings.sparseCheckout);
                }
                core.endGroup();
            }
            // Checkout
            core.startGroup('Checking out the ref');
            yield git.checkout(checkoutInfo.ref, checkoutInfo.startPoint);
            core.endGroup();
            // Submodules
            if (settings.submodules) {
                // Temporarily override global config
                core.startGroup('Setting up auth for fetching submodules');
                yield authHelper.configureGlobalAuth();
                core.endGroup();
                // Checkout submodules
                core.startGroup('Fetching submodules');
                yield git.submoduleSync(settings.nestedSubmodules);
                yield git.submoduleUpdate(settings.fetchDepth, settings.nestedSubmodules);
                yield git.submoduleForeach('git config --local gc.auto 0', settings.nestedSubmodules);
                core.endGroup();
                // Persist credentials
                if (settings.persistCredentials) {
                    core.startGroup('Persisting credentials for submodules');
                    yield authHelper.configureSubmoduleAuth();
                    core.endGroup();
                }
            }
            // Get commit information
            const commitInfo = yield git.log1();
            // Log commit sha
            const commitSHA = yield git.log1('--format=%H');
            core.setOutput('commit', commitSHA.trim());
            // Check for incorrect pull request merge commit
            yield refHelper.checkCommitInfo(settings.authToken, commitInfo, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.githubServerUrl);
        }
        finally {
            // Remove auth
            if (authHelper) {
                if (!settings.persistCredentials) {
                    core.startGroup('Removing auth');
                    yield authHelper.removeAuth();
                    core.endGroup();
                }
                authHelper.removeGlobalConfig();
            }
        }
    });
}
function cleanup(repositoryPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // Bail out unless the path points at an initialized git repository
        if (!repositoryPath ||
            !fsHelper.fileExistsSync(path.join(repositoryPath, '.git', 'config'))) {
            return;
        }
        let gitManager;
        try {
            gitManager = yield gitCommandManager.createCommandManager(repositoryPath, false, false);
        }
        catch (_a) {
            // Git is unavailable; nothing to clean up
            return;
        }
        const auth = gitAuthHelper.createAuthHelper(gitManager);
        try {
            if (stateHelper.PostSetSafeDirectory) {
                // Re-register the repo as a safe directory so the git commands below
                // still work when this post step runs as a different user (e.g. in a container)
                yield auth.configureTempGlobalConfig();
                core.info(`Adding repository directory to the temporary git global config as a safe directory`);
                yield gitManager
                    .config('safe.directory', repositoryPath, true, true)
                    .catch(error => {
                    core.info(`Failed to initialize safe directory with error: ${error}`);
                });
            }
            // Remove the auth configuration written during the main step
            yield auth.removeAuth();
        }
        finally {
            yield auth.removeGlobalConfig();
        }
    });
}
function getGitCommandManager(settings) {
    return __awaiter(this, void 0, void 0, function* () {
        core.info(`Working directory is '${settings.repositoryPath}'`);
        try {
            const sparseCheckoutEnabled = settings.sparseCheckout != null;
            return yield gitCommandManager.createCommandManager(settings.repositoryPath, settings.lfs, sparseCheckoutEnabled);
        }
        catch (error) {
            // LFS has no REST fallback, so a working git is mandatory in that case
            if (settings.lfs) {
                throw error;
            }
            // Signal the caller to fall back to the REST API download path
            return undefined;
        }
    });
}
2022-10-03 18:04:49 +01:00
2021-10-19 10:05:28 -05:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 3142 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . GitVersion = void 0 ;
class GitVersion {
    /**
     * Used for comparing the version of git and git-lfs against the minimum required version
     * @param version the version string, e.g. 1.2 or 1.2.3
     */
    constructor(version) {
        // NaN marks each component as "not parsed"
        this.major = NaN;
        this.minor = NaN;
        this.patch = NaN;
        const match = version ? version.match(/^(\d+)\.(\d+)(\.(\d+))?$/) : null;
        if (match) {
            this.major = Number(match[1]);
            this.minor = Number(match[2]);
            if (match[4]) {
                this.patch = Number(match[4]);
            }
        }
    }
    /**
     * Compares the instance against a minimum required version
     * @param minimum Minimum version
     */
    checkMinimum(minimum) {
        if (!minimum.isValid()) {
            throw new Error('Arg minimum is not a valid version');
        }
        if (this.major < minimum.major) {
            return false; // older major
        }
        if (this.major === minimum.major) {
            if (this.minor < minimum.minor) {
                return false; // same major, older minor
            }
            // Same major.minor: compare patch. A falsy patch (NaN or 0) is
            // treated as satisfying the minimum, matching the parse rules above.
            if (this.minor === minimum.minor &&
                this.patch &&
                this.patch < (minimum.patch || 0)) {
                return false;
            }
        }
        return true;
    }
    /**
     * Indicates whether the instance was constructed from a valid version string
     */
    isValid() {
        return !isNaN(this.major);
    }
    /**
     * Returns the version as a string, e.g. 1.2 or 1.2.3
     */
    toString() {
        if (!this.isValid()) {
            return '';
        }
        let result = `${this.major}.${this.minor}`;
        if (!isNaN(this.patch)) {
            result += `.${this.patch}`;
        }
        return result;
    }
}
exports . GitVersion = GitVersion ;
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
/***/ 138 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2024-04-24 12:04:10 -04:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2023-03-09 17:42:29 +01:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . downloadRepository = downloadRepository ;
exports . getDefaultBranch = getDefaultBranch ;
2023-03-09 17:42:29 +01:00
const assert = _ _importStar ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
2023-04-12 19:55:27 +08:00
const github = _ _importStar ( _ _nccwpck _require _ _ ( 5438 ) ) ;
2023-03-09 17:42:29 +01:00
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const retryHelper = _ _importStar ( _ _nccwpck _require _ _ ( 2155 ) ) ;
const toolCache = _ _importStar ( _ _nccwpck _require _ _ ( 7784 ) ) ;
2024-04-24 12:04:10 -04:00
const uuid _1 = _ _nccwpck _require _ _ ( 5840 ) ;
2023-04-13 14:25:50 +02:00
const url _helper _1 = _ _nccwpck _require _ _ ( 9437 ) ;
2023-03-09 17:42:29 +01:00
const IS _WINDOWS = process . platform === 'win32' ;
function downloadRepository(authToken, owner, repo, ref, commit, repositoryPath, baseUrl) {
    return __awaiter(this, void 0, void 0, function* () {
        // Resolve the default branch when neither a ref nor a commit was supplied
        if (!ref && !commit) {
            core.info('Determining the default branch');
            ref = yield getDefaultBranch(authToken, owner, repo, baseUrl);
        }
        // Download the archive (with retries)
        let archiveData = yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
            core.info('Downloading the archive');
            return yield downloadArchive(authToken, owner, repo, ref, commit, baseUrl);
        }));
        // Write archive to disk
        core.info('Writing archive to disk');
        const uniqueId = (0, uuid_1.v4)();
        const archiveExtension = IS_WINDOWS ? '.zip' : '.tar.gz';
        const archivePath = path.join(repositoryPath, `${uniqueId}${archiveExtension}`);
        yield fs.promises.writeFile(archivePath, archiveData);
        archiveData = Buffer.from(''); // Free memory
        // Extract archive
        core.info('Extracting the archive');
        const extractPath = path.join(repositoryPath, uniqueId);
        yield io.mkdirP(extractPath);
        if (IS_WINDOWS) {
            yield toolCache.extractZip(archivePath, extractPath);
        }
        else {
            yield toolCache.extractTar(archivePath, extractPath);
        }
        yield io.rmRF(archivePath);
        // The archive wraps the repository content in a single top-level folder
        // whose name includes the short SHA
        const extractedEntries = yield fs.promises.readdir(extractPath);
        assert.ok(extractedEntries.length == 1, 'Expected exactly one directory inside archive');
        const archiveVersion = extractedEntries[0];
        core.info(`Resolved version ${archiveVersion}`);
        const stagingPath = path.join(extractPath, archiveVersion);
        // Move the files into the repository path
        for (const entryName of yield fs.promises.readdir(stagingPath)) {
            const from = path.join(stagingPath, entryName);
            const to = path.join(repositoryPath, entryName);
            if (IS_WINDOWS) {
                // Copy on Windows (Windows Defender may have a lock)
                yield io.cp(from, to, { recursive: true });
            }
            else {
                yield io.mv(from, to);
            }
        }
        yield io.rmRF(extractPath);
    });
}
/**
 * Looks up the default branch name
 */
function getDefaultBranch(authToken, owner, repo, baseUrl) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
            core.info('Retrieving the default branch name');
            const octokit = github.getOctokit(authToken, {
                baseUrl: (0, url_helper_1.getServerApiUrl)(baseUrl)
            });
            let result;
            try {
                // Get the default branch from the repo info
                const response = yield octokit.rest.repos.get({ owner, repo });
                result = response.data.default_branch;
                assert.ok(result, 'default_branch cannot be empty');
            }
            catch (err) {
                // The API 404s for .wiki repos; assume their conventional default branch
                const wikiNotFound = (err === null || err === void 0 ? void 0 : err.status) === 404 &&
                    repo.toUpperCase().endsWith('.WIKI');
                if (!wikiNotFound) {
                    throw err;
                }
                result = 'master';
            }
            // Print the default branch
            core.info(`Default branch '${result}'`);
            // Qualify with 'refs/heads' when needed
            if (!result.startsWith('refs/')) {
                result = `refs/heads/${result}`;
            }
            return result;
        }));
    });
}
function downloadArchive(authToken, owner, repo, ref, commit, baseUrl) {
    return __awaiter(this, void 0, void 0, function* () {
        const octokit = github.getOctokit(authToken, {
            baseUrl: (0, url_helper_1.getServerApiUrl)(baseUrl)
        });
        // Zipball on Windows, tarball everywhere else
        const download = IS_WINDOWS
            ? octokit.rest.repos.downloadZipballArchive
            : octokit.rest.repos.downloadTarballArchive;
        // Prefer the exact commit when known; otherwise use the ref
        const params = { owner: owner, repo: repo, ref: commit || ref };
        const response = yield download(params);
        return Buffer.from(response.data); // response.data is ArrayBuffer
    });
}
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
/***/ 5480 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2024-04-24 12:04:10 -04:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2023-03-09 17:42:29 +01:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . getInputs = getInputs ;
2023-03-09 17:42:29 +01:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const fsHelper = _ _importStar ( _ _nccwpck _require _ _ ( 7219 ) ) ;
const github = _ _importStar ( _ _nccwpck _require _ _ ( 5438 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const workflowContextHelper = _ _importStar ( _ _nccwpck _require _ _ ( 9568 ) ) ;
function getInputs() {
    return __awaiter(this, void 0, void 0, function* () {
        // Reads a boolean-ish action input; an empty input falls back to defaultValue
        const readFlag = (name, defaultValue) => (core.getInput(name) || defaultValue).toUpperCase() === 'TRUE';
        const result = {};
        // GitHub workspace
        let githubWorkspacePath = process.env['GITHUB_WORKSPACE'];
        if (!githubWorkspacePath) {
            throw new Error('GITHUB_WORKSPACE not defined');
        }
        githubWorkspacePath = path.resolve(githubWorkspacePath);
        core.debug(`GITHUB_WORKSPACE = '${githubWorkspacePath}'`);
        fsHelper.directoryExistsSync(githubWorkspacePath, true);
        // Qualified repository
        const qualifiedRepository = core.getInput('repository') ||
            `${github.context.repo.owner}/${github.context.repo.repo}`;
        core.debug(`qualified repository = '${qualifiedRepository}'`);
        const repoParts = qualifiedRepository.split('/');
        if (repoParts.length !== 2 || !repoParts[0] || !repoParts[1]) {
            throw new Error(`Invalid repository '${qualifiedRepository}'. Expected format {owner}/{repo}.`);
        }
        result.repositoryOwner = repoParts[0];
        result.repositoryName = repoParts[1];
        // Repository path (must resolve inside the workspace)
        result.repositoryPath = core.getInput('path') || '.';
        result.repositoryPath = path.resolve(githubWorkspacePath, result.repositoryPath);
        if (!(result.repositoryPath + path.sep).startsWith(githubWorkspacePath + path.sep)) {
            throw new Error(`Repository path '${result.repositoryPath}' is not under '${githubWorkspacePath}'`);
        }
        // Is this the repository that triggered the workflow?
        const isWorkflowRepository = qualifiedRepository.toUpperCase() ===
            `${github.context.repo.owner}/${github.context.repo.repo}`.toUpperCase();
        // Source branch, source version
        result.ref = core.getInput('ref');
        if (!result.ref) {
            if (isWorkflowRepository) {
                result.ref = github.context.ref;
                result.commit = github.context.sha;
                // Some events have an unqualifed ref. For example when a PR is merged (pull_request closed event),
                // the ref is unqualifed like "main" instead of "refs/heads/main".
                if (result.commit && result.ref && !result.ref.startsWith('refs/')) {
                    result.ref = `refs/heads/${result.ref}`;
                }
            }
        }
        else if (result.ref.match(/^[0-9a-fA-F]{40}$/)) {
            // A 40-char hex ref is actually a commit SHA
            result.commit = result.ref;
            result.ref = '';
        }
        core.debug(`ref = '${result.ref}'`);
        core.debug(`commit = '${result.commit}'`);
        // Clean
        result.clean = readFlag('clean', 'true');
        core.debug(`clean = ${result.clean}`);
        // Filter
        const filter = core.getInput('filter');
        if (filter) {
            result.filter = filter;
        }
        core.debug(`filter = ${result.filter}`);
        // Sparse checkout
        const sparseCheckout = core.getMultilineInput('sparse-checkout');
        if (sparseCheckout.length) {
            result.sparseCheckout = sparseCheckout;
            core.debug(`sparse checkout = ${result.sparseCheckout}`);
        }
        result.sparseCheckoutConeMode = readFlag('sparse-checkout-cone-mode', 'true');
        // Fetch depth (0 means full history)
        result.fetchDepth = Math.floor(Number(core.getInput('fetch-depth') || '1'));
        if (isNaN(result.fetchDepth) || result.fetchDepth < 0) {
            result.fetchDepth = 0;
        }
        core.debug(`fetch depth = ${result.fetchDepth}`);
        // Fetch tags
        result.fetchTags = readFlag('fetch-tags', 'false');
        core.debug(`fetch tags = ${result.fetchTags}`);
        // Show fetch progress
        result.showProgress = readFlag('show-progress', 'true');
        core.debug(`show progress = ${result.showProgress}`);
        // LFS
        result.lfs = readFlag('lfs', 'false');
        core.debug(`lfs = ${result.lfs}`);
        // Submodules
        result.submodules = false;
        result.nestedSubmodules = false;
        const submodulesString = (core.getInput('submodules') || '').toUpperCase();
        if (submodulesString == 'RECURSIVE') {
            result.submodules = true;
            result.nestedSubmodules = true;
        }
        else if (submodulesString == 'TRUE') {
            result.submodules = true;
        }
        core.debug(`submodules = ${result.submodules}`);
        core.debug(`recursive submodules = ${result.nestedSubmodules}`);
        // Auth token
        result.authToken = core.getInput('token', { required: true });
        // SSH
        result.sshKey = core.getInput('ssh-key');
        result.sshKnownHosts = core.getInput('ssh-known-hosts');
        result.sshStrict = readFlag('ssh-strict', 'true');
        result.sshUser = core.getInput('ssh-user');
        // Persist credentials
        result.persistCredentials = readFlag('persist-credentials', 'false');
        // Workflow organization ID
        result.workflowOrganizationId =
            yield workflowContextHelper.getOrganizationId();
        // Set safe.directory in git global config.
        result.setSafeDirectory = readFlag('set-safe-directory', 'true');
        // Determine the GitHub URL that the repository is being hosted from
        result.githubServerUrl = core.getInput('github-server-url');
        core.debug(`GitHub Host URL = ${result.githubServerUrl}`);
        return result;
    });
}
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
/***/ 3109 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2021-10-19 10:05:28 -05:00
2023-03-09 17:42:29 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2024-04-24 12:04:10 -04:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2023-03-09 17:42:29 +01:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const coreCommand = _ _importStar ( _ _nccwpck _require _ _ ( 7351 ) ) ;
const gitSourceProvider = _ _importStar ( _ _nccwpck _require _ _ ( 9210 ) ) ;
const inputHelper = _ _importStar ( _ _nccwpck _require _ _ ( 5480 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
2024-04-24 12:04:10 -04:00
const stateHelper = _ _importStar ( _ _nccwpck _require _ _ ( 4866 ) ) ;
2023-03-09 17:42:29 +01:00
function run() {
    return __awaiter(this, void 0, void 0, function* () {
        var _a;
        try {
            const sourceSettings = yield inputHelper.getInputs();
            try {
                // Register problem matcher
                coreCommand.issueCommand('add-matcher', {}, path.join(__dirname, 'problem-matcher.json'));
                // Get sources
                yield gitSourceProvider.getSource(sourceSettings);
                core.setOutput('ref', sourceSettings.ref);
            }
            finally {
                // Unregister problem matcher
                coreCommand.issueCommand('remove-matcher', { owner: 'checkout-git' }, '');
            }
        }
        catch (error) {
            // Prefer the error message; fall back to the raw error value
            const failure = (_a = error === null || error === void 0 ? void 0 : error.message) !== null && _a !== void 0 ? _a : error;
            core.setFailed(`${failure}`);
        }
    });
}
2023-03-09 17:42:29 +01:00
function cleanup() {
    return __awaiter(this, void 0, void 0, function* () {
        var _a;
        try {
            yield gitSourceProvider.cleanup(stateHelper.RepositoryPath);
        }
        catch (error) {
            // Post-job cleanup failures are reported as warnings, not failures
            const message = (_a = error === null || error === void 0 ? void 0 : error.message) !== null && _a !== void 0 ? _a : error;
            core.warning(`${message}`);
        }
    });
}
2023-03-09 17:42:29 +01:00
// Entry point: the main step runs the checkout; the post step runs cleanup
if (stateHelper.IsPost) {
    cleanup();
}
else {
    run();
}
2019-12-03 10:28:59 -05:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 8601 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2019-12-03 10:28:59 -05:00
"use strict" ;
2023-03-09 17:42:29 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2024-04-24 12:04:10 -04:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2023-03-09 17:42:29 +01:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
2019-12-03 10:28:59 -05:00
} ;
2023-03-09 17:42:29 +01:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
2019-12-03 10:28:59 -05:00
} ;
2023-03-09 17:42:29 +01:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . tagsRefSpec = void 0 ;
exports . getCheckoutInfo = getCheckoutInfo ;
exports . getRefSpecForAllHistory = getRefSpecForAllHistory ;
exports . getRefSpec = getRefSpec ;
exports . testRef = testRef ;
exports . checkCommitInfo = checkCommitInfo ;
2023-03-09 17:42:29 +01:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const github = _ _importStar ( _ _nccwpck _require _ _ ( 5438 ) ) ;
const url _helper _1 = _ _nccwpck _require _ _ ( 9437 ) ;
exports . tagsRefSpec = '+refs/tags/*:refs/tags/*' ;
function getCheckoutInfo(git, ref, commit) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!git) {
            throw new Error('Arg git cannot be empty');
        }
        if (!ref && !commit) {
            throw new Error('Args ref and commit cannot both be empty');
        }
        const result = {};
        const upperRef = (ref || '').toUpperCase();
        if (!ref) {
            // SHA only
            result.ref = commit;
        }
        else if (upperRef.startsWith('REFS/HEADS/')) {
            // refs/heads/ -> check out the branch from the remote-tracking ref
            const branch = ref.substring('refs/heads/'.length);
            result.ref = branch;
            result.startPoint = `refs/remotes/origin/${branch}`;
        }
        else if (upperRef.startsWith('REFS/PULL/')) {
            // refs/pull/
            const branch = ref.substring('refs/pull/'.length);
            result.ref = `refs/remotes/pull/${branch}`;
        }
        else if (upperRef.startsWith('REFS/TAGS/')) {
            // refs/tags/
            result.ref = ref;
        }
        else if (upperRef.startsWith('REFS/')) {
            // Other fully-qualified refs: prefer the commit when known
            result.ref = commit ? commit : ref;
        }
        else {
            // Unqualified ref, check for a matching branch or tag
            if (yield git.branchExists(true, `origin/${ref}`)) {
                result.ref = ref;
                result.startPoint = `refs/remotes/origin/${ref}`;
            }
            else if (yield git.tagExists(`${ref}`)) {
                result.ref = `refs/tags/${ref}`;
            }
            else {
                throw new Error(`A branch or tag with the name '${ref}' could not be found`);
            }
        }
        return result;
    });
}
2023-03-09 17:42:29 +01:00
function getRefSpecForAllHistory(ref, commit) {
    // Always fetch every branch and every tag
    const refSpecs = ['+refs/heads/*:refs/remotes/origin/*', exports.tagsRefSpec];
    if (ref && ref.toUpperCase().startsWith('REFS/PULL/')) {
        // Also fetch the PR ref (by commit when known) into refs/remotes/pull/
        const pullName = ref.substring('refs/pull/'.length);
        refSpecs.push(`+${commit || ref}:refs/remotes/pull/${pullName}`);
    }
    return refSpecs;
}
function getRefSpec(ref, commit) {
    if (!ref && !commit) {
        throw new Error('Args ref and commit cannot both be empty');
    }
    const refUpper = (ref || '').toUpperCase();
    // When the commit is known, fetch it directly into the matching destination ref
    if (commit) {
        if (refUpper.startsWith('REFS/HEADS/')) {
            return [`+${commit}:refs/remotes/origin/${ref.substring('refs/heads/'.length)}`];
        }
        if (refUpper.startsWith('REFS/PULL/')) {
            return [`+${commit}:refs/remotes/pull/${ref.substring('refs/pull/'.length)}`];
        }
        if (refUpper.startsWith('REFS/TAGS/')) {
            return [`+${commit}:${ref}`];
        }
        // Otherwise no destination ref
        return [commit];
    }
    // Unqualified ref: fetch any matching branch or tag
    if (!refUpper.startsWith('REFS/')) {
        return [
            `+refs/heads/${ref}*:refs/remotes/origin/${ref}*`,
            `+refs/tags/${ref}*:refs/tags/${ref}*`
        ];
    }
    // Fully-qualified ref without a commit
    if (refUpper.startsWith('REFS/HEADS/')) {
        return [`+${ref}:refs/remotes/origin/${ref.substring('refs/heads/'.length)}`];
    }
    if (refUpper.startsWith('REFS/PULL/')) {
        return [`+${ref}:refs/remotes/pull/${ref.substring('refs/pull/'.length)}`];
    }
    // refs/tags/ (and anything else qualified) maps onto itself
    return [`+${ref}:${ref}`];
}
2022-10-03 18:04:49 +01:00
/ * *
2023-03-09 17:42:29 +01:00
* Tests whether the initial fetch created the ref at the expected commit
2022-10-03 18:04:49 +01:00
* /
2023-03-09 17:42:29 +01:00
// Verifies that the earlier fetch actually created the expected ref/commit in
// the local repository; returns false when a corrective re-fetch is needed.
// NOTE(review): __awaiter and core come from module scope above; `git` is
// presumably the command wrapper exposing shaExists/branchExists/tagExists/
// revParse — confirm against the caller.
function testRef ( git , ref , commit ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! git ) {
throw new Error ( 'Arg git cannot be empty' ) ;
}
if ( ! ref && ! commit ) {
throw new Error ( 'Args ref and commit cannot both be empty' ) ;
}
// No SHA? Nothing to test
if ( ! commit ) {
return true ;
}
// SHA only?
else if ( ! ref ) {
return yield git . shaExists ( commit ) ;
}
// Prefix matching below is case-insensitive; the substring offsets assume the
// canonical lowercase 'refs/...' spelling.
const upperRef = ref . toUpperCase ( ) ;
// refs/heads/ — the branch must exist AND point at the expected commit
if ( upperRef . startsWith ( 'REFS/HEADS/' ) ) {
const branch = ref . substring ( 'refs/heads/' . length ) ;
return ( ( yield git . branchExists ( true , ` origin/ ${ branch } ` ) ) &&
commit === ( yield git . revParse ( ` refs/remotes/origin/ ${ branch } ` ) ) ) ;
}
// refs/pull/
else if ( upperRef . startsWith ( 'REFS/PULL/' ) ) {
// Assume matches because fetched using the commit
return true ;
}
// refs/tags/ — the tag must exist AND resolve to the expected commit
else if ( upperRef . startsWith ( 'REFS/TAGS/' ) ) {
const tagName = ref . substring ( 'refs/tags/' . length ) ;
return ( ( yield git . tagExists ( tagName ) ) && commit === ( yield git . revParse ( ref ) ) ) ;
}
// Unexpected ref shape: log and treat as a match (best effort)
else {
core . debug ( ` Unexpected ref format ' ${ ref } ' when testing ref info ` ) ;
return true ;
}
} ) ;
2022-10-03 18:04:49 +01:00
}
2023-03-09 17:42:29 +01:00
// Telemetry probe: for a public pull_request "synchronize" event on the
// workflow repo (not GHES), compares the merge-commit message SHAs against the
// webhook payload and, when the head SHA is stale, reports a tracepoint via
// the user-agent of a repos.get call. Best effort — every early-out and the
// catch only core.debug() and return.
// NOTE(review): the stray VCS date-stamp lines below are artifacts in this
// generated bundle; regenerate dist from source to remove them.
function checkCommitInfo ( token , commitInfo , repositoryOwner , repositoryName , ref , commit , baseUrl ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2024-04-24 12:04:10 -04:00
var _a ;
2023-03-09 17:42:29 +01:00
try {
// GHES?
if ( ( 0 , url _helper _1 . isGhes ) ( baseUrl ) ) {
return ;
}
// Auth token?
if ( ! token ) {
return ;
}
// Public PR synchronize, for workflow repo?
if ( fromPayload ( 'repository.private' ) !== false ||
github . context . eventName !== 'pull_request' ||
fromPayload ( 'action' ) !== 'synchronize' ||
repositoryOwner !== github . context . repo . owner ||
repositoryName !== github . context . repo . repo ||
ref !== github . context . ref ||
! ref . startsWith ( 'refs/pull/' ) ||
commit !== github . context . sha ) {
return ;
}
// Head SHA
const expectedHeadSha = fromPayload ( 'after' ) ;
if ( ! expectedHeadSha ) {
core . debug ( 'Unable to determine head sha' ) ;
return ;
}
// Base SHA
const expectedBaseSha = fromPayload ( 'pull_request.base.sha' ) ;
if ( ! expectedBaseSha ) {
core . debug ( 'Unable to determine base sha' ) ;
return ;
}
// Expected message? (merge commit message embeds both SHAs)
const expectedMessage = ` Merge ${ expectedHeadSha } into ${ expectedBaseSha } ` ;
if ( commitInfo . indexOf ( expectedMessage ) >= 0 ) {
return ;
}
// Extract details from message
const match = commitInfo . match ( /Merge ([0-9a-f]{40}) into ([0-9a-f]{40})/ ) ;
if ( ! match ) {
core . debug ( 'Unexpected message format' ) ;
return ;
}
// Post telemetry: the STALE_MERGE tracepoint rides in the user-agent string
const actualHeadSha = match [ 1 ] ;
if ( actualHeadSha !== expectedHeadSha ) {
core . debug ( ` Expected head sha ${ expectedHeadSha } ; actual head sha ${ actualHeadSha } ` ) ;
2023-04-12 19:55:27 +08:00
const octokit = github . getOctokit ( token , {
2023-04-13 14:25:50 +02:00
baseUrl : ( 0 , url _helper _1 . getServerApiUrl ) ( baseUrl ) ,
2023-03-09 17:42:29 +01:00
userAgent : ` actions-checkout-tracepoint/1.0 (code=STALE_MERGE;owner= ${ repositoryOwner } ;repo= ${ repositoryName } ;pr= ${ fromPayload ( 'number' ) } ;run_id= ${ process . env [ 'GITHUB_RUN_ID' ] } ;expected_head_sha= ${ expectedHeadSha } ;actual_head_sha= ${ actualHeadSha } ) `
} ) ;
2023-04-12 19:55:27 +08:00
yield octokit . rest . repos . get ( {
owner : repositoryOwner ,
repo : repositoryName
} ) ;
2023-03-09 17:42:29 +01:00
}
}
catch ( err ) {
2024-04-24 12:04:10 -04:00
core . debug ( ` Error when validating commit info: ${ ( _a = err === null || err === void 0 ? void 0 : err . stack ) !== null && _a !== void 0 ? _a : err } ` ) ;
2023-03-09 17:42:29 +01:00
}
} ) ;
}
/**
 * Reads a dot-separated path (e.g. 'pull_request.base.sha') out of the
 * webhook event payload.
 * @param {string} path - dot-separated property path
 * @returns {*} the value at the path, or undefined when any segment is absent
 */
function fromPayload(path) {
    const { payload } = github.context;
    return select(payload, path);
}
/**
 * Safely selects a nested property from an object using a dot-separated path.
 * Recurses one path segment at a time; any missing intermediate yields
 * undefined instead of throwing.
 * Fix: replaces deprecated String.prototype.substr with slice and removes
 * stray VCS date-stamp artifact lines from the generated bundle.
 * @param {object|undefined} obj - object to read from (may be falsy)
 * @param {string} path - dot-separated property path
 * @returns {*} the value at the path, or undefined
 */
function select(obj, path) {
    if (!obj) {
        return undefined;
    }
    const i = path.indexOf('.');
    if (i < 0) {
        return obj[path];
    }
    const key = path.slice(0, i);
    return select(obj[key], path.slice(i + 1));
}
2021-10-19 10:05:28 -05:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 3120 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
2021-10-19 10:05:28 -05:00
"use strict" ;
2023-03-09 17:42:29 +01:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . escape = escape ;
2023-03-09 17:42:29 +01:00
/**
 * Backslash-escapes every character outside [a-zA-Z0-9_] so the value can be
 * embedded safely (e.g. in a regular expression).
 * Fixes extraction-corrupted template literal (stray spaces inside the string).
 * @param {string} value - raw string
 * @returns {string} escaped string
 */
function escape(value) {
    return value.replace(/[^a-zA-Z0-9_]/g, x => {
        return `\\${x}`;
    });
}
2021-10-19 10:05:28 -05:00
2020-03-25 15:12:22 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 2155 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-03-25 15:12:22 -04:00
"use strict" ;
2021-10-19 09:52:57 -05:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2024-04-24 12:04:10 -04:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2021-10-19 09:52:57 -05:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
2020-03-25 15:12:22 -04:00
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2021-10-19 09:52:57 -05:00
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
2020-03-25 15:12:22 -04:00
return result ;
} ;
2023-03-09 17:42:29 +01:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . RetryHelper = void 0 ;
exports . execute = execute ;
2023-03-09 17:42:29 +01:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const defaultMaxAttempts = 3 ;
const defaultMinSeconds = 10 ;
const defaultMaxSeconds = 20 ;
/**
 * Retries an async operation with a randomized whole-second delay between
 * attempts. The final attempt propagates its error to the caller.
 * Fix: removes stray VCS date-stamp artifact lines and the corrupted template
 * literal from the generated bundle; the __awaiter generator plumbing is
 * expressed as native async/await (identical Promise semantics).
 */
class RetryHelper {
    /**
     * @param {number} maxAttempts - total attempts before giving up (default 3)
     * @param {number} minSeconds - minimum sleep between attempts (default 10)
     * @param {number} maxSeconds - maximum sleep between attempts (default 20)
     * @throws {Error} when minSeconds > maxSeconds (after flooring)
     */
    constructor(maxAttempts = defaultMaxAttempts, minSeconds = defaultMinSeconds, maxSeconds = defaultMaxSeconds) {
        this.maxAttempts = maxAttempts;
        this.minSeconds = Math.floor(minSeconds);
        this.maxSeconds = Math.floor(maxSeconds);
        if (this.minSeconds > this.maxSeconds) {
            throw new Error('min seconds should be less than or equal to max seconds');
        }
    }
    /**
     * Runs `action`, retrying on failure up to maxAttempts times.
     * @param {() => Promise<T>} action - operation to attempt
     * @returns {Promise<T>} result of the first successful attempt
     */
    async execute(action) {
        let attempt = 1;
        while (attempt < this.maxAttempts) {
            // Try
            try {
                return await action();
            }
            catch (err) {
                // Log and fall through to the retry delay
                core.info(err === null || err === void 0 ? void 0 : err.message);
            }
            // Sleep
            const seconds = this.getSleepAmount();
            core.info(`Waiting ${seconds} seconds before trying again`);
            await this.sleep(seconds);
            attempt++;
        }
        // Last attempt: let any error propagate to the caller
        return await action();
    }
    // Random whole number of seconds in [minSeconds, maxSeconds]
    getSleepAmount() {
        return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
            this.minSeconds);
    }
    // Resolves after the given number of seconds
    async sleep(seconds) {
        return new Promise(resolve => setTimeout(resolve, seconds * 1000));
    }
}
exports . RetryHelper = RetryHelper ;
/**
 * Convenience wrapper: runs `action` through a RetryHelper constructed with
 * the module defaults.
 * @param {() => Promise<T>} action - operation to attempt
 * @returns {Promise<T>} result of the first successful attempt
 */
async function execute(action) {
    const helper = new RetryHelper();
    return await helper.execute(action);
}
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 4866 :
2023-03-09 17:42:29 +01:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2024-04-24 12:04:10 -04:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2023-03-09 17:42:29 +01:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . SshKnownHostsPath = exports . SshKeyPath = exports . PostSetSafeDirectory = exports . RepositoryPath = exports . IsPost = void 0 ;
exports . setRepositoryPath = setRepositoryPath ;
exports . setSshKeyPath = setSshKeyPath ;
exports . setSshKnownHostsPath = setSshKnownHostsPath ;
exports . setSafeDirectory = setSafeDirectory ;
2023-03-09 17:42:29 +01:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
/**
 * Indicates whether the POST action is running (set at the end of MAIN below).
 */
exports . IsPost = ! ! core . getState ( 'isPost' ) ;
/**
 * The repository path for the POST action . The value is empty during the MAIN action .
 */
exports . RepositoryPath = core . getState ( 'repositoryPath' ) ;
/**
 * Whether the POST action should clean up the git safe.directory config.
 * True only when the MAIN action saved 'setSafeDirectory' as 'true'.
 */
exports . PostSetSafeDirectory = core . getState ( 'setSafeDirectory' ) === 'true' ;
/**
 * The SSH key path for the POST action . The value is empty during the MAIN action .
 */
exports . SshKeyPath = core . getState ( 'sshKeyPath' ) ;
/**
 * The SSH known hosts path for the POST action . The value is empty during the MAIN action .
 */
exports . SshKnownHostsPath = core . getState ( 'sshKnownHostsPath' ) ;
/**
 * Save the repository path so the POST action can retrieve the value .
 */
function setRepositoryPath ( repositoryPath ) {
core . saveState ( 'repositoryPath' , repositoryPath ) ;
}
/**
 * Save the SSH key path so the POST action can retrieve the value .
 */
function setSshKeyPath ( sshKeyPath ) {
core . saveState ( 'sshKeyPath' , sshKeyPath ) ;
}
/**
 * Save the SSH known hosts path so the POST action can retrieve the value .
 */
function setSshKnownHostsPath ( sshKnownHostsPath ) {
core . saveState ( 'sshKnownHostsPath' , sshKnownHostsPath ) ;
}
/**
 * Save the set - safe - directory input so the POST action can retrieve the value .
 */
function setSafeDirectory ( ) {
core . saveState ( 'setSafeDirectory' , 'true' ) ;
}
// Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic.
// This is necessary since we don't have a separate entry point.
if ( ! exports . IsPost ) {
core . saveState ( 'isPost' , 'true' ) ;
}
/***/ } ) ,
/***/ 9437 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2024-04-24 12:04:10 -04:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2023-03-09 17:42:29 +01:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . getFetchUrl = getFetchUrl ;
exports . getServerUrl = getServerUrl ;
exports . getServerApiUrl = getServerApiUrl ;
exports . isGhes = isGhes ;
2023-03-09 17:42:29 +01:00
const assert = _ _importStar ( _ _nccwpck _require _ _ ( 9491 ) ) ;
const url _1 = _ _nccwpck _require _ _ ( 7310 ) ;
/**
 * Builds the URL (or SSH address) used to fetch the repository.
 * Fix: removes stray VCS date-stamp artifact lines and reconstructs the
 * extraction-corrupted template literals (stray spaces inside the strings).
 * @param {object} settings - requires repositoryOwner and repositoryName;
 *   sshKey selects the SSH form and sshUser overrides the default 'git' user
 * @returns {string} HTTPS origin URL, or user@host:owner/repo.git for SSH
 */
function getFetchUrl(settings) {
    assert.ok(settings.repositoryOwner, 'settings.repositoryOwner must be defined');
    assert.ok(settings.repositoryName, 'settings.repositoryName must be defined');
    const serviceUrl = getServerUrl(settings.githubServerUrl);
    const encodedOwner = encodeURIComponent(settings.repositoryOwner);
    const encodedName = encodeURIComponent(settings.repositoryName);
    if (settings.sshKey) {
        // SSH form: [user@]host:owner/repo.git (defaults to the "git" user)
        const user = settings.sshUser.length > 0 ? settings.sshUser : 'git';
        return `${user}@${serviceUrl.hostname}:${encodedOwner}/${encodedName}.git`;
    }
    // "origin" is SCHEME://HOSTNAME[:PORT]
    return `${serviceUrl.origin}/${encodedOwner}/${encodedName}`;
}
/**
 * Resolves the GitHub server URL: explicit value, then GITHUB_SERVER_URL,
 * then https://github.com.
 * @param {string} [url] - explicit server URL (blank/whitespace ignored)
 * @returns {URL}
 */
function getServerUrl(url) {
    const urlValue = url && url.trim().length > 0
        ? url
        : process.env['GITHUB_SERVER_URL'] || 'https://github.com';
    return new url_1.URL(urlValue);
}
2022-09-26 17:34:52 +01:00
/**
 * Computes the REST API base URL for the given server:
 * api.github.com for github.com, <origin>/api/v3 for GHES.
 * Fix: reconstructs the extraction-corrupted template literal (stray spaces
 * would otherwise corrupt the /api/v3 URL).
 * @param {string} [url] - server URL; falls back like getServerUrl
 * @returns {string} API base URL
 */
function getServerApiUrl(url) {
    let apiUrl = 'https://api.github.com';
    if (isGhes(url)) {
        const serverUrl = getServerUrl(url);
        apiUrl = new url_1.URL(`${serverUrl.origin}/api/v3`).toString();
    }
    return apiUrl;
}
/**
 * Detects GitHub Enterprise Server: any hostname other than github.com.
 * @param {string} [url] - server URL; falls back like getServerUrl
 * @returns {boolean}
 */
function isGhes(url) {
    const ghUrl = getServerUrl(url);
    return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}
2020-03-25 15:12:22 -04:00
2021-10-13 16:07:05 -05:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 9568 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2021-10-13 16:07:05 -05:00
"use strict" ;
2023-03-09 17:42:29 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
2024-04-24 12:04:10 -04:00
var desc = Object . getOwnPropertyDescriptor ( m , k ) ;
if ( ! desc || ( "get" in desc ? ! m . _ _esModule : desc . writable || desc . configurable ) ) {
desc = { enumerable : true , get : function ( ) { return m [ k ] ; } } ;
}
Object . defineProperty ( o , k2 , desc ) ;
2023-03-09 17:42:29 +01:00
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . prototype . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-09-05 15:04:17 -04:00
exports . getOrganizationId = getOrganizationId ;
2023-03-09 17:42:29 +01:00
const core = _ _importStar ( _ _nccwpck _require _ _ ( 2186 ) ) ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
/**
 * Gets the organization ID of the running workflow, or undefined if the value
 * cannot be loaded from GITHUB_EVENT_PATH.
 */
// Reads the repository owner (organization) numeric ID from the webhook event
// file. Best effort: any failure is core.debug()ed and undefined is returned.
// NOTE(review): __awaiter, core, and fs come from module scope above.
function getOrganizationId ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2024-04-24 12:04:10 -04:00
var _a , _b ;
2023-03-09 17:42:29 +01:00
try {
const eventPath = process . env . GITHUB _EVENT _PATH ;
if ( ! eventPath ) {
core . debug ( ` GITHUB_EVENT_PATH is not defined ` ) ;
return ;
}
const content = yield fs . promises . readFile ( eventPath , { encoding : 'utf8' } ) ;
const event = JSON . parse ( content ) ;
// Compiled optional chaining: event?.repository?.owner?.id
const id = ( _b = ( _a = event === null || event === void 0 ? void 0 : event . repository ) === null || _a === void 0 ? void 0 : _a . owner ) === null || _b === void 0 ? void 0 : _b . id ;
if ( typeof id !== 'number' ) {
core . debug ( 'Repository owner ID not found within GITHUB event info' ) ;
return ;
}
return id ;
}
catch ( err ) {
2024-04-24 12:04:10 -04:00
core . debug ( ` Unable to load organization ID from GITHUB_EVENT_PATH: ${ err . message || err } ` ) ;
2023-03-09 17:42:29 +01:00
}
} ) ;
2022-10-03 18:04:49 +01:00
}
2019-12-03 10:28:59 -05:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 7351 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2022-10-03 18:04:49 +01:00
"use strict" ;
2023-03-09 17:42:29 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . issue = exports . issueCommand = void 0 ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
/ * *
* Commands
*
* Command Format :
* : : name key = value , key = value : : message
*
* Examples :
* : : warning : : This is the message
* : : set - env name = MY _VAR : : some value
* /
// Serializes a workflow command (::name key=value::message) and writes it to
// stdout, where the Actions runner parses it.
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
2022-10-03 18:04:49 +01:00
}
2023-03-09 17:42:29 +01:00
exports . issueCommand = issueCommand ;
// Shorthand for a property-less command with an optional message.
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
const CMD_STRING = '::';
/**
 * A single workflow command, serialized as:
 *   ::command key=value,key=value::message
 * Fix: removes stray VCS date-stamp artifact lines and reconstructs the
 * extraction-corrupted template literals from the generated bundle.
 */
class Command {
    /**
     * @param {string} command - command name ('missing.command' when empty)
     * @param {object} properties - key/value annotations; falsy values are omitted
     * @param {string} message - command payload
     */
    constructor(command, properties, message) {
        if (!command) {
            // Keep output parseable even when the caller passes an empty name
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    /** Serializes the command into the runner's wire format. */
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    // Falsy values are omitted from the serialized properties
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
2023-03-09 17:42:29 +01:00
/**
 * Escapes a command message payload: '%', CR, and LF are percent-encoded so
 * the value survives the single-line workflow-command format.
 * @param {*} s - value to escape (converted via utils_1.toCommandValue)
 * @returns {string}
 */
function escapeData(s) {
    return utils_1
        .toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
/**
 * Escapes a command property value: in addition to the data escapes, ':' and
 * ',' are encoded because they delimit properties on the command line.
 * @param {*} s - value to escape (converted via utils_1.toCommandValue)
 * @returns {string}
 */
function escapeProperty(s) {
    return escapeData(s)
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map
2022-10-03 18:04:49 +01:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
/***/ 2186 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2021-10-13 16:07:05 -05:00
"use strict" ;
2022-10-03 18:04:49 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
2021-10-13 16:07:05 -05:00
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2022-10-03 18:04:49 +01:00
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
2021-10-13 16:07:05 -05:00
return result ;
} ;
2023-03-09 17:42:29 +01:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
2021-10-13 16:07:05 -05:00
} ) ;
2019-12-03 10:28:59 -05:00
} ;
2023-03-09 17:42:29 +01:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getIDToken = exports . getState = exports . saveState = exports . group = exports . endGroup = exports . startGroup = exports . info = exports . notice = exports . warning = exports . error = exports . debug = exports . isDebug = exports . setFailed = exports . setCommandEcho = exports . setOutput = exports . getBooleanInput = exports . getMultilineInput = exports . getInput = exports . addPath = exports . setSecret = exports . exportVariable = exports . ExitCode = void 0 ;
const command _1 = _ _nccwpck _require _ _ ( 7351 ) ;
const file _command _1 = _ _nccwpck _require _ _ ( 717 ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const oidc _utils _1 = _ _nccwpck _require _ _ ( 8041 ) ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* The code to exit an action
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
// Compiled TypeScript enum: maps exit-code names to numbers and numbers back
// to names (ExitCode.Success === 0, ExitCode[0] === 'Success').
var ExitCode ;
( function ( ExitCode ) {
/**
 * A code indicating that the action was successful
 */
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/**
 * A code indicating that the action was a failure
 */
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
* @ param val the value of the variable . Non - string values will be converted to a string via JSON . stringify
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
// Sets an environment variable for this action and all future steps in the
// job. Prefers the GITHUB_ENV file command; falls back to the legacy
// ::set-env workflow command when the file is not available.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable ( name , val ) {
const convertedVal = utils _1 . toCommandValue ( val ) ;
process . env [ name ] = convertedVal ;
const filePath = process . env [ 'GITHUB_ENV' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'ENV' , file _command _1 . prepareKeyValueMessage ( name , val ) ) ;
}
command _1 . issueCommand ( 'set-env' , { name } , convertedVal ) ;
}
exports . exportVariable = exportVariable ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Registers a secret which will get masked from logs
* @ param secret value of the secret
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
// Registers a secret with the runner so it is masked from subsequent logs.
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
exports . setSecret = setSecret ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
// Prepends inputPath to PATH for this action and all future steps in the job.
// Uses the GITHUB_PATH file command when available, otherwise the legacy
// ::add-path workflow command; also updates PATH for the current process.
function addPath ( inputPath ) {
const filePath = process . env [ 'GITHUB_PATH' ] || '' ;
if ( filePath ) {
file _command _1 . issueFileCommand ( 'PATH' , inputPath ) ;
2019-12-03 10:28:59 -05:00
}
2023-03-09 17:42:29 +01:00
else {
command _1 . issueCommand ( 'add-path' , { } , inputPath ) ;
}
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
2019-12-03 10:28:59 -05:00
}
exports . addPath = addPath ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Gets the value of an input .
* Unless trimWhitespace is set to false in InputOptions , the value is also trimmed .
* Returns an empty string if the value is not defined .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
/**
 * Reads an action input from the corresponding INPUT_* environment variable
 * (spaces in the name become underscores; the name is upper-cased).
 * Fix: removes stray VCS date-stamp artifact lines and reconstructs the
 * extraction-corrupted template literals (the env key and error message).
 * @param {string} name - input name as declared in action.yml
 * @param {object} [options] - { required, trimWhitespace }
 * @returns {string} the value ('' when unset); trimmed unless trimWhitespace === false
 * @throws {Error} when options.required is set and the input is empty
 */
function getInput(name, options) {
    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    if (options && options.trimWhitespace === false) {
        return val;
    }
    return val.trim();
}
2023-03-09 17:42:29 +01:00
exports . getInput = getInput ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Gets the values of an multiline input . Each value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string [ ]
2019-12-03 10:28:59 -05:00
*
* /
2023-03-09 17:42:29 +01:00
/**
 * Reads a multiline action input: the raw value split on newlines with empty
 * lines removed.
 * Fix: removes stray VCS date-stamp artifact lines from the generated bundle.
 * @param {string} name - input name as declared in action.yml
 * @param {object} [options] - see getInput; trimWhitespace === false keeps padding
 * @returns {string[]} one entry per non-empty line, trimmed by default
 */
function getMultilineInput(name, options) {
    const inputs = getInput(name, options)
        .split('\n')
        .filter(x => x !== '');
    if (options && options.trimWhitespace === false) {
        return inputs;
    }
    return inputs.map(input => input.trim());
}
2023-03-09 17:42:29 +01:00
exports . getMultilineInput = getMultilineInput ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification .
* Support boolean input list : ` true | True | TRUE | false | False | FALSE ` .
* The return value is also in boolean type .
* ref : https : //yaml.org/spec/1.2/spec.html#id2804923
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns boolean
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
/**
 * Reads a boolean action input per the YAML 1.2 "core schema":
 * `true | True | TRUE | false | False | FALSE`.
 * Fix: removes a stray VCS date-stamp artifact line and reconstructs the
 * extraction-corrupted template literal in the error message.
 * @param {string} name - input name as declared in action.yml
 * @param {object} [options] - see getInput
 * @returns {boolean}
 * @throws {TypeError} when the value is not one of the supported spellings
 */
function getBooleanInput(name, options) {
    const trueValue = ['true', 'True', 'TRUE'];
    const falseValue = ['false', 'False', 'FALSE'];
    const val = getInput(name, options);
    if (trueValue.includes(val))
        return true;
    if (falseValue.includes(val))
        return false;
    throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
        `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
}
2023-03-09 17:42:29 +01:00
exports . getBooleanInput = getBooleanInput ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Sets the value of an output .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* @ param name name of the output to set
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
// Sets a step output for use by later steps. Prefers the GITHUB_OUTPUT file
// command; falls back to the legacy ::set-output workflow command (the extra
// EOL keeps the command on its own stdout line in that path).
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput ( name , value ) {
const filePath = process . env [ 'GITHUB_OUTPUT' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'OUTPUT' , file _command _1 . prepareKeyValueMessage ( name , value ) ) ;
}
process . stdout . write ( os . EOL ) ;
command _1 . issueCommand ( 'set-output' , { name } , utils _1 . toCommandValue ( value ) ) ;
2019-12-03 10:28:59 -05:00
}
exports . setOutput = setOutput ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Enables or disables the echoing of commands into stdout for the rest of the step .
* Echoing is disabled by default if ACTIONS _STEP _DEBUG is not set .
2019-12-03 10:28:59 -05:00
*
* /
2023-03-09 17:42:29 +01:00
function setCommandEcho(enabled) {
    // The runner understands only the literal states 'on' and 'off'.
    const state = enabled ? 'on' : 'off';
    command_1.issue('echo', state);
}
2023-03-09 17:42:29 +01:00
exports . setCommandEcho = setCommandEcho ;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message add error issue message
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function setFailed(message) {
    // Mark the process for a failing exit, then surface the error annotation.
    process.exitCode = ExitCode.Failure;
    error(message);
}
2023-03-09 17:42:29 +01:00
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Gets whether Actions Step Debug is on or not
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function isDebug() {
    // The runner sets RUNNER_DEBUG to the string '1' when step debugging is on.
    const { RUNNER_DEBUG } = process.env;
    return RUNNER_DEBUG === '1';
}
2023-03-09 17:42:29 +01:00
exports . isDebug = isDebug ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Writes debug message to user log
* @ param message debug message
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function debug(message) {
    // Emits a ::debug:: workflow command; the runner hides it unless
    // step debug logging is enabled.
    command_1.issueCommand('debug', {}, message);
}
exports . debug = debug ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Adds an error issue
* @ param message error issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function error(message, properties = {}) {
    // Error objects are stringified; plain strings pass through unchanged.
    const text = message instanceof Error ? message.toString() : message;
    command_1.issueCommand('error', utils_1.toCommandProperties(properties), text);
}
2023-03-09 17:42:29 +01:00
exports . error = error ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Adds a warning issue
* @ param message warning issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function warning(message, properties = {}) {
    // Error objects are stringified; plain strings pass through unchanged.
    const text = message instanceof Error ? message.toString() : message;
    command_1.issueCommand('warning', utils_1.toCommandProperties(properties), text);
}
2023-03-09 17:42:29 +01:00
exports . warning = warning ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Adds a notice issue
* @ param message notice issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function notice(message, properties = {}) {
    // Error objects are stringified; plain strings pass through unchanged.
    const text = message instanceof Error ? message.toString() : message;
    command_1.issueCommand('notice', utils_1.toCommandProperties(properties), text);
}
2023-03-09 17:42:29 +01:00
exports . notice = notice ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Writes info to log with console . log .
* @ param message info message
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function info(message) {
    // Plain log line terminated with the platform end-of-line marker.
    process.stdout.write(`${message}${os.EOL}`);
}
2023-03-09 17:42:29 +01:00
exports . info = info ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Begin an output group .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function startGroup(name) {
    // Opens a foldable log group; close it with endGroup().
    command_1.issue('group', name);
}
2023-03-09 17:42:29 +01:00
exports . startGroup = startGroup ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* End an output group .
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function endGroup() {
    // Closes the group most recently opened by startGroup().
    command_1.issue('endgroup');
}
2023-03-09 17:42:29 +01:00
exports . endGroup = endGroup ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Wrap an asynchronous function call in a group .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
async function group(name, fn) {
    startGroup(name);
    try {
        // The group is closed even when fn rejects or throws.
        return await fn();
    }
    finally {
        endGroup();
    }
}
2023-03-09 17:42:29 +01:00
exports . group = group ;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Saves state for current action , the state can only be retrieved by this action ' s post job execution .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* @ param name name of the state to store
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    // Prefer the file-based command when the runner exposes GITHUB_STATE.
    const stateFile = process.env['GITHUB_STATE'] || '';
    if (!stateFile) {
        // Legacy path: deprecated ::save-state workflow command.
        command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
        return;
    }
    return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
}
2023-03-09 17:42:29 +01:00
exports . saveState = saveState ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Gets the value of an state set by this action ' s main execution .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* @ param name name of the state to get
* @ returns string
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function getState(name) {
    // State saved by the main phase is surfaced to the post phase as STATE_<name>.
    const stored = process.env[`STATE_${name}`];
    return stored === undefined || stored === '' ? '' : stored;
}
2023-03-09 17:42:29 +01:00
exports . getState = getState ;
async function getIDToken(aud) {
    // Thin wrapper over the OIDC client; resolves to the requested ID token.
    return oidc_utils_1.OidcClient.getIDToken(aud);
}
exports . getIDToken = getIDToken ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Summary exports
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
var summary _1 = _ _nccwpck _require _ _ ( 1327 ) ;
Object . defineProperty ( exports , "summary" , ( { enumerable : true , get : function ( ) { return summary _1 . summary ; } } ) ) ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* @ deprecated use core . summary
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
var summary _2 = _ _nccwpck _require _ _ ( 1327 ) ;
Object . defineProperty ( exports , "markdownSummary" , ( { enumerable : true , get : function ( ) { return summary _2 . markdownSummary ; } } ) ) ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Path exports
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
var path _utils _1 = _ _nccwpck _require _ _ ( 2981 ) ;
Object . defineProperty ( exports , "toPosixPath" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toPosixPath ; } } ) ) ;
Object . defineProperty ( exports , "toWin32Path" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toWin32Path ; } } ) ) ;
Object . defineProperty ( exports , "toPlatformPath" , ( { enumerable : true , get : function ( ) { return path _utils _1 . toPlatformPath ; } } ) ) ;
//# sourceMappingURL=core.js.map
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ 717 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
// For internal use, subject to change.
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . prepareKeyValueMessage = exports . issueFileCommand = void 0 ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 7147 ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const uuid _1 = _ _nccwpck _require _ _ ( 8974 ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
function issueFileCommand(command, message) {
    // File commands map e.g. 'OUTPUT' -> the path in $GITHUB_OUTPUT.
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    const line = `${utils_1.toCommandValue(message)}${os.EOL}`;
    fs.appendFileSync(filePath, line, { encoding: 'utf8' });
}
2023-03-09 17:42:29 +01:00
exports . issueFileCommand = issueFileCommand ;
function prepareKeyValueMessage(key, value) {
    // A random delimiter keeps arbitrary content from terminating the heredoc.
    const delimiter = `ghadelimiter_${uuid_1.v4()}`;
    const convertedValue = utils_1.toCommandValue(value);
    // These should realistically never happen, but just in case someone finds a
    // way to exploit uuid generation let's not allow keys or values that contain
    // the delimiter.
    if (key.includes(delimiter)) {
        throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
    }
    if (convertedValue.includes(delimiter)) {
        throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
    }
    // Heredoc-style format: key<<DELIM \n value \n DELIM
    return [`${key}<<${delimiter}`, convertedValue, delimiter].join(os.EOL);
}
2023-03-09 17:42:29 +01:00
exports . prepareKeyValueMessage = prepareKeyValueMessage ;
//# sourceMappingURL=file-command.js.map
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ 8041 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . OidcClient = void 0 ;
2023-04-12 19:55:27 +08:00
const http _client _1 = _ _nccwpck _require _ _ ( 6255 ) ;
const auth _1 = _ _nccwpck _require _ _ ( 5526 ) ;
2023-03-09 17:42:29 +01:00
const core _1 = _ _nccwpck _require _ _ ( 2186 ) ;
class OidcClient {
    // Builds an HttpClient authenticated with the runner-provided request token.
    static createHttpClient(allowRetry = true, maxRetry = 10) {
        const requestOptions = {
            allowRetries: allowRetry,
            maxRetries: maxRetry
        };
        const handlers = [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())];
        return new http_client_1.HttpClient('actions/oidc-client', handlers, requestOptions);
    }
    // Reads the bearer token the runner injects for OIDC requests.
    static getRequestToken() {
        const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
        if (!token) {
            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
        }
        return token;
    }
    // Reads the token-exchange endpoint the runner injects.
    static getIDTokenUrl() {
        const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
        if (!runtimeUrl) {
            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
        }
        return runtimeUrl;
    }
    // Fetches the ID token from the given URL; throws when the response
    // carries no token value.
    static async getCall(id_token_url) {
        const httpclient = OidcClient.createHttpClient();
        const res = await httpclient.getJson(id_token_url).catch(error => {
            throw new Error(`Failed to get ID Token. \n
        Error Code : ${error.statusCode}\n
        Error Message: ${error.message}`);
        });
        const id_token = res.result ? res.result.value : undefined;
        if (!id_token) {
            throw new Error('Response json body do not have ID Token field');
        }
        return id_token;
    }
    // Requests a fresh ID token, optionally scoped to an audience, and
    // registers it as a secret so it is masked in logs.
    static async getIDToken(audience) {
        try {
            // New ID Token is requested from action service
            let id_token_url = OidcClient.getIDTokenUrl();
            if (audience) {
                const encodedAudience = encodeURIComponent(audience);
                id_token_url = `${id_token_url}&audience=${encodedAudience}`;
            }
            core_1.debug(`ID token url is ${id_token_url}`);
            const id_token = await OidcClient.getCall(id_token_url);
            core_1.setSecret(id_token);
            return id_token;
        }
        catch (error) {
            throw new Error(`Error message: ${error.message}`);
        }
    }
}
2023-03-09 17:42:29 +01:00
exports . OidcClient = OidcClient ;
//# sourceMappingURL=oidc-utils.js.map
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ 2981 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . toPlatformPath = exports . toWin32Path = exports . toPosixPath = void 0 ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* toPosixPath converts the given path to the posix form . On Windows , \ \ will be
* replaced with / .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* @ param pth . Path to transform .
* @ return string Posix path .
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function toPosixPath(pth) {
    // Every backslash becomes a forward slash; forward slashes are untouched.
    return pth.split('\\').join('/');
}
2023-03-09 17:42:29 +01:00
exports . toPosixPath = toPosixPath ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* toWin32Path converts the given path to the win32 form . On Linux , / w i l l b e
* replaced with \ \ .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* @ param pth . Path to transform .
* @ return string Win32 path .
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function toWin32Path(pth) {
    // Every forward slash becomes a backslash; backslashes are untouched.
    return pth.split('/').join('\\');
}
2023-03-09 17:42:29 +01:00
exports . toWin32Path = toWin32Path ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* toPlatformPath converts the given path to a platform - specific path . It does
* this by replacing instances of / a n d \ w i t h t h e p l a t f o r m - s p e c i f i c p a t h
* separator .
2019-12-03 10:28:59 -05:00
*
2023-03-09 17:42:29 +01:00
* @ param pth The path to platformize .
* @ return string The platform - specific path .
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function toPlatformPath(pth) {
    // Both separator styles collapse to the platform's path.sep.
    return pth.split(/[/\\]/).join(path.sep);
}
2023-03-09 17:42:29 +01:00
exports . toPlatformPath = toPlatformPath ;
//# sourceMappingURL=path-utils.js.map
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ 1327 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . summary = exports . markdownSummary = exports . SUMMARY _DOCS _URL = exports . SUMMARY _ENV _VAR = void 0 ;
const os _1 = _ _nccwpck _require _ _ ( 2037 ) ;
const fs _1 = _ _nccwpck _require _ _ ( 7147 ) ;
const { access , appendFile , writeFile } = fs _1 . promises ;
exports . SUMMARY _ENV _VAR = 'GITHUB_STEP_SUMMARY' ;
exports . SUMMARY _DOCS _URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary' ;
class Summary {
    constructor() {
        // Accumulates HTML fragments until write() flushes them to the summary file.
        this._buffer = '';
        // NOTE: this._filePath is lazily populated by filePath() on first use.
    }
    /**
     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
     * Also checks r/w permissions.
     *
     * @returns step summary file path
     */
    filePath() {
        return __awaiter(this, void 0, void 0, function* () {
            // Cached after the first successful lookup.
            if (this._filePath) {
                return this._filePath;
            }
            const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
            if (!pathFromEnv) {
                throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
            }
            try {
                yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
            }
            catch (_a) {
                throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
            }
            this._filePath = pathFromEnv;
            return this._filePath;
        });
    }
    /**
     * Wraps content in an HTML tag, adding any HTML attributes
     *
     * @param {string} tag HTML tag to wrap
     * @param {string | null} content content within the tag
     * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
     *
     * @returns {string} content wrapped in HTML element
     */
    wrap(tag, content, attrs = {}) {
        const htmlAttrs = Object.entries(attrs)
            .map(([key, value]) => ` ${key}="${value}"`)
            .join('');
        // No content yields a void/self-closing form, e.g. <img ...> or <hr>.
        if (!content) {
            return `<${tag}${htmlAttrs}>`;
        }
        return `<${tag}${htmlAttrs}>${content}</${tag}>`;
    }
    /**
     * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
     *
     * @param {SummaryWriteOptions} [options] (optional) options for write operation
     *
     * @returns {Promise<Summary>} summary instance
     */
    write(options) {
        return __awaiter(this, void 0, void 0, function* () {
            const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
            const filePath = yield this.filePath();
            const writeFunc = overwrite ? writeFile : appendFile;
            yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
            return this.emptyBuffer();
        });
    }
    /**
     * Clears the summary buffer and wipes the summary file
     *
     * @returns {Summary} summary instance
     */
    clear() {
        return __awaiter(this, void 0, void 0, function* () {
            return this.emptyBuffer().write({ overwrite: true });
        });
    }
    /**
     * Returns the current summary buffer as a string
     *
     * @returns {string} string of summary buffer
     */
    stringify() {
        return this._buffer;
    }
    /**
     * If the summary buffer is empty
     *
     * @returns {boolean} true if the buffer is empty
     */
    isEmptyBuffer() {
        return this._buffer.length === 0;
    }
    /**
     * Resets the summary buffer without writing to summary file
     *
     * @returns {Summary} summary instance
     */
    emptyBuffer() {
        this._buffer = '';
        return this;
    }
    /**
     * Adds raw text to the summary buffer
     *
     * @param {string} text content to add
     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
     *
     * @returns {Summary} summary instance
     */
    addRaw(text, addEOL = false) {
        this._buffer += text;
        return addEOL ? this.addEOL() : this;
    }
    /**
     * Adds the operating system-specific end-of-line marker to the buffer
     *
     * @returns {Summary} summary instance
     */
    addEOL() {
        return this.addRaw(os_1.EOL);
    }
    /**
     * Adds an HTML codeblock to the summary buffer
     *
     * @param {string} code content to render within fenced code block
     * @param {string} lang (optional) language to syntax highlight code
     *
     * @returns {Summary} summary instance
     */
    addCodeBlock(code, lang) {
        const attrs = Object.assign({}, (lang && { lang }));
        const element = this.wrap('pre', this.wrap('code', code), attrs);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML list to the summary buffer
     *
     * @param {string[]} items list of items to render
     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
     *
     * @returns {Summary} summary instance
     */
    addList(items, ordered = false) {
        const tag = ordered ? 'ol' : 'ul';
        const listItems = items.map(item => this.wrap('li', item)).join('');
        const element = this.wrap(tag, listItems);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML table to the summary buffer
     *
     * @param {SummaryTableCell[]} rows table rows
     *
     * @returns {Summary} summary instance
     */
    addTable(rows) {
        const tableBody = rows
            .map(row => {
            const cells = row
                .map(cell => {
                // Plain strings become simple data cells.
                if (typeof cell === 'string') {
                    return this.wrap('td', cell);
                }
                const { header, data, colspan, rowspan } = cell;
                const tag = header ? 'th' : 'td';
                const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
                return this.wrap(tag, data, attrs);
            })
                .join('');
            return this.wrap('tr', cells);
        })
            .join('');
        const element = this.wrap('table', tableBody);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds a collapsible HTML details element to the summary buffer
     *
     * @param {string} label text for the closed state
     * @param {string} content collapsible content
     *
     * @returns {Summary} summary instance
     */
    addDetails(label, content) {
        const element = this.wrap('details', this.wrap('summary', label) + content);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML image tag to the summary buffer
     *
     * @param {string} src path to the image you wish to embed
     * @param {string} alt text description of the image
     * @param {SummaryImageOptions} options (optional) additional image attributes
     *
     * @returns {Summary} summary instance
     */
    addImage(src, alt, options) {
        const { width, height } = options || {};
        const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
        const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML section heading element
     *
     * @param {string} text heading text
     * @param {number | string} [level=1] (optional) the heading level, default: 1
     *
     * @returns {Summary} summary instance
     */
    addHeading(text, level) {
        const tag = `h${level}`;
        // Anything outside h1-h6 falls back to h1.
        const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
            ? tag
            : 'h1';
        const element = this.wrap(allowedTag, text);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML thematic break (<hr>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addSeparator() {
        const element = this.wrap('hr', null);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML line break (<br>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addBreak() {
        const element = this.wrap('br', null);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML blockquote to the summary buffer
     *
     * @param {string} text quote text
     * @param {string} cite (optional) citation url
     *
     * @returns {Summary} summary instance
     */
    addQuote(text, cite) {
        const attrs = Object.assign({}, (cite && { cite }));
        const element = this.wrap('blockquote', text, attrs);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML anchor tag to the summary buffer
     *
     * @param {string} text link text/content
     * @param {string} href hyperlink
     *
     * @returns {Summary} summary instance
     */
    addLink(text, href) {
        const element = this.wrap('a', text, { href });
        return this.addRaw(element).addEOL();
    }
}
const _summary = new Summary ( ) ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* @ deprecated use ` core.summary `
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
exports . markdownSummary = _summary ;
exports . summary = _summary ;
//# sourceMappingURL=summary.js.map
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ 5278 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . toCommandProperties = exports . toCommandValue = void 0 ;
2019-12-03 10:28:59 -05:00
/ * *
2023-03-09 17:42:29 +01:00
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function toCommandValue(input) {
    // null and undefined both normalize to the empty string.
    if (input == null) {
        return '';
    }
    // Strings (primitive or boxed) pass through unchanged.
    if (typeof input === 'string' || input instanceof String) {
        return input;
    }
    // Everything else is serialized as JSON.
    return JSON.stringify(input);
}
2023-03-09 17:42:29 +01:00
exports . toCommandValue = toCommandValue ;
2019-12-03 10:28:59 -05:00
/ * *
*
2023-03-09 17:42:29 +01:00
* @ param annotationProperties
* @ returns The command properties to send with the actual annotation command
* See IssueCommandProperties : https : //github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
2019-12-03 10:28:59 -05:00
* /
2023-03-09 17:42:29 +01:00
function toCommandProperties(annotationProperties) {
    // An empty annotation object maps to an empty command-properties object.
    if (Object.keys(annotationProperties).length === 0) {
        return {};
    }
    // Rename startLine/startColumn to the line/col keys the runner expects.
    const { title, file, startLine, endLine, startColumn, endColumn } = annotationProperties;
    return {
        title,
        file,
        line: startLine,
        endLine,
        col: startColumn,
        endColumn
    };
}
2023-03-09 17:42:29 +01:00
exports . toCommandProperties = toCommandProperties ;
//# sourceMappingURL=utils.js.map
2019-12-03 10:28:59 -05:00
2019-12-12 13:16:16 -05:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
/***/ 8974 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2022-10-03 18:04:49 +01:00
"use strict" ;
2023-04-12 19:55:27 +08:00
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
Object . defineProperty ( exports , "v1" , ( {
enumerable : true ,
get : function ( ) {
return _v . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v3" , ( {
enumerable : true ,
get : function ( ) {
return _v2 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v4" , ( {
enumerable : true ,
get : function ( ) {
return _v3 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "v5" , ( {
enumerable : true ,
get : function ( ) {
return _v4 . default ;
}
} ) ) ;
Object . defineProperty ( exports , "NIL" , ( {
enumerable : true ,
get : function ( ) {
return _nil . default ;
}
} ) ) ;
Object . defineProperty ( exports , "version" , ( {
enumerable : true ,
get : function ( ) {
return _version . default ;
}
} ) ) ;
Object . defineProperty ( exports , "validate" , ( {
enumerable : true ,
get : function ( ) {
return _validate . default ;
}
} ) ) ;
Object . defineProperty ( exports , "stringify" , ( {
enumerable : true ,
get : function ( ) {
return _stringify . default ;
}
} ) ) ;
Object . defineProperty ( exports , "parse" , ( {
enumerable : true ,
get : function ( ) {
return _parse . default ;
}
} ) ) ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1595 ) ) ;
var _v2 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6993 ) ) ;
var _v3 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1472 ) ) ;
var _v4 = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6217 ) ) ;
var _nil = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2381 ) ) ;
var _version = _interopRequireDefault ( _ _nccwpck _require _ _ ( 427 ) ) ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2609 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1458 ) ) ;
var _parse = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6385 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/***/ } ) ,
/***/ 5842 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function md5 ( bytes ) {
if ( Array . isArray ( bytes ) ) {
bytes = Buffer . from ( bytes ) ;
} else if ( typeof bytes === 'string' ) {
bytes = Buffer . from ( bytes , 'utf8' ) ;
}
return _crypto . default . createHash ( 'md5' ) . update ( bytes ) . digest ( ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
var _default = md5 ;
exports [ "default" ] = _default ;
2022-10-03 18:04:49 +01:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
/***/ 2381 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
2022-10-03 18:04:49 +01:00
"use strict" ;
2023-04-12 19:55:27 +08:00
Object . defineProperty ( exports , "__esModule" , ( {
value : true
2023-03-09 17:42:29 +01:00
} ) ) ;
2023-04-12 19:55:27 +08:00
exports [ "default" ] = void 0 ;
var _default = '00000000-0000-0000-0000-000000000000' ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6385 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2609 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function parse ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
let v ;
const arr = new Uint8Array ( 16 ) ; // Parse ########-....-....-....-............
arr [ 0 ] = ( v = parseInt ( uuid . slice ( 0 , 8 ) , 16 ) ) >>> 24 ;
arr [ 1 ] = v >>> 16 & 0xff ;
arr [ 2 ] = v >>> 8 & 0xff ;
arr [ 3 ] = v & 0xff ; // Parse ........-####-....-....-............
arr [ 4 ] = ( v = parseInt ( uuid . slice ( 9 , 13 ) , 16 ) ) >>> 8 ;
arr [ 5 ] = v & 0xff ; // Parse ........-....-####-....-............
arr [ 6 ] = ( v = parseInt ( uuid . slice ( 14 , 18 ) , 16 ) ) >>> 8 ;
arr [ 7 ] = v & 0xff ; // Parse ........-....-....-####-............
arr [ 8 ] = ( v = parseInt ( uuid . slice ( 19 , 23 ) , 16 ) ) >>> 8 ;
arr [ 9 ] = v & 0xff ; // Parse ........-....-....-....-############
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
arr [ 10 ] = ( v = parseInt ( uuid . slice ( 24 , 36 ) , 16 ) ) / 0x10000000000 & 0xff ;
arr [ 11 ] = v / 0x100000000 & 0xff ;
arr [ 12 ] = v >>> 24 & 0xff ;
arr [ 13 ] = v >>> 16 & 0xff ;
arr [ 14 ] = v >>> 8 & 0xff ;
arr [ 15 ] = v & 0xff ;
return arr ;
2022-10-03 18:04:49 +01:00
}
2023-04-12 19:55:27 +08:00
var _default = parse ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6230 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 9784 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = rng ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const rnds8Pool = new Uint8Array ( 256 ) ; // # of random values to pre-allocate
let poolPtr = rnds8Pool . length ;
function rng ( ) {
if ( poolPtr > rnds8Pool . length - 16 ) {
_crypto . default . randomFillSync ( rnds8Pool ) ;
poolPtr = 0 ;
}
return rnds8Pool . slice ( poolPtr , poolPtr += 16 ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
/***/ } ) ,
/***/ 8844 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _crypto = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6113 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function sha1 ( bytes ) {
if ( Array . isArray ( bytes ) ) {
bytes = Buffer . from ( bytes ) ;
} else if ( typeof bytes === 'string' ) {
bytes = Buffer . from ( bytes , 'utf8' ) ;
}
return _crypto . default . createHash ( 'sha1' ) . update ( bytes ) . digest ( ) ;
2019-12-12 13:16:16 -05:00
}
2023-04-12 19:55:27 +08:00
var _default = sha1 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 1458 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2609 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/ * *
* Convert array of 16 byte values to UUID string format of the form :
* XXXXXXXX - XXXX - XXXX - XXXX - XXXXXXXXXXXX
* /
const byteToHex = [ ] ;
for ( let i = 0 ; i < 256 ; ++ i ) {
byteToHex . push ( ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ) ;
2019-12-12 13:16:16 -05:00
}
2023-04-12 19:55:27 +08:00
function stringify ( arr , offset = 0 ) {
// Note: Be careful editing this code! It's been tuned for performance
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
const uuid = ( byteToHex [ arr [ offset + 0 ] ] + byteToHex [ arr [ offset + 1 ] ] + byteToHex [ arr [ offset + 2 ] ] + byteToHex [ arr [ offset + 3 ] ] + '-' + byteToHex [ arr [ offset + 4 ] ] + byteToHex [ arr [ offset + 5 ] ] + '-' + byteToHex [ arr [ offset + 6 ] ] + byteToHex [ arr [ offset + 7 ] ] + '-' + byteToHex [ arr [ offset + 8 ] ] + byteToHex [ arr [ offset + 9 ] ] + '-' + byteToHex [ arr [ offset + 10 ] ] + byteToHex [ arr [ offset + 11 ] ] + byteToHex [ arr [ offset + 12 ] ] + byteToHex [ arr [ offset + 13 ] ] + byteToHex [ arr [ offset + 14 ] ] + byteToHex [ arr [ offset + 15 ] ] ) . toLowerCase ( ) ; // Consistency check for valid UUID. If this throws, it's likely due to one
// of the following:
// - One or more input array values don't map to a hex octet (leading to
// "undefined" in the uuid)
// - Invalid input values for the RFC `version` or `variant` fields
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Stringified UUID is invalid' ) ;
}
return uuid ;
}
var _default = stringify ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 1595 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 9784 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1458 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
let _nodeId ;
let _clockseq ; // Previous uuid creation time
let _lastMSecs = 0 ;
let _lastNSecs = 0 ; // See https://github.com/uuidjs/uuid for API details
function v1 ( options , buf , offset ) {
let i = buf && offset || 0 ;
const b = buf || new Array ( 16 ) ;
options = options || { } ;
let node = options . node || _nodeId ;
let clockseq = options . clockseq !== undefined ? options . clockseq : _clockseq ; // node and clockseq need to be initialized to random values if they're not
// specified. We do this lazily to minimize issues related to insufficient
// system entropy. See #189
if ( node == null || clockseq == null ) {
const seedBytes = options . random || ( options . rng || _rng . default ) ( ) ;
if ( node == null ) {
// Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
node = _nodeId = [ seedBytes [ 0 ] | 0x01 , seedBytes [ 1 ] , seedBytes [ 2 ] , seedBytes [ 3 ] , seedBytes [ 4 ] , seedBytes [ 5 ] ] ;
2019-12-12 13:16:16 -05:00
}
2023-04-12 19:55:27 +08:00
if ( clockseq == null ) {
// Per 4.2.2, randomize (14 bit) clockseq
clockseq = _clockseq = ( seedBytes [ 6 ] << 8 | seedBytes [ 7 ] ) & 0x3fff ;
2019-12-12 13:16:16 -05:00
}
2023-04-12 19:55:27 +08:00
} // UUID timestamps are 100 nano-second units since the Gregorian epoch,
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
let msecs = options . msecs !== undefined ? options . msecs : Date . now ( ) ; // Per 4.2.1.2, use count of uuid's generated during the current clock
// cycle to simulate higher resolution clock
let nsecs = options . nsecs !== undefined ? options . nsecs : _lastNSecs + 1 ; // Time since last uuid creation (in msecs)
const dt = msecs - _lastMSecs + ( nsecs - _lastNSecs ) / 10000 ; // Per 4.2.1.2, Bump clockseq on clock regression
if ( dt < 0 && options . clockseq === undefined ) {
clockseq = clockseq + 1 & 0x3fff ;
} // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
// time interval
if ( ( dt < 0 || msecs > _lastMSecs ) && options . nsecs === undefined ) {
nsecs = 0 ;
} // Per 4.2.1.2 Throw error if too many uuids are requested
if ( nsecs >= 10000 ) {
throw new Error ( "uuid.v1(): Can't create more than 10M uuids/sec" ) ;
}
_lastMSecs = msecs ;
_lastNSecs = nsecs ;
_clockseq = clockseq ; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
msecs += 12219292800000 ; // `time_low`
const tl = ( ( msecs & 0xfffffff ) * 10000 + nsecs ) % 0x100000000 ;
b [ i ++ ] = tl >>> 24 & 0xff ;
b [ i ++ ] = tl >>> 16 & 0xff ;
b [ i ++ ] = tl >>> 8 & 0xff ;
b [ i ++ ] = tl & 0xff ; // `time_mid`
const tmh = msecs / 0x100000000 * 10000 & 0xfffffff ;
b [ i ++ ] = tmh >>> 8 & 0xff ;
b [ i ++ ] = tmh & 0xff ; // `time_high_and_version`
b [ i ++ ] = tmh >>> 24 & 0xf | 0x10 ; // include version
b [ i ++ ] = tmh >>> 16 & 0xff ; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
b [ i ++ ] = clockseq >>> 8 | 0x80 ; // `clock_seq_low`
b [ i ++ ] = clockseq & 0xff ; // `node`
for ( let n = 0 ; n < 6 ; ++ n ) {
b [ i + n ] = node [ n ] ;
}
return buf || ( 0 , _stringify . default ) ( b ) ;
}
var _default = v1 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6993 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5920 ) ) ;
var _md = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5842 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v3 = ( 0 , _v . default ) ( 'v3' , 0x30 , _md . default ) ;
var _default = v3 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 5920 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = _default ;
exports . URL = exports . DNS = void 0 ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1458 ) ) ;
var _parse = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6385 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function stringToBytes ( str ) {
str = unescape ( encodeURIComponent ( str ) ) ; // UTF8 escape
const bytes = [ ] ;
for ( let i = 0 ; i < str . length ; ++ i ) {
bytes . push ( str . charCodeAt ( i ) ) ;
}
return bytes ;
}
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8' ;
exports . DNS = DNS ;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8' ;
exports . URL = URL ;
function _default ( name , version , hashfunc ) {
function generateUUID ( value , namespace , buf , offset ) {
if ( typeof value === 'string' ) {
value = stringToBytes ( value ) ;
2021-10-19 10:05:28 -05:00
}
2023-04-12 19:55:27 +08:00
if ( typeof namespace === 'string' ) {
namespace = ( 0 , _parse . default ) ( namespace ) ;
2021-10-19 10:05:28 -05:00
}
2023-04-12 19:55:27 +08:00
if ( namespace . length !== 16 ) {
throw TypeError ( 'Namespace must be array-like (16 iterable integer values, 0-255)' ) ;
} // Compute hash of namespace and value, Per 4.3
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
// hashfunc([...namespace, ... value])`
let bytes = new Uint8Array ( 16 + value . length ) ;
bytes . set ( namespace ) ;
bytes . set ( value , namespace . length ) ;
bytes = hashfunc ( bytes ) ;
bytes [ 6 ] = bytes [ 6 ] & 0x0f | version ;
bytes [ 8 ] = bytes [ 8 ] & 0x3f | 0x80 ;
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = bytes [ i ] ;
}
return buf ;
2019-12-12 13:16:16 -05:00
}
2023-04-12 19:55:27 +08:00
return ( 0 , _stringify . default ) ( bytes ) ;
} // Function#name is not settable on some platforms (#270)
try {
generateUUID . name = name ; // eslint-disable-next-line no-empty
} catch ( err ) { } // For CommonJS default export support
generateUUID . DNS = DNS ;
generateUUID . URL = URL ;
return generateUUID ;
}
/***/ } ) ,
/***/ 1472 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 9784 ) ) ;
var _stringify = _interopRequireDefault ( _ _nccwpck _require _ _ ( 1458 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function v4 ( options , buf , offset ) {
options = options || { } ;
const rnds = options . random || ( options . rng || _rng . default ) ( ) ; // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = rnds [ 6 ] & 0x0f | 0x40 ;
rnds [ 8 ] = rnds [ 8 ] & 0x3f | 0x80 ; // Copy bytes to buffer, if provided
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = rnds [ i ] ;
2019-12-12 13:16:16 -05:00
}
2023-04-12 19:55:27 +08:00
return buf ;
}
return ( 0 , _stringify . default ) ( rnds ) ;
}
var _default = v4 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6217 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5920 ) ) ;
var _sha = _interopRequireDefault ( _ _nccwpck _require _ _ ( 8844 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v5 = ( 0 , _v . default ) ( 'v5' , 0x50 , _sha . default ) ;
var _default = v5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 2609 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _regex = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6230 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function validate ( uuid ) {
return typeof uuid === 'string' && _regex . default . test ( uuid ) ;
}
var _default = validate ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 427 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2609 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function version ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
return parseInt ( uuid . substr ( 14 , 1 ) , 16 ) ;
}
var _default = version ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 1514 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
2024-04-24 12:04:10 -04:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2023-04-12 19:55:27 +08:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-04-24 12:04:10 -04:00
exports . getExecOutput = exports . exec = void 0 ;
const string _decoder _1 = _ _nccwpck _require _ _ ( 1576 ) ;
const tr = _ _importStar ( _ _nccwpck _require _ _ ( 8159 ) ) ;
2023-04-12 19:55:27 +08:00
/ * *
* Exec a command .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < number > exit code
* /
function exec ( commandLine , args , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const commandArgs = tr . argStringToArray ( commandLine ) ;
if ( commandArgs . length === 0 ) {
throw new Error ( ` Parameter 'commandLine' cannot be null or empty. ` ) ;
}
// Path to tool to execute should be first arg
const toolPath = commandArgs [ 0 ] ;
args = commandArgs . slice ( 1 ) . concat ( args || [ ] ) ;
const runner = new tr . ToolRunner ( toolPath , args , options ) ;
return runner . exec ( ) ;
} ) ;
}
exports . exec = exec ;
2024-04-24 12:04:10 -04:00
/ * *
* Exec a command and get the output .
* Output will be streamed to the live console .
* Returns promise with the exit code and collected stdout and stderr
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < ExecOutput > exit code , stdout , and stderr
* /
function getExecOutput ( commandLine , args , options ) {
var _a , _b ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stdout = '' ;
let stderr = '' ;
//Using string decoder covers the case where a mult-byte character is split
const stdoutDecoder = new string _decoder _1 . StringDecoder ( 'utf8' ) ;
const stderrDecoder = new string _decoder _1 . StringDecoder ( 'utf8' ) ;
const originalStdoutListener = ( _a = options === null || options === void 0 ? void 0 : options . listeners ) === null || _a === void 0 ? void 0 : _a . stdout ;
const originalStdErrListener = ( _b = options === null || options === void 0 ? void 0 : options . listeners ) === null || _b === void 0 ? void 0 : _b . stderr ;
const stdErrListener = ( data ) => {
stderr += stderrDecoder . write ( data ) ;
if ( originalStdErrListener ) {
originalStdErrListener ( data ) ;
}
} ;
const stdOutListener = ( data ) => {
stdout += stdoutDecoder . write ( data ) ;
if ( originalStdoutListener ) {
originalStdoutListener ( data ) ;
}
} ;
const listeners = Object . assign ( Object . assign ( { } , options === null || options === void 0 ? void 0 : options . listeners ) , { stdout : stdOutListener , stderr : stdErrListener } ) ;
const exitCode = yield exec ( commandLine , args , Object . assign ( Object . assign ( { } , options ) , { listeners } ) ) ;
//flush any remaining characters
stdout += stdoutDecoder . end ( ) ;
stderr += stderrDecoder . end ( ) ;
return {
exitCode ,
stdout ,
stderr
} ;
} ) ;
}
exports . getExecOutput = getExecOutput ;
2023-04-12 19:55:27 +08:00
//# sourceMappingURL=exec.js.map
/***/ } ) ,
/***/ 8159 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
2024-04-24 12:04:10 -04:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2023-04-12 19:55:27 +08:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-04-24 12:04:10 -04:00
exports . argStringToArray = exports . ToolRunner = void 0 ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2037 ) ) ;
const events = _ _importStar ( _ _nccwpck _require _ _ ( 2361 ) ) ;
const child = _ _importStar ( _ _nccwpck _require _ _ ( 2081 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 1017 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const ioUtil = _ _importStar ( _ _nccwpck _require _ _ ( 1962 ) ) ;
const timers _1 = _ _nccwpck _require _ _ ( 9512 ) ;
2023-04-12 19:55:27 +08:00
/* eslint-disable @typescript-eslint/unbound-method */
const IS _WINDOWS = process . platform === 'win32' ;
/ *
* Class for running command line tools . Handles quoting and arg parsing in a platform agnostic way .
* /
class ToolRunner extends events . EventEmitter {
constructor ( toolPath , args , options ) {
super ( ) ;
if ( ! toolPath ) {
throw new Error ( "Parameter 'toolPath' cannot be null or empty." ) ;
}
this . toolPath = toolPath ;
this . args = args || [ ] ;
this . options = options || { } ;
2019-12-12 13:16:16 -05:00
}
2023-04-12 19:55:27 +08:00
_debug ( message ) {
if ( this . options . listeners && this . options . listeners . debug ) {
this . options . listeners . debug ( message ) ;
}
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
_getCommandString ( options , noPrefix ) {
const toolPath = this . _getSpawnFileName ( ) ;
const args = this . _getSpawnArgs ( options ) ;
let cmd = noPrefix ? '' : '[command]' ; // omit prefix when piped to a second tool
if ( IS _WINDOWS ) {
// Windows + cmd file
if ( this . _isCmdFile ( ) ) {
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
}
// Windows + verbatim
else if ( options . windowsVerbatimArguments ) {
cmd += ` " ${ toolPath } " ` ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
}
// Windows (regular)
else {
cmd += this . _windowsQuoteCmdArg ( toolPath ) ;
for ( const a of args ) {
cmd += ` ${ this . _windowsQuoteCmdArg ( a ) } ` ;
2023-03-09 17:42:29 +01:00
}
}
2021-10-19 10:05:28 -05:00
}
2023-04-12 19:55:27 +08:00
else {
// OSX/Linux - this can likely be improved with some form of quoting.
// creating processes on Unix is fundamentally different than Windows.
// on Unix, execvp() takes an arg array.
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
2023-03-09 17:42:29 +01:00
}
2021-10-19 10:05:28 -05:00
}
2023-04-12 19:55:27 +08:00
return cmd ;
}
_processLineBuffer ( data , strBuffer , onLine ) {
try {
let s = strBuffer + data . toString ( ) ;
let n = s . indexOf ( os . EOL ) ;
while ( n > - 1 ) {
const line = s . substring ( 0 , n ) ;
onLine ( line ) ;
// the rest of the string ...
s = s . substring ( n + os . EOL . length ) ;
n = s . indexOf ( os . EOL ) ;
2021-10-19 10:05:28 -05:00
}
2024-04-24 12:04:10 -04:00
return s ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
catch ( err ) {
// streaming lines to console is best effort. Don't fail a build.
this . _debug ( ` error processing line. Failed with error ${ err } ` ) ;
2024-04-24 12:04:10 -04:00
return '' ;
2021-10-19 10:05:28 -05:00
}
}
2023-04-12 19:55:27 +08:00
_getSpawnFileName ( ) {
if ( IS _WINDOWS ) {
if ( this . _isCmdFile ( ) ) {
return process . env [ 'COMSPEC' ] || 'cmd.exe' ;
}
}
return this . toolPath ;
2021-10-19 10:05:28 -05:00
}
2023-04-12 19:55:27 +08:00
_getSpawnArgs ( options ) {
if ( IS _WINDOWS ) {
if ( this . _isCmdFile ( ) ) {
let argline = ` /D /S /C " ${ this . _windowsQuoteCmdArg ( this . toolPath ) } ` ;
for ( const a of this . args ) {
argline += ' ' ;
argline += options . windowsVerbatimArguments
? a
: this . _windowsQuoteCmdArg ( a ) ;
}
argline += '"' ;
return [ argline ] ;
2021-10-19 10:05:28 -05:00
}
}
2023-04-12 19:55:27 +08:00
return this . args ;
2021-10-19 10:05:28 -05:00
}
2023-04-12 19:55:27 +08:00
_endsWith ( str , end ) {
return str . endsWith ( end ) ;
2021-10-19 10:05:28 -05:00
}
2023-04-12 19:55:27 +08:00
_isCmdFile ( ) {
const upperToolPath = this . toolPath . toUpperCase ( ) ;
return ( this . _endsWith ( upperToolPath , '.CMD' ) ||
this . _endsWith ( upperToolPath , '.BAT' ) ) ;
2021-10-19 10:05:28 -05:00
}
2023-04-12 19:55:27 +08:00
_windowsQuoteCmdArg ( arg ) {
// for .exe, apply the normal quoting rules that libuv applies
if ( ! this . _isCmdFile ( ) ) {
return this . _uvQuoteCmdArg ( arg ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
// otherwise apply quoting rules specific to the cmd.exe command line parser.
// the libuv rules are generic and are not designed specifically for cmd.exe
// command line parser.
//
// for a detailed description of the cmd.exe command line parser, refer to
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
// need quotes for empty arg
if ( ! arg ) {
return '""' ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
// determine whether the arg needs to be quoted
const cmdSpecialChars = [
' ' ,
'\t' ,
'&' ,
'(' ,
')' ,
'[' ,
']' ,
'{' ,
'}' ,
'^' ,
'=' ,
';' ,
'!' ,
"'" ,
'+' ,
',' ,
'`' ,
'~' ,
'|' ,
'<' ,
'>' ,
'"'
] ;
let needsQuotes = false ;
for ( const char of arg ) {
if ( cmdSpecialChars . some ( x => x === char ) ) {
needsQuotes = true ;
break ;
}
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
// short-circuit if quotes not needed
if ( ! needsQuotes ) {
return arg ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
// the following quoting rules are very similar to the rules that by libuv applies.
//
// 1) wrap the string in quotes
//
// 2) double-up quotes - i.e. " => ""
//
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
// doesn't work well with a cmd.exe command line.
//
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
// for example, the command line:
// foo.exe "myarg:""my val"""
// is parsed by a .NET console app into an arg array:
// [ "myarg:\"my val\"" ]
// which is the same end result when applying libuv quoting rules. although the actual
// command line from libuv quoting rules would look like:
// foo.exe "myarg:\"my val\""
//
// 3) double-up slashes that precede a quote,
// e.g. hello \world => "hello \world"
// hello\"world => "hello\\""world"
// hello\\"world => "hello\\\\""world"
// hello world\ => "hello world\\"
//
// technically this is not required for a cmd.exe command line, or the batch argument parser.
// the reasons for including this as a .cmd quoting rule are:
//
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
//
// b) it's what we've been doing previously (by deferring to node default behavior) and we
// haven't heard any complaints about that aspect.
//
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
// by using %%.
//
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
//
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
// to an external program.
//
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
// % can be escaped within a .cmd file.
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ; // double the slash
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '"' ; // double the quote
2023-03-09 17:42:29 +01:00
}
else {
2023-04-12 19:55:27 +08:00
quoteHit = false ;
2023-03-09 17:42:29 +01:00
}
}
2023-04-12 19:55:27 +08:00
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_uvQuoteCmdArg ( arg ) {
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
// is used.
//
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
// pasting copyright notice from Node within this function:
//
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
if ( ! arg ) {
// Need double quotation for empty argument
return '""' ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
if ( ! arg . includes ( ' ' ) && ! arg . includes ( '\t' ) && ! arg . includes ( '"' ) ) {
// No quotation needed
return arg ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
if ( ! arg . includes ( '"' ) && ! arg . includes ( '\\' ) ) {
// No embedded double quotes or backslashes, so I can just wrap
// quote marks around the whole thing.
return ` " ${ arg } " ` ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
// Expected input/output:
// input : hello"world
// output: "hello\"world"
// input : hello""world
// output: "hello\"\"world"
// input : hello\world
// output: hello\world
// input : hello\\world
// output: hello\\world
// input : hello\"world
// output: "hello\\\"world"
// input : hello\\"world
// output: "hello\\\\\"world"
// input : hello world\
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
// but it appears the comment is wrong, it should be "hello world\\"
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ;
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '\\' ;
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
_cloneExecOptions ( options ) {
options = options || { } ;
const result = {
cwd : options . cwd || process . cwd ( ) ,
env : options . env || process . env ,
silent : options . silent || false ,
windowsVerbatimArguments : options . windowsVerbatimArguments || false ,
failOnStdErr : options . failOnStdErr || false ,
ignoreReturnCode : options . ignoreReturnCode || false ,
delay : options . delay || 10000
} ;
result . outStream = options . outStream || process . stdout ;
result . errStream = options . errStream || process . stderr ;
return result ;
}
_getSpawnOptions ( options , toolPath ) {
options = options || { } ;
const result = { } ;
result . cwd = options . cwd ;
result . env = options . env ;
result [ 'windowsVerbatimArguments' ] =
options . windowsVerbatimArguments || this . _isCmdFile ( ) ;
if ( options . windowsVerbatimArguments ) {
result . argv0 = ` " ${ toolPath } " ` ;
}
return result ;
}
/ * *
* Exec a tool .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param tool path to tool to exec
* @ param options optional exec options . See ExecOptions
* @ returns number
* /
exec ( ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2024-04-24 12:04:10 -04:00
// root the tool path if it is unrooted and contains relative pathing
if ( ! ioUtil . isRooted ( this . toolPath ) &&
( this . toolPath . includes ( '/' ) ||
( IS _WINDOWS && this . toolPath . includes ( '\\' ) ) ) ) {
// prefer options.cwd if it is specified, however options.cwd may also need to be rooted
this . toolPath = path . resolve ( process . cwd ( ) , this . options . cwd || process . cwd ( ) , this . toolPath ) ;
}
// if the tool is only a file name, then resolve it from the PATH
// otherwise verify it exists (add extension on Windows if necessary)
this . toolPath = yield io . which ( this . toolPath , true ) ;
return new Promise ( ( resolve , reject ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-04-12 19:55:27 +08:00
this . _debug ( ` exec tool: ${ this . toolPath } ` ) ;
this . _debug ( 'arguments:' ) ;
for ( const arg of this . args ) {
this . _debug ( ` ${ arg } ` ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
const optionsNonNull = this . _cloneExecOptions ( this . options ) ;
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( this . _getCommandString ( optionsNonNull ) + os . EOL ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
const state = new ExecState ( optionsNonNull , this . toolPath ) ;
state . on ( 'debug' , ( message ) => {
this . _debug ( message ) ;
} ) ;
2024-04-24 12:04:10 -04:00
if ( this . options . cwd && ! ( yield ioUtil . exists ( this . options . cwd ) ) ) {
return reject ( new Error ( ` The cwd: ${ this . options . cwd } does not exist! ` ) ) ;
}
2023-04-12 19:55:27 +08:00
const fileName = this . _getSpawnFileName ( ) ;
const cp = child . spawn ( fileName , this . _getSpawnArgs ( optionsNonNull ) , this . _getSpawnOptions ( this . options , fileName ) ) ;
2024-04-24 12:04:10 -04:00
let stdbuffer = '' ;
2023-04-12 19:55:27 +08:00
if ( cp . stdout ) {
cp . stdout . on ( 'data' , ( data ) => {
if ( this . options . listeners && this . options . listeners . stdout ) {
this . options . listeners . stdout ( data ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( data ) ;
2023-03-09 17:42:29 +01:00
}
2024-04-24 12:04:10 -04:00
stdbuffer = this . _processLineBuffer ( data , stdbuffer , ( line ) => {
2023-04-12 19:55:27 +08:00
if ( this . options . listeners && this . options . listeners . stdline ) {
this . options . listeners . stdline ( line ) ;
}
} ) ;
} ) ;
2023-03-09 17:42:29 +01:00
}
2024-04-24 12:04:10 -04:00
let errbuffer = '' ;
2023-04-12 19:55:27 +08:00
if ( cp . stderr ) {
cp . stderr . on ( 'data' , ( data ) => {
state . processStderr = true ;
if ( this . options . listeners && this . options . listeners . stderr ) {
this . options . listeners . stderr ( data ) ;
}
if ( ! optionsNonNull . silent &&
optionsNonNull . errStream &&
optionsNonNull . outStream ) {
const s = optionsNonNull . failOnStdErr
? optionsNonNull . errStream
: optionsNonNull . outStream ;
s . write ( data ) ;
}
2024-04-24 12:04:10 -04:00
errbuffer = this . _processLineBuffer ( data , errbuffer , ( line ) => {
2023-04-12 19:55:27 +08:00
if ( this . options . listeners && this . options . listeners . errline ) {
this . options . listeners . errline ( line ) ;
}
} ) ;
} ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
cp . on ( 'error' , ( err ) => {
state . processError = err . message ;
state . processExited = true ;
state . processClosed = true ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'exit' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
this . _debug ( ` Exit code ${ code } received from tool ' ${ this . toolPath } ' ` ) ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'close' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
state . processClosed = true ;
this . _debug ( ` STDIO streams have closed for tool ' ${ this . toolPath } ' ` ) ;
state . CheckComplete ( ) ;
} ) ;
state . on ( 'done' , ( error , exitCode ) => {
if ( stdbuffer . length > 0 ) {
this . emit ( 'stdline' , stdbuffer ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
if ( errbuffer . length > 0 ) {
this . emit ( 'errline' , errbuffer ) ;
}
cp . removeAllListeners ( ) ;
if ( error ) {
reject ( error ) ;
2023-03-09 17:42:29 +01:00
}
else {
2023-04-12 19:55:27 +08:00
resolve ( exitCode ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
} ) ;
2024-04-24 12:04:10 -04:00
if ( this . options . input ) {
if ( ! cp . stdin ) {
throw new Error ( 'child process missing stdin' ) ;
}
cp . stdin . end ( this . options . input ) ;
}
} ) ) ;
2023-03-09 17:42:29 +01:00
} ) ;
}
}
2023-04-12 19:55:27 +08:00
exports . ToolRunner = ToolRunner ;
/**
 * Convert an arg string to an array of args. Handles escaping
 *
 * @param    argString   string of arguments
 * @returns  string[]    array of arguments
 */
function argStringToArray(argString) {
    const args = [];
    let inQuotes = false;
    let escaped = false;
    let arg = '';
    function append(c) {
        // we only escape double quotes.
        if (escaped && c !== '"') {
            arg += '\\'; // the backslash was literal; restore it
        }
        arg += c;
        escaped = false;
    }
    for (let i = 0; i < argString.length; i++) {
        const c = argString.charAt(i);
        if (c === '"') {
            if (!escaped) {
                inQuotes = !inQuotes;
            }
            else {
                append(c);
            }
            continue;
        }
        if (c === '\\' && escaped) {
            append(c);
            continue;
        }
        if (c === '\\' && inQuotes) {
            // defer: next char decides if this is an escape or a literal backslash
            escaped = true;
            continue;
        }
        if (c === ' ' && !inQuotes) {
            if (arg.length > 0) {
                args.push(arg);
                arg = '';
            }
            continue;
        }
        append(c);
    }
    if (arg.length > 0) {
        args.push(arg.trim());
    }
    return args;
}
exports . argStringToArray = argStringToArray ;
class ExecState extends events.EventEmitter {
    constructor(options, toolPath) {
        super();
        this.processClosed = false; // tracks whether the process has exited and stdio is closed
        this.processError = '';
        this.processExitCode = 0;
        this.processExited = false; // tracks whether the process has exited
        this.processStderr = false; // tracks whether stderr was written to
        this.delay = 10000; // 10 seconds
        this.done = false;
        this.timeout = null;
        if (!toolPath) {
            throw new Error('toolPath must not be empty');
        }
        this.options = options;
        this.toolPath = toolPath;
        if (options.delay) {
            this.delay = options.delay;
        }
    }
    CheckComplete() {
        if (this.done) {
            return;
        }
        if (this.processClosed) {
            this._setResult();
        }
        else if (this.processExited) {
            // process exited but stdio not closed yet; give it `delay` ms before forcing a result
            this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this);
        }
    }
    _debug(message) {
        this.emit('debug', message);
    }
    _setResult() {
        // determine whether there is an error
        let error;
        if (this.processExited) {
            if (this.processError) {
                error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
            }
            else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
                error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
            }
            else if (this.processStderr && this.options.failOnStdErr) {
                error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
            }
        }
        // clear the timeout
        if (this.timeout) {
            clearTimeout(this.timeout);
            this.timeout = null;
        }
        this.done = true;
        this.emit('done', error, this.processExitCode);
    }
    static HandleTimeout(state) {
        if (state.done) {
            return;
        }
        if (!state.processClosed && state.processExited) {
            const message = `The STDIO streams did not close within ${state.delay /
                1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
            state._debug(message);
        }
        state._setResult();
    }
}
2023-04-12 19:55:27 +08:00
//# sourceMappingURL=toolrunner.js.map
2019-12-03 10:28:59 -05:00
2019-12-12 13:16:16 -05:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
/***/ 4087 :
2023-03-09 17:42:29 +01:00
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2019-12-12 13:16:16 -05:00
"use strict" ;
2023-04-12 19:55:27 +08:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . Context = void 0 ;
const fs _1 = _ _nccwpck _require _ _ ( 7147 ) ;
const os _1 = _ _nccwpck _require _ _ ( 2037 ) ;
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        var _a, _b, _c;
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
        this.job = process.env.GITHUB_JOB;
        this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
        this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
        this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
        this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
        this.graphqlUrl =
            (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
    }
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports . Context = Context ;
//# sourceMappingURL=context.js.map
2019-12-03 10:28:59 -05:00
2021-10-19 10:05:28 -05:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
/***/ 5438 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2021-10-19 10:05:28 -05:00
"use strict" ;
2023-04-12 19:55:27 +08:00
// Standard TypeScript emit helpers for CommonJS interop of `import * as ns`.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOctokit = exports.context = void 0;
const Context = __importStar(__nccwpck_require__(4087));
const utils_1 = __nccwpck_require__(3030);
exports.context = new Context.Context();
/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param     token    the repo PAT or GITHUB_TOKEN
 * @param     options  other options to set
 */
function getOctokit(token, options, ...additionalPlugins) {
    const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
    return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options));
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map
2021-10-19 10:05:28 -05:00
2019-12-03 10:28:59 -05:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
/***/ 7914 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2019-12-03 10:28:59 -05:00
2020-01-27 10:21:50 -05:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
// Standard TypeScript emit helpers: `import * as ns` interop and async/await lowering.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
2023-04-12 19:55:27 +08:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-04-24 12:04:10 -04:00
exports . getApiBaseUrl = exports . getProxyFetch = exports . getProxyAgentDispatcher = exports . getProxyAgent = exports . getAuthString = void 0 ;
2023-04-12 19:55:27 +08:00
const httpClient = _ _importStar ( _ _nccwpck _require _ _ ( 6255 ) ) ;
2024-04-24 12:04:10 -04:00
const undici _1 = _ _nccwpck _require _ _ ( 1773 ) ;
2023-04-12 19:55:27 +08:00
/**
 * Builds the octokit auth string from a token and/or an explicit opts.auth value.
 * Exactly one of the two must be provided.
 *
 * @param     token    the repo PAT or GITHUB_TOKEN
 * @param     options  octokit options possibly carrying an `auth` value
 * @throws    Error when neither or both of token and opts.auth are given
 */
function getAuthString(token, options) {
    if (!token && !options.auth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    else if (token && options.auth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
exports . getAuthString = getAuthString ;
// Returns an http(s) agent honoring the environment proxy settings for the given URL.
function getProxyAgent(destinationUrl) {
    const hc = new httpClient.HttpClient();
    return hc.getAgent(destinationUrl);
}
exports . getProxyAgent = getProxyAgent ;
2024-04-24 12:04:10 -04:00
// Returns an undici dispatcher honoring the environment proxy settings for the given URL.
function getProxyAgentDispatcher(destinationUrl) {
    const hc = new httpClient.HttpClient();
    return hc.getAgentDispatcher(destinationUrl);
}
exports . getProxyAgentDispatcher = getProxyAgentDispatcher ;
// Returns a fetch implementation that routes requests through the proxy dispatcher.
function getProxyFetch(destinationUrl) {
    const httpDispatcher = getProxyAgentDispatcher(destinationUrl);
    const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () {
        return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher }));
    });
    return proxyFetch;
}
exports . getProxyFetch = getProxyFetch ;
2023-04-12 19:55:27 +08:00
// Resolves the GitHub API base URL, preferring the GHES/runner-provided env var.
function getApiBaseUrl() {
    return process.env['GITHUB_API_URL'] || 'https://api.github.com';
}
exports . getApiBaseUrl = getApiBaseUrl ;
//# sourceMappingURL=utils.js.map
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
/***/ 3030 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
// Standard TypeScript emit helpers for CommonJS interop of `import * as ns`.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getOctokitOptions = exports . GitHub = exports . defaults = exports . context = void 0 ;
const Context = _ _importStar ( _ _nccwpck _require _ _ ( 4087 ) ) ;
const Utils = _ _importStar ( _ _nccwpck _require _ _ ( 7914 ) ) ;
// octokit + plugins
const core _1 = _ _nccwpck _require _ _ ( 6762 ) ;
const plugin _rest _endpoint _methods _1 = _ _nccwpck _require _ _ ( 3044 ) ;
const plugin _paginate _rest _1 = _ _nccwpck _require _ _ ( 4193 ) ;
exports . context = new Context . Context ( ) ;
const baseUrl = Utils . getApiBaseUrl ( ) ;
exports . defaults = {
baseUrl ,
request : {
2024-04-24 12:04:10 -04:00
agent : Utils . getProxyAgent ( baseUrl ) ,
fetch : Utils . getProxyFetch ( baseUrl )
2023-04-12 19:55:27 +08:00
}
} ;
exports . GitHub = core _1 . Octokit . plugin ( plugin _rest _endpoint _methods _1 . restEndpointMethods , plugin _paginate _rest _1 . paginateRest ) . defaults ( exports . defaults ) ;
/**
 * Convience function to correctly format Octokit Options to pass into the constructor.
 *
 * @param     token    the repo PAT or GITHUB_TOKEN
 * @param     options  other options to set
 */
function getOctokitOptions(token, options) {
    const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller
    // Auth
    const auth = Utils.getAuthString(token, opts);
    if (auth) {
        opts.auth = auth;
    }
    return opts;
}
2023-04-12 19:55:27 +08:00
exports . getOctokitOptions = getOctokitOptions ;
//# sourceMappingURL=utils.js.map
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
/***/ 5526 :
/***/ ( function ( _ _unused _webpack _module , exports ) {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
// Standard TypeScript emit helper: lowers async/await onto generators + Promises.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . PersonalAccessTokenCredentialHandler = exports . BearerCredentialHandler = exports . BasicCredentialHandler = void 0 ;
// Adds an HTTP Basic Authorization header built from username:password.
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports . BasicCredentialHandler = BasicCredentialHandler ;
// Adds an HTTP Bearer Authorization header carrying the token.
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Bearer ${this.token}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
// Adds a Basic Authorization header with the PAT encoded as "PAT:<token>".
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
//# sourceMappingURL=auth.js.map
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
/***/ 6255 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
/* eslint-disable @typescript-eslint/no-explicit-any */
// Standard TypeScript emit helpers: `import * as ns` interop and async/await lowering.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . HttpClient = exports . isHttps = exports . HttpClientResponse = exports . HttpClientError = exports . getProxyUrl = exports . MediaTypes = exports . Headers = exports . HttpCodes = void 0 ;
const http = _ _importStar ( _ _nccwpck _require _ _ ( 3685 ) ) ;
const https = _ _importStar ( _ _nccwpck _require _ _ ( 5687 ) ) ;
const pm = _ _importStar ( _ _nccwpck _require _ _ ( 9835 ) ) ;
const tunnel = _ _importStar ( _ _nccwpck _require _ _ ( 4294 ) ) ;
2024-04-24 12:04:10 -04:00
const undici _1 = _ _nccwpck _require _ _ ( 1773 ) ;
2023-04-12 19:55:27 +08:00
// Compiled TypeScript numeric enum of the HTTP status codes this client
// branches on (redirect set, auth, retryable 5xx). Provides both
// name -> code and code -> name lookups.
// (Interleaved VCS timestamp line that corrupted the closing call removed.)
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));
2023-04-12 19:55:27 +08:00
// Compiled TypeScript string enum of the header names used by the *Json
// convenience helpers. Values are lowercase because _mergeHeaders
// lower-cases all header keys.
// (Interleaved VCS timestamp line that corrupted the closing call removed.)
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers || (exports.Headers = Headers = {}));
2023-04-12 19:55:27 +08:00
// Compiled TypeScript string enum of media types used as Accept/Content-Type
// defaults by the *Json helpers.
// (Interleaved VCS timestamp line that corrupted the closing call removed.)
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));
2023-03-09 17:42:29 +01:00
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 * @returns the proxy URL as a string, or '' when no proxy applies.
 */
function getProxyUrl(serverUrl) {
    const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
/**
 * Error thrown by the *Json helpers for non-2xx responses. Carries the HTTP
 * status on `statusCode` (and the parsed body, when any, on `result`) so
 * callers can branch on it.
 * (Interleaved VCS timestamp line removed from the class body.)
 */
class HttpClientError extends Error {
    constructor(message, statusCode) {
        super(message);
        this.name = 'HttpClientError';
        this.statusCode = statusCode;
        // Repair the prototype chain, which extending Error breaks in
        // down-compiled ES5 output.
        Object.setPrototypeOf(this, HttpClientError.prototype);
    }
}
2023-04-12 19:55:27 +08:00
exports . HttpClientError = HttpClientError ;
/**
 * Thin wrapper around an incoming response message (http.IncomingMessage-like
 * event emitter) that buffers the body on demand.
 *
 * Fixes: interleaved VCS timestamp lines removed; readBody previously
 * re-allocated via Buffer.concat on every 'data' chunk (accidental O(n^2)) —
 * chunks are now collected and concatenated once, like readBodyBuffer.
 * Methods return plain Promises (identical contract to the transpiled
 * __awaiter form they replace).
 */
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    /**
     * Reads the full response body and resolves it as a string
     * (default Buffer#toString decoding).
     * @returns {Promise<string>}
     */
    readBody() {
        return new Promise(resolve => {
            const chunks = [];
            this.message.on('data', chunk => {
                chunks.push(chunk);
            });
            // 'end' fires after the final 'data' event, so every chunk is seen.
            this.message.on('end', () => {
                resolve(Buffer.concat(chunks).toString());
            });
        });
    }
    /**
     * Reads the full response body and resolves it as a raw Buffer.
     * @returns {Promise<Buffer>}
     */
    readBodyBuffer() {
        return new Promise(resolve => {
            const chunks = [];
            this.message.on('data', chunk => {
                chunks.push(chunk);
            });
            this.message.on('end', () => {
                resolve(Buffer.concat(chunks));
            });
        });
    }
}
2023-04-12 19:55:27 +08:00
exports . HttpClientResponse = HttpClientResponse ;
/**
 * Returns true when the given request URL uses the https: protocol.
 * @param requestUrl URL string to inspect (must be parseable by `new URL`).
 * (Interleaved VCS timestamp line removed from the function body.)
 */
function isHttps(requestUrl) {
    const parsedUrl = new URL(requestUrl);
    return parsedUrl.protocol === 'https:';
}
2023-04-12 19:55:27 +08:00
exports . isHttps = isHttps ;
/**
 * HTTP client with proxy support (tunnel + undici dispatcher), transparent
 * redirect following, retry with exponential backoff for idempotent verbs,
 * pluggable auth handlers, and JSON convenience helpers.
 *
 * Compiled output of @actions/http-client. The only code change here is the
 * removal of interleaved VCS timestamp lines that had corrupted the bundle;
 * all logic is preserved verbatim.
 */
class HttpClient {
    constructor(userAgent, handlers, requestOptions) {
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    options(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
        });
    }
    get(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('GET', requestUrl, null, additionalHeaders || {});
        });
    }
    del(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('DELETE', requestUrl, null, additionalHeaders || {});
        });
    }
    post(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('POST', requestUrl, data, additionalHeaders || {});
        });
    }
    patch(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('PATCH', requestUrl, data, additionalHeaders || {});
        });
    }
    put(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('PUT', requestUrl, data, additionalHeaders || {});
        });
    }
    head(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('HEAD', requestUrl, null, additionalHeaders || {});
        });
    }
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(verb, requestUrl, stream, additionalHeaders);
        });
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    getJson(requestUrl, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            const res = yield this.get(requestUrl, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    postJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.post(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    putJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.put(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    patchJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.patch(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    request(verb, requestUrl, data, headers) {
        return __awaiter(this, void 0, void 0, function* () {
            if (this._disposed) {
                throw new Error('Client has already been disposed.');
            }
            const parsedUrl = new URL(requestUrl);
            let info = this._prepareRequest(verb, parsedUrl, headers);
            // Only perform retries on reads since writes may not be idempotent.
            const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
                ? this._maxRetries + 1
                : 1;
            let numTries = 0;
            let response;
            do {
                response = yield this.requestRaw(info, data);
                // Check if it's an authentication challenge
                if (response &&
                    response.message &&
                    response.message.statusCode === HttpCodes.Unauthorized) {
                    let authenticationHandler;
                    for (const handler of this.handlers) {
                        if (handler.canHandleAuthentication(response)) {
                            authenticationHandler = handler;
                            break;
                        }
                    }
                    if (authenticationHandler) {
                        return authenticationHandler.handleAuthentication(this, info, data);
                    }
                    else {
                        // We have received an unauthorized response but have no handlers to handle it.
                        // Let the response return to the caller.
                        return response;
                    }
                }
                let redirectsRemaining = this._maxRedirects;
                while (response.message.statusCode &&
                    HttpRedirectCodes.includes(response.message.statusCode) &&
                    this._allowRedirects &&
                    redirectsRemaining > 0) {
                    const redirectUrl = response.message.headers['location'];
                    if (!redirectUrl) {
                        // if there's no location to redirect to, we won't
                        break;
                    }
                    const parsedRedirectUrl = new URL(redirectUrl);
                    if (parsedUrl.protocol === 'https:' &&
                        parsedUrl.protocol !== parsedRedirectUrl.protocol &&
                        !this._allowRedirectDowngrade) {
                        throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                    }
                    // we need to finish reading the response before reassigning response
                    // which will leak the open socket.
                    yield response.readBody();
                    // strip authorization header if redirected to a different hostname
                    if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                        for (const header in headers) {
                            // header names are case insensitive
                            if (header.toLowerCase() === 'authorization') {
                                delete headers[header];
                            }
                        }
                    }
                    // let's make the request with the new redirectUrl
                    info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                    response = yield this.requestRaw(info, data);
                    redirectsRemaining--;
                }
                if (!response.message.statusCode ||
                    !HttpResponseRetryCodes.includes(response.message.statusCode)) {
                    // If not a retry code, return immediately instead of retrying
                    return response;
                }
                numTries += 1;
                if (numTries < maxTries) {
                    yield response.readBody();
                    yield this._performExponentialBackoff(numTries);
                }
            } while (numTries < maxTries);
            return response;
        });
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info, data) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve, reject) => {
                function callbackForResult(err, res) {
                    if (err) {
                        reject(err);
                    }
                    else if (!res) {
                        // If `err` is not passed, then `res` must be passed.
                        reject(new Error('Unknown error'));
                    }
                    else {
                        resolve(res);
                    }
                }
                this.requestRawWithCallback(info, data, callbackForResult);
            });
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info, data, onResult) {
        if (typeof data === 'string') {
            if (!info.options.headers) {
                info.options.headers = {};
            }
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        // Guard so timeout + error (or error after response) invoke onResult once.
        function handleResult(err, res) {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        }
        const req = info.httpModule.request(info.options, (msg) => {
            const res = new HttpClientResponse(msg);
            handleResult(undefined, res);
        });
        let socket;
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error(`Request timeout: ${info.options.path}`));
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        const parsedUrl = new URL(serverUrl);
        return this._getAgent(parsedUrl);
    }
    getAgentDispatcher(serverUrl) {
        const parsedUrl = new URL(serverUrl);
        const proxyUrl = pm.getProxyUrl(parsedUrl);
        const useProxy = proxyUrl && proxyUrl.hostname;
        if (!useProxy) {
            return;
        }
        return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
    }
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            for (const handler of this.handlers) {
                handler.prepareRequest(info.options);
            }
        }
        return info;
    }
    _mergeHeaders(headers) {
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
        }
        return lowercaseKeys(headers || {});
    }
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    _getAgent(parsedUrl) {
        let agent;
        const proxyUrl = pm.getProxyUrl(parsedUrl);
        const useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (!useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        // This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
        if (proxyUrl && proxyUrl.hostname) {
            const agentOptions = {
                maxSockets,
                keepAlive: this._keepAlive,
                proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
                    proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
                })), { host: proxyUrl.hostname, port: proxyUrl.port })
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if tunneling agent isn't assigned create a new agent
        if (!agent) {
            const options = { keepAlive: this._keepAlive, maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    _getProxyAgentDispatcher(parsedUrl, proxyUrl) {
        let proxyAgent;
        if (this._keepAlive) {
            proxyAgent = this._proxyAgentDispatcher;
        }
        // if agent is already assigned use that agent.
        if (proxyAgent) {
            return proxyAgent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
            token: `${proxyUrl.username}:${proxyUrl.password}`
        })));
        this._proxyAgentDispatcher = proxyAgent;
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
                rejectUnauthorized: false
            });
        }
        return proxyAgent;
    }
    _performExponentialBackoff(retryNumber) {
        return __awaiter(this, void 0, void 0, function* () {
            retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
            const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
            return new Promise(resolve => setTimeout(() => resolve(), ms));
        });
    }
    _processResponse(res, options) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
                const statusCode = res.message.statusCode || 0;
                const response = {
                    statusCode,
                    result: null,
                    headers: {}
                };
                // not found leads to null obj returned
                // NOTE(review): upstream resolves here without returning, so the body
                // read below still runs; the extra resolve/reject calls are no-ops.
                if (statusCode === HttpCodes.NotFound) {
                    resolve(response);
                }
                // get the result from the body
                function dateTimeDeserializer(key, value) {
                    if (typeof value === 'string') {
                        const a = new Date(value);
                        if (!isNaN(a.valueOf())) {
                            return a;
                        }
                    }
                    return value;
                }
                let obj;
                let contents;
                try {
                    contents = yield res.readBody();
                    if (contents && contents.length > 0) {
                        if (options && options.deserializeDates) {
                            obj = JSON.parse(contents, dateTimeDeserializer);
                        }
                        else {
                            obj = JSON.parse(contents);
                        }
                        response.result = obj;
                    }
                    response.headers = res.message.headers;
                }
                catch (err) {
                    // Invalid resource (contents not json); leaving result obj null
                }
                // note that 3xx redirects are handled by the http layer.
                if (statusCode > 299) {
                    let msg;
                    // if exception/error in body, attempt to get better error
                    if (obj && obj.message) {
                        msg = obj.message;
                    }
                    else if (contents && contents.length > 0) {
                        // it may be the case that the exception is in the body message as string
                        msg = contents;
                    }
                    else {
                        msg = `Failed request: (${statusCode})`;
                    }
                    const err = new HttpClientError(msg, statusCode);
                    err.result = response.result;
                    reject(err);
                }
                else {
                    resolve(response);
                }
            }));
        });
    }
}
2023-04-12 19:55:27 +08:00
exports . HttpClient = HttpClient ;
/** Returns a shallow copy of `obj` with every key lower-cased (later
 *  duplicate keys overwrite earlier ones, as in the reduce-based original). */
const lowercaseKeys = (obj) => {
    const out = {};
    for (const key of Object.keys(obj)) {
        out[key.toLowerCase()] = obj[key];
    }
    return out;
};
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 9835 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . checkBypass = exports . getProxyUrl = void 0 ;
/**
 * Resolves the proxy URL for `reqUrl` from the standard proxy environment
 * variables (https_proxy/HTTPS_PROXY for https requests, http_proxy/HTTP_PROXY
 * otherwise), honoring no_proxy via checkBypass.
 * @param reqUrl URL of the outgoing request
 * @returns a URL instance for the proxy, or undefined when no proxy applies
 *          (also undefined when the proxy value starts with http(s):// but is
 *          unparseable — preserved upstream behavior).
 * (Interleaved VCS timestamp lines removed from the function body.)
 */
function getProxyUrl(reqUrl) {
    const usingSsl = reqUrl.protocol === 'https:';
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    const proxyVar = (() => {
        if (usingSsl) {
            return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
        }
        else {
            return process.env['http_proxy'] || process.env['HTTP_PROXY'];
        }
    })();
    if (proxyVar) {
        try {
            return new URL(proxyVar);
        }
        catch (_a) {
            // Tolerate bare host[:port] values by prefixing a scheme.
            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
                return new URL(`http://${proxyVar}`);
        }
    }
    else {
        return undefined;
    }
}
2023-04-12 19:55:27 +08:00
exports . getProxyUrl = getProxyUrl ;
/**
 * Determines whether the proxy should be bypassed for `reqUrl`, based on
 * loopback detection and the no_proxy / NO_PROXY environment variable
 * (comma-separated host list; '*' matches everything; leading-dot and
 * suffix matches are honored; host:port entries are compared too).
 * @param reqUrl URL of the outgoing request
 * @returns true when the request must NOT go through the proxy
 * (Interleaved VCS timestamp lines removed from the function body.)
 */
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const reqHost = reqUrl.hostname;
    if (isLoopbackAddress(reqHost)) {
        return true;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the request port
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Format the request hostname and hostname with port
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (const upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
        if (upperNoProxyItem === '*' ||
            upperReqHosts.some(x => x === upperNoProxyItem ||
                x.endsWith(`.${upperNoProxyItem}`) ||
                (upperNoProxyItem.startsWith('.') &&
                    x.endsWith(`${upperNoProxyItem}`)))) {
            return true;
        }
    }
    return false;
}
2023-04-12 19:55:27 +08:00
exports . checkBypass = checkBypass ;
/**
 * Returns true when `host` is a loopback address: "localhost", any 127.x
 * IPv4 address, or the bracketed IPv6 loopback forms.
 */
function isLoopbackAddress(host) {
    const normalized = host.toLowerCase();
    if (normalized === 'localhost') {
        return true;
    }
    return (normalized.startsWith('127.') ||
        normalized.startsWith('[::1]') ||
        normalized.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map
2020-01-27 10:21:50 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2020-01-27 10:21:50 -05:00
2023-04-12 19:55:27 +08:00
/***/ 1962 :
2023-03-09 17:42:29 +01:00
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
2020-01-27 10:21:50 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2020-01-27 10:21:50 -05:00
2023-04-12 19:55:27 +08:00
// TS-emitted "__createBinding" helper: re-export property `k` of module `m`
// on namespace object `o` under the name `k2` (defaults to `k`).
// Updated to the newer tslib form (descriptor check + live getter) for
// consistency with the identical helper emitted for module 7219 earlier in
// this bundle; the getter preserves live bindings where the old data-copy
// branch froze the value at bind time.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function () { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TS-emitted "__setModuleDefault" helper (second copy, for module 1962):
// installs `value` as the namespace object's `default` member, via
// defineProperty when Object.create exists, plain assignment otherwise.
var __setModuleDefault =
    (this && this.__setModuleDefault) ||
    (Object.create
        ? function (target, value) {
              Object.defineProperty(target, "default", { enumerable: true, value: value });
          }
        : function (target, value) {
              target["default"] = value;
          });
2023-03-09 17:42:29 +01:00
// TS-emitted "__importStar" helper (second copy, for module 1962): adapts a
// CommonJS module for consumption as an ES namespace import.
// Fixes: interleaved VCS timestamp lines removed; own-property test changed
// from `Object.hasOwnProperty.call` to the conventional
// `Object.prototype.hasOwnProperty.call`, matching the copy emitted for
// module 7219 earlier in this bundle (same behavior, canonical form).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
2023-04-12 19:55:27 +08:00
// TS-emitted "__awaiter" helper (second copy, for module 1962): drives a
// generator compiled from an async function, adopting each yielded value
// into a Promise and settling the outer Promise with the final result or
// first thrown error. Statement order is load-bearing — left unchanged.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-Promise yields so `.then` is always available.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Module-level export wiring for io-util: re-exports the promise-based fs
// API under individual names plus platform constants.
// (Interleaved VCS timestamp lines removed; code otherwise preserved
// verbatim, including the compiler-generated comma expression.)
var _a;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;
const fs = __importStar(__nccwpck_require__(7147));
const path = __importStar(__nccwpck_require__(1017));
_a = fs.promises
// export const {open} = 'fs'
, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
// export const {open} = 'fs'
exports.IS_WINDOWS = process.platform === 'win32';
// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691
exports.UV_FS_O_EXLOCK = 0x10000000;
exports.READONLY = fs.constants.O_RDONLY;
/**
 * Returns true when the given path exists on disk, false when stat fails
 * with ENOENT. Any other stat error (e.g. EACCES) is rethrown.
 */
function exists(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            yield exports.stat(fsPath);
        }
        catch (err) {
            if (err.code === 'ENOENT') {
                return false;
            }
            throw err;
        }
        return true;
    });
}
exports.exists = exists;
/**
 * Returns whether fsPath refers to a directory.
 * With useStat=true symlinks are followed (stat); otherwise the link
 * itself is inspected (lstat).
 */
function isDirectory(fsPath, useStat = false) {
    return __awaiter(this, void 0, void 0, function* () {
        const statFn = useStat ? exports.stat : exports.lstat;
        const stats = yield statFn(fsPath);
        return stats.isDirectory();
    });
}
exports.isDirectory = isDirectory;
/**
 * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
 * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
 */
function isRooted(p) {
    p = normalizeSeparators(p);
    if (!p) {
        throw new Error('isRooted() parameter "p" cannot be empty');
    }
    if (exports.IS_WINDOWS) {
        return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
        ); // e.g. C: or C:\hello
    }
    return p.startsWith('/');
}
exports.isRooted = isRooted;
/**
 * Best effort attempt to determine whether a file exists and is executable.
 * @param filePath    file path to check
 * @param extensions  additional file extensions to try
 * @return if file exists and is executable, returns the file path. otherwise empty string.
 */
function tryGetExecutablePath(filePath, extensions) {
    return __awaiter(this, void 0, void 0, function* () {
        let stats = undefined;
        try {
            // test file exists
            stats = yield exports.stat(filePath);
        }
        catch (err) {
            if (err.code !== 'ENOENT') {
                // eslint-disable-next-line no-console
                console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
            }
        }
        if (stats && stats.isFile()) {
            if (exports.IS_WINDOWS) {
                // on Windows, test for valid extension
                const upperExt = path.extname(filePath).toUpperCase();
                if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
                    return filePath;
                }
            }
            else {
                if (isUnixExecutable(stats)) {
                    return filePath;
                }
            }
        }
        // try each extension
        const originalFilePath = filePath;
        for (const extension of extensions) {
            filePath = originalFilePath + extension;
            stats = undefined;
            try {
                stats = yield exports.stat(filePath);
            }
            catch (err) {
                if (err.code !== 'ENOENT') {
                    // eslint-disable-next-line no-console
                    console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
                }
            }
            if (stats && stats.isFile()) {
                if (exports.IS_WINDOWS) {
                    // preserve the case of the actual file (since an extension was appended)
                    try {
                        const directory = path.dirname(filePath);
                        const upperName = path.basename(filePath).toUpperCase();
                        for (const actualName of yield exports.readdir(directory)) {
                            if (upperName === actualName.toUpperCase()) {
                                filePath = path.join(directory, actualName);
                                break;
                            }
                        }
                    }
                    catch (err) {
                        // eslint-disable-next-line no-console
                        console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
                    }
                    return filePath;
                }
                else {
                    if (isUnixExecutable(stats)) {
                        return filePath;
                    }
                }
            }
        }
        return '';
    });
}
exports.tryGetExecutablePath = tryGetExecutablePath;
/**
 * Normalizes path separators for the current platform: on Windows forward
 * slashes become backslashes and runs of backslashes collapse to one; on
 * POSIX, runs of forward slashes collapse to one.
 */
function normalizeSeparators(p) {
    p = p || '';
    if (exports.IS_WINDOWS) {
        // convert slashes on Windows
        p = p.replace(/\//g, '\\');
        // remove redundant slashes
        return p.replace(/\\\\+/g, '\\');
    }
    // remove redundant slashes
    return p.replace(/\/\/+/g, '/');
}
// on Mac/Linux, test the execute bit
//     R   W  X  R  W X R W X
//   256 128 64 32 16 8 4 2 1
function isUnixExecutable(stats) {
    // Deferred checks preserve the original short-circuit order, so
    // process.getgid()/getuid() are only consulted when needed.
    const othersCanExecute = (stats.mode & 1) > 0;
    const groupCanExecute = () => (stats.mode & 8) > 0 && stats.gid === process.getgid();
    const ownerCanExecute = () => (stats.mode & 64) > 0 && stats.uid === process.getuid();
    return othersCanExecute || groupCanExecute() || ownerCanExecute();
}
// Get the path of cmd.exe in windows
function getCmdPath() {
    const comspec = process.env['COMSPEC'];
    return comspec !== null && comspec !== void 0 ? comspec : `cmd.exe`;
}
exports.getCmdPath = getCmdPath;
//# sourceMappingURL=io-util.js.map
/***/ } ) ,
/***/ 7436 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
// TypeScript compiler-emitted interop helpers (do not edit by hand).
// __createBinding: re-exports property k of module m on o (as k2), via a live
// getter when property descriptors are available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attaches a CommonJS module as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulates `import * as ns from ...` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __awaiter: drives a generator to emulate async/await on top of Promises.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0;
// Bundled module ids: 9491 = node:assert, 1017 = node:path, 1962 = io-util (above).
const assert_1 = __nccwpck_require__(9491);
const path = __importStar(__nccwpck_require__(1017));
const ioUtil = __importStar(__nccwpck_require__(1962));
/**
 * Copies a file or folder.
 * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See CopyOptions.
 */
function cp(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        const { force, recursive, copySourceDirectory } = readCopyOptions(options);
        const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
        // Dest is an existing file, but not forcing
        if (destStat && destStat.isFile() && !force) {
            return;
        }
        // If dest is an existing directory, should copy inside.
        const newDest = destStat && destStat.isDirectory() && copySourceDirectory
            ? path.join(dest, path.basename(source))
            : dest;
        if (!(yield ioUtil.exists(source))) {
            throw new Error(`no such file or directory: ${source}`);
        }
        const sourceStat = yield ioUtil.stat(source);
        if (sourceStat.isDirectory()) {
            if (!recursive) {
                throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
            }
            else {
                yield cpDirRecursive(source, newDest, 0, force);
            }
        }
        else {
            if (path.relative(source, newDest) === '') {
                // a file cannot be copied to itself
                throw new Error(`'${newDest}' and '${source}' are the same file`);
            }
            yield copyFile(source, newDest, force);
        }
    });
}
exports.cp = cp;
/**
 * Moves a path.
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See MoveOptions.
 */
function mv(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        if (yield ioUtil.exists(dest)) {
            let destExists = true;
            if (yield ioUtil.isDirectory(dest)) {
                // If dest is directory copy src into dest
                dest = path.join(dest, path.basename(source));
                destExists = yield ioUtil.exists(dest);
            }
            if (destExists) {
                if (options.force == null || options.force) {
                    yield rmRF(dest);
                }
                else {
                    throw new Error('Destination already exists');
                }
            }
        }
        yield mkdirP(path.dirname(dest));
        yield ioUtil.rename(source, dest);
    });
}
exports.mv = mv;
/**
 * Remove a path recursively with force
 *
 * @param inputPath path to remove
 */
function rmRF(inputPath) {
    return __awaiter(this, void 0, void 0, function* () {
        if (ioUtil.IS_WINDOWS) {
            // Check for invalid characters
            // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
            if (/[*"<>|]/.test(inputPath)) {
                throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows');
            }
        }
        try {
            // note if path does not exist, error is silent
            yield ioUtil.rm(inputPath, {
                force: true,
                maxRetries: 3,
                recursive: true,
                retryDelay: 300
            });
        }
        catch (err) {
            throw new Error(`File was unable to be removed ${err}`);
        }
    });
}
exports.rmRF = rmRF;
/**
 * Make a directory.  Creates the full path with folders in between
 * Will throw if it fails
 *
 * @param   fsPath        path to create
 * @returns Promise<void>
 */
function mkdirP(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(fsPath, 'a path argument must be provided');
        yield ioUtil.mkdir(fsPath, { recursive: true });
    });
}
exports.mkdirP = mkdirP;
/**
 * Returns path of a tool had the tool actually been invoked.  Resolves via paths.
 * If you check and the tool does not exist, it will throw.
 *
 * @param     tool              name of the tool
 * @param     check             whether to check if tool exists
 * @returns   Promise<string>   path to tool
 */
function which(tool, check) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!tool) {
            throw new Error("parameter 'tool' is required");
        }
        // recursive when check=true
        if (check) {
            const result = yield which(tool, false);
            if (!result) {
                if (ioUtil.IS_WINDOWS) {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
                }
                else {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
                }
            }
            return result;
        }
        const matches = yield findInPath(tool);
        if (matches && matches.length > 0) {
            return matches[0];
        }
        return '';
    });
}
exports.which = which;
/**
 * Returns a list of all occurrences of the given tool on the system path.
 *
 * @returns   Promise<string[]>  the paths of the tool
 */
function findInPath(tool) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!tool) {
            throw new Error("parameter 'tool' is required");
        }
        // build the list of extensions to try
        const extensions = [];
        if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) {
            for (const extension of process.env['PATHEXT'].split(path.delimiter)) {
                if (extension) {
                    extensions.push(extension);
                }
            }
        }
        // if it's rooted, return it if exists. otherwise return empty.
        if (ioUtil.isRooted(tool)) {
            const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
            if (filePath) {
                return [filePath];
            }
            return [];
        }
        // if any path separators, return empty
        if (tool.includes(path.sep)) {
            return [];
        }
        // build the list of directories
        //
        // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
        // it feels like we should not do this. Checking the current directory seems like more of a use
        // case of a shell, and the which() function exposed by the toolkit should strive for consistency
        // across platforms.
        const directories = [];
        if (process.env.PATH) {
            for (const p of process.env.PATH.split(path.delimiter)) {
                if (p) {
                    directories.push(p);
                }
            }
        }
        // find all matches
        const matches = [];
        for (const directory of directories) {
            const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions);
            if (filePath) {
                matches.push(filePath);
            }
        }
        return matches;
    });
}
exports.findInPath = findInPath;
/**
 * Normalizes CopyOptions: `force` defaults to true (raw value kept when set),
 * `recursive` is coerced to boolean, `copySourceDirectory` defaults to true
 * and is otherwise coerced to boolean.
 */
function readCopyOptions(options) {
    const { force: rawForce, recursive: rawRecursive, copySourceDirectory: rawCopy } = options;
    const force = rawForce == null ? true : rawForce;
    const recursive = Boolean(rawRecursive);
    const copySourceDirectory = rawCopy == null ? true : Boolean(rawCopy);
    return { force, recursive, copySourceDirectory };
}
// Recursively copies the contents of sourceDir into destDir (created if
// missing), then mirrors the source directory's mode onto the destination.
function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
    return __awaiter(this, void 0, void 0, function* () {
        // Guard against a runaway recursive copy (e.g. cyclic links).
        if (currentDepth >= 255)
            return;
        currentDepth++;
        yield mkdirP(destDir);
        for (const entryName of yield ioUtil.readdir(sourceDir)) {
            const sourcePath = `${sourceDir}/${entryName}`;
            const destPath = `${destDir}/${entryName}`;
            const entryStat = yield ioUtil.lstat(sourcePath);
            if (entryStat.isDirectory()) {
                // Recurse into sub-directory
                yield cpDirRecursive(sourcePath, destPath, currentDepth, force);
            }
            else {
                yield copyFile(sourcePath, destPath, force);
            }
        }
        // Change the mode for the newly created directory
        yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
    });
}
// Buffered file copy.
// Symlinks are re-created at the destination rather than followed; regular
// files are copied only when the destination is absent or `force` is set.
function copyFile(srcFile, destFile, force) {
    return __awaiter(this, void 0, void 0, function* () {
        if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
            // unlink/re-link it
            try {
                yield ioUtil.lstat(destFile);
                yield ioUtil.unlink(destFile);
            }
            catch (e) {
                // Try to override file permission
                if (e.code === 'EPERM') {
                    yield ioUtil.chmod(destFile, '0666');
                    yield ioUtil.unlink(destFile);
                }
                // other errors = it doesn't exist, no work to do
            }
            // Copy over symlink; 'junction' is required for directory links on Windows.
            const symlinkFull = yield ioUtil.readlink(srcFile);
            yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
        }
        else if (!(yield ioUtil.exists(destFile)) || force) {
            yield ioUtil.copyFile(srcFile, destFile);
        }
    });
}
//# sourceMappingURL=io.js.map
// (stray VCS timestamp lines removed - they were a syntax error)
/***/ } ) ,
// (stray VCS timestamp lines removed - they were a syntax error)
/***/ 2473 :
/***/ ( function ( module , exports , _ _nccwpck _require _ _ ) {
// (stray VCS timestamp lines removed - they were a syntax error)
"use strict" ;
// (stray VCS timestamp lines removed - they were a syntax error)
// TypeScript compiler-emitted interop helpers (do not edit by hand).
// __createBinding: re-exports property k of module m on o (as k2).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attaches a CommonJS module as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulates `import * as ns from ...` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __awaiter: TypeScript helper that drives a generator function to emulate
// async/await on top of Promises (resolves with the generator's return value,
// rejects if it throws).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0;
const semver = __importStar(__nccwpck_require__(5911));
const core_1 = __nccwpck_require__(2186);
// needs to be require for core node modules to be mocked
/* eslint @typescript-eslint/no-require-imports: 0 */
const os = __nccwpck_require__(2037);
const cp = __nccwpck_require__(2081);
const fs = __nccwpck_require__(7147);
/**
 * Finds the first candidate whose version satisfies versionSpec (and whose
 * stability matches when `stable` is set) and that ships a file for the
 * current platform/arch. Returns a shallow clone of the match with `files`
 * narrowed to that single file, or undefined when nothing matches.
 */
function _findMatch(versionSpec, stable, candidates, archFilter) {
    return __awaiter(this, void 0, void 0, function* () {
        const platFilter = os.platform();
        let result;
        let match;
        let file;
        for (const candidate of candidates) {
            const version = candidate.version;
            core_1.debug(`check ${version} satisfies ${versionSpec}`);
            if (semver.satisfies(version, versionSpec) &&
                (!stable || candidate.stable === stable)) {
                file = candidate.files.find(item => {
                    core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
                    let chk = item.arch === archFilter && item.platform === platFilter;
                    if (chk && item.platform_version) {
                        // accessed via module.exports so tests can mock it
                        const osVersion = module.exports._getOsVersion();
                        if (osVersion === item.platform_version) {
                            chk = true;
                        }
                        else {
                            chk = semver.satisfies(osVersion, item.platform_version);
                        }
                    }
                    return chk;
                });
                if (file) {
                    core_1.debug(`matched ${candidate.version}`);
                    match = candidate;
                    break;
                }
            }
        }
        if (match && file) {
            // clone since we're mutating the file list to be only the file that matches
            result = Object.assign({}, match);
            result.files = [file];
        }
        return result;
    });
}
exports._findMatch = _findMatch;
/**
 * Returns the host OS version: `sw_vers` output on macOS, the
 * VERSION_ID/DISTRIB_RELEASE value from the release file on Linux,
 * and '' on other platforms.
 */
function _getOsVersion() {
    // TODO: add windows and other linux, arm variants
    // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)
    const plat = os.platform();
    let version = '';
    if (plat === 'darwin') {
        version = cp.execSync('sw_vers -productVersion').toString();
    }
    else if (plat === 'linux') {
        // lsb_release process not in some containers, readfile
        // Run cat /etc/lsb-release
        // DISTRIB_ID=Ubuntu
        // DISTRIB_RELEASE=18.04
        // DISTRIB_CODENAME=bionic
        // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS"
        const lsbContents = module.exports._readLinuxVersionFile();
        if (lsbContents) {
            const lines = lsbContents.split('\n');
            for (const line of lines) {
                const parts = line.split('=');
                if (parts.length === 2 &&
                    (parts[0].trim() === 'VERSION_ID' ||
                        parts[0].trim() === 'DISTRIB_RELEASE')) {
                    version = parts[1]
                        .trim()
                        .replace(/^"/, '')
                        .replace(/"$/, '');
                    break;
                }
            }
        }
    }
    return version;
}
exports._getOsVersion = _getOsVersion;
/**
 * Reads /etc/lsb-release, falling back to /etc/os-release.
 * Returns the file contents, or '' when neither file exists.
 */
function _readLinuxVersionFile() {
    const lsbReleaseFile = '/etc/lsb-release';
    const osReleaseFile = '/etc/os-release';
    let contents = '';
    if (fs.existsSync(lsbReleaseFile)) {
        contents = fs.readFileSync(lsbReleaseFile).toString();
    }
    else if (fs.existsSync(osReleaseFile)) {
        contents = fs.readFileSync(osReleaseFile).toString();
    }
    return contents;
}
exports._readLinuxVersionFile = _readLinuxVersionFile;
//# sourceMappingURL=manifest.js.map
/***/ } ) ,
/***/ 8279 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
// TypeScript compiler-emitted interop helpers (do not edit by hand).
// __createBinding: re-exports property k of module m on o (as k2).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attaches a CommonJS module as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulates `import * as ns from ...` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __awaiter: drives a generator to emulate async/await on top of Promises.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.RetryHelper = void 0;
// 2186 = @actions/core (bundled)
const core = __importStar(__nccwpck_require__(2186));
/**
 * Internal class for retries
 */
class RetryHelper {
    /**
     * @param maxAttempts total attempts (>= 1)
     * @param minSeconds  minimum sleep between attempts (floored)
     * @param maxSeconds  maximum sleep between attempts (floored)
     */
    constructor(maxAttempts, minSeconds, maxSeconds) {
        if (maxAttempts < 1) {
            throw new Error('max attempts should be greater than or equal to 1');
        }
        this.maxAttempts = maxAttempts;
        this.minSeconds = Math.floor(minSeconds);
        this.maxSeconds = Math.floor(maxSeconds);
        if (this.minSeconds > this.maxSeconds) {
            throw new Error('min seconds should be less than or equal to max seconds');
        }
    }
    /**
     * Runs `action`, retrying with a random sleep between attempts.
     * When provided, `isRetryable(err)` returning false aborts retries
     * by rethrowing the error. The final attempt's error propagates.
     */
    execute(action, isRetryable) {
        return __awaiter(this, void 0, void 0, function* () {
            let attempt = 1;
            while (attempt < this.maxAttempts) {
                // Try
                try {
                    return yield action();
                }
                catch (err) {
                    if (isRetryable && !isRetryable(err)) {
                        throw err;
                    }
                    core.info(err.message);
                }
                // Sleep
                const seconds = this.getSleepAmount();
                core.info(`Waiting ${seconds} seconds before trying again`);
                yield this.sleep(seconds);
                attempt++;
            }
            // Last attempt
            return yield action();
        });
    }
    // Random whole number of seconds in [minSeconds, maxSeconds].
    getSleepAmount() {
        return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
            this.minSeconds);
    }
    sleep(seconds) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise(resolve => setTimeout(resolve, seconds * 1000));
        });
    }
}
exports.RetryHelper = RetryHelper;
//# sourceMappingURL=retry-helper.js.map
/***/ } ) ,
/***/ 7784 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
// TypeScript compiler-emitted interop helpers (do not edit by hand).
// __createBinding: re-exports property k of module m on o (as k2).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attaches a CommonJS module as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulates `import * as ns from ...` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __awaiter: drives a generator to emulate async/await on top of Promises.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// __importDefault: wraps a CommonJS module so `.default` works.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0;
// Bundled dependencies: @actions/core, @actions/io, node:fs, manifest helper,
// node:os, node:path, @actions/http-client, semver, node:stream, node:util,
// node:assert, uuid v4, @actions/exec, retry-helper.
const core = __importStar(__nccwpck_require__(2186));
const io = __importStar(__nccwpck_require__(7436));
const fs = __importStar(__nccwpck_require__(7147));
const mm = __importStar(__nccwpck_require__(2473));
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const httpm = __importStar(__nccwpck_require__(6255));
const semver = __importStar(__nccwpck_require__(5911));
const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837));
const assert_1 = __nccwpck_require__(9491);
const v4_1 = __importDefault(__nccwpck_require__(7468));
const exec_1 = __nccwpck_require__(1514);
const retry_helper_1 = __nccwpck_require__(8279);
class HTTPError extends Error {
constructor ( httpStatusCode ) {
super ( ` Unexpected HTTP response: ${ httpStatusCode } ` ) ;
this . httpStatusCode = httpStatusCode ;
Object . setPrototypeOf ( this , new . target . prototype ) ;
}
}
exports . HTTPError = HTTPError ;
// Platform flags and the User-Agent sent on download requests.
const IS_WINDOWS = process.platform === 'win32';
const IS_MAC = process.platform === 'darwin';
const userAgent = 'actions/tool-cache';
/**
 * Download a tool from an url and stream it into a file
 *
 * @param url       url of tool to download
 * @param dest      path to download tool
 * @param auth      authorization header
 * @param headers   other headers
 * @returns         path to downloaded tool
 */
function downloadTool(url, dest, auth, headers) {
    return __awaiter(this, void 0, void 0, function* () {
        dest = dest || path.join(_getTempDirectory(), v4_1.default());
        yield io.mkdirP(path.dirname(dest));
        core.debug(`Downloading ${url}`);
        core.debug(`Destination ${dest}`);
        const maxAttempts = 3;
        const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
        const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
        const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
        return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
            return yield downloadToolAttempt(url, dest || '', auth, headers);
        }), (err) => {
            if (err instanceof HTTPError && err.httpStatusCode) {
                // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
                if (err.httpStatusCode < 500 &&
                    err.httpStatusCode !== 408 &&
                    err.httpStatusCode !== 429) {
                    return false;
                }
            }
            // Otherwise retry
            return true;
        });
    });
}
exports.downloadTool = downloadTool;
// Single download attempt: GETs `url` and streams the response body to
// `dest`. Throws HTTPError on a non-200 status; on failure the partially
// written dest file is deleted so a retry can start clean.
function downloadToolAttempt(url, dest, auth, headers) {
    return __awaiter(this, void 0, void 0, function* () {
        if (fs.existsSync(dest)) {
            throw new Error(`Destination file path ${dest} already exists`);
        }
        // Get the response headers; retries are handled by the caller's RetryHelper.
        const http = new httpm.HttpClient(userAgent, [], {
            allowRetries: false
        });
        if (auth) {
            core.debug('set auth');
            if (headers === undefined) {
                headers = {};
            }
            headers.authorization = auth;
        }
        const response = yield http.get(url, headers);
        if (response.message.statusCode !== 200) {
            const err = new HTTPError(response.message.statusCode);
            core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
            throw err;
        }
        // Download the response body (the factory hook lets tests substitute the stream)
        const pipeline = util.promisify(stream.pipeline);
        const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
        const readStream = responseMessageFactory();
        let succeeded = false;
        try {
            yield pipeline(readStream, fs.createWriteStream(dest));
            core.debug('download complete');
            succeeded = true;
            return dest;
        }
        finally {
            // Error, delete dest before retry
            if (!succeeded) {
                core.debug('download failed');
                try {
                    yield io.rmRF(dest);
                }
                catch (err) {
                    core.debug(`Failed to delete '${dest}'. ${err.message}`);
                }
            }
        }
    });
}
2023-04-12 19:55:27 +08:00
/ * *
* Extract a . 7 z file
*
* @ param file path to the . 7 z file
* @ param dest destination directory . Optional .
* @ param _7zPath path to 7 zr . exe . Optional , for long path support . Most . 7 z archives do not have this
* problem . If your . 7 z archive contains very long paths , you can pass the path to 7 zr . exe which will
* gracefully handle long paths . By default 7 zdec . exe is used because it is a very small program and is
* bundled with the tool lib . However it does not support long paths . 7 zr . exe is the reduced command line
* interface , it is smaller than the full command line interface , and it does support long paths . At the
* time of this writing , it is freely available from the LZMA SDK that is available on the 7 zip website .
* Be sure to check the current license agreement . If 7 zr . exe is bundled with your action , then the path
* to 7 zr . exe can be pass to this function .
* @ returns path to the destination directory
* /
function extract7z ( file , dest , _7zPath ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-04-12 19:55:27 +08:00
assert _1 . ok ( IS _WINDOWS , 'extract7z() not supported on current OS' ) ;
assert _1 . ok ( file , 'parameter "file" is required' ) ;
2024-04-24 12:04:10 -04:00
dest = yield _createExtractFolder ( dest ) ;
2023-04-12 19:55:27 +08:00
const originalCwd = process . cwd ( ) ;
process . chdir ( dest ) ;
if ( _7zPath ) {
try {
2024-04-24 12:04:10 -04:00
const logLevel = core . isDebug ( ) ? '-bb1' : '-bb0' ;
2023-04-12 19:55:27 +08:00
const args = [
'x' ,
2024-04-24 12:04:10 -04:00
logLevel ,
2023-04-12 19:55:27 +08:00
'-bd' ,
'-sccUTF-8' ,
file
] ;
const options = {
silent : true
} ;
yield exec _1 . exec ( ` " ${ _7zPath } " ` , args , options ) ;
}
finally {
process . chdir ( originalCwd ) ;
}
}
else {
const escapedScript = path
. join ( _ _dirname , '..' , 'scripts' , 'Invoke-7zdec.ps1' )
. replace ( /'/g , "''" )
. replace ( /"|\n|\r/g , '' ) ; // double-up single quotes, remove double quotes and newlines
const escapedFile = file . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
const escapedTarget = dest . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
const command = ` & ' ${ escapedScript } ' -Source ' ${ escapedFile } ' -Target ' ${ escapedTarget } ' ` ;
const args = [
'-NoLogo' ,
'-Sta' ,
'-NoProfile' ,
'-NonInteractive' ,
'-ExecutionPolicy' ,
'Unrestricted' ,
'-Command' ,
command
] ;
const options = {
silent : true
} ;
try {
const powershellPath = yield io . which ( 'powershell' , true ) ;
yield exec _1 . exec ( ` " ${ powershellPath } " ` , args , options ) ;
}
finally {
process . chdir ( originalCwd ) ;
}
}
return dest ;
2023-03-09 17:42:29 +01:00
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
exports . extract7z = extract7z ;
2023-03-09 17:42:29 +01:00
/ * *
2024-04-24 12:04:10 -04:00
* Extract a compressed tar archive
2023-04-12 19:55:27 +08:00
*
* @ param file path to the tar
* @ param dest destination directory . Optional .
2024-04-24 12:04:10 -04:00
* @ param flags flags for the tar command to use for extraction . Defaults to 'xz' ( extracting gzipped tars ) . Optional .
2023-04-12 19:55:27 +08:00
* @ returns path to the destination directory
2023-03-09 17:42:29 +01:00
* /
2023-04-12 19:55:27 +08:00
function extractTar ( file , dest , flags = 'xz' ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! file ) {
throw new Error ( "parameter 'file' is required" ) ;
}
2024-04-24 12:04:10 -04:00
// Create dest
dest = yield _createExtractFolder ( dest ) ;
// Determine whether GNU tar
core . debug ( 'Checking tar --version' ) ;
let versionOutput = '' ;
yield exec _1 . exec ( 'tar --version' , [ ] , {
ignoreReturnCode : true ,
silent : true ,
listeners : {
stdout : ( data ) => ( versionOutput += data . toString ( ) ) ,
stderr : ( data ) => ( versionOutput += data . toString ( ) )
}
} ) ;
core . debug ( versionOutput . trim ( ) ) ;
const isGnuTar = versionOutput . toUpperCase ( ) . includes ( 'GNU TAR' ) ;
// Initialize args
let args ;
if ( flags instanceof Array ) {
args = flags ;
}
else {
args = [ flags ] ;
}
if ( core . isDebug ( ) && ! flags . includes ( 'v' ) ) {
args . push ( '-v' ) ;
}
let destArg = dest ;
let fileArg = file ;
if ( IS _WINDOWS && isGnuTar ) {
args . push ( '--force-local' ) ;
destArg = dest . replace ( /\\/g , '/' ) ;
// Technically only the dest needs to have `/` but for aesthetic consistency
// convert slashes in the file arg too.
fileArg = file . replace ( /\\/g , '/' ) ;
}
if ( isGnuTar ) {
// Suppress warnings when using GNU tar to extract archives created by BSD tar
args . push ( '--warning=no-unknown-keyword' ) ;
args . push ( '--overwrite' ) ;
}
args . push ( '-C' , destArg , '-f' , fileArg ) ;
yield exec _1 . exec ( ` tar ` , args ) ;
2023-04-12 19:55:27 +08:00
return dest ;
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
exports . extractTar = extractTar ;
2024-04-24 12:04:10 -04:00
/ * *
* Extract a xar compatible archive
*
* @ param file path to the archive
* @ param dest destination directory . Optional .
* @ param flags flags for the xar . Optional .
* @ returns path to the destination directory
* /
function extractXar ( file , dest , flags = [ ] ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( IS _MAC , 'extractXar() not supported on current OS' ) ;
assert _1 . ok ( file , 'parameter "file" is required' ) ;
dest = yield _createExtractFolder ( dest ) ;
let args ;
if ( flags instanceof Array ) {
args = flags ;
}
else {
args = [ flags ] ;
}
args . push ( '-x' , '-C' , dest , '-f' , file ) ;
if ( core . isDebug ( ) ) {
args . push ( '-v' ) ;
}
const xarPath = yield io . which ( 'xar' , true ) ;
yield exec _1 . exec ( ` " ${ xarPath } " ` , _unique ( args ) ) ;
return dest ;
} ) ;
}
exports . extractXar = extractXar ;
2023-03-09 17:42:29 +01:00
/ * *
2023-04-12 19:55:27 +08:00
* Extract a zip
*
* @ param file path to the zip
* @ param dest destination directory . Optional .
* @ returns path to the destination directory
2023-03-09 17:42:29 +01:00
* /
2023-04-12 19:55:27 +08:00
function extractZip ( file , dest ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-04-12 19:55:27 +08:00
if ( ! file ) {
throw new Error ( "parameter 'file' is required" ) ;
2019-12-03 10:28:59 -05:00
}
2024-04-24 12:04:10 -04:00
dest = yield _createExtractFolder ( dest ) ;
2023-04-12 19:55:27 +08:00
if ( IS _WINDOWS ) {
yield extractZipWin ( file , dest ) ;
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
else {
yield extractZipNix ( file , dest ) ;
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
return dest ;
2023-03-09 17:42:29 +01:00
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
exports . extractZip = extractZip ;
function extractZipWin ( file , dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// build the powershell command
const escapedFile = file . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ; // double-up single quotes, remove double quotes and newlines
const escapedDest = dest . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
2024-04-24 12:04:10 -04:00
const pwshPath = yield io . which ( 'pwsh' , false ) ;
//To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory
//and the -Force flag for Expand-Archive as a fallback
if ( pwshPath ) {
//attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive
const pwshCommand = [
` $ ErrorActionPreference = 'Stop' ; ` ,
` try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ; ` ,
` try { [System.IO.Compression.ZipFile]::ExtractToDirectory(' ${ escapedFile } ', ' ${ escapedDest } ', $ true) } ` ,
` catch { if (( $ _.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ( $ _.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath ' ${ escapedFile } ' -DestinationPath ' ${ escapedDest } ' -Force } else { throw $ _ } } ; `
] . join ( ' ' ) ;
const args = [
'-NoLogo' ,
'-NoProfile' ,
'-NonInteractive' ,
'-ExecutionPolicy' ,
'Unrestricted' ,
'-Command' ,
pwshCommand
] ;
core . debug ( ` Using pwsh at path: ${ pwshPath } ` ) ;
yield exec _1 . exec ( ` " ${ pwshPath } " ` , args ) ;
}
else {
const powershellCommand = [
` $ ErrorActionPreference = 'Stop' ; ` ,
` try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; ` ,
` if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath ' ${ escapedFile } ' -DestinationPath ' ${ escapedDest } ' -Force } ` ,
` else {[System.IO.Compression.ZipFile]::ExtractToDirectory(' ${ escapedFile } ', ' ${ escapedDest } ', $ true) } `
] . join ( ' ' ) ;
const args = [
'-NoLogo' ,
'-Sta' ,
'-NoProfile' ,
'-NonInteractive' ,
'-ExecutionPolicy' ,
'Unrestricted' ,
'-Command' ,
powershellCommand
] ;
const powershellPath = yield io . which ( 'powershell' , true ) ;
core . debug ( ` Using powershell at path: ${ powershellPath } ` ) ;
yield exec _1 . exec ( ` " ${ powershellPath } " ` , args ) ;
}
2023-04-12 19:55:27 +08:00
} ) ;
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
function extractZipNix ( file , dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2024-04-24 12:04:10 -04:00
const unzipPath = yield io . which ( 'unzip' , true ) ;
const args = [ file ] ;
if ( ! core . isDebug ( ) ) {
args . unshift ( '-q' ) ;
}
args . unshift ( '-o' ) ; //overwrite with -o, otherwise a prompt is shown which freezes the run
yield exec _1 . exec ( ` " ${ unzipPath } " ` , args , { cwd : dest } ) ;
2020-03-02 11:33:30 -05:00
} ) ;
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
/ * *
2023-04-12 19:55:27 +08:00
* Caches a directory and installs it into the tool cacheDir
2023-03-09 17:42:29 +01:00
*
2023-04-12 19:55:27 +08:00
* @ param sourceDir the directory to cache into tools
* @ param tool tool name
* @ param version version of the tool . semver format
* @ param arch architecture of the tool . Optional . Defaults to machine architecture
2023-03-09 17:42:29 +01:00
* /
2023-04-12 19:55:27 +08:00
function cacheDir ( sourceDir , tool , version , arch ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-04-12 19:55:27 +08:00
version = semver . clean ( version ) || version ;
arch = arch || os . arch ( ) ;
core . debug ( ` Caching tool ${ tool } ${ version } ${ arch } ` ) ;
core . debug ( ` source dir: ${ sourceDir } ` ) ;
if ( ! fs . statSync ( sourceDir ) . isDirectory ( ) ) {
throw new Error ( 'sourceDir is not a directory' ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
// Create the tool dir
const destPath = yield _createToolPath ( tool , version , arch ) ;
// copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file.
for ( const itemName of fs . readdirSync ( sourceDir ) ) {
const s = path . join ( sourceDir , itemName ) ;
yield io . cp ( s , destPath , { recursive : true } ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
// write .complete
_completeToolPath ( tool , version , arch ) ;
return destPath ;
2023-03-09 17:42:29 +01:00
} ) ;
}
2023-04-12 19:55:27 +08:00
exports . cacheDir = cacheDir ;
2023-03-09 17:42:29 +01:00
/ * *
2023-04-12 19:55:27 +08:00
* Caches a downloaded file ( GUID ) and installs it
* into the tool cache with a given targetName
2023-03-09 17:42:29 +01:00
*
2023-04-12 19:55:27 +08:00
* @ param sourceFile the file to cache into tools . Typically a result of downloadTool which is a guid .
* @ param targetFile the name of the file name in the tools directory
* @ param tool tool name
* @ param version version of the tool . semver format
* @ param arch architecture of the tool . Optional . Defaults to machine architecture
2023-03-09 17:42:29 +01:00
* /
2023-04-12 19:55:27 +08:00
function cacheFile ( sourceFile , targetFile , tool , version , arch ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-04-12 19:55:27 +08:00
version = semver . clean ( version ) || version ;
arch = arch || os . arch ( ) ;
core . debug ( ` Caching tool ${ tool } ${ version } ${ arch } ` ) ;
core . debug ( ` source file: ${ sourceFile } ` ) ;
if ( ! fs . statSync ( sourceFile ) . isFile ( ) ) {
throw new Error ( 'sourceFile is not a file' ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
// create the tool dir
const destFolder = yield _createToolPath ( tool , version , arch ) ;
// copy instead of move. move can fail on Windows due to
// anti-virus software having an open handle on a file.
const destPath = path . join ( destFolder , targetFile ) ;
core . debug ( ` destination file ${ destPath } ` ) ;
yield io . cp ( sourceFile , destPath ) ;
// write .complete
_completeToolPath ( tool , version , arch ) ;
return destFolder ;
2023-03-09 17:42:29 +01:00
} ) ;
}
2023-04-12 19:55:27 +08:00
exports . cacheFile = cacheFile ;
2023-03-09 17:42:29 +01:00
/ * *
2023-04-12 19:55:27 +08:00
* Finds the path to a tool version in the local installed tool cache
2023-03-09 17:42:29 +01:00
*
2023-04-12 19:55:27 +08:00
* @ param toolName name of the tool
* @ param versionSpec version of the tool
* @ param arch optional arch . defaults to arch of computer
2023-03-09 17:42:29 +01:00
* /
2023-04-12 19:55:27 +08:00
function find ( toolName , versionSpec , arch ) {
if ( ! toolName ) {
throw new Error ( 'toolName parameter is required' ) ;
}
if ( ! versionSpec ) {
throw new Error ( 'versionSpec parameter is required' ) ;
}
arch = arch || os . arch ( ) ;
// attempt to resolve an explicit version
2024-04-24 12:04:10 -04:00
if ( ! isExplicitVersion ( versionSpec ) ) {
2023-04-12 19:55:27 +08:00
const localVersions = findAllVersions ( toolName , arch ) ;
2024-04-24 12:04:10 -04:00
const match = evaluateVersions ( localVersions , versionSpec ) ;
2023-04-12 19:55:27 +08:00
versionSpec = match ;
}
// check for the explicit version in the cache
let toolPath = '' ;
if ( versionSpec ) {
versionSpec = semver . clean ( versionSpec ) || '' ;
2024-04-24 12:04:10 -04:00
const cachePath = path . join ( _getCacheDirectory ( ) , toolName , versionSpec , arch ) ;
2023-04-12 19:55:27 +08:00
core . debug ( ` checking cache: ${ cachePath } ` ) ;
if ( fs . existsSync ( cachePath ) && fs . existsSync ( ` ${ cachePath } .complete ` ) ) {
core . debug ( ` Found tool in cache ${ toolName } ${ versionSpec } ${ arch } ` ) ;
toolPath = cachePath ;
2023-03-15 15:45:08 -04:00
}
2023-04-12 19:55:27 +08:00
else {
core . debug ( 'not found' ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
}
return toolPath ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
exports . find = find ;
2023-03-09 17:42:29 +01:00
/ * *
2023-04-12 19:55:27 +08:00
* Finds the paths to all versions of a tool that are installed in the local tool cache
2023-03-09 17:42:29 +01:00
*
2023-04-12 19:55:27 +08:00
* @ param toolName name of the tool
* @ param arch optional arch . defaults to arch of computer
2023-03-09 17:42:29 +01:00
* /
2023-04-12 19:55:27 +08:00
function findAllVersions ( toolName , arch ) {
const versions = [ ] ;
arch = arch || os . arch ( ) ;
2024-04-24 12:04:10 -04:00
const toolPath = path . join ( _getCacheDirectory ( ) , toolName ) ;
2023-04-12 19:55:27 +08:00
if ( fs . existsSync ( toolPath ) ) {
const children = fs . readdirSync ( toolPath ) ;
for ( const child of children ) {
2024-04-24 12:04:10 -04:00
if ( isExplicitVersion ( child ) ) {
2023-04-12 19:55:27 +08:00
const fullPath = path . join ( toolPath , child , arch || '' ) ;
if ( fs . existsSync ( fullPath ) && fs . existsSync ( ` ${ fullPath } .complete ` ) ) {
versions . push ( child ) ;
2023-03-09 17:42:29 +01:00
}
2020-03-11 15:55:17 -04:00
}
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
}
return versions ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
exports . findAllVersions = findAllVersions ;
2024-04-24 12:04:10 -04:00
function getManifestFromRepo ( owner , repo , auth , branch = 'master' ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let releases = [ ] ;
const treeUrl = ` https://api.github.com/repos/ ${ owner } / ${ repo } /git/trees/ ${ branch } ` ;
const http = new httpm . HttpClient ( 'tool-cache' ) ;
const headers = { } ;
if ( auth ) {
core . debug ( 'set auth' ) ;
headers . authorization = auth ;
}
const response = yield http . getJson ( treeUrl , headers ) ;
if ( ! response . result ) {
return releases ;
}
let manifestUrl = '' ;
for ( const item of response . result . tree ) {
if ( item . path === 'versions-manifest.json' ) {
manifestUrl = item . url ;
break ;
}
}
headers [ 'accept' ] = 'application/vnd.github.VERSION.raw' ;
let versionsRaw = yield ( yield http . get ( manifestUrl , headers ) ) . readBody ( ) ;
if ( versionsRaw ) {
// shouldn't be needed but protects against invalid json saved with BOM
versionsRaw = versionsRaw . replace ( /^\uFEFF/ , '' ) ;
try {
releases = JSON . parse ( versionsRaw ) ;
}
catch ( _a ) {
core . debug ( 'Invalid json' ) ;
}
}
return releases ;
} ) ;
}
exports . getManifestFromRepo = getManifestFromRepo ;
function findFromManifest ( versionSpec , stable , manifest , archFilter = os . arch ( ) ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// wrap the internal impl
const match = yield mm . _findMatch ( versionSpec , stable , manifest , archFilter ) ;
return match ;
} ) ;
}
exports . findFromManifest = findFromManifest ;
2023-04-12 19:55:27 +08:00
function _createExtractFolder ( dest ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2023-04-12 19:55:27 +08:00
if ( ! dest ) {
// create a temp dir
2024-04-24 12:04:10 -04:00
dest = path . join ( _getTempDirectory ( ) , v4 _1 . default ( ) ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
yield io . mkdirP ( dest ) ;
return dest ;
2023-03-09 17:42:29 +01:00
} ) ;
}
2023-04-12 19:55:27 +08:00
function _createToolPath ( tool , version , arch ) {
2023-03-09 17:42:29 +01:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2024-04-24 12:04:10 -04:00
const folderPath = path . join ( _getCacheDirectory ( ) , tool , semver . clean ( version ) || version , arch || '' ) ;
2023-04-12 19:55:27 +08:00
core . debug ( ` destination ${ folderPath } ` ) ;
const markerPath = ` ${ folderPath } .complete ` ;
yield io . rmRF ( folderPath ) ;
yield io . rmRF ( markerPath ) ;
yield io . mkdirP ( folderPath ) ;
return folderPath ;
2023-03-09 17:42:29 +01:00
} ) ;
}
2023-04-12 19:55:27 +08:00
function _completeToolPath ( tool , version , arch ) {
2024-04-24 12:04:10 -04:00
const folderPath = path . join ( _getCacheDirectory ( ) , tool , semver . clean ( version ) || version , arch || '' ) ;
2023-04-12 19:55:27 +08:00
const markerPath = ` ${ folderPath } .complete ` ;
fs . writeFileSync ( markerPath , '' ) ;
core . debug ( 'finished caching tool' ) ;
}
2024-04-24 12:04:10 -04:00
/ * *
* Check if version string is explicit
*
* @ param versionSpec version string to check
* /
function isExplicitVersion ( versionSpec ) {
2023-04-12 19:55:27 +08:00
const c = semver . clean ( versionSpec ) || '' ;
core . debug ( ` isExplicit: ${ c } ` ) ;
const valid = semver . valid ( c ) != null ;
core . debug ( ` explicit? ${ valid } ` ) ;
return valid ;
}
2024-04-24 12:04:10 -04:00
exports . isExplicitVersion = isExplicitVersion ;
/ * *
* Get the highest satisfiying semantic version in ` versions ` which satisfies ` versionSpec `
*
* @ param versions array of versions to evaluate
* @ param versionSpec semantic version spec to satisfy
* /
function evaluateVersions ( versions , versionSpec ) {
2023-04-12 19:55:27 +08:00
let version = '' ;
core . debug ( ` evaluating ${ versions . length } versions ` ) ;
versions = versions . sort ( ( a , b ) => {
if ( semver . gt ( a , b ) ) {
return 1 ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
return - 1 ;
2023-03-09 17:42:29 +01:00
} ) ;
2023-04-12 19:55:27 +08:00
for ( let i = versions . length - 1 ; i >= 0 ; i -- ) {
const potential = versions [ i ] ;
const satisfied = semver . satisfies ( potential , versionSpec ) ;
if ( satisfied ) {
version = potential ;
break ;
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
if ( version ) {
core . debug ( ` matched: ${ version } ` ) ;
2019-12-03 10:28:59 -05:00
}
2023-04-12 19:55:27 +08:00
else {
core . debug ( 'match not found' ) ;
2023-03-09 17:42:29 +01:00
}
2023-04-12 19:55:27 +08:00
return version ;
2023-03-09 17:42:29 +01:00
}
2024-04-24 12:04:10 -04:00
exports . evaluateVersions = evaluateVersions ;
/ * *
* Gets RUNNER _TOOL _CACHE
* /
function _getCacheDirectory ( ) {
const cacheDirectory = process . env [ 'RUNNER_TOOL_CACHE' ] || '' ;
assert _1 . ok ( cacheDirectory , 'Expected RUNNER_TOOL_CACHE to be defined' ) ;
return cacheDirectory ;
}
/ * *
* Gets RUNNER _TEMP
* /
function _getTempDirectory ( ) {
const tempDirectory = process . env [ 'RUNNER_TEMP' ] || '' ;
assert _1 . ok ( tempDirectory , 'Expected RUNNER_TEMP to be defined' ) ;
return tempDirectory ;
}
/ * *
* Gets a global variable
* /
function _getGlobal ( key , defaultValue ) {
/* eslint-disable @typescript-eslint/no-explicit-any */
const value = global [ key ] ;
/* eslint-enable @typescript-eslint/no-explicit-any */
return value !== undefined ? value : defaultValue ;
}
/ * *
* Returns an array of unique values .
* @ param values Values to make unique .
* /
function _unique ( values ) {
return Array . from ( new Set ( values ) ) ;
}
2023-04-12 19:55:27 +08:00
//# sourceMappingURL=tool-cache.js.map
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 7701 :
/***/ ( ( module ) => {
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/ * *
* Convert array of 16 byte values to UUID string format of the form :
* XXXXXXXX - XXXX - XXXX - XXXX - XXXXXXXXXXXX
* /
var byteToHex = [ ] ;
for ( var i = 0 ; i < 256 ; ++ i ) {
byteToHex [ i ] = ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ;
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
function bytesToUuid ( buf , offset ) {
var i = offset || 0 ;
var bth = byteToHex ;
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
return ( [
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ]
] ) . join ( '' ) ;
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
module . exports = bytesToUuid ;
2022-10-03 18:04:49 +01:00
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 7269 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
var crypto = _ _nccwpck _require _ _ ( 6113 ) ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
module . exports = function nodeRNG ( ) {
return crypto . randomBytes ( 16 ) ;
} ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 7468 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
var rng = _ _nccwpck _require _ _ ( 7269 ) ;
var bytesToUuid = _ _nccwpck _require _ _ ( 7701 ) ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
function v4 ( options , buf , offset ) {
var i = buf && offset || 0 ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
if ( typeof ( options ) == 'string' ) {
buf = options === 'binary' ? new Array ( 16 ) : null ;
options = null ;
}
options = options || { } ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
var rnds = options . random || ( options . rng || rng ) ( ) ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = ( rnds [ 6 ] & 0x0f ) | 0x40 ;
rnds [ 8 ] = ( rnds [ 8 ] & 0x3f ) | 0x80 ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Copy bytes to buffer, if provided
if ( buf ) {
for ( var ii = 0 ; ii < 16 ; ++ ii ) {
buf [ i + ii ] = rnds [ ii ] ;
}
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
return buf || bytesToUuid ( rnds ) ;
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
module . exports = v4 ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 334 :
/***/ ( ( module ) => {
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
var _ _defProp = Object . defineProperty ;
var _ _getOwnPropDesc = Object . getOwnPropertyDescriptor ;
var _ _getOwnPropNames = Object . getOwnPropertyNames ;
var _ _hasOwnProp = Object . prototype . hasOwnProperty ;
var _ _export = ( target , all ) => {
for ( var name in all )
_ _defProp ( target , name , { get : all [ name ] , enumerable : true } ) ;
} ;
var _ _copyProps = ( to , from , except , desc ) => {
if ( from && typeof from === "object" || typeof from === "function" ) {
for ( let key of _ _getOwnPropNames ( from ) )
if ( ! _ _hasOwnProp . call ( to , key ) && key !== except )
_ _defProp ( to , key , { get : ( ) => from [ key ] , enumerable : ! ( desc = _ _getOwnPropDesc ( from , key ) ) || desc . enumerable } ) ;
}
return to ;
} ;
var _ _toCommonJS = ( mod ) => _ _copyProps ( _ _defProp ( { } , "__esModule" , { value : true } ) , mod ) ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
createTokenAuth : ( ) => createTokenAuth
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// pkg/dist-src/auth.js
var REGEX _IS _INSTALLATION _LEGACY = /^v1\./ ;
var REGEX _IS _INSTALLATION = /^ghs_/ ;
var REGEX _IS _USER _TO _SERVER = /^ghu_/ ;
async function auth ( token ) {
const isApp = token . split ( /\./ ) . length === 3 ;
const isInstallation = REGEX _IS _INSTALLATION _LEGACY . test ( token ) || REGEX _IS _INSTALLATION . test ( token ) ;
const isUserToServer = REGEX _IS _USER _TO _SERVER . test ( token ) ;
const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth" ;
return {
type : "token" ,
token ,
tokenType
} ;
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// pkg/dist-src/with-authorization-prefix.js
function withAuthorizationPrefix ( token ) {
if ( token . split ( /\./ ) . length === 3 ) {
return ` bearer ${ token } ` ;
}
return ` token ${ token } ` ;
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// pkg/dist-src/hook.js
async function hook ( token , request , route , parameters ) {
const endpoint = request . endpoint . merge (
route ,
parameters
) ;
endpoint . headers . authorization = withAuthorizationPrefix ( token ) ;
return request ( endpoint ) ;
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// pkg/dist-src/index.js
var createTokenAuth = function createTokenAuth2 ( token ) {
if ( ! token ) {
throw new Error ( "[@octokit/auth-token] No token passed to createTokenAuth" ) ;
}
if ( typeof token !== "string" ) {
throw new Error (
"[@octokit/auth-token] Token passed to createTokenAuth is not a string"
) ;
}
token = token . replace ( /^(token|bearer) +/i , "" ) ;
return Object . assign ( auth . bind ( null , token ) , {
hook : hook . bind ( null , token )
} ) ;
} ;
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 6762 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2020-05-07 12:11:11 -04:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2020-05-07 12:11:11 -04:00
2024-04-24 12:04:10 -04:00
var _ _defProp = Object . defineProperty ;
var _ _getOwnPropDesc = Object . getOwnPropertyDescriptor ;
var _ _getOwnPropNames = Object . getOwnPropertyNames ;
var _ _hasOwnProp = Object . prototype . hasOwnProperty ;
var _ _export = ( target , all ) => {
for ( var name in all )
_ _defProp ( target , name , { get : all [ name ] , enumerable : true } ) ;
} ;
var _ _copyProps = ( to , from , except , desc ) => {
if ( from && typeof from === "object" || typeof from === "function" ) {
for ( let key of _ _getOwnPropNames ( from ) )
if ( ! _ _hasOwnProp . call ( to , key ) && key !== except )
_ _defProp ( to , key , { get : ( ) => from [ key ] , enumerable : ! ( desc = _ _getOwnPropDesc ( from , key ) ) || desc . enumerable } ) ;
}
return to ;
} ;
var _ _toCommonJS = ( mod ) => _ _copyProps ( _ _defProp ( { } , "__esModule" , { value : true } ) , mod ) ;
2020-05-07 12:11:11 -04:00
2024-04-24 12:04:10 -04:00
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
Octokit : ( ) => Octokit
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
var import _universal _user _agent = _ _nccwpck _require _ _ ( 5030 ) ;
var import _before _after _hook = _ _nccwpck _require _ _ ( 3682 ) ;
var import _request = _ _nccwpck _require _ _ ( 6234 ) ;
var import _graphql = _ _nccwpck _require _ _ ( 8467 ) ;
var import _auth _token = _ _nccwpck _require _ _ ( 334 ) ;
// pkg/dist-src/version.js
var VERSION = "5.2.0" ;
// pkg/dist-src/index.js
var noop = ( ) => {
} ;
var consoleWarn = console . warn . bind ( console ) ;
var consoleError = console . error . bind ( console ) ;
var userAgentTrail = ` octokit-core.js/ ${ VERSION } ${ ( 0 , import _universal _user _agent . getUserAgent ) ( ) } ` ;
// The core Octokit class: request + graphql + auth + hooks + plugin system.
var Octokit = class {
  static {
    this.VERSION = VERSION;
  }
  /**
   * Create a subclass with baked-in constructor defaults. `defaults` is
   * either an options object (shallow-merged beneath the caller's options,
   * with the two user-agent strings concatenated) or a function mapping the
   * caller's options to the final options.
   */
  static defaults(defaults) {
    const OctokitWithDefaults = class extends this {
      constructor(...args) {
        const options = args[0] || {};
        if (typeof defaults === "function") {
          super(defaults(options));
          return;
        }
        super(
          Object.assign(
            {},
            defaults,
            options,
            options.userAgent && defaults.userAgent ? {
              userAgent: `${options.userAgent} ${defaults.userAgent}`
            } : null
          )
        );
      }
    };
    return OctokitWithDefaults;
  }
  static {
    this.plugins = [];
  }
  /**
   * Attach a plugin (or many) to your Octokit instance.
   *
   * @example
   * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
   */
  static plugin(...newPlugins) {
    const currentPlugins = this.plugins;
    const NewOctokit = class extends this {
      static {
        // de-duplicate: plugins already registered on the parent are skipped
        this.plugins = currentPlugins.concat(
          newPlugins.filter((plugin) => !currentPlugins.includes(plugin))
        );
      }
    };
    return NewOctokit;
  }
  constructor(options = {}) {
    const hook = new import_before_after_hook.Collection();
    const requestDefaults = {
      baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl,
      headers: {},
      request: Object.assign({}, options.request, {
        // @ts-ignore internal usage only, no need to type
        hook: hook.bind(null, "request")
      }),
      mediaType: {
        previews: [],
        format: ""
      }
    };
    requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail;
    if (options.baseUrl) {
      requestDefaults.baseUrl = options.baseUrl;
    }
    if (options.previews) {
      requestDefaults.mediaType.previews = options.previews;
    }
    if (options.timeZone) {
      requestDefaults.headers["time-zone"] = options.timeZone;
    }
    this.request = import_request.request.defaults(requestDefaults);
    this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults);
    this.log = Object.assign(
      {
        debug: noop,
        info: noop,
        warn: consoleWarn,
        error: consoleError
      },
      options.log
    );
    this.hook = hook;
    if (!options.authStrategy) {
      if (!options.auth) {
        // no auth provided: requests go out unauthenticated
        this.auth = async () => ({
          type: "unauthenticated"
        });
      } else {
        const auth = (0, import_auth_token.createTokenAuth)(options.auth);
        hook.wrap("request", auth.hook);
        this.auth = auth;
      }
    } else {
      const { authStrategy, ...otherOptions } = options;
      const auth = authStrategy(
        Object.assign(
          {
            request: this.request,
            log: this.log,
            // we pass the current octokit instance as well as its constructor options
            // to allow for authentication strategies that return a new octokit instance
            // that shares the same internal state as the current one. The original
            // requirement for this was the "event-octokit" authentication strategy
            // of https://github.com/probot/octokit-auth-probot.
            octokit: this,
            octokitOptions: otherOptions
          },
          options.auth
        )
      );
      hook.wrap("request", auth.hook);
      this.auth = auth;
    }
    // apply every registered plugin; plugin return values are merged in
    const classConstructor = this.constructor;
    for (let i = 0; i < classConstructor.plugins.length; ++i) {
      Object.assign(this, classConstructor.plugins[i](this, options));
    }
  }
};
// Annotate the CommonJS export names for ESM import in node:
0 && (0);
/***/ } ) ,
/***/ 9440 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// esbuild CommonJS interop helpers
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define each entry of `all` on `target` as an enumerable getter.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except` and
// keys `to` already owns; preserves the source property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Tag a module namespace object as ESM-compatible CommonJS output.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// pkg/dist-src/index.js (@octokit/endpoint) — the single public export is
// the `endpoint` function defined at the bottom of this module.
var dist_src_exports = {};
__export(dist_src_exports, {
  endpoint: () => endpoint
});
module.exports = __toCommonJS(dist_src_exports);

// pkg/dist-src/defaults.js
var import_universal_user_agent = __nccwpck_require__(5030);

// pkg/dist-src/version.js
var VERSION = "9.0.5";

// pkg/dist-src/defaults.js
// default user-agent sent with every request this module builds
var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;
// Baseline request options every endpoint starts from.
var DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: ""
  }
};
// pkg/dist-src/util/lowercase-keys.js
// Return a copy of `object` with every key lowercased ({} for falsy input).
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }
  return Object.keys(object).reduce((newObj, key) => {
    newObj[key.toLowerCase()] = object[key];
    return newObj;
  }, {});
}
// pkg/dist-src/util/is-plain-object.js
// True for objects created via `{}` / `new Object` / `Object.create(null)`;
// false for arrays, class instances, null and other exotic objects.
function isPlainObject(value) {
  if (typeof value !== "object" || value === null)
    return false;
  if (Object.prototype.toString.call(value) !== "[object Object]")
    return false;
  const proto = Object.getPrototypeOf(value);
  if (proto === null)
    return true;
  const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
  return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
}
// pkg/dist-src/util/merge-deep.js
// Recursively merge `options` into a copy of `defaults`: plain objects are
// merged key-by-key, every other value overwrites the default.
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);
  Object.keys(options).forEach((key) => {
    if (isPlainObject(options[key])) {
      if (!(key in defaults))
        Object.assign(result, { [key]: options[key] });
      else
        result[key] = mergeDeep(defaults[key], options[key]);
    } else {
      Object.assign(result, { [key]: options[key] });
    }
  });
  return result;
}
// pkg/dist-src/util/remove-undefined-properties.js
// Delete keys whose value is `undefined`, in place; returns `obj`.
function removeUndefinedProperties(obj) {
  for (const key in obj) {
    if (obj[key] === void 0) {
      delete obj[key];
    }
  }
  return obj;
}
// pkg/dist-src/merge.js
// Merge route/options on top of defaults. A string route "METHOD /path" is
// split into { method, url }; header keys are lowercased and undefined
// values dropped before the deep merge.
function merge(defaults, route, options) {
  if (typeof route === "string") {
    let [method, url] = route.split(" ");
    options = Object.assign(url ? { method, url } : { url: method }, options);
  } else {
    options = Object.assign({}, route);
  }
  options.headers = lowercaseKeys(options.headers);
  removeUndefinedProperties(options);
  removeUndefinedProperties(options.headers);
  const mergedOptions = mergeDeep(defaults || {}, options);
  if (options.url === "/graphql") {
    if (defaults && defaults.mediaType.previews?.length) {
      // keep default previews that the options did not already request
      mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(
        (preview) => !mergedOptions.mediaType.previews.includes(preview)
      ).concat(mergedOptions.mediaType.previews);
    }
    mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, ""));
  }
  return mergedOptions;
}
// pkg/dist-src/util/add-query-parameters.js
// Append `parameters` to `url` as a query string. The "q" search parameter
// is treated specially: it is split on "+" so existing separators survive
// encoding.
function addQueryParameters(url, parameters) {
  const separator = /\?/.test(url) ? "&" : "?";
  const names = Object.keys(parameters);
  if (names.length === 0) {
    return url;
  }
  return url + separator + names.map((name) => {
    if (name === "q") {
      return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
    }
    return `${name}=${encodeURIComponent(parameters[name])}`;
  }).join("&");
}
// pkg/dist-src/util/extract-url-variable-names.js
var urlVariableRegex = /\{[^}]+\}/g;
// "{?owner,repo}" -> ["owner", "repo"]
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}
// Collect every template variable name that appears in `url`.
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);
  if (!matches) {
    return [];
  }
  return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
}
// pkg/dist-src/util/omit.js
// Shallow-copy `object` without the listed keys. The result has a null
// prototype to guard against prototype pollution via crafted keys.
function omit(object, keysToOmit) {
  const result = { __proto__: null };
  for (const key of Object.keys(object)) {
    if (keysToOmit.indexOf(key) === -1) {
      result[key] = object[key];
    }
  }
  return result;
}
// pkg/dist-src/util/url-template.js
// RFC 6570 URI-template expansion, trimmed to the operators GitHub's REST
// routes use.

// Percent-encode a string while leaving already-encoded %XX triplets and
// reserved characters (plus square brackets) intact.
function encodeReserved(str) {
  return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {
    if (!/%[0-9A-Fa-f]/.test(part)) {
      part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
    }
    return part;
  }).join("");
}
function encodeUnreserved(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {
    return "%" + c.charCodeAt(0).toString(16).toUpperCase();
  });
}
// Encode one value; the "+" and "#" operators allow reserved characters.
function encodeValue(operator, value, key) {
  value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
  if (key) {
    return encodeUnreserved(key) + "=" + value;
  } else {
    return value;
  }
}
function isDefined(value) {
  return value !== void 0 && value !== null;
}
// Operators whose expansions render as key=value pairs.
function isKeyOperator(operator) {
  return operator === ";" || operator === "&" || operator === "?";
}
// Expand one variable of an expression into its encoded string parts.
function getValues(context, operator, key, modifier) {
  var value = context[key], result = [];
  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();
      if (modifier && modifier !== "*") {
        // prefix modifier {var:N} truncates the value
        value = value.substring(0, parseInt(modifier, 10));
      }
      result.push(
        encodeValue(operator, value, isKeyOperator(operator) ? key : "")
      );
    } else {
      if (modifier === "*") {
        // explode modifier: each array item / object entry becomes a part
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function(value2) {
            result.push(
              encodeValue(operator, value2, isKeyOperator(operator) ? key : "")
            );
          });
        } else {
          Object.keys(value).forEach(function(k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        const tmp = [];
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function(value2) {
            tmp.push(encodeValue(operator, value2));
          });
        } else {
          Object.keys(value).forEach(function(k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }
        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // undefined/empty handling depends on the operator
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }
  return result;
}
function parseUrl(template) {
  return {
    expand: expand.bind(null, template)
  };
}
// Replace every {expression} in `template` with its expansion from `context`;
// a trailing slash is stripped unless the whole result is "/".
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  template = template.replace(
    /\{([^\{\}]+)\}|([^\{\}]+)/g,
    function(_, expression, literal) {
      if (expression) {
        let operator = "";
        const values = [];
        if (operators.indexOf(expression.charAt(0)) !== -1) {
          operator = expression.charAt(0);
          expression = expression.substr(1);
        }
        expression.split(/,/g).forEach(function(variable) {
          var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
          values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
        });
        if (operator && operator !== "+") {
          var separator = ",";
          if (operator === "?") {
            separator = "&";
          } else if (operator !== "#") {
            separator = operator;
          }
          return (values.length !== 0 ? operator : "") + values.join(separator);
        } else {
          return values.join(",");
        }
      } else {
        return encodeReserved(literal);
      }
    }
  );
  if (template === "/") {
    return template;
  } else {
    return template.replace(/\/$/, "");
  }
}
// pkg/dist-src/parse.js
// Turn merged endpoint options into a fetch-ready
// { method, url, headers, body?, request? } object.
function parse(options) {
  let method = options.method.toUpperCase();
  // normalize ":varname" route placeholders to "{varname}" template syntax
  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  let parameters = omit(options, [
    "method",
    "baseUrl",
    "url",
    "headers",
    "request",
    "mediaType"
  ]);
  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);
  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }
  // parameters consumed by the URL template (plus baseUrl) are dropped
  const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
  if (!isBinaryRequest) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json -> application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(
        (format) => format.replace(
          /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/,
          `application/vnd$1$2.${options.mediaType.format}`
        )
      ).join(",");
    }
    if (url.endsWith("/graphql")) {
      if (options.mediaType.previews?.length) {
        const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
        headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {
          const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
          return `application/vnd.github.${preview}-preview${format}`;
        }).join(",");
      }
    }
  }
  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      }
    }
  }
  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  }
  // GitHub expects an empty-string body on PATCH/PUT, not a missing one
  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  }
  return Object.assign(
    { method, url, headers },
    typeof body !== "undefined" ? { body } : null,
    options.request ? { request: options.request } : null
  );
}
// pkg/dist-src/endpoint-with-defaults.js
// Merge defaults + route + options, then parse into final request options.
function endpointWithDefaults(defaults, route, options) {
  return parse(merge(defaults, route, options));
}
// pkg/dist-src/with-defaults.js
/**
 * Build a new endpoint interface with `newDefaults` merged on top of
 * `oldDefaults`. The returned function also exposes DEFAULTS, defaults,
 * merge and parse, mirroring the public endpoint API.
 */
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS2 = merge(oldDefaults, newDefaults);
  const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2);
  return Object.assign(endpoint2, {
    DEFAULTS: DEFAULTS2,
    defaults: withDefaults.bind(null, DEFAULTS2),
    merge: merge.bind(null, DEFAULTS2),
    parse
  });
}
// pkg/dist-src/index.js
// The public `endpoint` function, seeded with the module defaults.
var endpoint = withDefaults(null, DEFAULTS);
// Annotate the CommonJS export names for ESM import in node:
0 && (0);
/***/ } ) ,
/***/ 8467 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// esbuild CommonJS interop helpers
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define each entry of `all` on `target` as an enumerable lazy getter.
var __export = (target, all) => {
  for (var exportedName in all) {
    __defProp(target, exportedName, { get: all[exportedName], enumerable: true });
  }
};
// Mirror own properties of `source` onto `dest` as getters, skipping the
// `excluded` key and anything `dest` already owns; keeps the source
// property's enumerability.
var __copyProps = (dest, source, excluded, sourceDesc) => {
  if (source && (typeof source === "object" || typeof source === "function")) {
    for (const propName of __getOwnPropNames(source)) {
      if (__hasOwnProp.call(dest, propName) || propName === excluded) continue;
      sourceDesc = __getOwnPropDesc(source, propName);
      __defProp(dest, propName, {
        get: () => source[propName],
        enumerable: !sourceDesc || sourceDesc.enumerable
      });
    }
  }
  return dest;
};
// Tag a module namespace object as ESM-compatible CommonJS output.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// pkg/dist-src/index.js (@octokit/graphql) — public exports.
var dist_src_exports = {};
__export(dist_src_exports, {
  GraphqlResponseError: () => GraphqlResponseError,
  graphql: () => graphql2,
  withCustomRequest: () => withCustomRequest
});
module.exports = __toCommonJS(dist_src_exports);
var import_request3 = __nccwpck_require__(6234);
var import_universal_user_agent = __nccwpck_require__(5030);

// pkg/dist-src/version.js
var VERSION = "7.1.0";

// pkg/dist-src/with-defaults.js
var import_request2 = __nccwpck_require__(6234);

// pkg/dist-src/graphql.js
var import_request = __nccwpck_require__(6234);
// pkg/dist-src/error.js
// Build the error message: a summary line plus one bullet per GraphQL error.
function _buildMessageForResponseErrors(data) {
  return `Request failed due to following response errors:
` + data.errors.map((e) => ` - ${e.message}`).join("\n");
}
// Error thrown when a GraphQL response carries an `errors` array; keeps the
// request options, response headers, error list and any partial `data`.
var GraphqlResponseError = class extends Error {
  constructor(request2, headers, response) {
    super(_buildMessageForResponseErrors(response));
    this.request = request2;
    this.headers = headers;
    this.response = response;
    this.name = "GraphqlResponseError";
    this.errors = response.errors;
    this.data = response.data;
    // capture a clean stack trace where supported (V8)
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
};
// pkg/dist-src/graphql.js
// Option keys that configure the request itself rather than GraphQL variables.
var NON_VARIABLE_OPTIONS = [
  "method",
  "baseUrl",
  "url",
  "headers",
  "request",
  "query",
  "mediaType"
];
var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
/**
 * Send a GraphQL request. `query` is either the query string (with `options`
 * holding variables plus request options) or a complete options object.
 * Resolves with `response.data.data`; rejects with a GraphqlResponseError
 * when the response contains an `errors` array.
 */
function graphql(request2, query, options) {
  if (options) {
    if (typeof query === "string" && "query" in options) {
      return Promise.reject(
        new Error(`[@octokit/graphql] "query" cannot be used as variable name`)
      );
    }
    for (const key in options) {
      if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))
        continue;
      return Promise.reject(
        new Error(
          `[@octokit/graphql] "${key}" cannot be used as variable name`
        )
      );
    }
  }
  const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query;
  // split request options from GraphQL variables
  const requestOptions = Object.keys(
    parsedOptions
  ).reduce((result, key) => {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      result[key] = parsedOptions[key];
      return result;
    }
    if (!result.variables) {
      result.variables = {};
    }
    result.variables[key] = parsedOptions[key];
    return result;
  }, {});
  const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;
  if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
    // GitHub Enterprise Server: the /api/v3 REST root maps to /api/graphql
    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
  }
  return request2(requestOptions).then((response) => {
    if (response.data.errors) {
      const headers = {};
      for (const key of Object.keys(response.headers)) {
        headers[key] = response.headers[key];
      }
      throw new GraphqlResponseError(
        requestOptions,
        headers,
        response.data
      );
    }
    return response.data.data;
  });
}
// pkg/dist-src/with-defaults.js
// Return a graphql API bound to `request2.defaults(newDefaults)`, exposing
// a chainable `.defaults()` and the underlying `.endpoint`.
function withDefaults(request2, newDefaults) {
  const newRequest = request2.defaults(newDefaults);
  const newApi = (query, options) => {
    return graphql(newRequest, query, options);
  };
  return Object.assign(newApi, {
    defaults: withDefaults.bind(null, newRequest),
    endpoint: newRequest.endpoint
  });
}
// pkg/dist-src/index.js
// Pre-configured graphql function bound to @octokit/request's defaults.
var graphql2 = withDefaults(import_request3.request, {
  headers: {
    "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
  },
  method: "POST",
  url: "/graphql"
});
// Build a graphql function on top of a caller-supplied request function.
function withCustomRequest(customRequest) {
  return withDefaults(customRequest, {
    method: "POST",
    url: "/graphql"
  });
}
// Annotate the CommonJS export names for ESM import in node:
0 && (0);
/***/ } ) ,
/***/ 4193 :
/***/ ( ( module ) => {
"use strict" ;
// esbuild CommonJS interop helpers
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define each entry of `all` on `target` as an enumerable getter.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except` and
// keys `to` already owns; preserves the source property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Tag a module namespace object as ESM-compatible CommonJS output.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// pkg/dist-src/index.js (@octokit/plugin-paginate-rest) — public exports.
var dist_src_exports = {};
__export(dist_src_exports, {
  composePaginateRest: () => composePaginateRest,
  isPaginatingEndpoint: () => isPaginatingEndpoint,
  paginateRest: () => paginateRest,
  paginatingEndpoints: () => paginatingEndpoints
});
module.exports = __toCommonJS(dist_src_exports);

// pkg/dist-src/version.js
var VERSION = "9.2.1";
// pkg/dist-src/normalize-paginated-list-response.js
/**
 * Some list endpoints respond with `{ total_count, incomplete_results?,
 * repository_selection?, <namespace>: [...] }` instead of a bare array.
 * Flatten such responses so `response.data` is always the array,
 * re-attaching the bookkeeping fields onto it. Responses whose data has a
 * `url` key (or no `total_count`) pass through untouched.
 */
function normalizePaginatedListResponse(response) {
  if (!response.data) {
    return {
      ...response,
      data: []
    };
  }
  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
  if (!responseNeedsNormalization)
    return response;
  const incompleteResults = response.data.incomplete_results;
  const repositorySelection = response.data.repository_selection;
  const totalCount = response.data.total_count;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;
  // after removing the bookkeeping keys, the only key left is the namespace
  const namespaceKey = Object.keys(response.data)[0];
  const data = response.data[namespaceKey];
  response.data = data;
  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }
  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }
  response.data.total_count = totalCount;
  return response;
}
// pkg/dist-src/iterator.js
/**
 * Async iterator over the pages of a paginated endpoint. Follows the
 * `rel="next"` link response header until exhausted; a 409 response ends
 * iteration with an empty page instead of throwing.
 */
function iterator(octokit, route, parameters) {
  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const method = options.method;
  const headers = options.headers;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      async next() {
        if (!url)
          return { done: true };
        try {
          const response = await requestMethod({ method, url, headers });
          const normalizedResponse = normalizePaginatedListResponse(response);
          // next page URL from the `link` response header, if present
          url = ((normalizedResponse.headers.link || "").match(
            /<([^>]+)>;\s*rel="next"/
          ) || [])[1];
          return { value: normalizedResponse };
        } catch (error) {
          if (error.status !== 409)
            throw error;
          url = "";
          return {
            value: {
              status: 200,
              headers: {},
              data: []
            }
          };
        }
      }
    })
  };
}
// pkg/dist-src/paginate.js
// Collect all pages into one array; `mapFn(response, done)` may map each
// page and call `done()` to stop early. `parameters` may be omitted, in
// which case the third argument is the map function.
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = void 0;
  }
  return gather(
    octokit,
    [],
    iterator(octokit, route, parameters)[Symbol.asyncIterator](),
    mapFn
  );
}
// Recursively drain `iterator2`, concatenating each page's (optionally
// mapped) results; stops when the iterator is done or mapFn calls done().
function gather(octokit, results, iterator2, mapFn) {
  return iterator2.next().then((result) => {
    if (result.done) {
      return results;
    }
    let earlyExit = false;
    function done() {
      earlyExit = true;
    }
    results = results.concat(
      mapFn ? mapFn(result.value, done) : result.value.data
    );
    if (earlyExit) {
      return results;
    }
    return gather(octokit, results, iterator2, mapFn);
  });
}
// pkg/dist-src/compose-paginate.js
// Standalone paginate (octokit passed explicitly) with `.iterator` attached.
var composePaginateRest = Object.assign(paginate, {
  iterator
});
// pkg/dist-src/generated/paginating-endpoints.js
var paginatingEndpoints = [
"GET /advisories" ,
"GET /app/hook/deliveries" ,
"GET /app/installation-requests" ,
"GET /app/installations" ,
"GET /assignments/{assignment_id}/accepted_assignments" ,
"GET /classrooms" ,
"GET /classrooms/{classroom_id}/assignments" ,
"GET /enterprises/{enterprise}/dependabot/alerts" ,
"GET /enterprises/{enterprise}/secret-scanning/alerts" ,
"GET /events" ,
"GET /gists" ,
"GET /gists/public" ,
"GET /gists/starred" ,
"GET /gists/{gist_id}/comments" ,
"GET /gists/{gist_id}/commits" ,
"GET /gists/{gist_id}/forks" ,
"GET /installation/repositories" ,
"GET /issues" ,
"GET /licenses" ,
"GET /marketplace_listing/plans" ,
"GET /marketplace_listing/plans/{plan_id}/accounts" ,
"GET /marketplace_listing/stubbed/plans" ,
"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" ,
"GET /networks/{owner}/{repo}/events" ,
"GET /notifications" ,
"GET /organizations" ,
"GET /orgs/{org}/actions/cache/usage-by-repository" ,
"GET /orgs/{org}/actions/permissions/repositories" ,
"GET /orgs/{org}/actions/runners" ,
"GET /orgs/{org}/actions/secrets" ,
"GET /orgs/{org}/actions/secrets/{secret_name}/repositories" ,
"GET /orgs/{org}/actions/variables" ,
"GET /orgs/{org}/actions/variables/{name}/repositories" ,
"GET /orgs/{org}/blocks" ,
"GET /orgs/{org}/code-scanning/alerts" ,
"GET /orgs/{org}/codespaces" ,
"GET /orgs/{org}/codespaces/secrets" ,
"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" ,
"GET /orgs/{org}/copilot/billing/seats" ,
"GET /orgs/{org}/dependabot/alerts" ,
"GET /orgs/{org}/dependabot/secrets" ,
"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" ,
"GET /orgs/{org}/events" ,
"GET /orgs/{org}/failed_invitations" ,
"GET /orgs/{org}/hooks" ,
"GET /orgs/{org}/hooks/{hook_id}/deliveries" ,
"GET /orgs/{org}/installations" ,
"GET /orgs/{org}/invitations" ,
"GET /orgs/{org}/invitations/{invitation_id}/teams" ,
"GET /orgs/{org}/issues" ,
"GET /orgs/{org}/members" ,
"GET /orgs/{org}/members/{username}/codespaces" ,
"GET /orgs/{org}/migrations" ,
"GET /orgs/{org}/migrations/{migration_id}/repositories" ,
"GET /orgs/{org}/organization-roles/{role_id}/teams" ,
"GET /orgs/{org}/organization-roles/{role_id}/users" ,
"GET /orgs/{org}/outside_collaborators" ,
"GET /orgs/{org}/packages" ,
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions" ,
"GET /orgs/{org}/personal-access-token-requests" ,
"GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" ,
"GET /orgs/{org}/personal-access-tokens" ,
"GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" ,
"GET /orgs/{org}/projects" ,
"GET /orgs/{org}/properties/values" ,
"GET /orgs/{org}/public_members" ,
"GET /orgs/{org}/repos" ,
"GET /orgs/{org}/rulesets" ,
"GET /orgs/{org}/rulesets/rule-suites" ,
"GET /orgs/{org}/secret-scanning/alerts" ,
"GET /orgs/{org}/security-advisories" ,
"GET /orgs/{org}/teams" ,
"GET /orgs/{org}/teams/{team_slug}/discussions" ,
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ,
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" ,
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" ,
"GET /orgs/{org}/teams/{team_slug}/invitations" ,
"GET /orgs/{org}/teams/{team_slug}/members" ,
"GET /orgs/{org}/teams/{team_slug}/projects" ,
"GET /orgs/{org}/teams/{team_slug}/repos" ,
"GET /orgs/{org}/teams/{team_slug}/teams" ,
"GET /projects/columns/{column_id}/cards" ,
"GET /projects/{project_id}/collaborators" ,
"GET /projects/{project_id}/columns" ,
"GET /repos/{owner}/{repo}/actions/artifacts" ,
"GET /repos/{owner}/{repo}/actions/caches" ,
"GET /repos/{owner}/{repo}/actions/organization-secrets" ,
"GET /repos/{owner}/{repo}/actions/organization-variables" ,
"GET /repos/{owner}/{repo}/actions/runners" ,
"GET /repos/{owner}/{repo}/actions/runs" ,
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" ,
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" ,
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" ,
"GET /repos/{owner}/{repo}/actions/secrets" ,
"GET /repos/{owner}/{repo}/actions/variables" ,
"GET /repos/{owner}/{repo}/actions/workflows" ,
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" ,
"GET /repos/{owner}/{repo}/activity" ,
"GET /repos/{owner}/{repo}/assignees" ,
"GET /repos/{owner}/{repo}/branches" ,
"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" ,
"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" ,
"GET /repos/{owner}/{repo}/code-scanning/alerts" ,
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" ,
"GET /repos/{owner}/{repo}/code-scanning/analyses" ,
"GET /repos/{owner}/{repo}/codespaces" ,
"GET /repos/{owner}/{repo}/codespaces/devcontainers" ,
"GET /repos/{owner}/{repo}/codespaces/secrets" ,
"GET /repos/{owner}/{repo}/collaborators" ,
"GET /repos/{owner}/{repo}/comments" ,
"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" ,
"GET /repos/{owner}/{repo}/commits" ,
"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" ,
"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" ,
"GET /repos/{owner}/{repo}/commits/{ref}/check-runs" ,
"GET /repos/{owner}/{repo}/commits/{ref}/check-suites" ,
"GET /repos/{owner}/{repo}/commits/{ref}/status" ,
"GET /repos/{owner}/{repo}/commits/{ref}/statuses" ,
"GET /repos/{owner}/{repo}/contributors" ,
"GET /repos/{owner}/{repo}/dependabot/alerts" ,
"GET /repos/{owner}/{repo}/dependabot/secrets" ,
"GET /repos/{owner}/{repo}/deployments" ,
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ,
"GET /repos/{owner}/{repo}/environments" ,
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" ,
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" ,
"GET /repos/{owner}/{repo}/events" ,
"GET /repos/{owner}/{repo}/forks" ,
"GET /repos/{owner}/{repo}/hooks" ,
"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" ,
"GET /repos/{owner}/{repo}/invitations" ,
"GET /repos/{owner}/{repo}/issues" ,
"GET /repos/{owner}/{repo}/issues/comments" ,
"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" ,
"GET /repos/{owner}/{repo}/issues/events" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/comments" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/events" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/labels" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions" ,
"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" ,
"GET /repos/{owner}/{repo}/keys" ,
"GET /repos/{owner}/{repo}/labels" ,
"GET /repos/{owner}/{repo}/milestones" ,
"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" ,
"GET /repos/{owner}/{repo}/notifications" ,
"GET /repos/{owner}/{repo}/pages/builds" ,
"GET /repos/{owner}/{repo}/projects" ,
"GET /repos/{owner}/{repo}/pulls" ,
"GET /repos/{owner}/{repo}/pulls/comments" ,
"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/files" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ,
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ,
"GET /repos/{owner}/{repo}/releases" ,
"GET /repos/{owner}/{repo}/releases/{release_id}/assets" ,
"GET /repos/{owner}/{repo}/releases/{release_id}/reactions" ,
"GET /repos/{owner}/{repo}/rules/branches/{branch}" ,
"GET /repos/{owner}/{repo}/rulesets" ,
"GET /repos/{owner}/{repo}/rulesets/rule-suites" ,
"GET /repos/{owner}/{repo}/secret-scanning/alerts" ,
"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" ,
"GET /repos/{owner}/{repo}/security-advisories" ,
"GET /repos/{owner}/{repo}/stargazers" ,
"GET /repos/{owner}/{repo}/subscribers" ,
"GET /repos/{owner}/{repo}/tags" ,
"GET /repos/{owner}/{repo}/teams" ,
"GET /repos/{owner}/{repo}/topics" ,
"GET /repositories" ,
"GET /repositories/{repository_id}/environments/{environment_name}/secrets" ,
"GET /repositories/{repository_id}/environments/{environment_name}/variables" ,
"GET /search/code" ,
"GET /search/commits" ,
"GET /search/issues" ,
"GET /search/labels" ,
"GET /search/repositories" ,
"GET /search/topics" ,
"GET /search/users" ,
"GET /teams/{team_id}/discussions" ,
"GET /teams/{team_id}/discussions/{discussion_number}/comments" ,
"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions" ,
"GET /teams/{team_id}/discussions/{discussion_number}/reactions" ,
"GET /teams/{team_id}/invitations" ,
"GET /teams/{team_id}/members" ,
"GET /teams/{team_id}/projects" ,
"GET /teams/{team_id}/repos" ,
"GET /teams/{team_id}/teams" ,
"GET /user/blocks" ,
"GET /user/codespaces" ,
"GET /user/codespaces/secrets" ,
"GET /user/emails" ,
"GET /user/followers" ,
"GET /user/following" ,
"GET /user/gpg_keys" ,
"GET /user/installations" ,
"GET /user/installations/{installation_id}/repositories" ,
"GET /user/issues" ,
"GET /user/keys" ,
"GET /user/marketplace_purchases" ,
"GET /user/marketplace_purchases/stubbed" ,
"GET /user/memberships/orgs" ,
"GET /user/migrations" ,
"GET /user/migrations/{migration_id}/repositories" ,
"GET /user/orgs" ,
"GET /user/packages" ,
"GET /user/packages/{package_type}/{package_name}/versions" ,
"GET /user/public_emails" ,
"GET /user/repos" ,
"GET /user/repository_invitations" ,
"GET /user/social_accounts" ,
"GET /user/ssh_signing_keys" ,
"GET /user/starred" ,
"GET /user/subscriptions" ,
"GET /user/teams" ,
"GET /users" ,
"GET /users/{username}/events" ,
"GET /users/{username}/events/orgs/{org}" ,
"GET /users/{username}/events/public" ,
"GET /users/{username}/followers" ,
"GET /users/{username}/following" ,
"GET /users/{username}/gists" ,
"GET /users/{username}/gpg_keys" ,
"GET /users/{username}/keys" ,
"GET /users/{username}/orgs" ,
"GET /users/{username}/packages" ,
"GET /users/{username}/projects" ,
"GET /users/{username}/received_events" ,
"GET /users/{username}/received_events/public" ,
"GET /users/{username}/repos" ,
"GET /users/{username}/social_accounts" ,
"GET /users/{username}/ssh_signing_keys" ,
"GET /users/{username}/starred" ,
"GET /users/{username}/subscriptions"
] ;
// 2023-03-09 17:42:29 +01:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
// pkg/dist-src/paginating-endpoints.js
/**
 * Type guard: true when `arg` is one of the known paginating REST routes
 * listed in `paginatingEndpoints`; false for non-strings or unknown routes.
 */
function isPaginatingEndpoint(arg) {
  return typeof arg === "string" && paginatingEndpoints.includes(arg);
}
// 2019-12-03 10:28:59 -05:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
// pkg/dist-src/index.js
/**
 * Octokit plugin entry point: adds `octokit.paginate(...)` together with
 * `octokit.paginate.iterator(...)`, both pre-bound to this octokit instance.
 */
function paginateRest(octokit) {
  const boundPaginate = Object.assign(paginate.bind(null, octokit), {
    iterator: iterator.bind(null, octokit)
  });
  return { paginate: boundPaginate };
}
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
paginateRest . VERSION = VERSION ;
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
// 2019-12-03 10:28:59 -05:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
/***/ } ) ,
// 2019-12-03 10:28:59 -05:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
/***/ 3044 :
/***/ ( ( module ) => {
// 2019-12-03 10:28:59 -05:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
"use strict" ;
// 2019-12-03 10:28:59 -05:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
// esbuild CommonJS interop helpers.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define a lazy, enumerable getter on `target` for every entry of `all`.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as live getters, skipping `except`
// and anything `to` already owns; preserves each source property's enumerability.
var __copyProps = (to, from, except, desc) => {
  const copyable = (from && typeof from === "object") || typeof from === "function";
  if (copyable) {
    for (const key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
    }
  }
  return to;
};
// Wrap an ES-module namespace object as a CommonJS export object.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// 2019-12-03 10:28:59 -05:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
// pkg/dist-src/index.js
// Module export table: getters are lazy, so the referenced functions
// (defined later in this module) are resolved at access time.
var dist_src_exports = {};
__export(dist_src_exports, {
  legacyRestEndpointMethods: () => legacyRestEndpointMethods,
  restEndpointMethods: () => restEndpointMethods
});
module.exports = __toCommonJS(dist_src_exports);
// 2019-12-03 10:28:59 -05:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
// pkg/dist-src/version.js
var VERSION = "10.4.1" ;
// 2019-12-03 10:28:59 -05:00  (stray git-blame timestamp artifact, commented out)
// 2024-04-24 12:04:10 -04:00  (stray git-blame timestamp artifact, commented out)
// pkg/dist-src/generated/endpoints.js
var Endpoints = {
actions : {
addCustomLabelsToSelfHostedRunnerForOrg : [
"POST /orgs/{org}/actions/runners/{runner_id}/labels"
] ,
addCustomLabelsToSelfHostedRunnerForRepo : [
"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
] ,
addSelectedRepoToOrgSecret : [
"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
] ,
addSelectedRepoToOrgVariable : [
"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
] ,
approveWorkflowRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"
] ,
cancelWorkflowRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"
] ,
createEnvironmentVariable : [
"POST /repositories/{repository_id}/environments/{environment_name}/variables"
] ,
createOrUpdateEnvironmentSecret : [
"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
] ,
createOrUpdateOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}" ] ,
createOrUpdateRepoSecret : [
"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"
] ,
createOrgVariable : [ "POST /orgs/{org}/actions/variables" ] ,
createRegistrationTokenForOrg : [
"POST /orgs/{org}/actions/runners/registration-token"
] ,
createRegistrationTokenForRepo : [
"POST /repos/{owner}/{repo}/actions/runners/registration-token"
] ,
createRemoveTokenForOrg : [ "POST /orgs/{org}/actions/runners/remove-token" ] ,
createRemoveTokenForRepo : [
"POST /repos/{owner}/{repo}/actions/runners/remove-token"
] ,
createRepoVariable : [ "POST /repos/{owner}/{repo}/actions/variables" ] ,
createWorkflowDispatch : [
"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"
] ,
deleteActionsCacheById : [
"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"
] ,
deleteActionsCacheByKey : [
"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"
] ,
deleteArtifact : [
"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"
] ,
deleteEnvironmentSecret : [
"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
] ,
deleteEnvironmentVariable : [
"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
] ,
deleteOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}" ] ,
deleteOrgVariable : [ "DELETE /orgs/{org}/actions/variables/{name}" ] ,
deleteRepoSecret : [
"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"
] ,
deleteRepoVariable : [
"DELETE /repos/{owner}/{repo}/actions/variables/{name}"
] ,
deleteSelfHostedRunnerFromOrg : [
"DELETE /orgs/{org}/actions/runners/{runner_id}"
] ,
deleteSelfHostedRunnerFromRepo : [
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"
] ,
deleteWorkflowRun : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
deleteWorkflowRunLogs : [
"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
] ,
disableSelectedRepositoryGithubActionsOrganization : [
"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"
] ,
disableWorkflow : [
"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"
] ,
downloadArtifact : [
"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"
] ,
downloadJobLogsForWorkflowRun : [
"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"
] ,
downloadWorkflowRunAttemptLogs : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"
] ,
downloadWorkflowRunLogs : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
] ,
enableSelectedRepositoryGithubActionsOrganization : [
"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"
] ,
enableWorkflow : [
"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"
] ,
forceCancelWorkflowRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel"
] ,
generateRunnerJitconfigForOrg : [
"POST /orgs/{org}/actions/runners/generate-jitconfig"
] ,
generateRunnerJitconfigForRepo : [
"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig"
] ,
getActionsCacheList : [ "GET /repos/{owner}/{repo}/actions/caches" ] ,
getActionsCacheUsage : [ "GET /repos/{owner}/{repo}/actions/cache/usage" ] ,
getActionsCacheUsageByRepoForOrg : [
"GET /orgs/{org}/actions/cache/usage-by-repository"
] ,
getActionsCacheUsageForOrg : [ "GET /orgs/{org}/actions/cache/usage" ] ,
getAllowedActionsOrganization : [
"GET /orgs/{org}/actions/permissions/selected-actions"
] ,
getAllowedActionsRepository : [
"GET /repos/{owner}/{repo}/actions/permissions/selected-actions"
] ,
getArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
getCustomOidcSubClaimForRepo : [
"GET /repos/{owner}/{repo}/actions/oidc/customization/sub"
] ,
getEnvironmentPublicKey : [
"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"
] ,
getEnvironmentSecret : [
"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
] ,
getEnvironmentVariable : [
"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
] ,
getGithubActionsDefaultWorkflowPermissionsOrganization : [
"GET /orgs/{org}/actions/permissions/workflow"
] ,
getGithubActionsDefaultWorkflowPermissionsRepository : [
"GET /repos/{owner}/{repo}/actions/permissions/workflow"
] ,
getGithubActionsPermissionsOrganization : [
"GET /orgs/{org}/actions/permissions"
] ,
getGithubActionsPermissionsRepository : [
"GET /repos/{owner}/{repo}/actions/permissions"
] ,
getJobForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/actions/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}" ] ,
getOrgVariable : [ "GET /orgs/{org}/actions/variables/{name}" ] ,
getPendingDeploymentsForRun : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
] ,
getRepoPermissions : [
"GET /repos/{owner}/{repo}/actions/permissions" ,
{ } ,
{ renamed : [ "actions" , "getGithubActionsPermissionsRepository" ] }
] ,
getRepoPublicKey : [ "GET /repos/{owner}/{repo}/actions/secrets/public-key" ] ,
getRepoSecret : [ "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
getRepoVariable : [ "GET /repos/{owner}/{repo}/actions/variables/{name}" ] ,
getReviewsForRun : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"
] ,
getSelfHostedRunnerForOrg : [ "GET /orgs/{org}/actions/runners/{runner_id}" ] ,
getSelfHostedRunnerForRepo : [
"GET /repos/{owner}/{repo}/actions/runners/{runner_id}"
] ,
getWorkflow : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}" ] ,
getWorkflowAccessToRepository : [
"GET /repos/{owner}/{repo}/actions/permissions/access"
] ,
getWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
getWorkflowRunAttempt : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"
] ,
getWorkflowRunUsage : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"
] ,
getWorkflowUsage : [
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"
] ,
listArtifactsForRepo : [ "GET /repos/{owner}/{repo}/actions/artifacts" ] ,
listEnvironmentSecrets : [
"GET /repositories/{repository_id}/environments/{environment_name}/secrets"
] ,
listEnvironmentVariables : [
"GET /repositories/{repository_id}/environments/{environment_name}/variables"
] ,
listJobsForWorkflowRun : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"
] ,
listJobsForWorkflowRunAttempt : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"
] ,
listLabelsForSelfHostedRunnerForOrg : [
"GET /orgs/{org}/actions/runners/{runner_id}/labels"
] ,
listLabelsForSelfHostedRunnerForRepo : [
"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
] ,
listOrgSecrets : [ "GET /orgs/{org}/actions/secrets" ] ,
listOrgVariables : [ "GET /orgs/{org}/actions/variables" ] ,
listRepoOrganizationSecrets : [
"GET /repos/{owner}/{repo}/actions/organization-secrets"
] ,
listRepoOrganizationVariables : [
"GET /repos/{owner}/{repo}/actions/organization-variables"
] ,
listRepoSecrets : [ "GET /repos/{owner}/{repo}/actions/secrets" ] ,
listRepoVariables : [ "GET /repos/{owner}/{repo}/actions/variables" ] ,
listRepoWorkflows : [ "GET /repos/{owner}/{repo}/actions/workflows" ] ,
listRunnerApplicationsForOrg : [ "GET /orgs/{org}/actions/runners/downloads" ] ,
listRunnerApplicationsForRepo : [
"GET /repos/{owner}/{repo}/actions/runners/downloads"
] ,
listSelectedReposForOrgSecret : [
"GET /orgs/{org}/actions/secrets/{secret_name}/repositories"
] ,
listSelectedReposForOrgVariable : [
"GET /orgs/{org}/actions/variables/{name}/repositories"
] ,
listSelectedRepositoriesEnabledGithubActionsOrganization : [
"GET /orgs/{org}/actions/permissions/repositories"
] ,
listSelfHostedRunnersForOrg : [ "GET /orgs/{org}/actions/runners" ] ,
listSelfHostedRunnersForRepo : [ "GET /repos/{owner}/{repo}/actions/runners" ] ,
listWorkflowRunArtifacts : [
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"
] ,
listWorkflowRuns : [
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"
] ,
listWorkflowRunsForRepo : [ "GET /repos/{owner}/{repo}/actions/runs" ] ,
reRunJobForWorkflowRun : [
"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"
] ,
reRunWorkflow : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun" ] ,
reRunWorkflowFailedJobs : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"
] ,
removeAllCustomLabelsFromSelfHostedRunnerForOrg : [
"DELETE /orgs/{org}/actions/runners/{runner_id}/labels"
] ,
removeAllCustomLabelsFromSelfHostedRunnerForRepo : [
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
] ,
removeCustomLabelFromSelfHostedRunnerForOrg : [
"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"
] ,
removeCustomLabelFromSelfHostedRunnerForRepo : [
"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"
] ,
removeSelectedRepoFromOrgSecret : [
"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
] ,
removeSelectedRepoFromOrgVariable : [
"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
] ,
reviewCustomGatesForRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule"
] ,
reviewPendingDeploymentsForRun : [
"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
] ,
setAllowedActionsOrganization : [
"PUT /orgs/{org}/actions/permissions/selected-actions"
] ,
setAllowedActionsRepository : [
"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"
] ,
setCustomLabelsForSelfHostedRunnerForOrg : [
"PUT /orgs/{org}/actions/runners/{runner_id}/labels"
] ,
setCustomLabelsForSelfHostedRunnerForRepo : [
"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
] ,
setCustomOidcSubClaimForRepo : [
"PUT /repos/{owner}/{repo}/actions/oidc/customization/sub"
] ,
setGithubActionsDefaultWorkflowPermissionsOrganization : [
"PUT /orgs/{org}/actions/permissions/workflow"
] ,
setGithubActionsDefaultWorkflowPermissionsRepository : [
"PUT /repos/{owner}/{repo}/actions/permissions/workflow"
] ,
setGithubActionsPermissionsOrganization : [
"PUT /orgs/{org}/actions/permissions"
] ,
setGithubActionsPermissionsRepository : [
"PUT /repos/{owner}/{repo}/actions/permissions"
] ,
setSelectedReposForOrgSecret : [
"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"
] ,
setSelectedReposForOrgVariable : [
"PUT /orgs/{org}/actions/variables/{name}/repositories"
] ,
setSelectedRepositoriesEnabledGithubActionsOrganization : [
"PUT /orgs/{org}/actions/permissions/repositories"
] ,
setWorkflowAccessToRepository : [
"PUT /repos/{owner}/{repo}/actions/permissions/access"
] ,
updateEnvironmentVariable : [
"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
] ,
updateOrgVariable : [ "PATCH /orgs/{org}/actions/variables/{name}" ] ,
updateRepoVariable : [
"PATCH /repos/{owner}/{repo}/actions/variables/{name}"
]
} ,
activity : {
checkRepoIsStarredByAuthenticatedUser : [ "GET /user/starred/{owner}/{repo}" ] ,
deleteRepoSubscription : [ "DELETE /repos/{owner}/{repo}/subscription" ] ,
deleteThreadSubscription : [
"DELETE /notifications/threads/{thread_id}/subscription"
] ,
getFeeds : [ "GET /feeds" ] ,
getRepoSubscription : [ "GET /repos/{owner}/{repo}/subscription" ] ,
getThread : [ "GET /notifications/threads/{thread_id}" ] ,
getThreadSubscriptionForAuthenticatedUser : [
"GET /notifications/threads/{thread_id}/subscription"
] ,
listEventsForAuthenticatedUser : [ "GET /users/{username}/events" ] ,
listNotificationsForAuthenticatedUser : [ "GET /notifications" ] ,
listOrgEventsForAuthenticatedUser : [
"GET /users/{username}/events/orgs/{org}"
] ,
listPublicEvents : [ "GET /events" ] ,
listPublicEventsForRepoNetwork : [ "GET /networks/{owner}/{repo}/events" ] ,
listPublicEventsForUser : [ "GET /users/{username}/events/public" ] ,
listPublicOrgEvents : [ "GET /orgs/{org}/events" ] ,
listReceivedEventsForUser : [ "GET /users/{username}/received_events" ] ,
listReceivedPublicEventsForUser : [
"GET /users/{username}/received_events/public"
] ,
listRepoEvents : [ "GET /repos/{owner}/{repo}/events" ] ,
listRepoNotificationsForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/notifications"
] ,
listReposStarredByAuthenticatedUser : [ "GET /user/starred" ] ,
listReposStarredByUser : [ "GET /users/{username}/starred" ] ,
listReposWatchedByUser : [ "GET /users/{username}/subscriptions" ] ,
listStargazersForRepo : [ "GET /repos/{owner}/{repo}/stargazers" ] ,
listWatchedReposForAuthenticatedUser : [ "GET /user/subscriptions" ] ,
listWatchersForRepo : [ "GET /repos/{owner}/{repo}/subscribers" ] ,
markNotificationsAsRead : [ "PUT /notifications" ] ,
markRepoNotificationsAsRead : [ "PUT /repos/{owner}/{repo}/notifications" ] ,
markThreadAsDone : [ "DELETE /notifications/threads/{thread_id}" ] ,
markThreadAsRead : [ "PATCH /notifications/threads/{thread_id}" ] ,
setRepoSubscription : [ "PUT /repos/{owner}/{repo}/subscription" ] ,
setThreadSubscription : [
"PUT /notifications/threads/{thread_id}/subscription"
] ,
starRepoForAuthenticatedUser : [ "PUT /user/starred/{owner}/{repo}" ] ,
unstarRepoForAuthenticatedUser : [ "DELETE /user/starred/{owner}/{repo}" ]
} ,
apps : {
addRepoToInstallation : [
"PUT /user/installations/{installation_id}/repositories/{repository_id}" ,
{ } ,
{ renamed : [ "apps" , "addRepoToInstallationForAuthenticatedUser" ] }
] ,
addRepoToInstallationForAuthenticatedUser : [
"PUT /user/installations/{installation_id}/repositories/{repository_id}"
] ,
checkToken : [ "POST /applications/{client_id}/token" ] ,
createFromManifest : [ "POST /app-manifests/{code}/conversions" ] ,
createInstallationAccessToken : [
"POST /app/installations/{installation_id}/access_tokens"
] ,
deleteAuthorization : [ "DELETE /applications/{client_id}/grant" ] ,
deleteInstallation : [ "DELETE /app/installations/{installation_id}" ] ,
deleteToken : [ "DELETE /applications/{client_id}/token" ] ,
getAuthenticated : [ "GET /app" ] ,
getBySlug : [ "GET /apps/{app_slug}" ] ,
getInstallation : [ "GET /app/installations/{installation_id}" ] ,
getOrgInstallation : [ "GET /orgs/{org}/installation" ] ,
getRepoInstallation : [ "GET /repos/{owner}/{repo}/installation" ] ,
getSubscriptionPlanForAccount : [
"GET /marketplace_listing/accounts/{account_id}"
] ,
getSubscriptionPlanForAccountStubbed : [
"GET /marketplace_listing/stubbed/accounts/{account_id}"
] ,
getUserInstallation : [ "GET /users/{username}/installation" ] ,
getWebhookConfigForApp : [ "GET /app/hook/config" ] ,
getWebhookDelivery : [ "GET /app/hook/deliveries/{delivery_id}" ] ,
listAccountsForPlan : [ "GET /marketplace_listing/plans/{plan_id}/accounts" ] ,
listAccountsForPlanStubbed : [
"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"
] ,
listInstallationReposForAuthenticatedUser : [
"GET /user/installations/{installation_id}/repositories"
] ,
listInstallationRequestsForAuthenticatedApp : [
"GET /app/installation-requests"
] ,
listInstallations : [ "GET /app/installations" ] ,
listInstallationsForAuthenticatedUser : [ "GET /user/installations" ] ,
listPlans : [ "GET /marketplace_listing/plans" ] ,
listPlansStubbed : [ "GET /marketplace_listing/stubbed/plans" ] ,
listReposAccessibleToInstallation : [ "GET /installation/repositories" ] ,
listSubscriptionsForAuthenticatedUser : [ "GET /user/marketplace_purchases" ] ,
listSubscriptionsForAuthenticatedUserStubbed : [
"GET /user/marketplace_purchases/stubbed"
] ,
listWebhookDeliveries : [ "GET /app/hook/deliveries" ] ,
redeliverWebhookDelivery : [
"POST /app/hook/deliveries/{delivery_id}/attempts"
] ,
removeRepoFromInstallation : [
"DELETE /user/installations/{installation_id}/repositories/{repository_id}" ,
{ } ,
{ renamed : [ "apps" , "removeRepoFromInstallationForAuthenticatedUser" ] }
] ,
removeRepoFromInstallationForAuthenticatedUser : [
"DELETE /user/installations/{installation_id}/repositories/{repository_id}"
] ,
resetToken : [ "PATCH /applications/{client_id}/token" ] ,
revokeInstallationAccessToken : [ "DELETE /installation/token" ] ,
scopeToken : [ "POST /applications/{client_id}/token/scoped" ] ,
suspendInstallation : [ "PUT /app/installations/{installation_id}/suspended" ] ,
unsuspendInstallation : [
"DELETE /app/installations/{installation_id}/suspended"
] ,
updateWebhookConfigForApp : [ "PATCH /app/hook/config" ]
} ,
billing : {
getGithubActionsBillingOrg : [ "GET /orgs/{org}/settings/billing/actions" ] ,
getGithubActionsBillingUser : [
"GET /users/{username}/settings/billing/actions"
] ,
getGithubPackagesBillingOrg : [ "GET /orgs/{org}/settings/billing/packages" ] ,
getGithubPackagesBillingUser : [
"GET /users/{username}/settings/billing/packages"
] ,
getSharedStorageBillingOrg : [
"GET /orgs/{org}/settings/billing/shared-storage"
] ,
getSharedStorageBillingUser : [
"GET /users/{username}/settings/billing/shared-storage"
]
} ,
checks : {
create : [ "POST /repos/{owner}/{repo}/check-runs" ] ,
createSuite : [ "POST /repos/{owner}/{repo}/check-suites" ] ,
get : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}" ] ,
getSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}" ] ,
listAnnotations : [
"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"
] ,
listForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs" ] ,
listForSuite : [
"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"
] ,
listSuitesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites" ] ,
rerequestRun : [
"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"
] ,
rerequestSuite : [
"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"
] ,
setSuitesPreferences : [
"PATCH /repos/{owner}/{repo}/check-suites/preferences"
] ,
update : [ "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}" ]
} ,
codeScanning : {
deleteAnalysis : [
"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"
] ,
getAlert : [
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" ,
{ } ,
{ renamedParameters : { alert _id : "alert_number" } }
] ,
getAnalysis : [
"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"
] ,
getCodeqlDatabase : [
"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"
] ,
getDefaultSetup : [ "GET /repos/{owner}/{repo}/code-scanning/default-setup" ] ,
getSarif : [ "GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}" ] ,
listAlertInstances : [
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"
] ,
listAlertsForOrg : [ "GET /orgs/{org}/code-scanning/alerts" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/code-scanning/alerts" ] ,
listAlertsInstances : [
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" ,
{ } ,
{ renamed : [ "codeScanning" , "listAlertInstances" ] }
] ,
listCodeqlDatabases : [
"GET /repos/{owner}/{repo}/code-scanning/codeql/databases"
] ,
listRecentAnalyses : [ "GET /repos/{owner}/{repo}/code-scanning/analyses" ] ,
updateAlert : [
"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"
] ,
updateDefaultSetup : [
"PATCH /repos/{owner}/{repo}/code-scanning/default-setup"
] ,
uploadSarif : [ "POST /repos/{owner}/{repo}/code-scanning/sarifs" ]
} ,
codesOfConduct : {
getAllCodesOfConduct : [ "GET /codes_of_conduct" ] ,
getConductCode : [ "GET /codes_of_conduct/{key}" ]
} ,
codespaces : {
addRepositoryForSecretForAuthenticatedUser : [
"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
] ,
addSelectedRepoToOrgSecret : [
"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
] ,
checkPermissionsForDevcontainer : [
"GET /repos/{owner}/{repo}/codespaces/permissions_check"
] ,
codespaceMachinesForAuthenticatedUser : [
"GET /user/codespaces/{codespace_name}/machines"
] ,
createForAuthenticatedUser : [ "POST /user/codespaces" ] ,
createOrUpdateOrgSecret : [
"PUT /orgs/{org}/codespaces/secrets/{secret_name}"
] ,
createOrUpdateRepoSecret : [
"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
] ,
createOrUpdateSecretForAuthenticatedUser : [
"PUT /user/codespaces/secrets/{secret_name}"
] ,
createWithPrForAuthenticatedUser : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"
] ,
createWithRepoForAuthenticatedUser : [
"POST /repos/{owner}/{repo}/codespaces"
] ,
deleteForAuthenticatedUser : [ "DELETE /user/codespaces/{codespace_name}" ] ,
deleteFromOrganization : [
"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"
] ,
deleteOrgSecret : [ "DELETE /orgs/{org}/codespaces/secrets/{secret_name}" ] ,
deleteRepoSecret : [
"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
] ,
deleteSecretForAuthenticatedUser : [
"DELETE /user/codespaces/secrets/{secret_name}"
] ,
exportForAuthenticatedUser : [
"POST /user/codespaces/{codespace_name}/exports"
] ,
getCodespacesForUserInOrg : [
"GET /orgs/{org}/members/{username}/codespaces"
] ,
getExportDetailsForAuthenticatedUser : [
"GET /user/codespaces/{codespace_name}/exports/{export_id}"
] ,
getForAuthenticatedUser : [ "GET /user/codespaces/{codespace_name}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/codespaces/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/codespaces/secrets/{secret_name}" ] ,
getPublicKeyForAuthenticatedUser : [
"GET /user/codespaces/secrets/public-key"
] ,
getRepoPublicKey : [
"GET /repos/{owner}/{repo}/codespaces/secrets/public-key"
] ,
getRepoSecret : [
"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
] ,
getSecretForAuthenticatedUser : [
"GET /user/codespaces/secrets/{secret_name}"
] ,
listDevcontainersInRepositoryForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/codespaces/devcontainers"
] ,
listForAuthenticatedUser : [ "GET /user/codespaces" ] ,
listInOrganization : [
"GET /orgs/{org}/codespaces" ,
{ } ,
{ renamedParameters : { org _id : "org" } }
] ,
listInRepositoryForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/codespaces"
] ,
listOrgSecrets : [ "GET /orgs/{org}/codespaces/secrets" ] ,
listRepoSecrets : [ "GET /repos/{owner}/{repo}/codespaces/secrets" ] ,
listRepositoriesForSecretForAuthenticatedUser : [
"GET /user/codespaces/secrets/{secret_name}/repositories"
] ,
listSecretsForAuthenticatedUser : [ "GET /user/codespaces/secrets" ] ,
listSelectedReposForOrgSecret : [
"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
] ,
preFlightWithRepoForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/codespaces/new"
] ,
publishForAuthenticatedUser : [
"POST /user/codespaces/{codespace_name}/publish"
] ,
removeRepositoryForSecretForAuthenticatedUser : [
"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
] ,
removeSelectedRepoFromOrgSecret : [
"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
] ,
repoMachinesForAuthenticatedUser : [
"GET /repos/{owner}/{repo}/codespaces/machines"
] ,
setRepositoriesForSecretForAuthenticatedUser : [
"PUT /user/codespaces/secrets/{secret_name}/repositories"
] ,
setSelectedReposForOrgSecret : [
"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
] ,
startForAuthenticatedUser : [ "POST /user/codespaces/{codespace_name}/start" ] ,
stopForAuthenticatedUser : [ "POST /user/codespaces/{codespace_name}/stop" ] ,
stopInOrganization : [
"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"
] ,
updateForAuthenticatedUser : [ "PATCH /user/codespaces/{codespace_name}" ]
} ,
copilot : {
addCopilotSeatsForTeams : [
"POST /orgs/{org}/copilot/billing/selected_teams"
] ,
addCopilotSeatsForUsers : [
"POST /orgs/{org}/copilot/billing/selected_users"
] ,
cancelCopilotSeatAssignmentForTeams : [
"DELETE /orgs/{org}/copilot/billing/selected_teams"
] ,
cancelCopilotSeatAssignmentForUsers : [
"DELETE /orgs/{org}/copilot/billing/selected_users"
] ,
getCopilotOrganizationDetails : [ "GET /orgs/{org}/copilot/billing" ] ,
getCopilotSeatDetailsForUser : [
"GET /orgs/{org}/members/{username}/copilot"
] ,
listCopilotSeats : [ "GET /orgs/{org}/copilot/billing/seats" ]
} ,
dependabot : {
addSelectedRepoToOrgSecret : [
"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
] ,
createOrUpdateOrgSecret : [
"PUT /orgs/{org}/dependabot/secrets/{secret_name}"
] ,
createOrUpdateRepoSecret : [
"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
] ,
deleteOrgSecret : [ "DELETE /orgs/{org}/dependabot/secrets/{secret_name}" ] ,
deleteRepoSecret : [
"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
] ,
getAlert : [ "GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/dependabot/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/dependabot/secrets/{secret_name}" ] ,
getRepoPublicKey : [
"GET /repos/{owner}/{repo}/dependabot/secrets/public-key"
] ,
getRepoSecret : [
"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
] ,
listAlertsForEnterprise : [
"GET /enterprises/{enterprise}/dependabot/alerts"
] ,
listAlertsForOrg : [ "GET /orgs/{org}/dependabot/alerts" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/dependabot/alerts" ] ,
listOrgSecrets : [ "GET /orgs/{org}/dependabot/secrets" ] ,
listRepoSecrets : [ "GET /repos/{owner}/{repo}/dependabot/secrets" ] ,
listSelectedReposForOrgSecret : [
"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
] ,
removeSelectedRepoFromOrgSecret : [
"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
] ,
setSelectedReposForOrgSecret : [
"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
] ,
updateAlert : [
"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"
]
} ,
dependencyGraph : {
createRepositorySnapshot : [
"POST /repos/{owner}/{repo}/dependency-graph/snapshots"
] ,
diffRange : [
"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"
] ,
exportSbom : [ "GET /repos/{owner}/{repo}/dependency-graph/sbom" ]
} ,
emojis : { get : [ "GET /emojis" ] } ,
gists : {
checkIsStarred : [ "GET /gists/{gist_id}/star" ] ,
create : [ "POST /gists" ] ,
createComment : [ "POST /gists/{gist_id}/comments" ] ,
delete : [ "DELETE /gists/{gist_id}" ] ,
deleteComment : [ "DELETE /gists/{gist_id}/comments/{comment_id}" ] ,
fork : [ "POST /gists/{gist_id}/forks" ] ,
get : [ "GET /gists/{gist_id}" ] ,
getComment : [ "GET /gists/{gist_id}/comments/{comment_id}" ] ,
getRevision : [ "GET /gists/{gist_id}/{sha}" ] ,
list : [ "GET /gists" ] ,
listComments : [ "GET /gists/{gist_id}/comments" ] ,
listCommits : [ "GET /gists/{gist_id}/commits" ] ,
listForUser : [ "GET /users/{username}/gists" ] ,
listForks : [ "GET /gists/{gist_id}/forks" ] ,
listPublic : [ "GET /gists/public" ] ,
listStarred : [ "GET /gists/starred" ] ,
star : [ "PUT /gists/{gist_id}/star" ] ,
unstar : [ "DELETE /gists/{gist_id}/star" ] ,
update : [ "PATCH /gists/{gist_id}" ] ,
updateComment : [ "PATCH /gists/{gist_id}/comments/{comment_id}" ]
} ,
git : {
createBlob : [ "POST /repos/{owner}/{repo}/git/blobs" ] ,
createCommit : [ "POST /repos/{owner}/{repo}/git/commits" ] ,
createRef : [ "POST /repos/{owner}/{repo}/git/refs" ] ,
createTag : [ "POST /repos/{owner}/{repo}/git/tags" ] ,
createTree : [ "POST /repos/{owner}/{repo}/git/trees" ] ,
deleteRef : [ "DELETE /repos/{owner}/{repo}/git/refs/{ref}" ] ,
getBlob : [ "GET /repos/{owner}/{repo}/git/blobs/{file_sha}" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/git/commits/{commit_sha}" ] ,
getRef : [ "GET /repos/{owner}/{repo}/git/ref/{ref}" ] ,
getTag : [ "GET /repos/{owner}/{repo}/git/tags/{tag_sha}" ] ,
getTree : [ "GET /repos/{owner}/{repo}/git/trees/{tree_sha}" ] ,
listMatchingRefs : [ "GET /repos/{owner}/{repo}/git/matching-refs/{ref}" ] ,
updateRef : [ "PATCH /repos/{owner}/{repo}/git/refs/{ref}" ]
} ,
gitignore : {
getAllTemplates : [ "GET /gitignore/templates" ] ,
getTemplate : [ "GET /gitignore/templates/{name}" ]
} ,
interactions : {
getRestrictionsForAuthenticatedUser : [ "GET /user/interaction-limits" ] ,
getRestrictionsForOrg : [ "GET /orgs/{org}/interaction-limits" ] ,
getRestrictionsForRepo : [ "GET /repos/{owner}/{repo}/interaction-limits" ] ,
getRestrictionsForYourPublicRepos : [
"GET /user/interaction-limits" ,
{ } ,
{ renamed : [ "interactions" , "getRestrictionsForAuthenticatedUser" ] }
] ,
removeRestrictionsForAuthenticatedUser : [ "DELETE /user/interaction-limits" ] ,
removeRestrictionsForOrg : [ "DELETE /orgs/{org}/interaction-limits" ] ,
removeRestrictionsForRepo : [
"DELETE /repos/{owner}/{repo}/interaction-limits"
] ,
removeRestrictionsForYourPublicRepos : [
"DELETE /user/interaction-limits" ,
{ } ,
{ renamed : [ "interactions" , "removeRestrictionsForAuthenticatedUser" ] }
] ,
setRestrictionsForAuthenticatedUser : [ "PUT /user/interaction-limits" ] ,
setRestrictionsForOrg : [ "PUT /orgs/{org}/interaction-limits" ] ,
setRestrictionsForRepo : [ "PUT /repos/{owner}/{repo}/interaction-limits" ] ,
setRestrictionsForYourPublicRepos : [
"PUT /user/interaction-limits" ,
{ } ,
{ renamed : [ "interactions" , "setRestrictionsForAuthenticatedUser" ] }
]
} ,
issues : {
addAssignees : [
"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"
] ,
addLabels : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
checkUserCanBeAssigned : [ "GET /repos/{owner}/{repo}/assignees/{assignee}" ] ,
checkUserCanBeAssignedToIssue : [
"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}"
] ,
create : [ "POST /repos/{owner}/{repo}/issues" ] ,
createComment : [
"POST /repos/{owner}/{repo}/issues/{issue_number}/comments"
] ,
createLabel : [ "POST /repos/{owner}/{repo}/labels" ] ,
createMilestone : [ "POST /repos/{owner}/{repo}/milestones" ] ,
deleteComment : [
"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"
] ,
deleteLabel : [ "DELETE /repos/{owner}/{repo}/labels/{name}" ] ,
deleteMilestone : [
"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"
] ,
get : [ "GET /repos/{owner}/{repo}/issues/{issue_number}" ] ,
getComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
getEvent : [ "GET /repos/{owner}/{repo}/issues/events/{event_id}" ] ,
getLabel : [ "GET /repos/{owner}/{repo}/labels/{name}" ] ,
getMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
list : [ "GET /issues" ] ,
listAssignees : [ "GET /repos/{owner}/{repo}/assignees" ] ,
listComments : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
listCommentsForRepo : [ "GET /repos/{owner}/{repo}/issues/comments" ] ,
listEvents : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/events" ] ,
listEventsForRepo : [ "GET /repos/{owner}/{repo}/issues/events" ] ,
listEventsForTimeline : [
"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"
] ,
listForAuthenticatedUser : [ "GET /user/issues" ] ,
listForOrg : [ "GET /orgs/{org}/issues" ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/issues" ] ,
listLabelsForMilestone : [
"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"
] ,
listLabelsForRepo : [ "GET /repos/{owner}/{repo}/labels" ] ,
listLabelsOnIssue : [
"GET /repos/{owner}/{repo}/issues/{issue_number}/labels"
] ,
listMilestones : [ "GET /repos/{owner}/{repo}/milestones" ] ,
lock : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
removeAllLabels : [
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"
] ,
removeAssignees : [
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"
] ,
removeLabel : [
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"
] ,
setLabels : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
unlock : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
update : [ "PATCH /repos/{owner}/{repo}/issues/{issue_number}" ] ,
updateComment : [ "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
updateLabel : [ "PATCH /repos/{owner}/{repo}/labels/{name}" ] ,
updateMilestone : [
"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"
]
} ,
licenses : {
get : [ "GET /licenses/{license}" ] ,
getAllCommonlyUsed : [ "GET /licenses" ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/license" ]
} ,
markdown : {
render : [ "POST /markdown" ] ,
renderRaw : [
"POST /markdown/raw" ,
{ headers : { "content-type" : "text/plain; charset=utf-8" } }
]
} ,
meta : {
get : [ "GET /meta" ] ,
getAllVersions : [ "GET /versions" ] ,
getOctocat : [ "GET /octocat" ] ,
getZen : [ "GET /zen" ] ,
root : [ "GET /" ]
} ,
migrations : {
cancelImport : [
"DELETE /repos/{owner}/{repo}/import" ,
{ } ,
{
deprecated : "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import"
}
] ,
deleteArchiveForAuthenticatedUser : [
"DELETE /user/migrations/{migration_id}/archive"
] ,
deleteArchiveForOrg : [
"DELETE /orgs/{org}/migrations/{migration_id}/archive"
] ,
downloadArchiveForOrg : [
"GET /orgs/{org}/migrations/{migration_id}/archive"
] ,
getArchiveForAuthenticatedUser : [
"GET /user/migrations/{migration_id}/archive"
] ,
getCommitAuthors : [
"GET /repos/{owner}/{repo}/import/authors" ,
{ } ,
{
deprecated : "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors"
}
] ,
getImportStatus : [
"GET /repos/{owner}/{repo}/import" ,
{ } ,
{
deprecated : "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status"
}
] ,
getLargeFiles : [
"GET /repos/{owner}/{repo}/import/large_files" ,
{ } ,
{
deprecated : "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files"
}
] ,
getStatusForAuthenticatedUser : [ "GET /user/migrations/{migration_id}" ] ,
getStatusForOrg : [ "GET /orgs/{org}/migrations/{migration_id}" ] ,
listForAuthenticatedUser : [ "GET /user/migrations" ] ,
listForOrg : [ "GET /orgs/{org}/migrations" ] ,
listReposForAuthenticatedUser : [
"GET /user/migrations/{migration_id}/repositories"
] ,
listReposForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/repositories" ] ,
listReposForUser : [
"GET /user/migrations/{migration_id}/repositories" ,
{ } ,
{ renamed : [ "migrations" , "listReposForAuthenticatedUser" ] }
] ,
mapCommitAuthor : [
"PATCH /repos/{owner}/{repo}/import/authors/{author_id}" ,
{ } ,
{
deprecated : "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author"
}
] ,
setLfsPreference : [
"PATCH /repos/{owner}/{repo}/import/lfs" ,
{ } ,
{
deprecated : "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference"
}
] ,
startForAuthenticatedUser : [ "POST /user/migrations" ] ,
startForOrg : [ "POST /orgs/{org}/migrations" ] ,
startImport : [
"PUT /repos/{owner}/{repo}/import" ,
{ } ,
{
deprecated : "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import"
}
] ,
unlockRepoForAuthenticatedUser : [
"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"
] ,
unlockRepoForOrg : [
"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
] ,
updateImport : [
"PATCH /repos/{owner}/{repo}/import" ,
{ } ,
{
deprecated : "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import"
}
]
} ,
oidc : {
getOidcCustomSubTemplateForOrg : [
"GET /orgs/{org}/actions/oidc/customization/sub"
] ,
updateOidcCustomSubTemplateForOrg : [
"PUT /orgs/{org}/actions/oidc/customization/sub"
]
} ,
orgs : {
addSecurityManagerTeam : [
"PUT /orgs/{org}/security-managers/teams/{team_slug}"
] ,
assignTeamToOrgRole : [
"PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}"
] ,
assignUserToOrgRole : [
"PUT /orgs/{org}/organization-roles/users/{username}/{role_id}"
] ,
blockUser : [ "PUT /orgs/{org}/blocks/{username}" ] ,
cancelInvitation : [ "DELETE /orgs/{org}/invitations/{invitation_id}" ] ,
checkBlockedUser : [ "GET /orgs/{org}/blocks/{username}" ] ,
checkMembershipForUser : [ "GET /orgs/{org}/members/{username}" ] ,
checkPublicMembershipForUser : [ "GET /orgs/{org}/public_members/{username}" ] ,
convertMemberToOutsideCollaborator : [
"PUT /orgs/{org}/outside_collaborators/{username}"
] ,
createCustomOrganizationRole : [ "POST /orgs/{org}/organization-roles" ] ,
createInvitation : [ "POST /orgs/{org}/invitations" ] ,
createOrUpdateCustomProperties : [ "PATCH /orgs/{org}/properties/schema" ] ,
createOrUpdateCustomPropertiesValuesForRepos : [
"PATCH /orgs/{org}/properties/values"
] ,
createOrUpdateCustomProperty : [
"PUT /orgs/{org}/properties/schema/{custom_property_name}"
] ,
createWebhook : [ "POST /orgs/{org}/hooks" ] ,
delete : [ "DELETE /orgs/{org}" ] ,
deleteCustomOrganizationRole : [
"DELETE /orgs/{org}/organization-roles/{role_id}"
] ,
deleteWebhook : [ "DELETE /orgs/{org}/hooks/{hook_id}" ] ,
enableOrDisableSecurityProductOnAllOrgRepos : [
"POST /orgs/{org}/{security_product}/{enablement}"
] ,
get : [ "GET /orgs/{org}" ] ,
getAllCustomProperties : [ "GET /orgs/{org}/properties/schema" ] ,
getCustomProperty : [
"GET /orgs/{org}/properties/schema/{custom_property_name}"
] ,
getMembershipForAuthenticatedUser : [ "GET /user/memberships/orgs/{org}" ] ,
getMembershipForUser : [ "GET /orgs/{org}/memberships/{username}" ] ,
getOrgRole : [ "GET /orgs/{org}/organization-roles/{role_id}" ] ,
getWebhook : [ "GET /orgs/{org}/hooks/{hook_id}" ] ,
getWebhookConfigForOrg : [ "GET /orgs/{org}/hooks/{hook_id}/config" ] ,
getWebhookDelivery : [
"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"
] ,
list : [ "GET /organizations" ] ,
listAppInstallations : [ "GET /orgs/{org}/installations" ] ,
listBlockedUsers : [ "GET /orgs/{org}/blocks" ] ,
listCustomPropertiesValuesForRepos : [ "GET /orgs/{org}/properties/values" ] ,
listFailedInvitations : [ "GET /orgs/{org}/failed_invitations" ] ,
listForAuthenticatedUser : [ "GET /user/orgs" ] ,
listForUser : [ "GET /users/{username}/orgs" ] ,
listInvitationTeams : [ "GET /orgs/{org}/invitations/{invitation_id}/teams" ] ,
listMembers : [ "GET /orgs/{org}/members" ] ,
listMembershipsForAuthenticatedUser : [ "GET /user/memberships/orgs" ] ,
listOrgRoleTeams : [ "GET /orgs/{org}/organization-roles/{role_id}/teams" ] ,
listOrgRoleUsers : [ "GET /orgs/{org}/organization-roles/{role_id}/users" ] ,
listOrgRoles : [ "GET /orgs/{org}/organization-roles" ] ,
listOrganizationFineGrainedPermissions : [
"GET /orgs/{org}/organization-fine-grained-permissions"
] ,
listOutsideCollaborators : [ "GET /orgs/{org}/outside_collaborators" ] ,
listPatGrantRepositories : [
"GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories"
] ,
listPatGrantRequestRepositories : [
"GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories"
] ,
listPatGrantRequests : [ "GET /orgs/{org}/personal-access-token-requests" ] ,
listPatGrants : [ "GET /orgs/{org}/personal-access-tokens" ] ,
listPendingInvitations : [ "GET /orgs/{org}/invitations" ] ,
listPublicMembers : [ "GET /orgs/{org}/public_members" ] ,
listSecurityManagerTeams : [ "GET /orgs/{org}/security-managers" ] ,
listWebhookDeliveries : [ "GET /orgs/{org}/hooks/{hook_id}/deliveries" ] ,
listWebhooks : [ "GET /orgs/{org}/hooks" ] ,
patchCustomOrganizationRole : [
"PATCH /orgs/{org}/organization-roles/{role_id}"
] ,
pingWebhook : [ "POST /orgs/{org}/hooks/{hook_id}/pings" ] ,
redeliverWebhookDelivery : [
"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
] ,
removeCustomProperty : [
"DELETE /orgs/{org}/properties/schema/{custom_property_name}"
] ,
removeMember : [ "DELETE /orgs/{org}/members/{username}" ] ,
removeMembershipForUser : [ "DELETE /orgs/{org}/memberships/{username}" ] ,
removeOutsideCollaborator : [
"DELETE /orgs/{org}/outside_collaborators/{username}"
] ,
removePublicMembershipForAuthenticatedUser : [
"DELETE /orgs/{org}/public_members/{username}"
] ,
removeSecurityManagerTeam : [
"DELETE /orgs/{org}/security-managers/teams/{team_slug}"
] ,
reviewPatGrantRequest : [
"POST /orgs/{org}/personal-access-token-requests/{pat_request_id}"
] ,
reviewPatGrantRequestsInBulk : [
"POST /orgs/{org}/personal-access-token-requests"
] ,
revokeAllOrgRolesTeam : [
"DELETE /orgs/{org}/organization-roles/teams/{team_slug}"
] ,
revokeAllOrgRolesUser : [
"DELETE /orgs/{org}/organization-roles/users/{username}"
] ,
revokeOrgRoleTeam : [
"DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}"
] ,
revokeOrgRoleUser : [
"DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}"
] ,
setMembershipForUser : [ "PUT /orgs/{org}/memberships/{username}" ] ,
setPublicMembershipForAuthenticatedUser : [
"PUT /orgs/{org}/public_members/{username}"
] ,
unblockUser : [ "DELETE /orgs/{org}/blocks/{username}" ] ,
update : [ "PATCH /orgs/{org}" ] ,
updateMembershipForAuthenticatedUser : [
"PATCH /user/memberships/orgs/{org}"
] ,
updatePatAccess : [ "POST /orgs/{org}/personal-access-tokens/{pat_id}" ] ,
updatePatAccesses : [ "POST /orgs/{org}/personal-access-tokens" ] ,
updateWebhook : [ "PATCH /orgs/{org}/hooks/{hook_id}" ] ,
updateWebhookConfigForOrg : [ "PATCH /orgs/{org}/hooks/{hook_id}/config" ]
} ,
packages : {
deletePackageForAuthenticatedUser : [
"DELETE /user/packages/{package_type}/{package_name}"
] ,
deletePackageForOrg : [
"DELETE /orgs/{org}/packages/{package_type}/{package_name}"
] ,
deletePackageForUser : [
"DELETE /users/{username}/packages/{package_type}/{package_name}"
] ,
deletePackageVersionForAuthenticatedUser : [
"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
deletePackageVersionForOrg : [
"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
deletePackageVersionForUser : [
"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
getAllPackageVersionsForAPackageOwnedByAnOrg : [
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions" ,
{ } ,
{ renamed : [ "packages" , "getAllPackageVersionsForPackageOwnedByOrg" ] }
] ,
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser : [
"GET /user/packages/{package_type}/{package_name}/versions" ,
{ } ,
{
renamed : [
"packages" ,
"getAllPackageVersionsForPackageOwnedByAuthenticatedUser"
]
}
] ,
getAllPackageVersionsForPackageOwnedByAuthenticatedUser : [
"GET /user/packages/{package_type}/{package_name}/versions"
] ,
getAllPackageVersionsForPackageOwnedByOrg : [
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions"
] ,
getAllPackageVersionsForPackageOwnedByUser : [
"GET /users/{username}/packages/{package_type}/{package_name}/versions"
] ,
getPackageForAuthenticatedUser : [
"GET /user/packages/{package_type}/{package_name}"
] ,
getPackageForOrganization : [
"GET /orgs/{org}/packages/{package_type}/{package_name}"
] ,
getPackageForUser : [
"GET /users/{username}/packages/{package_type}/{package_name}"
] ,
getPackageVersionForAuthenticatedUser : [
"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
getPackageVersionForOrganization : [
"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
getPackageVersionForUser : [
"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
] ,
listDockerMigrationConflictingPackagesForAuthenticatedUser : [
"GET /user/docker/conflicts"
] ,
listDockerMigrationConflictingPackagesForOrganization : [
"GET /orgs/{org}/docker/conflicts"
] ,
listDockerMigrationConflictingPackagesForUser : [
"GET /users/{username}/docker/conflicts"
] ,
listPackagesForAuthenticatedUser : [ "GET /user/packages" ] ,
listPackagesForOrganization : [ "GET /orgs/{org}/packages" ] ,
listPackagesForUser : [ "GET /users/{username}/packages" ] ,
restorePackageForAuthenticatedUser : [
"POST /user/packages/{package_type}/{package_name}/restore{?token}"
] ,
restorePackageForOrg : [
"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"
] ,
restorePackageForUser : [
"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"
] ,
restorePackageVersionForAuthenticatedUser : [
"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
] ,
restorePackageVersionForOrg : [
"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
] ,
restorePackageVersionForUser : [
"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
]
} ,
projects : {
addCollaborator : [ "PUT /projects/{project_id}/collaborators/{username}" ] ,
createCard : [ "POST /projects/columns/{column_id}/cards" ] ,
createColumn : [ "POST /projects/{project_id}/columns" ] ,
createForAuthenticatedUser : [ "POST /user/projects" ] ,
createForOrg : [ "POST /orgs/{org}/projects" ] ,
createForRepo : [ "POST /repos/{owner}/{repo}/projects" ] ,
delete : [ "DELETE /projects/{project_id}" ] ,
deleteCard : [ "DELETE /projects/columns/cards/{card_id}" ] ,
deleteColumn : [ "DELETE /projects/columns/{column_id}" ] ,
get : [ "GET /projects/{project_id}" ] ,
getCard : [ "GET /projects/columns/cards/{card_id}" ] ,
getColumn : [ "GET /projects/columns/{column_id}" ] ,
getPermissionForUser : [
"GET /projects/{project_id}/collaborators/{username}/permission"
] ,
listCards : [ "GET /projects/columns/{column_id}/cards" ] ,
listCollaborators : [ "GET /projects/{project_id}/collaborators" ] ,
listColumns : [ "GET /projects/{project_id}/columns" ] ,
listForOrg : [ "GET /orgs/{org}/projects" ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/projects" ] ,
listForUser : [ "GET /users/{username}/projects" ] ,
moveCard : [ "POST /projects/columns/cards/{card_id}/moves" ] ,
moveColumn : [ "POST /projects/columns/{column_id}/moves" ] ,
removeCollaborator : [
"DELETE /projects/{project_id}/collaborators/{username}"
] ,
update : [ "PATCH /projects/{project_id}" ] ,
updateCard : [ "PATCH /projects/columns/cards/{card_id}" ] ,
updateColumn : [ "PATCH /projects/columns/{column_id}" ]
} ,
pulls : {
checkIfMerged : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
create : [ "POST /repos/{owner}/{repo}/pulls" ] ,
createReplyForReviewComment : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"
] ,
createReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
createReviewComment : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"
] ,
deletePendingReview : [
"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
] ,
deleteReviewComment : [
"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"
] ,
dismissReview : [
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"
] ,
get : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
getReview : [
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
] ,
getReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
list : [ "GET /repos/{owner}/{repo}/pulls" ] ,
listCommentsForReview : [
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"
] ,
listCommits : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits" ] ,
listFiles : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files" ] ,
listRequestedReviewers : [
"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
] ,
listReviewComments : [
"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"
] ,
listReviewCommentsForRepo : [ "GET /repos/{owner}/{repo}/pulls/comments" ] ,
listReviews : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
merge : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
removeRequestedReviewers : [
"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
] ,
requestReviewers : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
] ,
submitReview : [
"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"
] ,
update : [ "PATCH /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
updateBranch : [
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"
] ,
updateReview : [
"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
] ,
updateReviewComment : [
"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"
]
} ,
rateLimit : { get : [ "GET /rate_limit" ] } ,
reactions : {
createForCommitComment : [
"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"
] ,
createForIssue : [
"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"
] ,
createForIssueComment : [
"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
] ,
createForPullRequestReviewComment : [
"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
] ,
createForRelease : [
"POST /repos/{owner}/{repo}/releases/{release_id}/reactions"
] ,
createForTeamDiscussionCommentInOrg : [
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
] ,
createForTeamDiscussionInOrg : [
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
] ,
deleteForCommitComment : [
"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"
] ,
deleteForIssue : [
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"
] ,
deleteForIssueComment : [
"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"
] ,
deleteForPullRequestComment : [
"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"
] ,
deleteForRelease : [
"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"
] ,
deleteForTeamDiscussion : [
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"
] ,
deleteForTeamDiscussionComment : [
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"
] ,
listForCommitComment : [
"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"
] ,
listForIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions" ] ,
listForIssueComment : [
"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
] ,
listForPullRequestReviewComment : [
"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
] ,
listForRelease : [
"GET /repos/{owner}/{repo}/releases/{release_id}/reactions"
] ,
listForTeamDiscussionCommentInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
] ,
listForTeamDiscussionInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
]
} ,
repos : {
acceptInvitation : [
"PATCH /user/repository_invitations/{invitation_id}" ,
{ } ,
{ renamed : [ "repos" , "acceptInvitationForAuthenticatedUser" ] }
] ,
acceptInvitationForAuthenticatedUser : [
"PATCH /user/repository_invitations/{invitation_id}"
] ,
addAppAccessRestrictions : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ,
{ } ,
{ mapToData : "apps" }
] ,
addCollaborator : [ "PUT /repos/{owner}/{repo}/collaborators/{username}" ] ,
addStatusCheckContexts : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ,
{ } ,
{ mapToData : "contexts" }
] ,
addTeamAccessRestrictions : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ,
{ } ,
{ mapToData : "teams" }
] ,
addUserAccessRestrictions : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ,
{ } ,
{ mapToData : "users" }
] ,
cancelPagesDeployment : [
"POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel"
] ,
checkAutomatedSecurityFixes : [
"GET /repos/{owner}/{repo}/automated-security-fixes"
] ,
checkCollaborator : [ "GET /repos/{owner}/{repo}/collaborators/{username}" ] ,
checkVulnerabilityAlerts : [
"GET /repos/{owner}/{repo}/vulnerability-alerts"
] ,
codeownersErrors : [ "GET /repos/{owner}/{repo}/codeowners/errors" ] ,
compareCommits : [ "GET /repos/{owner}/{repo}/compare/{base}...{head}" ] ,
compareCommitsWithBasehead : [
"GET /repos/{owner}/{repo}/compare/{basehead}"
] ,
createAutolink : [ "POST /repos/{owner}/{repo}/autolinks" ] ,
createCommitComment : [
"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"
] ,
createCommitSignatureProtection : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
] ,
createCommitStatus : [ "POST /repos/{owner}/{repo}/statuses/{sha}" ] ,
createDeployKey : [ "POST /repos/{owner}/{repo}/keys" ] ,
createDeployment : [ "POST /repos/{owner}/{repo}/deployments" ] ,
createDeploymentBranchPolicy : [
"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
] ,
createDeploymentProtectionRule : [
"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
] ,
createDeploymentStatus : [
"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
] ,
createDispatchEvent : [ "POST /repos/{owner}/{repo}/dispatches" ] ,
createForAuthenticatedUser : [ "POST /user/repos" ] ,
createFork : [ "POST /repos/{owner}/{repo}/forks" ] ,
createInOrg : [ "POST /orgs/{org}/repos" ] ,
createOrUpdateCustomPropertiesValues : [
"PATCH /repos/{owner}/{repo}/properties/values"
] ,
createOrUpdateEnvironment : [
"PUT /repos/{owner}/{repo}/environments/{environment_name}"
] ,
createOrUpdateFileContents : [ "PUT /repos/{owner}/{repo}/contents/{path}" ] ,
createOrgRuleset : [ "POST /orgs/{org}/rulesets" ] ,
createPagesDeployment : [ "POST /repos/{owner}/{repo}/pages/deployments" ] ,
createPagesSite : [ "POST /repos/{owner}/{repo}/pages" ] ,
createRelease : [ "POST /repos/{owner}/{repo}/releases" ] ,
createRepoRuleset : [ "POST /repos/{owner}/{repo}/rulesets" ] ,
createTagProtection : [ "POST /repos/{owner}/{repo}/tags/protection" ] ,
createUsingTemplate : [
"POST /repos/{template_owner}/{template_repo}/generate"
] ,
createWebhook : [ "POST /repos/{owner}/{repo}/hooks" ] ,
declineInvitation : [
"DELETE /user/repository_invitations/{invitation_id}" ,
{ } ,
{ renamed : [ "repos" , "declineInvitationForAuthenticatedUser" ] }
] ,
declineInvitationForAuthenticatedUser : [
"DELETE /user/repository_invitations/{invitation_id}"
] ,
delete : [ "DELETE /repos/{owner}/{repo}" ] ,
deleteAccessRestrictions : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
] ,
deleteAdminBranchProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
] ,
deleteAnEnvironment : [
"DELETE /repos/{owner}/{repo}/environments/{environment_name}"
] ,
deleteAutolink : [ "DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}" ] ,
deleteBranchProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection"
] ,
deleteCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}" ] ,
deleteCommitSignatureProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
] ,
deleteDeployKey : [ "DELETE /repos/{owner}/{repo}/keys/{key_id}" ] ,
deleteDeployment : [
"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"
] ,
deleteDeploymentBranchPolicy : [
"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
] ,
deleteFile : [ "DELETE /repos/{owner}/{repo}/contents/{path}" ] ,
deleteInvitation : [
"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"
] ,
deleteOrgRuleset : [ "DELETE /orgs/{org}/rulesets/{ruleset_id}" ] ,
deletePagesSite : [ "DELETE /repos/{owner}/{repo}/pages" ] ,
deletePullRequestReviewProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
] ,
deleteRelease : [ "DELETE /repos/{owner}/{repo}/releases/{release_id}" ] ,
deleteReleaseAsset : [
"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"
] ,
deleteRepoRuleset : [ "DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}" ] ,
deleteTagProtection : [
"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"
] ,
deleteWebhook : [ "DELETE /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
disableAutomatedSecurityFixes : [
"DELETE /repos/{owner}/{repo}/automated-security-fixes"
] ,
disableDeploymentProtectionRule : [
"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
] ,
disablePrivateVulnerabilityReporting : [
"DELETE /repos/{owner}/{repo}/private-vulnerability-reporting"
] ,
disableVulnerabilityAlerts : [
"DELETE /repos/{owner}/{repo}/vulnerability-alerts"
] ,
downloadArchive : [
"GET /repos/{owner}/{repo}/zipball/{ref}" ,
{ } ,
{ renamed : [ "repos" , "downloadZipballArchive" ] }
] ,
downloadTarballArchive : [ "GET /repos/{owner}/{repo}/tarball/{ref}" ] ,
downloadZipballArchive : [ "GET /repos/{owner}/{repo}/zipball/{ref}" ] ,
enableAutomatedSecurityFixes : [
"PUT /repos/{owner}/{repo}/automated-security-fixes"
] ,
enablePrivateVulnerabilityReporting : [
"PUT /repos/{owner}/{repo}/private-vulnerability-reporting"
] ,
enableVulnerabilityAlerts : [
"PUT /repos/{owner}/{repo}/vulnerability-alerts"
] ,
generateReleaseNotes : [
"POST /repos/{owner}/{repo}/releases/generate-notes"
] ,
get : [ "GET /repos/{owner}/{repo}" ] ,
getAccessRestrictions : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
] ,
getAdminBranchProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
] ,
getAllDeploymentProtectionRules : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
] ,
getAllEnvironments : [ "GET /repos/{owner}/{repo}/environments" ] ,
getAllStatusCheckContexts : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"
] ,
getAllTopics : [ "GET /repos/{owner}/{repo}/topics" ] ,
getAppsWithAccessToProtectedBranch : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"
] ,
getAutolink : [ "GET /repos/{owner}/{repo}/autolinks/{autolink_id}" ] ,
getBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}" ] ,
getBranchProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection"
] ,
getBranchRules : [ "GET /repos/{owner}/{repo}/rules/branches/{branch}" ] ,
getClones : [ "GET /repos/{owner}/{repo}/traffic/clones" ] ,
getCodeFrequencyStats : [ "GET /repos/{owner}/{repo}/stats/code_frequency" ] ,
getCollaboratorPermissionLevel : [
"GET /repos/{owner}/{repo}/collaborators/{username}/permission"
] ,
getCombinedStatusForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/status" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/commits/{ref}" ] ,
getCommitActivityStats : [ "GET /repos/{owner}/{repo}/stats/commit_activity" ] ,
getCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}" ] ,
getCommitSignatureProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
] ,
getCommunityProfileMetrics : [ "GET /repos/{owner}/{repo}/community/profile" ] ,
getContent : [ "GET /repos/{owner}/{repo}/contents/{path}" ] ,
getContributorsStats : [ "GET /repos/{owner}/{repo}/stats/contributors" ] ,
getCustomDeploymentProtectionRule : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
] ,
getCustomPropertiesValues : [ "GET /repos/{owner}/{repo}/properties/values" ] ,
getDeployKey : [ "GET /repos/{owner}/{repo}/keys/{key_id}" ] ,
getDeployment : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
getDeploymentBranchPolicy : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
] ,
getDeploymentStatus : [
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"
] ,
getEnvironment : [
"GET /repos/{owner}/{repo}/environments/{environment_name}"
] ,
getLatestPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/latest" ] ,
getLatestRelease : [ "GET /repos/{owner}/{repo}/releases/latest" ] ,
getOrgRuleSuite : [ "GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}" ] ,
getOrgRuleSuites : [ "GET /orgs/{org}/rulesets/rule-suites" ] ,
getOrgRuleset : [ "GET /orgs/{org}/rulesets/{ruleset_id}" ] ,
getOrgRulesets : [ "GET /orgs/{org}/rulesets" ] ,
getPages : [ "GET /repos/{owner}/{repo}/pages" ] ,
getPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/{build_id}" ] ,
getPagesDeployment : [
"GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}"
] ,
getPagesHealthCheck : [ "GET /repos/{owner}/{repo}/pages/health" ] ,
getParticipationStats : [ "GET /repos/{owner}/{repo}/stats/participation" ] ,
getPullRequestReviewProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
] ,
getPunchCardStats : [ "GET /repos/{owner}/{repo}/stats/punch_card" ] ,
getReadme : [ "GET /repos/{owner}/{repo}/readme" ] ,
getReadmeInDirectory : [ "GET /repos/{owner}/{repo}/readme/{dir}" ] ,
getRelease : [ "GET /repos/{owner}/{repo}/releases/{release_id}" ] ,
getReleaseAsset : [ "GET /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
getReleaseByTag : [ "GET /repos/{owner}/{repo}/releases/tags/{tag}" ] ,
getRepoRuleSuite : [
"GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}"
] ,
getRepoRuleSuites : [ "GET /repos/{owner}/{repo}/rulesets/rule-suites" ] ,
getRepoRuleset : [ "GET /repos/{owner}/{repo}/rulesets/{ruleset_id}" ] ,
getRepoRulesets : [ "GET /repos/{owner}/{repo}/rulesets" ] ,
getStatusChecksProtection : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
] ,
getTeamsWithAccessToProtectedBranch : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"
] ,
getTopPaths : [ "GET /repos/{owner}/{repo}/traffic/popular/paths" ] ,
getTopReferrers : [ "GET /repos/{owner}/{repo}/traffic/popular/referrers" ] ,
getUsersWithAccessToProtectedBranch : [
"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"
] ,
getViews : [ "GET /repos/{owner}/{repo}/traffic/views" ] ,
getWebhook : [ "GET /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
getWebhookConfigForRepo : [
"GET /repos/{owner}/{repo}/hooks/{hook_id}/config"
] ,
getWebhookDelivery : [
"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"
] ,
listActivities : [ "GET /repos/{owner}/{repo}/activity" ] ,
listAutolinks : [ "GET /repos/{owner}/{repo}/autolinks" ] ,
listBranches : [ "GET /repos/{owner}/{repo}/branches" ] ,
listBranchesForHeadCommit : [
"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"
] ,
listCollaborators : [ "GET /repos/{owner}/{repo}/collaborators" ] ,
listCommentsForCommit : [
"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"
] ,
listCommitCommentsForRepo : [ "GET /repos/{owner}/{repo}/comments" ] ,
listCommitStatusesForRef : [
"GET /repos/{owner}/{repo}/commits/{ref}/statuses"
] ,
listCommits : [ "GET /repos/{owner}/{repo}/commits" ] ,
listContributors : [ "GET /repos/{owner}/{repo}/contributors" ] ,
listCustomDeploymentRuleIntegrations : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"
] ,
listDeployKeys : [ "GET /repos/{owner}/{repo}/keys" ] ,
listDeploymentBranchPolicies : [
"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
] ,
listDeploymentStatuses : [
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
] ,
listDeployments : [ "GET /repos/{owner}/{repo}/deployments" ] ,
listForAuthenticatedUser : [ "GET /user/repos" ] ,
listForOrg : [ "GET /orgs/{org}/repos" ] ,
listForUser : [ "GET /users/{username}/repos" ] ,
listForks : [ "GET /repos/{owner}/{repo}/forks" ] ,
listInvitations : [ "GET /repos/{owner}/{repo}/invitations" ] ,
listInvitationsForAuthenticatedUser : [ "GET /user/repository_invitations" ] ,
listLanguages : [ "GET /repos/{owner}/{repo}/languages" ] ,
listPagesBuilds : [ "GET /repos/{owner}/{repo}/pages/builds" ] ,
listPublic : [ "GET /repositories" ] ,
listPullRequestsAssociatedWithCommit : [
"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"
] ,
listReleaseAssets : [
"GET /repos/{owner}/{repo}/releases/{release_id}/assets"
] ,
listReleases : [ "GET /repos/{owner}/{repo}/releases" ] ,
listTagProtection : [ "GET /repos/{owner}/{repo}/tags/protection" ] ,
listTags : [ "GET /repos/{owner}/{repo}/tags" ] ,
listTeams : [ "GET /repos/{owner}/{repo}/teams" ] ,
listWebhookDeliveries : [
"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"
] ,
listWebhooks : [ "GET /repos/{owner}/{repo}/hooks" ] ,
merge : [ "POST /repos/{owner}/{repo}/merges" ] ,
mergeUpstream : [ "POST /repos/{owner}/{repo}/merge-upstream" ] ,
pingWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings" ] ,
redeliverWebhookDelivery : [
"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
] ,
removeAppAccessRestrictions : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ,
{ } ,
{ mapToData : "apps" }
] ,
removeCollaborator : [
"DELETE /repos/{owner}/{repo}/collaborators/{username}"
] ,
removeStatusCheckContexts : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ,
{ } ,
{ mapToData : "contexts" }
] ,
removeStatusCheckProtection : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
] ,
removeTeamAccessRestrictions : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ,
{ } ,
{ mapToData : "teams" }
] ,
removeUserAccessRestrictions : [
"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ,
{ } ,
{ mapToData : "users" }
] ,
renameBranch : [ "POST /repos/{owner}/{repo}/branches/{branch}/rename" ] ,
replaceAllTopics : [ "PUT /repos/{owner}/{repo}/topics" ] ,
requestPagesBuild : [ "POST /repos/{owner}/{repo}/pages/builds" ] ,
setAdminBranchProtection : [
"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
] ,
setAppAccessRestrictions : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ,
{ } ,
{ mapToData : "apps" }
] ,
setStatusCheckContexts : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ,
{ } ,
{ mapToData : "contexts" }
] ,
setTeamAccessRestrictions : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ,
{ } ,
{ mapToData : "teams" }
] ,
setUserAccessRestrictions : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ,
{ } ,
{ mapToData : "users" }
] ,
testPushWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests" ] ,
transfer : [ "POST /repos/{owner}/{repo}/transfer" ] ,
update : [ "PATCH /repos/{owner}/{repo}" ] ,
updateBranchProtection : [
"PUT /repos/{owner}/{repo}/branches/{branch}/protection"
] ,
updateCommitComment : [ "PATCH /repos/{owner}/{repo}/comments/{comment_id}" ] ,
updateDeploymentBranchPolicy : [
"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
] ,
updateInformationAboutPagesSite : [ "PUT /repos/{owner}/{repo}/pages" ] ,
updateInvitation : [
"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"
] ,
updateOrgRuleset : [ "PUT /orgs/{org}/rulesets/{ruleset_id}" ] ,
updatePullRequestReviewProtection : [
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
] ,
updateRelease : [ "PATCH /repos/{owner}/{repo}/releases/{release_id}" ] ,
updateReleaseAsset : [
"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"
] ,
updateRepoRuleset : [ "PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}" ] ,
updateStatusCheckPotection : [
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ,
{ } ,
{ renamed : [ "repos" , "updateStatusCheckProtection" ] }
] ,
updateStatusCheckProtection : [
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
] ,
updateWebhook : [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
updateWebhookConfigForRepo : [
"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"
] ,
uploadReleaseAsset : [
"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}" ,
{ baseUrl : "https://uploads.github.com" }
]
} ,
search : {
code : [ "GET /search/code" ] ,
commits : [ "GET /search/commits" ] ,
issuesAndPullRequests : [ "GET /search/issues" ] ,
labels : [ "GET /search/labels" ] ,
repos : [ "GET /search/repositories" ] ,
topics : [ "GET /search/topics" ] ,
users : [ "GET /search/users" ]
} ,
secretScanning : {
getAlert : [
"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
] ,
listAlertsForEnterprise : [
"GET /enterprises/{enterprise}/secret-scanning/alerts"
] ,
listAlertsForOrg : [ "GET /orgs/{org}/secret-scanning/alerts" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/secret-scanning/alerts" ] ,
listLocationsForAlert : [
"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"
] ,
updateAlert : [
"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
]
} ,
securityAdvisories : {
createFork : [
"POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks"
] ,
createPrivateVulnerabilityReport : [
"POST /repos/{owner}/{repo}/security-advisories/reports"
] ,
createRepositoryAdvisory : [
"POST /repos/{owner}/{repo}/security-advisories"
] ,
createRepositoryAdvisoryCveRequest : [
"POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve"
] ,
getGlobalAdvisory : [ "GET /advisories/{ghsa_id}" ] ,
getRepositoryAdvisory : [
"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
] ,
listGlobalAdvisories : [ "GET /advisories" ] ,
listOrgRepositoryAdvisories : [ "GET /orgs/{org}/security-advisories" ] ,
listRepositoryAdvisories : [ "GET /repos/{owner}/{repo}/security-advisories" ] ,
updateRepositoryAdvisory : [
"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
]
} ,
teams : {
addOrUpdateMembershipForUserInOrg : [
"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"
] ,
addOrUpdateProjectPermissionsInOrg : [
"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"
] ,
addOrUpdateRepoPermissionsInOrg : [
"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
] ,
checkPermissionsForProjectInOrg : [
"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"
] ,
checkPermissionsForRepoInOrg : [
"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
] ,
create : [ "POST /orgs/{org}/teams" ] ,
createDiscussionCommentInOrg : [
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
] ,
createDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions" ] ,
deleteDiscussionCommentInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
] ,
deleteDiscussionInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
] ,
deleteInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}" ] ,
getByName : [ "GET /orgs/{org}/teams/{team_slug}" ] ,
getDiscussionCommentInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
] ,
getDiscussionInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
] ,
getMembershipForUserInOrg : [
"GET /orgs/{org}/teams/{team_slug}/memberships/{username}"
] ,
list : [ "GET /orgs/{org}/teams" ] ,
listChildInOrg : [ "GET /orgs/{org}/teams/{team_slug}/teams" ] ,
listDiscussionCommentsInOrg : [
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
] ,
listDiscussionsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions" ] ,
listForAuthenticatedUser : [ "GET /user/teams" ] ,
listMembersInOrg : [ "GET /orgs/{org}/teams/{team_slug}/members" ] ,
listPendingInvitationsInOrg : [
"GET /orgs/{org}/teams/{team_slug}/invitations"
] ,
listProjectsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects" ] ,
listReposInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos" ] ,
removeMembershipForUserInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"
] ,
removeProjectInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"
] ,
removeRepoInOrg : [
"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
] ,
updateDiscussionCommentInOrg : [
"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
] ,
updateDiscussionInOrg : [
"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
] ,
updateInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}" ]
} ,
users : {
addEmailForAuthenticated : [
"POST /user/emails" ,
{ } ,
{ renamed : [ "users" , "addEmailForAuthenticatedUser" ] }
] ,
addEmailForAuthenticatedUser : [ "POST /user/emails" ] ,
addSocialAccountForAuthenticatedUser : [ "POST /user/social_accounts" ] ,
block : [ "PUT /user/blocks/{username}" ] ,
checkBlocked : [ "GET /user/blocks/{username}" ] ,
checkFollowingForUser : [ "GET /users/{username}/following/{target_user}" ] ,
checkPersonIsFollowedByAuthenticated : [ "GET /user/following/{username}" ] ,
createGpgKeyForAuthenticated : [
"POST /user/gpg_keys" ,
{ } ,
{ renamed : [ "users" , "createGpgKeyForAuthenticatedUser" ] }
] ,
createGpgKeyForAuthenticatedUser : [ "POST /user/gpg_keys" ] ,
createPublicSshKeyForAuthenticated : [
"POST /user/keys" ,
{ } ,
{ renamed : [ "users" , "createPublicSshKeyForAuthenticatedUser" ] }
] ,
createPublicSshKeyForAuthenticatedUser : [ "POST /user/keys" ] ,
createSshSigningKeyForAuthenticatedUser : [ "POST /user/ssh_signing_keys" ] ,
deleteEmailForAuthenticated : [
"DELETE /user/emails" ,
{ } ,
{ renamed : [ "users" , "deleteEmailForAuthenticatedUser" ] }
] ,
deleteEmailForAuthenticatedUser : [ "DELETE /user/emails" ] ,
deleteGpgKeyForAuthenticated : [
"DELETE /user/gpg_keys/{gpg_key_id}" ,
{ } ,
{ renamed : [ "users" , "deleteGpgKeyForAuthenticatedUser" ] }
] ,
deleteGpgKeyForAuthenticatedUser : [ "DELETE /user/gpg_keys/{gpg_key_id}" ] ,
deletePublicSshKeyForAuthenticated : [
"DELETE /user/keys/{key_id}" ,
{ } ,
{ renamed : [ "users" , "deletePublicSshKeyForAuthenticatedUser" ] }
] ,
deletePublicSshKeyForAuthenticatedUser : [ "DELETE /user/keys/{key_id}" ] ,
deleteSocialAccountForAuthenticatedUser : [ "DELETE /user/social_accounts" ] ,
deleteSshSigningKeyForAuthenticatedUser : [
"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}"
] ,
follow : [ "PUT /user/following/{username}" ] ,
getAuthenticated : [ "GET /user" ] ,
getByUsername : [ "GET /users/{username}" ] ,
getContextForUser : [ "GET /users/{username}/hovercard" ] ,
getGpgKeyForAuthenticated : [
"GET /user/gpg_keys/{gpg_key_id}" ,
{ } ,
{ renamed : [ "users" , "getGpgKeyForAuthenticatedUser" ] }
] ,
getGpgKeyForAuthenticatedUser : [ "GET /user/gpg_keys/{gpg_key_id}" ] ,
getPublicSshKeyForAuthenticated : [
"GET /user/keys/{key_id}" ,
{ } ,
{ renamed : [ "users" , "getPublicSshKeyForAuthenticatedUser" ] }
] ,
getPublicSshKeyForAuthenticatedUser : [ "GET /user/keys/{key_id}" ] ,
getSshSigningKeyForAuthenticatedUser : [
"GET /user/ssh_signing_keys/{ssh_signing_key_id}"
] ,
list : [ "GET /users" ] ,
listBlockedByAuthenticated : [
"GET /user/blocks" ,
{ } ,
{ renamed : [ "users" , "listBlockedByAuthenticatedUser" ] }
] ,
listBlockedByAuthenticatedUser : [ "GET /user/blocks" ] ,
listEmailsForAuthenticated : [
"GET /user/emails" ,
{ } ,
{ renamed : [ "users" , "listEmailsForAuthenticatedUser" ] }
] ,
listEmailsForAuthenticatedUser : [ "GET /user/emails" ] ,
listFollowedByAuthenticated : [
"GET /user/following" ,
{ } ,
{ renamed : [ "users" , "listFollowedByAuthenticatedUser" ] }
] ,
listFollowedByAuthenticatedUser : [ "GET /user/following" ] ,
listFollowersForAuthenticatedUser : [ "GET /user/followers" ] ,
listFollowersForUser : [ "GET /users/{username}/followers" ] ,
listFollowingForUser : [ "GET /users/{username}/following" ] ,
listGpgKeysForAuthenticated : [
"GET /user/gpg_keys" ,
{ } ,
{ renamed : [ "users" , "listGpgKeysForAuthenticatedUser" ] }
] ,
listGpgKeysForAuthenticatedUser : [ "GET /user/gpg_keys" ] ,
listGpgKeysForUser : [ "GET /users/{username}/gpg_keys" ] ,
listPublicEmailsForAuthenticated : [
"GET /user/public_emails" ,
{ } ,
{ renamed : [ "users" , "listPublicEmailsForAuthenticatedUser" ] }
] ,
listPublicEmailsForAuthenticatedUser : [ "GET /user/public_emails" ] ,
listPublicKeysForUser : [ "GET /users/{username}/keys" ] ,
listPublicSshKeysForAuthenticated : [
"GET /user/keys" ,
{ } ,
{ renamed : [ "users" , "listPublicSshKeysForAuthenticatedUser" ] }
] ,
listPublicSshKeysForAuthenticatedUser : [ "GET /user/keys" ] ,
listSocialAccountsForAuthenticatedUser : [ "GET /user/social_accounts" ] ,
listSocialAccountsForUser : [ "GET /users/{username}/social_accounts" ] ,
listSshSigningKeysForAuthenticatedUser : [ "GET /user/ssh_signing_keys" ] ,
listSshSigningKeysForUser : [ "GET /users/{username}/ssh_signing_keys" ] ,
setPrimaryEmailVisibilityForAuthenticated : [
"PATCH /user/email/visibility" ,
{ } ,
{ renamed : [ "users" , "setPrimaryEmailVisibilityForAuthenticatedUser" ] }
] ,
setPrimaryEmailVisibilityForAuthenticatedUser : [
"PATCH /user/email/visibility"
] ,
unblock : [ "DELETE /user/blocks/{username}" ] ,
unfollow : [ "DELETE /user/following/{username}" ] ,
updateAuthenticated : [ "PATCH /user" ]
}
} ;
var endpoints_default = Endpoints;
// pkg/dist-src/endpoints-to-methods.js
// Flatten the nested { scope: { methodName: [route, defaults?, decorations?] } }
// table into a Map<scope, Map<methodName, descriptor>> for O(1) lookup from
// the Proxy handler below.
var endpointMethodsMap = /* @__PURE__ */ new Map();
for (const [scope, endpoints] of Object.entries(endpoints_default)) {
  for (const [methodName, endpoint] of Object.entries(endpoints)) {
    const [route, defaults, decorations] = endpoint;
    // A route string has the shape "<HTTP VERB> <url template>".
    const [method, url] = route.split(/ /);
    const endpointDefaults = Object.assign(
      {
        method,
        url
      },
      defaults
    );
    let scopedMethods = endpointMethodsMap.get(scope);
    if (!scopedMethods) {
      scopedMethods = /* @__PURE__ */ new Map();
      endpointMethodsMap.set(scope, scopedMethods);
    }
    scopedMethods.set(methodName, {
      scope,
      methodName,
      endpointDefaults,
      decorations
    });
  }
}
var handler = {
has ( { scope } , methodName ) {
return endpointMethodsMap . get ( scope ) . has ( methodName ) ;
} ,
getOwnPropertyDescriptor ( target , methodName ) {
return {
value : this . get ( target , methodName ) ,
// ensures method is in the cache
configurable : true ,
writable : true ,
enumerable : true
} ;
} ,
defineProperty ( target , methodName , descriptor ) {
Object . defineProperty ( target . cache , methodName , descriptor ) ;
return true ;
} ,
deleteProperty ( target , methodName ) {
delete target . cache [ methodName ] ;
return true ;
} ,
ownKeys ( { scope } ) {
return [ ... endpointMethodsMap . get ( scope ) . keys ( ) ] ;
} ,
set ( target , methodName , value ) {
return target . cache [ methodName ] = value ;
} ,
get ( { octokit , scope , cache } , methodName ) {
if ( cache [ methodName ] ) {
return cache [ methodName ] ;
}
const method = endpointMethodsMap . get ( scope ) . get ( methodName ) ;
if ( ! method ) {
return void 0 ;
}
const { endpointDefaults , decorations } = method ;
if ( decorations ) {
cache [ methodName ] = decorate (
octokit ,
scope ,
methodName ,
endpointDefaults ,
decorations
) ;
} else {
cache [ methodName ] = octokit . request . defaults ( endpointDefaults ) ;
}
return cache [ methodName ] ;
}
} ;
function endpointsToMethods(octokit) {
  // Build one lazily-populated Proxy of REST methods per API scope
  // (e.g. result.repos, result.users, ...).
  const scopes = [...endpointMethodsMap.keys()];
  return Object.fromEntries(
    scopes.map((scope) => [scope, new Proxy({ octokit, scope, cache: {} }, handler)])
  );
}
function decorate(octokit, scope, methodName, defaults, decorations) {
  // Wrap `octokit.request` so deprecated/renamed endpoints keep working
  // while warning the caller, and `mapToData` endpoints move a named
  // parameter into the request body.
  const requestWithDefaults = octokit.request.defaults(defaults);
  function withDecorations(...args) {
    let options = requestWithDefaults.endpoint.merge(...args);
    if (decorations.mapToData) {
      // Move the mapped parameter into `data` and blank the original key.
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: void 0
      });
      return requestWithDefaults(options);
    }
    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(
        `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`
      );
    }
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }
    if (decorations.renamedParameters) {
      // Re-merge so the remapping below works on a fresh options object.
      const remappedOptions = requestWithDefaults.endpoint.merge(...args);
      for (const [name, alias] of Object.entries(
        decorations.renamedParameters
      )) {
        if (!(name in remappedOptions)) {
          continue;
        }
        octokit.log.warn(
          `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`
        );
        // Never clobber an explicitly supplied value under the new name.
        if (!(alias in remappedOptions)) {
          remappedOptions[alias] = remappedOptions[name];
        }
        delete remappedOptions[name];
      }
      return requestWithDefaults(remappedOptions);
    }
    return requestWithDefaults(...args);
  }
  // Expose the underlying request's own properties (endpoint, defaults, ...)
  // on the decorated function as well.
  return Object.assign(withDecorations, requestWithDefaults);
}
// pkg/dist-src/index.js
function restEndpointMethods(octokit) {
  // Octokit plugin entry point: expose the generated endpoint methods
  // under the `rest` namespace only.
  return { rest: endpointsToMethods(octokit) };
}
restEndpointMethods.VERSION = VERSION;
function legacyRestEndpointMethods(octokit) {
  // Legacy plugin entry point: spread the same methods at the top level as
  // well as under `rest`, for consumers that predate the `rest` namespace.
  const api = endpointsToMethods(octokit);
  return {
    ...api,
    rest: api
  };
}
legacyRestEndpointMethods.VERSION = VERSION;
// Annotate the CommonJS export names for ESM import in node:
0 && (0);
/***/ } ) ,
/***/ 537 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// Short aliases for the Object intrinsics, captured once and reused by the
// bundler's CommonJS/ESM interop helpers below.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install every entry of `all` on `target` as an enumerable lazy getter,
// so exports resolve at access time (allows forward references).
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as getters, skipping keys that
// `to` already owns and the single excluded key (`except`). Enumerability
// mirrors the source property's descriptor. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (let key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) {
        continue;
      }
      __defProp(to, key, {
        get: () => from[key],
        enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
      });
    }
  }
  return to;
};
// Wrap a CommonJS module for ESM-style consumption: copy its properties onto
// a fresh object (preserving the prototype) and synthesize a `default`
// binding when needed. Uses the comma operator: `target` is initialized
// first, then passed through __copyProps.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var _ _toCommonJS = ( mod ) => _ _copyProps ( _ _defProp ( { } , "__esModule" , { value : true } ) , mod ) ;
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
RequestError : ( ) => RequestError
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
var import _deprecation = _ _nccwpck _require _ _ ( 8932 ) ;
var import _once = _ _toESM ( _ _nccwpck _require _ _ ( 1223 ) ) ;
var logOnceCode = ( 0 , import _once . default ) ( ( deprecation ) => console . warn ( deprecation ) ) ;
var logOnceHeaders = ( 0 , import _once . default ) ( ( deprecation ) => console . warn ( deprecation ) ) ;
// Error type for failed HTTP requests (@octokit/request-error). Carries the
// HTTP `status`, a credential-redacted copy of the request, and (when given)
// the full response. The legacy `code` and `headers` accessors still work but
// emit a one-time Deprecation warning via logOnceCode/logOnceHeaders.
var RequestError = class extends Error {
  constructor(message, statusCode, options) {
    super(message);
    // Omit this constructor frame from the stack where V8 supports it.
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
    this.name = "HttpError";
    this.status = statusCode;
    let headers;
    // Legacy call sites pass `headers` directly...
    if ("headers" in options && typeof options.headers !== "undefined") {
      headers = options.headers;
    }
    // ...newer call sites pass the whole `response`; its headers take priority.
    if ("response" in options) {
      this.response = options.response;
      headers = options.response.headers;
    }
    // Shallow-copy the request and strip credentials before exposing it.
    const requestCopy = Object.assign({}, options.request);
    if (options.request.headers.authorization) {
      requestCopy.headers = Object.assign({}, options.request.headers, {
        // Keep the auth scheme (e.g. "token", "Bearer"); redact the secret.
        authorization: options.request.headers.authorization.replace(
          / .*$/,
          " [REDACTED]"
        )
      });
    }
    // Redact OAuth credentials that may appear in the query string.
    requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy;
    // Deprecated accessor: use `error.status`. Warns once per process.
    Object.defineProperty(this, "code", {
      get() {
        logOnceCode(
          new import_deprecation.Deprecation(
            "[@octokit/request-error] `error.code` is deprecated, use `error.status`."
          )
        );
        return statusCode;
      }
    });
    // Deprecated accessor: use `error.response.headers`. Warns once per process.
    Object.defineProperty(this, "headers", {
      get() {
        logOnceHeaders(
          new import_deprecation.Deprecation(
            "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."
          )
        );
        return headers || {};
      }
    });
  }
};
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
/***/ } ) ,
/***/ 6234 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
var _ _defProp = Object . defineProperty ;
var _ _getOwnPropDesc = Object . getOwnPropertyDescriptor ;
var _ _getOwnPropNames = Object . getOwnPropertyNames ;
var _ _hasOwnProp = Object . prototype . hasOwnProperty ;
var _ _export = ( target , all ) => {
for ( var name in all )
_ _defProp ( target , name , { get : all [ name ] , enumerable : true } ) ;
} ;
var _ _copyProps = ( to , from , except , desc ) => {
if ( from && typeof from === "object" || typeof from === "function" ) {
for ( let key of _ _getOwnPropNames ( from ) )
if ( ! _ _hasOwnProp . call ( to , key ) && key !== except )
_ _defProp ( to , key , { get : ( ) => from [ key ] , enumerable : ! ( desc = _ _getOwnPropDesc ( from , key ) ) || desc . enumerable } ) ;
}
return to ;
} ;
var _ _toCommonJS = ( mod ) => _ _copyProps ( _ _defProp ( { } , "__esModule" , { value : true } ) , mod ) ;
// pkg/dist-src/index.js
var dist _src _exports = { } ;
_ _export ( dist _src _exports , {
request : ( ) => request
} ) ;
module . exports = _ _toCommonJS ( dist _src _exports ) ;
var import _endpoint = _ _nccwpck _require _ _ ( 9440 ) ;
var import _universal _user _agent = _ _nccwpck _require _ _ ( 5030 ) ;
// pkg/dist-src/version.js
var VERSION = "8.4.0" ;
// pkg/dist-src/is-plain-object.js
/**
 * Returns true when `value` is a "plain" object: an object literal or an
 * object created with `Object.create(null)` — not an array, class instance,
 * Date, etc.
 * @param {*} value - candidate value.
 * @returns {boolean}
 */
function isPlainObject(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  if (Object.prototype.toString.call(value) !== "[object Object]") {
    return false;
  }
  const proto = Object.getPrototypeOf(value);
  // Objects with no prototype (Object.create(null)) count as plain.
  if (proto === null) {
    return true;
  }
  const hasCtor = Object.prototype.hasOwnProperty.call(proto, "constructor");
  const Ctor = hasCtor && proto.constructor;
  if (typeof Ctor !== "function") {
    return false;
  }
  // `Object instanceof Object` is true; a user class is not an instance of
  // itself, so class instances are rejected here.
  return Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
}
// pkg/dist-src/fetch-wrapper.js
var import _request _error = _ _nccwpck _require _ _ ( 537 ) ;
// pkg/dist-src/get-buffer-response.js
/**
 * Resolves a fetch-style Response to its raw bytes.
 * @param {Response} response - object exposing `arrayBuffer()`.
 * @returns {Promise<ArrayBuffer>} the body as an ArrayBuffer.
 */
function getBufferResponse(response) {
  return response.arrayBuffer();
}
// pkg/dist-src/fetch-wrapper.js
// Executes a fully-resolved request via fetch and normalizes the outcome.
// Resolves with `{ status, url, headers, data }`; throws RequestError for
// HTTP >= 400, 304 Not Modified, and failed HEAD requests; wraps low-level
// fetch failures (except aborts) in a synthetic status-500 RequestError.
function fetchWrapper(requestOptions) {
  var _a, _b, _c, _d;
  // Prefer a caller-supplied logger; fall back to the console.
  const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
  // Body parsing of successful responses can be disabled via
  // `request.parseSuccessResponseBody = false` (raw stream is returned then).
  const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;
  if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }
  let headers = {};
  let status;
  let url;
  // Use the caller-provided fetch implementation when given, else the global.
  let { fetch } = globalThis;
  if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {
    fetch = requestOptions.request.fetch;
  }
  if (!fetch) {
    throw new Error(
      "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing"
    );
  }
  return fetch(requestOptions.url, {
    method: requestOptions.method,
    body: requestOptions.body,
    redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect,
    headers: requestOptions.headers,
    signal: (_d = requestOptions.request) == null ? void 0 : _d.signal,
    // duplex must be set if request.body is ReadableStream or Async Iterables.
    // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.
    ...requestOptions.body && { duplex: "half" }
  }).then(async (response) => {
    url = response.url;
    status = response.status;
    // Flatten the Headers iterable into a plain key/value map.
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }
    // Surface server-announced deprecations (sunset date + deprecation link).
    if ("deprecation" in headers) {
      const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
      const deprecationLink = matches && matches.pop();
      log.warn(
        `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`
      );
    }
    // 204 No Content / 205 Reset Content carry no body by definition.
    if (status === 204 || status === 205) {
      return;
    }
    // HEAD: success has no body; failure raises without reading a body.
    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }
      throw new import_request_error.RequestError(response.statusText, status, {
        response: {
          url,
          status,
          headers,
          data: void 0
        },
        request: requestOptions
      });
    }
    // 304 is surfaced as an error so conditional-request callers can catch it.
    if (status === 304) {
      throw new import_request_error.RequestError("Not modified", status, {
        response: {
          url,
          status,
          headers,
          data: await getResponseData(response)
        },
        request: requestOptions
      });
    }
    if (status >= 400) {
      const data = await getResponseData(response);
      const error = new import_request_error.RequestError(toErrorMessage(data), status, {
        response: {
          url,
          status,
          headers,
          data
        },
        request: requestOptions
      });
      throw error;
    }
    return parseSuccessResponseBody ? await getResponseData(response) : response.body;
  }).then((data) => {
    // Normalize the success shape.
    return {
      status,
      url,
      headers,
      data
    };
  }).catch((error) => {
    // Pass through our own errors and user-initiated aborts untouched.
    if (error instanceof import_request_error.RequestError)
      throw error;
    else if (error.name === "AbortError")
      throw error;
    let message = error.message;
    // Undici wraps network failures in a TypeError with a `cause`; prefer the
    // underlying message when available.
    if (error.name === "TypeError" && "cause" in error) {
      if (error.cause instanceof Error) {
        message = error.cause.message;
      } else if (typeof error.cause === "string") {
        message = error.cause;
      }
    }
    // Synthetic 500 for transport-level failures.
    throw new import_request_error.RequestError(message, 500, {
      request: requestOptions
    });
  });
}
/**
 * Reads a fetch Response body according to its content-type:
 * JSON -> parsed object (falling back to text, then ""), textual -> string,
 * anything else -> ArrayBuffer.
 * @param {Response} response
 * @returns {Promise<*>} parsed body.
 */
async function getResponseData(response) {
  const contentType = response.headers.get("content-type");
  const isJson = /application\/json/.test(contentType);
  if (isJson) {
    // On malformed JSON fall back to the raw text; on a second failure, "".
    return response.json().catch(() => response.text()).catch(() => "");
  }
  const isTextual = !contentType || /^text\/|charset=utf-8$/.test(contentType);
  return isTextual ? response.text() : getBufferResponse(response);
}
/**
 * Converts an error-response payload into a human-readable message.
 * Strings pass through; objects with `message` (and optional `errors` /
 * `documentation_url`) are formatted; anything else is JSON-dumped.
 * @param {string|object} data - parsed error-response body.
 * @returns {string}
 */
function toErrorMessage(data) {
  if (typeof data === "string") {
    return data;
  }
  const suffix = "documentation_url" in data ? ` - ${data.documentation_url}` : "";
  if (!("message" in data)) {
    return `Unknown error: ${JSON.stringify(data)}`;
  }
  if (Array.isArray(data.errors)) {
    const details = data.errors.map(JSON.stringify).join(", ");
    return `${data.message}: ${details}${suffix}`;
  }
  return `${data.message}${suffix}`;
}
// pkg/dist-src/with-defaults.js
function withDefaults ( oldEndpoint , newDefaults ) {
const endpoint2 = oldEndpoint . defaults ( newDefaults ) ;
const newApi = function ( route , parameters ) {
const endpointOptions = endpoint2 . merge ( route , parameters ) ;
if ( ! endpointOptions . request || ! endpointOptions . request . hook ) {
return fetchWrapper ( endpoint2 . parse ( endpointOptions ) ) ;
}
const request2 = ( route2 , parameters2 ) => {
return fetchWrapper (
endpoint2 . parse ( endpoint2 . merge ( route2 , parameters2 ) )
) ;
} ;
Object . assign ( request2 , {
endpoint : endpoint2 ,
defaults : withDefaults . bind ( null , endpoint2 )
} ) ;
return endpointOptions . request . hook ( request2 , endpointOptions ) ;
} ;
return Object . assign ( newApi , {
endpoint : endpoint2 ,
defaults : withDefaults . bind ( null , endpoint2 )
} ) ;
}
// pkg/dist-src/index.js
// Default exported `request` instance: plain endpoint defaults plus this
// library's User-Agent header.
var request = withDefaults(import_endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
  }
});
// Annotate the CommonJS export names for ESM import in node:
0 && ( 0 ) ;
/***/ } ) ,
/***/ 3682 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var register = _ _nccwpck _require _ _ ( 4670 ) ;
var addHook = _ _nccwpck _require _ _ ( 5549 ) ;
var removeHook = _ _nccwpck _require _ _ ( 6819 ) ;
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function . bind ;
var bindable = bind . bind ( bind ) ;
function bindApi ( hook , state , name ) {
var removeHookRef = bindable ( removeHook , null ) . apply (
null ,
name ? [ state , name ] : [ state ]
) ;
hook . api = { remove : removeHookRef } ;
hook . remove = removeHookRef ;
[ "before" , "error" , "after" , "wrap" ] . forEach ( function ( kind ) {
var args = name ? [ state , kind , name ] : [ state , kind ] ;
hook [ kind ] = hook . api [ kind ] = bindable ( addHook , null ) . apply ( null , args ) ;
} ) ;
}
function HookSingular ( ) {
var singularHookName = "h" ;
var singularHookState = {
registry : { } ,
} ;
var singularHook = register . bind ( null , singularHookState , singularHookName ) ;
bindApi ( singularHook , singularHookState , singularHookName ) ;
return singularHook ;
}
function HookCollection ( ) {
var state = {
registry : { } ,
} ;
var hook = register . bind ( null , state ) ;
bindApi ( hook , state ) ;
return hook ;
}
var collectionHookDeprecationMessageDisplayed = false ;
function Hook ( ) {
if ( ! collectionHookDeprecationMessageDisplayed ) {
console . warn (
'[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4'
) ;
collectionHookDeprecationMessageDisplayed = true ;
}
return HookCollection ( ) ;
}
Hook . Singular = HookSingular . bind ( ) ;
Hook . Collection = HookCollection . bind ( ) ;
module . exports = Hook ;
// expose constructors as a named property for TypeScript
module . exports . Hook = Hook ;
module . exports . Singular = Hook . Singular ;
module . exports . Collection = Hook . Collection ;
/***/ } ) ,
/***/ 5549 :
/***/ ( ( module ) => {
module . exports = addHook ;
/**
 * Registers `hook` under `state.registry[name]`, wrapping it according to
 * `kind`: "before" runs the hook ahead of the method, "after" runs it on the
 * method's result (preserving that result), "error" runs it as a catch
 * handler; any other kind (e.g. "wrap") is stored unwrapped.
 * @param {{registry: object}} state - shared hook registry.
 * @param {string} kind - "before" | "after" | "error" | "wrap".
 * @param {string} name - hook name.
 * @param {function} hook - user-provided hook function.
 */
function addHook(state, kind, name, hook) {
  var orig = hook;
  if (!state.registry[name]) {
    state.registry[name] = [];
  }
  var wrappers = {
    before: function (method, options) {
      return Promise.resolve()
        .then(orig.bind(null, options))
        .then(method.bind(null, options));
    },
    after: function (method, options) {
      var result;
      return Promise.resolve()
        .then(method.bind(null, options))
        .then(function (result_) {
          result = result_;
          return orig(result, options);
        })
        .then(function () {
          // The after-hook's return value is ignored; keep the method result.
          return result;
        });
    },
    error: function (method, options) {
      return Promise.resolve()
        .then(method.bind(null, options))
        .catch(function (error) {
          return orig(error, options);
        });
    },
  };
  if (Object.prototype.hasOwnProperty.call(wrappers, kind)) {
    hook = wrappers[kind];
  }
  state.registry[name].push({
    hook: hook,
    orig: orig,
  });
}
/***/ } ) ,
/***/ 4670 :
/***/ ( ( module ) => {
module . exports = register ;
/**
 * Runs `method(options)` through every hook registered under `name`.
 * When `name` is an array, the hooks are nested so the first name is the
 * outermost layer. Always returns a Promise of the method's result.
 *
 * Fix: the array branch previously called `name.reverse()`, which mutates
 * the CALLER's array in place; it now reverses a copy.
 *
 * @param {{registry: object}} state - shared hook registry.
 * @param {string|string[]} name - hook name(s) to apply.
 * @param {function} method - the wrapped operation.
 * @param {object} [options] - passed to every hook and to `method`.
 * @returns {Promise<*>} resolution of the hooked method.
 * @throws {Error} when `method` is not a function.
 */
function register(state, name, method, options) {
  if (typeof method !== "function") {
    throw new Error("method for before hook must be a function");
  }
  if (!options) {
    options = {};
  }
  if (Array.isArray(name)) {
    // Copy before reversing: Array.prototype.reverse mutates in place and
    // the caller's array must stay untouched.
    return name.slice().reverse().reduce(function (callback, name) {
      return register.bind(null, state, name, callback, options);
    }, method)();
  }
  return Promise.resolve().then(function () {
    // No hooks for this name: run the method directly.
    if (!state.registry[name]) {
      return method(options);
    }
    // Fold registered hooks around the method, then invoke the chain.
    return state.registry[name].reduce(function (method, registered) {
      return registered.hook.bind(null, method, options);
    }, method)();
  });
}
/***/ } ) ,
/***/ 6819 :
/***/ ( ( module ) => {
module . exports = removeHook ;
/**
 * Unregisters the first hook under `name` whose original function is
 * `method`. Silently does nothing when the name or the method is unknown.
 * @param {{registry: object}} state - shared hook registry.
 * @param {string} name - hook name.
 * @param {function} method - the original (unwrapped) hook function.
 */
function removeHook(state, name, method) {
  const registered = state.registry[name];
  if (!registered) {
    return;
  }
  const index = registered.findIndex(function (entry) {
    return entry.orig === method;
  });
  if (index === -1) {
    return;
  }
  registered.splice(index, 1);
}
/***/ } ) ,
/***/ 8932 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
/** Error subclass used to flag usage of deprecated APIs. */
class Deprecation extends Error {
  constructor(message) {
    super(message);
    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
    this.name = 'Deprecation';
  }
}
exports . Deprecation = Deprecation ;
/***/ } ) ,
/***/ 1223 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var wrappy = _ _nccwpck _require _ _ ( 2940 )
module . exports = wrappy ( once )
module . exports . strict = wrappy ( onceStrict )
once . proto = once ( function ( ) {
Object . defineProperty ( Function . prototype , 'once' , {
value : function ( ) {
return once ( this )
} ,
configurable : true
} )
Object . defineProperty ( Function . prototype , 'onceStrict' , {
value : function ( ) {
return onceStrict ( this )
} ,
configurable : true
} )
} )
// Wraps `fn` so it runs at most once; later calls replay the first result.
// The wrapper exposes `.called` and `.value` for inspection.
function once (fn) {
  function wrapper () {
    if (!wrapper.called) {
      wrapper.called = true
      wrapper.value = fn.apply(this, arguments)
    }
    return wrapper.value
  }
  wrapper.called = false
  return wrapper
}
// Like `once`, but a second invocation throws instead of replaying the
// cached value. The error message includes the wrapped function's name.
function onceStrict (fn) {
  function wrapper () {
    if (wrapper.called) {
      throw new Error(wrapper.onceError)
    }
    wrapper.called = true
    wrapper.value = fn.apply(this, arguments)
    return wrapper.value
  }
  var label = fn.name || 'Function wrapped with `once`'
  wrapper.onceError = label + " shouldn't be called more than once"
  wrapper.called = false
  return wrapper
}
/***/ } ) ,
/***/ 5911 :
/***/ ( ( module , exports ) => {
exports = module . exports = SemVer
var debug
/* istanbul ignore next */
if ( typeof process === 'object' &&
process . env &&
process . env . NODE _DEBUG &&
/\bsemver\b/i . test ( process . env . NODE _DEBUG ) ) {
debug = function ( ) {
var args = Array . prototype . slice . call ( arguments , 0 )
args . unshift ( 'SEMVER' )
console . log . apply ( console , args )
}
} else {
debug = function ( ) { }
}
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports . SEMVER _SPEC _VERSION = '2.0.0'
var MAX _LENGTH = 256
var MAX _SAFE _INTEGER = Number . MAX _SAFE _INTEGER ||
/* istanbul ignore next */ 9007199254740991
// Max safe segment length for coercion.
var MAX _SAFE _COMPONENT _LENGTH = 16
var MAX _SAFE _BUILD _LENGTH = MAX _LENGTH - 6
// The actual regexps go on exports.re
var re = exports . re = [ ]
var safeRe = exports . safeRe = [ ]
var src = exports . src = [ ]
var t = exports . tokens = { }
var R = 0
function tok ( n ) {
t [ n ] = R ++
}
var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
[ '\\s' , 1 ] ,
[ '\\d' , MAX _LENGTH ] ,
[ LETTERDASHNUMBER , MAX _SAFE _BUILD _LENGTH ] ,
]
// Rewrites unbounded `*` / `+` quantifiers on the tokens listed in
// `safeRegexReplacements` into bounded `{0,max}` / `{1,max}` forms, to guard
// the internal regexes against catastrophic backtracking (ReDoS).
function makeSafeRe (value) {
  var result = value
  for (var idx = 0; idx < safeRegexReplacements.length; idx++) {
    var pair = safeRegexReplacements[idx]
    var token = pair[0]
    var max = pair[1]
    result = result
      .split(token + '*').join(token + '{0,' + max + '}')
      .split(token + '+').join(token + '{1,' + max + '}')
  }
  return result
}
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.
tok ( 'NUMERICIDENTIFIER' )
src [ t . NUMERICIDENTIFIER ] = '0|[1-9]\\d*'
tok ( 'NUMERICIDENTIFIERLOOSE' )
src [ t . NUMERICIDENTIFIERLOOSE ] = '\\d+'
// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
tok ( 'NONNUMERICIDENTIFIER' )
src [ t . NONNUMERICIDENTIFIER ] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
// ## Main Version
// Three dot-separated numeric identifiers.
tok ( 'MAINVERSION' )
src [ t . MAINVERSION ] = '(' + src [ t . NUMERICIDENTIFIER ] + ')\\.' +
'(' + src [ t . NUMERICIDENTIFIER ] + ')\\.' +
'(' + src [ t . NUMERICIDENTIFIER ] + ')'
tok ( 'MAINVERSIONLOOSE' )
src [ t . MAINVERSIONLOOSE ] = '(' + src [ t . NUMERICIDENTIFIERLOOSE ] + ')\\.' +
'(' + src [ t . NUMERICIDENTIFIERLOOSE ] + ')\\.' +
'(' + src [ t . NUMERICIDENTIFIERLOOSE ] + ')'
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
tok ( 'PRERELEASEIDENTIFIER' )
src [ t . PRERELEASEIDENTIFIER ] = '(?:' + src [ t . NUMERICIDENTIFIER ] +
'|' + src [ t . NONNUMERICIDENTIFIER ] + ')'
tok ( 'PRERELEASEIDENTIFIERLOOSE' )
src [ t . PRERELEASEIDENTIFIERLOOSE ] = '(?:' + src [ t . NUMERICIDENTIFIERLOOSE ] +
'|' + src [ t . NONNUMERICIDENTIFIER ] + ')'
// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.
tok ( 'PRERELEASE' )
src [ t . PRERELEASE ] = '(?:-(' + src [ t . PRERELEASEIDENTIFIER ] +
'(?:\\.' + src [ t . PRERELEASEIDENTIFIER ] + ')*))'
tok ( 'PRERELEASELOOSE' )
src [ t . PRERELEASELOOSE ] = '(?:-?(' + src [ t . PRERELEASEIDENTIFIERLOOSE ] +
'(?:\\.' + src [ t . PRERELEASEIDENTIFIERLOOSE ] + ')*))'
// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
tok ( 'BUILDIDENTIFIER' )
src [ t . BUILDIDENTIFIER ] = LETTERDASHNUMBER + '+'
// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.
tok ( 'BUILD' )
src [ t . BUILD ] = '(?:\\+(' + src [ t . BUILDIDENTIFIER ] +
'(?:\\.' + src [ t . BUILDIDENTIFIER ] + ')*))'
// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.
// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.
tok ( 'FULL' )
tok ( 'FULLPLAIN' )
src [ t . FULLPLAIN ] = 'v?' + src [ t . MAINVERSION ] +
src [ t . PRERELEASE ] + '?' +
src [ t . BUILD ] + '?'
src [ t . FULL ] = '^' + src [ t . FULLPLAIN ] + '$'
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
tok ( 'LOOSEPLAIN' )
src [ t . LOOSEPLAIN ] = '[v=\\s]*' + src [ t . MAINVERSIONLOOSE ] +
src [ t . PRERELEASELOOSE ] + '?' +
src [ t . BUILD ] + '?'
tok ( 'LOOSE' )
src [ t . LOOSE ] = '^' + src [ t . LOOSEPLAIN ] + '$'
tok ( 'GTLT' )
src [ t . GTLT ] = '((?:<|>)?=?)'
// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
tok ( 'XRANGEIDENTIFIERLOOSE' )
src [ t . XRANGEIDENTIFIERLOOSE ] = src [ t . NUMERICIDENTIFIERLOOSE ] + '|x|X|\\*'
tok ( 'XRANGEIDENTIFIER' )
src [ t . XRANGEIDENTIFIER ] = src [ t . NUMERICIDENTIFIER ] + '|x|X|\\*'
tok ( 'XRANGEPLAIN' )
src [ t . XRANGEPLAIN ] = '[v=\\s]*(' + src [ t . XRANGEIDENTIFIER ] + ')' +
'(?:\\.(' + src [ t . XRANGEIDENTIFIER ] + ')' +
'(?:\\.(' + src [ t . XRANGEIDENTIFIER ] + ')' +
'(?:' + src [ t . PRERELEASE ] + ')?' +
src [ t . BUILD ] + '?' +
')?)?'
tok ( 'XRANGEPLAINLOOSE' )
src [ t . XRANGEPLAINLOOSE ] = '[v=\\s]*(' + src [ t . XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:\\.(' + src [ t . XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:\\.(' + src [ t . XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:' + src [ t . PRERELEASELOOSE ] + ')?' +
src [ t . BUILD ] + '?' +
')?)?'
tok ( 'XRANGE' )
src [ t . XRANGE ] = '^' + src [ t . GTLT ] + '\\s*' + src [ t . XRANGEPLAIN ] + '$'
tok ( 'XRANGELOOSE' )
src [ t . XRANGELOOSE ] = '^' + src [ t . GTLT ] + '\\s*' + src [ t . XRANGEPLAINLOOSE ] + '$'
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
tok ( 'COERCE' )
src [ t . COERCE ] = '(^|[^\\d])' +
'(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '})' +
'(?:\\.(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '}))?' +
'(?:\\.(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '}))?' +
'(?:$|[^\\d])'
tok ( 'COERCERTL' )
re [ t . COERCERTL ] = new RegExp ( src [ t . COERCE ] , 'g' )
safeRe [ t . COERCERTL ] = new RegExp ( makeSafeRe ( src [ t . COERCE ] ) , 'g' )
// Tilde ranges.
// Meaning is "reasonably at or greater than"
tok ( 'LONETILDE' )
src [ t . LONETILDE ] = '(?:~>?)'
tok ( 'TILDETRIM' )
src [ t . TILDETRIM ] = '(\\s*)' + src [ t . LONETILDE ] + '\\s+'
re [ t . TILDETRIM ] = new RegExp ( src [ t . TILDETRIM ] , 'g' )
safeRe [ t . TILDETRIM ] = new RegExp ( makeSafeRe ( src [ t . TILDETRIM ] ) , 'g' )
var tildeTrimReplace = '$1~'
tok ( 'TILDE' )
src [ t . TILDE ] = '^' + src [ t . LONETILDE ] + src [ t . XRANGEPLAIN ] + '$'
tok ( 'TILDELOOSE' )
src [ t . TILDELOOSE ] = '^' + src [ t . LONETILDE ] + src [ t . XRANGEPLAINLOOSE ] + '$'
// Caret ranges.
// Meaning is "at least and backwards compatible with"
tok ( 'LONECARET' )
src [ t . LONECARET ] = '(?:\\^)'
tok ( 'CARETTRIM' )
src [ t . CARETTRIM ] = '(\\s*)' + src [ t . LONECARET ] + '\\s+'
re [ t . CARETTRIM ] = new RegExp ( src [ t . CARETTRIM ] , 'g' )
safeRe [ t . CARETTRIM ] = new RegExp ( makeSafeRe ( src [ t . CARETTRIM ] ) , 'g' )
var caretTrimReplace = '$1^'
tok ( 'CARET' )
src [ t . CARET ] = '^' + src [ t . LONECARET ] + src [ t . XRANGEPLAIN ] + '$'
tok ( 'CARETLOOSE' )
src [ t . CARETLOOSE ] = '^' + src [ t . LONECARET ] + src [ t . XRANGEPLAINLOOSE ] + '$'
// A simple gt/lt/eq thing, or just "" to indicate "any version"
tok ( 'COMPARATORLOOSE' )
src [ t . COMPARATORLOOSE ] = '^' + src [ t . GTLT ] + '\\s*(' + src [ t . LOOSEPLAIN ] + ')$|^$'
tok ( 'COMPARATOR' )
src [ t . COMPARATOR ] = '^' + src [ t . GTLT ] + '\\s*(' + src [ t . FULLPLAIN ] + ')$|^$'
// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
tok ( 'COMPARATORTRIM' )
src [ t . COMPARATORTRIM ] = '(\\s*)' + src [ t . GTLT ] +
'\\s*(' + src [ t . LOOSEPLAIN ] + '|' + src [ t . XRANGEPLAIN ] + ')'
// this one has to use the /g flag
re [ t . COMPARATORTRIM ] = new RegExp ( src [ t . COMPARATORTRIM ] , 'g' )
safeRe [ t . COMPARATORTRIM ] = new RegExp ( makeSafeRe ( src [ t . COMPARATORTRIM ] ) , 'g' )
var comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
tok ( 'HYPHENRANGE' )
src [ t . HYPHENRANGE ] = '^\\s*(' + src [ t . XRANGEPLAIN ] + ')' +
'\\s+-\\s+' +
'(' + src [ t . XRANGEPLAIN ] + ')' +
'\\s*$'
tok ( 'HYPHENRANGELOOSE' )
src [ t . HYPHENRANGELOOSE ] = '^\\s*(' + src [ t . XRANGEPLAINLOOSE ] + ')' +
'\\s+-\\s+' +
'(' + src [ t . XRANGEPLAINLOOSE ] + ')' +
'\\s*$'
// Star ranges basically just allow anything at all.
tok ( 'STAR' )
src [ t . STAR ] = '(<|>)?=?\\s*\\*'
// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for ( var i = 0 ; i < R ; i ++ ) {
debug ( i , src [ i ] )
if ( ! re [ i ] ) {
re [ i ] = new RegExp ( src [ i ] )
// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe [ i ] = new RegExp ( makeSafeRe ( src [ i ] ) )
}
}
exports.parse = parse
// Parses `version` into a SemVer instance, or returns null when the input is
// not a string, is too long, or does not match the (loose or strict) grammar.
// A non-object `options` is treated as the legacy boolean `loose` flag.
function parse (version, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  // Already parsed: pass through unchanged.
  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  // Reject over-long input before running any regex (DoS guard).
  if (version.length > MAX_LENGTH) {
    return null
  }

  var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
  if (!r.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    // SemVer re-validates; any throw maps to "not parseable".
    return null
  }
}
exports . valid = valid
// Returns the canonical version string for `version`, or null when it does
// not parse. Thin convenience wrapper around `parse`.
function valid (version, options) {
  var parsed = parse(version, options)
  return parsed ? parsed.version : null
}
exports . clean = clean
// Like `valid`, but first strips surrounding whitespace and any leading
// "=" / "v" prefix characters before parsing.
function clean (version, options) {
  var parsed = parse(version.trim().replace(/^[=v]+/, ''), options)
  return parsed ? parsed.version : null
}
exports.SemVer = SemVer
// Parsed semantic version. Accepts a version string or an existing SemVer,
// plus options ({loose, includePrerelease} or a legacy boolean `loose`).
// Throws TypeError on non-string/over-long/unparseable input.
function SemVer (version, options) {
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }
  if (version instanceof SemVer) {
    if (version.loose === options.loose) {
      return version
    } else {
      // Same version, different looseness: re-parse from its string form.
      version = version.version
    }
  } else if (typeof version !== 'string') {
    throw new TypeError('Invalid Version: ' + version)
  }

  if (version.length > MAX_LENGTH) {
    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
  }

  // Allow calling without `new`.
  if (!(this instanceof SemVer)) {
    return new SemVer(version, options)
  }

  debug('SemVer', version, options)
  this.options = options
  this.loose = !!options.loose

  var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])

  if (!m) {
    throw new TypeError('Invalid Version: ' + version)
  }

  this.raw = version

  // these are actually numbers
  this.major = +m[1]
  this.minor = +m[2]
  this.patch = +m[3]

  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
    throw new TypeError('Invalid major version')
  }

  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
    throw new TypeError('Invalid minor version')
  }

  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
    throw new TypeError('Invalid patch version')
  }

  // numberify any prerelease numeric ids
  if (!m[4]) {
    this.prerelease = []
  } else {
    this.prerelease = m[4].split('.').map(function (id) {
      if (/^[0-9]+$/.test(id)) {
        var num = +id
        if (num >= 0 && num < MAX_SAFE_INTEGER) {
          return num
        }
      }
      return id
    })
  }

  // Build metadata is kept but never participates in comparisons.
  this.build = m[5] ? m[5].split('.') : []
  this.format()
}
SemVer . prototype . format = function ( ) {
this . version = this . major + '.' + this . minor + '.' + this . patch
if ( this . prerelease . length ) {
this . version += '-' + this . prerelease . join ( '.' )
}
return this . version
}
SemVer . prototype . toString = function ( ) {
return this . version
}
SemVer . prototype . compare = function ( other ) {
debug ( 'SemVer.compare' , this . version , this . options , other )
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
return this . compareMain ( other ) || this . comparePre ( other )
}
SemVer . prototype . compareMain = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
return compareIdentifiers ( this . major , other . major ) ||
compareIdentifiers ( this . minor , other . minor ) ||
compareIdentifiers ( this . patch , other . patch )
}
SemVer . prototype . comparePre = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
// NOT having a prerelease is > having one
if ( this . prerelease . length && ! other . prerelease . length ) {
return - 1
} else if ( ! this . prerelease . length && other . prerelease . length ) {
return 1
} else if ( ! this . prerelease . length && ! other . prerelease . length ) {
return 0
}
var i = 0
do {
var a = this . prerelease [ i ]
var b = other . prerelease [ i ]
debug ( 'prerelease compare' , i , a , b )
if ( a === undefined && b === undefined ) {
return 0
} else if ( b === undefined ) {
return 1
} else if ( a === undefined ) {
return - 1
} else if ( a === b ) {
continue
} else {
return compareIdentifiers ( a , b )
}
} while ( ++ i )
}
SemVer . prototype . compareBuild = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
var i = 0
do {
var a = this . build [ i ]
var b = other . build [ i ]
debug ( 'prerelease compare' , i , a , b )
if ( a === undefined && b === undefined ) {
return 0
} else if ( b === undefined ) {
return 1
} else if ( a === undefined ) {
return - 1
} else if ( a === b ) {
continue
} else {
return compareIdentifiers ( a , b )
}
} while ( ++ i )
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
SemVer . prototype . inc = function ( release , identifier ) {
switch ( release ) {
case 'premajor' :
this . prerelease . length = 0
this . patch = 0
this . minor = 0
this . major ++
this . inc ( 'pre' , identifier )
break
case 'preminor' :
this . prerelease . length = 0
this . patch = 0
this . minor ++
this . inc ( 'pre' , identifier )
break
case 'prepatch' :
// If this is already a prerelease, it will bump to the next version
// drop any prereleases that might already exist, since they are not
// relevant at this point.
this . prerelease . length = 0
this . inc ( 'patch' , identifier )
this . inc ( 'pre' , identifier )
break
// If the input is a non-prerelease version, this acts the same as
// prepatch.
case 'prerelease' :
if ( this . prerelease . length === 0 ) {
this . inc ( 'patch' , identifier )
}
this . inc ( 'pre' , identifier )
break
case 'major' :
// If this is a pre-major version, bump up to the same major version.
// Otherwise increment major.
// 1.0.0-5 bumps to 1.0.0
// 1.1.0 bumps to 2.0.0
if ( this . minor !== 0 ||
this . patch !== 0 ||
this . prerelease . length === 0 ) {
this . major ++
}
this . minor = 0
this . patch = 0
this . prerelease = [ ]
break
case 'minor' :
// If this is a pre-minor version, bump up to the same minor version.
// Otherwise increment minor.
// 1.2.0-5 bumps to 1.2.0
// 1.2.1 bumps to 1.3.0
if ( this . patch !== 0 || this . prerelease . length === 0 ) {
this . minor ++
}
this . patch = 0
this . prerelease = [ ]
break
case 'patch' :
// If this is not a pre-release version, it will increment the patch.
// If it is a pre-release it will bump up to the same patch version.
// 1.2.0-5 patches to 1.2.0
// 1.2.0 patches to 1.2.1
if ( this . prerelease . length === 0 ) {
this . patch ++
}
this . prerelease = [ ]
break
// This probably shouldn't be used publicly.
// 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
case 'pre' :
if ( this . prerelease . length === 0 ) {
this . prerelease = [ 0 ]
} else {
var i = this . prerelease . length
while ( -- i >= 0 ) {
if ( typeof this . prerelease [ i ] === 'number' ) {
this . prerelease [ i ] ++
i = - 2
}
}
if ( i === - 1 ) {
// didn't increment anything
this . prerelease . push ( 0 )
}
}
if ( identifier ) {
// 1.2.0-beta.1 bumps to 1.2.0-beta.2,
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
if ( this . prerelease [ 0 ] === identifier ) {
if ( isNaN ( this . prerelease [ 1 ] ) ) {
this . prerelease = [ identifier , 0 ]
}
} else {
this . prerelease = [ identifier , 0 ]
}
}
break
default :
throw new Error ( 'invalid increment argument: ' + release )
}
this . format ( )
this . raw = this . version
return this
}
exports.inc = inc
// Functional wrapper over SemVer#inc: returns the bumped version string,
// or null when `version` cannot be parsed. Supports the historical
// three-argument form inc(version, release, identifier).
function inc (version, release, loose, identifier) {
  if (typeof (loose) === 'string') {
    identifier = loose
    loose = undefined
  }
  try {
    var bumped = new SemVer(version, loose).inc(release, identifier)
    return bumped.version
  } catch (er) {
    return null
  }
}
exports . diff = diff
function diff ( version1 , version2 ) {
if ( eq ( version1 , version2 ) ) {
return null
} else {
var v1 = parse ( version1 )
var v2 = parse ( version2 )
var prefix = ''
if ( v1 . prerelease . length || v2 . prerelease . length ) {
prefix = 'pre'
var defaultResult = 'prerelease'
}
for ( var key in v1 ) {
if ( key === 'major' || key === 'minor' || key === 'patch' ) {
if ( v1 [ key ] !== v2 [ key ] ) {
return prefix + key
}
}
}
return defaultResult // may be undefined
}
}
exports . compareIdentifiers = compareIdentifiers
var numeric = /^[0-9]+$/
// Compare two prerelease/build identifiers per SemVer rules:
// numeric identifiers compare numerically and always sort before
// alphanumeric ones; otherwise compare as plain strings.
function compareIdentifiers (a, b) {
  var aIsNum = numeric.test(a)
  var bIsNum = numeric.test(b)
  if (aIsNum && bIsNum) {
    a = +a
    b = +b
  }
  if (a === b) {
    return 0
  }
  if (aIsNum && !bIsNum) {
    return -1
  }
  if (bIsNum && !aIsNum) {
    return 1
  }
  return a < b ? -1 : 1
}
// ---- Thin convenience wrappers over SemVer comparison -------------------

// compareIdentifiers with the arguments flipped (descending order).
function rcompareIdentifiers (a, b) {
  return compareIdentifiers(b, a)
}
exports.rcompareIdentifiers = rcompareIdentifiers

// Component accessors: parse and return a single numeric field.
function major (a, loose) {
  return new SemVer(a, loose).major
}
exports.major = major

function minor (a, loose) {
  return new SemVer(a, loose).minor
}
exports.minor = minor

function patch (a, loose) {
  return new SemVer(a, loose).patch
}
exports.patch = patch

// Core three-way comparison: negative / zero / positive, Array#sort style.
function compare (a, b, loose) {
  return new SemVer(a, loose).compare(new SemVer(b, loose))
}
exports.compare = compare

function compareLoose (a, b) {
  return compare(a, b, true)
}
exports.compareLoose = compareLoose

// Like compare, but breaks ties using build metadata.
function compareBuild (a, b, loose) {
  var versionA = new SemVer(a, loose)
  var versionB = new SemVer(b, loose)
  return versionA.compare(versionB) || versionA.compareBuild(versionB)
}
exports.compareBuild = compareBuild

function rcompare (a, b, loose) {
  return compare(b, a, loose)
}
exports.rcompare = rcompare

// In-place sorts (Array#sort mutates `list`), ascending and descending.
// Dispatch through exports.compareBuild so monkey-patching still works.
function sort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compareBuild(a, b, loose)
  })
}
exports.sort = sort

function rsort (list, loose) {
  return list.sort(function (a, b) {
    return exports.compareBuild(b, a, loose)
  })
}
exports.rsort = rsort

// Boolean relational operators.
function gt (a, b, loose) {
  return compare(a, b, loose) > 0
}
exports.gt = gt

function lt (a, b, loose) {
  return compare(a, b, loose) < 0
}
exports.lt = lt

function eq (a, b, loose) {
  return compare(a, b, loose) === 0
}
exports.eq = eq

function neq (a, b, loose) {
  return compare(a, b, loose) !== 0
}
exports.neq = neq

function gte (a, b, loose) {
  return compare(a, b, loose) >= 0
}
exports.gte = gte

function lte (a, b, loose) {
  return compare(a, b, loose) <= 0
}
exports.lte = lte

// cmp is declared just below this block; function hoisting makes this safe.
exports.cmp = cmp
// Apply a comparison operator string to two versions.
// '===' / '!==' compare raw version strings (unwrapping SemVer objects);
// all other operators delegate to the semantic comparison helpers.
// Throws TypeError on an unrecognized operator.
function cmp (a, op, b, loose) {
  var raw = function (x) {
    return typeof x === 'object' ? x.version : x
  }
  if (op === '===') {
    return raw(a) === raw(b)
  }
  if (op === '!==') {
    return raw(a) !== raw(b)
  }
  if (op === '' || op === '=' || op === '==') {
    return eq(a, b, loose)
  }
  if (op === '!=') {
    return neq(a, b, loose)
  }
  if (op === '>') {
    return gt(a, b, loose)
  }
  if (op === '>=') {
    return gte(a, b, loose)
  }
  if (op === '<') {
    return lt(a, b, loose)
  }
  if (op === '<=') {
    return lte(a, b, loose)
  }
  throw new TypeError('Invalid operator: ' + op)
}
exports . Comparator = Comparator
function Comparator ( comp , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( comp instanceof Comparator ) {
if ( comp . loose === ! ! options . loose ) {
return comp
} else {
comp = comp . value
}
}
if ( ! ( this instanceof Comparator ) ) {
return new Comparator ( comp , options )
}
comp = comp . trim ( ) . split ( /\s+/ ) . join ( ' ' )
debug ( 'comparator' , comp , options )
this . options = options
this . loose = ! ! options . loose
this . parse ( comp )
if ( this . semver === ANY ) {
this . value = ''
} else {
this . value = this . operator + this . semver . version
}
debug ( 'comp' , this )
}
var ANY = { }
Comparator . prototype . parse = function ( comp ) {
var r = this . options . loose ? safeRe [ t . COMPARATORLOOSE ] : safeRe [ t . COMPARATOR ]
var m = comp . match ( r )
if ( ! m ) {
throw new TypeError ( 'Invalid comparator: ' + comp )
}
this . operator = m [ 1 ] !== undefined ? m [ 1 ] : ''
if ( this . operator === '=' ) {
this . operator = ''
}
// if it literally is just '>' or '' then allow anything.
if ( ! m [ 2 ] ) {
this . semver = ANY
} else {
this . semver = new SemVer ( m [ 2 ] , this . options . loose )
}
}
Comparator . prototype . toString = function ( ) {
return this . value
}
Comparator . prototype . test = function ( version ) {
debug ( 'Comparator.test' , version , this . options . loose )
if ( this . semver === ANY || version === ANY ) {
return true
}
if ( typeof version === 'string' ) {
try {
version = new SemVer ( version , this . options )
} catch ( er ) {
return false
}
}
return cmp ( version , this . operator , this . semver , this . options )
}
Comparator . prototype . intersects = function ( comp , options ) {
if ( ! ( comp instanceof Comparator ) ) {
throw new TypeError ( 'a Comparator is required' )
}
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
var rangeTmp
if ( this . operator === '' ) {
if ( this . value === '' ) {
return true
}
rangeTmp = new Range ( comp . value , options )
return satisfies ( this . value , rangeTmp , options )
} else if ( comp . operator === '' ) {
if ( comp . value === '' ) {
return true
}
rangeTmp = new Range ( this . value , options )
return satisfies ( comp . semver , rangeTmp , options )
}
var sameDirectionIncreasing =
( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '>=' || comp . operator === '>' )
var sameDirectionDecreasing =
( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '<=' || comp . operator === '<' )
var sameSemVer = this . semver . version === comp . semver . version
var differentDirectionsInclusive =
( this . operator === '>=' || this . operator === '<=' ) &&
( comp . operator === '>=' || comp . operator === '<=' )
var oppositeDirectionsLessThan =
cmp ( this . semver , '<' , comp . semver , options ) &&
( ( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '<=' || comp . operator === '<' ) )
var oppositeDirectionsGreaterThan =
cmp ( this . semver , '>' , comp . semver , options ) &&
( ( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '>=' || comp . operator === '>' ) )
return sameDirectionIncreasing || sameDirectionDecreasing ||
( sameSemVer && differentDirectionsInclusive ) ||
oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
exports . Range = Range
function Range ( range , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( range instanceof Range ) {
if ( range . loose === ! ! options . loose &&
range . includePrerelease === ! ! options . includePrerelease ) {
return range
} else {
return new Range ( range . raw , options )
}
}
if ( range instanceof Comparator ) {
return new Range ( range . value , options )
}
if ( ! ( this instanceof Range ) ) {
return new Range ( range , options )
}
this . options = options
this . loose = ! ! options . loose
this . includePrerelease = ! ! options . includePrerelease
// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this . raw = range
. trim ( )
. split ( /\s+/ )
. join ( ' ' )
// First, split based on boolean or ||
this . set = this . raw . split ( '||' ) . map ( function ( range ) {
return this . parseRange ( range . trim ( ) )
} , this ) . filter ( function ( c ) {
// throw out any that are not relevant for whatever reason
return c . length
} )
if ( ! this . set . length ) {
throw new TypeError ( 'Invalid SemVer Range: ' + this . raw )
}
this . format ( )
}
// Rebuild the normalized `range` string from the parsed comparator sets.
Range.prototype.format = function () {
  var pieces = this.set.map(function (comps) {
    return comps.join(' ').trim()
  })
  this.range = pieces.join('||').trim()
  return this.range
}
Range.prototype.toString = function () {
  return this.range
}
Range . prototype . parseRange = function ( range ) {
var loose = this . options . loose
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? safeRe [ t . HYPHENRANGELOOSE ] : safeRe [ t . HYPHENRANGE ]
range = range . replace ( hr , hyphenReplace )
debug ( 'hyphen replace' , range )
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range . replace ( safeRe [ t . COMPARATORTRIM ] , comparatorTrimReplace )
debug ( 'comparator trim' , range , safeRe [ t . COMPARATORTRIM ] )
// `~ 1.2.3` => `~1.2.3`
range = range . replace ( safeRe [ t . TILDETRIM ] , tildeTrimReplace )
// `^ 1.2.3` => `^1.2.3`
range = range . replace ( safeRe [ t . CARETTRIM ] , caretTrimReplace )
// normalize spaces
range = range . split ( /\s+/ ) . join ( ' ' )
// At this point, the range is completely trimmed and
// ready to be split into comparators.
var compRe = loose ? safeRe [ t . COMPARATORLOOSE ] : safeRe [ t . COMPARATOR ]
var set = range . split ( ' ' ) . map ( function ( comp ) {
return parseComparator ( comp , this . options )
} , this ) . join ( ' ' ) . split ( /\s+/ )
if ( this . options . loose ) {
// in loose mode, throw out any that are not valid comparators
set = set . filter ( function ( comp ) {
return ! ! comp . match ( compRe )
} )
}
set = set . map ( function ( comp ) {
return new Comparator ( comp , this . options )
} , this )
return set
}
Range . prototype . intersects = function ( range , options ) {
if ( ! ( range instanceof Range ) ) {
throw new TypeError ( 'a Range is required' )
}
return this . set . some ( function ( thisComparators ) {
return (
isSatisfiable ( thisComparators , options ) &&
range . set . some ( function ( rangeComparators ) {
return (
isSatisfiable ( rangeComparators , options ) &&
thisComparators . every ( function ( thisComparator ) {
return rangeComparators . every ( function ( rangeComparator ) {
return thisComparator . intersects ( rangeComparator , options )
} )
} )
)
} )
)
} )
}
// take a set of comparators and determine whether there
// exists a version which can satisfy it
function isSatisfiable (comparators, options) {
  var rest = comparators.slice()
  var current = rest.pop()
  var ok = true
  // Check each comparator against all the ones that remain before it.
  while (ok && rest.length) {
    ok = rest.every(function (other) {
      return current.intersects(other, options)
    })
    current = rest.pop()
  }
  return ok
}
// Mostly just for testing and legacy API reasons
function toComparators (range, options) {
  var parsed = new Range(range, options)
  return parsed.set.map(function (comparators) {
    var values = comparators.map(function (c) {
      return c.value
    })
    return values.join(' ').trim().split(' ')
  })
}
exports.toComparators = toComparators
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator (comp, options) {
  debug('comp', comp, options)
  // Desugar each shorthand in order, logging after each stage.
  var stages = [
    ['caret', replaceCarets],
    ['tildes', replaceTildes],
    ['xrange', replaceXRanges],
    ['stars', replaceStars]
  ]
  stages.forEach(function (stage) {
    comp = stage[1](comp, options)
    debug(stage[0], comp)
  })
  return comp
}
// True for a "wildcard" version component: missing, 'x'/'X', or '*'.
function isX (id) {
  if (!id) {
    return true
  }
  var lowered = id.toLowerCase()
  return lowered === 'x' || id === '*'
}
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
function replaceTildes (comp, options) {
  var parts = comp.trim().split(/\s+/)
  var replaced = parts.map(function (piece) {
    return replaceTilde(piece, options)
  })
  return replaced.join(' ')
}
function replaceTilde ( comp , options ) {
var r = options . loose ? safeRe [ t . TILDELOOSE ] : safeRe [ t . TILDE ]
return comp . replace ( r , function ( _ , M , m , p , pr ) {
debug ( 'tilde' , comp , _ , M , m , p , pr )
var ret
if ( isX ( M ) ) {
ret = ''
} else if ( isX ( m ) ) {
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( isX ( p ) ) {
// ~1.2 == >=1.2.0 <1.3.0
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
} else if ( pr ) {
debug ( 'replaceTilde pr' , pr )
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0'
} else {
// ~1.2.3 == >=1.2.3 <1.3.0
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
debug ( 'tilde return' , ret )
return ret
} )
}
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets (comp, options) {
  var parts = comp.trim().split(/\s+/)
  var replaced = parts.map(function (piece) {
    return replaceCaret(piece, options)
  })
  return replaced.join(' ')
}
function replaceCaret ( comp , options ) {
debug ( 'caret' , comp , options )
var r = options . loose ? safeRe [ t . CARETLOOSE ] : safeRe [ t . CARET ]
return comp . replace ( r , function ( _ , M , m , p , pr ) {
debug ( 'caret' , comp , _ , M , m , p , pr )
var ret
if ( isX ( M ) ) {
ret = ''
} else if ( isX ( m ) ) {
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( isX ( p ) ) {
if ( M === '0' ) {
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
} else {
ret = '>=' + M + '.' + m + '.0 <' + ( + M + 1 ) + '.0.0'
}
} else if ( pr ) {
debug ( 'replaceCaret pr' , pr )
if ( M === '0' ) {
if ( m === '0' ) {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + m + '.' + ( + p + 1 )
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + ( + M + 1 ) + '.0.0'
}
} else {
debug ( 'no pr' )
if ( M === '0' ) {
if ( m === '0' ) {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + m + '.' + ( + p + 1 )
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + ( + M + 1 ) + '.0.0'
}
}
debug ( 'caret return' , ret )
return ret
} )
}
// Expand x-range shorthand in every space-separated piece of `comp`.
function replaceXRanges (comp, options) {
  debug('replaceXRanges', comp, options)
  var replaced = comp.split(/\s+/).map(function (piece) {
    return replaceXRange(piece, options)
  })
  return replaced.join(' ')
}
function replaceXRange ( comp , options ) {
comp = comp . trim ( )
var r = options . loose ? safeRe [ t . XRANGELOOSE ] : safeRe [ t . XRANGE ]
return comp . replace ( r , function ( ret , gtlt , M , m , p , pr ) {
debug ( 'xRange' , comp , ret , gtlt , M , m , p , pr )
var xM = isX ( M )
var xm = xM || isX ( m )
var xp = xm || isX ( p )
var anyX = xp
if ( gtlt === '=' && anyX ) {
gtlt = ''
}
// if we're including prereleases in the match, then we need
// to fix this to -0, the lowest possible prerelease value
pr = options . includePrerelease ? '-0' : ''
if ( xM ) {
if ( gtlt === '>' || gtlt === '<' ) {
// nothing is allowed
ret = '<0.0.0-0'
} else {
// nothing is forbidden
ret = '*'
}
} else if ( gtlt && anyX ) {
// we know patch is an x, because we have any x at all.
// replace X with 0
if ( xm ) {
m = 0
}
p = 0
if ( gtlt === '>' ) {
// >1 => >=2.0.0
// >1.2 => >=1.3.0
// >1.2.3 => >= 1.2.4
gtlt = '>='
if ( xm ) {
M = + M + 1
m = 0
p = 0
} else {
m = + m + 1
p = 0
}
} else if ( gtlt === '<=' ) {
// <=0.7.x is actually <0.8.0, since any 0.7.x should
// pass. Similarly, <=7.x is actually <8.0.0, etc.
gtlt = '<'
if ( xm ) {
M = + M + 1
} else {
m = + m + 1
}
}
ret = gtlt + M + '.' + m + '.' + p + pr
} else if ( xm ) {
ret = '>=' + M + '.0.0' + pr + ' <' + ( + M + 1 ) + '.0.0' + pr
} else if ( xp ) {
ret = '>=' + M + '.' + m + '.0' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0' + pr
}
debug ( 'xRange return' , ret )
return ret
} )
}
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars (comp, options) {
  debug('replaceStars', comp, options)
  // Looseness is ignored here. star is always as loose as it gets!
  var trimmed = comp.trim()
  return trimmed.replace(safeRe[t.STAR], '')
}
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
//
// Translate a hyphen range into an equivalent '>=from <=to' pair,
// widening X-range endpoints as shown above.
// (Two stray VCS timestamp lines that had been pasted into this function
// — and were syntax errors — have been removed.)
function hyphenReplace ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr, tb) {
  // Lower bound: missing components round down to 0, inclusive bound.
  if (isX(fM)) {
    from = ''
  } else if (isX(fm)) {
    from = '>=' + fM + '.0.0'
  } else if (isX(fp)) {
    from = '>=' + fM + '.' + fm + '.0'
  } else {
    from = '>=' + from
  }
  // Upper bound: an X component makes the bound exclusive of the next tier.
  if (isX(tM)) {
    to = ''
  } else if (isX(tm)) {
    to = '<' + (+tM + 1) + '.0.0'
  } else if (isX(tp)) {
    to = '<' + tM + '.' + (+tm + 1) + '.0'
  } else if (tpr) {
    to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
  } else {
    to = '<=' + to
  }
  return (from + ' ' + to).trim()
}
// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function (version) {
  if (!version) {
    return false
  }
  if (typeof version === 'string') {
    try {
      version = new SemVer(version, this.options)
    } catch (er) {
      // Unparseable versions never match.
      return false
    }
  }
  var options = this.options
  return this.set.some(function (comparators) {
    return testSet(comparators, version, options)
  })
}
function testSet ( set , version , options ) {
for ( var i = 0 ; i < set . length ; i ++ ) {
if ( ! set [ i ] . test ( version ) ) {
return false
}
}
if ( version . prerelease . length && ! options . includePrerelease ) {
// Find the set of versions that are allowed to have prereleases
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
// That should allow `1.2.3-pr.2` to pass.
// However, `1.2.4-alpha.notready` should NOT be allowed,
// even though it's within the range set by the comparators.
for ( i = 0 ; i < set . length ; i ++ ) {
debug ( set [ i ] . semver )
if ( set [ i ] . semver === ANY ) {
continue
}
if ( set [ i ] . semver . prerelease . length > 0 ) {
var allowed = set [ i ] . semver
if ( allowed . major === version . major &&
allowed . minor === version . minor &&
allowed . patch === version . patch ) {
return true
}
}
}
// Version has a -pre, but it's not one of the ones we like.
return false
}
return true
}
exports.satisfies = satisfies
// True when `version` matches `range`; an invalid range simply fails.
function satisfies (version, range, options) {
  var parsed
  try {
    parsed = new Range(range, options)
  } catch (er) {
    return false
  }
  return parsed.test(version)
}
exports.maxSatisfying = maxSatisfying
// Return the highest entry of `versions` that satisfies `range`,
// or null when none do (or the range itself is invalid).
function maxSatisfying (versions, range, options) {
  var best = null
  var bestSV = null
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  versions.forEach(function (v) {
    if (!rangeObj.test(v)) {
      return
    }
    // Keep a parsed copy of the current best to avoid re-parsing it.
    if (!best || bestSV.compare(v) === -1) {
      best = v
      bestSV = new SemVer(best, options)
    }
  })
  return best
}
exports.minSatisfying = minSatisfying
// Return the lowest entry of `versions` that satisfies `range`,
// or null when none do (or the range itself is invalid).
function minSatisfying (versions, range, options) {
  var best = null
  var bestSV = null
  var rangeObj
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    return null
  }
  versions.forEach(function (v) {
    if (!rangeObj.test(v)) {
      return
    }
    // Keep a parsed copy of the current best to avoid re-parsing it.
    if (!best || bestSV.compare(v) === 1) {
      best = v
      bestSV = new SemVer(best, options)
    }
  })
  return best
}
exports . minVersion = minVersion
function minVersion ( range , loose ) {
range = new Range ( range , loose )
var minver = new SemVer ( '0.0.0' )
if ( range . test ( minver ) ) {
return minver
}
minver = new SemVer ( '0.0.0-0' )
if ( range . test ( minver ) ) {
return minver
}
minver = null
for ( var i = 0 ; i < range . set . length ; ++ i ) {
var comparators = range . set [ i ]
comparators . forEach ( function ( comparator ) {
// Clone to avoid manipulating the comparator's semver object.
var compver = new SemVer ( comparator . semver . version )
switch ( comparator . operator ) {
case '>' :
if ( compver . prerelease . length === 0 ) {
compver . patch ++
} else {
compver . prerelease . push ( 0 )
}
compver . raw = compver . format ( )
/* fallthrough */
case '' :
case '>=' :
if ( ! minver || gt ( minver , compver ) ) {
minver = compver
}
break
case '<' :
case '<=' :
/* Ignore maximum versions */
break
/* istanbul ignore next */
default :
throw new Error ( 'Unexpected operation: ' + comparator . operator )
}
} )
}
if ( minver && range . test ( minver ) ) {
return minver
}
return null
}
exports.validRange = validRange
function validRange (range, options) {
  try {
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    var normalized = new Range(range, options).range
    return normalized || '*'
  } catch (er) {
    return null
  }
}
// Determine if version is less than all the versions possible in the range
function ltr (version, range, options) {
  return outside(version, range, '<', options)
}
exports.ltr = ltr

// Determine if version is greater than all the versions possible in the range.
function gtr (version, range, options) {
  return outside(version, range, '>', options)
}
exports.gtr = gtr
exports . outside = outside
function outside ( version , range , hilo , options ) {
version = new SemVer ( version , options )
range = new Range ( range , options )
var gtfn , ltefn , ltfn , comp , ecomp
switch ( hilo ) {
case '>' :
gtfn = gt
ltefn = lte
ltfn = lt
comp = '>'
ecomp = '>='
break
case '<' :
gtfn = lt
ltefn = gte
ltfn = gt
comp = '<'
ecomp = '<='
break
default :
throw new TypeError ( 'Must provide a hilo val of "<" or ">"' )
}
// If it satisifes the range it is not outside
if ( satisfies ( version , range , options ) ) {
return false
}
// From now on, variable terms are as if we're in "gtr" mode.
// but note that everything is flipped for the "ltr" function.
for ( var i = 0 ; i < range . set . length ; ++ i ) {
var comparators = range . set [ i ]
var high = null
var low = null
comparators . forEach ( function ( comparator ) {
if ( comparator . semver === ANY ) {
comparator = new Comparator ( '>=0.0.0' )
}
high = high || comparator
low = low || comparator
if ( gtfn ( comparator . semver , high . semver , options ) ) {
high = comparator
} else if ( ltfn ( comparator . semver , low . semver , options ) ) {
low = comparator
}
} )
// If the edge version comparator has a operator then our version
// isn't outside it
if ( high . operator === comp || high . operator === ecomp ) {
return false
}
// If the lowest version comparator has an operator and our version
// is less than it then it isn't higher than the range
if ( ( ! low . operator || low . operator === comp ) &&
ltefn ( version , low . semver ) ) {
return false
} else if ( low . operator === ecomp && ltfn ( version , low . semver ) ) {
return false
}
}
return true
}
exports.prerelease = prerelease
// Return the prerelease identifier array of `version`, or null when
// there is none (or the version fails to parse).
function prerelease (version, options) {
  var parsed = parse(version, options)
  if (!parsed || !parsed.prerelease.length) {
    return null
  }
  return parsed.prerelease
}
exports.intersects = intersects
// Do the two ranges share at least one satisfiable version?
// Note: `options` only affects parsing here, matching the original API.
function intersects (r1, r2, options) {
  var rangeA = new Range(r1, options)
  var rangeB = new Range(r2, options)
  return rangeA.intersects(rangeB)
}
exports . coerce = coerce
function coerce ( version , options ) {
if ( version instanceof SemVer ) {
return version
}
if ( typeof version === 'number' ) {
version = String ( version )
}
if ( typeof version !== 'string' ) {
return null
}
options = options || { }
var match = null
if ( ! options . rtl ) {
match = version . match ( safeRe [ t . COERCE ] )
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ( ( next = safeRe [ t . COERCERTL ] . exec ( version ) ) &&
( ! match || match . index + match [ 0 ] . length !== version . length )
) {
if ( ! match ||
next . index + next [ 0 ] . length !== match . index + match [ 0 ] . length ) {
match = next
}
safeRe [ t . COERCERTL ] . lastIndex = next . index + next [ 1 ] . length + next [ 2 ] . length
}
// leave it in a clean state
safeRe [ t . COERCERTL ] . lastIndex = - 1
}
if ( match === null ) {
return null
}
return parse ( match [ 2 ] +
'.' + ( match [ 3 ] || '0' ) +
'.' + ( match [ 4 ] || '0' ) , options )
}
/***/ } ) ,
/***/ 4294 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
module . exports = _ _nccwpck _require _ _ ( 4219 ) ;
/***/ } ) ,
/***/ 4219 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
var net = _ _nccwpck _require _ _ ( 1808 ) ;
var tls = _ _nccwpck _require _ _ ( 4404 ) ;
var http = _ _nccwpck _require _ _ ( 3685 ) ;
var https = _ _nccwpck _require _ _ ( 5687 ) ;
var events = _ _nccwpck _require _ _ ( 2361 ) ;
var assert = _ _nccwpck _require _ _ ( 9491 ) ;
var util = _ _nccwpck _require _ _ ( 3837 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
// Shared factory: wire a TunnelingAgent to the given request
// implementation (which decides whether the PROXY is plain http or
// https), optionally upgrading created sockets to TLS when the tunneled
// destination itself is https.
function createTunnelAgent(options, requestFn, secureDestination) {
  var agent = new TunnelingAgent(options);
  agent.request = requestFn;
  if (secureDestination) {
    agent.createSocket = createSecureSocket;
    agent.defaultPort = 443;
  }
  return agent;
}

function httpOverHttp(options) {
  return createTunnelAgent(options, http.request, false);
}

function httpsOverHttp(options) {
  return createTunnelAgent(options, http.request, true);
}

function httpOverHttps(options) {
  return createTunnelAgent(options, https.request, false);
}

function httpsOverHttps(options) {
  return createTunnelAgent(options, https.request, true);
}
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect the request to connect same origin server,
// reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
// Establish a CONNECT tunnel through the proxy, then hand the raw socket
// to `cb`. A placeholder object occupies the pool slot until the tunnel
// is up so maxSockets accounting stays correct; on failure the
// placeholder is removed and an ECONNRESET error is emitted on the
// originating request.
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated, unsafe `new Buffer(...)` (DEP0005).
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the placeholder for the live socket, preserving pool position.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();
    debug('tunneling socket could not be established, cause=%s\n',
      cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
      'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
/**
 * Removes a socket (or placeholder) from the pool. If a request is queued
 * waiting for capacity, a replacement socket is created for it right away.
 */
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var index = this.sockets.indexOf(socket);
  if (index === -1) {
    return;
  }
  this.sockets.splice(index, 1);

  var waiting = this.requests.shift();
  if (!waiting) {
    return;
  }
  // A freed slot plus a pending request: spin up a new tunnel socket and
  // hand it to that request.
  this.createSocket(waiting, function(newSocket) {
    waiting.request.onSocket(newSocket);
  });
};
/**
 * Like createSocket, but upgrades the established CONNECT tunnel to TLS
 * before handing it back, so the origin sees an encrypted connection.
 */
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(rawSocket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOpts = mergeOptions({}, self.options, {
      socket: rawSocket,
      // SNI name: the request's Host header minus any ":port" suffix,
      // falling back to the target host.
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var tlsSocket = tls.connect(0, tlsOpts);
    self.sockets[self.sockets.indexOf(rawSocket)] = tlsSocket;
    cb(tlsSocket);
  });
}
/**
 * Normalizes agent arguments to an options object. v0.10-era callers pass
 * discrete (host, port, localAddress) arguments; newer callers pass a
 * single options object which is returned untouched.
 */
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // for v0.11 or later: already an options object
  }
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
/**
 * Shallow-merges every argument after the first into `target`, later
 * arguments winning. Keys whose value is `undefined` are skipped so they
 * cannot clobber earlier values. Non-object arguments are ignored.
 * Returns `target` for chaining.
 */
function mergeOptions(target) {
  for (var i = 1; i < arguments.length; ++i) {
    var source = arguments[i];
    if (typeof source !== 'object') {
      continue;
    }
    var names = Object.keys(source);
    for (var j = 0; j < names.length; ++j) {
      var name = names[j];
      if (source[name] !== undefined) {
        target[name] = source[name];
      }
    }
  }
  return target;
}
// Diagnostic logger: a no-op unless NODE_DEBUG mentions "tunnel", in which
// case messages are written to stderr with a "TUNNEL:" prefix.
var debug;
var tunnelDebugEnabled =
    process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG);
if (tunnelDebugEnabled) {
  debug = function() {
    var parts = Array.prototype.slice.call(arguments);
    if (typeof parts[0] === 'string') {
      // Fold the prefix into the format string so printf-style
      // placeholders keep lining up with the remaining arguments.
      parts[0] = 'TUNNEL: ' + parts[0];
    } else {
      parts.unshift('TUNNEL:');
    }
    console.error.apply(console, parts);
  };
} else {
  debug = function() {};
}
exports.debug = debug; // for test
/***/ } ) ,
/***/ 1773 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const Client = _ _nccwpck _require _ _ ( 3598 )
const Dispatcher = _ _nccwpck _require _ _ ( 412 )
const errors = _ _nccwpck _require _ _ ( 8045 )
const Pool = _ _nccwpck _require _ _ ( 4634 )
const BalancedPool = _ _nccwpck _require _ _ ( 7931 )
const Agent = _ _nccwpck _require _ _ ( 7890 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { InvalidArgumentError } = errors
const api = _ _nccwpck _require _ _ ( 4059 )
const buildConnector = _ _nccwpck _require _ _ ( 2067 )
const MockClient = _ _nccwpck _require _ _ ( 8687 )
const MockAgent = _ _nccwpck _require _ _ ( 6771 )
const MockPool = _ _nccwpck _require _ _ ( 6193 )
const mockErrors = _ _nccwpck _require _ _ ( 888 )
const ProxyAgent = _ _nccwpck _require _ _ ( 7858 )
const RetryHandler = _ _nccwpck _require _ _ ( 2286 )
const { getGlobalDispatcher , setGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
const DecoratorHandler = _ _nccwpck _require _ _ ( 6930 )
const RedirectHandler = _ _nccwpck _require _ _ ( 2860 )
const createRedirectInterceptor = _ _nccwpck _require _ _ ( 8861 )
// Feature-detect the Node crypto builtin (absent when Node was compiled
// without crypto support); gates the WebSocket export further below.
let hasCrypto
try {
  __nccwpck_require__(6113)
  hasCrypto = true
} catch {
  hasCrypto = false
}

// Mix the high-level API verbs (request/stream/pipeline/connect/upgrade)
// into every Dispatcher instance.
Object.assign(Dispatcher.prototype, api)
module . exports . Dispatcher = Dispatcher
module . exports . Client = Client
module . exports . Pool = Pool
module . exports . BalancedPool = BalancedPool
module . exports . Agent = Agent
module . exports . ProxyAgent = ProxyAgent
module . exports . RetryHandler = RetryHandler
module . exports . DecoratorHandler = DecoratorHandler
module . exports . RedirectHandler = RedirectHandler
module . exports . createRedirectInterceptor = createRedirectInterceptor
module . exports . buildConnector = buildConnector
module . exports . errors = errors
/**
 * Wraps an api verb (`request`, `stream`, ...) into the public
 * `(url[, opts][, handler])` form: validates arguments, resolves the URL
 * (optionally overriding its path via `opts.path`), picks the dispatcher
 * (explicit `opts.dispatcher` or the global one) and invokes `fn` on it.
 */
function makeDispatcher (fn) {
  return (url, opts, handler) => {
    // Allow the (url, handler) call shape.
    if (typeof opts === 'function') {
      handler = opts
      opts = null
    }

    const urlIsUsable =
      url && (typeof url === 'string' || typeof url === 'object' || url instanceof URL)
    if (!urlIsUsable) {
      throw new InvalidArgumentError('invalid url')
    }

    if (opts != null && typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (opts && opts.path != null) {
      if (typeof opts.path !== 'string') {
        throw new InvalidArgumentError('invalid opts.path')
      }

      // Combine the url's origin with the caller-supplied path.
      const path = opts.path.startsWith('/') ? opts.path : `/${opts.path}`
      url = new URL(util.parseOrigin(url).origin + path)
    } else {
      // An object url with no opts doubles as the options bag.
      if (!opts) {
        opts = typeof url === 'object' ? url : {}
      }

      url = util.parseURL(url)
    }

    const { agent, dispatcher = getGlobalDispatcher() } = opts

    if (agent) {
      throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')
    }

    return fn.call(dispatcher, {
      ...opts,
      origin: url.origin,
      path: url.search ? `${url.pathname}${url.search}` : url.pathname,
      // Bodies default to PUT, body-less calls to GET.
      method: opts.method || (opts.body ? 'PUT' : 'GET')
    }, handler)
  }
}
module.exports.setGlobalDispatcher = setGlobalDispatcher
module.exports.getGlobalDispatcher = getGlobalDispatcher

// fetch and its companion classes require Node >= 16.8.
if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {
  let fetchImpl = null
  module.exports.fetch = async function fetch (resource) {
    // Lazily load the fetch implementation on first use.
    if (!fetchImpl) {
      fetchImpl = (__nccwpck_require__(4881).fetch)
    }

    try {
      return await fetchImpl(...arguments)
    } catch (err) {
      if (typeof err === 'object') {
        // Re-anchor the stack trace at this wrapper for clearer errors.
        Error.captureStackTrace(err, this)
      }

      throw err
    }
  }
  module.exports.Headers = __nccwpck_require__(554).Headers
  module.exports.Response = __nccwpck_require__(7823).Response
  module.exports.Request = __nccwpck_require__(8359).Request
  module.exports.FormData = __nccwpck_require__(2015).FormData
  module.exports.File = __nccwpck_require__(8511).File
  module.exports.FileReader = __nccwpck_require__(1446).FileReader

  const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(1246)

  module.exports.setGlobalOrigin = setGlobalOrigin
  module.exports.getGlobalOrigin = getGlobalOrigin

  const { CacheStorage } = __nccwpck_require__(7907)
  const { kConstruct } = __nccwpck_require__(9174)

  // Cache & CacheStorage are tightly coupled with fetch. Even if it may run
  // in an older version of Node, it doesn't have any use without fetch.
  module.exports.caches = new CacheStorage(kConstruct)
}

// Cookie and MIME helpers require Node >= 16.
if (util.nodeMajor >= 16) {
  const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(1724)

  module.exports.deleteCookie = deleteCookie
  module.exports.getCookies = getCookies
  module.exports.getSetCookies = getSetCookies
  module.exports.setCookie = setCookie

  const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685)

  module.exports.parseMIMEType = parseMIMEType
  module.exports.serializeAMimeType = serializeAMimeType
}

// WebSocket requires Node >= 18 and a working crypto builtin.
if (util.nodeMajor >= 18 && hasCrypto) {
  const { WebSocket } = __nccwpck_require__(4284)

  module.exports.WebSocket = WebSocket
}

// Public API verbs, each bound to a dispatcher via makeDispatcher.
module.exports.request = makeDispatcher(api.request)
module.exports.stream = makeDispatcher(api.stream)
module.exports.pipeline = makeDispatcher(api.pipeline)
module.exports.connect = makeDispatcher(api.connect)
module.exports.upgrade = makeDispatcher(api.upgrade)

// Mocking utilities for tests.
module.exports.MockClient = MockClient
module.exports.MockPool = MockPool
module.exports.MockAgent = MockAgent
module.exports.mockErrors = mockErrors
/***/ } ) ,
/***/ 7890 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const { kClients , kRunning , kClose , kDestroy , kDispatch , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const DispatcherBase = _ _nccwpck _require _ _ ( 4839 )
const Pool = _ _nccwpck _require _ _ ( 4634 )
const Client = _ _nccwpck _require _ _ ( 3598 )
const util = _ _nccwpck _require _ _ ( 3983 )
const createRedirectInterceptor = _ _nccwpck _require _ _ ( 8861 )
const { WeakRef , FinalizationRegistry } = _ _nccwpck _require _ _ ( 6436 ) ( )
const kOnConnect = Symbol ( 'onConnect' )
const kOnDisconnect = Symbol ( 'onDisconnect' )
const kOnConnectionError = Symbol ( 'onConnectionError' )
const kMaxRedirections = Symbol ( 'maxRedirections' )
const kOnDrain = Symbol ( 'onDrain' )
const kFactory = Symbol ( 'factory' )
const kFinalizer = Symbol ( 'finalizer' )
const kOptions = Symbol ( 'options' )
/**
 * Default per-origin dispatcher factory: a single-connection configuration
 * gets a bare Client, anything else gets a connection Pool.
 */
function defaultFactory (origin, opts) {
  if (opts && opts.connections === 1) {
    return new Client(origin, opts)
  }
  return new Pool(origin, opts)
}
class Agent extends DispatcherBase {
constructor ( { factory = defaultFactory , maxRedirections = 0 , connect , ... options } = { } ) {
super ( )
if ( typeof factory !== 'function' ) {
throw new InvalidArgumentError ( 'factory must be a function.' )
}
if ( connect != null && typeof connect !== 'function' && typeof connect !== 'object' ) {
throw new InvalidArgumentError ( 'connect must be a function or an object' )
}
if ( ! Number . isInteger ( maxRedirections ) || maxRedirections < 0 ) {
throw new InvalidArgumentError ( 'maxRedirections must be a positive number' )
}
if ( connect && typeof connect !== 'function' ) {
connect = { ... connect }
}
this [ kInterceptors ] = options . interceptors && options . interceptors . Agent && Array . isArray ( options . interceptors . Agent )
? options . interceptors . Agent
: [ createRedirectInterceptor ( { maxRedirections } ) ]
this [ kOptions ] = { ... util . deepClone ( options ) , connect }
this [ kOptions ] . interceptors = options . interceptors
? { ... options . interceptors }
: undefined
this [ kMaxRedirections ] = maxRedirections
this [ kFactory ] = factory
this [ kClients ] = new Map ( )
this [ kFinalizer ] = new FinalizationRegistry ( /* istanbul ignore next: gc is undeterministic */ key => {
const ref = this [ kClients ] . get ( key )
if ( ref !== undefined && ref . deref ( ) === undefined ) {
this [ kClients ] . delete ( key )
}
} )
const agent = this
this [ kOnDrain ] = ( origin , targets ) => {
agent . emit ( 'drain' , origin , [ agent , ... targets ] )
}
this [ kOnConnect ] = ( origin , targets ) => {
agent . emit ( 'connect' , origin , [ agent , ... targets ] )
}
this [ kOnDisconnect ] = ( origin , targets , err ) => {
agent . emit ( 'disconnect' , origin , [ agent , ... targets ] , err )
}
this [ kOnConnectionError ] = ( origin , targets , err ) => {
agent . emit ( 'connectionError' , origin , [ agent , ... targets ] , err )
}
}
get [ kRunning ] ( ) {
let ret = 0
for ( const ref of this [ kClients ] . values ( ) ) {
const client = ref . deref ( )
/* istanbul ignore next: gc is undeterministic */
if ( client ) {
ret += client [ kRunning ]
}
}
return ret
}
[ kDispatch ] ( opts , handler ) {
let key
if ( opts . origin && ( typeof opts . origin === 'string' || opts . origin instanceof URL ) ) {
key = String ( opts . origin )
} else {
throw new InvalidArgumentError ( 'opts.origin must be a non-empty string or URL.' )
}
const ref = this [ kClients ] . get ( key )
let dispatcher = ref ? ref . deref ( ) : null
if ( ! dispatcher ) {
dispatcher = this [ kFactory ] ( opts . origin , this [ kOptions ] )
. on ( 'drain' , this [ kOnDrain ] )
. on ( 'connect' , this [ kOnConnect ] )
. on ( 'disconnect' , this [ kOnDisconnect ] )
. on ( 'connectionError' , this [ kOnConnectionError ] )
this [ kClients ] . set ( key , new WeakRef ( dispatcher ) )
this [ kFinalizer ] . register ( dispatcher , key )
}
return dispatcher . dispatch ( opts , handler )
}
async [ kClose ] ( ) {
const closePromises = [ ]
for ( const ref of this [ kClients ] . values ( ) ) {
const client = ref . deref ( )
/* istanbul ignore else: gc is undeterministic */
if ( client ) {
closePromises . push ( client . close ( ) )
}
}
await Promise . all ( closePromises )
}
async [ kDestroy ] ( err ) {
const destroyPromises = [ ]
for ( const ref of this [ kClients ] . values ( ) ) {
const client = ref . deref ( )
/* istanbul ignore else: gc is undeterministic */
if ( client ) {
destroyPromises . push ( client . destroy ( err ) )
}
}
await Promise . all ( destroyPromises )
}
}
module . exports = Agent
/***/ } ) ,
/***/ 7032 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
const { addAbortListener } = _ _nccwpck _require _ _ ( 3983 )
const { RequestAbortedError } = _ _nccwpck _require _ _ ( 8045 )
const kListener = Symbol ( 'kListener' )
const kSignal = Symbol ( 'kSignal' )
/**
 * Aborts a handler: prefers its registered abort hook; a handler not yet
 * connected gets a RequestAbortedError through its error path instead.
 */
function abort (self) {
  if (!self.abort) {
    self.onError(new RequestAbortedError())
    return
  }
  self.abort()
}
/**
 * Wires an abort signal (AbortSignal or EventEmitter-style) to a handler.
 * An already-aborted signal aborts immediately; otherwise the signal and
 * listener are stashed so removeSignal can detach them later.
 */
function addSignal (self, signal) {
  self[kSignal] = null
  self[kListener] = null

  if (!signal) {
    return
  }

  if (signal.aborted) {
    abort(self)
    return
  }

  const onAbort = () => {
    abort(self)
  }
  self[kSignal] = signal
  self[kListener] = onAbort
  addAbortListener(signal, onAbort)
}
function removeSignal ( self ) {
if ( ! self [ kSignal ] ) {
return
}
if ( 'removeEventListener' in self [ kSignal ] ) {
self [ kSignal ] . removeEventListener ( 'abort' , self [ kListener ] )
} else {
self [ kSignal ] . removeListener ( 'abort' , self [ kListener ] )
}
self [ kSignal ] = null
self [ kListener ] = null
}
module . exports = {
addSignal ,
removeSignal
}
/***/ } ) ,
/***/ 9744 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { AsyncResource } = _ _nccwpck _require _ _ ( 852 )
const { InvalidArgumentError , RequestAbortedError , SocketError } = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { addSignal , removeSignal } = _ _nccwpck _require _ _ ( 7032 )
class ConnectHandler extends AsyncResource {
constructor ( opts , callback ) {
if ( ! opts || typeof opts !== 'object' ) {
throw new InvalidArgumentError ( 'invalid opts' )
}
if ( typeof callback !== 'function' ) {
throw new InvalidArgumentError ( 'invalid callback' )
}
const { signal , opaque , responseHeaders } = opts
if ( signal && typeof signal . on !== 'function' && typeof signal . addEventListener !== 'function' ) {
throw new InvalidArgumentError ( 'signal must be an EventEmitter or EventTarget' )
}
super ( 'UNDICI_CONNECT' )
this . opaque = opaque || null
this . responseHeaders = responseHeaders || null
this . callback = callback
this . abort = null
addSignal ( this , signal )
}
onConnect ( abort , context ) {
if ( ! this . callback ) {
throw new RequestAbortedError ( )
}
this . abort = abort
this . context = context
}
onHeaders ( ) {
throw new SocketError ( 'bad connect' , null )
}
onUpgrade ( statusCode , rawHeaders , socket ) {
const { callback , opaque , context } = this
removeSignal ( this )
this . callback = null
let headers = rawHeaders
// Indicates is an HTTP2Session
if ( headers != null ) {
headers = this . responseHeaders === 'raw' ? util . parseRawHeaders ( rawHeaders ) : util . parseHeaders ( rawHeaders )
}
this . runInAsyncScope ( callback , null , null , {
statusCode ,
headers ,
socket ,
opaque ,
context
} )
}
onError ( err ) {
const { callback , opaque } = this
removeSignal ( this )
if ( callback ) {
this . callback = null
queueMicrotask ( ( ) => {
this . runInAsyncScope ( callback , null , err , { opaque } )
} )
}
}
}
/**
 * Issues a CONNECT request on this dispatcher. With no callback, returns a
 * Promise by recursing with a node-style callback. Synchronous dispatch
 * failures are delivered to the callback on a microtask.
 */
function connect (opts, callback) {
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      connect.call(this, opts, (err, data) => (err ? reject(err) : resolve(data)))
    })
  }

  try {
    const handler = new ConnectHandler(opts, callback)
    this.dispatch({ ...opts, method: 'CONNECT' }, handler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}

module.exports = connect
/***/ } ) ,
/***/ 8752 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
Readable ,
Duplex ,
PassThrough
} = _ _nccwpck _require _ _ ( 2781 )
const {
InvalidArgumentError ,
InvalidReturnValueError ,
RequestAbortedError
} = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { AsyncResource } = _ _nccwpck _require _ _ ( 852 )
const { addSignal , removeSignal } = _ _nccwpck _require _ _ ( 7032 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const kResume = Symbol ( 'resume' )
/**
 * Readable side of the pipeline duplex: chunks the caller writes are
 * pushed into it and consumed by the dispatcher as the request body.
 * A pending write callback is parked under kResume until a read drains it.
 */
class PipelineRequest extends Readable {
  constructor () {
    super({ autoDestroy: true })

    this[kResume] = null
  }

  _read () {
    const resume = this[kResume]
    if (resume) {
      this[kResume] = null
      resume()
    }
  }

  _destroy (err, callback) {
    // Release any parked write callback before tearing down.
    this._read()

    callback(err)
  }
}
/**
 * Readable wrapper over the raw response body. Destruction before 'end'
 * was emitted is reported as a RequestAbortedError.
 */
class PipelineResponse extends Readable {
  constructor (resume) {
    super({ autoDestroy: true })
    this[kResume] = resume
  }

  _read () {
    this[kResume]()
  }

  _destroy (err, callback) {
    if (!err && !this._readableState.endEmitted) {
      err = new RequestAbortedError()
    }

    callback(err)
  }
}
class PipelineHandler extends AsyncResource {
constructor ( opts , handler ) {
if ( ! opts || typeof opts !== 'object' ) {
throw new InvalidArgumentError ( 'invalid opts' )
}
if ( typeof handler !== 'function' ) {
throw new InvalidArgumentError ( 'invalid handler' )
}
const { signal , method , opaque , onInfo , responseHeaders } = opts
if ( signal && typeof signal . on !== 'function' && typeof signal . addEventListener !== 'function' ) {
throw new InvalidArgumentError ( 'signal must be an EventEmitter or EventTarget' )
}
if ( method === 'CONNECT' ) {
throw new InvalidArgumentError ( 'invalid method' )
}
if ( onInfo && typeof onInfo !== 'function' ) {
throw new InvalidArgumentError ( 'invalid onInfo callback' )
}
super ( 'UNDICI_PIPELINE' )
this . opaque = opaque || null
this . responseHeaders = responseHeaders || null
this . handler = handler
this . abort = null
this . context = null
this . onInfo = onInfo || null
this . req = new PipelineRequest ( ) . on ( 'error' , util . nop )
this . ret = new Duplex ( {
readableObjectMode : opts . objectMode ,
autoDestroy : true ,
read : ( ) => {
const { body } = this
if ( body && body . resume ) {
body . resume ( )
}
} ,
write : ( chunk , encoding , callback ) => {
const { req } = this
if ( req . push ( chunk , encoding ) || req . _readableState . destroyed ) {
callback ( )
} else {
req [ kResume ] = callback
}
} ,
destroy : ( err , callback ) => {
const { body , req , res , ret , abort } = this
if ( ! err && ! ret . _readableState . endEmitted ) {
err = new RequestAbortedError ( )
}
if ( abort && err ) {
abort ( )
}
util . destroy ( body , err )
util . destroy ( req , err )
util . destroy ( res , err )
removeSignal ( this )
callback ( err )
}
} ) . on ( 'prefinish' , ( ) => {
const { req } = this
// Node < 15 does not call _final in same tick.
req . push ( null )
} )
this . res = null
addSignal ( this , signal )
}
onConnect ( abort , context ) {
const { ret , res } = this
assert ( ! res , 'pipeline cannot be retried' )
if ( ret . destroyed ) {
throw new RequestAbortedError ( )
}
this . abort = abort
this . context = context
}
onHeaders ( statusCode , rawHeaders , resume ) {
const { opaque , handler , context } = this
if ( statusCode < 200 ) {
if ( this . onInfo ) {
const headers = this . responseHeaders === 'raw' ? util . parseRawHeaders ( rawHeaders ) : util . parseHeaders ( rawHeaders )
this . onInfo ( { statusCode , headers } )
}
return
}
this . res = new PipelineResponse ( resume )
let body
try {
this . handler = null
const headers = this . responseHeaders === 'raw' ? util . parseRawHeaders ( rawHeaders ) : util . parseHeaders ( rawHeaders )
body = this . runInAsyncScope ( handler , null , {
statusCode ,
headers ,
opaque ,
body : this . res ,
context
} )
} catch ( err ) {
this . res . on ( 'error' , util . nop )
throw err
}
if ( ! body || typeof body . on !== 'function' ) {
throw new InvalidReturnValueError ( 'expected Readable' )
}
body
. on ( 'data' , ( chunk ) => {
const { ret , body } = this
if ( ! ret . push ( chunk ) && body . pause ) {
body . pause ( )
}
} )
. on ( 'error' , ( err ) => {
const { ret } = this
util . destroy ( ret , err )
} )
. on ( 'end' , ( ) => {
const { ret } = this
ret . push ( null )
} )
. on ( 'close' , ( ) => {
const { ret } = this
if ( ! ret . _readableState . ended ) {
util . destroy ( ret , new RequestAbortedError ( ) )
}
} )
this . body = body
}
onData ( chunk ) {
const { res } = this
return res . push ( chunk )
}
onComplete ( trailers ) {
const { res } = this
res . push ( null )
}
onError ( err ) {
const { ret } = this
this . handler = null
util . destroy ( ret , err )
}
}
/**
 * Dispatches `opts` as a pipelined request and returns a Duplex whose
 * write side is the request body and read side the handler-transformed
 * response. Construction/dispatch failures come back as an
 * already-destroyed stream rather than a thrown error.
 */
function pipeline (opts, handler) {
  let ph
  try {
    ph = new PipelineHandler(opts, handler)
    this.dispatch({ ...opts, body: ph.req }, ph)
  } catch (err) {
    return new PassThrough().destroy(err)
  }
  return ph.ret
}

module.exports = pipeline
/***/ } ) ,
/***/ 5448 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const Readable = _ _nccwpck _require _ _ ( 3858 )
const {
InvalidArgumentError ,
RequestAbortedError
} = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { getResolveErrorBodyCallback } = _ _nccwpck _require _ _ ( 7474 )
const { AsyncResource } = _ _nccwpck _require _ _ ( 852 )
const { addSignal , removeSignal } = _ _nccwpck _require _ _ ( 7032 )
class RequestHandler extends AsyncResource {
constructor ( opts , callback ) {
if ( ! opts || typeof opts !== 'object' ) {
throw new InvalidArgumentError ( 'invalid opts' )
}
const { signal , method , opaque , body , onInfo , responseHeaders , throwOnError , highWaterMark } = opts
try {
if ( typeof callback !== 'function' ) {
throw new InvalidArgumentError ( 'invalid callback' )
}
if ( highWaterMark && ( typeof highWaterMark !== 'number' || highWaterMark < 0 ) ) {
throw new InvalidArgumentError ( 'invalid highWaterMark' )
}
if ( signal && typeof signal . on !== 'function' && typeof signal . addEventListener !== 'function' ) {
throw new InvalidArgumentError ( 'signal must be an EventEmitter or EventTarget' )
}
if ( method === 'CONNECT' ) {
throw new InvalidArgumentError ( 'invalid method' )
}
if ( onInfo && typeof onInfo !== 'function' ) {
throw new InvalidArgumentError ( 'invalid onInfo callback' )
}
super ( 'UNDICI_REQUEST' )
} catch ( err ) {
if ( util . isStream ( body ) ) {
util . destroy ( body . on ( 'error' , util . nop ) , err )
}
throw err
}
this . responseHeaders = responseHeaders || null
this . opaque = opaque || null
this . callback = callback
this . res = null
this . abort = null
this . body = body
this . trailers = { }
this . context = null
this . onInfo = onInfo || null
this . throwOnError = throwOnError
this . highWaterMark = highWaterMark
if ( util . isStream ( body ) ) {
body . on ( 'error' , ( err ) => {
this . onError ( err )
} )
}
addSignal ( this , signal )
}
onConnect ( abort , context ) {
if ( ! this . callback ) {
throw new RequestAbortedError ( )
}
this . abort = abort
this . context = context
}
onHeaders ( statusCode , rawHeaders , resume , statusMessage ) {
const { callback , opaque , abort , context , responseHeaders , highWaterMark } = this
const headers = responseHeaders === 'raw' ? util . parseRawHeaders ( rawHeaders ) : util . parseHeaders ( rawHeaders )
if ( statusCode < 200 ) {
if ( this . onInfo ) {
this . onInfo ( { statusCode , headers } )
}
return
}
const parsedHeaders = responseHeaders === 'raw' ? util . parseHeaders ( rawHeaders ) : headers
const contentType = parsedHeaders [ 'content-type' ]
const body = new Readable ( { resume , abort , contentType , highWaterMark } )
this . callback = null
this . res = body
if ( callback !== null ) {
if ( this . throwOnError && statusCode >= 400 ) {
this . runInAsyncScope ( getResolveErrorBodyCallback , null ,
{ callback , body , contentType , statusCode , statusMessage , headers }
)
} else {
this . runInAsyncScope ( callback , null , null , {
statusCode ,
headers ,
trailers : this . trailers ,
opaque ,
body ,
context
} )
}
}
}
onData ( chunk ) {
const { res } = this
return res . push ( chunk )
}
onComplete ( trailers ) {
const { res } = this
removeSignal ( this )
util . parseHeaders ( trailers , this . trailers )
res . push ( null )
}
onError ( err ) {
const { res , callback , body , opaque } = this
removeSignal ( this )
if ( callback ) {
// TODO: Does this need queueMicrotask?
this . callback = null
queueMicrotask ( ( ) => {
this . runInAsyncScope ( callback , null , err , { opaque } )
} )
}
if ( res ) {
this . res = null
// Ensure all queued handlers are invoked before destroying res.
queueMicrotask ( ( ) => {
util . destroy ( res , err )
} )
}
if ( body ) {
this . body = null
util . destroy ( body , err )
}
}
}
/**
 * Performs a request on this dispatcher. With no callback, returns a
 * Promise by recursing with a node-style callback. Synchronous dispatch
 * failures are delivered to the callback on a microtask.
 */
function request (opts, callback) {
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      request.call(this, opts, (err, data) => (err ? reject(err) : resolve(data)))
    })
  }

  try {
    const handler = new RequestHandler(opts, callback)
    this.dispatch(opts, handler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}

module.exports = request
module.exports.RequestHandler = RequestHandler
/***/ } ) ,
/***/ 5395 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { finished , PassThrough } = _ _nccwpck _require _ _ ( 2781 )
const {
InvalidArgumentError ,
InvalidReturnValueError ,
RequestAbortedError
} = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { getResolveErrorBodyCallback } = _ _nccwpck _require _ _ ( 7474 )
const { AsyncResource } = _ _nccwpck _require _ _ ( 852 )
const { addSignal , removeSignal } = _ _nccwpck _require _ _ ( 7032 )
class StreamHandler extends AsyncResource {
constructor ( opts , factory , callback ) {
if ( ! opts || typeof opts !== 'object' ) {
throw new InvalidArgumentError ( 'invalid opts' )
}
const { signal , method , opaque , body , onInfo , responseHeaders , throwOnError } = opts
try {
if ( typeof callback !== 'function' ) {
throw new InvalidArgumentError ( 'invalid callback' )
}
if ( typeof factory !== 'function' ) {
throw new InvalidArgumentError ( 'invalid factory' )
}
if ( signal && typeof signal . on !== 'function' && typeof signal . addEventListener !== 'function' ) {
throw new InvalidArgumentError ( 'signal must be an EventEmitter or EventTarget' )
}
if ( method === 'CONNECT' ) {
throw new InvalidArgumentError ( 'invalid method' )
}
if ( onInfo && typeof onInfo !== 'function' ) {
throw new InvalidArgumentError ( 'invalid onInfo callback' )
}
super ( 'UNDICI_STREAM' )
} catch ( err ) {
if ( util . isStream ( body ) ) {
util . destroy ( body . on ( 'error' , util . nop ) , err )
}
throw err
}
this . responseHeaders = responseHeaders || null
this . opaque = opaque || null
this . factory = factory
this . callback = callback
this . res = null
this . abort = null
this . context = null
this . trailers = null
this . body = body
this . onInfo = onInfo || null
this . throwOnError = throwOnError || false
if ( util . isStream ( body ) ) {
body . on ( 'error' , ( err ) => {
this . onError ( err )
} )
}
addSignal ( this , signal )
}
onConnect ( abort , context ) {
if ( ! this . callback ) {
throw new RequestAbortedError ( )
}
this . abort = abort
this . context = context
}
onHeaders ( statusCode , rawHeaders , resume , statusMessage ) {
const { factory , opaque , context , callback , responseHeaders } = this
const headers = responseHeaders === 'raw' ? util . parseRawHeaders ( rawHeaders ) : util . parseHeaders ( rawHeaders )
if ( statusCode < 200 ) {
if ( this . onInfo ) {
this . onInfo ( { statusCode , headers } )
}
return
}
this . factory = null
let res
if ( this . throwOnError && statusCode >= 400 ) {
const parsedHeaders = responseHeaders === 'raw' ? util . parseHeaders ( rawHeaders ) : headers
const contentType = parsedHeaders [ 'content-type' ]
res = new PassThrough ( )
this . callback = null
this . runInAsyncScope ( getResolveErrorBodyCallback , null ,
{ callback , body : res , contentType , statusCode , statusMessage , headers }
)
} else {
if ( factory === null ) {
return
}
res = this . runInAsyncScope ( factory , null , {
statusCode ,
headers ,
opaque ,
context
} )
if (
! res ||
typeof res . write !== 'function' ||
typeof res . end !== 'function' ||
typeof res . on !== 'function'
) {
throw new InvalidReturnValueError ( 'expected Writable' )
}
// TODO: Avoid finished. It registers an unnecessary amount of listeners.
finished ( res , { readable : false } , ( err ) => {
const { callback , res , opaque , trailers , abort } = this
this . res = null
if ( err || ! res . readable ) {
util . destroy ( res , err )
}
this . callback = null
this . runInAsyncScope ( callback , null , err || null , { opaque , trailers } )
if ( err ) {
abort ( )
}
} )
}
res . on ( 'drain' , resume )
this . res = res
const needDrain = res . writableNeedDrain !== undefined
? res . writableNeedDrain
: res . _writableState && res . _writableState . needDrain
return needDrain !== true
}
onData ( chunk ) {
const { res } = this
return res ? res . write ( chunk ) : true
}
onComplete ( trailers ) {
const { res } = this
removeSignal ( this )
if ( ! res ) {
return
}
this . trailers = util . parseHeaders ( trailers )
res . end ( )
}
onError ( err ) {
const { res , callback , opaque , body } = this
removeSignal ( this )
this . factory = null
if ( res ) {
this . res = null
util . destroy ( res , err )
} else if ( callback ) {
this . callback = null
queueMicrotask ( ( ) => {
this . runInAsyncScope ( callback , null , err , { opaque } )
} )
}
if ( body ) {
this . body = null
util . destroy ( body , err )
}
}
}
/**
 * Streams a response through the Writable produced by `factory`. With no
 * callback, returns a Promise by recursing with a node-style callback.
 * Synchronous dispatch failures are delivered to the callback on a
 * microtask.
 */
function stream (opts, factory, callback) {
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      stream.call(this, opts, factory, (err, data) => (err ? reject(err) : resolve(data)))
    })
  }

  try {
    const handler = new StreamHandler(opts, factory, callback)
    this.dispatch(opts, handler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}

module.exports = stream
/***/ } ) ,
/***/ 6923 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { InvalidArgumentError , RequestAbortedError , SocketError } = _ _nccwpck _require _ _ ( 8045 )
const { AsyncResource } = _ _nccwpck _require _ _ ( 852 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { addSignal , removeSignal } = _ _nccwpck _require _ _ ( 7032 )
const assert = _ _nccwpck _require _ _ ( 9491 )
class UpgradeHandler extends AsyncResource {
constructor ( opts , callback ) {
if ( ! opts || typeof opts !== 'object' ) {
throw new InvalidArgumentError ( 'invalid opts' )
}
if ( typeof callback !== 'function' ) {
throw new InvalidArgumentError ( 'invalid callback' )
}
const { signal , opaque , responseHeaders } = opts
if ( signal && typeof signal . on !== 'function' && typeof signal . addEventListener !== 'function' ) {
throw new InvalidArgumentError ( 'signal must be an EventEmitter or EventTarget' )
}
super ( 'UNDICI_UPGRADE' )
this . responseHeaders = responseHeaders || null
this . opaque = opaque || null
this . callback = callback
this . abort = null
this . context = null
addSignal ( this , signal )
}
onConnect ( abort , context ) {
if ( ! this . callback ) {
throw new RequestAbortedError ( )
}
this . abort = abort
this . context = null
}
onHeaders ( ) {
throw new SocketError ( 'bad upgrade' , null )
}
onUpgrade ( statusCode , rawHeaders , socket ) {
const { callback , opaque , context } = this
assert . strictEqual ( statusCode , 101 )
removeSignal ( this )
this . callback = null
const headers = this . responseHeaders === 'raw' ? util . parseRawHeaders ( rawHeaders ) : util . parseHeaders ( rawHeaders )
this . runInAsyncScope ( callback , null , null , {
headers ,
socket ,
opaque ,
context
} )
}
onError ( err ) {
const { callback , opaque } = this
removeSignal ( this )
if ( callback ) {
this . callback = null
queueMicrotask ( ( ) => {
this . runInAsyncScope ( callback , null , err , { opaque } )
} )
}
}
}
/**
 * Dispatcher.upgrade(): performs an HTTP upgrade request. Returns a
 * Promise when no callback is supplied.
 */
function upgrade (opts, callback) {
  // Promise mode: re-enter ourselves with a node-style callback.
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      upgrade.call(this, opts, (err, data) => {
        if (err) {
          reject(err)
        } else {
          resolve(data)
        }
      })
    })
  }

  try {
    const upgradeHandler = new UpgradeHandler(opts, callback)
    const dispatchOpts = {
      ...opts,
      method: opts.method || 'GET',
      upgrade: opts.protocol || 'Websocket'
    }
    this.dispatch(dispatchOpts, upgradeHandler)
  } catch (err) {
    // Even synchronous dispatch failures are reported asynchronously.
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}

module.exports = upgrade
/***/ } ) ,
/***/ 4059 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// Aggregates the callback/promise-based dispatcher API entry points
// (request, stream, pipeline, upgrade, connect) into a single module.
module.exports.request = __nccwpck_require__(5448)
module.exports.stream = __nccwpck_require__(5395)
module.exports.pipeline = __nccwpck_require__(8752)
module.exports.upgrade = __nccwpck_require__(6923)
module.exports.connect = __nccwpck_require__(9744)
/***/ } ) ,
/***/ 3858 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// Ported from https://github.com/nodejs/undici/pull/907
const assert = _ _nccwpck _require _ _ ( 9491 )
const { Readable } = _ _nccwpck _require _ _ ( 2781 )
const { RequestAbortedError , NotSupportedError , InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { ReadableStreamFrom , toUSVString } = _ _nccwpck _require _ _ ( 3983 )
let Blob
const kConsume = Symbol ( 'kConsume' )
const kReading = Symbol ( 'kReading' )
const kBody = Symbol ( 'kBody' )
const kAbort = Symbol ( 'abort' )
const kContentType = Symbol ( 'kContentType' )
const noop = ( ) => { }
module . exports = class BodyReadable extends Readable {
constructor ( {
resume ,
abort ,
contentType = '' ,
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
} ) {
super ( {
autoDestroy : true ,
read : resume ,
highWaterMark
} )
this . _readableState . dataEmitted = false
this [ kAbort ] = abort
this [ kConsume ] = null
this [ kBody ] = null
this [ kContentType ] = contentType
// Is stream being consumed through Readable API?
// This is an optimization so that we avoid checking
// for 'data' and 'readable' listeners in the hot path
// inside push().
this [ kReading ] = false
}
destroy ( err ) {
if ( this . destroyed ) {
// Node < 16
return this
}
if ( ! err && ! this . _readableState . endEmitted ) {
err = new RequestAbortedError ( )
}
if ( err ) {
this [ kAbort ] ( )
}
return super . destroy ( err )
}
emit ( ev , ... args ) {
if ( ev === 'data' ) {
// Node < 16.7
this . _readableState . dataEmitted = true
} else if ( ev === 'error' ) {
// Node < 16
this . _readableState . errorEmitted = true
}
return super . emit ( ev , ... args )
}
on ( ev , ... args ) {
if ( ev === 'data' || ev === 'readable' ) {
this [ kReading ] = true
}
return super . on ( ev , ... args )
}
addListener ( ev , ... args ) {
return this . on ( ev , ... args )
}
off ( ev , ... args ) {
const ret = super . off ( ev , ... args )
if ( ev === 'data' || ev === 'readable' ) {
this [ kReading ] = (
this . listenerCount ( 'data' ) > 0 ||
this . listenerCount ( 'readable' ) > 0
)
}
return ret
}
removeListener ( ev , ... args ) {
return this . off ( ev , ... args )
}
push ( chunk ) {
if ( this [ kConsume ] && chunk !== null && this . readableLength === 0 ) {
consumePush ( this [ kConsume ] , chunk )
return this [ kReading ] ? super . push ( chunk ) : true
}
return super . push ( chunk )
}
// https://fetch.spec.whatwg.org/#dom-body-text
async text ( ) {
return consume ( this , 'text' )
}
// https://fetch.spec.whatwg.org/#dom-body-json
async json ( ) {
return consume ( this , 'json' )
}
// https://fetch.spec.whatwg.org/#dom-body-blob
async blob ( ) {
return consume ( this , 'blob' )
}
// https://fetch.spec.whatwg.org/#dom-body-arraybuffer
async arrayBuffer ( ) {
return consume ( this , 'arrayBuffer' )
}
// https://fetch.spec.whatwg.org/#dom-body-formdata
async formData ( ) {
// TODO: Implement.
throw new NotSupportedError ( )
}
// https://fetch.spec.whatwg.org/#dom-body-bodyused
get bodyUsed ( ) {
return util . isDisturbed ( this )
}
// https://fetch.spec.whatwg.org/#dom-body-body
get body ( ) {
if ( ! this [ kBody ] ) {
this [ kBody ] = ReadableStreamFrom ( this )
if ( this [ kConsume ] ) {
// TODO: Is this the best way to force a lock?
this [ kBody ] . getReader ( ) // Ensure stream is locked.
assert ( this [ kBody ] . locked )
}
}
return this [ kBody ]
}
dump ( opts ) {
let limit = opts && Number . isFinite ( opts . limit ) ? opts . limit : 262144
const signal = opts && opts . signal
if ( signal ) {
try {
if ( typeof signal !== 'object' || ! ( 'aborted' in signal ) ) {
throw new InvalidArgumentError ( 'signal must be an AbortSignal' )
}
util . throwIfAborted ( signal )
} catch ( err ) {
return Promise . reject ( err )
}
}
if ( this . closed ) {
return Promise . resolve ( null )
}
return new Promise ( ( resolve , reject ) => {
const signalListenerCleanup = signal
? util . addAbortListener ( signal , ( ) => {
this . destroy ( )
} )
: noop
this
. on ( 'close' , function ( ) {
signalListenerCleanup ( )
if ( signal && signal . aborted ) {
reject ( signal . reason || Object . assign ( new Error ( 'The operation was aborted' ) , { name : 'AbortError' } ) )
} else {
resolve ( null )
}
} )
. on ( 'error' , noop )
. on ( 'data' , function ( chunk ) {
limit -= chunk . length
if ( limit <= 0 ) {
this . destroy ( )
}
} )
. resume ( )
} )
}
}
// https://streams.spec.whatwg.org/#readablestream-locked
function isLocked (self) {
  // An explicitly locked web stream, or an in-flight consume() (which is
  // an implicit lock), both count.
  const webStream = self[kBody]
  if (webStream && webStream.locked === true) {
    return true
  }
  return self[kConsume]
}
// https://fetch.spec.whatwg.org/#body-unusable
function isUnusable (self) {
  // A body becomes unusable once it has been read from (disturbed) or
  // locked for reading.
  if (util.isDisturbed(self)) {
    return true
  }
  return isLocked(self)
}
async function consume ( stream , type ) {
if ( isUnusable ( stream ) ) {
throw new TypeError ( 'unusable' )
}
assert ( ! stream [ kConsume ] )
return new Promise ( ( resolve , reject ) => {
stream [ kConsume ] = {
type ,
stream ,
resolve ,
reject ,
length : 0 ,
body : [ ]
}
stream
. on ( 'error' , function ( err ) {
consumeFinish ( this [ kConsume ] , err )
} )
. on ( 'close' , function ( ) {
if ( this [ kConsume ] . body !== null ) {
consumeFinish ( this [ kConsume ] , new RequestAbortedError ( ) )
}
} )
process . nextTick ( consumeStart , stream [ kConsume ] )
} )
}
/**
 * Starts draining the stream into the pending consume() operation.
 * Invoked via process.nextTick(consumeStart, stream[kConsume]).
 */
function consumeStart (consume) {
  if (consume.body === null) {
    // Operation already finished/aborted before this tick ran.
    return
  }

  const { _readableState: state } = consume.stream

  // Flush anything already buffered inside the Readable.
  for (const chunk of state.buffer) {
    consumePush(consume, chunk)
  }

  if (state.endEmitted) {
    // BUG FIX: this function runs as a plain process.nextTick callback, so
    // `this` is not the stream here (it is undefined in strict mode) and
    // the original `consumeEnd(this[kConsume])` would throw. Use the
    // `consume` argument directly.
    consumeEnd(consume)
  } else {
    consume.stream.on('end', function () {
      // Inside the listener `this` IS the stream, so this[kConsume] works.
      consumeEnd(this[kConsume])
    })
  }

  consume.stream.resume()

  // Synchronously drain whatever is immediately readable.
  while (consume.stream.read() != null) {
    // Loop
  }
}
/**
 * Finalizes a consume() operation: decodes the accumulated chunks
 * according to the requested type and resolves the pending promise.
 * Decoding errors destroy the stream (which rejects via the 'error' path).
 */
function consumeEnd (consume) {
  const { type, body, resolve, stream, length } = consume

  try {
    if (type === 'text') {
      resolve(toUSVString(Buffer.concat(body)))
    } else if (type === 'json') {
      resolve(JSON.parse(Buffer.concat(body)))
    } else if (type === 'arrayBuffer') {
      // Copy all chunks into one contiguous buffer of the tracked length.
      const dst = new Uint8Array(length)

      let pos = 0
      for (const buf of body) {
        dst.set(buf, pos)
        pos += buf.byteLength
      }

      resolve(dst.buffer)
    } else if (type === 'blob') {
      // Blob is required lazily so it is only loaded when .blob() is used.
      if (!Blob) {
        Blob = (__nccwpck_require__(4300).Blob)
      }
      resolve(new Blob(body, { type: stream[kContentType] }))
    }

    consumeFinish(consume)
  } catch (err) {
    stream.destroy(err)
  }
}
/**
 * Appends one chunk to the pending consume() operation and keeps the
 * running total of bytes/characters received.
 */
function consumePush (consume, chunk) {
  const { body } = consume
  consume.length = consume.length + chunk.length
  body.push(chunk)
}
/**
 * Settles the pending consume() promise (reject on `err`, resolve
 * otherwise) and clears every reference so repeated calls are no-ops and
 * retained memory is released.
 */
function consumeFinish (consume, err) {
  // Idempotence guard: body === null marks an already-finished operation.
  if (consume.body === null) {
    return
  }

  err ? consume.reject(err) : consume.resolve()

  // Drop all references to allow GC and make later calls no-ops.
  consume.type = null
  consume.stream = null
  consume.resolve = null
  consume.reject = null
  consume.length = 0
  consume.body = null
}
/***/ } ) ,
/***/ 7474 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
const assert = _ _nccwpck _require _ _ ( 9491 )
const {
ResponseStatusCodeError
} = _ _nccwpck _require _ _ ( 8045 )
const { toUSVString } = _ _nccwpck _require _ _ ( 3983 )
/**
 * Reads (up to 128 KiB of) an error-response body and invokes `callback`
 * with a ResponseStatusCodeError, attaching a decoded payload for JSON
 * and text content types.
 */
async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
  assert(body)

  // Collect the body; past the 128 KiB cap, drop it (chunks = null).
  let chunks = []
  let limit = 0

  for await (const chunk of body) {
    chunks.push(chunk)
    limit += chunk.length
    if (limit > 128 * 1024) {
      chunks = null
      break
    }
  }

  const message = `Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`

  if (statusCode === 204 || !contentType || !chunks) {
    // Nothing decodable to attach.
    process.nextTick(callback, new ResponseStatusCodeError(message, statusCode, headers))
    return
  }

  try {
    if (contentType.startsWith('application/json')) {
      const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
      process.nextTick(callback, new ResponseStatusCodeError(message, statusCode, headers, payload))
      return
    }

    if (contentType.startsWith('text/')) {
      const payload = toUSVString(Buffer.concat(chunks))
      process.nextTick(callback, new ResponseStatusCodeError(message, statusCode, headers, payload))
      return
    }
  } catch (err) {
    // Payload decoding is best-effort; fall through to the bare error.
  }

  process.nextTick(callback, new ResponseStatusCodeError(message, statusCode, headers))
}

module.exports = { getResolveErrorBodyCallback }
/***/ } ) ,
/***/ 7931 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
BalancedPoolMissingUpstreamError ,
InvalidArgumentError
} = _ _nccwpck _require _ _ ( 8045 )
const {
PoolBase ,
kClients ,
kNeedDrain ,
kAddClient ,
kRemoveClient ,
kGetDispatcher
} = _ _nccwpck _require _ _ ( 3198 )
const Pool = _ _nccwpck _require _ _ ( 4634 )
const { kUrl , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const { parseOrigin } = _ _nccwpck _require _ _ ( 3983 )
const kFactory = Symbol ( 'factory' )
const kOptions = Symbol ( 'options' )
const kGreatestCommonDivisor = Symbol ( 'kGreatestCommonDivisor' )
const kCurrentWeight = Symbol ( 'kCurrentWeight' )
const kIndex = Symbol ( 'kIndex' )
const kWeight = Symbol ( 'kWeight' )
const kMaxWeightPerServer = Symbol ( 'kMaxWeightPerServer' )
const kErrorPenalty = Symbol ( 'kErrorPenalty' )
/**
 * Euclid's algorithm, written iteratively. By definition here,
 * getGreatestCommonDivisor(a, 0) === a.
 */
function getGreatestCommonDivisor (a, b) {
  let x = a
  let y = b
  while (y !== 0) {
    const rem = x % y
    x = y
    y = rem
  }
  return x
}
/**
 * Default upstream factory: a plain Pool targeting `origin`.
 */
function defaultFactory (origin, opts) {
  const pool = new Pool(origin, opts)
  return pool
}
class BalancedPool extends PoolBase {
constructor ( upstreams = [ ] , { factory = defaultFactory , ... opts } = { } ) {
super ( )
this [ kOptions ] = opts
this [ kIndex ] = - 1
this [ kCurrentWeight ] = 0
this [ kMaxWeightPerServer ] = this [ kOptions ] . maxWeightPerServer || 100
this [ kErrorPenalty ] = this [ kOptions ] . errorPenalty || 15
if ( ! Array . isArray ( upstreams ) ) {
upstreams = [ upstreams ]
}
if ( typeof factory !== 'function' ) {
throw new InvalidArgumentError ( 'factory must be a function.' )
}
this [ kInterceptors ] = opts . interceptors && opts . interceptors . BalancedPool && Array . isArray ( opts . interceptors . BalancedPool )
? opts . interceptors . BalancedPool
: [ ]
this [ kFactory ] = factory
for ( const upstream of upstreams ) {
this . addUpstream ( upstream )
}
this . _updateBalancedPoolStats ( )
}
addUpstream ( upstream ) {
const upstreamOrigin = parseOrigin ( upstream ) . origin
if ( this [ kClients ] . find ( ( pool ) => (
pool [ kUrl ] . origin === upstreamOrigin &&
pool . closed !== true &&
pool . destroyed !== true
) ) ) {
return this
}
const pool = this [ kFactory ] ( upstreamOrigin , Object . assign ( { } , this [ kOptions ] ) )
this [ kAddClient ] ( pool )
pool . on ( 'connect' , ( ) => {
pool [ kWeight ] = Math . min ( this [ kMaxWeightPerServer ] , pool [ kWeight ] + this [ kErrorPenalty ] )
} )
pool . on ( 'connectionError' , ( ) => {
pool [ kWeight ] = Math . max ( 1 , pool [ kWeight ] - this [ kErrorPenalty ] )
this . _updateBalancedPoolStats ( )
} )
pool . on ( 'disconnect' , ( ... args ) => {
const err = args [ 2 ]
if ( err && err . code === 'UND_ERR_SOCKET' ) {
// decrease the weight of the pool.
pool [ kWeight ] = Math . max ( 1 , pool [ kWeight ] - this [ kErrorPenalty ] )
this . _updateBalancedPoolStats ( )
}
} )
for ( const client of this [ kClients ] ) {
client [ kWeight ] = this [ kMaxWeightPerServer ]
}
this . _updateBalancedPoolStats ( )
return this
}
_updateBalancedPoolStats ( ) {
this [ kGreatestCommonDivisor ] = this [ kClients ] . map ( p => p [ kWeight ] ) . reduce ( getGreatestCommonDivisor , 0 )
}
removeUpstream ( upstream ) {
const upstreamOrigin = parseOrigin ( upstream ) . origin
const pool = this [ kClients ] . find ( ( pool ) => (
pool [ kUrl ] . origin === upstreamOrigin &&
pool . closed !== true &&
pool . destroyed !== true
) )
if ( pool ) {
this [ kRemoveClient ] ( pool )
}
return this
}
get upstreams ( ) {
return this [ kClients ]
. filter ( dispatcher => dispatcher . closed !== true && dispatcher . destroyed !== true )
. map ( ( p ) => p [ kUrl ] . origin )
}
[ kGetDispatcher ] ( ) {
// We validate that pools is greater than 0,
// otherwise we would have to wait until an upstream
// is added, which might never happen.
if ( this [ kClients ] . length === 0 ) {
throw new BalancedPoolMissingUpstreamError ( )
}
const dispatcher = this [ kClients ] . find ( dispatcher => (
! dispatcher [ kNeedDrain ] &&
dispatcher . closed !== true &&
dispatcher . destroyed !== true
) )
if ( ! dispatcher ) {
return
}
const allClientsBusy = this [ kClients ] . map ( pool => pool [ kNeedDrain ] ) . reduce ( ( a , b ) => a && b , true )
if ( allClientsBusy ) {
return
}
let counter = 0
let maxWeightIndex = this [ kClients ] . findIndex ( pool => ! pool [ kNeedDrain ] )
while ( counter ++ < this [ kClients ] . length ) {
this [ kIndex ] = ( this [ kIndex ] + 1 ) % this [ kClients ] . length
const pool = this [ kClients ] [ this [ kIndex ] ]
// find pool index with the largest weight
if ( pool [ kWeight ] > this [ kClients ] [ maxWeightIndex ] [ kWeight ] && ! pool [ kNeedDrain ] ) {
maxWeightIndex = this [ kIndex ]
}
// decrease the current weight every `this[kClients].length`.
if ( this [ kIndex ] === 0 ) {
// Set the current weight to the next lower weight.
this [ kCurrentWeight ] = this [ kCurrentWeight ] - this [ kGreatestCommonDivisor ]
if ( this [ kCurrentWeight ] <= 0 ) {
this [ kCurrentWeight ] = this [ kMaxWeightPerServer ]
}
}
if ( pool [ kWeight ] >= this [ kCurrentWeight ] && ( ! pool [ kNeedDrain ] ) ) {
return pool
}
}
this [ kCurrentWeight ] = this [ kClients ] [ maxWeightIndex ] [ kWeight ]
this [ kIndex ] = maxWeightIndex
return this [ kClients ] [ maxWeightIndex ]
}
}
module . exports = BalancedPool
/***/ } ) ,
/***/ 6101 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { kConstruct } = _ _nccwpck _require _ _ ( 9174 )
const { urlEquals , fieldValues : getFieldValues } = _ _nccwpck _require _ _ ( 2396 )
const { kEnumerableProperty , isDisturbed } = _ _nccwpck _require _ _ ( 3983 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { Response , cloneResponse } = _ _nccwpck _require _ _ ( 7823 )
const { Request } = _ _nccwpck _require _ _ ( 8359 )
const { kState , kHeaders , kGuard , kRealm } = _ _nccwpck _require _ _ ( 5861 )
const { fetching } = _ _nccwpck _require _ _ ( 4881 )
const { urlIsHttpHttpsScheme , createDeferredPromise , readAllBytes } = _ _nccwpck _require _ _ ( 2538 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { getGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
/ * *
* @ see https : //w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
* @ typedef { Object } CacheBatchOperation
* @ property { 'delete' | 'put' } type
* @ property { any } request
* @ property { any } response
* @ property { import ( '../../types/cache' ) . CacheQueryOptions } options
* /
/ * *
* @ see https : //w3c.github.io/ServiceWorker/#dfn-request-response-list
* @ typedef { [ any , any ] [ ] } requestResponseList
* /
/**
 * @see https://w3c.github.io/ServiceWorker/#cache-interface
 *
 * In-memory Cache implementation backed by a request/response list.
 * Only constructible internally (CacheStorage passes the kConstruct token).
 * Numbered comments refer to the corresponding spec algorithm steps.
 */
class Cache {
  /**
   * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
   * @type {requestResponseList}
   */
  #relevantRequestResponseList

  constructor () {
    // Only internal callers holding kConstruct may instantiate a Cache.
    if (arguments[0] !== kConstruct) {
      webidl.illegalConstructor()
    }

    this.#relevantRequestResponseList = arguments[1]
  }

  // Returns the first matching Response, or undefined if none match.
  async match (request, options = {}) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)

    const p = await this.matchAll(request, options)

    if (p.length === 0) {
      return
    }

    return p[0]
  }

  // Returns a frozen list of Response objects matching `request`
  // (or all cached responses when `request` is undefined).
  async matchAll (request = undefined, options = {}) {
    webidl.brandCheck(this, Cache)

    if (request !== undefined) request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)

    // 1.
    let r = null

    // 2.
    if (request !== undefined) {
      if (request instanceof Request) {
        // 2.1.1
        r = request[kState]

        // 2.1.2
        if (r.method !== 'GET' && !options.ignoreMethod) {
          return []
        }
      } else if (typeof request === 'string') {
        // 2.2.1
        r = new Request(request)[kState]
      }
    }

    // 5.
    // 5.1
    const responses = []

    // 5.2
    if (request === undefined) {
      // 5.2.1
      for (const requestResponse of this.#relevantRequestResponseList) {
        responses.push(requestResponse[1])
      }
    } else { // 5.3
      // 5.3.1
      const requestResponses = this.#queryCache(r, options)

      // 5.3.2
      for (const requestResponse of requestResponses) {
        responses.push(requestResponse[1])
      }
    }

    // 5.4
    // We don't implement CORS so we don't need to loop over the responses, yay!

    // 5.5.1
    const responseList = []

    // 5.5.2
    for (const response of responses) {
      // 5.5.2.1
      // Wrap each internal response in a fresh, immutable Response object.
      const responseObject = new Response(response.body?.source ?? null)
      const body = responseObject[kState].body
      responseObject[kState] = response
      responseObject[kState].body = body
      responseObject[kHeaders][kHeadersList] = response.headersList
      responseObject[kHeaders][kGuard] = 'immutable'

      responseList.push(responseObject)
    }

    // 6.
    return Object.freeze(responseList)
  }

  // Fetches `request` and stores the response; sugar over addAll([request]).
  async add (request) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })

    request = webidl.converters.RequestInfo(request)

    // 1.
    const requests = [request]

    // 2.
    const responseArrayPromise = this.addAll(requests)

    // 3.
    return await responseArrayPromise
  }

  // Fetches every request and atomically stores all responses; any invalid
  // response (error/206/non-2xx/vary:*) rejects and aborts the other fetches.
  async addAll (requests) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })

    requests = webidl.converters['sequence<RequestInfo>'](requests)

    // 1.
    const responsePromises = []

    // 2.
    const requestList = []

    // 3.
    for (const request of requests) {
      if (typeof request === 'string') {
        continue
      }

      // 3.1
      const r = request[kState]

      // 3.2
      if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
        throw webidl.errors.exception({
          header: 'Cache.addAll',
          message: 'Expected http/s scheme when method is not GET.'
        })
      }
    }

    // 4.
    /** @type {ReturnType<typeof fetching>[]} */
    const fetchControllers = []

    // 5.
    for (const request of requests) {
      // 5.1
      const r = new Request(request)[kState]

      // 5.2
      if (!urlIsHttpHttpsScheme(r.url)) {
        throw webidl.errors.exception({
          header: 'Cache.addAll',
          message: 'Expected http/s scheme.'
        })
      }

      // 5.4
      r.initiator = 'fetch'
      r.destination = 'subresource'

      // 5.5
      requestList.push(r)

      // 5.6
      const responsePromise = createDeferredPromise()

      // 5.7
      fetchControllers.push(fetching({
        request: r,
        dispatcher: getGlobalDispatcher(),
        processResponse (response) {
          // 1.
          if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
            responsePromise.reject(webidl.errors.exception({
              header: 'Cache.addAll',
              message: 'Received an invalid status code or the request failed.'
            }))
          } else if (response.headersList.contains('vary')) { // 2.
            // 2.1
            const fieldValues = getFieldValues(response.headersList.get('vary'))

            // 2.2
            for (const fieldValue of fieldValues) {
              // 2.2.1
              if (fieldValue === '*') {
                responsePromise.reject(webidl.errors.exception({
                  header: 'Cache.addAll',
                  message: 'invalid vary field value'
                }))

                // Abort every other in-flight fetch for this addAll call.
                for (const controller of fetchControllers) {
                  controller.abort()
                }

                return
              }
            }
          }
        },
        processResponseEndOfBody (response) {
          // 1.
          if (response.aborted) {
            responsePromise.reject(new DOMException('aborted', 'AbortError'))
            return
          }

          // 2.
          responsePromise.resolve(response)
        }
      }))

      // 5.8
      responsePromises.push(responsePromise.promise)
    }

    // 6.
    const p = Promise.all(responsePromises)

    // 7.
    const responses = await p

    // 7.1
    const operations = []

    // 7.2
    let index = 0

    // 7.3
    for (const response of responses) {
      // 7.3.1
      /** @type {CacheBatchOperation} */
      const operation = {
        type: 'put', // 7.3.2
        request: requestList[index], // 7.3.3
        response // 7.3.4
      }

      operations.push(operation) // 7.3.5

      index++ // 7.3.6
    }

    // 7.5
    const cacheJobPromise = createDeferredPromise()

    // 7.6.1
    let errorData = null

    // 7.6.2
    try {
      this.#batchCacheOperations(operations)
    } catch (e) {
      errorData = e
    }

    // 7.6.3
    queueMicrotask(() => {
      // 7.6.3.1
      if (errorData === null) {
        cacheJobPromise.resolve(undefined)
      } else {
        // 7.6.3.2
        cacheJobPromise.reject(errorData)
      }
    })

    // 7.7
    return cacheJobPromise.promise
  }

  // Stores `response` for `request` after fully reading the response body.
  async put (request, response) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })

    request = webidl.converters.RequestInfo(request)
    response = webidl.converters.Response(response)

    // 1.
    let innerRequest = null

    // 2.
    if (request instanceof Request) {
      innerRequest = request[kState]
    } else { // 3.
      innerRequest = new Request(request)[kState]
    }

    // 4.
    if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
      throw webidl.errors.exception({
        header: 'Cache.put',
        message: 'Expected an http/s scheme when method is not GET'
      })
    }

    // 5.
    const innerResponse = response[kState]

    // 6.
    if (innerResponse.status === 206) {
      throw webidl.errors.exception({
        header: 'Cache.put',
        message: 'Got 206 status'
      })
    }

    // 7.
    if (innerResponse.headersList.contains('vary')) {
      // 7.1.
      const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))

      // 7.2.
      for (const fieldValue of fieldValues) {
        // 7.2.1
        if (fieldValue === '*') {
          throw webidl.errors.exception({
            header: 'Cache.put',
            message: 'Got * vary field value'
          })
        }
      }
    }

    // 8.
    if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
      throw webidl.errors.exception({
        header: 'Cache.put',
        message: 'Response body is locked or disturbed'
      })
    }

    // 9.
    const clonedResponse = cloneResponse(innerResponse)

    // 10.
    const bodyReadPromise = createDeferredPromise()

    // 11.
    if (innerResponse.body != null) {
      // 11.1
      const stream = innerResponse.body.stream

      // 11.2
      const reader = stream.getReader()

      // 11.3
      readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
    } else {
      bodyReadPromise.resolve(undefined)
    }

    // 12.
    /** @type {CacheBatchOperation[]} */
    const operations = []

    // 13.
    /** @type {CacheBatchOperation} */
    const operation = {
      type: 'put', // 14.
      request: innerRequest, // 15.
      response: clonedResponse // 16.
    }

    // 17.
    operations.push(operation)

    // 19.
    const bytes = await bodyReadPromise.promise

    // The stored clone keeps the fully-read body bytes as its source.
    if (clonedResponse.body != null) {
      clonedResponse.body.source = bytes
    }

    // 19.1
    const cacheJobPromise = createDeferredPromise()

    // 19.2.1
    let errorData = null

    // 19.2.2
    try {
      this.#batchCacheOperations(operations)
    } catch (e) {
      errorData = e
    }

    // 19.2.3
    queueMicrotask(() => {
      // 19.2.3.1
      if (errorData === null) {
        cacheJobPromise.resolve()
      } else { // 19.2.3.2
        cacheJobPromise.reject(errorData)
      }
    })

    return cacheJobPromise.promise
  }

  // Removes entries matching `request`; resolves true if anything was deleted.
  async delete (request, options = {}) {
    webidl.brandCheck(this, Cache)
    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)

    /**
     * @type {Request}
     */
    let r = null

    if (request instanceof Request) {
      r = request[kState]

      if (r.method !== 'GET' && !options.ignoreMethod) {
        return false
      }
    } else {
      assert(typeof request === 'string')

      r = new Request(request)[kState]
    }

    /** @type {CacheBatchOperation[]} */
    const operations = []

    /** @type {CacheBatchOperation} */
    const operation = {
      type: 'delete',
      request: r,
      options
    }

    operations.push(operation)

    const cacheJobPromise = createDeferredPromise()

    let errorData = null
    let requestResponses

    try {
      requestResponses = this.#batchCacheOperations(operations)
    } catch (e) {
      errorData = e
    }

    queueMicrotask(() => {
      if (errorData === null) {
        cacheJobPromise.resolve(!!requestResponses?.length)
      } else {
        cacheJobPromise.reject(errorData)
      }
    })

    return cacheJobPromise.promise
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
   * @param {any} request
   * @param {import('../../types/cache').CacheQueryOptions} options
   * @returns {readonly Request[]}
   */
  async keys (request = undefined, options = {}) {
    webidl.brandCheck(this, Cache)

    if (request !== undefined) request = webidl.converters.RequestInfo(request)
    options = webidl.converters.CacheQueryOptions(options)

    // 1.
    let r = null

    // 2.
    if (request !== undefined) {
      // 2.1
      if (request instanceof Request) {
        // 2.1.1
        r = request[kState]

        // 2.1.2
        if (r.method !== 'GET' && !options.ignoreMethod) {
          return []
        }
      } else if (typeof request === 'string') { // 2.2
        r = new Request(request)[kState]
      }
    }

    // 4.
    const promise = createDeferredPromise()

    // 5.
    // 5.1
    const requests = []

    // 5.2
    if (request === undefined) {
      // 5.2.1
      for (const requestResponse of this.#relevantRequestResponseList) {
        // 5.2.1.1
        requests.push(requestResponse[0])
      }
    } else { // 5.3
      // 5.3.1
      const requestResponses = this.#queryCache(r, options)

      // 5.3.2
      for (const requestResponse of requestResponses) {
        // 5.3.2.1
        requests.push(requestResponse[0])
      }
    }

    // 5.4
    queueMicrotask(() => {
      // 5.4.1
      const requestList = []

      // 5.4.2
      // Wrap each internal request in an immutable Request object.
      for (const request of requests) {
        const requestObject = new Request('https://a')
        requestObject[kState] = request
        requestObject[kHeaders][kHeadersList] = request.headersList
        requestObject[kHeaders][kGuard] = 'immutable'
        requestObject[kRealm] = request.client

        // 5.4.2.1
        requestList.push(requestObject)
      }

      // 5.4.3
      promise.resolve(Object.freeze(requestList))
    })

    return promise.promise
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
   * Applies all operations transactionally: on failure the previous cache
   * contents are restored and the error is rethrown.
   * @param {CacheBatchOperation[]} operations
   * @returns {requestResponseList}
   */
  #batchCacheOperations (operations) {
    // 1.
    const cache = this.#relevantRequestResponseList

    // 2.
    const backupCache = [...cache]

    // 3.
    const addedItems = []

    // 4.1
    const resultList = []

    try {
      // 4.2
      for (const operation of operations) {
        // 4.2.1
        if (operation.type !== 'delete' && operation.type !== 'put') {
          throw webidl.errors.exception({
            header: 'Cache.#batchCacheOperations',
            message: 'operation type does not match "delete" or "put"'
          })
        }

        // 4.2.2
        if (operation.type === 'delete' && operation.response != null) {
          throw webidl.errors.exception({
            header: 'Cache.#batchCacheOperations',
            message: 'delete operation should not have an associated response'
          })
        }

        // 4.2.3
        if (this.#queryCache(operation.request, operation.options, addedItems).length) {
          throw new DOMException('???', 'InvalidStateError')
        }

        // 4.2.4
        let requestResponses

        // 4.2.5
        if (operation.type === 'delete') {
          // 4.2.5.1
          requestResponses = this.#queryCache(operation.request, operation.options)

          // TODO: the spec is wrong, this is needed to pass WPTs
          if (requestResponses.length === 0) {
            return []
          }

          // 4.2.5.2
          for (const requestResponse of requestResponses) {
            const idx = cache.indexOf(requestResponse)
            assert(idx !== -1)

            // 4.2.5.2.1
            cache.splice(idx, 1)
          }
        } else if (operation.type === 'put') { // 4.2.6
          // 4.2.6.1
          if (operation.response == null) {
            throw webidl.errors.exception({
              header: 'Cache.#batchCacheOperations',
              message: 'put operation should have an associated response'
            })
          }

          // 4.2.6.2
          const r = operation.request

          // 4.2.6.3
          if (!urlIsHttpHttpsScheme(r.url)) {
            throw webidl.errors.exception({
              header: 'Cache.#batchCacheOperations',
              message: 'expected http or https scheme'
            })
          }

          // 4.2.6.4
          if (r.method !== 'GET') {
            throw webidl.errors.exception({
              header: 'Cache.#batchCacheOperations',
              message: 'not get method'
            })
          }

          // 4.2.6.5
          if (operation.options != null) {
            throw webidl.errors.exception({
              header: 'Cache.#batchCacheOperations',
              message: 'options must not be defined'
            })
          }

          // 4.2.6.6
          // Putting replaces any previously cached entries for the request.
          requestResponses = this.#queryCache(operation.request)

          // 4.2.6.7
          for (const requestResponse of requestResponses) {
            const idx = cache.indexOf(requestResponse)
            assert(idx !== -1)

            // 4.2.6.7.1
            cache.splice(idx, 1)
          }

          // 4.2.6.8
          cache.push([operation.request, operation.response])

          // 4.2.6.10
          addedItems.push([operation.request, operation.response])
        }

        // 4.2.7
        resultList.push([operation.request, operation.response])
      }

      // 4.3
      return resultList
    } catch (e) { // 5.
      // 5.1
      // Roll back: empty the (shared) list and restore the snapshot.
      this.#relevantRequestResponseList.length = 0

      // 5.2
      this.#relevantRequestResponseList = backupCache

      // 5.3
      throw e
    }
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#query-cache
   * Returns all entries in `targetStorage` (defaults to this cache's list)
   * that match `requestQuery` under `options`.
   * @param {any} requestQuery
   * @param {import('../../types/cache').CacheQueryOptions} options
   * @param {requestResponseList} targetStorage
   * @returns {requestResponseList}
   */
  #queryCache (requestQuery, options, targetStorage) {
    /** @type {requestResponseList} */
    const resultList = []

    const storage = targetStorage ?? this.#relevantRequestResponseList

    for (const requestResponse of storage) {
      const [cachedRequest, cachedResponse] = requestResponse
      if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
        resultList.push(requestResponse)
      }
    }

    return resultList
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
   * URL equality (optionally ignoring the query string) plus Vary-header
   * checks against the cached response.
   * @param {any} requestQuery
   * @param {any} request
   * @param {any | null} response
   * @param {import('../../types/cache').CacheQueryOptions | undefined} options
   * @returns {boolean}
   */
  #requestMatchesCachedItem (requestQuery, request, response = null, options) {
    // if (options?.ignoreMethod === false && request.method === 'GET') {
    //   return false
    // }

    const queryURL = new URL(requestQuery.url)

    const cachedURL = new URL(request.url)

    if (options?.ignoreSearch) {
      cachedURL.search = ''

      queryURL.search = ''
    }

    if (!urlEquals(queryURL, cachedURL, true)) {
      return false
    }

    if (
      response == null ||
      options?.ignoreVary ||
      !response.headersList.contains('vary')
    ) {
      return true
    }

    const fieldValues = getFieldValues(response.headersList.get('vary'))

    for (const fieldValue of fieldValues) {
      // A Vary of '*' never matches.
      if (fieldValue === '*') {
        return false
      }

      const requestValue = request.headersList.get(fieldValue)
      const queryValue = requestQuery.headersList.get(fieldValue)

      // If one has the header and the other doesn't, or one has
      // a different value than the other, return false
      if (requestValue !== queryValue) {
        return false
      }
    }

    return true
  }
}
Object . defineProperties ( Cache . prototype , {
[ Symbol . toStringTag ] : {
value : 'Cache' ,
configurable : true
} ,
match : kEnumerableProperty ,
matchAll : kEnumerableProperty ,
add : kEnumerableProperty ,
addAll : kEnumerableProperty ,
put : kEnumerableProperty ,
delete : kEnumerableProperty ,
keys : kEnumerableProperty
} )
const cacheQueryOptionConverters = [
{
key : 'ignoreSearch' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'ignoreMethod' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'ignoreVary' ,
converter : webidl . converters . boolean ,
defaultValue : false
}
]
webidl . converters . CacheQueryOptions = webidl . dictionaryConverter ( cacheQueryOptionConverters )
webidl . converters . MultiCacheQueryOptions = webidl . dictionaryConverter ( [
... cacheQueryOptionConverters ,
{
key : 'cacheName' ,
converter : webidl . converters . DOMString
}
] )
webidl . converters . Response = webidl . interfaceConverter ( Response )
webidl . converters [ 'sequence<RequestInfo>' ] = webidl . sequenceConverter (
webidl . converters . RequestInfo
)
module . exports = {
Cache
}
/***/ } ) ,
/***/ 7907 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { kConstruct } = _ _nccwpck _require _ _ ( 9174 )
const { Cache } = _ _nccwpck _require _ _ ( 6101 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
class CacheStorage {
  /**
   * Maps cache names to their backing request/response lists.
   * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
   * @type {Map<string, import('./cache').requestResponseList>}
   */
  #caches = new Map()

  constructor () {
    // Only internal callers holding the kConstruct sentinel may construct.
    if (arguments[0] !== kConstruct) {
      webidl.illegalConstructor()
    }
  }

  /**
   * Searches one named cache (when options.cacheName is given) or every
   * cache in insertion order for a response matching the request.
   * Resolves with the first match, or undefined when nothing matches.
   * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-match
   */
  async match (request, options = {}) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })

    request = webidl.converters.RequestInfo(request)
    options = webidl.converters.MultiCacheQueryOptions(options)

    // 1.
    if (options.cacheName != null) {
      // 1.1.1.1
      if (this.#caches.has(options.cacheName)) {
        // 1.1.1.1.1
        const cacheList = this.#caches.get(options.cacheName)
        const cache = new Cache(kConstruct, cacheList)

        return await cache.match(request, options)
      }
      // Named cache does not exist: resolves with undefined.
    } else { // 2.
      // 2.2
      for (const cacheList of this.#caches.values()) {
        const cache = new Cache(kConstruct, cacheList)

        // 2.2.1.2
        const response = await cache.match(request, options)

        if (response !== undefined) {
          return response
        }
      }
    }
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-has
   * @param {string} cacheName
   * @returns {Promise<boolean>}
   */
  async has (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })

    cacheName = webidl.converters.DOMString(cacheName)

    // 2.1.1
    // 2.2
    return this.#caches.has(cacheName)
  }

  /**
   * Opens the cache registered under cacheName, creating an empty one
   * when it does not exist yet. Each call returns a NEW Cache wrapper
   * over the same shared backing list.
   * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
   * @param {string} cacheName
   * @returns {Promise<Cache>}
   */
  async open (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })

    cacheName = webidl.converters.DOMString(cacheName)

    // 2.1
    if (this.#caches.has(cacheName)) {
      // await caches.open('v1') !== await caches.open('v1')

      // 2.1.1
      const cache = this.#caches.get(cacheName)

      // 2.1.1.1
      return new Cache(kConstruct, cache)
    }

    // 2.2
    const cache = []

    // 2.3
    this.#caches.set(cacheName, cache)

    // 2.4
    return new Cache(kConstruct, cache)
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
   * @param {string} cacheName
   * @returns {Promise<boolean>} true when a cache was removed.
   */
  async delete (cacheName) {
    webidl.brandCheck(this, CacheStorage)
    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })

    cacheName = webidl.converters.DOMString(cacheName)

    return this.#caches.delete(cacheName)
  }

  /**
   * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
   * @returns {Promise<string[]>} cache names in insertion order.
   */
  async keys () {
    webidl.brandCheck(this, CacheStorage)

    // 2.1
    const keys = this.#caches.keys()

    // 2.2
    return [...keys]
  }
}
Object . defineProperties ( CacheStorage . prototype , {
[ Symbol . toStringTag ] : {
value : 'CacheStorage' ,
configurable : true
} ,
match : kEnumerableProperty ,
has : kEnumerableProperty ,
open : kEnumerableProperty ,
delete : kEnumerableProperty ,
keys : kEnumerableProperty
} )
module . exports = {
CacheStorage
}
/***/ } ) ,
/***/ 9174 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// Re-export the shared kConstruct sentinel used to brand-check internal
// constructor calls across the cache modules.
module.exports = {
  kConstruct: (__nccwpck_require__(2785).kConstruct)
}
/***/ } ) ,
/***/ 2396 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const assert = _ _nccwpck _require _ _ ( 9491 )
const { URLSerializer } = _ _nccwpck _require _ _ ( 685 )
const { isValidHeaderName } = _ _nccwpck _require _ _ ( 2538 )
/**
 * Compares two URLs by their serialized form.
 * @see https://url.spec.whatwg.org/#concept-url-equals
 * @param {URL} A
 * @param {URL} B
 * @param {boolean | undefined} excludeFragment - when true, fragments are
 *   ignored in the comparison.
 * @returns {boolean}
 */
function urlEquals (A, B, excludeFragment = false) {
  return URLSerializer(A, excludeFragment) === URLSerializer(B, excludeFragment)
}
/**
 * Splits a comma-separated header value into trimmed, valid header names,
 * dropping empty and invalid entries.
 * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
 * @param {string} header
 */
function fieldValues (header) {
  assert(header !== null)

  const result = []

  for (const piece of header.split(',')) {
    const trimmed = piece.trim()

    // Keep only non-empty, syntactically valid header names.
    if (trimmed.length && isValidHeaderName(trimmed)) {
      result.push(trimmed)
    }
  }

  return result
}
module . exports = {
urlEquals ,
fieldValues
}
/***/ } ) ,
/***/ 3598 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// @ts-check
/* global WebAssembly */
const assert = _ _nccwpck _require _ _ ( 9491 )
const net = _ _nccwpck _require _ _ ( 1808 )
const http = _ _nccwpck _require _ _ ( 3685 )
const { pipeline } = _ _nccwpck _require _ _ ( 2781 )
const util = _ _nccwpck _require _ _ ( 3983 )
const timers = _ _nccwpck _require _ _ ( 9459 )
const Request = _ _nccwpck _require _ _ ( 2905 )
const DispatcherBase = _ _nccwpck _require _ _ ( 4839 )
const {
RequestContentLengthMismatchError ,
ResponseContentLengthMismatchError ,
InvalidArgumentError ,
RequestAbortedError ,
HeadersTimeoutError ,
HeadersOverflowError ,
SocketError ,
InformationalError ,
BodyTimeoutError ,
HTTPParserError ,
ResponseExceededMaxSizeError ,
ClientDestroyedError
} = _ _nccwpck _require _ _ ( 8045 )
const buildConnector = _ _nccwpck _require _ _ ( 2067 )
const {
kUrl ,
kReset ,
kServerName ,
kClient ,
kBusy ,
kParser ,
kConnect ,
kBlocking ,
kResuming ,
kRunning ,
kPending ,
kSize ,
kWriting ,
kQueue ,
kConnected ,
kConnecting ,
kNeedDrain ,
kNoRef ,
kKeepAliveDefaultTimeout ,
kHostHeader ,
kPendingIdx ,
kRunningIdx ,
kError ,
kPipelining ,
kSocket ,
kKeepAliveTimeoutValue ,
kMaxHeadersSize ,
kKeepAliveMaxTimeout ,
kKeepAliveTimeoutThreshold ,
kHeadersTimeout ,
kBodyTimeout ,
kStrictContentLength ,
kConnector ,
kMaxRedirections ,
kMaxRequests ,
kCounter ,
kClose ,
kDestroy ,
kDispatch ,
kInterceptors ,
kLocalAddress ,
kMaxResponseSize ,
kHTTPConnVersion ,
// HTTP2
kHost ,
kHTTP2Session ,
kHTTP2SessionState ,
kHTTP2BuildRequest ,
kHTTP2CopyHeaders ,
kHTTP1BuildRequest
} = _ _nccwpck _require _ _ ( 2785 )
/** @type {import('http2')} */
let http2
try {
  http2 = __nccwpck_require__(5158)
} catch {
  // http2 may be unavailable in some builds; fall back to an empty
  // constants bag so the destructuring below still succeeds.
  // @ts-ignore
  http2 = { constants: {} }
}

const {
  constants: {
    HTTP2_HEADER_AUTHORITY,
    HTTP2_HEADER_METHOD,
    HTTP2_HEADER_PATH,
    HTTP2_HEADER_SCHEME,
    HTTP2_HEADER_CONTENT_LENGTH,
    HTTP2_HEADER_EXPECT,
    HTTP2_HEADER_STATUS
  }
} = http2

// Experimental: warn once when HTTP/2 support is used.
let h2ExperimentalWarned = false

// Buffer subclass used to create views without copying.
const FastBuffer = Buffer[Symbol.species]

const kClosedResolve = Symbol('kClosedResolve')

// Diagnostics channels are best-effort: if diagnostics_channel cannot be
// loaded, stub them so publish sites can still check hasSubscribers.
const channels = {}

try {
  const diagnosticsChannel = __nccwpck_require__(7643)
  channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders')
  channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect')
  channels.connectError = diagnosticsChannel.channel('undici:client:connectError')
  channels.connected = diagnosticsChannel.channel('undici:client:connected')
} catch {
  channels.sendHeaders = { hasSubscribers: false }
  channels.beforeConnect = { hasSubscribers: false }
  channels.connectError = { hasSubscribers: false }
  channels.connected = { hasSubscribers: false }
}
/**
 * A single-origin HTTP/1.1 (and optionally HTTP/2) client with a
 * fast-queue request pipeline.
 * @type {import('../types/client').default}
 */
class Client extends DispatcherBase {
  /**
   * @param {string|URL} url - origin to connect to.
   * @param {import('../types/client').Client.Options} options
   * @throws {InvalidArgumentError} on unsupported/invalid options.
   */
  constructor (url, {
    interceptors,
    maxHeaderSize,
    headersTimeout,
    socketTimeout,
    requestTimeout,
    connectTimeout,
    bodyTimeout,
    idleTimeout,
    keepAlive,
    keepAliveTimeout,
    maxKeepAliveTimeout,
    keepAliveMaxTimeout,
    keepAliveTimeoutThreshold,
    socketPath,
    pipelining,
    tls,
    strictContentLength,
    maxCachedSessions,
    maxRedirections,
    connect,
    maxRequestsPerClient,
    localAddress,
    maxResponseSize,
    autoSelectFamily,
    autoSelectFamilyAttemptTimeout,
    // h2
    allowH2,
    maxConcurrentStreams
  } = {}) {
    super()

    // Reject legacy/renamed options, pointing callers at replacements.
    if (keepAlive !== undefined) {
      throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
    }

    if (socketTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
    }

    if (requestTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
    }

    if (idleTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
    }

    if (maxKeepAliveTimeout !== undefined) {
      throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
    }

    // Validate the remaining options.
    if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {
      throw new InvalidArgumentError('invalid maxHeaderSize')
    }

    if (socketPath != null && typeof socketPath !== 'string') {
      throw new InvalidArgumentError('invalid socketPath')
    }

    if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
      throw new InvalidArgumentError('invalid connectTimeout')
    }

    if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
      throw new InvalidArgumentError('invalid keepAliveTimeout')
    }

    if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
      throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
    }

    if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
      throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
    }

    if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
      throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
    }

    if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
      throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
      throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
    }

    if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
      throw new InvalidArgumentError('localAddress must be valid string IP address')
    }

    if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
      throw new InvalidArgumentError('maxResponseSize must be a positive number')
    }

    if (
      autoSelectFamilyAttemptTimeout != null &&
      (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
    ) {
      throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
    }

    // h2
    if (allowH2 != null && typeof allowH2 !== 'boolean') {
      throw new InvalidArgumentError('allowH2 must be a valid boolean value')
    }

    if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
      throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0')
    }

    // Build a default connector from the TLS/socket options unless a
    // custom connect function was supplied.
    if (typeof connect !== 'function') {
      connect = buildConnector({
        ...tls,
        maxCachedSessions,
        allowH2,
        socketPath,
        timeout: connectTimeout,
        ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
        ...connect
      })
    }

    this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client)
      ? interceptors.Client
      : [createRedirectInterceptor({ maxRedirections })]
    this[kUrl] = util.parseOrigin(url)
    this[kConnector] = connect
    this[kSocket] = null
    this[kPipelining] = pipelining != null ? pipelining : 1
    this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize
    this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
    this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
    this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold
    this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
    this[kServerName] = null
    this[kLocalAddress] = localAddress != null ? localAddress : null
    this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming
    // NOTE(review): comment below looks copy-pasted from kResuming;
    // kNeedDrain reaches 2 when dispatch is blocked until 'drain'.
    this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming
    this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
    this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
    this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
    this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
    this[kMaxRedirections] = maxRedirections
    this[kMaxRequests] = maxRequestsPerClient
    this[kClosedResolve] = null
    this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
    this[kHTTPConnVersion] = 'h1'

    // HTTP/2
    this[kHTTP2Session] = null
    this[kHTTP2SessionState] = !allowH2
      ? null
      : {
        // streams: null, // Fixed queue of streams - For future support of `push`
        openStreams: 0, // Keep track of them to decide whether or not unref the session
        maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
      }
    this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}`

    // kQueue is built up of 3 sections separated by
    // the kRunningIdx and kPendingIdx indices.
    // |   complete   |   running   |   pending   |
    //                ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
    // kRunningIdx points to the first running element.
    // kPendingIdx points to the first pending element.
    // This implements a fast queue with an amortized
    // time of O(1).
    this[kQueue] = []
    this[kRunningIdx] = 0
    this[kPendingIdx] = 0
  }

  get pipelining () {
    return this[kPipelining]
  }

  set pipelining (value) {
    this[kPipelining] = value
    // Changing pipelining may unblock queued requests.
    resume(this, true)
  }

  // Number of queued-but-not-yet-running requests.
  get [kPending] () {
    return this[kQueue].length - this[kPendingIdx]
  }

  // Number of in-flight requests.
  get [kRunning] () {
    return this[kPendingIdx] - this[kRunningIdx]
  }

  // Total non-complete requests (running + pending).
  get [kSize] () {
    return this[kQueue].length - this[kRunningIdx]
  }

  get [kConnected] () {
    return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed
  }

  // Busy when the socket can't accept more writes or the pipeline is full.
  get [kBusy] () {
    const socket = this[kSocket]
    return (
      (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) ||
      (this[kSize] >= (this[kPipelining] || 1)) ||
      this[kPending] > 0
    )
  }

  /* istanbul ignore: only used for test */
  [kConnect] (cb) {
    connect(this)
    this.once('connect', cb)
  }

  [kDispatch] (opts, handler) {
    const origin = opts.origin || this[kUrl].origin

    const request = this[kHTTPConnVersion] === 'h2'
      ? Request[kHTTP2BuildRequest](origin, opts, handler)
      : Request[kHTTP1BuildRequest](origin, opts, handler)

    this[kQueue].push(request)
    if (this[kResuming]) {
      // Do nothing.
    } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
      // Wait a tick in case stream/iterator is ended in the same tick.
      this[kResuming] = 1
      process.nextTick(resume, this)
    } else {
      resume(this, true)
    }

    if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
      this[kNeedDrain] = 2
    }

    // false signals the dispatcher to wait for 'drain'.
    return this[kNeedDrain] < 2
  }

  async [kClose] () {
    // TODO: for H2 we need to gracefully flush the remaining enqueued
    // request and close each stream.
    return new Promise((resolve) => {
      if (!this[kSize]) {
        resolve(null)
      } else {
        // Deferred; resolved once the queue drains (see kDestroy).
        this[kClosedResolve] = resolve
      }
    })
  }

  async [kDestroy] (err) {
    return new Promise((resolve) => {
      // Fail all pending (not yet running) requests.
      const requests = this[kQueue].splice(this[kPendingIdx])
      for (let i = 0; i < requests.length; i++) {
        const request = requests[i]
        errorRequest(this, request, err)
      }

      const callback = () => {
        if (this[kClosedResolve]) {
          // TODO (fix): Should we error here with ClientDestroyedError?
          this[kClosedResolve]()
          this[kClosedResolve] = null
        }
        resolve()
      }

      if (this[kHTTP2Session] != null) {
        util.destroy(this[kHTTP2Session], err)
        this[kHTTP2Session] = null
        this[kHTTP2SessionState] = null
      }

      if (!this[kSocket]) {
        queueMicrotask(callback)
      } else {
        // Run the callback only after the socket has fully closed.
        util.destroy(this[kSocket].on('close', callback), err)
      }

      resume(this)
    })
  }
}
function onHttp2SessionError ( err ) {
assert ( err . code !== 'ERR_TLS_CERT_ALTNAME_INVALID' )
this [ kSocket ] [ kError ] = err
onError ( this [ kClient ] , err )
}
function onHttp2FrameError ( type , code , id ) {
const err = new InformationalError ( ` HTTP/2: "frameError" received - type ${ type } , code ${ code } ` )
if ( id === 0 ) {
this [ kSocket ] [ kError ] = err
onError ( this [ kClient ] , err )
}
}
function onHttp2SessionEnd ( ) {
util . destroy ( this , new SocketError ( 'other side closed' ) )
util . destroy ( this [ kSocket ] , new SocketError ( 'other side closed' ) )
}
/**
 * 'goaway' handler for the HTTP/2 session; `this` is the session, and the
 * owning client hangs off this[kClient]. Detaches the session/socket from
 * the client, fails affected requests and emits 'disconnect'.
 * @param {number} code - HTTP/2 error code carried by the GOAWAY frame.
 */
function onHTTP2GoAway (code) {
  const client = this[kClient]
  const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`)

  // Drop session/socket references first so resume() can reconnect.
  client[kSocket] = null
  client[kHTTP2Session] = null

  if (client.destroyed) {
    // Fix: kPending is a getter on the client — on the session (`this`)
    // it is undefined, which made this assert always throw. Likewise,
    // errorRequest expects the client (as at every other call site).
    assert(client[kPending] === 0)

    // Fail entire queue.
    const requests = client[kQueue].splice(client[kRunningIdx])
    for (let i = 0; i < requests.length; i++) {
      const request = requests[i]
      errorRequest(client, request, err)
    }
  } else if (client[kRunning] > 0) {
    // Fail head of pipeline.
    const request = client[kQueue][client[kRunningIdx]]
    client[kQueue][client[kRunningIdx]++] = null

    errorRequest(client, request, err)
  }

  client[kPendingIdx] = client[kRunningIdx]

  assert(client[kRunning] === 0)

  client.emit('disconnect',
    client[kUrl],
    [client],
    err
  )

  resume(client)
}
const constants = __nccwpck_require__(953)
const createRedirectInterceptor = __nccwpck_require__(8861)
// Shared zero-length buffer used to flush the parser without new data.
const EMPTY_BUF = Buffer.alloc(0)
/**
 * Lazily compiles and instantiates the llhttp WebAssembly module,
 * wiring its callbacks to the module-level `currentParser`.
 * Tries the SIMD build first and falls back to the plain build.
 * @returns {Promise<WebAssembly.Instance>}
 */
async function lazyllhttp () {
  // Under Jest the fallback wasm is loaded eagerly — presumably to
  // avoid lazy-require issues in that environment.
  const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(1145) : undefined

  let mod
  try {
    // SIMD build (base64-encoded wasm).
    mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(5627), 'base64'))
  } catch (e) {
    /* istanbul ignore next */

    // We could check if the error was caused by the simd option not
    // being enabled, but the occurring of this other error
    // * https://github.com/emscripten-core/emscripten/issues/11495
    // got me to remove that check to avoid breaking Node 12.
    mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(1145), 'base64'))
  }

  return await WebAssembly.instantiate(mod, {
    env: {
      /* eslint-disable camelcase */

      // Each wasm_on_* callback verifies it is invoked for the active
      // parser, translates wasm buffer offsets back into the original
      // JS buffer, and forwards to the Parser instance. Returning a
      // non-zero value signals llhttp to pause/abort.
      wasm_on_url: (p, at, len) => {
        /* istanbul ignore next */
        return 0
      },
      wasm_on_status: (p, at, len) => {
        assert.strictEqual(currentParser.ptr, p)
        const start = at - currentBufferPtr + currentBufferRef.byteOffset
        return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
      },
      wasm_on_message_begin: (p) => {
        assert.strictEqual(currentParser.ptr, p)
        return currentParser.onMessageBegin() || 0
      },
      wasm_on_header_field: (p, at, len) => {
        assert.strictEqual(currentParser.ptr, p)
        const start = at - currentBufferPtr + currentBufferRef.byteOffset
        return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
      },
      wasm_on_header_value: (p, at, len) => {
        assert.strictEqual(currentParser.ptr, p)
        const start = at - currentBufferPtr + currentBufferRef.byteOffset
        return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
      },
      wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => {
        assert.strictEqual(currentParser.ptr, p)
        return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0
      },
      wasm_on_body: (p, at, len) => {
        assert.strictEqual(currentParser.ptr, p)
        const start = at - currentBufferPtr + currentBufferRef.byteOffset
        return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
      },
      wasm_on_message_complete: (p) => {
        assert.strictEqual(currentParser.ptr, p)
        return currentParser.onMessageComplete() || 0
      }

      /* eslint-enable camelcase */
    }
  })
}
// Parser module-level state: the wasm instance is compiled eagerly but
// awaited lazily; the current* variables track the parser/buffer that
// the wasm callbacks in lazyllhttp() refer back to.
let llhttpInstance = null
let llhttpPromise = lazyllhttp()
// Suppress an early unhandledRejection warning; the error resurfaces
// when the promise is awaited at connect time. Fix: a bare `.catch()`
// (no handler) returns a derived promise that rejects with the same
// reason and is itself unhandled, so it suppressed nothing.
llhttpPromise.catch(() => {})

let currentParser = null
let currentBufferRef = null
let currentBufferSize = 0
let currentBufferPtr = null

// timeoutType values for Parser.setTimeout.
const TIMEOUT_HEADERS = 1
const TIMEOUT_BODY = 2
const TIMEOUT_IDLE = 3
class Parser {
/**
 * Wraps one llhttp wasm parser around a client/socket pair.
 * @param {*} client - owning Client instance.
 * @param {*} socket - underlying net/tls socket.
 * @param {{ exports: * }} param2 - instantiated llhttp wasm exports.
 */
constructor (client, socket, { exports }) {
  assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0)

  this.llhttp = exports
  // Pointer into wasm memory for this parser instance; null once freed.
  this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE)
  this.client = client
  this.socket = socket
  // Active timer handle plus its value/type (TIMEOUT_* constants).
  this.timeout = null
  this.timeoutValue = null
  this.timeoutType = null
  this.statusCode = null
  this.statusText = ''
  this.upgrade = false
  // Flat [name, value, name, value, ...] list of Buffers.
  this.headers = []
  this.headersSize = 0
  this.headersMaxSize = client[kMaxHeadersSize]
  this.shouldKeepAlive = false
  this.paused = false
  this.resume = this.resume.bind(this) // stable identity for onHeaders
  this.bytesRead = 0
  // Accumulated raw values of keep-alive/connection/content-length headers.
  this.keepAlive = ''
  this.contentLength = ''
  this.connection = ''
  this.maxResponseSize = client[kMaxResponseSize]
}
/**
 * Arms (or refreshes) the parser timeout.
 * @param {number} value - delay in ms; falsy disables the timer.
 * @param {number} type - one of TIMEOUT_HEADERS/TIMEOUT_BODY/TIMEOUT_IDLE.
 */
setTimeout (value, type) {
  this.timeoutType = type
  if (value !== this.timeoutValue) {
    // Delay changed: replace the timer entirely.
    timers.clearTimeout(this.timeout)
    if (value) {
      this.timeout = timers.setTimeout(onParserTimeout, value, this)
    // istanbul ignore else: only for jest
      if (this.timeout.unref) {
        this.timeout.unref()
      }
    } else {
      this.timeout = null
    }
    this.timeoutValue = value
  } else if (this.timeout) {
    // Same delay: just restart the countdown.
    // istanbul ignore else: only for jest
    if (this.timeout.refresh) {
      this.timeout.refresh()
    }
  }
}
/**
 * Resumes a paused parser: un-pauses llhttp, refreshes the body timer,
 * flushes buffered data and keeps reading from the socket.
 */
resume () {
  if (this.socket.destroyed || !this.paused) {
    return
  }

  assert(this.ptr != null)
  assert(currentParser == null)

  this.llhttp.llhttp_resume(this.ptr)

  // Pausing only happens while a body is being read.
  assert(this.timeoutType === TIMEOUT_BODY)
  if (this.timeout) {
    // istanbul ignore else: only for jest
    if (this.timeout.refresh) {
      this.timeout.refresh()
    }
  }

  this.paused = false
  this.execute(this.socket.read() || EMPTY_BUF) // Flush parser.
  this.readMore()
}
readMore ( ) {
while ( ! this . paused && this . ptr ) {
const chunk = this . socket . read ( )
if ( chunk === null ) {
break
}
this . execute ( chunk )
}
}
/**
 * Feeds one chunk of socket data to the llhttp wasm parser.
 * Copies the data into wasm memory, runs llhttp_execute, and handles
 * pause/upgrade/error results. Destroys the socket on parse errors.
 * @param {Buffer} data
 */
execute (data) {
  assert(this.ptr != null)
  assert(currentParser == null)
  assert(!this.paused)

  const { socket, llhttp } = this

  // Grow the shared wasm-side buffer (4 KiB granularity) when needed.
  if (data.length > currentBufferSize) {
    if (currentBufferPtr) {
      llhttp.free(currentBufferPtr)
    }
    currentBufferSize = Math.ceil(data.length / 4096) * 4096
    currentBufferPtr = llhttp.malloc(currentBufferSize)
  }

  new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data)

  // Call `execute` on the wasm parser.
  // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data,
  // and finally the length of bytes to parse.
  // The return value is an error code or `constants.ERROR.OK`.
  try {
    let ret

    try {
      // Expose this parser/buffer to the wasm_on_* callbacks.
      currentBufferRef = data
      currentParser = this
      ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length)
      /* eslint-disable-next-line no-useless-catch */
    } catch (err) {
      /* istanbul ignore next: difficult to make a test case for */
      throw err
    } finally {
      currentParser = null
      currentBufferRef = null
    }

    // Offset into `data` where llhttp stopped.
    const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr

    if (ret === constants.ERROR.PAUSED_UPGRADE) {
      this.onUpgrade(data.slice(offset))
    } else if (ret === constants.ERROR.PAUSED) {
      // Push unconsumed bytes back onto the socket for later.
      this.paused = true
      socket.unshift(data.slice(offset))
    } else if (ret !== constants.ERROR.OK) {
      // Read the NUL-terminated error reason out of wasm memory.
      const ptr = llhttp.llhttp_get_error_reason(this.ptr)
      let message = ''
      /* istanbul ignore else: difficult to make a test case for */
      if (ptr) {
        const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0)
        message =
          'Response does not match the HTTP/1.1 protocol (' +
          Buffer.from(llhttp.memory.buffer, ptr, len).toString() +
          ')'
      }
      throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset))
    }
  } catch (err) {
    util.destroy(socket, err)
  }
}
/**
 * Frees the wasm parser and clears any pending timeout. The parser must
 * not be used again afterwards (ptr is nulled as the sentinel).
 */
destroy () {
  assert(this.ptr != null)
  assert(currentParser == null)

  this.llhttp.llhttp_free(this.ptr)
  this.ptr = null

  timers.clearTimeout(this.timeout)
  this.timeout = null
  this.timeoutValue = null
  this.timeoutType = null

  this.paused = false
}
// llhttp callback: stores the response status text (reason phrase).
onStatus (buf) {
  this.statusText = buf.toString()
}
// llhttp callback: a new response message is starting. Returning -1
// aborts parsing when the socket is gone or no request is in flight.
onMessageBegin () {
  const { socket, client } = this

  /* istanbul ignore next: difficult to make a test case for */
  if (socket.destroyed) {
    return -1
  }

  const request = client[kQueue][client[kRunningIdx]]
  if (!request) {
    return -1
  }
}
// llhttp callback: header-name bytes. Names may arrive in several
// chunks; even array length means a new name starts, odd means the
// previous name continues and is concatenated.
onHeaderField (buf) {
  const len = this.headers.length

  if ((len & 1) === 0) {
    this.headers.push(buf)
  } else {
    this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
  }

  this.trackHeader(buf.length)
}
// llhttp callback: header-value bytes (possibly chunked, concatenated
// like onHeaderField). Also accumulates the raw values of the
// keep-alive, connection and content-length headers for later use.
onHeaderValue (buf) {
  let len = this.headers.length

  if ((len & 1) === 1) {
    // Odd length: a name was just pushed — start its value.
    this.headers.push(buf)
    len += 1
  } else {
    this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
  }

  // Cheap length check before the case-insensitive name comparison.
  const key = this.headers[len - 2]
  if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') {
    this.keepAlive += buf.toString()
  } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') {
    this.connection += buf.toString()
  } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') {
    this.contentLength += buf.toString()
  }

  this.trackHeader(buf.length)
}
// Accumulates header bytes and kills the socket once the configured
// maximum header size is exceeded.
trackHeader (len) {
  this.headersSize += len
  if (this.headersSize >= this.headersMaxSize) {
    util.destroy(this.socket, new HeadersOverflowError())
  }
}
/**
 * Completes a protocol upgrade (or CONNECT): detaches the socket from
 * the parser/client and hands it, with any already-read bytes, to the
 * request's onUpgrade handler.
 * @param {Buffer} head - bytes read past the end of the HTTP response.
 */
onUpgrade (head) {
  const { upgrade, client, socket, headers, statusCode } = this

  assert(upgrade)

  const request = client[kQueue][client[kRunningIdx]]
  assert(request)

  assert(!socket.destroyed)
  assert(socket === client[kSocket])
  assert(!this.paused)
  assert(request.upgrade || request.method === 'CONNECT')

  // Reset per-message parser state.
  this.statusCode = null
  this.statusText = ''
  this.shouldKeepAlive = null

  assert(this.headers.length % 2 === 0)
  this.headers = []
  this.headersSize = 0

  // Give the extra bytes back to the socket before handing it over.
  socket.unshift(head)

  // Fully detach the socket from this client.
  socket[kParser].destroy()
  socket[kParser] = null

  socket[kClient] = null
  socket[kError] = null
  socket
    .removeListener('error', onSocketError)
    .removeListener('readable', onSocketReadable)
    .removeListener('end', onSocketEnd)
    .removeListener('close', onSocketClose)

  client[kSocket] = null
  client[kQueue][client[kRunningIdx]++] = null
  client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade'))

  try {
    request.onUpgrade(statusCode, headers, socket)
  } catch (err) {
    util.destroy(socket, err)
  }

  resume(client)
}
/**
 * llhttp callback: all response headers have been parsed.
 * Decides keep-alive behavior, arms the body timeout, and forwards the
 * headers to the request. Return codes steer llhttp: -1 aborts, 1 skips
 * the body, 2 signals upgrade, ERROR.PAUSED pauses, 0 continues.
 * @param {number} statusCode
 * @param {boolean} upgrade
 * @param {boolean} shouldKeepAlive
 */
onHeadersComplete (statusCode, upgrade, shouldKeepAlive) {
  const { client, socket, headers, statusText } = this

  /* istanbul ignore next: difficult to make a test case for */
  if (socket.destroyed) {
    return -1
  }

  const request = client[kQueue][client[kRunningIdx]]

  /* istanbul ignore next: difficult to make a test case for */
  if (!request) {
    return -1
  }

  assert(!this.upgrade)
  assert(this.statusCode < 200)

  // 100 Continue must not arrive as a standalone headers-complete here.
  if (statusCode === 100) {
    util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket)))
    return -1
  }

  /* this can only happen if server is misbehaving */
  if (upgrade && !request.upgrade) {
    util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket)))
    return -1
  }

  assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS)

  this.statusCode = statusCode
  this.shouldKeepAlive = (
    shouldKeepAlive ||
    // Override llhttp value which does not allow keepAlive for HEAD.
    (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive')
  )

  if (this.statusCode >= 200) {
    // Final response: switch from the headers timeout to the body timeout.
    const bodyTimeout = request.bodyTimeout != null
      ? request.bodyTimeout
      : client[kBodyTimeout]
    this.setTimeout(bodyTimeout, TIMEOUT_BODY)
  } else if (this.timeout) {
    // Informational response: keep waiting for the final headers.
    // istanbul ignore else: only for jest
    if (this.timeout.refresh) {
      this.timeout.refresh()
    }
  }

  if (request.method === 'CONNECT') {
    assert(client[kRunning] === 1)
    this.upgrade = true
    return 2
  }

  if (upgrade) {
    assert(client[kRunning] === 1)
    this.upgrade = true
    return 2
  }

  assert(this.headers.length % 2 === 0)
  this.headers = []
  this.headersSize = 0

  if (this.shouldKeepAlive && client[kPipelining]) {
    // Honor the server's Keep-Alive timeout hint, clamped to the
    // configured maximum and reduced by the safety threshold.
    const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null

    if (keepAliveTimeout != null) {
      const timeout = Math.min(
        keepAliveTimeout - client[kKeepAliveTimeoutThreshold],
        client[kKeepAliveMaxTimeout]
      )
      if (timeout <= 0) {
        socket[kReset] = true
      } else {
        client[kKeepAliveTimeoutValue] = timeout
      }
    } else {
      client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout]
    }
  } else {
    // Stop more requests from being dispatched.
    socket[kReset] = true
  }

  const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false

  if (request.aborted) {
    return -1
  }

  if (request.method === 'HEAD') {
    return 1
  }

  if (statusCode < 200) {
    return 1
  }

  if (socket[kBlocking]) {
    socket[kBlocking] = false
    resume(client)
  }

  return pause ? constants.ERROR.PAUSED : 0
}
// llhttp callback: invoked for each chunk of response body data.
// Returning -1 aborts parsing; constants.ERROR.PAUSED pauses it until
// the consumer is ready for more data.
onBody (buf) {
  const { client, socket, statusCode, maxResponseSize } = this

  if (socket.destroyed) {
    return -1
  }

  const request = client[kQueue][client[kRunningIdx]]
  assert(request)

  // Each body chunk refreshes the body timeout.
  assert.strictEqual(this.timeoutType, TIMEOUT_BODY)
  if (this.timeout) {
    // istanbul ignore else: only for jest
    if (this.timeout.refresh) {
      this.timeout.refresh()
    }
  }

  // Body data only arrives after a final (>= 200) status line.
  assert(statusCode >= 200)

  if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) {
    util.destroy(socket, new ResponseExceededMaxSizeError())
    return -1
  }

  this.bytesRead += buf.length

  // Back-pressure: pause the parser when the consumer cannot keep up.
  if (request.onData(buf) === false) {
    return constants.ERROR.PAUSED
  }
}
// llhttp callback: invoked when a complete response message has been
// parsed. Resets per-message parser state, completes the request and
// decides whether the connection can be reused.
onMessageComplete () {
  const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this

  if (socket.destroyed && (!statusCode || shouldKeepAlive)) {
    return -1
  }

  if (upgrade) {
    // Upgraded connections are finalized by onUpgrade instead.
    return
  }

  const request = client[kQueue][client[kRunningIdx]]
  assert(request)
  assert(statusCode >= 100)

  // Reset per-message parser state for the next response.
  this.statusCode = null
  this.statusText = ''
  this.bytesRead = 0
  this.contentLength = ''
  this.keepAlive = ''
  this.connection = ''

  assert(this.headers.length % 2 === 0)
  this.headers = []
  this.headersSize = 0

  if (statusCode < 200) {
    // Informational message; the final response is still to come.
    return
  }

  /* istanbul ignore next: should be handled by llhttp? */
  if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) {
    util.destroy(socket, new ResponseContentLengthMismatchError())
    return -1
  }

  request.onComplete(headers)

  // Retire the request from the running queue.
  client[kQueue][client[kRunningIdx]++] = null

  if (socket[kWriting]) {
    assert.strictEqual(client[kRunning], 0)

    // Response completed before request.
    util.destroy(socket, new InformationalError('reset'))
    return constants.ERROR.PAUSED
  } else if (!shouldKeepAlive) {
    util.destroy(socket, new InformationalError('reset'))
    return constants.ERROR.PAUSED
  } else if (socket[kReset] && client[kRunning] === 0) {
    // Destroy socket once all requests have completed.
    // The request at the tail of the pipeline is the one
    // that requested reset and no further requests should
    // have been queued since then.
    util.destroy(socket, new InformationalError('reset'))
    return constants.ERROR.PAUSED
  } else if (client[kPipelining] === 1) {
    // We must wait a full event loop cycle to reuse this socket to make sure
    // that non-spec compliant servers are not closing the connection even if they
    // said they won't.
    setImmediate(resume, client)
  } else {
    resume(client)
  }
}
}
// Timer callback for the parser's active timeout. Destroys the socket
// with an error matching the timeout type (headers / body / idle).
function onParserTimeout (parser) {
  const { socket, timeoutType, client } = parser

  /* istanbul ignore else */
  if (timeoutType === TIMEOUT_HEADERS) {
    // NOTE(review): presumably skips the timeout while we are still
    // streaming the request body with buffer room and a single in-flight
    // request (i.e. the server is not yet expected to answer) — confirm.
    if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) {
      assert(!parser.paused, 'cannot be paused while waiting for headers')
      util.destroy(socket, new HeadersTimeoutError())
    }
  } else if (timeoutType === TIMEOUT_BODY) {
    // A paused parser means the consumer is the bottleneck, not the server.
    if (!parser.paused) {
      util.destroy(socket, new BodyTimeoutError())
    }
  } else if (timeoutType === TIMEOUT_IDLE) {
    assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue])
    util.destroy(socket, new InformationalError('socket idle timeout'))
  }
}
// 'readable' listener: pump newly arrived socket data into the parser.
// kParser is null once the connection has been upgraded or torn down.
function onSocketReadable () {
  this[kParser]?.readMore()
}
function onSocketError ( err ) {
const { [ kClient ] : client , [ kParser ] : parser } = this
assert ( err . code !== 'ERR_TLS_CERT_ALTNAME_INVALID' )
if ( client [ kHTTPConnVersion ] !== 'h2' ) {
// On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded
// to the user.
if ( err . code === 'ECONNRESET' && parser . statusCode && ! parser . shouldKeepAlive ) {
// We treat all incoming data so for as a valid response.
parser . onMessageComplete ( )
return
}
}
this [ kError ] = err
onError ( this [ kClient ] , err )
}
// Fails all queued (not-yet-running) requests when the client hits an
// error that is neither caused by a running request nor a recoverable
// socket-level condition.
function onError (client, err) {
  const recoverable =
    err.code === 'UND_ERR_INFO' ||
    err.code === 'UND_ERR_SOCKET'

  if (client[kRunning] !== 0 || recoverable) {
    return
  }

  // Error is not caused by running request and not a recoverable
  // socket error.
  assert(client[kPendingIdx] === client[kRunningIdx])

  const requests = client[kQueue].splice(client[kRunningIdx])
  for (const request of requests) {
    errorRequest(client, request, err)
  }
  assert(client[kSize] === 0)
}
// 'end' listener: the peer finished sending. For HTTP/1, a complete
// non-keep-alive response is flushed as valid; otherwise the socket is
// destroyed as closed by the other side.
function onSocketEnd () {
  const parser = this[kParser]
  const client = this[kClient]

  if (client[kHTTPConnVersion] !== 'h2' && parser.statusCode && !parser.shouldKeepAlive) {
    // We treat all incoming data so far as a valid response.
    parser.onMessageComplete()
    return
  }

  util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
}
function onSocketClose ( ) {
const { [ kClient ] : client , [ kParser ] : parser } = this
if ( client [ kHTTPConnVersion ] === 'h1' && parser ) {
if ( ! this [ kError ] && parser . statusCode && ! parser . shouldKeepAlive ) {
// We treat all incoming data so far as a valid response.
parser . onMessageComplete ( )
}
this [ kParser ] . destroy ( )
this [ kParser ] = null
}
const err = this [ kError ] || new SocketError ( 'closed' , util . getSocketInfo ( this ) )
client [ kSocket ] = null
if ( client . destroyed ) {
assert ( client [ kPending ] === 0 )
// Fail entire queue.
const requests = client [ kQueue ] . splice ( client [ kRunningIdx ] )
for ( let i = 0 ; i < requests . length ; i ++ ) {
const request = requests [ i ]
errorRequest ( client , request , err )
}
} else if ( client [ kRunning ] > 0 && err . code !== 'UND_ERR_INFO' ) {
// Fail head of pipeline.
const request = client [ kQueue ] [ client [ kRunningIdx ] ]
client [ kQueue ] [ client [ kRunningIdx ] ++ ] = null
errorRequest ( client , request , err )
}
client [ kPendingIdx ] = client [ kRunningIdx ]
assert ( client [ kRunning ] === 0 )
client . emit ( 'disconnect' , client [ kUrl ] , [ client ] , err )
resume ( client )
}
async function connect ( client ) {
assert ( ! client [ kConnecting ] )
assert ( ! client [ kSocket ] )
let { host , hostname , protocol , port } = client [ kUrl ]
// Resolve ipv6
if ( hostname [ 0 ] === '[' ) {
const idx = hostname . indexOf ( ']' )
assert ( idx !== - 1 )
const ip = hostname . substring ( 1 , idx )
assert ( net . isIP ( ip ) )
hostname = ip
}
client [ kConnecting ] = true
if ( channels . beforeConnect . hasSubscribers ) {
channels . beforeConnect . publish ( {
connectParams : {
host ,
hostname ,
protocol ,
port ,
servername : client [ kServerName ] ,
localAddress : client [ kLocalAddress ]
} ,
connector : client [ kConnector ]
} )
}
try {
const socket = await new Promise ( ( resolve , reject ) => {
client [ kConnector ] ( {
host ,
hostname ,
protocol ,
port ,
servername : client [ kServerName ] ,
localAddress : client [ kLocalAddress ]
} , ( err , socket ) => {
if ( err ) {
reject ( err )
} else {
resolve ( socket )
}
} )
} )
if ( client . destroyed ) {
util . destroy ( socket . on ( 'error' , ( ) => { } ) , new ClientDestroyedError ( ) )
return
}
client [ kConnecting ] = false
assert ( socket )
const isH2 = socket . alpnProtocol === 'h2'
if ( isH2 ) {
if ( ! h2ExperimentalWarned ) {
h2ExperimentalWarned = true
process . emitWarning ( 'H2 support is experimental, expect them to change at any time.' , {
code : 'UNDICI-H2'
} )
}
const session = http2 . connect ( client [ kUrl ] , {
createConnection : ( ) => socket ,
peerMaxConcurrentStreams : client [ kHTTP2SessionState ] . maxConcurrentStreams
} )
client [ kHTTPConnVersion ] = 'h2'
session [ kClient ] = client
session [ kSocket ] = socket
session . on ( 'error' , onHttp2SessionError )
session . on ( 'frameError' , onHttp2FrameError )
session . on ( 'end' , onHttp2SessionEnd )
session . on ( 'goaway' , onHTTP2GoAway )
session . on ( 'close' , onSocketClose )
session . unref ( )
client [ kHTTP2Session ] = session
socket [ kHTTP2Session ] = session
} else {
if ( ! llhttpInstance ) {
llhttpInstance = await llhttpPromise
llhttpPromise = null
}
socket [ kNoRef ] = false
socket [ kWriting ] = false
socket [ kReset ] = false
socket [ kBlocking ] = false
socket [ kParser ] = new Parser ( client , socket , llhttpInstance )
}
socket [ kCounter ] = 0
socket [ kMaxRequests ] = client [ kMaxRequests ]
socket [ kClient ] = client
socket [ kError ] = null
socket
. on ( 'error' , onSocketError )
. on ( 'readable' , onSocketReadable )
. on ( 'end' , onSocketEnd )
. on ( 'close' , onSocketClose )
client [ kSocket ] = socket
if ( channels . connected . hasSubscribers ) {
channels . connected . publish ( {
connectParams : {
host ,
hostname ,
protocol ,
port ,
servername : client [ kServerName ] ,
localAddress : client [ kLocalAddress ]
} ,
connector : client [ kConnector ] ,
socket
} )
}
client . emit ( 'connect' , client [ kUrl ] , [ client ] )
} catch ( err ) {
if ( client . destroyed ) {
return
}
client [ kConnecting ] = false
if ( channels . connectError . hasSubscribers ) {
channels . connectError . publish ( {
connectParams : {
host ,
hostname ,
protocol ,
port ,
servername : client [ kServerName ] ,
localAddress : client [ kLocalAddress ]
} ,
connector : client [ kConnector ] ,
error : err
} )
}
if ( err . code === 'ERR_TLS_CERT_ALTNAME_INVALID' ) {
assert ( client [ kRunning ] === 0 )
while ( client [ kPending ] > 0 && client [ kQueue ] [ client [ kPendingIdx ] ] . servername === client [ kServerName ] ) {
const request = client [ kQueue ] [ client [ kPendingIdx ] ++ ]
errorRequest ( client , request , err )
}
} else {
onError ( client , err )
}
client . emit ( 'connectionError' , client [ kUrl ] , [ client ] , err )
}
resume ( client )
}
function emitDrain ( client ) {
client [ kNeedDrain ] = 0
client . emit ( 'drain' , client [ kUrl ] , [ client ] )
}
function resume ( client , sync ) {
if ( client [ kResuming ] === 2 ) {
return
}
client [ kResuming ] = 2
_resume ( client , sync )
client [ kResuming ] = 0
if ( client [ kRunningIdx ] > 256 ) {
client [ kQueue ] . splice ( 0 , client [ kRunningIdx ] )
client [ kPendingIdx ] -= client [ kRunningIdx ]
client [ kRunningIdx ] = 0
}
}
// Core dispatch loop: manages socket ref/unref, parser timeouts and
// drain events, then dispatches pending requests while pipelining
// constraints allow. Returns when nothing further can be dispatched.
function _resume (client, sync) {
  while (true) {
    if (client.destroyed) {
      assert(client[kPending] === 0)
      return
    }

    if (client[kClosedResolve] && !client[kSize]) {
      // close() was requested and all requests finished; resolve it.
      client[kClosedResolve]()
      client[kClosedResolve] = null
      return
    }

    const socket = client[kSocket]

    if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') {
      // Unref an idle socket so it does not keep the process alive;
      // re-ref as soon as there is work again.
      if (client[kSize] === 0) {
        if (!socket[kNoRef] && socket.unref) {
          socket.unref()
          socket[kNoRef] = true
        }
      } else if (socket[kNoRef] && socket.ref) {
        socket.ref()
        socket[kNoRef] = false
      }

      if (client[kSize] === 0) {
        if (socket[kParser].timeoutType !== TIMEOUT_IDLE) {
          socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE)
        }
      } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) {
        // Waiting on a final response: arm the headers timeout.
        if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) {
          const request = client[kQueue][client[kRunningIdx]]
          const headersTimeout = request.headersTimeout != null
            ? request.headersTimeout
            : client[kHeadersTimeout]
          socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS)
        }
      }
    }

    if (client[kBusy]) {
      client[kNeedDrain] = 2
    } else if (client[kNeedDrain] === 2) {
      if (sync) {
        // Defer the drain event when resuming synchronously.
        client[kNeedDrain] = 1
        process.nextTick(emitDrain, client)
      } else {
        emitDrain(client)
      }
      continue
    }

    if (client[kPending] === 0) {
      return
    }

    if (client[kRunning] >= (client[kPipelining] || 1)) {
      // Pipeline is full.
      return
    }

    const request = client[kQueue][client[kPendingIdx]]

    if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
      // SNI servername changed: requires a fresh TLS connection once all
      // in-flight requests have drained.
      if (client[kRunning] > 0) {
        return
      }

      client[kServerName] = request.servername

      if (socket && socket.servername !== request.servername) {
        util.destroy(socket, new InformationalError('servername changed'))
        return
      }
    }

    if (client[kConnecting]) {
      return
    }

    if (!socket && !client[kHTTP2Session]) {
      connect(client)
      return
    }

    if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) {
      return
    }

    if (client[kRunning] > 0 && !request.idempotent) {
      // Non-idempotent request cannot be retried.
      // Ensure that no other requests are inflight and
      // could cause failure.
      return
    }

    if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) {
      // Don't dispatch an upgrade until all preceding requests have completed.
      // A misbehaving server might upgrade the connection before all pipelined
      // request has completed.
      return
    }

    if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 &&
      (util.isStream(request.body) || util.isAsyncIterable(request.body))) {
      // Request with stream or iterator body can error while other requests
      // are inflight and indirectly error those as well.
      // Ensure this doesn't happen by waiting for inflight
      // to complete before dispatching.

      // Request with stream or iterator body cannot be retried.
      // Ensure that no other requests are inflight and
      // could cause failure.
      return
    }

    if (!request.aborted && write(client, request)) {
      client[kPendingIdx]++
    } else {
      // Aborted or failed to dispatch: remove from the queue.
      client[kQueue].splice(client[kPendingIdx], 1)
    }
  }
}
// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
// Returns true when a request with this HTTP method may carry a
// Content-Length header (i.e. the method anticipates a payload body).
function shouldSendContentLength (method) {
  const bodylessMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE', 'CONNECT']
  return !bodylessMethods.includes(method)
}
function write ( client , request ) {
if ( client [ kHTTPConnVersion ] === 'h2' ) {
writeH2 ( client , client [ kHTTP2Session ] , request )
return
}
const { body , method , path , host , upgrade , headers , blocking , reset } = request
// https://tools.ietf.org/html/rfc7231#section-4.3.1
// https://tools.ietf.org/html/rfc7231#section-4.3.2
// https://tools.ietf.org/html/rfc7231#section-4.3.5
// Sending a payload body on a request that does not
// expect it can cause undefined behavior on some
// servers and corrupt connection state. Do not
// re-use the connection for further requests.
const expectsPayload = (
method === 'PUT' ||
method === 'POST' ||
method === 'PATCH'
)
if ( body && typeof body . read === 'function' ) {
// Try to read EOF in order to get length.
body . read ( 0 )
}
const bodyLength = util . bodyLength ( body )
let contentLength = bodyLength
if ( contentLength === null ) {
contentLength = request . contentLength
}
if ( contentLength === 0 && ! expectsPayload ) {
// https://tools.ietf.org/html/rfc7230#section-3.3.2
// A user agent SHOULD NOT send a Content-Length header field when
// the request message does not contain a payload body and the method
// semantics do not anticipate such a body.
contentLength = null
}
// https://github.com/nodejs/undici/issues/2046
// A user agent may send a Content-Length header with 0 value, this should be allowed.
if ( shouldSendContentLength ( method ) && contentLength > 0 && request . contentLength !== null && request . contentLength !== contentLength ) {
if ( client [ kStrictContentLength ] ) {
errorRequest ( client , request , new RequestContentLengthMismatchError ( ) )
return false
}
process . emitWarning ( new RequestContentLengthMismatchError ( ) )
}
const socket = client [ kSocket ]
try {
request . onConnect ( ( err ) => {
if ( request . aborted || request . completed ) {
return
}
errorRequest ( client , request , err || new RequestAbortedError ( ) )
util . destroy ( socket , new InformationalError ( 'aborted' ) )
} )
} catch ( err ) {
errorRequest ( client , request , err )
}
if ( request . aborted ) {
return false
}
if ( method === 'HEAD' ) {
// https://github.com/mcollina/undici/issues/258
// Close after a HEAD request to interop with misbehaving servers
// that may send a body in the response.
socket [ kReset ] = true
}
if ( upgrade || method === 'CONNECT' ) {
// On CONNECT or upgrade, block pipeline from dispatching further
// requests on this connection.
socket [ kReset ] = true
}
if ( reset != null ) {
socket [ kReset ] = reset
}
if ( client [ kMaxRequests ] && socket [ kCounter ] ++ >= client [ kMaxRequests ] ) {
socket [ kReset ] = true
}
if ( blocking ) {
socket [ kBlocking ] = true
}
let header = ` ${ method } ${ path } HTTP/1.1 \r \n `
if ( typeof host === 'string' ) {
header += ` host: ${ host } \r \n `
} else {
header += client [ kHostHeader ]
}
if ( upgrade ) {
header += ` connection: upgrade \r \n upgrade: ${ upgrade } \r \n `
} else if ( client [ kPipelining ] && ! socket [ kReset ] ) {
header += 'connection: keep-alive\r\n'
} else {
header += 'connection: close\r\n'
}
if ( headers ) {
header += headers
}
if ( channels . sendHeaders . hasSubscribers ) {
channels . sendHeaders . publish ( { request , headers : header , socket } )
}
/* istanbul ignore else: assertion */
if ( ! body || bodyLength === 0 ) {
if ( contentLength === 0 ) {
socket . write ( ` ${ header } content-length: 0 \r \n \r \n ` , 'latin1' )
} else {
assert ( contentLength === null , 'no body must not have content length' )
socket . write ( ` ${ header } \r \n ` , 'latin1' )
}
request . onRequestSent ( )
} else if ( util . isBuffer ( body ) ) {
assert ( contentLength === body . byteLength , 'buffer body must have content length' )
socket . cork ( )
socket . write ( ` ${ header } content-length: ${ contentLength } \r \n \r \n ` , 'latin1' )
socket . write ( body )
socket . uncork ( )
request . onBodySent ( body )
request . onRequestSent ( )
if ( ! expectsPayload ) {
socket [ kReset ] = true
}
} else if ( util . isBlobLike ( body ) ) {
if ( typeof body . stream === 'function' ) {
writeIterable ( { body : body . stream ( ) , client , request , socket , contentLength , header , expectsPayload } )
} else {
writeBlob ( { body , client , request , socket , contentLength , header , expectsPayload } )
}
} else if ( util . isStream ( body ) ) {
writeStream ( { body , client , request , socket , contentLength , header , expectsPayload } )
} else if ( util . isIterable ( body ) ) {
writeIterable ( { body , client , request , socket , contentLength , header , expectsPayload } )
} else {
assert ( false )
}
return true
}
// Dispatches a request over an established HTTP/2 session: maps the
// request onto h2 pseudo-headers, opens a stream and wires stream
// lifecycle events back to the request. Returns false when dispatch
// fails before a stream is opened.
function writeH2 (client, session, request) {
  const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request

  let headers
  if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim())
  else headers = reqHeaders

  if (upgrade) {
    errorRequest(client, request, new Error('Upgrade not supported for H2'))
    return false
  }

  try {
    // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event?
    request.onConnect((err) => {
      if (request.aborted || request.completed) {
        return
      }

      errorRequest(client, request, err || new RequestAbortedError())
    })
  } catch (err) {
    errorRequest(client, request, err)
  }

  if (request.aborted) {
    return false
  }

  /** @type {import('node:http2').ClientHttp2Stream} */
  let stream
  const h2State = client[kHTTP2SessionState]

  headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]
  headers[HTTP2_HEADER_METHOD] = method

  if (method === 'CONNECT') {
    session.ref()
    // we are already connected, streams are pending, first request
    // will create a new stream. We trigger a request to create the stream and wait until
    // `ready` event is triggered
    // We disabled endStream to allow the user to write to the stream
    stream = session.request(headers, { endStream: false, signal })

    if (stream.id && !stream.pending) {
      request.onUpgrade(null, null, stream)
      ++h2State.openStreams
    } else {
      stream.once('ready', () => {
        request.onUpgrade(null, null, stream)
        ++h2State.openStreams
      })
    }

    stream.once('close', () => {
      h2State.openStreams -= 1
      // TODO(HTTP/2): unref only if current streams count is 0
      if (h2State.openStreams === 0) session.unref()
    })

    return true
  }

  // https://tools.ietf.org/html/rfc7540#section-8.3
  // :path and :scheme headers must be omited when sending CONNECT

  headers[HTTP2_HEADER_PATH] = path
  headers[HTTP2_HEADER_SCHEME] = 'https'

  // https://tools.ietf.org/html/rfc7231#section-4.3.1
  // https://tools.ietf.org/html/rfc7231#section-4.3.2
  // https://tools.ietf.org/html/rfc7231#section-4.3.5

  // Sending a payload body on a request that does not
  // expect it can cause undefined behavior on some
  // servers and corrupt connection state. Do not
  // re-use the connection for further requests.

  const expectsPayload = (
    method === 'PUT' ||
    method === 'POST' ||
    method === 'PATCH'
  )

  if (body && typeof body.read === 'function') {
    // Try to read EOF in order to get length.
    body.read(0)
  }

  let contentLength = util.bodyLength(body)

  if (contentLength == null) {
    contentLength = request.contentLength
  }

  if (contentLength === 0 || !expectsPayload) {
    // https://tools.ietf.org/html/rfc7230#section-3.3.2
    // A user agent SHOULD NOT send a Content-Length header field when
    // the request message does not contain a payload body and the method
    // semantics do not anticipate such a body.

    contentLength = null
  }

  // https://github.com/nodejs/undici/issues/2046
  // A user agent may send a Content-Length header with 0 value, this should be allowed.
  if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
    if (client[kStrictContentLength]) {
      errorRequest(client, request, new RequestContentLengthMismatchError())
      return false
    }

    process.emitWarning(new RequestContentLengthMismatchError())
  }

  if (contentLength != null) {
    assert(body, 'no body must not have content length')
    headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
  }

  session.ref()

  const shouldEndStream = method === 'GET' || method === 'HEAD'
  if (expectContinue) {
    // Defer the body until the server answers with 100-continue.
    headers[HTTP2_HEADER_EXPECT] = '100-continue'
    stream = session.request(headers, { endStream: shouldEndStream, signal })

    stream.once('continue', writeBodyH2)
  } else {
    stream = session.request(headers, {
      endStream: shouldEndStream,
      signal
    })
    writeBodyH2()
  }

  // Increment counter as we have new several streams open
  ++h2State.openStreams

  stream.once('response', headers => {
    const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers

    if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {
      stream.pause()
    }
  })

  stream.once('end', () => {
    request.onComplete([])
  })

  stream.on('data', (chunk) => {
    // Back-pressure: pause the h2 stream when the consumer cannot keep up.
    if (request.onData(chunk) === false) {
      stream.pause()
    }
  })

  stream.once('close', () => {
    h2State.openStreams -= 1
    // TODO(HTTP/2): unref only if current streams count is 0
    if (h2State.openStreams === 0) {
      session.unref()
    }
  })

  stream.once('error', function (err) {
    if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
      // NOTE(review): this decrements `h2State.streams` while every other
      // handler here uses `h2State.openStreams` — looks like a typo, but
      // changing it could double-decrement with the 'close' handler above;
      // confirm against upstream undici before altering.
      h2State.streams -= 1
      util.destroy(stream, err)
    }
  })

  stream.once('frameError', (type, code) => {
    const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
    errorRequest(client, request, err)

    if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
      // NOTE(review): same `streams` vs `openStreams` question as above.
      h2State.streams -= 1
      util.destroy(stream, err)
    }
  })

  // stream.on('aborted', () => {
  //   // TODO(HTTP/2): Support aborted
  // })

  // stream.on('timeout', () => {
  //   // TODO(HTTP/2): Support timeout
  // })

  // stream.on('push', headers => {
  //   // TODO(HTTP/2): Suppor push
  // })

  // stream.on('trailers', headers => {
  //   // TODO(HTTP/2): Support trailers
  // })

  return true

  // Writes the request body once the stream (and, with expectContinue,
  // the server's 100-continue) is ready, dispatching on the body type.
  function writeBodyH2 () {
    /* istanbul ignore else: assertion */
    if (!body) {
      request.onRequestSent()
    } else if (util.isBuffer(body)) {
      assert(contentLength === body.byteLength, 'buffer body must have content length')
      stream.cork()
      stream.write(body)
      stream.uncork()
      stream.end()
      request.onBodySent(body)
      request.onRequestSent()
    } else if (util.isBlobLike(body)) {
      if (typeof body.stream === 'function') {
        writeIterable({
          client,
          request,
          contentLength,
          h2stream: stream,
          expectsPayload,
          body: body.stream(),
          socket: client[kSocket],
          header: ''
        })
      } else {
        writeBlob({
          body,
          client,
          request,
          contentLength,
          expectsPayload,
          h2stream: stream,
          header: '',
          socket: client[kSocket]
        })
      }
    } else if (util.isStream(body)) {
      writeStream({
        body,
        client,
        request,
        contentLength,
        expectsPayload,
        socket: client[kSocket],
        h2stream: stream,
        header: ''
      })
    } else if (util.isIterable(body)) {
      writeIterable({
        body,
        client,
        request,
        contentLength,
        expectsPayload,
        header: '',
        h2stream: stream,
        socket: client[kSocket]
      })
    } else {
      assert(false)
    }
  }
}
function writeStream ( { h2stream , body , client , request , socket , contentLength , header , expectsPayload } ) {
assert ( contentLength !== 0 || client [ kRunning ] === 0 , 'stream body cannot be pipelined' )
if ( client [ kHTTPConnVersion ] === 'h2' ) {
// For HTTP/2, is enough to pipe the stream
const pipe = pipeline (
body ,
h2stream ,
( err ) => {
if ( err ) {
util . destroy ( body , err )
util . destroy ( h2stream , err )
} else {
request . onRequestSent ( )
}
}
)
pipe . on ( 'data' , onPipeData )
pipe . once ( 'end' , ( ) => {
pipe . removeListener ( 'data' , onPipeData )
util . destroy ( pipe )
} )
function onPipeData ( chunk ) {
request . onBodySent ( chunk )
}
return
}
let finished = false
const writer = new AsyncWriter ( { socket , request , contentLength , client , expectsPayload , header } )
const onData = function ( chunk ) {
if ( finished ) {
return
}
try {
if ( ! writer . write ( chunk ) && this . pause ) {
this . pause ( )
}
} catch ( err ) {
util . destroy ( this , err )
}
}
const onDrain = function ( ) {
if ( finished ) {
return
}
if ( body . resume ) {
body . resume ( )
}
}
const onAbort = function ( ) {
if ( finished ) {
return
}
const err = new RequestAbortedError ( )
queueMicrotask ( ( ) => onFinished ( err ) )
}
const onFinished = function ( err ) {
if ( finished ) {
return
}
finished = true
assert ( socket . destroyed || ( socket [ kWriting ] && client [ kRunning ] <= 1 ) )
socket
. off ( 'drain' , onDrain )
. off ( 'error' , onFinished )
body
. removeListener ( 'data' , onData )
. removeListener ( 'end' , onFinished )
. removeListener ( 'error' , onFinished )
. removeListener ( 'close' , onAbort )
if ( ! err ) {
try {
writer . end ( )
} catch ( er ) {
err = er
}
}
writer . destroy ( err )
if ( err && ( err . code !== 'UND_ERR_INFO' || err . message !== 'reset' ) ) {
util . destroy ( body , err )
} else {
util . destroy ( body )
}
}
body
. on ( 'data' , onData )
. on ( 'end' , onFinished )
. on ( 'error' , onFinished )
. on ( 'close' , onAbort )
if ( body . resume ) {
body . resume ( )
}
socket
. on ( 'drain' , onDrain )
. on ( 'error' , onFinished )
}
async function writeBlob ( { h2stream , body , client , request , socket , contentLength , header , expectsPayload } ) {
assert ( contentLength === body . size , 'blob body must have content length' )
const isH2 = client [ kHTTPConnVersion ] === 'h2'
try {
if ( contentLength != null && contentLength !== body . size ) {
throw new RequestContentLengthMismatchError ( )
}
const buffer = Buffer . from ( await body . arrayBuffer ( ) )
if ( isH2 ) {
h2stream . cork ( )
h2stream . write ( buffer )
h2stream . uncork ( )
} else {
socket . cork ( )
socket . write ( ` ${ header } content-length: ${ contentLength } \r \n \r \n ` , 'latin1' )
socket . write ( buffer )
socket . uncork ( )
}
request . onBodySent ( buffer )
request . onRequestSent ( )
if ( ! expectsPayload ) {
socket [ kReset ] = true
}
resume ( client )
} catch ( err ) {
util . destroy ( isH2 ? h2stream : socket , err )
}
}
async function writeIterable ( { h2stream , body , client , request , socket , contentLength , header , expectsPayload } ) {
assert ( contentLength !== 0 || client [ kRunning ] === 0 , 'iterator body cannot be pipelined' )
let callback = null
function onDrain ( ) {
if ( callback ) {
const cb = callback
callback = null
cb ( )
}
}
const waitForDrain = ( ) => new Promise ( ( resolve , reject ) => {
assert ( callback === null )
if ( socket [ kError ] ) {
reject ( socket [ kError ] )
} else {
callback = resolve
}
} )
if ( client [ kHTTPConnVersion ] === 'h2' ) {
h2stream
. on ( 'close' , onDrain )
. on ( 'drain' , onDrain )
try {
// It's up to the user to somehow abort the async iterable.
for await ( const chunk of body ) {
if ( socket [ kError ] ) {
throw socket [ kError ]
}
const res = h2stream . write ( chunk )
request . onBodySent ( chunk )
if ( ! res ) {
await waitForDrain ( )
}
}
} catch ( err ) {
h2stream . destroy ( err )
} finally {
request . onRequestSent ( )
h2stream . end ( )
h2stream
. off ( 'close' , onDrain )
. off ( 'drain' , onDrain )
}
return
}
socket
. on ( 'close' , onDrain )
. on ( 'drain' , onDrain )
const writer = new AsyncWriter ( { socket , request , contentLength , client , expectsPayload , header } )
try {
// It's up to the user to somehow abort the async iterable.
for await ( const chunk of body ) {
if ( socket [ kError ] ) {
throw socket [ kError ]
}
if ( ! writer . write ( chunk ) ) {
await waitForDrain ( )
}
}
writer . end ( )
} catch ( err ) {
writer . destroy ( err )
} finally {
socket
. off ( 'close' , onDrain )
. off ( 'drain' , onDrain )
}
}
class AsyncWriter {
// Captures the request/socket state needed for incremental HTTP/1.1
// body writing (content-length or chunked transfer-encoding).
constructor ({ socket, request, contentLength, client, expectsPayload, header }) {
  this.socket = socket
  this.request = request
  this.contentLength = contentLength
  this.client = client
  this.bytesWritten = 0
  this.expectsPayload = expectsPayload
  this.header = header

  // Block further dispatches on this socket until the body is written.
  socket[kWriting] = true
}
// Writes one body chunk. Lazily emits the request header block (with
// content-length or transfer-encoding: chunked) before the first chunk.
// Returns socket.write()'s result so callers can apply back-pressure;
// throws on socket error or content-length overflow in strict mode.
write (chunk) {
  const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this

  if (socket[kError]) {
    throw socket[kError]
  }

  if (socket.destroyed) {
    return false
  }

  const len = Buffer.byteLength(chunk)
  if (!len) {
    // Empty chunks are ignored (would terminate chunked encoding early).
    return true
  }

  // We should defer writing chunks.
  if (contentLength !== null && bytesWritten + len > contentLength) {
    if (client[kStrictContentLength]) {
      throw new RequestContentLengthMismatchError()
    }

    process.emitWarning(new RequestContentLengthMismatchError())
  }

  socket.cork()

  if (bytesWritten === 0) {
    if (!expectsPayload) {
      // Method does not anticipate a payload: do not reuse the connection.
      socket[kReset] = true
    }

    if (contentLength === null) {
      socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1')
    } else {
      socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
    }
  }

  if (contentLength === null) {
    // Chunked framing: hex size line precedes each chunk.
    socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1')
  }

  this.bytesWritten += len

  const ret = socket.write(chunk)

  socket.uncork()

  request.onBodySent(chunk)

  if (!ret) {
    // Socket buffer is full: keep the headers timeout alive while draining.
    if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
      // istanbul ignore else: only for jest
      if (socket[kParser].timeout.refresh) {
        socket[kParser].timeout.refresh()
      }
    }
  }

  return ret
}
end ( ) {
const { socket , contentLength , client , bytesWritten , expectsPayload , header , request } = this
request . onRequestSent ( )
socket [ kWriting ] = false
if ( socket [ kError ] ) {
throw socket [ kError ]
}
if ( socket . destroyed ) {
return
}
if ( bytesWritten === 0 ) {
if ( expectsPayload ) {
// https://tools.ietf.org/html/rfc7230#section-3.3.2
// A user agent SHOULD send a Content-Length in a request message when
// no Transfer-Encoding is sent and the request method defines a meaning
// for an enclosed payload body.
socket . write ( ` ${ header } content-length: 0 \r \n \r \n ` , 'latin1' )
} else {
socket . write ( ` ${ header } \r \n ` , 'latin1' )
}
} else if ( contentLength === null ) {
socket . write ( '\r\n0\r\n\r\n' , 'latin1' )
}
if ( contentLength !== null && bytesWritten !== contentLength ) {
if ( client [ kStrictContentLength ] ) {
throw new RequestContentLengthMismatchError ( )
} else {
process . emitWarning ( new RequestContentLengthMismatchError ( ) )
}
}
if ( socket [ kParser ] . timeout && socket [ kParser ] . timeoutType === TIMEOUT _HEADERS ) {
// istanbul ignore else: only for jest
if ( socket [ kParser ] . timeout . refresh ) {
socket [ kParser ] . timeout . refresh ( )
}
}
resume ( client )
}
destroy ( err ) {
const { socket , client } = this
socket [ kWriting ] = false
if ( err ) {
assert ( client [ kRunning ] <= 1 , 'pipeline should only contain this request' )
util . destroy ( socket , err )
}
}
}
/**
 * Fails a request with `err`. The request's own error handler is expected
 * to mark it aborted; if that handler misbehaves (throws, or fails to
 * abort), the resulting error is surfaced on the client instead.
 * @param {object} client - dispatcher that receives 'error' on handler failure
 * @param {object} request - request being errored
 * @param {Error} err - reason the request failed
 */
function errorRequest (client, request, err) {
  try {
    request.onError(err)
    assert(request.aborted)
  } catch (handlerErr) {
    client.emit('error', handlerErr)
  }
}
module . exports = Client
/***/ } ) ,
/***/ 6436 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
/* istanbul ignore file: only for Node 12 */
const { kConnected , kSize } = _ _nccwpck _require _ _ ( 2785 )
/**
 * WeakRef stand-in for environments without a native implementation.
 * Instead of real GC semantics it treats a dispatcher as "collected"
 * once it is fully disconnected and empty.
 */
class CompatWeakRef {
  constructor (value) {
    this.value = value
  }

  /** @returns the wrapped value, or undefined once it is idle (no connections, no pending). */
  deref () {
    const target = this.value
    const isIdle = target[kConnected] === 0 && target[kSize] === 0
    return isIdle ? undefined : target
  }
}
/**
 * FinalizationRegistry stand-in. Rather than GC notifications it invokes
 * the finalizer when a registered dispatcher emits 'disconnect' while
 * fully idle (no connections and nothing queued).
 */
class CompatFinalizer {
  constructor (finalizer) {
    this.finalizer = finalizer
  }

  register (dispatcher, key) {
    if (!dispatcher.on) {
      // Objects without an event interface cannot be tracked.
      return
    }

    dispatcher.on('disconnect', () => {
      const idle = dispatcher[kConnected] === 0 && dispatcher[kSize] === 0
      if (idle) {
        this.finalizer(key)
      }
    })
  }
}
module . exports = function ( ) {
// FIXME: remove workaround when the Node bug is fixed
// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
if ( process . env . NODE _V8 _COVERAGE ) {
return {
WeakRef : CompatWeakRef ,
FinalizationRegistry : CompatFinalizer
}
}
return {
WeakRef : global . WeakRef || CompatWeakRef ,
FinalizationRegistry : global . FinalizationRegistry || CompatFinalizer
}
}
/***/ } ) ,
/***/ 663 :
/***/ ( ( module ) => {
"use strict" ;
// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
const maxAttributeValueSize = 1024
// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
const maxNameValuePairSize = 4096
module . exports = {
maxAttributeValueSize ,
maxNameValuePairSize
}
/***/ } ) ,
/***/ 1724 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { parseSetCookie } = _ _nccwpck _require _ _ ( 4408 )
const { stringify , getHeadersList } = _ _nccwpck _require _ _ ( 3121 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { Headers } = _ _nccwpck _require _ _ ( 554 )
/ * *
* @ typedef { Object } Cookie
* @ property { string } name
* @ property { string } value
* @ property { Date | number | undefined } expires
* @ property { number | undefined } maxAge
* @ property { string | undefined } domain
* @ property { string | undefined } path
* @ property { boolean | undefined } secure
* @ property { boolean | undefined } httpOnly
* @ property { 'Strict' | 'Lax' | 'None' } sameSite
* @ property { string [ ] } unparsed
* /
/ * *
* @ param { Headers } headers
* @ returns { Record < string , string > }
* /
/**
 * Reads the `cookie` request header and returns its name/value pairs.
 * Names are trimmed; a piece without "=" maps to the empty string.
 * @param {Headers} headers
 * @returns {Record<string, string>}
 */
function getCookies (headers) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })

  webidl.brandCheck(headers, Headers, { strict: false })

  const cookie = headers.get('cookie')
  const out = {}

  if (!cookie) {
    return out
  }

  for (const piece of cookie.split(';')) {
    const eqIndex = piece.indexOf('=')

    if (eqIndex === -1) {
      out[piece.trim()] = ''
    } else {
      // Everything after the first "=" is the value, "=" included.
      out[piece.slice(0, eqIndex).trim()] = piece.slice(eqIndex + 1)
    }
  }

  return out
}
/ * *
* @ param { Headers } headers
* @ param { string } name
* @ param { { path ? : string , domain ? : string } | undefined } attributes
* @ returns { void }
* /
/**
 * Expires a cookie by setting it to an empty value with an epoch expiry.
 * @param {Headers} headers
 * @param {string} name
 * @param {{ path?: string, domain?: string }|undefined} attributes
 * @returns {void}
 */
function deleteCookie (headers, name, attributes) {
  webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })

  webidl.brandCheck(headers, Headers, { strict: false })

  const cookieName = webidl.converters.DOMString(name)
  const cookieAttributes = webidl.converters.DeleteCookieAttributes(attributes)

  // Matches behavior of
  // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
  setCookie(headers, {
    name: cookieName,
    value: '',
    expires: new Date(0),
    ...cookieAttributes
  })
}
/ * *
* @ param { Headers } headers
* @ returns { Cookie [ ] }
* /
/**
 * Parses every Set-Cookie header recorded on the Headers instance.
 * @param {Headers} headers
 * @returns {Cookie[]}
 */
function getSetCookies (headers) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })

  webidl.brandCheck(headers, Headers, { strict: false })

  const cookies = getHeadersList(headers).cookies

  if (!cookies) {
    return []
  }

  const parsed = []

  for (const pair of cookies) {
    // In older versions of undici, cookies is a list of name:value.
    parsed.push(parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
  }

  return parsed
}
/ * *
* @ param { Headers } headers
* @ param { Cookie } cookie
* @ returns { void }
* /
/**
 * Serializes `cookie` and appends it as a Set-Cookie header.
 * Cookies that stringify to null (empty name) are silently dropped.
 * @param {Headers} headers
 * @param {Cookie} cookie
 * @returns {void}
 */
function setCookie (headers, cookie) {
  webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })

  webidl.brandCheck(headers, Headers, { strict: false })

  cookie = webidl.converters.Cookie(cookie)

  const str = stringify(cookie)

  if (str) {
    // Fix: append the already-computed string instead of calling
    // stringify(cookie) a second time (which re-ran validation too).
    headers.append('Set-Cookie', str)
  }
}
webidl . converters . DeleteCookieAttributes = webidl . dictionaryConverter ( [
{
converter : webidl . nullableConverter ( webidl . converters . DOMString ) ,
key : 'path' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . DOMString ) ,
key : 'domain' ,
defaultValue : null
}
] )
webidl . converters . Cookie = webidl . dictionaryConverter ( [
{
converter : webidl . converters . DOMString ,
key : 'name'
} ,
{
converter : webidl . converters . DOMString ,
key : 'value'
} ,
{
converter : webidl . nullableConverter ( ( value ) => {
if ( typeof value === 'number' ) {
return webidl . converters [ 'unsigned long long' ] ( value )
}
return new Date ( value )
} ) ,
key : 'expires' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters [ 'long long' ] ) ,
key : 'maxAge' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . DOMString ) ,
key : 'domain' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . DOMString ) ,
key : 'path' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . boolean ) ,
key : 'secure' ,
defaultValue : null
} ,
{
converter : webidl . nullableConverter ( webidl . converters . boolean ) ,
key : 'httpOnly' ,
defaultValue : null
} ,
{
converter : webidl . converters . USVString ,
key : 'sameSite' ,
allowedValues : [ 'Strict' , 'Lax' , 'None' ]
} ,
{
converter : webidl . sequenceConverter ( webidl . converters . DOMString ) ,
key : 'unparsed' ,
defaultValue : [ ]
}
] )
module . exports = {
getCookies ,
deleteCookie ,
getSetCookies ,
setCookie
}
/***/ } ) ,
/***/ 4408 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { maxNameValuePairSize , maxAttributeValueSize } = _ _nccwpck _require _ _ ( 663 )
const { isCTLExcludingHtab } = _ _nccwpck _require _ _ ( 3121 )
const { collectASequenceOfCodePointsFast } = _ _nccwpck _require _ _ ( 685 )
const assert = _ _nccwpck _require _ _ ( 9491 )
/ * *
* @ description Parses the field - value attributes of a set - cookie header string .
* @ see https : //datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
* @ param { string } header
* @ returns if the header is invalid , null will be returned
* /
/**
 * Parses the field-value of a set-cookie header string.
 * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
 * @param {string} header
 * @returns if the header is invalid, null will be returned
 */
function parseSetCookie (header) {
  // 1. A set-cookie-string containing CTL characters other than HTAB
  //    is ignored entirely.
  if (isCTLExcludingHtab(header)) {
    return null
  }

  // 2. Split the name-value-pair from the unparsed attributes at the
  //    first ";"; the attributes keep their leading ";".
  let nameValuePair = header
  let unparsedAttributes = ''

  if (header.includes(';')) {
    const position = { position: 0 }

    nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
    unparsedAttributes = header.slice(position.position)
  }

  // 3. Without a "=", the name is empty and the whole pair is the
  //    value; otherwise split at the first "=".
  let name = ''
  let value = ''

  const eqIndex = nameValuePair.indexOf('=')

  if (eqIndex === -1) {
    value = nameValuePair
  } else {
    name = nameValuePair.slice(0, eqIndex)
    value = nameValuePair.slice(eqIndex + 1)
  }

  // 4. Strip leading/trailing whitespace from both parts.
  name = name.trim()
  value = value.trim()

  // 5. An oversized name/value pair invalidates the whole cookie string.
  if (name.length + value.length > maxNameValuePairSize) {
    return null
  }

  // 6. Merge in whatever the attribute parser extracts.
  return {
    name, value, ...parseUnparsedAttributes(unparsedAttributes)
  }
}
/ * *
* Parses the remaining attributes of a set - cookie header
* @ see https : //datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
* @ param { string } unparsedAttributes
* @ param { [ Object . < string , unknown > ] = { } } cookieAttributeList
* /
/**
 * Parses the remaining attributes of a set-cookie header, one cookie-av
 * per recursive step.
 * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
 * @param {string} unparsedAttributes
 * @param {[Object.<string, unknown>]={}} cookieAttributeList
 */
function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
  // 1. If the unparsed-attributes string is empty, skip the rest of
  //    these steps.
  if (unparsedAttributes.length === 0) {
    return cookieAttributeList
  }

  // 2. Discard the first character of the unparsed-attributes (which
  //    will be a %x3B (";") character).
  assert(unparsedAttributes[0] === ';')
  unparsedAttributes = unparsedAttributes.slice(1)

  let cookieAv = ''

  // 3. If the remaining unparsed-attributes contains a %x3B (";")
  //    character:
  if (unparsedAttributes.includes(';')) {
    // 1. Consume the characters of the unparsed-attributes up to, but
    //    not including, the first %x3B (";") character.
    cookieAv = collectASequenceOfCodePointsFast(
      ';',
      unparsedAttributes,
      { position: 0 }
    )
    unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
  } else {
    // Otherwise:
    // 1. Consume the remainder of the unparsed-attributes.
    cookieAv = unparsedAttributes
    unparsedAttributes = ''
  }

  // Let the cookie-av string be the characters consumed in this step.

  let attributeName = ''
  let attributeValue = ''

  // 4. If the cookie-av string contains a %x3D ("=") character:
  if (cookieAv.includes('=')) {
    // 1. The (possibly empty) attribute-name string consists of the
    //    characters up to, but not including, the first %x3D ("=")
    //    character, and the (possibly empty) attribute-value string
    //    consists of the characters after the first %x3D ("=")
    //    character.
    const position = { position: 0 }
    attributeName = collectASequenceOfCodePointsFast(
      '=',
      cookieAv,
      position
    )
    attributeValue = cookieAv.slice(position.position + 1)
  } else {
    // Otherwise:
    // 1. The attribute-name string consists of the entire cookie-av
    //    string, and the attribute-value string is empty.
    attributeName = cookieAv
  }

  // 5. Remove any leading or trailing WSP characters from the attribute-
  //    name string and the attribute-value string.
  attributeName = attributeName.trim()
  attributeValue = attributeValue.trim()

  // 6. If the attribute-value is longer than 1024 octets, ignore the
  //    cookie-av string and return to Step 1 of this algorithm.
  if (attributeValue.length > maxAttributeValueSize) {
    return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
  }

  // 7. Process the attribute-name and attribute-value according to the
  //    requirements in the following subsections. (Notice that
  //    attributes with unrecognized attribute-names are ignored.)
  const attributeNameLowercase = attributeName.toLowerCase()

  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
  // If the attribute-name case-insensitively matches the string
  // "Expires", the user agent MUST process the cookie-av as follows.
  if (attributeNameLowercase === 'expires') {
    // 1. Let the expiry-time be the result of parsing the attribute-value
    //    as cookie-date (see Section 5.1.1).
    const expiryTime = new Date(attributeValue)

    // 2. If the attribute-value failed to parse as a cookie date, ignore
    //    the cookie-av.
    // NOTE(review): an unparseable date is currently stored as an
    // Invalid Date rather than ignored; stringify() filters it out
    // later — confirm whether ignoring here would be preferable.
    cookieAttributeList.expires = expiryTime
  } else if (attributeNameLowercase === 'max-age') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
    // If the attribute-name case-insensitively matches the string "Max-
    // Age", the user agent MUST process the cookie-av as follows.

    // 1. If the first character of the attribute-value is not a DIGIT or a
    //    "-" character, ignore the cookie-av.
    const charCode = attributeValue.charCodeAt(0)

    if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
      return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
    }

    // 2. If the remainder of attribute-value contains a non-DIGIT
    //    character, ignore the cookie-av.
    // Fix: permit an optional leading "-" — step 1 above explicitly
    // allows negative values, but the previous /^\d+$/ check rejected
    // every one of them.
    if (!/^-?\d+$/.test(attributeValue)) {
      return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
    }

    // 3. Let delta-seconds be the attribute-value converted to an integer.
    const deltaSeconds = Number(attributeValue)

    // Steps 4-6 (clamping to a cookie-age-limit and converting to an
    // expiry time) are intentionally left to consumers of maxAge.

    // 7. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of Max-Age and an attribute-value of expiry-time.
    cookieAttributeList.maxAge = deltaSeconds
  } else if (attributeNameLowercase === 'domain') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
    // If the attribute-name case-insensitively matches the string "Domain",
    // the user agent MUST process the cookie-av as follows.

    // 1. Let cookie-domain be the attribute-value.
    let cookieDomain = attributeValue

    // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
    //    cookie-domain without its leading %x2E (".").
    if (cookieDomain[0] === '.') {
      cookieDomain = cookieDomain.slice(1)
    }

    // 3. Convert the cookie-domain to lower case.
    cookieDomain = cookieDomain.toLowerCase()

    // 4. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of Domain and an attribute-value of cookie-domain.
    cookieAttributeList.domain = cookieDomain
  } else if (attributeNameLowercase === 'path') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
    // If the attribute-name case-insensitively matches the string "Path",
    // the user agent MUST process the cookie-av as follows.

    // 1. If the attribute-value is empty or if the first character of the
    //    attribute-value is not %x2F ("/"), let cookie-path be the
    //    default-path; otherwise let cookie-path be the attribute-value.
    let cookiePath = ''

    if (attributeValue.length === 0 || attributeValue[0] !== '/') {
      cookiePath = '/'
    } else {
      cookiePath = attributeValue
    }

    // 2. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of Path and an attribute-value of cookie-path.
    cookieAttributeList.path = cookiePath
  } else if (attributeNameLowercase === 'secure') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
    // If the attribute-name case-insensitively matches the string "Secure",
    // the user agent MUST append an attribute to the cookie-attribute-list
    // with an attribute-name of Secure and an empty attribute-value.
    cookieAttributeList.secure = true
  } else if (attributeNameLowercase === 'httponly') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
    // If the attribute-name case-insensitively matches the string
    // "HttpOnly", the user agent MUST append an attribute to the cookie-
    // attribute-list with an attribute-name of HttpOnly and an empty
    // attribute-value.
    cookieAttributeList.httpOnly = true
  } else if (attributeNameLowercase === 'samesite') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
    // If the attribute-name case-insensitively matches the string
    // "SameSite", the user agent MUST process the cookie-av as follows:

    // 1. Let enforcement be "Default".
    let enforcement = 'Default'

    const attributeValueLowercase = attributeValue.toLowerCase()

    // 2. If cookie-av's attribute-value is a case-insensitive match for
    //    "None", set enforcement to "None".
    if (attributeValueLowercase.includes('none')) {
      enforcement = 'None'
    }

    // 3. If cookie-av's attribute-value is a case-insensitive match for
    //    "Strict", set enforcement to "Strict".
    if (attributeValueLowercase.includes('strict')) {
      enforcement = 'Strict'
    }

    // 4. If cookie-av's attribute-value is a case-insensitive match for
    //    "Lax", set enforcement to "Lax".
    if (attributeValueLowercase.includes('lax')) {
      enforcement = 'Lax'
    }

    // 5. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of "SameSite" and an attribute-value of
    //    enforcement.
    cookieAttributeList.sameSite = enforcement
  } else {
    // Unrecognized attributes are collected verbatim.
    cookieAttributeList.unparsed ??= []

    cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
  }

  // 8. Return to Step 1 of this algorithm.
  return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
}
module . exports = {
parseSetCookie ,
parseUnparsedAttributes
}
/***/ } ) ,
/***/ 3121 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const assert = _ _nccwpck _require _ _ ( 9491 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
/**
 * Returns true when `value` contains a CTL character other than HTAB
 * (%x00-08 / %x0A-1F / %x7F); such set-cookie strings must be ignored.
 *
 * Fix: the previous version combined the range checks with `||` (always
 * true for any character) and returned `false` from inside the loop, so
 * it could never report a CTL character.
 * @param {string} value
 * @returns {boolean}
 */
function isCTLExcludingHtab (value) {
  if (value.length === 0) {
    return false
  }

  for (const char of value) {
    const code = char.charCodeAt(0)

    if (
      (code >= 0x00 && code <= 0x08) ||
      (code >= 0x0A && code <= 0x1F) ||
      code === 0x7F
    ) {
      return true
    }
  }

  return false
}
/ * *
CHAR = < any US - ASCII character ( octets 0 - 127 ) >
token = 1 * < any CHAR except CTLs or separators >
separators = "(" | ")" | "<" | ">" | "@"
| "," | ";" | ":" | "\" | <" >
| "/" | "[" | "]" | "?" | "="
| "{" | "}" | SP | HT
* @ param { string } name
* /
/**
 CHAR           = <any US-ASCII character (octets 0 - 127)>
 token          = 1*<any CHAR except CTLs or separators>
 separators     = "(" | ")" | "<" | ">" | "@"
                | "," | ";" | ":" | "\" | <">
                | "/" | "[" | "]" | "?" | "="
                | "{" | "}" | SP | HT
 * @param {string} name
 */
function validateCookieName (name) {
  // All RFC 2616 separator characters are forbidden in a token.
  const separators = '()><@,;:\\"/[]?={}'

  for (const char of name) {
    const code = char.charCodeAt(0)

    // CTLs and SP (<= 0x20), non-ASCII (> 0x7F), or any separator.
    if (code <= 0x20 || code > 0x7F || separators.includes(char)) {
      throw new Error('Invalid cookie name')
    }
  }
}
/ * *
cookie - value = * cookie - octet / ( DQUOTE * cookie - octet DQUOTE )
cookie - octet = % x21 / % x23 - 2 B / % x2D - 3 A / % x3C - 5 B / % x5D - 7 E
; US - ASCII characters excluding CTLs ,
; whitespace DQUOTE , comma , semicolon ,
; and backslash
* @ param { string } value
* /
/**
 cookie-value      = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
 cookie-octet      = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
                       ; US-ASCII characters excluding CTLs,
                       ; whitespace DQUOTE, comma, semicolon,
                       ; and backslash
 * @param {string} value
 */
function validateCookieValue (value) {
  for (const char of value) {
    const code = char.charCodeAt(0)

    const forbidden =
      code < 0x21 || // CTLs (0-31) and SP
      code > 0x7E || // non-ascii
      char === '"' ||
      char === ',' ||
      char === ';' ||
      char === '\\'

    if (forbidden) {
      throw new Error('Invalid header value')
    }
  }
}
/ * *
* path - value = < any CHAR except CTLs or ";" >
* @ param { string } path
* /
/**
 * path-value        = <any CHAR except CTLs or ";">
 * @param {string} path
 */
function validateCookiePath (path) {
  for (const char of path) {
    // Reject the path-terminating ";" and any CTL/SP character.
    if (char === ';' || char.charCodeAt(0) < 0x21) {
      throw new Error('Invalid cookie path')
    }
  }
}
/ * *
* I have no idea why these values aren ' t allowed to be honest ,
* but Deno tests these . - Khafra
* @ param { string } domain
* /
/**
 * I have no idea why these values aren't allowed to be honest,
 * but Deno tests these. - Khafra
 * @param {string} domain
 */
function validateCookieDomain (domain) {
  const malformed =
    domain.startsWith('-') ||
    domain.endsWith('.') ||
    domain.endsWith('-')

  if (malformed) {
    throw new Error('Invalid cookie domain')
  }
}
/ * *
* @ see https : //www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
* @ param { number | Date } date
IMF - fixdate = day - name "," SP date1 SP time - of - day SP GMT
; fixed length / zone / capitalization subset of the format
; see Section 3.3 of [ RFC5322 ]
day - name = % x4D . 6 F . 6 E ; "Mon" , case - sensitive
/ % x 5 4 . 7 5 . 6 5 ; " T u e " , c a s e - s e n s i t i v e
/ % x 5 7 . 6 5 . 6 4 ; " W e d " , c a s e - s e n s i t i v e
/ % x 5 4 . 6 8 . 7 5 ; " T h u " , c a s e - s e n s i t i v e
/ % x 4 6 . 7 2 . 6 9 ; " F r i " , c a s e - s e n s i t i v e
/ % x 5 3 . 6 1 . 7 4 ; " S a t " , c a s e - s e n s i t i v e
/ % x 5 3 . 7 5 . 6 E ; " S u n " , c a s e - s e n s i t i v e
date1 = day SP month SP year
; e . g . , 02 Jun 1982
day = 2 DIGIT
month = % x4A . 61.6 E ; "Jan" , case - sensitive
/ % x 4 6 . 6 5 . 6 2 ; " F e b " , c a s e - s e n s i t i v e
/ % x 4 D . 6 1 . 7 2 ; " M a r " , c a s e - s e n s i t i v e
/ % x 4 1 . 7 0 . 7 2 ; " A p r " , c a s e - s e n s i t i v e
/ % x 4 D . 6 1 . 7 9 ; " M a y " , c a s e - s e n s i t i v e
/ % x 4 A . 7 5 . 6 E ; " J u n " , c a s e - s e n s i t i v e
/ % x 4 A . 7 5 . 6 C ; " J u l " , c a s e - s e n s i t i v e
/ % x 4 1 . 7 5 . 6 7 ; " A u g " , c a s e - s e n s i t i v e
/ % x 5 3 . 6 5 . 7 0 ; " S e p " , c a s e - s e n s i t i v e
/ % x 4 F . 6 3 . 7 4 ; " O c t " , c a s e - s e n s i t i v e
/ % x 4 E . 6 F . 7 6 ; " N o v " , c a s e - s e n s i t i v e
/ % x 4 4 . 6 5 . 6 3 ; " D e c " , c a s e - s e n s i t i v e
year = 4 DIGIT
GMT = % x47 . 4 D . 54 ; "GMT" , case - sensitive
time - of - day = hour ":" minute ":" second
; 00 : 00 : 00 - 23 : 59 : 60 ( leap second )
hour = 2 DIGIT
minute = 2 DIGIT
second = 2 DIGIT
* /
/**
 * Formats a date as an IMF-fixdate string (RFC 7231 section 7.1.1.1),
 * e.g. "Sun, 06 Nov 1994 08:49:37 GMT".
 * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
 * @param {number|Date} date - Date instance or epoch milliseconds
 */
function toIMFDate (date) {
  const d = typeof date === 'number' ? new Date(date) : date

  const dayNames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']
  const monthNames = [
    'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
    'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
  ]

  // All fields are taken in UTC; two-digit fields are zero-padded.
  const pad = (n) => n.toString().padStart(2, '0')

  const datePart = `${pad(d.getUTCDate())} ${monthNames[d.getUTCMonth()]} ${d.getUTCFullYear()}`
  const timePart = `${pad(d.getUTCHours())}:${pad(d.getUTCMinutes())}:${pad(d.getUTCSeconds())}`

  return `${dayNames[d.getUTCDay()]}, ${datePart} ${timePart} GMT`
}
/ * *
max - age - av = "Max-Age=" non - zero - digit * DIGIT
; In practice , both expires - av and max - age - av
; are limited to dates representable by the
; user agent .
* @ param { number } maxAge
* /
/**
 max-age-av        = "Max-Age=" non-zero-digit *DIGIT
                       ; In practice, both expires-av and max-age-av
                       ; are limited to dates representable by the
                       ; user agent.
 * @param {number} maxAge - must be non-negative
 */
function validateCookieMaxAge (maxAge) {
  const isNegative = maxAge < 0
  if (isNegative) {
    throw new Error('Invalid cookie max-age')
  }
}
/ * *
* @ see https : //www.rfc-editor.org/rfc/rfc6265#section-4.1.1
* @ param { import ( './index' ) . Cookie } cookie
* /
/**
 * Serializes a Cookie record into a Set-Cookie header string, or
 * returns null when the cookie has an empty name. Validates each
 * component and applies the __Secure-/__Host- prefix rules.
 * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
 * @param {import('./index').Cookie} cookie
 */
function stringify (cookie) {
  if (cookie.name.length === 0) {
    return null
  }

  validateCookieName(cookie.name)
  validateCookieValue(cookie.value)

  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
  // Prefix rules override the caller-supplied attributes.
  if (cookie.name.startsWith('__Secure-')) {
    cookie.secure = true
  }

  if (cookie.name.startsWith('__Host-')) {
    cookie.secure = true
    cookie.domain = null
    cookie.path = '/'
  }

  const attributes = [`${cookie.name}=${cookie.value}`]

  if (cookie.secure) {
    attributes.push('Secure')
  }

  if (cookie.httpOnly) {
    attributes.push('HttpOnly')
  }

  if (typeof cookie.maxAge === 'number') {
    validateCookieMaxAge(cookie.maxAge)
    attributes.push(`Max-Age=${cookie.maxAge}`)
  }

  if (cookie.domain) {
    validateCookieDomain(cookie.domain)
    attributes.push(`Domain=${cookie.domain}`)
  }

  if (cookie.path) {
    validateCookiePath(cookie.path)
    attributes.push(`Path=${cookie.path}`)
  }

  if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
    attributes.push(`Expires=${toIMFDate(cookie.expires)}`)
  }

  if (cookie.sameSite) {
    attributes.push(`SameSite=${cookie.sameSite}`)
  }

  for (const part of cookie.unparsed) {
    const eqIndex = part.indexOf('=')

    if (eqIndex === -1) {
      throw new Error('Invalid unparsed')
    }

    attributes.push(`${part.slice(0, eqIndex).trim()}=${part.slice(eqIndex + 1)}`)
  }

  return attributes.join('; ')
}
// Cached private symbol discovered on the first fallback lookup below.
let kHeadersListNode

/**
 * Extracts the internal headers list backing a Headers instance: first
 * via the shared kHeadersList symbol, otherwise by probing the object's
 * own symbols for the one described as 'headers list'.
 * @throws {AssertionError} when no headers list can be located
 */
function getHeadersList (headers) {
  const direct = headers[kHeadersList]

  if (direct) {
    return direct
  }

  if (!kHeadersListNode) {
    kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
      (symbol) => symbol.description === 'headers list'
    )

    assert(kHeadersListNode, 'Headers cannot be parsed')
  }

  const headersList = headers[kHeadersListNode]
  assert(headersList)

  return headersList
}
module . exports = {
isCTLExcludingHtab ,
stringify ,
getHeadersList
}
/***/ } ) ,
/***/ 2067 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const net = _ _nccwpck _require _ _ ( 1808 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { InvalidArgumentError , ConnectTimeoutError } = _ _nccwpck _require _ _ ( 8045 )
let tls // include tls conditionally since it is not always available

// TODO: session re-use does not wait for the first
// connection to resolve the session and might therefore
// resolve the same servername multiple times even when
// re-use is enabled.

let SessionCache
// FIXME: remove workaround when the Node bug is fixed
// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
  // Weak cache: TLS sessions may be garbage-collected; the
  // FinalizationRegistry prunes dead entries once capacity is reached.
  SessionCache = class WeakSessionCache {
    constructor (maxCachedSessions) {
      this._maxCachedSessions = maxCachedSessions
      this._sessionCache = new Map()
      this._sessionRegistry = new global.FinalizationRegistry((key) => {
        // Only prune when the cache is at capacity.
        if (this._sessionCache.size < this._maxCachedSessions) {
          return
        }

        const ref = this._sessionCache.get(key)
        // Only delete when the weak ref is actually dead — the key may
        // have been re-populated with a fresh session in the meantime.
        if (ref !== undefined && ref.deref() === undefined) {
          this._sessionCache.delete(key)
        }
      })
    }

    get (sessionKey) {
      const ref = this._sessionCache.get(sessionKey)
      return ref ? ref.deref() : null
    }

    set (sessionKey, session) {
      if (this._maxCachedSessions === 0) {
        return
      }

      this._sessionCache.set(sessionKey, new WeakRef(session))
      this._sessionRegistry.register(session, sessionKey)
    }
  }
} else {
  // Strong-reference fallback with FIFO eviction when capacity is reached.
  SessionCache = class SimpleSessionCache {
    constructor (maxCachedSessions) {
      this._maxCachedSessions = maxCachedSessions
      this._sessionCache = new Map()
    }

    get (sessionKey) {
      return this._sessionCache.get(sessionKey)
    }

    set (sessionKey, session) {
      if (this._maxCachedSessions === 0) {
        return
      }

      if (this._sessionCache.size >= this._maxCachedSessions) {
        // remove the oldest session (Map preserves insertion order)
        const { value: oldestKey } = this._sessionCache.keys().next()
        this._sessionCache.delete(oldestKey)
      }

      this._sessionCache.set(sessionKey, session)
    }
  }
}
/**
 * Builds a connect(options, callback) function that opens plain TCP or
 * TLS sockets, re-using cached TLS sessions per servername/hostname.
 * @param {object} opts
 * @param {boolean} [opts.allowH2] - offer "h2" via ALPN in addition to http/1.1
 * @param {number} [opts.maxCachedSessions] - TLS session cache size (default 100)
 * @param {string} [opts.socketPath] - unix socket path, passed through as `path`
 * @param {number} [opts.timeout] - connect timeout in ms (default 10s)
 * @throws {InvalidArgumentError} when maxCachedSessions is not a non-negative integer
 */
function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
  if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
    throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
  }

  const options = { path: socketPath, ...opts }
  const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
  timeout = timeout == null ? 10e3 : timeout
  allowH2 = allowH2 != null ? allowH2 : false
  // The returned closure is invoked per connection attempt.
  return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
    let socket
    if (protocol === 'https:') {
      if (!tls) {
        // Lazy-load tls so environments without it can still use plain TCP.
        tls = __nccwpck_require__(4404)
      }
      servername = servername || options.servername || util.getServerName(host) || null

      const sessionKey = servername || hostname
      const session = sessionCache.get(sessionKey) || null

      assert(sessionKey)

      socket = tls.connect({
        highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
        ...options,
        servername,
        session,
        localAddress,
        // TODO(HTTP/2): Add support for h2c
        ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
        socket: httpSocket, // upgrade socket connection
        port: port || 443,
        host: hostname
      })

      socket
        .on('session', function (session) {
          // TODO (fix): Can a session become invalid once established? Don't think so?
          sessionCache.set(sessionKey, session)
        })
    } else {
      assert(!httpSocket, 'httpSocket can only be sent on TLS update')
      socket = net.connect({
        highWaterMark: 64 * 1024, // Same as nodejs fs streams.
        ...options,
        localAddress,
        port: port || 80,
        host: hostname
      })
    }

    // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
    if (options.keepAlive == null || options.keepAlive) {
      const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
      socket.setKeepAlive(true, keepAliveInitialDelay)
    }

    const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)

    socket
      .setNoDelay(true)
      .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
        cancelTimeout()

        // Callback is nulled after the first settle so error/connect
        // can never both fire it.
        if (callback) {
          const cb = callback
          callback = null
          cb(null, this)
        }
      })
      .on('error', function (err) {
        cancelTimeout()

        if (callback) {
          const cb = callback
          callback = null
          cb(err)
        }
      })

    return socket
  }
}
/**
 * Arms a connect timeout and returns a cancel function.
 *
 * The timeout callback is deferred through setImmediate so that socket
 * 'error' events already queued on the event loop win over the timeout.
 * When `timeout` is falsy no timer is created and a no-op cancel is
 * returned.
 *
 * @param {Function} onTimeout invoked once the timeout fully elapses
 * @param {number} timeout milliseconds; falsy disables the timer
 * @returns {Function} cancels the timer and any pending immediates
 */
function setupTimeout (onTimeout, timeout) {
  if (!timeout) {
    return () => {}
  }

  let immediateA = null
  let immediateB = null

  const timer = setTimeout(() => {
    // Defer so pending socket error events are prioritised over the timeout.
    immediateA = setImmediate(() => {
      if (process.platform === 'win32') {
        // Windows needs an extra setImmediate probably due to implementation differences in the socket logic
        immediateB = setImmediate(() => onTimeout())
      } else {
        onTimeout()
      }
    })
  }, timeout)

  return function cancel () {
    clearTimeout(timer)
    clearImmediate(immediateA)
    clearImmediate(immediateB)
  }
}
function onConnectTimeout ( socket ) {
util . destroy ( socket , new ConnectTimeoutError ( ) )
}
module . exports = buildConnector
/***/ } ) ,
/***/ 4462 :
/***/ ( ( module ) => {
"use strict" ;
/** @type {Record<string, string | undefined>} */
// Built with a null prototype so lookups can never resolve through
// Object.prototype (e.g. `hasOwnProperty` as a header name). Creating it
// null-prototyped up front replaces the previous trailing
// Object.setPrototypeOf call.
const headerNameLowerCasedRecord = Object.create(null)

// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
  'Accept',
  'Accept-Encoding',
  'Accept-Language',
  'Accept-Ranges',
  'Access-Control-Allow-Credentials',
  'Access-Control-Allow-Headers',
  'Access-Control-Allow-Methods',
  'Access-Control-Allow-Origin',
  'Access-Control-Expose-Headers',
  'Access-Control-Max-Age',
  'Access-Control-Request-Headers',
  'Access-Control-Request-Method',
  'Age',
  'Allow',
  'Alt-Svc',
  'Alt-Used',
  'Authorization',
  'Cache-Control',
  'Clear-Site-Data',
  'Connection',
  'Content-Disposition',
  'Content-Encoding',
  'Content-Language',
  'Content-Length',
  'Content-Location',
  'Content-Range',
  'Content-Security-Policy',
  'Content-Security-Policy-Report-Only',
  'Content-Type',
  'Cookie',
  'Cross-Origin-Embedder-Policy',
  'Cross-Origin-Opener-Policy',
  'Cross-Origin-Resource-Policy',
  'Date',
  'Device-Memory',
  'Downlink',
  'ECT',
  'ETag',
  'Expect',
  'Expect-CT',
  'Expires',
  'Forwarded',
  'From',
  'Host',
  'If-Match',
  'If-Modified-Since',
  'If-None-Match',
  'If-Range',
  'If-Unmodified-Since',
  'Keep-Alive',
  'Last-Modified',
  'Link',
  'Location',
  'Max-Forwards',
  'Origin',
  'Permissions-Policy',
  'Pragma',
  'Proxy-Authenticate',
  'Proxy-Authorization',
  'RTT',
  'Range',
  'Referer',
  'Referrer-Policy',
  'Refresh',
  'Retry-After',
  'Sec-WebSocket-Accept',
  'Sec-WebSocket-Extensions',
  'Sec-WebSocket-Key',
  'Sec-WebSocket-Protocol',
  'Sec-WebSocket-Version',
  'Server',
  'Server-Timing',
  'Service-Worker-Allowed',
  'Service-Worker-Navigation-Preload',
  'Set-Cookie',
  'SourceMap',
  'Strict-Transport-Security',
  'Supports-Loading-Mode',
  'TE',
  'Timing-Allow-Origin',
  'Trailer',
  'Transfer-Encoding',
  'Upgrade',
  'Upgrade-Insecure-Requests',
  'User-Agent',
  'Vary',
  'Via',
  'WWW-Authenticate',
  'X-Content-Type-Options',
  'X-DNS-Prefetch-Control',
  'X-Frame-Options',
  'X-Permitted-Cross-Domain-Policies',
  'X-Powered-By',
  'X-Requested-With',
  'X-XSS-Protection'
]

// Pre-compute lowercase lookups keyed by both the canonical and the
// already-lowercased spelling, so hot-path header normalization is a
// single property read for well-known names.
for (const key of wellknownHeaderNames) {
  const lowerCasedKey = key.toLowerCase()
  headerNameLowerCasedRecord[key] = lowerCasedKey
  headerNameLowerCasedRecord[lowerCasedKey] = lowerCasedKey
}
module . exports = {
wellknownHeaderNames ,
headerNameLowerCasedRecord
}
/***/ } ) ,
/***/ 8045 :
/***/ ( ( module ) => {
"use strict" ;
// Error hierarchy for the undici HTTP client. Every subclass carries a
// stable, machine-readable `code` (UND_ERR_*) and a default message, and
// calls Error.captureStackTrace with its own constructor so the stack
// starts at the `new` site rather than inside the constructor.

/** Base class for all undici errors. */
class UndiciError extends Error {
  constructor (message) {
    super(message)
    this.name = 'UndiciError'
    this.code = 'UND_ERR'
  }
}

/** Connection attempt exceeded the configured connect timeout. */
class ConnectTimeoutError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ConnectTimeoutError)
    this.name = 'ConnectTimeoutError'
    this.message = message || 'Connect Timeout Error'
    this.code = 'UND_ERR_CONNECT_TIMEOUT'
  }
}

/** Response headers were not received within the headers timeout. */
class HeadersTimeoutError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, HeadersTimeoutError)
    this.name = 'HeadersTimeoutError'
    this.message = message || 'Headers Timeout Error'
    this.code = 'UND_ERR_HEADERS_TIMEOUT'
  }
}

/** Response headers exceeded the configured maximum size. */
class HeadersOverflowError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, HeadersOverflowError)
    this.name = 'HeadersOverflowError'
    this.message = message || 'Headers Overflow Error'
    this.code = 'UND_ERR_HEADERS_OVERFLOW'
  }
}

/** Response body was not received within the body timeout. */
class BodyTimeoutError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, BodyTimeoutError)
    this.name = 'BodyTimeoutError'
    this.message = message || 'Body Timeout Error'
    this.code = 'UND_ERR_BODY_TIMEOUT'
  }
}

/**
 * Raised for unexpected HTTP status codes (e.g. when throwOnError is set).
 * Exposes both `status` and `statusCode` (aliases) plus headers and body.
 */
class ResponseStatusCodeError extends UndiciError {
  constructor (message, statusCode, headers, body) {
    super(message)
    Error.captureStackTrace(this, ResponseStatusCodeError)
    this.name = 'ResponseStatusCodeError'
    this.message = message || 'Response Status Code Error'
    this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
    this.body = body
    this.status = statusCode
    this.statusCode = statusCode
    this.headers = headers
  }
}

/** An argument passed to an undici API was invalid. */
class InvalidArgumentError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, InvalidArgumentError)
    this.name = 'InvalidArgumentError'
    this.message = message || 'Invalid Argument Error'
    this.code = 'UND_ERR_INVALID_ARG'
  }
}

/** A user-supplied callback/factory returned an invalid value. */
class InvalidReturnValueError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, InvalidReturnValueError)
    this.name = 'InvalidReturnValueError'
    this.message = message || 'Invalid Return Value Error'
    this.code = 'UND_ERR_INVALID_RETURN_VALUE'
  }
}

/** Request was aborted. Note: name is intentionally 'AbortError' for
 *  compatibility with the WHATWG abort convention. */
class RequestAbortedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, RequestAbortedError)
    this.name = 'AbortError'
    this.message = message || 'Request aborted'
    this.code = 'UND_ERR_ABORTED'
  }
}

/** Non-fatal informational condition surfaced as an error object. */
class InformationalError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, InformationalError)
    this.name = 'InformationalError'
    this.message = message || 'Request information'
    this.code = 'UND_ERR_INFO'
  }
}

/** Request body length disagreed with the content-length header. */
class RequestContentLengthMismatchError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, RequestContentLengthMismatchError)
    this.name = 'RequestContentLengthMismatchError'
    this.message = message || 'Request body length does not match content-length header'
    this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
  }
}

/** Response body length disagreed with the content-length header. */
class ResponseContentLengthMismatchError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ResponseContentLengthMismatchError)
    this.name = 'ResponseContentLengthMismatchError'
    this.message = message || 'Response body length does not match content-length header'
    this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
  }
}

/** Operation attempted on a destroyed client. */
class ClientDestroyedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ClientDestroyedError)
    this.name = 'ClientDestroyedError'
    this.message = message || 'The client is destroyed'
    this.code = 'UND_ERR_DESTROYED'
  }
}

/** Operation attempted on a closed client. */
class ClientClosedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ClientClosedError)
    this.name = 'ClientClosedError'
    this.message = message || 'The client is closed'
    this.code = 'UND_ERR_CLOSED'
  }
}

/** Underlying socket failed; keeps a reference to the socket. */
class SocketError extends UndiciError {
  constructor (message, socket) {
    super(message)
    Error.captureStackTrace(this, SocketError)
    this.name = 'SocketError'
    this.message = message || 'Socket error'
    this.code = 'UND_ERR_SOCKET'
    this.socket = socket
  }
}

/** Requested feature is not supported. */
class NotSupportedError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, NotSupportedError)
    this.name = 'NotSupportedError'
    this.message = message || 'Not supported error'
    this.code = 'UND_ERR_NOT_SUPPORTED'
  }
}

/** A BalancedPool was used before any upstream was added. */
class BalancedPoolMissingUpstreamError extends UndiciError {
  constructor (message) {
    super(message)
    // Fix: previously passed NotSupportedError here, which trimmed the
    // stack trace relative to the wrong constructor.
    Error.captureStackTrace(this, BalancedPoolMissingUpstreamError)
    this.name = 'MissingUpstreamError'
    this.message = message || 'No upstream has been added to the BalancedPool'
    this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
  }
}

/**
 * Low-level HTTP parser failure. Note: extends Error directly (not
 * UndiciError); `code` is prefixed with HPE_ to mirror llhttp codes.
 */
class HTTPParserError extends Error {
  constructor (message, code, data) {
    super(message)
    Error.captureStackTrace(this, HTTPParserError)
    this.name = 'HTTPParserError'
    this.code = code ? `HPE_${code}` : undefined
    this.data = data ? data.toString() : undefined
  }
}

/** Response body grew beyond the configured maximum response size. */
class ResponseExceededMaxSizeError extends UndiciError {
  constructor (message) {
    super(message)
    Error.captureStackTrace(this, ResponseExceededMaxSizeError)
    this.name = 'ResponseExceededMaxSizeError'
    this.message = message || 'Response content exceeded max size'
    this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
  }
}

/** Raised by the retry handler; carries the status code, headers and data
 *  of the response that triggered the retry failure. */
class RequestRetryError extends UndiciError {
  constructor (message, code, { headers, data }) {
    super(message)
    Error.captureStackTrace(this, RequestRetryError)
    this.name = 'RequestRetryError'
    this.message = message || 'Request retry error'
    this.code = 'UND_ERR_REQ_RETRY'
    this.statusCode = code
    this.data = data
    this.headers = headers
  }
}
module . exports = {
HTTPParserError ,
UndiciError ,
HeadersTimeoutError ,
HeadersOverflowError ,
BodyTimeoutError ,
RequestContentLengthMismatchError ,
ConnectTimeoutError ,
ResponseStatusCodeError ,
InvalidArgumentError ,
InvalidReturnValueError ,
RequestAbortedError ,
ClientDestroyedError ,
ClientClosedError ,
InformationalError ,
SocketError ,
NotSupportedError ,
ResponseContentLengthMismatchError ,
BalancedPoolMissingUpstreamError ,
ResponseExceededMaxSizeError ,
RequestRetryError
}
/***/ } ) ,
/***/ 2905 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// Module preamble for undici's Request implementation: dependencies,
// header-validation regexes and optional diagnostics channels.
const {
  InvalidArgumentError,
  NotSupportedError
} = __nccwpck_require__(8045)
const assert = __nccwpck_require__(9491)
const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(2785)
const util = __nccwpck_require__(3983)

// tokenRegExp and headerCharRegex have been lifted from
// https://github.com/nodejs/node/blob/main/lib/_http_common.js

/**
 * Verifies that the given val is a valid HTTP token
 * per the rules defined in RFC 7230
 * See https://tools.ietf.org/html/rfc7230#section-3.2.6
 */
const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/

/**
 * Matches if val contains an invalid field-vchar
 *  field-value    = *( field-content / obs-fold )
 *  field-content  = field-vchar [ 1*( SP / HTAB ) field-vchar ]
 *  field-vchar    = VCHAR / obs-text
 */
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/

// Verifies that a given path is valid does not contain control chars \x00 to \x20
const invalidPathRegex = /[^\u0021-\u00ff]/

// Private key under which a Request stores its dispatch handler.
const kHandler = Symbol('handler')

const channels = {}

// Lazily loaded body extractor for FormData bodies (see Request constructor).
let extractBody

// diagnostics_channel may be unavailable; fall back to inert stubs so the
// `hasSubscribers` checks in Request stay cheap and never throw.
try {
  const diagnosticsChannel = __nccwpck_require__(7643)
  channels.create = diagnosticsChannel.channel('undici:request:create')
  channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
  channels.headers = diagnosticsChannel.channel('undici:request:headers')
  channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
  channels.error = diagnosticsChannel.channel('undici:request:error')
} catch {
  channels.create = { hasSubscribers: false }
  channels.bodySent = { hasSubscribers: false }
  channels.headers = { hasSubscribers: false }
  channels.trailers = { hasSubscribers: false }
  channels.error = { hasSubscribers: false }
}
/**
 * Internal representation of a single HTTP request: validates inputs,
 * normalizes the body into one of a few canonical forms, serializes
 * headers, and forwards lifecycle events (connect/headers/data/complete/
 * error) to the user-supplied handler stored under kHandler.
 */
class Request {
  /**
   * @param {string|URL} origin request origin (scheme://host[:port])
   * @param {object} opts request options (path, method, body, headers, ...)
   * @param {object} handler dispatch handler receiving lifecycle callbacks
   * @throws {InvalidArgumentError} on any invalid option
   */
  constructor (origin, {
    path,
    method,
    body,
    headers,
    query,
    idempotent,
    blocking,
    upgrade,
    headersTimeout,
    bodyTimeout,
    reset,
    throwOnError,
    expectContinue
  }, handler) {
    // --- Validation -------------------------------------------------------
    if (typeof path !== 'string') {
      throw new InvalidArgumentError('path must be a string')
    } else if (
      path[0] !== '/' &&
      !(path.startsWith('http://') || path.startsWith('https://')) &&
      method !== 'CONNECT'
    ) {
      throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
    } else if (invalidPathRegex.exec(path) !== null) {
      throw new InvalidArgumentError('invalid request path')
    }

    if (typeof method !== 'string') {
      throw new InvalidArgumentError('method must be a string')
    } else if (tokenRegExp.exec(method) === null) {
      throw new InvalidArgumentError('invalid request method')
    }

    if (upgrade && typeof upgrade !== 'string') {
      throw new InvalidArgumentError('upgrade must be a string')
    }

    if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
      throw new InvalidArgumentError('invalid headersTimeout')
    }

    if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
      throw new InvalidArgumentError('invalid bodyTimeout')
    }

    if (reset != null && typeof reset !== 'boolean') {
      throw new InvalidArgumentError('invalid reset')
    }

    if (expectContinue != null && typeof expectContinue !== 'boolean') {
      throw new InvalidArgumentError('invalid expectContinue')
    }

    this.headersTimeout = headersTimeout
    this.bodyTimeout = bodyTimeout
    this.throwOnError = throwOnError === true
    this.method = method
    // abort is assigned in onConnect; until then errors are parked on this.error.
    this.abort = null

    // --- Body normalization ----------------------------------------------
    // Body ends up as: null, a Readable stream, a Buffer, or a
    // FormData-like / iterable / Blob-like object (handled downstream).
    if (body == null) {
      this.body = null
    } else if (util.isStream(body)) {
      this.body = body

      // If the stream won't auto-destroy itself, destroy it on 'end' so
      // the underlying resource is released.
      const rState = this.body._readableState
      if (!rState || !rState.autoDestroy) {
        this.endHandler = function autoDestroy () {
          util.destroy(this)
        }
        this.body.on('end', this.endHandler)
      }

      // Stream errors abort the request once abort is available; before
      // that they are stashed for onConnect to report.
      this.errorHandler = err => {
        if (this.abort) {
          this.abort(err)
        } else {
          this.error = err
        }
      }
      this.body.on('error', this.errorHandler)
    } else if (util.isBuffer(body)) {
      // Empty payloads are collapsed to null in every branch below.
      this.body = body.byteLength ? body : null
    } else if (ArrayBuffer.isView(body)) {
      this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
    } else if (body instanceof ArrayBuffer) {
      this.body = body.byteLength ? Buffer.from(body) : null
    } else if (typeof body === 'string') {
      this.body = body.length ? Buffer.from(body) : null
    } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
      this.body = body
    } else {
      throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
    }

    // --- State flags ------------------------------------------------------
    this.completed = false
    this.aborted = false
    this.upgrade = upgrade || null
    this.path = query ? util.buildURL(path, query) : path
    this.origin = origin
    // Default idempotency follows HTTP semantics: HEAD/GET are idempotent.
    this.idempotent = idempotent == null
      ? method === 'HEAD' || method === 'GET'
      : idempotent
    this.blocking = blocking == null ? false : blocking
    this.reset = reset == null ? null : reset
    // host/contentLength/contentType are populated by processHeader when
    // the corresponding headers are seen.
    this.host = null
    this.contentLength = null
    this.contentType = null
    // For HTTP/1 headers accumulate into a single CRLF-joined string.
    this.headers = ''

    // Only for H2
    this.expectContinue = expectContinue != null ? expectContinue : false

    // --- Header parsing ---------------------------------------------------
    if (Array.isArray(headers)) {
      // Flat [name, value, name, value, ...] form.
      if (headers.length % 2 !== 0) {
        throw new InvalidArgumentError('headers array must be even')
      }
      for (let i = 0; i < headers.length; i += 2) {
        processHeader(this, headers[i], headers[i + 1])
      }
    } else if (headers && typeof headers === 'object') {
      const keys = Object.keys(headers)
      for (let i = 0; i < keys.length; i++) {
        const key = keys[i]
        processHeader(this, key, headers[key])
      }
    } else if (headers != null) {
      throw new InvalidArgumentError('headers must be an object or an array')
    }

    // --- FormData / Blob content type handling ---------------------------
    if (util.isFormDataLike(this.body)) {
      if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
        throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
      }

      if (!extractBody) {
        // Lazy require to avoid loading the fetch body machinery unless needed.
        extractBody = (__nccwpck_require__(9990).extractBody)
      }

      const [bodyStream, contentType] = extractBody(body)
      if (this.contentType == null) {
        this.contentType = contentType
        this.headers += `content-type: ${contentType}\r\n`
      }
      this.body = bodyStream.stream
      this.contentLength = bodyStream.length
    } else if (util.isBlobLike(body) && this.contentType == null && body.type) {
      this.contentType = body.type
      this.headers += `content-type: ${body.type}\r\n`
    }

    util.validateHandler(handler, method, upgrade)

    this.servername = util.getServerName(this.host)

    this[kHandler] = handler

    if (channels.create.hasSubscribers) {
      channels.create.publish({ request: this })
    }
  }

  // Forwards a sent body chunk to the handler; handler errors abort the request.
  onBodySent (chunk) {
    if (this[kHandler].onBodySent) {
      try {
        return this[kHandler].onBodySent(chunk)
      } catch (err) {
        this.abort(err)
      }
    }
  }

  // Invoked once the full request (headers + body) has been written out.
  onRequestSent () {
    if (channels.bodySent.hasSubscribers) {
      channels.bodySent.publish({ request: this })
    }

    if (this[kHandler].onRequestSent) {
      try {
        return this[kHandler].onRequestSent()
      } catch (err) {
        this.abort(err)
      }
    }
  }

  // Called when a connection is assigned; wires up the abort function, or
  // immediately aborts if a body error was stashed earlier.
  onConnect (abort) {
    assert(!this.aborted)
    assert(!this.completed)

    if (this.error) {
      abort(this.error)
    } else {
      this.abort = abort
      return this[kHandler].onConnect(abort)
    }
  }

  // Response headers received; handler errors abort the request.
  onHeaders (statusCode, headers, resume, statusText) {
    assert(!this.aborted)
    assert(!this.completed)

    if (channels.headers.hasSubscribers) {
      channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
    }

    try {
      return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
    } catch (err) {
      this.abort(err)
    }
  }

  // Response body chunk received; returns false on handler error to stop reading.
  onData (chunk) {
    assert(!this.aborted)
    assert(!this.completed)

    try {
      return this[kHandler].onData(chunk)
    } catch (err) {
      this.abort(err)
      return false
    }
  }

  // Protocol upgrade completed (e.g. CONNECT / WebSocket); hands over the socket.
  onUpgrade (statusCode, headers, socket) {
    assert(!this.aborted)
    assert(!this.completed)

    return this[kHandler].onUpgrade(statusCode, headers, socket)
  }

  // Response fully received; detaches body listeners and marks completion.
  onComplete (trailers) {
    this.onFinally()

    assert(!this.aborted)

    this.completed = true
    if (channels.trailers.hasSubscribers) {
      channels.trailers.publish({ request: this, trailers })
    }

    try {
      return this[kHandler].onComplete(trailers)
    } catch (err) {
      // TODO (fix): This might be a bad idea?
      this.onError(err)
    }
  }

  // Request failed; idempotent (subsequent calls after abort are ignored).
  onError (error) {
    this.onFinally()

    if (channels.error.hasSubscribers) {
      channels.error.publish({ request: this, error })
    }

    if (this.aborted) {
      return
    }
    this.aborted = true
    return this[kHandler].onError(error)
  }

  // Removes the body stream listeners installed by the constructor.
  onFinally () {
    if (this.errorHandler) {
      this.body.off('error', this.errorHandler)
      this.errorHandler = null
    }

    if (this.endHandler) {
      this.body.off('end', this.endHandler)
      this.endHandler = null
    }
  }

  // TODO: adjust to support H2
  addHeader (key, value) {
    processHeader(this, key, value)
    return this
  }

  // HTTP/1 factory: plain construction (headers serialized to a string).
  static [kHTTP1BuildRequest] (origin, opts, handler) {
    // TODO: Migrate header parsing here, to make Requests
    // HTTP agnostic
    return new Request(origin, opts, handler)
  }

  // HTTP/2 factory: headers are kept as an object keyed by name instead of
  // a CRLF string, so they are re-processed with skipAppend = true.
  static [kHTTP2BuildRequest] (origin, opts, handler) {
    const headers = opts.headers
    opts = { ...opts, headers: null }

    const request = new Request(origin, opts, handler)

    request.headers = {}

    if (Array.isArray(headers)) {
      if (headers.length % 2 !== 0) {
        throw new InvalidArgumentError('headers array must be even')
      }
      for (let i = 0; i < headers.length; i += 2) {
        processHeader(request, headers[i], headers[i + 1], true)
      }
    } else if (headers && typeof headers === 'object') {
      const keys = Object.keys(headers)
      for (let i = 0; i < keys.length; i++) {
        const key = keys[i]
        processHeader(request, key, headers[key], true)
      }
    } else if (headers != null) {
      throw new InvalidArgumentError('headers must be an object or an array')
    }

    return request
  }

  // Converts a CRLF-joined HTTP/1 header string into an object, joining
  // repeated names with commas; entries without a value are dropped.
  static [kHTTP2CopyHeaders] (raw) {
    const rawHeaders = raw.split('\r\n')

    const headers = {}

    for (const header of rawHeaders) {
      const [key, value] = header.split(': ')

      if (value == null || value.length === 0) continue

      if (headers[key]) headers[key] += `,${value}`
      else headers[key] = value
    }

    return headers
  }
}
/**
 * Validates a single header value and renders it either as a bare string
 * (skipAppend, HTTP/2 object form) or as a `key: value\r\n` line (HTTP/1
 * string form). null/undefined become the empty string.
 *
 * @throws {InvalidArgumentError} if the value is an object or contains
 *   characters outside the legal field-vchar set.
 */
function processHeaderValue (key, val, skipAppend) {
  if (val && typeof val === 'object') {
    throw new InvalidArgumentError(`invalid ${key} header`)
  }

  const normalized = val == null ? '' : `${val}`

  if (headerCharRegex.exec(normalized) !== null) {
    throw new InvalidArgumentError(`invalid ${key} header`)
  }

  if (skipAppend) {
    return normalized
  }
  return `${key}: ${normalized}\r\n`
}
function processHeader ( request , key , val , skipAppend = false ) {
if ( val && ( typeof val === 'object' && ! Array . isArray ( val ) ) ) {
throw new InvalidArgumentError ( ` invalid ${ key } header ` )
} else if ( val === undefined ) {
return
}
if (
request . host === null &&
key . length === 4 &&
key . toLowerCase ( ) === 'host'
) {
if ( headerCharRegex . exec ( val ) !== null ) {
throw new InvalidArgumentError ( ` invalid ${ key } header ` )
}
// Consumed by Client
request . host = val
} else if (
request . contentLength === null &&
key . length === 14 &&
key . toLowerCase ( ) === 'content-length'
) {
request . contentLength = parseInt ( val , 10 )
if ( ! Number . isFinite ( request . contentLength ) ) {
throw new InvalidArgumentError ( 'invalid content-length header' )
}
} else if (
request . contentType === null &&
key . length === 12 &&
key . toLowerCase ( ) === 'content-type'
) {
request . contentType = val
if ( skipAppend ) request . headers [ key ] = processHeaderValue ( key , val , skipAppend )
else request . headers += processHeaderValue ( key , val )
} else if (
key . length === 17 &&
key . toLowerCase ( ) === 'transfer-encoding'
) {
throw new InvalidArgumentError ( 'invalid transfer-encoding header' )
} else if (
key . length === 10 &&
key . toLowerCase ( ) === 'connection'
) {
const value = typeof val === 'string' ? val . toLowerCase ( ) : null
if ( value !== 'close' && value !== 'keep-alive' ) {
throw new InvalidArgumentError ( 'invalid connection header' )
} else if ( value === 'close' ) {
request . reset = true
}
} else if (
key . length === 10 &&
key . toLowerCase ( ) === 'keep-alive'
) {
throw new InvalidArgumentError ( 'invalid keep-alive header' )
} else if (
key . length === 7 &&
key . toLowerCase ( ) === 'upgrade'
) {
throw new InvalidArgumentError ( 'invalid upgrade header' )
} else if (
key . length === 6 &&
key . toLowerCase ( ) === 'expect'
) {
throw new NotSupportedError ( 'expect header not supported' )
} else if ( tokenRegExp . exec ( key ) === null ) {
throw new InvalidArgumentError ( 'invalid header key' )
} else {
if ( Array . isArray ( val ) ) {
for ( let i = 0 ; i < val . length ; i ++ ) {
if ( skipAppend ) {
if ( request . headers [ key ] ) request . headers [ key ] += ` , ${ processHeaderValue ( key , val [ i ] , skipAppend ) } `
else request . headers [ key ] = processHeaderValue ( key , val [ i ] , skipAppend )
} else {
request . headers += processHeaderValue ( key , val [ i ] )
}
}
} else {
if ( skipAppend ) request . headers [ key ] = processHeaderValue ( key , val , skipAppend )
else request . headers += processHeaderValue ( key , val )
}
}
}
module . exports = Request
/***/ } ) ,
/***/ 2785 :
/***/ ( ( module ) => {
module . exports = {
kClose : Symbol ( 'close' ) ,
kDestroy : Symbol ( 'destroy' ) ,
kDispatch : Symbol ( 'dispatch' ) ,
kUrl : Symbol ( 'url' ) ,
kWriting : Symbol ( 'writing' ) ,
kResuming : Symbol ( 'resuming' ) ,
kQueue : Symbol ( 'queue' ) ,
kConnect : Symbol ( 'connect' ) ,
kConnecting : Symbol ( 'connecting' ) ,
kHeadersList : Symbol ( 'headers list' ) ,
kKeepAliveDefaultTimeout : Symbol ( 'default keep alive timeout' ) ,
kKeepAliveMaxTimeout : Symbol ( 'max keep alive timeout' ) ,
kKeepAliveTimeoutThreshold : Symbol ( 'keep alive timeout threshold' ) ,
kKeepAliveTimeoutValue : Symbol ( 'keep alive timeout' ) ,
kKeepAlive : Symbol ( 'keep alive' ) ,
kHeadersTimeout : Symbol ( 'headers timeout' ) ,
kBodyTimeout : Symbol ( 'body timeout' ) ,
kServerName : Symbol ( 'server name' ) ,
kLocalAddress : Symbol ( 'local address' ) ,
kHost : Symbol ( 'host' ) ,
kNoRef : Symbol ( 'no ref' ) ,
kBodyUsed : Symbol ( 'used' ) ,
kRunning : Symbol ( 'running' ) ,
kBlocking : Symbol ( 'blocking' ) ,
kPending : Symbol ( 'pending' ) ,
kSize : Symbol ( 'size' ) ,
kBusy : Symbol ( 'busy' ) ,
kQueued : Symbol ( 'queued' ) ,
kFree : Symbol ( 'free' ) ,
kConnected : Symbol ( 'connected' ) ,
kClosed : Symbol ( 'closed' ) ,
kNeedDrain : Symbol ( 'need drain' ) ,
kReset : Symbol ( 'reset' ) ,
kDestroyed : Symbol . for ( 'nodejs.stream.destroyed' ) ,
kMaxHeadersSize : Symbol ( 'max headers size' ) ,
kRunningIdx : Symbol ( 'running index' ) ,
kPendingIdx : Symbol ( 'pending index' ) ,
kError : Symbol ( 'error' ) ,
kClients : Symbol ( 'clients' ) ,
kClient : Symbol ( 'client' ) ,
kParser : Symbol ( 'parser' ) ,
kOnDestroyed : Symbol ( 'destroy callbacks' ) ,
kPipelining : Symbol ( 'pipelining' ) ,
kSocket : Symbol ( 'socket' ) ,
kHostHeader : Symbol ( 'host header' ) ,
kConnector : Symbol ( 'connector' ) ,
kStrictContentLength : Symbol ( 'strict content length' ) ,
kMaxRedirections : Symbol ( 'maxRedirections' ) ,
kMaxRequests : Symbol ( 'maxRequestsPerClient' ) ,
kProxy : Symbol ( 'proxy agent options' ) ,
kCounter : Symbol ( 'socket request counter' ) ,
kInterceptors : Symbol ( 'dispatch interceptors' ) ,
kMaxResponseSize : Symbol ( 'max response size' ) ,
kHTTP2Session : Symbol ( 'http2Session' ) ,
kHTTP2SessionState : Symbol ( 'http2Session state' ) ,
kHTTP2BuildRequest : Symbol ( 'http2 build request' ) ,
kHTTP1BuildRequest : Symbol ( 'http1 build request' ) ,
kHTTP2CopyHeaders : Symbol ( 'http2 copy headers' ) ,
kHTTPConnVersion : Symbol ( 'http connection version' ) ,
kRetryHandlerDefaultRetry : Symbol ( 'retry agent default retry' ) ,
kConstruct : Symbol ( 'constructable' )
}
/***/ } ) ,
/***/ 3983 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const assert = _ _nccwpck _require _ _ ( 9491 )
const { kDestroyed , kBodyUsed } = _ _nccwpck _require _ _ ( 2785 )
const { IncomingMessage } = _ _nccwpck _require _ _ ( 3685 )
const stream = _ _nccwpck _require _ _ ( 2781 )
const net = _ _nccwpck _require _ _ ( 1808 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const { Blob } = _ _nccwpck _require _ _ ( 4300 )
const nodeUtil = _ _nccwpck _require _ _ ( 3837 )
const { stringify } = _ _nccwpck _require _ _ ( 3477 )
const { headerNameLowerCasedRecord } = _ _nccwpck _require _ _ ( 4462 )
const [ nodeMajor , nodeMinor ] = process . versions . node . split ( '.' ) . map ( v => Number ( v ) )
function nop ( ) { }
/**
 * Duck-types a Node stream: any object exposing `pipe` and `on` functions.
 * Falsy inputs are returned as-is (the && chain short-circuits), matching
 * the truthy/falsy contract callers rely on.
 */
function isStream (candidate) {
  return candidate &&
    typeof candidate === 'object' &&
    typeof candidate.pipe === 'function' &&
    typeof candidate.on === 'function'
}
// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
/**
 * Detects Blob/File instances and cross-realm look-alikes: objects with a
 * `stream` or `arrayBuffer` method whose Symbol.toStringTag is 'Blob' or
 * 'File'. Preserves the original truthy/falsy return contract.
 */
function isBlobLike (object) {
  if (Blob && object instanceof Blob) {
    return true
  }
  return object &&
    typeof object === 'object' &&
    (typeof object.stream === 'function' ||
      typeof object.arrayBuffer === 'function') &&
    /^(Blob|File)$/.test(object[Symbol.toStringTag])
}
/**
 * Appends `queryParams` to `url` as a querystring-encoded suffix.
 * The url must not already carry a query or fragment.
 *
 * @param {string} url path or URL without '?' or '#'
 * @param {object} queryParams serialized via querystring.stringify
 * @returns {string} url, with '?...' appended when params are non-empty
 * @throws {Error} if url already contains '?' or '#'
 */
function buildURL (url, queryParams) {
  if (url.includes('?') || url.includes('#')) {
    throw new Error('Query params cannot be passed when url already contains "?" or "#".')
  }

  const serialized = stringify(queryParams)
  return serialized ? `${url}?${serialized}` : url
}
/**
 * Normalizes a URL argument (string, URL instance, or URL-like object with
 * protocol/hostname/port/path fields) into a validated URL instance.
 * Only http: and https: protocols are accepted.
 *
 * @param {string|URL|object} url
 * @returns {URL}
 * @throws {InvalidArgumentError} for non-http(s) protocols or malformed parts
 */
function parseURL (url) {
  if (typeof url === 'string') {
    url = new URL(url)

    if (!/^https?:/.test(url.origin || url.protocol)) {
      throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
    }

    return url
  }

  if (!url || typeof url !== 'object') {
    throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
  }

  if (!/^https?:/.test(url.origin || url.protocol)) {
    throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
  }

  if (url instanceof URL) {
    return url
  }

  // URL-like plain object: validate each field before assembling.
  if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
    throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
  }

  if (url.path != null && typeof url.path !== 'string') {
    throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
  }

  if (url.pathname != null && typeof url.pathname !== 'string') {
    throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
  }

  if (url.hostname != null && typeof url.hostname !== 'string') {
    throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
  }

  if (url.origin != null && typeof url.origin !== 'string') {
    throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
  }

  const port = url.port != null
    ? url.port
    : (url.protocol === 'https:' ? 443 : 80)
  let origin = url.origin != null
    ? url.origin
    : `${url.protocol}//${url.hostname}:${port}`
  let path = url.path != null
    ? url.path
    : `${url.pathname || ''}${url.search || ''}`

  if (origin.endsWith('/')) {
    origin = origin.slice(0, -1)
  }

  if (path && !path.startsWith('/')) {
    path = `/${path}`
  }

  // new URL(path, origin) is unsafe when `path` contains an absolute URL
  // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
  // If first parameter is a relative URL, second param is required, and will be used as the base URL.
  // If first parameter is an absolute URL, a given second param will be ignored.
  return new URL(origin + path)
}
/**
 * Parses `url` like parseURL but additionally requires it to be a bare
 * origin: root pathname, no query string, no fragment.
 *
 * @throws {InvalidArgumentError} if the URL carries a path/query/hash
 */
function parseOrigin (url) {
  const parsed = parseURL(url)

  if (parsed.pathname !== '/' || parsed.search || parsed.hash) {
    throw new InvalidArgumentError('invalid url')
  }

  return parsed
}
function getHostname ( host ) {
if ( host [ 0 ] === '[' ) {
const idx = host . indexOf ( ']' )
assert ( idx !== - 1 )
return host . substring ( 1 , idx )
}
const idx = host . indexOf ( ':' )
if ( idx === - 1 ) return host
return host . substring ( 0 , idx )
}
// IP addresses are not valid server names per RFC6066
// > Currently, the only server names supported are DNS hostnames
function getServerName ( host ) {
if ( ! host ) {
return null
}
assert . strictEqual ( typeof host , 'string' )
const servername = getHostname ( host )
if ( net . isIP ( servername ) ) {
return ''
}
return servername
}
/**
 * Deep-clones a JSON-serializable value via a stringify/parse round trip.
 * Note: drops undefined/function properties and converts Dates to strings,
 * as JSON serialization always does — callers rely on exactly this.
 */
function deepClone (obj) {
  const serialized = JSON.stringify(obj)
  return JSON.parse(serialized)
}
/** Returns true when `obj` implements the async-iteration protocol. */
function isAsyncIterable (obj) {
  if (obj == null) {
    return false
  }
  return typeof obj[Symbol.asyncIterator] === 'function'
}
/** Returns true when `obj` is sync- or async-iterable. */
function isIterable (obj) {
  if (obj == null) {
    return false
  }
  const syncIterable = typeof obj[Symbol.iterator] === 'function'
  const asyncIterable = typeof obj[Symbol.asyncIterator] === 'function'
  return syncIterable || asyncIterable
}
// Best-effort byte length of a request/response body: 0 for no body,
// a number where it can be known without consuming the body, else null.
function bodyLength (body) {
  if (body == null) {
    return 0
  }
  if (isStream(body)) {
    const state = body._readableState
    // Only a fully buffered, non-objectMode stream has a known length.
    return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
      ? state.length
      : null
  }
  if (isBlobLike(body)) {
    return body.size != null ? body.size : null
  }
  if (isBuffer(body)) {
    return body.byteLength
  }
  return null
}
// True when `stream` is missing, natively destroyed, or was flagged
// destroyed via the kDestroyed symbol.
function isDestroyed (stream) {
  if (!stream) return true
  return !!(stream.destroyed || stream[kDestroyed])
}
function isReadableAborted ( stream ) {
const state = stream && stream . _readableState
return isDestroyed ( stream ) && state && ! state . endEmitted
}
function destroy ( stream , err ) {
if ( stream == null || ! isStream ( stream ) || isDestroyed ( stream ) ) {
return
}
if ( typeof stream . destroy === 'function' ) {
if ( Object . getPrototypeOf ( stream ) . constructor === IncomingMessage ) {
// See: https://github.com/nodejs/node/pull/38505/files
stream . socket = null
}
stream . destroy ( err )
} else if ( err ) {
process . nextTick ( ( stream , err ) => {
stream . emit ( 'error' , err )
} , stream , err )
}
if ( stream . destroyed !== true ) {
stream [ kDestroyed ] = true
}
}
const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
// Extracts the `timeout` parameter (seconds) from a Keep-Alive header
// value and returns it in milliseconds, or null when absent.
function parseKeepAliveTimeout (val) {
  const match = KEEPALIVE_TIMEOUT_EXPR.exec(val.toString())
  if (!match) return null
  return parseInt(match[1], 10) * 1000
}
/**
 * Retrieves a header name and returns its lowercase value.
 * Uses the precomputed lower-case lookup table when possible.
 * @param {string | Buffer} value Header name
 * @returns {string}
 */
function headerNameToString (value) {
  const cached = headerNameLowerCasedRecord[value]
  return cached || value.toLowerCase()
}
// Converts a flat [name, value, name, value, ...] raw-header list into an
// object keyed by lower-cased name; repeated names collect into arrays.
function parseHeaders (headers, obj = {}) {
  // For H2 support
  if (!Array.isArray(headers)) return headers
  for (let i = 0; i < headers.length; i += 2) {
    const key = headers[i].toString().toLowerCase()
    const rawValue = headers[i + 1]
    const existing = obj[key]
    if (existing) {
      if (Array.isArray(existing)) {
        existing.push(rawValue.toString('utf8'))
      } else {
        obj[key] = [existing, rawValue.toString('utf8')]
      }
    } else if (Array.isArray(rawValue)) {
      obj[key] = rawValue.map(x => x.toString('utf8'))
    } else {
      obj[key] = rawValue.toString('utf8')
    }
  }
  // See https://github.com/nodejs/node/pull/46528
  if ('content-length' in obj && 'content-disposition' in obj) {
    obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
  }
  return obj
}
// Normalizes a raw [name, value, ...] header list to UTF-8 strings,
// re-encoding content-disposition as latin1 when a content-length is
// present (see the linked Node issue).
function parseRawHeaders (headers) {
  const ret = []
  let sawContentLength = false
  let contentDispositionIdx = -1
  for (let n = 0; n < headers.length; n += 2) {
    const key = headers[n].toString()
    const val = headers[n + 1].toString('utf8')
    if (key.length === 14 && key.toLowerCase() === 'content-length') {
      sawContentLength = true
      ret.push(key, val)
    } else if (key.length === 19 && key.toLowerCase() === 'content-disposition') {
      contentDispositionIdx = ret.push(key, val) - 1
    } else {
      ret.push(key, val)
    }
  }
  // See https://github.com/nodejs/node/pull/46528
  if (sawContentLength && contentDispositionIdx !== -1) {
    ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
  }
  return ret
}
// True for Node Buffers and any Uint8Array view.
function isBuffer (buffer) {
  // See, https://github.com/mcollina/undici/pull/319
  return Buffer.isBuffer(buffer) || buffer instanceof Uint8Array
}
// Validates a dispatch handler object: onConnect/onError are mandatory,
// onBodySent is optional, and either onUpgrade (for upgrades/CONNECT) or
// onHeaders/onData/onComplete must be present. Throws on violations.
function validateHandler (handler, method, upgrade) {
  if (!handler || typeof handler !== 'object') {
    throw new InvalidArgumentError('handler must be an object')
  }
  const requireMethod = (name, optional = false) => {
    const fn = handler[name]
    if (typeof fn === 'function') return
    if (optional && fn === undefined) return
    throw new InvalidArgumentError(`invalid ${name} method`)
  }
  requireMethod('onConnect')
  requireMethod('onError')
  requireMethod('onBodySent', true)
  if (upgrade || method === 'CONNECT') {
    requireMethod('onUpgrade')
  } else {
    requireMethod('onHeaders')
    requireMethod('onData')
    requireMethod('onComplete')
  }
}
// A body is disturbed if it has been read from and it cannot
// be re-used without losing state or data.
function isDisturbed (body) {
  if (!body) return false
  if (stream.isDisturbed) {
    // TODO (fix): Why is body[kBodyUsed] needed?
    return !!(stream.isDisturbed(body) || body[kBodyUsed])
  }
  // Fallback for Node versions without stream.isDisturbed.
  return !!(
    body[kBodyUsed] ||
    body.readableDidRead ||
    (body._readableState && body._readableState.dataEmitted) ||
    isReadableAborted(body)
  )
}
function isErrored ( body ) {
return ! ! ( body && (
stream . isErrored
? stream . isErrored ( body )
: /state: 'errored'/ . test ( nodeUtil . inspect ( body )
) ) )
}
// True when the stream-like `body` can still be read from.
function isReadable (body) {
  if (!body) return false
  if (stream.isReadable) {
    return !!stream.isReadable(body)
  }
  // Fallback for Node versions without stream.isReadable.
  return /state: 'readable'/.test(nodeUtil.inspect(body))
}
// Snapshots the connection-related fields of a socket into a plain object.
function getSocketInfo (socket) {
  const {
    localAddress,
    localPort,
    remoteAddress,
    remotePort,
    remoteFamily,
    timeout,
    bytesWritten,
    bytesRead
  } = socket
  return { localAddress, localPort, remoteAddress, remotePort, remoteFamily, timeout, bytesWritten, bytesRead }
}
// Yields every chunk of `iterable`, coercing non-Buffer chunks to Buffers.
async function * convertIterableToBuffer (iterable) {
  for await (const chunk of iterable) {
    if (Buffer.isBuffer(chunk)) {
      yield chunk
    } else {
      yield Buffer.from(chunk)
    }
  }
}
let ReadableStream
// Builds a whatwg ReadableStream of Uint8Array chunks from an async
// iterable. Prefers the native ReadableStream.from when available and
// otherwise adapts the iterable with a pull-based underlying source.
function ReadableStreamFrom (iterable) {
  if (!ReadableStream) {
    // Lazily required so stream/web is only loaded when needed.
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }
  if (ReadableStream.from) {
    return ReadableStream.from(convertIterableToBuffer(iterable))
  }
  let iterator
  return new ReadableStream(
    {
      async start () {
        iterator = iterable[Symbol.asyncIterator]()
      },
      async pull (controller) {
        const { done, value } = await iterator.next()
        if (done) {
          // Close on a microtask so the final chunk is delivered first.
          queueMicrotask(() => {
            controller.close()
          })
        } else {
          const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
          controller.enqueue(new Uint8Array(buf))
        }
        return controller.desiredSize > 0
      },
      async cancel (reason) {
        await iterator.return()
      }
    },
    0 // highWaterMark 0: do not read ahead of the consumer
  )
}
// The chunk should be a FormData instance and contains
// all the required methods.
function isFormDataLike (object) {
  return (
    object &&
    typeof object === 'object' &&
    object[Symbol.toStringTag] === 'FormData' &&
    ['append', 'delete', 'get', 'getAll', 'has', 'set'].every(
      (method) => typeof object[method] === 'function'
    )
  )
}
// Throws an AbortError when `signal` is aborted; no-op for a missing
// signal. Delegates to the native throwIfAborted when present.
function throwIfAborted (signal) {
  if (!signal) { return }
  if (typeof signal.throwIfAborted === 'function') {
    signal.throwIfAborted()
    return
  }
  if (signal.aborted) {
    // DOMException not available < v17.0.0
    const err = new Error('The operation was aborted')
    err.name = 'AbortError'
    throw err
  }
}
// Attaches `listener` to a signal's 'abort' event, supporting both
// EventTarget-style (AbortSignal) and EventEmitter-style signals.
// Returns a disposer that removes the listener again.
function addAbortListener (signal, listener) {
  const isEventTarget = 'addEventListener' in signal
  if (isEventTarget) {
    signal.addEventListener('abort', listener, { once: true })
  } else {
    signal.addListener('abort', listener)
  }
  return isEventTarget
    ? () => signal.removeEventListener('abort', listener)
    : () => signal.removeListener('abort', listener)
}
const hasToWellFormed = !!String.prototype.toWellFormed
/**
 * Coerces `val` to a well-formed string (lone surrogates replaced with
 * U+FFFD), using String#toWellFormed or util.toUSVString when available.
 * @param {string} val
 */
function toUSVString (val) {
  const str = `${val}`
  if (hasToWellFormed) {
    return str.toWellFormed()
  }
  if (nodeUtil.toUSVString) {
    return nodeUtil.toUSVString(val)
  }
  return str
}
// Parsed accordingly to RFC 9110
// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
/**
 * Parses a `Content-Range: bytes <start>-<end>/<size>` header value.
 * @param {string | null | undefined} range
 * @returns {{ start: number, end: number | null, size: number | null } | null}
 *   `{ start: 0, end: null, size: null }` for a missing/empty header, the
 *   parsed triple on success, or `null` when the value is malformed.
 */
function parseRangeHeader (range) {
  if (range == null || range === '') return { start: 0, end: null, size: null }
  // `range` is known non-null here, so match it directly; always pass an
  // explicit radix to parseInt.
  const m = range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/)
  return m
    ? {
        start: parseInt(m[1], 10),
        end: m[2] ? parseInt(m[2], 10) : null,
        size: m[3] ? parseInt(m[3], 10) : null
      }
    : null
}
const kEnumerableProperty = Object . create ( null )
kEnumerableProperty . enumerable = true
module . exports = {
kEnumerableProperty ,
nop ,
isDisturbed ,
isErrored ,
isReadable ,
toUSVString ,
isReadableAborted ,
isBlobLike ,
parseOrigin ,
parseURL ,
getServerName ,
isStream ,
isIterable ,
isAsyncIterable ,
isDestroyed ,
headerNameToString ,
parseRawHeaders ,
parseHeaders ,
parseKeepAliveTimeout ,
destroy ,
bodyLength ,
deepClone ,
ReadableStreamFrom ,
isBuffer ,
validateHandler ,
getSocketInfo ,
isFormDataLike ,
buildURL ,
throwIfAborted ,
addAbortListener ,
parseRangeHeader ,
nodeMajor ,
nodeMinor ,
nodeHasAutoSelectFamily : nodeMajor > 18 || ( nodeMajor === 18 && nodeMinor >= 13 ) ,
safeHTTPMethods : [ 'GET' , 'HEAD' , 'OPTIONS' , 'TRACE' ]
}
/***/ } ) ,
/***/ 4839 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const Dispatcher = _ _nccwpck _require _ _ ( 412 )
const {
ClientDestroyedError ,
ClientClosedError ,
InvalidArgumentError
} = _ _nccwpck _require _ _ ( 8045 )
const { kDestroy , kClose , kDispatch , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const kDestroyed = Symbol ( 'destroyed' )
const kClosed = Symbol ( 'closed' )
const kOnDestroyed = Symbol ( 'onDestroyed' )
const kOnClosed = Symbol ( 'onClosed' )
const kInterceptedDispatch = Symbol ( 'Intercepted Dispatch' )
/**
 * Shared base class for dispatchers. Implements the close()/destroy()
 * lifecycle (callback and promise flavors), interceptor chaining and the
 * guarded public dispatch() entry point on top of the [kDispatch],
 * [kClose] and [kDestroy] hooks provided by subclasses.
 */
class DispatcherBase extends Dispatcher {
  constructor () {
    super()
    this[kDestroyed] = false
    this[kOnDestroyed] = null
    this[kClosed] = false
    this[kOnClosed] = []
  }

  get destroyed () {
    return this[kDestroyed]
  }

  get closed () {
    return this[kClosed]
  }

  get interceptors () {
    return this[kInterceptors]
  }

  set interceptors (newInterceptors) {
    if (newInterceptors) {
      for (let i = newInterceptors.length - 1; i >= 0; i--) {
        // Fix: validate the interceptors being assigned, not whatever is
        // currently installed in this[kInterceptors] (which may be unset
        // or shorter than the new list).
        const interceptor = newInterceptors[i]
        if (typeof interceptor !== 'function') {
          throw new InvalidArgumentError('interceptor must be an function')
        }
      }
    }
    this[kInterceptors] = newInterceptors
  }

  /**
   * Gracefully closes the dispatcher. Without a callback, returns a
   * promise that resolves once [kClose] and destroy() have completed.
   * @param {(err: Error | null, data: null) => void} [callback]
   */
  close (callback) {
    if (callback === undefined) {
      return new Promise((resolve, reject) => {
        this.close((err, data) => {
          return err ? reject(err) : resolve(data)
        })
      })
    }
    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }
    if (this[kDestroyed]) {
      queueMicrotask(() => callback(new ClientDestroyedError(), null))
      return
    }
    if (this[kClosed]) {
      // Already closing: queue the callback, or complete immediately when
      // the close already finished (kOnClosed is nulled on completion).
      if (this[kOnClosed]) {
        this[kOnClosed].push(callback)
      } else {
        queueMicrotask(() => callback(null, null))
      }
      return
    }
    this[kClosed] = true
    this[kOnClosed].push(callback)
    const onClosed = () => {
      const callbacks = this[kOnClosed]
      this[kOnClosed] = null
      for (let i = 0; i < callbacks.length; i++) {
        callbacks[i](null, null)
      }
    }
    // Should not error.
    this[kClose]()
      .then(() => this.destroy())
      .then(() => {
        queueMicrotask(onClosed)
      })
  }

  /**
   * Destroys the dispatcher, aborting in-flight work with `err`
   * (defaults to ClientDestroyedError). Without a callback, returns a
   * promise.
   * @param {Error | null} [err]
   * @param {(err: Error | null, data: null) => void} [callback]
   */
  destroy (err, callback) {
    if (typeof err === 'function') {
      callback = err
      err = null
    }
    if (callback === undefined) {
      return new Promise((resolve, reject) => {
        this.destroy(err, (err, data) => {
          return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
        })
      })
    }
    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }
    if (this[kDestroyed]) {
      if (this[kOnDestroyed]) {
        this[kOnDestroyed].push(callback)
      } else {
        queueMicrotask(() => callback(null, null))
      }
      return
    }
    if (!err) {
      err = new ClientDestroyedError()
    }
    this[kDestroyed] = true
    this[kOnDestroyed] = this[kOnDestroyed] || []
    this[kOnDestroyed].push(callback)
    const onDestroyed = () => {
      const callbacks = this[kOnDestroyed]
      this[kOnDestroyed] = null
      for (let i = 0; i < callbacks.length; i++) {
        callbacks[i](null, null)
      }
    }
    // Should not error.
    this[kDestroy](err).then(() => {
      queueMicrotask(onDestroyed)
    })
  }

  [kInterceptedDispatch] (opts, handler) {
    if (!this[kInterceptors] || this[kInterceptors].length === 0) {
      // No interceptors: short-circuit future calls straight to kDispatch.
      this[kInterceptedDispatch] = this[kDispatch]
      return this[kDispatch](opts, handler)
    }
    // Compose interceptors right-to-left around the base dispatch and
    // memoize the composed chain for subsequent calls.
    let dispatch = this[kDispatch].bind(this)
    for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
      dispatch = this[kInterceptors][i](dispatch)
    }
    this[kInterceptedDispatch] = dispatch
    return dispatch(opts, handler)
  }

  /**
   * Public dispatch entry point: validates arguments and lifecycle state,
   * and routes synchronous failures to handler.onError instead of
   * throwing (returning false in that case).
   */
  dispatch (opts, handler) {
    if (!handler || typeof handler !== 'object') {
      throw new InvalidArgumentError('handler must be an object')
    }
    try {
      if (!opts || typeof opts !== 'object') {
        throw new InvalidArgumentError('opts must be an object.')
      }
      if (this[kDestroyed] || this[kOnDestroyed]) {
        throw new ClientDestroyedError()
      }
      if (this[kClosed]) {
        throw new ClientClosedError()
      }
      return this[kInterceptedDispatch](opts, handler)
    } catch (err) {
      if (typeof handler.onError !== 'function') {
        throw new InvalidArgumentError('invalid onError method')
      }
      handler.onError(err)
      return false
    }
  }
}
module . exports = DispatcherBase
/***/ } ) ,
/***/ 412 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const EventEmitter = _ _nccwpck _require _ _ ( 2361 )
class Dispatcher extends EventEmitter {
dispatch ( ) {
throw new Error ( 'not implemented' )
}
close ( ) {
throw new Error ( 'not implemented' )
}
destroy ( ) {
throw new Error ( 'not implemented' )
}
}
module . exports = Dispatcher
/***/ } ) ,
/***/ 9990 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const Busboy = _ _nccwpck _require _ _ ( 727 )
const util = _ _nccwpck _require _ _ ( 3983 )
const {
ReadableStreamFrom ,
isBlobLike ,
isReadableStreamLike ,
readableStreamClose ,
createDeferredPromise ,
fullyReadBody
} = _ _nccwpck _require _ _ ( 2538 )
const { FormData } = _ _nccwpck _require _ _ ( 2015 )
const { kState } = _ _nccwpck _require _ _ ( 5861 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { DOMException , structuredClone } = _ _nccwpck _require _ _ ( 1037 )
const { Blob , File : NativeFile } = _ _nccwpck _require _ _ ( 4300 )
const { kBodyUsed } = _ _nccwpck _require _ _ ( 2785 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { isErrored } = _ _nccwpck _require _ _ ( 3983 )
const { isUint8Array , isArrayBuffer } = _ _nccwpck _require _ _ ( 9830 )
const { File : UndiciFile } = _ _nccwpck _require _ _ ( 8511 )
const { parseMIMEType , serializeAMimeType } = _ _nccwpck _require _ _ ( 685 )
// Prefer the global ReadableStream; lazily filled from stream/web otherwise.
let ReadableStream = globalThis.ReadableStream
/** @type {globalThis['File']} */
const File = NativeFile ?? UndiciFile
// Shared, stateless encoder/decoder instances.
const textEncoder = new TextEncoder()
const textDecoder = new TextDecoder()
// https://fetch.spec.whatwg.org/#concept-bodyinit-extract
// Extracts a body ({ stream, source, length }) and a Content-Type value
// from a BodyInit-like `object`. Returns [body, type].
function extractBody (object, keepalive = false) {
  if (!ReadableStream) {
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }
  // 1. Let stream be null.
  let stream = null
  // 2. If object is a ReadableStream object, then set stream to object.
  if (object instanceof ReadableStream) {
    stream = object
  } else if (isBlobLike(object)) {
    // 3. Otherwise, if object is a Blob object, set stream to the
    // result of running object's get stream.
    stream = object.stream()
  } else {
    // 4. Otherwise, set stream to a new ReadableStream object, and set
    // up stream.
    // NOTE: the pull callback closes over `source`, which is assigned
    // further below; it is only read once the stream is consumed.
    stream = new ReadableStream({
      async pull (controller) {
        controller.enqueue(
          typeof source === 'string' ? textEncoder.encode(source) : source
        )
        queueMicrotask(() => readableStreamClose(controller))
      },
      start () {},
      type: undefined
    })
  }
  // 5. Assert: stream is a ReadableStream object.
  assert(isReadableStreamLike(stream))
  // 6. Let action be null.
  let action = null
  // 7. Let source be null.
  let source = null
  // 8. Let length be null.
  let length = null
  // 9. Let type be null.
  let type = null
  // 10. Switch on object:
  if (typeof object === 'string') {
    // Set source to the UTF-8 encoding of object.
    // Note: setting source to a Uint8Array here breaks some mocking assumptions.
    source = object
    // Set type to `text/plain;charset=UTF-8`.
    type = 'text/plain;charset=UTF-8'
  } else if (object instanceof URLSearchParams) {
    // URLSearchParams
    // spec says to run application/x-www-form-urlencoded on body.list
    // this is implemented in Node.js as apart of an URLSearchParams instance toString method
    // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490
    // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100
    // Set source to the result of running the application/x-www-form-urlencoded serializer with object's list.
    source = object.toString()
    // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
    type = 'application/x-www-form-urlencoded;charset=UTF-8'
  } else if (isArrayBuffer(object)) {
    // BufferSource/ArrayBuffer
    // Set source to a copy of the bytes held by object.
    source = new Uint8Array(object.slice())
  } else if (ArrayBuffer.isView(object)) {
    // BufferSource/ArrayBufferView
    // Set source to a copy of the bytes held by object.
    source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
  } else if (util.isFormDataLike(object)) {
    const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
    const prefix = `--${boundary}\r\nContent-Disposition: form-data`
    /*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
    const escape = (str) =>
      str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
    const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
    // Set action to this step: run the multipart/form-data
    // encoding algorithm, with object's entry list and UTF-8.
    // - This ensures that the body is immutable and can't be changed afterwords
    // - That the content-length is calculated in advance.
    // - And that all parts are pre-encoded and ready to be sent.
    const blobParts = []
    const rn = new Uint8Array([13, 10]) // '\r\n'
    length = 0
    let hasUnknownSizeValue = false
    for (const [name, value] of object) {
      if (typeof value === 'string') {
        const chunk = textEncoder.encode(prefix +
          `; name="${escape(normalizeLinefeeds(name))}"` +
          `\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
        blobParts.push(chunk)
        length += chunk.byteLength
      } else {
        const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` +
          (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' +
          `Content-Type: ${
            value.type || 'application/octet-stream'
          }\r\n\r\n`)
        blobParts.push(chunk, value, rn)
        if (typeof value.size === 'number') {
          length += chunk.byteLength + value.size + rn.byteLength
        } else {
          // Blob-like without a numeric size: total length is unknowable.
          hasUnknownSizeValue = true
        }
      }
    }
    const chunk = textEncoder.encode(`--${boundary}--`)
    blobParts.push(chunk)
    length += chunk.byteLength
    if (hasUnknownSizeValue) {
      length = null
    }
    // Set source to object.
    source = object
    action = async function * () {
      for (const part of blobParts) {
        if (part.stream) {
          yield * part.stream()
        } else {
          yield part
        }
      }
    }
    // Set type to `multipart/form-data; boundary=`,
    // followed by the multipart/form-data boundary string generated
    // by the multipart/form-data encoding algorithm.
    type = 'multipart/form-data; boundary=' + boundary
  } else if (isBlobLike(object)) {
    // Blob
    // Set source to object.
    source = object
    // Set length to object's size.
    length = object.size
    // If object's type attribute is not the empty byte sequence, set
    // type to its value.
    if (object.type) {
      type = object.type
    }
  } else if (typeof object[Symbol.asyncIterator] === 'function') {
    // If keepalive is true, then throw a TypeError.
    if (keepalive) {
      throw new TypeError('keepalive')
    }
    // If object is disturbed or locked, then throw a TypeError.
    if (util.isDisturbed(object) || object.locked) {
      throw new TypeError(
        'Response body object should not be disturbed or locked'
      )
    }
    stream =
      object instanceof ReadableStream ? object : ReadableStreamFrom(object)
  }
  // 11. If source is a byte sequence, then set action to a
  // step that returns source and length to source's length.
  if (typeof source === 'string' || util.isBuffer(source)) {
    length = Buffer.byteLength(source)
  }
  // 12. If action is non-null, then run these steps in in parallel:
  if (action != null) {
    // Run action.
    let iterator
    stream = new ReadableStream({
      async start () {
        iterator = action(object)[Symbol.asyncIterator]()
      },
      async pull (controller) {
        const { value, done } = await iterator.next()
        if (done) {
          // When running action is done, close stream.
          queueMicrotask(() => {
            controller.close()
          })
        } else {
          // Whenever one or more bytes are available and stream is not errored,
          // enqueue a Uint8Array wrapping an ArrayBuffer containing the available
          // bytes into stream.
          if (!isErrored(stream)) {
            controller.enqueue(new Uint8Array(value))
          }
        }
        return controller.desiredSize > 0
      },
      async cancel (reason) {
        await iterator.return()
      },
      type: undefined
    })
  }
  // 13. Let body be a body whose stream is stream, source is source,
  // and length is length.
  const body = { stream, source, length }
  // 14. Return (body, type).
  return [body, type]
}
// https://fetch.spec.whatwg.org/#bodyinit-safely-extract
// Like extractBody(), but first asserts that a ReadableStream input is
// neither disturbed nor locked.
function safelyExtractBody (object, keepalive = false) {
  if (!ReadableStream) {
    // istanbul ignore next
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }
  const isStreamInput = object instanceof ReadableStream
  if (isStreamInput) {
    // istanbul ignore next
    assert(!util.isDisturbed(object), 'The body has already been consumed.')
    // istanbul ignore next
    assert(!object.locked, 'The stream is locked.')
  }
  // Delegate the actual extraction.
  return extractBody(object, keepalive)
}
function cloneBody ( body ) {
// To clone a body body, run these steps:
// https://fetch.spec.whatwg.org/#concept-body-clone
// 1. Let « out1, out2 » be the result of teeing body’ s stream.
const [ out1 , out2 ] = body . stream . tee ( )
const out2Clone = structuredClone ( out2 , { transfer : [ out2 ] } )
// This, for whatever reasons, unrefs out2Clone which allows
// the process to exit by itself.
const [ , finalClone ] = out2Clone . tee ( )
// 2. Set body’ s stream to out1.
body . stream = out1
// 3. Return a body whose stream is out2 and other members are copied from body.
return {
stream : finalClone ,
length : body . length ,
source : body . source
}
}
// Async-iterates a body's chunks. A raw Uint8Array is yielded as a single
// chunk; otherwise the body's stream is drained (and flagged as used).
async function * consumeBody (body) {
  if (!body) {
    return
  }
  if (isUint8Array(body)) {
    yield body
    return
  }
  const stream = body.stream
  if (util.isDisturbed(stream)) {
    throw new TypeError('The body has already been consumed.')
  }
  if (stream.locked) {
    throw new TypeError('The stream is locked.')
  }
  // Compat.
  stream[kBodyUsed] = true
  yield * stream
}
// Throws an "AbortError" DOMException when the given request/response
// state has been aborted.
function throwIfAborted (state) {
  if (!state.aborted) {
    return
  }
  throw new DOMException('The operation was aborted.', 'AbortError')
}
function bodyMixinMethods ( instance ) {
const methods = {
blob ( ) {
// The blob() method steps are to return the result of
// running consume body with this and the following step
// given a byte sequence bytes: return a Blob whose
// contents are bytes and whose type attribute is this’ s
// MIME type.
return specConsumeBody ( this , ( bytes ) => {
let mimeType = bodyMimeType ( this )
if ( mimeType === 'failure' ) {
mimeType = ''
} else if ( mimeType ) {
mimeType = serializeAMimeType ( mimeType )
}
// Return a Blob whose contents are bytes and type attribute
// is mimeType.
return new Blob ( [ bytes ] , { type : mimeType } )
} , instance )
} ,
arrayBuffer ( ) {
// The arrayBuffer() method steps are to return the result
// of running consume body with this and the following step
// given a byte sequence bytes: return a new ArrayBuffer
// whose contents are bytes.
return specConsumeBody ( this , ( bytes ) => {
return new Uint8Array ( bytes ) . buffer
} , instance )
} ,
text ( ) {
// The text() method steps are to return the result of running
// consume body with this and UTF-8 decode.
return specConsumeBody ( this , utf8DecodeBytes , instance )
} ,
json ( ) {
// The json() method steps are to return the result of running
// consume body with this and parse JSON from bytes.
return specConsumeBody ( this , parseJSONFromBytes , instance )
} ,
async formData ( ) {
webidl . brandCheck ( this , instance )
throwIfAborted ( this [ kState ] )
const contentType = this . headers . get ( 'Content-Type' )
// If mimeType’ s essence is "multipart/form-data", then:
if ( /multipart\/form-data/ . test ( contentType ) ) {
const headers = { }
for ( const [ key , value ] of this . headers ) headers [ key . toLowerCase ( ) ] = value
const responseFormData = new FormData ( )
let busboy
try {
busboy = new Busboy ( {
headers ,
preservePath : true
} )
} catch ( err ) {
throw new DOMException ( ` ${ err } ` , 'AbortError' )
}
busboy . on ( 'field' , ( name , value ) => {
responseFormData . append ( name , value )
} )
busboy . on ( 'file' , ( name , value , filename , encoding , mimeType ) => {
const chunks = [ ]
if ( encoding === 'base64' || encoding . toLowerCase ( ) === 'base64' ) {
let base64chunk = ''
value . on ( 'data' , ( chunk ) => {
base64chunk += chunk . toString ( ) . replace ( /[\r\n]/gm , '' )
const end = base64chunk . length - base64chunk . length % 4
chunks . push ( Buffer . from ( base64chunk . slice ( 0 , end ) , 'base64' ) )
base64chunk = base64chunk . slice ( end )
} )
value . on ( 'end' , ( ) => {
chunks . push ( Buffer . from ( base64chunk , 'base64' ) )
responseFormData . append ( name , new File ( chunks , filename , { type : mimeType } ) )
} )
} else {
value . on ( 'data' , ( chunk ) => {
chunks . push ( chunk )
} )
value . on ( 'end' , ( ) => {
responseFormData . append ( name , new File ( chunks , filename , { type : mimeType } ) )
} )
}
} )
const busboyResolve = new Promise ( ( resolve , reject ) => {
busboy . on ( 'finish' , resolve )
busboy . on ( 'error' , ( err ) => reject ( new TypeError ( err ) ) )
} )
if ( this . body !== null ) for await ( const chunk of consumeBody ( this [ kState ] . body ) ) busboy . write ( chunk )
busboy . end ( )
await busboyResolve
return responseFormData
} else if ( /application\/x-www-form-urlencoded/ . test ( contentType ) ) {
// Otherwise, if mimeType’ s essence is "application/x-www-form-urlencoded", then:
// 1. Let entries be the result of parsing bytes.
let entries
try {
let text = ''
// application/x-www-form-urlencoded parser will keep the BOM.
// https://url.spec.whatwg.org/#concept-urlencoded-parser
// Note that streaming decoder is stateful and cannot be reused
const streamingDecoder = new TextDecoder ( 'utf-8' , { ignoreBOM : true } )
for await ( const chunk of consumeBody ( this [ kState ] . body ) ) {
if ( ! isUint8Array ( chunk ) ) {
throw new TypeError ( 'Expected Uint8Array chunk' )
}
text += streamingDecoder . decode ( chunk , { stream : true } )
}
text += streamingDecoder . decode ( )
entries = new URLSearchParams ( text )
} catch ( err ) {
// istanbul ignore next: Unclear when new URLSearchParams can fail on a string.
// 2. If entries is failure, then throw a TypeError.
throw Object . assign ( new TypeError ( ) , { cause : err } )
}
// 3. Return a new FormData object whose entries are entries.
const formData = new FormData ( )
for ( const [ name , value ] of entries ) {
formData . append ( name , value )
}
return formData
} else {
// Wait a tick before checking if the request has been aborted.
// Otherwise, a TypeError can be thrown when an AbortError should.
await Promise . resolve ( )
throwIfAborted ( this [ kState ] )
// Otherwise, throw a TypeError.
throw webidl . errors . exception ( {
header : ` ${ instance . name } .formData ` ,
message : 'Could not parse content as FormData.'
} )
}
}
}
return methods
}
// Installs the Body-mixin methods onto a class's prototype.
function mixinBody (prototype) {
  const methods = bodyMixinMethods(prototype)
  Object.assign(prototype.prototype, methods)
}
/**
 * @see https://fetch.spec.whatwg.org/#concept-body-consume-body
 * Fully reads `object`'s body and resolves with the value produced by
 * `convertBytesToJSValue` over the collected bytes.
 * @param {Response | Request} object
 * @param {(value: unknown) => unknown} convertBytesToJSValue
 * @param {Response | Request} instance
 */
async function specConsumeBody (object, convertBytesToJSValue, instance) {
  webidl.brandCheck(object, instance)
  throwIfAborted(object[kState])
  // 1. If object is unusable, then return a promise rejected
  //    with a TypeError.
  if (bodyUnusable(object[kState].body)) {
    throw new TypeError('Body is unusable')
  }
  // 2. Let promise be a new promise.
  const promise = createDeferredPromise()
  // 3. Let errorSteps given error be to reject promise with error.
  const errorSteps = (error) => promise.reject(error)
  // 4. Let successSteps given a byte sequence data be to resolve
  //    promise with the result of running convertBytesToJSValue
  //    with data. If that threw an exception, then run errorSteps
  //    with that exception.
  const successSteps = (data) => {
    try {
      promise.resolve(convertBytesToJSValue(data))
    } catch (e) {
      errorSteps(e)
    }
  }
  // 5. If object's body is null, then run successSteps with an
  //    empty byte sequence.
  if (object[kState].body == null) {
    successSteps(new Uint8Array())
    return promise.promise
  }
  // 6. Otherwise, fully read object's body given successSteps,
  //    errorSteps, and object's relevant global object.
  await fullyReadBody(object[kState].body, successSteps, errorSteps)
  // 7. Return promise.
  return promise.promise
}
// https://fetch.spec.whatwg.org/#body-unusable
// An object including the Body interface mixin is said to be unusable if
// its body is non-null and its body's stream is disturbed or locked.
function bodyUnusable (body) {
  if (body == null) {
    return false
  }
  return body.stream.locked || util.isDisturbed(body.stream)
}
/**
 * UTF-8 decodes `buffer`, stripping a leading BOM if present.
 * @see https://encoding.spec.whatwg.org/#utf-8-decode
 * @param {Buffer} buffer
 */
function utf8DecodeBytes (buffer) {
  if (buffer.length === 0) {
    return ''
  }
  // Per the spec's "peek three bytes" step: skip a UTF-8 BOM
  // (0xEF 0xBB 0xBF) when present.
  const hasBOM = buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF
  const bytes = hasBOM ? buffer.subarray(3) : buffer
  // Decode with replacement error mode.
  return textDecoder.decode(bytes)
}
/**
 * UTF-8 decodes `bytes` and parses the result as JSON.
 * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
 * @param {Uint8Array} bytes
 */
function parseJSONFromBytes (bytes) {
  const text = utf8DecodeBytes(bytes)
  return JSON.parse(text)
}
/**
 * Returns the parsed MIME type of a Request/Response body, or the string
 * 'failure' when no content-type header is present.
 * @see https://fetch.spec.whatwg.org/#concept-body-mime-type
 * @param {import('./response').Response | import('./request').Request} object
 */
function bodyMimeType (object) {
  const contentType = object[kState].headersList.get('content-type')
  return contentType === null ? 'failure' : parseMIMEType(contentType)
}
module . exports = {
extractBody ,
safelyExtractBody ,
cloneBody ,
mixinBody
}
/***/ } ) ,
/***/ 1037 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { MessageChannel , receiveMessageOnPort } = _ _nccwpck _require _ _ ( 1267 )
const corsSafeListedMethods = [ 'GET' , 'HEAD' , 'POST' ]
const corsSafeListedMethodsSet = new Set ( corsSafeListedMethods )
const nullBodyStatus = [ 101 , 204 , 205 , 304 ]
const redirectStatus = [ 301 , 302 , 303 , 307 , 308 ]
const redirectStatusSet = new Set ( redirectStatus )
// https://fetch.spec.whatwg.org/#block-bad-port
const badPorts = [
'1' , '7' , '9' , '11' , '13' , '15' , '17' , '19' , '20' , '21' , '22' , '23' , '25' , '37' , '42' , '43' , '53' , '69' , '77' , '79' ,
'87' , '95' , '101' , '102' , '103' , '104' , '109' , '110' , '111' , '113' , '115' , '117' , '119' , '123' , '135' , '137' ,
'139' , '143' , '161' , '179' , '389' , '427' , '465' , '512' , '513' , '514' , '515' , '526' , '530' , '531' , '532' ,
'540' , '548' , '554' , '556' , '563' , '587' , '601' , '636' , '989' , '990' , '993' , '995' , '1719' , '1720' , '1723' ,
'2049' , '3659' , '4045' , '5060' , '5061' , '6000' , '6566' , '6665' , '6666' , '6667' , '6668' , '6669' , '6697' ,
'10080'
]
const badPortsSet = new Set ( badPorts )
// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
const referrerPolicy = [
'' ,
'no-referrer' ,
'no-referrer-when-downgrade' ,
'same-origin' ,
'origin' ,
'strict-origin' ,
'origin-when-cross-origin' ,
'strict-origin-when-cross-origin' ,
'unsafe-url'
]
const referrerPolicySet = new Set ( referrerPolicy )
const requestRedirect = [ 'follow' , 'manual' , 'error' ]
const safeMethods = [ 'GET' , 'HEAD' , 'OPTIONS' , 'TRACE' ]
const safeMethodsSet = new Set ( safeMethods )
const requestMode = [ 'navigate' , 'same-origin' , 'no-cors' , 'cors' ]
const requestCredentials = [ 'omit' , 'same-origin' , 'include' ]
const requestCache = [
'default' ,
'no-store' ,
'reload' ,
'no-cache' ,
'force-cache' ,
'only-if-cached'
]
// https://fetch.spec.whatwg.org/#request-body-header-name
const requestBodyHeader = [
'content-encoding' ,
'content-language' ,
'content-location' ,
'content-type' ,
// See https://github.com/nodejs/undici/issues/2021
// 'Content-Length' is a forbidden header name, which is typically
// removed in the Headers implementation. However, undici doesn't
// filter out headers, so we add it here.
'content-length'
]
// https://fetch.spec.whatwg.org/#enumdef-requestduplex
const requestDuplex = [
'half'
]
// http://fetch.spec.whatwg.org/#forbidden-method
const forbiddenMethods = [ 'CONNECT' , 'TRACE' , 'TRACK' ]
const forbiddenMethodsSet = new Set ( forbiddenMethods )
const subresource = [
'audio' ,
'audioworklet' ,
'font' ,
'image' ,
'manifest' ,
'paintworklet' ,
'script' ,
'style' ,
'track' ,
'video' ,
'xslt' ,
''
]
const subresourceSet = new Set ( subresource )
/** @type {globalThis['DOMException']} */
const DOMException = globalThis.DOMException ?? (() => {
  // DOMException was only made a global in Node v17.0.0,
  // but fetch supports >= v16.8.
  try {
    // atob rejects '~' and throws an "InvalidCharacterError" DOMException.
    atob('~')
  } catch (err) {
    // Recover the DOMException constructor from the thrown instance's
    // prototype chain.
    return Object.getPrototypeOf(err).constructor
  }
})()
// Lazily-created MessageChannel backing the structuredClone fallback below.
let channel

/** @type {globalThis['structuredClone']} */
const structuredClone =
  globalThis.structuredClone ??
  // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js
  // structuredClone was added in v17.0.0, but fetch supports v16.8
  function structuredClone (value, options = undefined) {
    if (arguments.length === 0) {
      throw new TypeError('missing argument')
    }

    if (!channel) {
      channel = new MessageChannel()
    }
    // Unref both ports so the long-lived channel never keeps the process alive.
    channel.port1.unref()
    channel.port2.unref()
    // postMessage serializes via the structured-clone algorithm; reading the
    // message back synchronously yields the cloned value.
    channel.port1.postMessage(value, options?.transfer)
    return receiveMessageOnPort(channel.port2).message
  }
module . exports = {
DOMException ,
structuredClone ,
subresource ,
forbiddenMethods ,
requestBodyHeader ,
referrerPolicy ,
requestRedirect ,
requestMode ,
requestCredentials ,
requestCache ,
redirectStatus ,
corsSafeListedMethods ,
nullBodyStatus ,
safeMethods ,
badPorts ,
requestDuplex ,
subresourceSet ,
badPortsSet ,
redirectStatusSet ,
corsSafeListedMethodsSet ,
safeMethodsSet ,
forbiddenMethodsSet ,
referrerPolicySet
}
/***/ } ) ,
/***/ 685 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
const assert = _ _nccwpck _require _ _ ( 9491 )
const { atob } = _ _nccwpck _require _ _ ( 4300 )
const { isomorphicDecode } = _ _nccwpck _require _ _ ( 2538 )
// Shared UTF-8 encoder used by stringPercentDecode.
const encoder = new TextEncoder()

/**
 * Matches strings composed solely of HTTP token code points.
 * @see https://mimesniff.spec.whatwg.org/#http-token-code-point
 */
// Fix: the unescaped '+-.' previously formed a character range
// (U+002B..U+002E) that wrongly admitted ',' (U+002C). '-' is escaped so
// only '+', '-' and '.' themselves are allowed, per the spec's list.
const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+\-.^_|~A-Za-z0-9]+$/
// Matches a single HTTP whitespace code point (LF, CR, TAB or SP).
const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line
/**
 * Matches strings composed solely of HTTP quoted-string token code points.
 * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
 */
// Fix: the previous pattern was unanchored and matched a single character,
// so .test() succeeded when ANY character was valid (instead of all of
// them), and the literal '|' separators were themselves inside the class.
// Anchor the pattern and require every character to be a quoted-string
// token code point.
const HTTP_QUOTED_STRING_TOKENS = /^[\u0009\u0020-\u007E\u0080-\u00FF]+$/ // eslint-disable-line
// https://fetch.spec.whatwg.org/#data-url-processor
/** @param {URL} dataURL */
function dataURLProcessor ( dataURL ) {
// 1. Assert: dataURL’ s scheme is "data".
assert ( dataURL . protocol === 'data:' )
// 2. Let input be the result of running the URL
// serializer on dataURL with exclude fragment
// set to true.
let input = URLSerializer ( dataURL , true )
// 3. Remove the leading "data:" string from input.
input = input . slice ( 5 )
// 4. Let position point at the start of input.
const position = { position : 0 }
// 5. Let mimeType be the result of collecting a
// sequence of code points that are not equal
// to U+002C (,), given position.
let mimeType = collectASequenceOfCodePointsFast (
',' ,
input ,
position
)
// 6. Strip leading and trailing ASCII whitespace
// from mimeType.
// Undici implementation note: we need to store the
// length because if the mimetype has spaces removed,
// the wrong amount will be sliced from the input in
// step #9
const mimeTypeLength = mimeType . length
mimeType = removeASCIIWhitespace ( mimeType , true , true )
// 7. If position is past the end of input, then
// return failure
if ( position . position >= input . length ) {
return 'failure'
}
// 8. Advance position by 1.
position . position ++
// 9. Let encodedBody be the remainder of input.
const encodedBody = input . slice ( mimeTypeLength + 1 )
// 10. Let body be the percent-decoding of encodedBody.
let body = stringPercentDecode ( encodedBody )
// 11. If mimeType ends with U+003B (;), followed by
// zero or more U+0020 SPACE, followed by an ASCII
// case-insensitive match for "base64", then:
if ( /;(\u0020){0,}base64$/i . test ( mimeType ) ) {
// 1. Let stringBody be the isomorphic decode of body.
const stringBody = isomorphicDecode ( body )
// 2. Set body to the forgiving-base64 decode of
// stringBody.
body = forgivingBase64 ( stringBody )
// 3. If body is failure, then return failure.
if ( body === 'failure' ) {
return 'failure'
}
// 4. Remove the last 6 code points from mimeType.
mimeType = mimeType . slice ( 0 , - 6 )
// 5. Remove trailing U+0020 SPACE code points from mimeType,
// if any.
mimeType = mimeType . replace ( /(\u0020)+$/ , '' )
// 6. Remove the last U+003B (;) code point from mimeType.
mimeType = mimeType . slice ( 0 , - 1 )
}
// 12. If mimeType starts with U+003B (;), then prepend
// "text/plain" to mimeType.
if ( mimeType . startsWith ( ';' ) ) {
mimeType = 'text/plain' + mimeType
}
// 13. Let mimeTypeRecord be the result of parsing
// mimeType.
let mimeTypeRecord = parseMIMEType ( mimeType )
// 14. If mimeTypeRecord is failure, then set
// mimeTypeRecord to text/plain;charset=US-ASCII.
if ( mimeTypeRecord === 'failure' ) {
mimeTypeRecord = parseMIMEType ( 'text/plain;charset=US-ASCII' )
}
// 15. Return a new data: URL struct whose MIME
// type is mimeTypeRecord and body is body.
// https://fetch.spec.whatwg.org/#data-url-struct
return { mimeType : mimeTypeRecord , body }
}
// https://url.spec.whatwg.org/#concept-url-serializer
/**
 * Serializes a URL, optionally omitting its fragment.
 * @param {URL} url
 * @param {boolean} [excludeFragment=false]
 * @returns {string}
 */
function URLSerializer (url, excludeFragment = false) {
  const href = url.href

  if (!excludeFragment || url.hash.length === 0) {
    return href
  }

  // Chop the fragment (including the leading '#') off the serialization.
  return href.substring(0, href.length - url.hash.length)
}
// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
/**
 * Collects the longest run of code points starting at `position` for which
 * `condition` holds, advancing `position` past the run.
 * @param {(char: string) => boolean} condition
 * @param {string} input
 * @param {{ position: number }} position mutated in place
 * @returns {string} the collected run (possibly empty)
 */
function collectASequenceOfCodePoints (condition, input, position) {
  const start = position.position
  let end = start

  // Scan forward while the condition holds and input remains.
  while (end < input.length && condition(input[end])) {
    end++
  }

  position.position = end
  return input.slice(start, end)
}
/**
 * A faster collectASequenceOfCodePoints that only works when comparing
 * against a single terminator character.
 * @param {string} char the terminator to stop at
 * @param {string} input
 * @param {{ position: number }} position mutated in place
 * @returns {string} everything from `position` up to (excluding) `char`,
 *   or the rest of the input when `char` does not occur
 */
function collectASequenceOfCodePointsFast (char, input, position) {
  const start = position.position
  const idx = input.indexOf(char, start)

  // No terminator: consume the remainder of the input.
  const end = idx === -1 ? input.length : idx
  position.position = end
  return input.slice(start, end)
}
// https://url.spec.whatwg.org/#string-percent-decode
/**
 * UTF-8 encodes a string and percent-decodes the resulting bytes.
 * @param {string} input
 * @returns {Uint8Array}
 */
function stringPercentDecode (input) {
  return percentDecode(encoder.encode(input))
}
// https://url.spec.whatwg.org/#percent-decode
/**
 * Percent-decodes a byte sequence: each '%XX' escape (two hex digits)
 * becomes the byte 0xXX; malformed escapes pass through verbatim.
 * @param {Uint8Array} input
 * @returns {Uint8Array}
 */
function percentDecode (input) {
  /** @type {number[]} */
  const bytes = []

  for (let i = 0; i < input.length; i++) {
    const byte = input[i]

    // Anything other than '%' is copied straight through.
    if (byte !== 0x25) {
      bytes.push(byte)
      continue
    }

    // A '%' is an escape only when followed by exactly two hex digits.
    // (Past the end of the input, fromCharCode(NaN) yields NULs, which the
    // hex test rejects — so a trailing '%' is emitted verbatim.)
    const hexPair = String.fromCharCode(input[i + 1], input[i + 2])
    if (/^[0-9A-Fa-f]{2}$/i.test(hexPair)) {
      bytes.push(Number.parseInt(hexPair, 16))
      // Skip the two consumed hex-digit bytes.
      i += 2
    } else {
      bytes.push(0x25)
    }
  }

  return Uint8Array.from(bytes)
}
// https://mimesniff.spec.whatwg.org/#parse-a-mime-type
/**
 * Parses a MIME type string into a record, or returns 'failure'.
 * @param {string} input
 * @returns {{ type: string, subtype: string, parameters: Map<string, string>, essence: string }|'failure'}
 */
function parseMIMEType (input) {
  // 1. Remove any leading and trailing HTTP whitespace
  // from input.
  input = removeHTTPWhitespace(input, true, true)

  // 2. Let position be a position variable for input,
  // initially pointing at the start of input.
  const position = { position: 0 }

  // 3. Let type be the result of collecting a sequence
  // of code points that are not U+002F (/) from
  // input, given position.
  const type = collectASequenceOfCodePointsFast(
    '/',
    input,
    position
  )

  // 4. If type is the empty string or does not solely
  // contain HTTP token code points, then return failure.
  // https://mimesniff.spec.whatwg.org/#http-token-code-point
  if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) {
    return 'failure'
  }

  // 5. If position is past the end of input, then return
  // failure. (When no '/' exists the collection above leaves position at
  // input.length; the empty-subtype check in step 9 then rejects it.)
  if (position.position > input.length) {
    return 'failure'
  }

  // 6. Advance position by 1. (This skips past U+002F (/).)
  position.position++

  // 7. Let subtype be the result of collecting a sequence of
  // code points that are not U+003B (;) from input, given
  // position.
  let subtype = collectASequenceOfCodePointsFast(
    ';',
    input,
    position
  )

  // 8. Remove any trailing HTTP whitespace from subtype.
  subtype = removeHTTPWhitespace(subtype, false, true)

  // 9. If subtype is the empty string or does not solely
  // contain HTTP token code points, then return failure.
  if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) {
    return 'failure'
  }

  const typeLowercase = type.toLowerCase()
  const subtypeLowercase = subtype.toLowerCase()

  // 10. Let mimeType be a new MIME type record whose type
  // is type, in ASCII lowercase, and subtype is subtype,
  // in ASCII lowercase.
  // https://mimesniff.spec.whatwg.org/#mime-type
  const mimeType = {
    type: typeLowercase,
    subtype: subtypeLowercase,
    /** @type {Map<string, string>} */
    parameters: new Map(),
    // https://mimesniff.spec.whatwg.org/#mime-type-essence
    essence: `${typeLowercase}/${subtypeLowercase}`
  }

  // 11. While position is not past the end of input:
  while (position.position < input.length) {
    // 1. Advance position by 1. (This skips past U+003B (;).)
    position.position++

    // 2. Collect a sequence of code points that are HTTP
    // whitespace from input given position.
    collectASequenceOfCodePoints(
      // https://fetch.spec.whatwg.org/#http-whitespace
      char => HTTP_WHITESPACE_REGEX.test(char),
      input,
      position
    )

    // 3. Let parameterName be the result of collecting a
    // sequence of code points that are not U+003B (;)
    // or U+003D (=) from input, given position.
    let parameterName = collectASequenceOfCodePoints(
      (char) => char !== ';' && char !== '=',
      input,
      position
    )

    // 4. Set parameterName to parameterName, in ASCII
    // lowercase.
    parameterName = parameterName.toLowerCase()

    // 5. If position is not past the end of input, then:
    if (position.position < input.length) {
      // 1. If the code point at position within input is
      // U+003B (;), then continue (parameter had no '=value' part).
      if (input[position.position] === ';') {
        continue
      }

      // 2. Advance position by 1. (This skips past U+003D (=).)
      position.position++
    }

    // 6. If position is past the end of input, then break.
    if (position.position > input.length) {
      break
    }

    // 7. Let parameterValue be null.
    let parameterValue = null

    // 8. If the code point at position within input is
    // U+0022 ("), then:
    if (input[position.position] === '"') {
      // 1. Set parameterValue to the result of collecting
      // an HTTP quoted string from input, given position
      // and the extract-value flag.
      parameterValue = collectAnHTTPQuotedString(input, position, true)

      // 2. Collect a sequence of code points that are not
      // U+003B (;) from input, given position.
      // (Discards any junk between the closing quote and the next ';'.)
      collectASequenceOfCodePointsFast(
        ';',
        input,
        position
      )

      // 9. Otherwise:
    } else {
      // 1. Set parameterValue to the result of collecting
      // a sequence of code points that are not U+003B (;)
      // from input, given position.
      parameterValue = collectASequenceOfCodePointsFast(
        ';',
        input,
        position
      )

      // 2. Remove any trailing HTTP whitespace from parameterValue.
      parameterValue = removeHTTPWhitespace(parameterValue, false, true)

      // 3. If parameterValue is the empty string, then continue.
      if (parameterValue.length === 0) {
        continue
      }
    }

    // 10. If all of the following are true
    // - parameterName is not the empty string
    // - parameterName solely contains HTTP token code points
    // - parameterValue solely contains HTTP quoted-string token code points
    // - mimeType’s parameters[parameterName] does not exist
    // then set mimeType’s parameters[parameterName] to parameterValue
    // (first occurrence of a parameter name wins).
    if (
      parameterName.length !== 0 &&
      HTTP_TOKEN_CODEPOINTS.test(parameterName) &&
      (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&
      !mimeType.parameters.has(parameterName)
    ) {
      mimeType.parameters.set(parameterName, parameterValue)
    }
  }

  // 12. Return mimeType.
  return mimeType
}
// https://infra.spec.whatwg.org/#forgiving-base64-decode
/**
 * Decodes base64 leniently: strips ASCII whitespace and up to two trailing
 * '=' padding characters before validating.
 * @param {string} data
 * @returns {Uint8Array|'failure'}
 */
function forgivingBase64 (data) {
  // 1. Remove all ASCII whitespace from data.
  data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line

  // 2. When the length is a multiple of 4, drop one or two trailing '='
  // padding code points.
  if (data.length % 4 === 0) {
    data = data.replace(/=?=$/, '')
  }

  // 3. A remainder of 1 can never be valid base64.
  if (data.length % 4 === 1) {
    return 'failure'
  }

  // 4. Only '+', '/' and ASCII alphanumerics are permitted.
  if (/[^+/0-9A-Za-z]/.test(data)) {
    return 'failure'
  }

  // Convert the decoded binary string into a byte array.
  const binary = atob(data)
  const bytes = new Uint8Array(binary.length)
  for (let i = 0; i < binary.length; i++) {
    bytes[i] = binary.charCodeAt(i)
  }
  return bytes
}
// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string
// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string
/**
 * Collects an HTTP quoted-string starting at `position` (which must point
 * at a '"'), advancing `position` past it.
 * @param {string} input
 * @param {{ position: number }} position mutated in place
 * @param {boolean?} extractValue when true, return the unescaped value;
 *   otherwise return the raw quoted span including the quotes
 * @returns {string}
 */
function collectAnHTTPQuotedString (input, position, extractValue) {
  // 1. Let positionStart be position.
  const positionStart = position.position
  // 2. Let value be the empty string.
  let value = ''

  // 3. Assert: the code point at position within input
  // is U+0022 (").
  assert(input[position.position] === '"')

  // 4. Advance position by 1.
  position.position++

  // 5. While true:
  while (true) {
    // 1. Append the result of collecting a sequence of code points
    // that are not U+0022 (") or U+005C (\) from input, given
    // position, to value.
    value += collectASequenceOfCodePoints(
      (char) => char !== '"' && char !== '\\',
      input,
      position
    )

    // 2. If position is past the end of input, then break.
    // (Unterminated quoted strings are accepted as-is.)
    if (position.position >= input.length) {
      break
    }

    // 3. Let quoteOrBackslash be the code point at position within
    // input.
    const quoteOrBackslash = input[position.position]

    // 4. Advance position by 1.
    position.position++

    // 5. If quoteOrBackslash is U+005C (\), then:
    if (quoteOrBackslash === '\\') {
      // 1. If position is past the end of input, then append
      // U+005C (\) to value and break. (A trailing backslash is literal.)
      if (position.position >= input.length) {
        value += '\\'
        break
      }

      // 2. Append the code point at position within input to value.
      // (The escaped character is taken verbatim, whatever it is.)
      value += input[position.position]

      // 3. Advance position by 1.
      position.position++

      // 6. Otherwise:
    } else {
      // 1. Assert: quoteOrBackslash is U+0022 (").
      assert(quoteOrBackslash === '"')

      // 2. Break.
      break
    }
  }

  // 6. If the extract-value flag is set, then return value.
  if (extractValue) {
    return value
  }

  // 7. Return the code points from positionStart to position,
  // inclusive, within input.
  return input.slice(positionStart, position.position)
}
/**
 * Serializes a MIME type record back into a "type/subtype;name=value"
 * string, quoting parameter values where required.
 * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type
 * @param {{ essence: string, parameters: Map<string, string> }} mimeType
 * @returns {string}
 */
function serializeAMimeType (mimeType) {
  assert(mimeType !== 'failure')
  const { parameters, essence } = mimeType

  // Start from "type/subtype" and append each ";name=value" parameter.
  let serialization = essence

  for (let [name, value] of parameters.entries()) {
    serialization += ';' + name + '='

    // Values that are not solely HTTP token code points (including the
    // empty string) must be emitted as a quoted-string, with every
    // occurrence of '"' or '\' escaped by a backslash.
    if (!HTTP_TOKEN_CODEPOINTS.test(value)) {
      value = '"' + value.replace(/(\\|")/g, '\\$1') + '"'
    }

    serialization += value
  }

  return serialization
}
/**
 * Whether `char` is an HTTP whitespace code point (CR, LF, TAB or SP).
 * Note: form feed is NOT HTTP whitespace (contrast isASCIIWhitespace).
 * @see https://fetch.spec.whatwg.org/#http-whitespace
 * @param {string} char
 * @returns {boolean}
 */
function isHTTPWhiteSpace (char) {
  switch (char) {
    case '\r':
    case '\n':
    case '\t':
    case ' ':
      return true
    default:
      return false
  }
}
/**
 * Strips leading and/or trailing HTTP whitespace (CR, LF, TAB, SP).
 * @see https://fetch.spec.whatwg.org/#http-whitespace
 * @param {string} str
 * @param {boolean} [leading=true]
 * @param {boolean} [trailing=true]
 * @returns {string}
 */
function removeHTTPWhitespace (str, leading = true, trailing = true) {
  const isWS = (c) => c === '\r' || c === '\n' || c === '\t' || c === ' '
  let start = 0
  let end = str.length - 1

  if (leading) {
    while (start < str.length && isWS(str[start])) start++
  }
  if (trailing) {
    while (end > 0 && isWS(str[end])) end--
  }

  return str.slice(start, end + 1)
}
/**
 * Whether `char` is an ASCII whitespace code point (CR, LF, TAB, FF or SP).
 * @see https://infra.spec.whatwg.org/#ascii-whitespace
 * @param {string} char
 * @returns {boolean}
 */
function isASCIIWhitespace (char) {
  switch (char) {
    case '\r':
    case '\n':
    case '\t':
    case '\f':
    case ' ':
      return true
    default:
      return false
  }
}
/**
 * Strips leading and/or trailing ASCII whitespace (CR, LF, TAB, FF, SP).
 * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace
 * @param {string} str
 * @param {boolean} [leading=true]
 * @param {boolean} [trailing=true]
 * @returns {string}
 */
function removeASCIIWhitespace (str, leading = true, trailing = true) {
  const isWS = (c) => c === '\r' || c === '\n' || c === '\t' || c === '\f' || c === ' '
  let start = 0
  let end = str.length - 1

  if (leading) {
    while (start < str.length && isWS(str[start])) start++
  }
  if (trailing) {
    while (end > 0 && isWS(str[end])) end--
  }

  return str.slice(start, end + 1)
}
module . exports = {
dataURLProcessor ,
URLSerializer ,
collectASequenceOfCodePoints ,
collectASequenceOfCodePointsFast ,
stringPercentDecode ,
parseMIMEType ,
collectAnHTTPQuotedString ,
serializeAMimeType
}
/***/ } ) ,
/***/ 8511 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { Blob , File : NativeFile } = _ _nccwpck _require _ _ ( 4300 )
const { types } = _ _nccwpck _require _ _ ( 3837 )
const { kState } = _ _nccwpck _require _ _ ( 5861 )
const { isBlobLike } = _ _nccwpck _require _ _ ( 2538 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { parseMIMEType , serializeAMimeType } = _ _nccwpck _require _ _ ( 685 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
const encoder = new TextEncoder ( )
class File extends Blob {
constructor ( fileBits , fileName , options = { } ) {
// The File constructor is invoked with two or three parameters, depending
// on whether the optional dictionary parameter is used. When the File()
// constructor is invoked, user agents must run the following steps:
webidl . argumentLengthCheck ( arguments , 2 , { header : 'File constructor' } )
fileBits = webidl . converters [ 'sequence<BlobPart>' ] ( fileBits )
fileName = webidl . converters . USVString ( fileName )
options = webidl . converters . FilePropertyBag ( options )
// 1. Let bytes be the result of processing blob parts given fileBits and
// options.
// Note: Blob handles this for us
// 2. Let n be the fileName argument to the constructor.
const n = fileName
// 3. Process FilePropertyBag dictionary argument by running the following
// substeps:
// 1. If the type member is provided and is not the empty string, let t
// be set to the type dictionary member. If t contains any characters
// outside the range U+0020 to U+007E, then set t to the empty string
// and return from these substeps.
// 2. Convert every character in t to ASCII lowercase.
let t = options . type
let d
// eslint-disable-next-line no-labels
substep : {
if ( t ) {
t = parseMIMEType ( t )
if ( t === 'failure' ) {
t = ''
// eslint-disable-next-line no-labels
break substep
}
t = serializeAMimeType ( t ) . toLowerCase ( )
}
// 3. If the lastModified member is provided, let d be set to the
// lastModified dictionary member. If it is not provided, set d to the
// current date and time represented as the number of milliseconds since
// the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
d = options . lastModified
}
// 4. Return a new File object F such that:
// F refers to the bytes byte sequence.
// F.size is set to the number of total bytes in bytes.
// F.name is set to n.
// F.type is set to t.
// F.lastModified is set to d.
super ( processBlobParts ( fileBits , options ) , { type : t } )
this [ kState ] = {
name : n ,
lastModified : d ,
type : t
}
}
get name ( ) {
webidl . brandCheck ( this , File )
return this [ kState ] . name
}
get lastModified ( ) {
webidl . brandCheck ( this , File )
return this [ kState ] . lastModified
}
get type ( ) {
webidl . brandCheck ( this , File )
return this [ kState ] . type
}
}
/**
 * A File-compatible wrapper around an arbitrary Blob-like object that
 * delegates all data access to the wrapped object.
 */
class FileLike {
  constructor (blobLike, fileName, options = {}) {
    // TODO: argument idl type check

    // The File constructor is invoked with two or three parameters, depending
    // on whether the optional dictionary parameter is used. When the File()
    // constructor is invoked, user agents must run the following steps:

    // 1. Let bytes be the result of processing blob parts given fileBits and
    // options.

    // 2. Let n be the fileName argument to the constructor.
    const n = fileName

    // 3. Process FilePropertyBag dictionary argument by running the following
    // substeps:

    // 1. If the type member is provided and is not the empty string, let t
    // be set to the type dictionary member. If t contains any characters
    // outside the range U+0020 to U+007E, then set t to the empty string
    // and return from these substeps.
    // TODO
    const t = options.type

    // 2. Convert every character in t to ASCII lowercase.
    // TODO

    // 3. If the lastModified member is provided, let d be set to the
    // lastModified dictionary member. If it is not provided, set d to the
    // current date and time represented as the number of milliseconds since
    // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
    const d = options.lastModified ?? Date.now()

    // 4. Return a new File object F such that:
    // F refers to the bytes byte sequence.
    // F.size is set to the number of total bytes in bytes.
    // F.name is set to n.
    // F.type is set to t.
    // F.lastModified is set to d.
    this[kState] = {
      blobLike,
      name: n,
      type: t,
      lastModified: d
    }
  }

  // The methods below delegate directly to the wrapped blob-like object.
  stream (...args) {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.stream(...args)
  }

  arrayBuffer (...args) {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.arrayBuffer(...args)
  }

  slice (...args) {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.slice(...args)
  }

  text (...args) {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.text(...args)
  }

  get size () {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.size
  }

  get type () {
    webidl.brandCheck(this, FileLike)

    return this[kState].blobLike.type
  }

  get name () {
    webidl.brandCheck(this, FileLike)

    return this[kState].name
  }

  get lastModified () {
    webidl.brandCheck(this, FileLike)

    return this[kState].lastModified
  }

  // Brand the wrapper so Object.prototype.toString reports '[object File]'.
  get [Symbol.toStringTag] () {
    return 'File'
  }
}
Object . defineProperties ( File . prototype , {
[ Symbol . toStringTag ] : {
value : 'File' ,
configurable : true
} ,
name : kEnumerableProperty ,
lastModified : kEnumerableProperty
} )
webidl . converters . Blob = webidl . interfaceConverter ( Blob )
webidl . converters . BlobPart = function ( V , opts ) {
if ( webidl . util . Type ( V ) === 'Object' ) {
if ( isBlobLike ( V ) ) {
return webidl . converters . Blob ( V , { strict : false } )
}
if (
ArrayBuffer . isView ( V ) ||
types . isAnyArrayBuffer ( V )
) {
return webidl . converters . BufferSource ( V , opts )
}
}
return webidl . converters . USVString ( V , opts )
}
webidl . converters [ 'sequence<BlobPart>' ] = webidl . sequenceConverter (
webidl . converters . BlobPart
)
// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag
webidl . converters . FilePropertyBag = webidl . dictionaryConverter ( [
{
key : 'lastModified' ,
converter : webidl . converters [ 'long long' ] ,
get defaultValue ( ) {
return Date . now ( )
}
} ,
{
key : 'type' ,
converter : webidl . converters . DOMString ,
defaultValue : ''
} ,
{
key : 'endings' ,
converter : ( value ) => {
value = webidl . converters . DOMString ( value )
value = value . toLowerCase ( )
if ( value !== 'native' ) {
value = 'transparent'
}
return value
} ,
defaultValue : 'transparent'
}
] )
/**
 * Converts the parts passed to the File constructor into the sequence of
 * byte sources (Uint8Arrays and Blobs) consumed by Blob's constructor.
 * @see https://www.w3.org/TR/FileAPI/#process-blob-parts
 * @param {(NodeJS.TypedArray|Blob|string)[]} parts
 * @param {{ type: string, endings: string }} options
 * @returns {(Uint8Array|Blob)[]}
 */
function processBlobParts (parts, options) {
  /** @type {(Uint8Array|Blob)[]} */
  const bytes = []

  for (const element of parts) {
    if (typeof element === 'string') {
      // USVString part: optionally normalize line endings, then UTF-8 encode.
      const s = options.endings === 'native'
        ? convertLineEndingsNative(element)
        : element
      bytes.push(encoder.encode(s))
    } else if (
      types.isAnyArrayBuffer(element) ||
      types.isTypedArray(element)
    ) {
      // BufferSource part: wrap its underlying bytes in a Uint8Array.
      if (element.buffer) {
        // TypedArray/DataView: honor its byte offset and length.
        bytes.push(
          new Uint8Array(element.buffer, element.byteOffset, element.byteLength)
        )
      } else {
        // Bare ArrayBuffer.
        bytes.push(new Uint8Array(element))
      }
    } else if (isBlobLike(element)) {
      // Blob part: passed through as-is.
      bytes.push(element)
    }
    // Anything else is silently dropped (matches original behavior).
  }

  return bytes
}
/**
 * Rewrites every line ending in `s` to the platform's native line ending
 * (CRLF on Windows, LF everywhere else).
 * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native
 * @param {string} s
 * @returns {string}
 */
function convertLineEndingsNative (s) {
  const nativeLineEnding = process.platform === 'win32' ? '\r\n' : '\n'
  return s.replace(/\r?\n/g, nativeLineEnding)
}
// If this function is moved to ./util.js, some tools (such as
// rollup) will warn about circular dependencies. See:
// https://github.com/nodejs/undici/issues/1629
/**
 * Duck-type check for File-like objects: a native File, an undici File, or
 * any object branded 'File' that exposes stream() or arrayBuffer().
 */
function isFileLike (object) {
  if (NativeFile && object instanceof NativeFile) {
    return true
  }

  if (object instanceof File) {
    return true
  }

  return (
    object &&
    (typeof object.stream === 'function' ||
      typeof object.arrayBuffer === 'function') &&
    object[Symbol.toStringTag] === 'File'
  )
}
module . exports = { File , FileLike , isFileLike }
/***/ } ) ,
/***/ 2015 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { isBlobLike , toUSVString , makeIterator } = _ _nccwpck _require _ _ ( 2538 )
const { kState } = _ _nccwpck _require _ _ ( 5861 )
const { File : UndiciFile , FileLike , isFileLike } = _ _nccwpck _require _ _ ( 8511 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { Blob , File : NativeFile } = _ _nccwpck _require _ _ ( 4300 )
/** @type {globalThis['File']} */
// Prefer the runtime-provided File class; fall back to undici's userland
// implementation when the runtime does not expose one.
const File = NativeFile ?? UndiciFile
// https://xhr.spec.whatwg.org/#formdata
// https://xhr.spec.whatwg.org/#formdata
// Spec-compliant FormData: an ordered entry list of { name, value } records
// where value is either a scalar string or a File/FileLike.
class FormData {
  constructor (form) {
    // The <form>-element overload only exists in browsers; here the sole
    // legal call is `new FormData()` with no argument.
    if (form !== undefined) {
      throw webidl.errors.conversionFailed({
        prefix: 'FormData constructor',
        argument: 'Argument 1',
        types: ['undefined']
      })
    }

    // Entry list, in insertion order.
    this[kState] = []
  }

  append (name, value, filename = undefined) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' })

    // A filename argument is only valid with the Blob overload.
    if (arguments.length === 3 && !isBlobLike(value)) {
      throw new TypeError(
        "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'"
      )
    }

    // 1. Let value be value if given; otherwise blobValue.

    name = webidl.converters.USVString(name)
    value = isBlobLike(value)
      ? webidl.converters.Blob(value, { strict: false })
      : webidl.converters.USVString(value)
    filename = arguments.length === 3
      ? webidl.converters.USVString(filename)
      : undefined

    // 2. Let entry be the result of creating an entry with
    // name, value, and filename if given.
    const entry = makeEntry(name, value, filename)

    // 3. Append entry to this's entry list.
    this[kState].push(entry)
  }

  delete (name) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' })

    name = webidl.converters.USVString(name)

    // The delete(name) method steps are to remove all entries whose name
    // is name from this's entry list.
    this[kState] = this[kState].filter(entry => entry.name !== name)
  }

  get (name) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' })

    name = webidl.converters.USVString(name)

    // 1. If there is no entry whose name is name in this's entry list,
    // then return null.
    const idx = this[kState].findIndex((entry) => entry.name === name)
    if (idx === -1) {
      return null
    }

    // 2. Return the value of the first entry whose name is name from
    // this's entry list.
    return this[kState][idx].value
  }

  getAll (name) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' })

    name = webidl.converters.USVString(name)

    // 1. If there is no entry whose name is name in this's entry list,
    // then return the empty list.
    // 2. Return the values of all entries whose name is name, in order,
    // from this's entry list.
    return this[kState]
      .filter((entry) => entry.name === name)
      .map((entry) => entry.value)
  }

  has (name) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' })

    name = webidl.converters.USVString(name)

    // The has(name) method steps are to return true if there is an entry
    // whose name is name in this's entry list; otherwise false.
    return this[kState].findIndex((entry) => entry.name === name) !== -1
  }

  set (name, value, filename = undefined) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' })

    // A filename argument is only valid with the Blob overload.
    if (arguments.length === 3 && !isBlobLike(value)) {
      throw new TypeError(
        "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'"
      )
    }

    // The set(name, value) and set(name, blobValue, filename) method steps
    // are:

    // 1. Let value be value if given; otherwise blobValue.

    name = webidl.converters.USVString(name)
    value = isBlobLike(value)
      ? webidl.converters.Blob(value, { strict: false })
      : webidl.converters.USVString(value)
    // NOTE(review): append() converts filename via webidl.converters.USVString
    // while set() calls toUSVString directly — confirm the two are the same
    // converter before relying on identical coercion behavior.
    filename = arguments.length === 3
      ? toUSVString(filename)
      : undefined

    // 2. Let entry be the result of creating an entry with name, value, and
    // filename if given.
    const entry = makeEntry(name, value, filename)

    // 3. If there are entries in this's entry list whose name is name, then
    // replace the first such entry with entry and remove the others.
    const idx = this[kState].findIndex((entry) => entry.name === name)
    if (idx !== -1) {
      // Keep everything before the first match, insert the new entry there,
      // and drop any later entries with the same name.
      this[kState] = [
        ...this[kState].slice(0, idx),
        entry,
        ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name)
      ]
    } else {
      // 4. Otherwise, append entry to this's entry list.
      this[kState].push(entry)
    }
  }

  entries () {
    webidl.brandCheck(this, FormData)

    return makeIterator(
      () => this[kState].map(pair => [pair.name, pair.value]),
      'FormData',
      'key+value'
    )
  }

  keys () {
    webidl.brandCheck(this, FormData)

    return makeIterator(
      () => this[kState].map(pair => [pair.name, pair.value]),
      'FormData',
      'key'
    )
  }

  values () {
    webidl.brandCheck(this, FormData)

    return makeIterator(
      () => this[kState].map(pair => [pair.name, pair.value]),
      'FormData',
      'value'
    )
  }

  /**
   * Calls callbackFn once per entry, browser-style: (value, key, self).
   * @param {(value: string, key: string, self: FormData) => void} callbackFn
   * @param {unknown} thisArg
   */
  forEach (callbackFn, thisArg = globalThis) {
    webidl.brandCheck(this, FormData)

    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' })

    if (typeof callbackFn !== 'function') {
      throw new TypeError(
        "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'."
      )
    }

    for (const [key, value] of this) {
      callbackFn.apply(thisArg, [value, key, this])
    }
  }
}

// Iterating a FormData yields [name, value] pairs, same as entries().
FormData.prototype[Symbol.iterator] = FormData.prototype.entries

// Brand the prototype so Object.prototype.toString reports '[object FormData]'.
Object.defineProperties(FormData.prototype, {
  [Symbol.toStringTag]: {
    value: 'FormData',
    configurable: true
  }
})
/**
 * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry
 * @param {string} name
 * @param {string|Blob} value
 * @param {?string} filename
 * @returns
 */
function makeEntry (name, value, filename) {
  // 1. Set name to the result of converting name into a scalar value string.
  // "To convert a string into a scalar value string, replace any surrogates
  //  with U+FFFD." A utf8 round-trip through Buffer has exactly that effect.
  // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end
  const scalarName = Buffer.from(name).toString('utf8')

  // 2. If value is a string, it is converted the same way and the entry is
  //    complete — filename is never consulted for string values.
  if (typeof value === 'string') {
    return { name: scalarName, value: Buffer.from(value).toString('utf8') }
  }

  // 3.1. A bare Blob (or blob-like) is promoted to a File object
  //      representing the same bytes, named "blob".
  if (!isFileLike(value)) {
    value = value instanceof Blob
      ? new File([value], 'blob', { type: value.type })
      : new FileLike(value, 'blob', { type: value.type })
  }

  // 3.2. If filename is given, re-wrap so the name attribute is filename
  //      while type and lastModified carry over.
  if (filename !== undefined) {
    /** @type {FilePropertyBag} */
    const options = {
      type: value.type,
      lastModified: value.lastModified
    }

    value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile
      ? new File([value], filename, options)
      : new FileLike(value, filename, options)
  }

  // 4. Return an entry whose name is name and whose value is value.
  return { name: scalarName, value }
}

module.exports = { FormData }
/***/ } ) ,
/***/ 1246 :
/***/ ( ( module ) => {
"use strict" ;
// In case of breaking changes, increase the version
// number to avoid conflicts.
const globalOrigin = Symbol.for('undici.globalOrigin.1')

/** Reads the process-wide origin previously stored by setGlobalOrigin. */
function getGlobalOrigin () {
  return globalThis[globalOrigin]
}

/**
 * Stores (or clears, when given undefined) the process-wide origin used
 * for relative URL resolution. Only http(s) origins are accepted.
 * @param {string|URL|undefined} newOrigin
 */
function setGlobalOrigin (newOrigin) {
  let value

  if (newOrigin === undefined) {
    value = undefined
  } else {
    const parsedURL = new URL(newOrigin)

    if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') {
      throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`)
    }

    value = parsedURL
  }

  // The slot is hidden (non-enumerable) and pinned (non-configurable),
  // but writable so subsequent calls can swap the value in place.
  Object.defineProperty(globalThis, globalOrigin, {
    value,
    writable: true,
    enumerable: false,
    configurable: false
  })
}
module . exports = {
getGlobalOrigin ,
setGlobalOrigin
}
/***/ } ) ,
/***/ 554 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// https://github.com/Ethan-Arrowood/undici-fetch
const { kHeadersList , kConstruct } = _ _nccwpck _require _ _ ( 2785 )
const { kGuard } = _ _nccwpck _require _ _ ( 5861 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
const {
makeIterator ,
isValidHeaderName ,
isValidHeaderValue
} = _ _nccwpck _require _ _ ( 2538 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const kHeadersMap = Symbol ( 'headers map' )
const kHeadersSortedMap = Symbol ( 'headers map sorted' )
/**
 * True for the four HTTP whitespace code points:
 * TAB (0x09), LF (0x0A), CR (0x0D) and SP (0x20).
 * @param {number} code
 */
function isHTTPWhiteSpaceCharCode (code) {
  switch (code) {
    case 0x009:
    case 0x00a:
    case 0x00d:
    case 0x020:
      return true
    default:
      return false
  }
}

/**
 * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize
 * Strips leading and trailing HTTP whitespace bytes, returning the
 * original string object untouched when nothing needs trimming.
 * @param {string} potentialValue
 */
function headerValueNormalize (potentialValue) {
  let start = 0
  let end = potentialValue.length

  // Shrink the [start, end) window from the right, then from the left.
  while (end > start && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(end - 1))) {
    end -= 1
  }
  while (end > start && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(start))) {
    start += 1
  }

  if (start === 0 && end === potentialValue.length) {
    return potentialValue
  }
  return potentialValue.substring(start, end)
}
// To fill a Headers object headers with a given object object, run these steps:
function fill (headers, object) {
  // 1. If object is a sequence, then for each header in object:
  // Note: webidl conversion to array has already been done.
  if (Array.isArray(object)) {
    for (const header of object) {
      // 1. If header does not contain exactly two items, then throw a TypeError.
      if (header.length !== 2) {
        throw webidl.errors.exception({
          header: 'Headers constructor',
          message: `expected name/value pair to be length 2, found ${header.length}.`
        })
      }

      // 2. Append (header's first item, header's second item) to headers.
      appendHeader(headers, header[0], header[1])
    }
    return
  }

  // 2. Otherwise, object is a record: append each key → value pair.
  // Note: null must fall through to the conversion failure below.
  if (typeof object === 'object' && object !== null) {
    for (const key of Object.keys(object)) {
      appendHeader(headers, key, object[key])
    }
    return
  }

  throw webidl.errors.conversionFailed({
    prefix: 'Headers constructor',
    argument: 'Argument 1',
    types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
  })
}
/**
 * @see https://fetch.spec.whatwg.org/#concept-headers-append
 * Validates and appends a (name, value) pair to headers's header list.
 */
function appendHeader (headers, name, value) {
  // 1. Normalize value.
  value = headerValueNormalize(value)

  // 2. If name is not a header name or value is not a
  //    header value, then throw a TypeError.
  if (!isValidHeaderName(name)) {
    throw webidl.errors.invalidArgument({
      prefix: 'Headers.append',
      value: name,
      type: 'header name'
    })
  }
  if (!isValidHeaderValue(value)) {
    throw webidl.errors.invalidArgument({
      prefix: 'Headers.append',
      value,
      type: 'header value'
    })
  }

  // 3. If headers's guard is "immutable", then throw a TypeError.
  if (headers[kGuard] === 'immutable') {
    throw new TypeError('immutable')
  }
  // 4./5./6. The "request", "request-no-cors" and "response" guards would
  //    filter forbidden header names here; undici does not implement
  //    forbidden header names and the no-cors branch remains a TODO.

  // 7. Append (name, value) to headers's header list.
  return headers[kHeadersList].append(name, value)

  // 8. If headers's guard is "request-no-cors", then remove
  //    privileged no-CORS request headers from headers
}
// Backing store for Headers: a Map keyed by lowercased header name, holding
// { name, value } so the original casing survives. A sorted snapshot
// (kHeadersSortedMap) is cached and invalidated on every mutation.
class HeadersList {
  /** @type {[string, string][]|null} */
  cookies = null

  constructor (init) {
    if (init instanceof HeadersList) {
      // Copy constructor: clone the map and the cookie array, but share the
      // (immutable once built) sorted snapshot.
      this[kHeadersMap] = new Map(init[kHeadersMap])
      this[kHeadersSortedMap] = init[kHeadersSortedMap]
      this.cookies = init.cookies === null ? null : [...init.cookies]
    } else {
      this[kHeadersMap] = new Map(init)
      this[kHeadersSortedMap] = null
    }
  }

  // https://fetch.spec.whatwg.org/#header-list-contains
  contains (name) {
    // A header list list contains a header name name if list
    // contains a header whose name is a byte-case-insensitive
    // match for name.
    name = name.toLowerCase()

    return this[kHeadersMap].has(name)
  }

  clear () {
    this[kHeadersMap].clear()
    this[kHeadersSortedMap] = null
    this.cookies = null
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-append
  append (name, value) {
    // Any mutation invalidates the sorted snapshot.
    this[kHeadersSortedMap] = null

    // 1. If list contains name, then set name to the first such
    //    header's name.
    const lowercaseName = name.toLowerCase()
    const exists = this[kHeadersMap].get(lowercaseName)

    // 2. Append (name, value) to list. Repeated headers are joined into one
    //    value: '; ' for Cookie, ', ' for everything else.
    if (exists) {
      const delimiter = lowercaseName === 'cookie' ? '; ' : ', '
      this[kHeadersMap].set(lowercaseName, {
        name: exists.name,
        value: `${exists.value}${delimiter}${value}`
      })
    } else {
      this[kHeadersMap].set(lowercaseName, { name, value })
    }

    // Set-Cookie values are additionally kept as a separate array, since
    // they must not be comma-joined when read back.
    if (lowercaseName === 'set-cookie') {
      this.cookies ??= []
      this.cookies.push(value)
    }
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-set
  set (name, value) {
    this[kHeadersSortedMap] = null
    const lowercaseName = name.toLowerCase()

    if (lowercaseName === 'set-cookie') {
      this.cookies = [value]
    }

    // 1. If list contains name, then set the value of
    //    the first such header to value and remove the
    //    others.
    // 2. Otherwise, append header (name, value) to list.
    this[kHeadersMap].set(lowercaseName, { name, value })
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-delete
  delete (name) {
    this[kHeadersSortedMap] = null

    name = name.toLowerCase()

    if (name === 'set-cookie') {
      this.cookies = null
    }

    this[kHeadersMap].delete(name)
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-get
  get (name) {
    const value = this[kHeadersMap].get(name.toLowerCase())

    // 1. If list does not contain name, then return null.
    // 2. Return the values of all headers in list whose name
    //    is a byte-case-insensitive match for name,
    //    separated from each other by 0x2C 0x20, in order.
    return value === undefined ? null : value.value
  }

  * [Symbol.iterator] () {
    // use the lowercased name
    for (const [name, { value }] of this[kHeadersMap]) {
      yield [name, value]
    }
  }

  // Plain-object view of the list, using the originally-cased names.
  get entries () {
    const headers = {}

    if (this[kHeadersMap].size) {
      for (const { name, value } of this[kHeadersMap].values()) {
        headers[name] = value
      }
    }

    return headers
  }
}
// https://fetch.spec.whatwg.org/#headers-class
class Headers {
  constructor (init = undefined) {
    // kConstruct is an internal sentinel: skip initialization entirely so
    // undici can wrap an existing HeadersList without re-validating it.
    if (init === kConstruct) {
      return
    }
    this[kHeadersList] = new HeadersList()

    // The new Headers(init) constructor steps are:

    // 1. Set this's guard to "none".
    this[kGuard] = 'none'

    // 2. If init is given, then fill this with init.
    if (init !== undefined) {
      init = webidl.converters.HeadersInit(init)
      fill(this, init)
    }
  }

  // https://fetch.spec.whatwg.org/#dom-headers-append
  append (name, value) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' })

    name = webidl.converters.ByteString(name)
    value = webidl.converters.ByteString(value)

    return appendHeader(this, name, value)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-delete
  delete (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.delete',
        value: name,
        type: 'header name'
      })
    }

    // 2. If this's guard is "immutable", then throw a TypeError.
    // 3. Otherwise, if this's guard is "request" and name is a
    //    forbidden header name, return.
    // 4. Otherwise, if this's guard is "request-no-cors", name
    //    is not a no-CORS-safelisted request-header name, and
    //    name is not a privileged no-CORS request-header name,
    //    return.
    // 5. Otherwise, if this's guard is "response" and name is
    //    a forbidden response-header name, return.
    // Note: undici does not implement forbidden header names
    if (this[kGuard] === 'immutable') {
      throw new TypeError('immutable')
    } else if (this[kGuard] === 'request-no-cors') {
      // TODO
    }

    // 6. If this's header list does not contain name, then
    //    return.
    if (!this[kHeadersList].contains(name)) {
      return
    }

    // 7. Delete name from this's header list.
    // 8. If this's guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from this.
    this[kHeadersList].delete(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-get
  get (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.get',
        value: name,
        type: 'header name'
      })
    }

    // 2. Return the result of getting name from this's header
    //    list.
    return this[kHeadersList].get(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-has
  has (name) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' })

    name = webidl.converters.ByteString(name)

    // 1. If name is not a header name, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.has',
        value: name,
        type: 'header name'
      })
    }

    // 2. Return true if this's header list contains name;
    //    otherwise false.
    return this[kHeadersList].contains(name)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-set
  set (name, value) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' })

    name = webidl.converters.ByteString(name)
    value = webidl.converters.ByteString(value)

    // 1. Normalize value.
    value = headerValueNormalize(value)

    // 2. If name is not a header name or value is not a
    //    header value, then throw a TypeError.
    if (!isValidHeaderName(name)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.set',
        value: name,
        type: 'header name'
      })
    } else if (!isValidHeaderValue(value)) {
      throw webidl.errors.invalidArgument({
        prefix: 'Headers.set',
        value,
        type: 'header value'
      })
    }

    // 3. If this's guard is "immutable", then throw a TypeError.
    // 4. Otherwise, if this's guard is "request" and name is a
    //    forbidden header name, return.
    // 5. Otherwise, if this's guard is "request-no-cors" and
    //    name/value is not a no-CORS-safelisted request-header,
    //    return.
    // 6. Otherwise, if this's guard is "response" and name is a
    //    forbidden response-header name, return.
    // Note: undici does not implement forbidden header names
    if (this[kGuard] === 'immutable') {
      throw new TypeError('immutable')
    } else if (this[kGuard] === 'request-no-cors') {
      // TODO
    }

    // 7. Set (name, value) in this's header list.
    // 8. If this's guard is "request-no-cors", then remove
    //    privileged no-CORS request headers from this
    this[kHeadersList].set(name, value)
  }

  // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
  getSetCookie () {
    webidl.brandCheck(this, Headers)

    // 1. If this's header list does not contain `Set-Cookie`, then return « ».
    // 2. Return the values of all headers in this's header list whose name is
    //    a byte-case-insensitive match for `Set-Cookie`, in order.
    const list = this[kHeadersList].cookies

    if (list) {
      // Defensive copy so callers cannot mutate the internal array.
      return [...list]
    }

    return []
  }

  // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
  // Lazily builds (and caches on the header list) the sorted [name, value]
  // pairs used by the iteration methods, with set-cookie expanded per value.
  get [kHeadersSortedMap] () {
    if (this[kHeadersList][kHeadersSortedMap]) {
      return this[kHeadersList][kHeadersSortedMap]
    }

    // 1. Let headers be an empty list of headers with the key being the name
    //    and value the value.
    const headers = []

    // 2. Let names be the result of convert header names to a sorted-lowercase
    //    set with all the names of the headers in list.
    // (Names in the list are unique, so this comparator never sees equals.)
    const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)
    const cookies = this[kHeadersList].cookies

    // 3. For each name of names:
    for (let i = 0; i < names.length; ++i) {
      const [name, value] = names[i]
      // 1. If name is `set-cookie`, then:
      if (name === 'set-cookie') {
        // 1. Let values be a list of all values of headers in list whose name
        //    is a byte-case-insensitive match for name, in order.

        // 2. For each value of values:
        // 1. Append (name, value) to headers.
        for (let j = 0; j < cookies.length; ++j) {
          headers.push([name, cookies[j]])
        }
      } else {
        // 2. Otherwise:

        // 1. Let value be the result of getting name from list.

        // 2. Assert: value is non-null.
        assert(value !== null)

        // 3. Append (name, value) to headers.
        headers.push([name, value])
      }
    }

    this[kHeadersList][kHeadersSortedMap] = headers

    // 4. Return headers.
    return headers
  }

  keys () {
    webidl.brandCheck(this, Headers)

    // Immutable headers can never change, so the snapshot may be captured
    // once instead of being recomputed on each iteration step.
    if (this[kGuard] === 'immutable') {
      const value = this[kHeadersSortedMap]
      return makeIterator(() => value, 'Headers',
        'key')
    }

    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'key'
    )
  }

  values () {
    webidl.brandCheck(this, Headers)

    if (this[kGuard] === 'immutable') {
      const value = this[kHeadersSortedMap]
      return makeIterator(() => value, 'Headers',
        'value')
    }

    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'value'
    )
  }

  entries () {
    webidl.brandCheck(this, Headers)

    if (this[kGuard] === 'immutable') {
      const value = this[kHeadersSortedMap]
      return makeIterator(() => value, 'Headers',
        'key+value')
    }

    return makeIterator(
      () => [...this[kHeadersSortedMap].values()],
      'Headers',
      'key+value'
    )
  }

  /**
   * Calls callbackFn once per header, browser-style: (value, key, self).
   * @param {(value: string, key: string, self: Headers) => void} callbackFn
   * @param {unknown} thisArg
   */
  forEach (callbackFn, thisArg = globalThis) {
    webidl.brandCheck(this, Headers)

    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })

    if (typeof callbackFn !== 'function') {
      throw new TypeError(
        "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'."
      )
    }

    for (const [key, value] of this) {
      callbackFn.apply(thisArg, [value, key, this])
    }
  }

  // Show the raw header list in util.inspect / console.log output.
  [Symbol.for('nodejs.util.inspect.custom')] () {
    webidl.brandCheck(this, Headers)

    return this[kHeadersList]
  }
}
Headers . prototype [ Symbol . iterator ] = Headers . prototype . entries
Object . defineProperties ( Headers . prototype , {
append : kEnumerableProperty ,
delete : kEnumerableProperty ,
get : kEnumerableProperty ,
has : kEnumerableProperty ,
set : kEnumerableProperty ,
getSetCookie : kEnumerableProperty ,
keys : kEnumerableProperty ,
values : kEnumerableProperty ,
entries : kEnumerableProperty ,
forEach : kEnumerableProperty ,
[ Symbol . iterator ] : { enumerable : false } ,
[ Symbol . toStringTag ] : {
value : 'Headers' ,
configurable : true
}
} )
webidl . converters . HeadersInit = function ( V ) {
if ( webidl . util . Type ( V ) === 'Object' ) {
if ( V [ Symbol . iterator ] ) {
return webidl . converters [ 'sequence<sequence<ByteString>>' ] ( V )
}
return webidl . converters [ 'record<ByteString, ByteString>' ] ( V )
}
throw webidl . errors . conversionFailed ( {
prefix : 'Headers constructor' ,
argument : 'Argument 1' ,
types : [ 'sequence<sequence<ByteString>>' , 'record<ByteString, ByteString>' ]
} )
}
module . exports = {
fill ,
Headers ,
HeadersList
}
/***/ } ) ,
/***/ 4881 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// https://github.com/Ethan-Arrowood/undici-fetch
const {
Response ,
makeNetworkError ,
makeAppropriateNetworkError ,
filterResponse ,
makeResponse
} = _ _nccwpck _require _ _ ( 7823 )
const { Headers } = _ _nccwpck _require _ _ ( 554 )
const { Request , makeRequest } = _ _nccwpck _require _ _ ( 8359 )
const zlib = _ _nccwpck _require _ _ ( 9796 )
const {
bytesMatch ,
makePolicyContainer ,
clonePolicyContainer ,
requestBadPort ,
TAOCheck ,
appendRequestOriginHeader ,
responseLocationURL ,
requestCurrentURL ,
setRequestReferrerPolicyOnRedirect ,
tryUpgradeRequestToAPotentiallyTrustworthyURL ,
createOpaqueTimingInfo ,
appendFetchMetadata ,
corsCheck ,
crossOriginResourcePolicyCheck ,
determineRequestsReferrer ,
coarsenedSharedCurrentTime ,
createDeferredPromise ,
isBlobLike ,
sameOrigin ,
isCancelled ,
isAborted ,
isErrorLike ,
fullyReadBody ,
readableStreamClose ,
isomorphicEncode ,
urlIsLocal ,
urlIsHttpHttpsScheme ,
urlHasHttpsScheme
} = _ _nccwpck _require _ _ ( 2538 )
const { kState , kHeaders , kGuard , kRealm } = _ _nccwpck _require _ _ ( 5861 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { safelyExtractBody } = _ _nccwpck _require _ _ ( 9990 )
const {
redirectStatusSet ,
nullBodyStatus ,
safeMethodsSet ,
requestBodyHeader ,
subresourceSet ,
DOMException
} = _ _nccwpck _require _ _ ( 1037 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
const EE = _ _nccwpck _require _ _ ( 2361 )
const { Readable , pipeline } = _ _nccwpck _require _ _ ( 2781 )
const { addAbortListener , isErrored , isReadable , nodeMajor , nodeMinor } = _ _nccwpck _require _ _ ( 3983 )
const { dataURLProcessor , serializeAMimeType } = _ _nccwpck _require _ _ ( 685 )
const { TransformStream } = _ _nccwpck _require _ _ ( 5356 )
const { getGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { STATUS _CODES } = _ _nccwpck _require _ _ ( 3685 )
const GET _OR _HEAD = [ 'GET' , 'HEAD' ]
/** @type {import('buffer').resolveObjectURL} */
let resolveObjectURL
let ReadableStream = globalThis . ReadableStream
// Per-request fetch controller: tracks lifecycle state
// ('ongoing' | 'terminated' | 'aborted') and tears down the underlying
// connection when the request ends early.
class Fetch extends EE {
  constructor (dispatcher) {
    super()

    this.dispatcher = dispatcher
    this.connection = null
    this.dump = false
    this.state = 'ongoing'
    // 2 terminated listeners get added per request,
    // but only 1 gets removed. If there are 20 redirects,
    // 21 listeners will be added.
    // See https://github.com/nodejs/undici/issues/1711
    // TODO (fix): Find and fix root cause for leaked listener.
    this.setMaxListeners(21)
  }

  terminate (reason) {
    // Idempotent: only an ongoing fetch can be terminated.
    if (this.state !== 'ongoing') {
      return
    }

    this.state = 'terminated'
    this.connection?.destroy(reason)
    this.emit('terminated', reason)
  }

  // https://fetch.spec.whatwg.org/#fetch-controller-abort
  abort (error) {
    // Idempotent: only an ongoing fetch can be aborted.
    if (this.state !== 'ongoing') {
      return
    }

    // 1. Set controller's state to "aborted".
    this.state = 'aborted'

    // 2. Let fallbackError be an "AbortError" DOMException.
    // 3. Set error to fallbackError if it is not given.
    if (!error) {
      error = new DOMException('The operation was aborted.', 'AbortError')
    }

    // 4. Let serializedError be StructuredSerialize(error).
    //    If that threw an exception, catch it, and let
    //    serializedError be StructuredSerialize(fallbackError).

    // 5. Set controller's serialized abort reason to serializedError.
    this.serializedAbortReason = error

    this.connection?.destroy(error)
    this.emit('terminated', error)
  }
}
// https://fetch.spec.whatwg.org/#fetch-method
// Entry point for globalThis.fetch: builds the Request, wires up abort
// handling, starts the fetch machinery, and returns a promise for Response.
function fetch (input, init = {}) {
  webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' })

  // 1. Let p be a new promise.
  const p = createDeferredPromise()

  // 2. Let requestObject be the result of invoking the initial value of
  // Request as constructor with input and init as arguments. If this throws
  // an exception, reject p with it and return p.
  let requestObject

  try {
    requestObject = new Request(input, init)
  } catch (e) {
    p.reject(e)
    return p.promise
  }

  // 3. Let request be requestObject's request.
  const request = requestObject[kState]

  // 4. If requestObject's signal's aborted flag is set, then:
  if (requestObject.signal.aborted) {
    // 1. Abort the fetch() call with p, request, null, and
    //    requestObject's signal's abort reason.
    abortFetch(p, request, null, requestObject.signal.reason)

    // 2. Return p.
    return p.promise
  }

  // 5. Let globalObject be request's client's global object.
  const globalObject = request.client.globalObject

  // 6. If globalObject is a ServiceWorkerGlobalScope object, then set
  // request's service-workers mode to "none".
  if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') {
    request.serviceWorkers = 'none'
  }

  // 7. Let responseObject be null.
  let responseObject = null

  // 8. Let relevantRealm be this's relevant Realm.
  const relevantRealm = null

  // 9. Let locallyAborted be false.
  let locallyAborted = false

  // 10. Let controller be null.
  let controller = null

  // 11. Add the following abort steps to requestObject's signal:
  addAbortListener(
    requestObject.signal,
    () => {
      // 1. Set locallyAborted to true.
      locallyAborted = true

      // 2. Assert: controller is non-null.
      // (fetching() below assigns controller synchronously, before any
      // abort listener can fire.)
      assert(controller != null)

      // 3. Abort controller with requestObject's signal's abort reason.
      controller.abort(requestObject.signal.reason)

      // 4. Abort the fetch() call with p, request, responseObject,
      //    and requestObject's signal's abort reason.
      abortFetch(p, request, responseObject, requestObject.signal.reason)
    }
  )

  // 12. Let handleFetchDone given response response be to finalize and
  // report timing with response, globalObject, and "fetch".
  const handleFetchDone = (response) =>
    finalizeAndReportTiming(response, 'fetch')

  // 13. Set controller to the result of calling fetch given request,
  // with processResponseEndOfBody set to handleFetchDone, and processResponse
  // given response being these substeps:

  const processResponse = (response) => {
    // 1. If locallyAborted is true, terminate these substeps.
    if (locallyAborted) {
      return Promise.resolve()
    }

    // 2. If response's aborted flag is set, then:
    if (response.aborted) {
      // 1. Let deserializedError be the result of deserialize a serialized
      //    abort reason given controller's serialized abort reason and
      //    relevantRealm.

      // 2. Abort the fetch() call with p, request, responseObject, and
      //    deserializedError.
      abortFetch(p, request, responseObject, controller.serializedAbortReason)
      return Promise.resolve()
    }

    // 3. If response is a network error, then reject p with a TypeError
    // and terminate these substeps.
    if (response.type === 'error') {
      p.reject(
        Object.assign(new TypeError('fetch failed'), { cause: response.error })
      )
      return Promise.resolve()
    }

    // 4. Set responseObject to the result of creating a Response object,
    // given response, "immutable", and relevantRealm.
    responseObject = new Response()
    responseObject[kState] = response
    responseObject[kRealm] = relevantRealm
    responseObject[kHeaders][kHeadersList] = response.headersList
    responseObject[kHeaders][kGuard] = 'immutable'
    responseObject[kHeaders][kRealm] = relevantRealm

    // 5. Resolve p with responseObject.
    p.resolve(responseObject)
  }

  controller = fetching({
    request,
    processResponseEndOfBody: handleFetchDone,
    processResponse,
    dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici
  })

  // 14. Return p.
  return p.promise
}
// https://fetch.spec.whatwg.org/#finalize-and-report-timing
// Stamps the response's timing info and hands it to the resource-timing API.
function finalizeAndReportTiming (response, initiatorType = 'other') {
  // 1. Aborted network errors are never reported.
  if (response.type === 'error' && response.aborted) {
    return
  }

  // 2. Nothing to report without at least one URL in the list.
  if (!response.urlList?.length) {
    return
  }

  // 3. originalURL is the first URL of the response's URL list.
  const originalURL = response.urlList[0]

  // 4./5. Start from the response's timing info and cache state.
  let timingInfo = response.timingInfo
  let cacheState = response.cacheState

  // 6. Only HTTP(S) fetches produce resource-timing entries.
  if (!urlIsHttpHttpsScheme(originalURL)) {
    return
  }

  // 7. If timingInfo is null, then return.
  if (timingInfo === null) {
    return
  }

  // 8. When the timing-allow check did not pass, expose only opaque timing
  //    (the start time) and blank out the cache state.
  if (!response.timingAllowPassed) {
    timingInfo = createOpaqueTimingInfo({
      startTime: timingInfo.startTime
    })
    cacheState = ''
  }

  // 9. Stamp the end time with the coarsened shared current time.
  // TODO: given global's relevant settings object's cross-origin isolated
  // capability?
  timingInfo.endTime = coarsenedSharedCurrentTime()

  // 10. Set response's timing info to timingInfo.
  response.timingInfo = timingInfo

  // 11. Mark resource timing for timingInfo, originalURL, initiatorType,
  //     global, and cacheState.
  markResourceTiming(
    timingInfo,
    originalURL,
    initiatorType,
    globalThis,
    cacheState
  )
}
// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing
//
// Forwards a fetch timing entry to Node's performance timeline.
// performance.markResourceTiming landed in Node.js v18.2.0; on older
// runtimes the entry is silently dropped.
function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {
  const isSupported = nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)
  if (isSupported) {
    performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState)
  }
}
// https://fetch.spec.whatwg.org/#abort-fetch
//
// Rejects the fetch promise `p` with `error` and cancels any readable
// request/response body streams with that same error.
function abortFetch (p, request, responseObject, error) {
  // Note: AbortSignal.reason was added in node v17.2.0
  // which would give us an undefined error to reject with.
  // Remove this once node v16 is no longer supported.
  if (!error) {
    error = new DOMException('The operation was aborted.', 'AbortError')
  }

  // Cancels a body's stream with `error`, tolerating Node's spurious
  // ERR_INVALID_STATE rejection.
  const cancelBody = (body) => {
    body.stream.cancel(error).catch((err) => {
      if (err.code === 'ERR_INVALID_STATE') {
        // Node bug?
        return
      }
      throw err
    })
  }

  // 1. Reject promise with error.
  p.reject(error)

  // 2. If request's body is not null and is readable, then cancel request's
  // body with error.
  if (request.body != null && isReadable(request.body?.stream)) {
    cancelBody(request.body)
  }

  // 3. If responseObject is null, then return.
  if (responseObject == null) {
    return
  }

  // 4. Let response be responseObject's response.
  const response = responseObject[kState]

  // 5. If response's body is not null and is readable, then error response's
  // body with error.
  if (response.body != null && isReadable(response.body?.stream)) {
    cancelBody(response.body)
  }
}
// https://fetch.spec.whatwg.org/#fetching
//
// Implements the top-level "fetching" algorithm: resolves the task
// destination and cross-origin isolated capability from the request's
// client, builds the fetch-params record (including a fresh Fetch
// controller and timing info), normalizes the request (window, origin,
// policy container, default Accept / Accept-Language headers), starts
// main fetch in the background, and returns the controller so the caller
// can terminate/abort the operation.
function fetching ({
  request,
  processRequestBodyChunkLength,
  processRequestEndOfBody,
  processResponse,
  processResponseEndOfBody,
  processResponseConsumeBody,
  useParallelQueue = false,
  dispatcher // undici
}) {
  // 1. Let taskDestination be null.
  let taskDestination = null

  // 2. Let crossOriginIsolatedCapability be false.
  let crossOriginIsolatedCapability = false

  // 3. If request's client is non-null, then:
  if (request.client != null) {
    // 1. Set taskDestination to request's client's global object.
    taskDestination = request.client.globalObject

    // 2. Set crossOriginIsolatedCapability to request's client's cross-origin
    // isolated capability.
    crossOriginIsolatedCapability =
      request.client.crossOriginIsolatedCapability
  }

  // 4. If useParallelQueue is true, then set taskDestination to the result of
  // starting a new parallel queue.
  // TODO

  // 5. Let timingInfo be a new fetch timing info whose start time and
  // post-redirect start time are the coarsened shared current time given
  // crossOriginIsolatedCapability.
  const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability)
  const timingInfo = createOpaqueTimingInfo({
    startTime: currenTime
  })

  // 6. Let fetchParams be a new fetch params whose
  // request is request,
  // timing info is timingInfo,
  // process request body chunk length is processRequestBodyChunkLength,
  // process request end-of-body is processRequestEndOfBody,
  // process response is processResponse,
  // process response consume body is processResponseConsumeBody,
  // process response end-of-body is processResponseEndOfBody,
  // task destination is taskDestination,
  // and cross-origin isolated capability is crossOriginIsolatedCapability.
  const fetchParams = {
    controller: new Fetch(dispatcher),
    request,
    timingInfo,
    processRequestBodyChunkLength,
    processRequestEndOfBody,
    processResponse,
    processResponseConsumeBody,
    processResponseEndOfBody,
    taskDestination,
    crossOriginIsolatedCapability
  }

  // 7. If request's body is a byte sequence, then set request's body to
  // request's body as a body.
  // NOTE: Since fetching is only called from fetch, body should already be
  // extracted.
  assert(!request.body || request.body.stream)

  // 8. If request's window is "client", then set request's window to request's
  // client, if request's client's global object is a Window object; otherwise
  // "no-window".
  if (request.window === 'client') {
    // TODO: What if request.client is null?
    request.window =
      request.client?.globalObject?.constructor?.name === 'Window'
        ? request.client
        : 'no-window'
  }

  // 9. If request's origin is "client", then set request's origin to request's
  // client's origin.
  if (request.origin === 'client') {
    // TODO: What if request.client is null?
    request.origin = request.client?.origin
  }

  // 10. If all of the following conditions are true:
  // TODO

  // 11. If request's policy container is "client", then:
  if (request.policyContainer === 'client') {
    // 1. If request's client is non-null, then set request's policy
    // container to a clone of request's client's policy container. [HTML]
    if (request.client != null) {
      request.policyContainer = clonePolicyContainer(
        request.client.policyContainer
      )
    } else {
      // 2. Otherwise, set request's policy container to a new policy
      // container.
      request.policyContainer = makePolicyContainer()
    }
  }

  // 12. If request's header list does not contain `Accept`, then:
  if (!request.headersList.contains('accept')) {
    // 1. Let value be `*/*`.
    const value = '*/*'

    // 2. A user agent should set value to the first matching statement, if
    // any, switching on request's destination:
    // "document"
    // "frame"
    // "iframe"
    // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8`
    // "image"
    // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5`
    // "style"
    // `text/css,*/*;q=0.1`
    // TODO

    // 3. Append `Accept`/value to request's header list.
    request.headersList.append('accept', value)
  }

  // 13. If request's header list does not contain `Accept-Language`, then
  // user agents should append `Accept-Language`/an appropriate value to
  // request's header list.
  if (!request.headersList.contains('accept-language')) {
    request.headersList.append('accept-language', '*')
  }

  // 14. If request's priority is null, then use request's initiator and
  // destination appropriately in setting request's priority to a
  // user-agent-defined object.
  if (request.priority === null) {
    // TODO
  }

  // 15. If request is a subresource request, then:
  if (subresourceSet.has(request.destination)) {
    // TODO
  }

  // 16. Run main fetch given fetchParams. Errors terminate the controller
  // rather than rejecting here: the caller only receives the controller,
  // and the process* callbacks observe the outcome.
  mainFetch(fetchParams)
    .catch(err => {
      fetchParams.controller.terminate(err)
    })

  // 17. Return fetchParam's controller
  return fetchParams.controller
}
// https://fetch.spec.whatwg.org/#concept-main-fetch
//
// Runs the "main fetch" algorithm for fetchParams: pre-flight normalization
// (local-URL check, port blocking, referrer policy/referrer computation),
// dispatch to scheme fetch or HTTP fetch based on origin/mode/scheme,
// response-tainting filtering, the opaque-206-range and null-body-status
// rules, and finally subresource-integrity verification before handing the
// response to fetchFinale. When `recursive` is true (redirect re-entry),
// the raw response is returned to the caller instead of being finalized.
async function mainFetch (fetchParams, recursive = false) {
  // 1. Let request be fetchParams's request.
  const request = fetchParams.request

  // 2. Let response be null.
  let response = null

  // 3. If request's local-URLs-only flag is set and request's current URL is
  // not local, then set response to a network error.
  if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) {
    response = makeNetworkError('local URLs only')
  }

  // 4. Run report Content Security Policy violations for request.
  // TODO

  // 5. Upgrade request to a potentially trustworthy URL, if appropriate.
  tryUpgradeRequestToAPotentiallyTrustworthyURL(request)

  // 6. If should request be blocked due to a bad port, should fetching request
  // be blocked as mixed content, or should request be blocked by Content
  // Security Policy returns blocked, then set response to a network error.
  if (requestBadPort(request) === 'blocked') {
    response = makeNetworkError('bad port')
  }
  // TODO: should fetching request be blocked as mixed content?
  // TODO: should request be blocked by Content Security Policy?

  // 7. If request's referrer policy is the empty string, then set request's
  // referrer policy to request's policy container's referrer policy.
  if (request.referrerPolicy === '') {
    request.referrerPolicy = request.policyContainer.referrerPolicy
  }

  // 8. If request's referrer is not "no-referrer", then set request's
  // referrer to the result of invoking determine request's referrer.
  if (request.referrer !== 'no-referrer') {
    request.referrer = determineRequestsReferrer(request)
  }

  // 9. Set request's current URL's scheme to "https" if all of the following
  // conditions are true:
  // - request's current URL's scheme is "http"
  // - request's current URL's host is a domain
  // - Matching request's current URL's host per Known HSTS Host Domain Name
  //   Matching results in either a superdomain match with an asserted
  //   includeSubDomains directive or a congruent match (with or without an
  //   asserted includeSubDomains directive). [HSTS]
  // TODO

  // 10. If recursive is false, then run the remaining steps in parallel.
  // TODO

  // 11. If response is null, then set response to the result of running
  // the steps corresponding to the first matching statement:
  if (response === null) {
    response = await (async () => {
      const currentURL = requestCurrentURL(request)

      if (
        // - request's current URL's origin is same origin with request's origin,
        //   and request's response tainting is "basic"
        (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') ||
        // request's current URL's scheme is "data"
        (currentURL.protocol === 'data:') ||
        // - request's mode is "navigate" or "websocket"
        (request.mode === 'navigate' || request.mode === 'websocket')
      ) {
        // 1. Set request's response tainting to "basic".
        request.responseTainting = 'basic'

        // 2. Return the result of running scheme fetch given fetchParams.
        return await schemeFetch(fetchParams)
      }

      // request's mode is "same-origin"
      if (request.mode === 'same-origin') {
        // 1. Return a network error.
        return makeNetworkError('request mode cannot be "same-origin"')
      }

      // request's mode is "no-cors"
      if (request.mode === 'no-cors') {
        // 1. If request's redirect mode is not "follow", then return a network
        // error.
        if (request.redirect !== 'follow') {
          return makeNetworkError(
            'redirect mode cannot be "follow" for "no-cors" request'
          )
        }

        // 2. Set request's response tainting to "opaque".
        request.responseTainting = 'opaque'

        // 3. Return the result of running scheme fetch given fetchParams.
        return await schemeFetch(fetchParams)
      }

      // request's current URL's scheme is not an HTTP(S) scheme
      if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) {
        // Return a network error.
        return makeNetworkError('URL scheme must be a HTTP(S) scheme')
      }

      // - request's use-CORS-preflight flag is set
      // - request's unsafe-request flag is set and either request's method is
      //   not a CORS-safelisted method or CORS-unsafe request-header names with
      //   request's header list is not empty
      //   1. Set request's response tainting to "cors".
      //   2. Let corsWithPreflightResponse be the result of running HTTP fetch
      //      given fetchParams and true.
      //   3. If corsWithPreflightResponse is a network error, then clear cache
      //      entries using request.
      //   4. Return corsWithPreflightResponse.
      // TODO

      // Otherwise
      // 1. Set request's response tainting to "cors".
      request.responseTainting = 'cors'

      // 2. Return the result of running HTTP fetch given fetchParams.
      return await httpFetch(fetchParams)
    })()
  }

  // 12. If recursive is true, then return response.
  if (recursive) {
    return response
  }

  // 13. If response is not a network error and response is not a filtered
  // response, then:
  if (response.status !== 0 && !response.internalResponse) {
    // If request's response tainting is "cors", then:
    if (request.responseTainting === 'cors') {
      // 1. Let headerNames be the result of extracting header list values
      // given `Access-Control-Expose-Headers` and response's header list.
      // TODO
      // 2. If request's credentials mode is not "include" and headerNames
      // contains `*`, then set response's CORS-exposed header-name list to
      // all unique header names in response's header list.
      // TODO
      // 3. Otherwise, if headerNames is not null or failure, then set
      // response's CORS-exposed header-name list to headerNames.
      // TODO
    }

    // Set response to the following filtered response with response as its
    // internal response, depending on request's response tainting:
    if (request.responseTainting === 'basic') {
      response = filterResponse(response, 'basic')
    } else if (request.responseTainting === 'cors') {
      response = filterResponse(response, 'cors')
    } else if (request.responseTainting === 'opaque') {
      response = filterResponse(response, 'opaque')
    } else {
      assert(false)
    }
  }

  // 14. Let internalResponse be response, if response is a network error,
  // and response's internal response otherwise.
  let internalResponse =
    response.status === 0 ? response : response.internalResponse

  // 15. If internalResponse's URL list is empty, then set it to a clone of
  // request's URL list.
  if (internalResponse.urlList.length === 0) {
    internalResponse.urlList.push(...request.urlList)
  }

  // 16. If request's timing allow failed flag is unset, then set
  // internalResponse's timing allow passed flag.
  if (!request.timingAllowFailed) {
    response.timingAllowPassed = true
  }

  // 17. If response is not a network error and any of the following returns
  // blocked
  // - should internalResponse to request be blocked as mixed content
  // - should internalResponse to request be blocked by Content Security Policy
  // - should internalResponse to request be blocked due to its MIME type
  // - should internalResponse to request be blocked due to nosniff
  // TODO

  // 18. If response's type is "opaque", internalResponse's status is 206,
  // internalResponse's range-requested flag is set, and request's header
  // list does not contain `Range`, then set response and internalResponse
  // to a network error.
  // Fix: the internal request keeps its header list on `headersList`
  // (see the Accept/Accept-Language handling above); `request.headers` is
  // undefined here and would throw a TypeError when this branch is reached.
  if (
    response.type === 'opaque' &&
    internalResponse.status === 206 &&
    internalResponse.rangeRequested &&
    !request.headersList.contains('range')
  ) {
    response = internalResponse = makeNetworkError()
  }

  // 19. If response is not a network error and either request's method is
  // `HEAD` or `CONNECT`, or internalResponse's status is a null body status,
  // set internalResponse's body to null and disregard any enqueuing toward
  // it (if any).
  if (
    response.status !== 0 &&
    (request.method === 'HEAD' ||
      request.method === 'CONNECT' ||
      nullBodyStatus.includes(internalResponse.status))
  ) {
    internalResponse.body = null
    fetchParams.controller.dump = true
  }

  // 20. If request's integrity metadata is not the empty string, then:
  if (request.integrity) {
    // 1. Let processBodyError be this step: run fetch finale given fetchParams
    // and a network error.
    const processBodyError = (reason) =>
      fetchFinale(fetchParams, makeNetworkError(reason))

    // 2. If request's response tainting is "opaque", or response's body is null,
    // then run processBodyError and abort these steps.
    if (request.responseTainting === 'opaque' || response.body == null) {
      processBodyError(response.error)
      return
    }

    // 3. Let processBody given bytes be these steps:
    const processBody = (bytes) => {
      // 1. If bytes do not match request's integrity metadata,
      // then run processBodyError and abort these steps. [SRI]
      if (!bytesMatch(bytes, request.integrity)) {
        processBodyError('integrity mismatch')
        return
      }

      // 2. Set response's body to bytes as a body.
      response.body = safelyExtractBody(bytes)[0]

      // 3. Run fetch finale given fetchParams and response.
      fetchFinale(fetchParams, response)
    }

    // 4. Fully read response's body given processBody and processBodyError.
    await fullyReadBody(response.body, processBody, processBodyError)
  } else {
    // 21. Otherwise, run fetch finale given fetchParams and response.
    fetchFinale(fetchParams, response)
  }
}
// https://fetch.spec.whatwg.org/#concept-scheme-fetch
// given a fetch params fetchParams
//
// Dispatches on the current URL's scheme: about:/file:/unknown schemes
// yield network errors, blob: and data: URLs are materialized into
// responses inline, and http:/https: delegate to httpFetch. Always
// returns a Promise (errors are mapped to network-error responses,
// never rejections).
function schemeFetch (fetchParams) {
  // Note: since the connection is destroyed on redirect, which sets fetchParams to a
  // cancelled state, we do not want this condition to trigger *unless* there have been
  // no redirects. See https://github.com/nodejs/undici/issues/1776
  // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
  if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) {
    return Promise.resolve(makeAppropriateNetworkError(fetchParams))
  }

  // 2. Let request be fetchParams's request.
  const { request } = fetchParams

  const { protocol: scheme } = requestCurrentURL(request)

  // 3. Switch on request's current URL's scheme and run the associated steps:
  switch (scheme) {
    case 'about:': {
      // If request's current URL's path is the string "blank", then return a new response
      // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) »,
      // and body is the empty byte sequence as a body.

      // Otherwise, return a network error.
      return Promise.resolve(makeNetworkError('about scheme is not supported'))
    }
    case 'blob:': {
      // Lazily load resolveObjectURL (from node:buffer) on first use.
      if (!resolveObjectURL) {
        resolveObjectURL = (__nccwpck_require__(4300).resolveObjectURL)
      }

      // 1. Let blobURLEntry be request's current URL's blob URL entry.
      const blobURLEntry = requestCurrentURL(request)

      // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56
      // Buffer.resolveObjectURL does not ignore URL queries.
      if (blobURLEntry.search.length !== 0) {
        return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.'))
      }

      const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString())

      // 2. If request's method is not `GET`, blobURLEntry is null, or blobURLEntry's
      // object is not a Blob object, then return a network error.
      // (isBlobLike also covers the null case, since null is not blob-like.)
      if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) {
        return Promise.resolve(makeNetworkError('invalid method'))
      }

      // 3. Let bodyWithType be the result of safely extracting blobURLEntry's object.
      const bodyWithType = safelyExtractBody(blobURLEntryObject)

      // 4. Let body be bodyWithType's body.
      const body = bodyWithType[0]

      // 5. Let length be body's length, serialized and isomorphic encoded.
      const length = isomorphicEncode(`${body.length}`)

      // 6. Let type be bodyWithType's type if it is non-null; otherwise the empty byte sequence.
      const type = bodyWithType[1] ?? ''

      // 7. Return a new response whose status message is `OK`, header list is
      // « (`Content-Length`, length), (`Content-Type`, type) », and body is body.
      const response = makeResponse({
        statusText: 'OK',
        headersList: [
          ['content-length', { name: 'Content-Length', value: length }],
          ['content-type', { name: 'Content-Type', value: type }]
        ]
      })

      response.body = body

      return Promise.resolve(response)
    }
    case 'data:': {
      // 1. Let dataURLStruct be the result of running the
      // data: URL processor on request's current URL.
      const currentURL = requestCurrentURL(request)
      const dataURLStruct = dataURLProcessor(currentURL)

      // 2. If dataURLStruct is failure, then return a
      // network error.
      if (dataURLStruct === 'failure') {
        return Promise.resolve(makeNetworkError('failed to fetch the data URL'))
      }

      // 3. Let mimeType be dataURLStruct's MIME type, serialized.
      const mimeType = serializeAMimeType(dataURLStruct.mimeType)

      // 4. Return a response whose status message is `OK`,
      // header list is « (`Content-Type`, mimeType) »,
      // and body is dataURLStruct's body as a body.
      return Promise.resolve(makeResponse({
        statusText: 'OK',
        headersList: [
          ['content-type', { name: 'Content-Type', value: mimeType }]
        ],
        body: safelyExtractBody(dataURLStruct.body)[0]
      }))
    }
    case 'file:': {
      // For now, unfortunate as it is, file URLs are left as an exercise for the reader.
      // When in doubt, return a network error.
      return Promise.resolve(makeNetworkError('not implemented... yet...'))
    }
    case 'http:':
    case 'https:': {
      // Return the result of running HTTP fetch given fetchParams.
      return httpFetch(fetchParams)
        .catch((err) => makeNetworkError(err))
    }
    default: {
      return Promise.resolve(makeNetworkError('unknown scheme'))
    }
  }
}
// https://fetch.spec.whatwg.org/#finalize-response
//
// Marks the request as done and, when a process-response-done callback was
// supplied, queues it (as a fetch task / microtask) with the response.
function finalizeResponse (fetchParams, response) {
  // 1. Set fetchParams's request's done flag.
  fetchParams.request.done = true

  // 2. If fetchParams's process response done is not null, then queue a
  // fetch task to run it given response, with fetchParams's task
  // destination.
  if (fetchParams.processResponseDone == null) {
    return
  }
  queueMicrotask(() => fetchParams.processResponseDone(response))
}
// https://fetch.spec.whatwg.org/#fetch-finale
function fetchFinale ( fetchParams , response ) {
// 1. If response is a network error, then:
if ( response . type === 'error' ) {
// 1. Set response’ s URL list to « fetchParams’ s request’ s URL list[0] ».
response . urlList = [ fetchParams . request . urlList [ 0 ] ]
// 2. Set response’ s timing info to the result of creating an opaque timing
// info for fetchParams’ s timing info.
response . timingInfo = createOpaqueTimingInfo ( {
startTime : fetchParams . timingInfo . startTime
} )
}
// 2. Let processResponseEndOfBody be the following steps:
const processResponseEndOfBody = ( ) => {
// 1. Set fetchParams’ s request’ s done flag.
fetchParams . request . done = true
// If fetchParams’ s process response end-of-body is not null,
// then queue a fetch task to run fetchParams’ s process response
// end-of-body given response with fetchParams’ s task destination.
if ( fetchParams . processResponseEndOfBody != null ) {
queueMicrotask ( ( ) => fetchParams . processResponseEndOfBody ( response ) )
}
}
// 3. If fetchParams’ s process response is non-null, then queue a fetch task
// to run fetchParams’ s process response given response, with fetchParams’ s
// task destination.
if ( fetchParams . processResponse != null ) {
queueMicrotask ( ( ) => fetchParams . processResponse ( response ) )
}
// 4. If response’ s body is null, then run processResponseEndOfBody.
if ( response . body == null ) {
processResponseEndOfBody ( )
} else {
// 5. Otherwise:
// 1. Let transformStream be a new a TransformStream.
// 2. Let identityTransformAlgorithm be an algorithm which, given chunk,
// enqueues chunk in transformStream.
const identityTransformAlgorithm = ( chunk , controller ) => {
controller . enqueue ( chunk )
}
// 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm
// and flushAlgorithm set to processResponseEndOfBody.
const transformStream = new TransformStream ( {
start ( ) { } ,
transform : identityTransformAlgorithm ,
flush : processResponseEndOfBody
} , {
size ( ) {
return 1
}
} , {
size ( ) {
return 1
}
} )
// 4. Set response’ s body to the result of piping response’ s body through transformStream.
response . body = { stream : response . body . stream . pipeThrough ( transformStream ) }
}
// 6. If fetchParams’ s process response consume body is non-null, then:
if ( fetchParams . processResponseConsumeBody != null ) {
// 1. Let processBody given nullOrBytes be this step: run fetchParams’ s
// process response consume body given response and nullOrBytes.
const processBody = ( nullOrBytes ) => fetchParams . processResponseConsumeBody ( response , nullOrBytes )
// 2. Let processBodyError be this step: run fetchParams’ s process
// response consume body given response and failure.
const processBodyError = ( failure ) => fetchParams . processResponseConsumeBody ( response , failure )
// 3. If response’ s body is null, then queue a fetch task to run processBody
// given null, with fetchParams’ s task destination.
if ( response . body == null ) {
queueMicrotask ( ( ) => processBody ( null ) )
} else {
// 4. Otherwise, fully read response’ s body given processBody, processBodyError,
// and fetchParams’ s task destination.
return fullyReadBody ( response . body , processBody , processBodyError )
}
return Promise . resolve ( )
}
}
// https://fetch.spec.whatwg.org/#http-fetch
async function httpFetch ( fetchParams ) {
// 1. Let request be fetchParams’ s request.
const request = fetchParams . request
// 2. Let response be null.
let response = null
// 3. Let actualResponse be null.
let actualResponse = null
// 4. Let timingInfo be fetchParams’ s timing info.
const timingInfo = fetchParams . timingInfo
// 5. If request’ s service-workers mode is "all", then:
if ( request . serviceWorkers === 'all' ) {
// TODO
}
// 6. If response is null, then:
if ( response === null ) {
// 1. If makeCORSPreflight is true and one of these conditions is true:
// TODO
// 2. If request’ s redirect mode is "follow", then set request’ s
// service-workers mode to "none".
if ( request . redirect === 'follow' ) {
request . serviceWorkers = 'none'
}
// 3. Set response and actualResponse to the result of running
// HTTP-network-or-cache fetch given fetchParams.
actualResponse = response = await httpNetworkOrCacheFetch ( fetchParams )
// 4. If request’ s response tainting is "cors" and a CORS check
// for request and response returns failure, then return a network error.
if (
request . responseTainting === 'cors' &&
corsCheck ( request , response ) === 'failure'
) {
return makeNetworkError ( 'cors failure' )
}
// 5. If the TAO check for request and response returns failure, then set
// request’ s timing allow failed flag.
if ( TAOCheck ( request , response ) === 'failure' ) {
request . timingAllowFailed = true
}
}
// 7. If either request’ s response tainting or response’ s type
// is "opaque", and the cross-origin resource policy check with
// request’ s origin, request’ s client, request’ s destination,
// and actualResponse returns blocked, then return a network error.
if (
( request . responseTainting === 'opaque' || response . type === 'opaque' ) &&
crossOriginResourcePolicyCheck (
request . origin ,
request . client ,
request . destination ,
actualResponse
) === 'blocked'
) {
return makeNetworkError ( 'blocked' )
}
// 8. If actualResponse’ s status is a redirect status, then:
if ( redirectStatusSet . has ( actualResponse . status ) ) {
// 1. If actualResponse’ s status is not 303, request’ s body is not null,
// and the connection uses HTTP/2, then user agents may, and are even
// encouraged to, transmit an RST_STREAM frame.
// See, https://github.com/whatwg/fetch/issues/1288
if ( request . redirect !== 'manual' ) {
fetchParams . controller . connection . destroy ( )
}
// 2. Switch on request’ s redirect mode:
if ( request . redirect === 'error' ) {
// Set response to a network error.
response = makeNetworkError ( 'unexpected redirect' )
} else if ( request . redirect === 'manual' ) {
// Set response to an opaque-redirect filtered response whose internal
// response is actualResponse.
// NOTE(spec): On the web this would return an `opaqueredirect` response,
// but that doesn't make sense server side.
// See https://github.com/nodejs/undici/issues/1193.
response = actualResponse
} else if ( request . redirect === 'follow' ) {
// Set response to the result of running HTTP-redirect fetch given
// fetchParams and response.
response = await httpRedirectFetch ( fetchParams , response )
} else {
assert ( false )
}
}
// 9. Set response’ s timing info to timingInfo.
response . timingInfo = timingInfo
// 10. Return response.
return response
}
// https://fetch.spec.whatwg.org/#http-redirect-fetch
//
// Follows one redirect: validates the Location URL (scheme, redirect count,
// credentials/CORS rules), rewrites the request (method/body for 301/302
// POST and 303, header stripping on cross-origin moves), updates redirect
// timing, appends the new URL, then re-enters main fetch recursively.
// Returns either a response / network error directly, a resolved Promise of
// one, or the Promise from mainFetch — callers await the result either way.
function httpRedirectFetch (fetchParams, response) {
  // 1. Let request be fetchParams's request.
  const request = fetchParams.request

  // 2. Let actualResponse be response, if response is not a filtered response,
  // and response's internal response otherwise.
  const actualResponse = response.internalResponse
    ? response.internalResponse
    : response

  // 3. Let locationURL be actualResponse's location URL given request's current
  // URL's fragment.
  let locationURL

  try {
    locationURL = responseLocationURL(
      actualResponse,
      requestCurrentURL(request).hash
    )

    // 4. If locationURL is null, then return response.
    if (locationURL == null) {
      return response
    }
  } catch (err) {
    // 5. If locationURL is failure, then return a network error.
    return Promise.resolve(makeNetworkError(err))
  }

  // 6. If locationURL's scheme is not an HTTP(S) scheme, then return a network
  // error.
  if (!urlIsHttpHttpsScheme(locationURL)) {
    return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme'))
  }

  // 7. If request's redirect count is 20, then return a network error.
  if (request.redirectCount === 20) {
    return Promise.resolve(makeNetworkError('redirect count exceeded'))
  }

  // 8. Increase request's redirect count by 1.
  request.redirectCount += 1

  // 9. If request's mode is "cors", locationURL includes credentials, and
  // request's origin is not same origin with locationURL's origin, then return
  // a network error.
  if (
    request.mode === 'cors' &&
    (locationURL.username || locationURL.password) &&
    !sameOrigin(request, locationURL)
  ) {
    return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"'))
  }

  // 10. If request's response tainting is "cors" and locationURL includes
  // credentials, then return a network error.
  if (
    request.responseTainting === 'cors' &&
    (locationURL.username || locationURL.password)
  ) {
    return Promise.resolve(makeNetworkError(
      'URL cannot contain credentials for request mode "cors"'
    ))
  }

  // 11. If actualResponse's status is not 303, request's body is non-null,
  // and request's body's source is null, then return a network error.
  // (A sourceless body stream cannot be replayed for the redirected request.)
  if (
    actualResponse.status !== 303 &&
    request.body != null &&
    request.body.source == null
  ) {
    return Promise.resolve(makeNetworkError())
  }

  // 12. If one of the following is true
  // - actualResponse's status is 301 or 302 and request's method is `POST`
  // - actualResponse's status is 303 and request's method is not `GET` or `HEAD`
  if (
    ([301, 302].includes(actualResponse.status) && request.method === 'POST') ||
    (actualResponse.status === 303 &&
      !GET_OR_HEAD.includes(request.method))
  ) {
    // then:
    // 1. Set request's method to `GET` and request's body to null.
    request.method = 'GET'
    request.body = null

    // 2. For each headerName of request-body-header name, delete headerName from
    // request's header list.
    for (const headerName of requestBodyHeader) {
      request.headersList.delete(headerName)
    }
  }

  // 13. If request's current URL's origin is not same origin with locationURL's
  // origin, then for each headerName of CORS non-wildcard request-header name,
  // delete headerName from request's header list.
  if (!sameOrigin(requestCurrentURL(request), locationURL)) {
    // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
    request.headersList.delete('authorization')

    // https://fetch.spec.whatwg.org/#authentication-entries
    request.headersList.delete('proxy-authorization', true)

    // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
    request.headersList.delete('cookie')
    request.headersList.delete('host')
  }

  // 14. If request's body is non-null, then set request's body to the first return
  // value of safely extracting request's body's source.
  if (request.body != null) {
    assert(request.body.source != null)
    request.body = safelyExtractBody(request.body.source)[0]
  }

  // 15. Let timingInfo be fetchParams's timing info.
  const timingInfo = fetchParams.timingInfo

  // 16. Set timingInfo's redirect end time and post-redirect start time to the
  // coarsened shared current time given fetchParams's cross-origin isolated
  // capability.
  timingInfo.redirectEndTime = timingInfo.postRedirectStartTime =
    coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability)

  // 17. If timingInfo's redirect start time is 0, then set timingInfo's
  // redirect start time to timingInfo's start time.
  if (timingInfo.redirectStartTime === 0) {
    timingInfo.redirectStartTime = timingInfo.startTime
  }

  // 18. Append locationURL to request's URL list.
  request.urlList.push(locationURL)

  // 19. Invoke set request's referrer policy on redirect on request and
  // actualResponse.
  setRequestReferrerPolicyOnRedirect(request, actualResponse)

  // 20. Return the result of running main fetch given fetchParams and true.
  return mainFetch(fetchParams, true)
}
// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
/**
 * Implements the "HTTP-network-or-cache fetch" algorithm from the Fetch
 * spec. An HTTP cache is not implemented here (all cache steps are TODOs),
 * so every request is ultimately forwarded to `httpNetworkFetch`.
 *
 * @param {*} fetchParams fetch params record (holds request, controller, timing info)
 * @param {boolean} [isAuthenticationFetch=false] true when retrying after an authentication challenge
 * @param {boolean} [isNewConnectionFetch=false] true when retrying on a fresh connection (421 handling)
 * @returns {Promise<*>} a response, or a network-error response
 */
async function httpNetworkOrCacheFetch (
  fetchParams,
  isAuthenticationFetch = false,
  isNewConnectionFetch = false
) {
  // 1. Let request be fetchParams’s request.
  const request = fetchParams.request

  // 2. Let httpFetchParams be null.
  let httpFetchParams = null

  // 3. Let httpRequest be null.
  let httpRequest = null

  // 4. Let response be null.
  let response = null

  // 5. Let storedResponse be null.
  // TODO: cache

  // 6. Let httpCache be null.
  const httpCache = null

  // 7. Let the revalidatingFlag be unset.
  const revalidatingFlag = false

  // 8. Run these steps, but abort when the ongoing fetch is terminated:

  //    1. If request’s window is "no-window" and request’s redirect mode is
  //    "error", then set httpFetchParams to fetchParams and httpRequest to
  //    request.
  if (request.window === 'no-window' && request.redirect === 'error') {
    httpFetchParams = fetchParams
    httpRequest = request
  } else {
    // Otherwise:

    // 1. Set httpRequest to a clone of request.
    httpRequest = makeRequest(request)

    // 2. Set httpFetchParams to a copy of fetchParams.
    httpFetchParams = { ...fetchParams }

    // 3. Set httpFetchParams’s request to httpRequest.
    httpFetchParams.request = httpRequest
  }

  //    3. Let includeCredentials be true if one of
  const includeCredentials =
    request.credentials === 'include' ||
    (request.credentials === 'same-origin' &&
      request.responseTainting === 'basic')

  //    4. Let contentLength be httpRequest’s body’s length, if httpRequest’s
  //    body is non-null; otherwise null.
  const contentLength = httpRequest.body ? httpRequest.body.length : null

  //    5. Let contentLengthHeaderValue be null.
  let contentLengthHeaderValue = null

  //    6. If httpRequest’s body is null and httpRequest’s method is `POST` or
  //    `PUT`, then set contentLengthHeaderValue to `0`.
  if (
    httpRequest.body == null &&
    ['POST', 'PUT'].includes(httpRequest.method)
  ) {
    contentLengthHeaderValue = '0'
  }

  //    7. If contentLength is non-null, then set contentLengthHeaderValue to
  //    contentLength, serialized and isomorphic encoded.
  if (contentLength != null) {
    contentLengthHeaderValue = isomorphicEncode(`${contentLength}`)
  }

  //    8. If contentLengthHeaderValue is non-null, then append
  //    `Content-Length`/contentLengthHeaderValue to httpRequest’s header
  //    list.
  if (contentLengthHeaderValue != null) {
    httpRequest.headersList.append('content-length', contentLengthHeaderValue)
  }

  //    9. If contentLengthHeaderValue is non-null, then append (`Content-Length`,
  //    contentLengthHeaderValue) to httpRequest’s header list.

  //    10. If contentLength is non-null and httpRequest’s keepalive is true,
  //    then:
  if (contentLength != null && httpRequest.keepalive) {
    // NOTE: keepalive is a noop outside of browser context.
  }

  //    11. If httpRequest’s referrer is a URL, then append
  //    `Referer`/httpRequest’s referrer, serialized and isomorphic encoded,
  //    to httpRequest’s header list.
  if (httpRequest.referrer instanceof URL) {
    httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href))
  }

  //    12. Append a request `Origin` header for httpRequest.
  appendRequestOriginHeader(httpRequest)

  //    13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA]
  appendFetchMetadata(httpRequest)

  //    14. If httpRequest’s header list does not contain `User-Agent`, then
  //    user agents should append `User-Agent`/default `User-Agent` value to
  //    httpRequest’s header list.
  if (!httpRequest.headersList.contains('user-agent')) {
    httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node')
  }

  //    15. If httpRequest’s cache mode is "default" and httpRequest’s header
  //    list contains `If-Modified-Since`, `If-None-Match`,
  //    `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set
  //    httpRequest’s cache mode to "no-store".
  if (
    httpRequest.cache === 'default' &&
    (httpRequest.headersList.contains('if-modified-since') ||
      httpRequest.headersList.contains('if-none-match') ||
      httpRequest.headersList.contains('if-unmodified-since') ||
      httpRequest.headersList.contains('if-match') ||
      httpRequest.headersList.contains('if-range'))
  ) {
    httpRequest.cache = 'no-store'
  }

  //    16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent
  //    no-cache cache-control header modification flag is unset, and
  //    httpRequest’s header list does not contain `Cache-Control`, then append
  //    `Cache-Control`/`max-age=0` to httpRequest’s header list.
  if (
    httpRequest.cache === 'no-cache' &&
    !httpRequest.preventNoCacheCacheControlHeaderModification &&
    !httpRequest.headersList.contains('cache-control')
  ) {
    httpRequest.headersList.append('cache-control', 'max-age=0')
  }

  //    17. If httpRequest’s cache mode is "no-store" or "reload", then:
  if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') {
    // 1. If httpRequest’s header list does not contain `Pragma`, then append
    // `Pragma`/`no-cache` to httpRequest’s header list.
    if (!httpRequest.headersList.contains('pragma')) {
      httpRequest.headersList.append('pragma', 'no-cache')
    }

    // 2. If httpRequest’s header list does not contain `Cache-Control`,
    // then append `Cache-Control`/`no-cache` to httpRequest’s header list.
    if (!httpRequest.headersList.contains('cache-control')) {
      httpRequest.headersList.append('cache-control', 'no-cache')
    }
  }

  //    18. If httpRequest’s header list contains `Range`, then append
  //    `Accept-Encoding`/`identity` to httpRequest’s header list.
  if (httpRequest.headersList.contains('range')) {
    httpRequest.headersList.append('accept-encoding', 'identity')
  }

  //    19. Modify httpRequest’s header list per HTTP. Do not append a given
  //    header if httpRequest’s header list contains that header’s name.
  //    TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129
  if (!httpRequest.headersList.contains('accept-encoding')) {
    if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) {
      httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate')
    } else {
      httpRequest.headersList.append('accept-encoding', 'gzip, deflate')
    }
  }

  // The Host header is supplied by the underlying dispatcher; a stale value
  // cloned from the original request must not leak through.
  httpRequest.headersList.delete('host')

  //    20. If includeCredentials is true, then:
  if (includeCredentials) {
    // 1. If the user agent is not configured to block cookies for httpRequest
    // (see section 7 of [COOKIES]), then:
    // TODO: credentials
    // 2. If httpRequest’s header list does not contain `Authorization`, then:
    // TODO: credentials
  }

  //    21. If there’s a proxy-authentication entry, use it as appropriate.
  //    TODO: proxy-authentication

  //    22. Set httpCache to the result of determining the HTTP cache
  //    partition, given httpRequest.
  //    TODO: cache

  //    23. If httpCache is null, then set httpRequest’s cache mode to
  //    "no-store".
  if (httpCache == null) {
    httpRequest.cache = 'no-store'
  }

  //    24. If httpRequest’s cache mode is neither "no-store" nor "reload",
  //    then:
  // FIX: the spec inspects the request's *cache mode*, not its mode —
  // "no-store"/"reload" are cache-mode values, so the previous `.mode`
  // comparison was vacuously true.
  if (httpRequest.cache !== 'no-store' && httpRequest.cache !== 'reload') {
    // TODO: cache
  }

  // 9. If aborted, then return the appropriate network error for fetchParams.
  // TODO

  // 10. If response is null, then:
  if (response == null) {
    // 1. If httpRequest’s cache mode is "only-if-cached", then return a
    // network error.
    // FIX: "only-if-cached" is a cache-mode value, not a request-mode value;
    // checking `.mode` here could never match. (With no cache implemented,
    // `cache` is forced to "no-store" above, so this branch remains a
    // spec-fidelity guard.)
    if (httpRequest.cache === 'only-if-cached') {
      return makeNetworkError('only if cached')
    }

    // 2. Let forwardResponse be the result of running HTTP-network fetch
    // given httpFetchParams, includeCredentials, and isNewConnectionFetch.
    const forwardResponse = await httpNetworkFetch(
      httpFetchParams,
      includeCredentials,
      isNewConnectionFetch
    )

    // 3. If httpRequest’s method is unsafe and forwardResponse’s status is
    // in the range 200 to 399, inclusive, invalidate appropriate stored
    // responses in httpCache, as per the "Invalidation" chapter of HTTP
    // Caching, and set storedResponse to null. [HTTP-CACHING]
    if (
      !safeMethodsSet.has(httpRequest.method) &&
      forwardResponse.status >= 200 &&
      forwardResponse.status <= 399
    ) {
      // TODO: cache
    }

    // 4. If the revalidatingFlag is set and forwardResponse’s status is 304,
    // then:
    if (revalidatingFlag && forwardResponse.status === 304) {
      // TODO: cache
    }

    // 5. If response is null, then:
    if (response == null) {
      // 1. Set response to forwardResponse.
      response = forwardResponse

      // 2. Store httpRequest and forwardResponse in httpCache, as per the
      // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING]
      // TODO: cache
    }
  }

  // 11. Set response’s URL list to a clone of httpRequest’s URL list.
  response.urlList = [...httpRequest.urlList]

  // 12. If httpRequest’s header list contains `Range`, then set response’s
  // range-requested flag.
  if (httpRequest.headersList.contains('range')) {
    response.rangeRequested = true
  }

  // 13. Set response’s request-includes-credentials to includeCredentials.
  response.requestIncludesCredentials = includeCredentials

  // 14. If response’s status is 401, httpRequest’s response tainting is not
  // "cors", includeCredentials is true, and request’s window is an environment
  // settings object, then:
  // TODO

  // 15. If response’s status is 407, then:
  if (response.status === 407) {
    // 1. If request’s window is "no-window", then return a network error.
    if (request.window === 'no-window') {
      return makeNetworkError()
    }

    // 2. ???

    // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams.
    if (isCancelled(fetchParams)) {
      return makeAppropriateNetworkError(fetchParams)
    }

    // 4. Prompt the end user as appropriate in request’s window and store
    // the result as a proxy-authentication entry. [HTTP-AUTH]
    // TODO: Invoke some kind of callback?

    // 5. Set response to the result of running HTTP-network-or-cache fetch given
    // fetchParams.
    // TODO
    return makeNetworkError('proxy authentication required')
  }

  // 16. If all of the following are true
  if (
    // response’s status is 421
    response.status === 421 &&
    // isNewConnectionFetch is false
    !isNewConnectionFetch &&
    // request’s body is null, or request’s body is non-null and request’s body’s source is non-null
    (request.body == null || request.body.source != null)
  ) {
    // then:

    // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
    if (isCancelled(fetchParams)) {
      return makeAppropriateNetworkError(fetchParams)
    }

    // 2. Set response to the result of running HTTP-network-or-cache
    // fetch given fetchParams, isAuthenticationFetch, and true.

    // TODO (spec): The spec doesn't specify this but we need to cancel
    // the active response before we can start a new one.
    // https://github.com/whatwg/fetch/issues/1293
    fetchParams.controller.connection.destroy()

    response = await httpNetworkOrCacheFetch(
      fetchParams,
      isAuthenticationFetch,
      true
    )
  }

  // 17. If isAuthenticationFetch is true, then create an authentication entry
  if (isAuthenticationFetch) {
    // TODO
  }

  // 18. Return response.
  return response
}
// https://fetch.spec.whatwg.org/#http-network-fetch
/**
 * Implements the "HTTP-network fetch" algorithm from the Fetch spec on top
 * of an undici dispatcher. Transmits the request body (if any), translates
 * dispatcher events into a response whose body is a ReadableStream, applies
 * content-decoding, and wires abort/termination handling into the fetch
 * controller.
 *
 * @param {*} fetchParams fetch params record (request, controller, timing info)
 * @param {boolean} [includeCredentials=false]
 * @param {boolean} [forceNewConnection=false]
 * @returns {Promise<*>} a response, or a network-error response
 */
async function httpNetworkFetch (
  fetchParams,
  includeCredentials = false,
  forceNewConnection = false
) {
  // A previous connection for this fetch must already be torn down before a
  // new one is installed.
  assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed)

  fetchParams.controller.connection = {
    abort: null,
    destroyed: false,
    destroy (err) {
      if (!this.destroyed) {
        this.destroyed = true
        this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError'))
      }
    }
  }

  // 1. Let request be fetchParams’s request.
  const request = fetchParams.request

  // 2. Let response be null.
  let response = null

  // 3. Let timingInfo be fetchParams’s timing info.
  const timingInfo = fetchParams.timingInfo

  // 4. Let httpCache be the result of determining the HTTP cache partition,
  // given request.
  // TODO: cache
  const httpCache = null

  // 5. If httpCache is null, then set request’s cache mode to "no-store".
  if (httpCache == null) {
    request.cache = 'no-store'
  }

  // 6. Let networkPartitionKey be the result of determining the network
  // partition key given request.
  // TODO

  // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise
  // "no".
  const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars

  // 8. Switch on request’s mode:
  if (request.mode === 'websocket') {
    // Let connection be the result of obtaining a WebSocket connection,
    // given request’s current URL.
    // TODO
  } else {
    // Let connection be the result of obtaining a connection, given
    // networkPartitionKey, request’s current URL’s origin,
    // includeCredentials, and forceNewConnection.
    // TODO
  }

  // 9. Run these steps, but abort when the ongoing fetch is terminated:

  //    1. If connection is failure, then return a network error.

  //    2. Set timingInfo’s final connection timing info to the result of
  //    calling clamp and coarsen connection timing info with connection’s
  //    timing info, timingInfo’s post-redirect start time, and fetchParams’s
  //    cross-origin isolated capability.

  //    3. If connection is not an HTTP/2 connection, request’s body is non-null,
  //    and request’s body’s source is null, then append (`Transfer-Encoding`,
  //    `chunked`) to request’s header list.

  //    4. Set timingInfo’s final network-request start time to the coarsened
  //    shared current time given fetchParams’s cross-origin isolated
  //    capability.

  //    5. Set response to the result of making an HTTP request over connection
  //    using request with the following caveats:

  //        - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS]
  //        [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH]

  //        - If request’s body is non-null, and request’s body’s source is null,
  //        then the user agent may have a buffer of up to 64 kibibytes and store
  //        a part of request’s body in that buffer. If the user agent reads from
  //        request’s body beyond that buffer’s size and the user agent needs to
  //        resend request, then instead return a network error.

  //        - Set timingInfo’s final network-response start time to the coarsened
  //        shared current time given fetchParams’s cross-origin isolated capability,
  //        immediately after the user agent’s HTTP parser receives the first byte
  //        of the response (e.g., frame header bytes for HTTP/2 or response status
  //        line for HTTP/1.x).

  //        - Wait until all the headers are transmitted.

  //        - Any responses whose status is in the range 100 to 199, inclusive,
  //        and is not 101, are to be ignored, except for the purposes of setting
  //        timingInfo’s final network-response start time above.

  //        - If request’s header list contains `Transfer-Encoding`/`chunked` and
  //        response is transferred via HTTP/1.0 or older, then return a network
  //        error.

  //        - If the HTTP request results in a TLS client certificate dialog, then:

  //            1. If request’s window is an environment settings object, make the
  //            dialog available in request’s window.

  //            2. Otherwise, return a network error.

  // To transmit request’s body body, run these steps:
  let requestBody = null

  // 1. If body is null and fetchParams’s process request end-of-body is
  // non-null, then queue a fetch task given fetchParams’s process request
  // end-of-body and fetchParams’s task destination.
  if (request.body == null && fetchParams.processRequestEndOfBody) {
    queueMicrotask(() => fetchParams.processRequestEndOfBody())
  } else if (request.body != null) {
    // 2. Otherwise, if body is non-null:

    //    1. Let processBodyChunk given bytes be these steps:
    const processBodyChunk = async function * (bytes) {
      // 1. If the ongoing fetch is terminated, then abort these steps.
      if (isCancelled(fetchParams)) {
        return
      }

      // 2. Run this step in parallel: transmit bytes.
      yield bytes

      // 3. If fetchParams’s process request body is non-null, then run
      // fetchParams’s process request body given bytes’s length.
      fetchParams.processRequestBodyChunkLength?.(bytes.byteLength)
    }

    //    2. Let processEndOfBody be these steps:
    const processEndOfBody = () => {
      // 1. If fetchParams is canceled, then abort these steps.
      if (isCancelled(fetchParams)) {
        return
      }

      // 2. If fetchParams’s process request end-of-body is non-null,
      // then run fetchParams’s process request end-of-body.
      if (fetchParams.processRequestEndOfBody) {
        fetchParams.processRequestEndOfBody()
      }
    }

    //    3. Let processBodyError given e be these steps:
    const processBodyError = (e) => {
      // 1. If fetchParams is canceled, then abort these steps.
      if (isCancelled(fetchParams)) {
        return
      }

      // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller.
      if (e.name === 'AbortError') {
        fetchParams.controller.abort()
      } else {
        fetchParams.controller.terminate(e)
      }
    }

    //    4. Incrementally read request’s body given processBodyChunk, processEndOfBody,
    //    processBodyError, and fetchParams’s task destination.
    requestBody = (async function * () {
      try {
        for await (const bytes of request.body.stream) {
          yield * processBodyChunk(bytes)
        }
        processEndOfBody()
      } catch (err) {
        processBodyError(err)
      }
    })()
  }

  try {
    // socket is only provided for websockets
    const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody })

    if (socket) {
      response = makeResponse({ status, statusText, headersList, socket })
    } else {
      const iterator = body[Symbol.asyncIterator]()
      fetchParams.controller.next = () => iterator.next()

      response = makeResponse({ status, statusText, headersList })
    }
  } catch (err) {
    // 10. If aborted, then:
    if (err.name === 'AbortError') {
      // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame.
      fetchParams.controller.connection.destroy()

      // 2. Return the appropriate network error for fetchParams.
      return makeAppropriateNetworkError(fetchParams, err)
    }

    return makeNetworkError(err)
  }

  // 11. Let pullAlgorithm be an action that resumes the ongoing fetch
  // if it is suspended.
  const pullAlgorithm = () => {
    fetchParams.controller.resume()
  }

  // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s
  // controller with reason, given reason.
  const cancelAlgorithm = (reason) => {
    fetchParams.controller.abort(reason)
  }

  // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by
  // the user agent.
  // TODO

  // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object
  // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent.
  // TODO

  // 15. Let stream be a new ReadableStream.
  // 16. Set up stream with pullAlgorithm set to pullAlgorithm,
  //     cancelAlgorithm set to cancelAlgorithm, highWaterMark set to
  //     highWaterMark, and sizeAlgorithm set to sizeAlgorithm.
  // Lazily load the web-streams implementation on first use.
  if (!ReadableStream) {
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }

  const stream = new ReadableStream(
    {
      async start (controller) {
        fetchParams.controller.controller = controller
      },
      async pull (controller) {
        await pullAlgorithm(controller)
      },
      async cancel (reason) {
        await cancelAlgorithm(reason)
      }
    },
    {
      highWaterMark: 0,
      size () {
        return 1
      }
    }
  )

  // 17. Run these steps, but abort when the ongoing fetch is terminated:

  //    1. Set response’s body to a new body whose stream is stream.
  response.body = { stream }

  //    2. If response is not a network error and request’s cache mode is
  //    not "no-store", then update response in httpCache for request.
  //    TODO

  //    3. If includeCredentials is true and the user agent is not configured
  //    to block cookies for request (see section 7 of [COOKIES]), then run the
  //    "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on
  //    the value of each header whose name is a byte-case-insensitive match for
  //    `Set-Cookie` in response’s header list, if any, and request’s current URL.
  //    TODO

  // 18. If aborted, then:
  // TODO

  // 19. Run these steps in parallel:

  //    1. Run these steps, but abort when fetchParams is canceled:
  fetchParams.controller.on('terminated', onAborted)

  fetchParams.controller.resume = async () => {
    // 1. While true
    while (true) {
      // 1-3. See onData...

      // 4. Set bytes to the result of handling content codings given
      // codings and bytes.
      let bytes
      let isFailure
      try {
        const { done, value } = await fetchParams.controller.next()

        if (isAborted(fetchParams)) {
          break
        }

        bytes = done ? undefined : value
      } catch (err) {
        if (fetchParams.controller.ended && !timingInfo.encodedBodySize) {
          // zlib doesn't like empty streams.
          bytes = undefined
        } else {
          bytes = err

          // err may be propagated from the result of calling readablestream.cancel,
          // which might not be an error. https://github.com/nodejs/undici/issues/2009
          isFailure = true
        }
      }

      if (bytes === undefined) {
        // 2. Otherwise, if the bytes transmission for response’s message
        // body is done normally and stream is readable, then close
        // stream, finalize response for fetchParams and response, and
        // abort these in-parallel steps.
        readableStreamClose(fetchParams.controller.controller)

        finalizeResponse(fetchParams, response)

        return
      }

      // 5. Increase timingInfo’s decoded body size by bytes’s length.
      timingInfo.decodedBodySize += bytes?.byteLength ?? 0

      // 6. If bytes is failure, then terminate fetchParams’s controller.
      if (isFailure) {
        fetchParams.controller.terminate(bytes)
        return
      }

      // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes
      // into stream.
      fetchParams.controller.controller.enqueue(new Uint8Array(bytes))

      // 8. If stream is errored, then terminate the ongoing fetch.
      if (isErrored(stream)) {
        fetchParams.controller.terminate()
        return
      }

      // 9. If stream doesn’t need more data ask the user agent to suspend
      // the ongoing fetch.
      if (!fetchParams.controller.controller.desiredSize) {
        return
      }
    }
  }

  //    2. If aborted, then:
  function onAborted (reason) {
    // 2. If fetchParams is aborted, then:
    if (isAborted(fetchParams)) {
      // 1. Set response’s aborted flag.
      response.aborted = true

      // 2. If stream is readable, then error stream with the result of
      //    deserialize a serialized abort reason given fetchParams’s
      //    controller’s serialized abort reason and an
      //    implementation-defined realm.
      if (isReadable(stream)) {
        fetchParams.controller.controller.error(
          fetchParams.controller.serializedAbortReason
        )
      }
    } else {
      // 3. Otherwise, if stream is readable, error stream with a TypeError.
      if (isReadable(stream)) {
        fetchParams.controller.controller.error(new TypeError('terminated', {
          cause: isErrorLike(reason) ? reason : undefined
        }))
      }
    }

    // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame.
    // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so.
    fetchParams.controller.connection.destroy()
  }

  // 20. Return response.
  return response

  // Bridges the undici dispatcher's callback API into a Promise that resolves
  // once response headers (or an upgrade) arrive.
  async function dispatch ({ body }) {
    const url = requestCurrentURL(request)
    /** @type {import('../..').Agent} */
    const agent = fetchParams.controller.dispatcher

    return new Promise((resolve, reject) => agent.dispatch(
      {
        path: url.pathname + url.search,
        origin: url.origin,
        method: request.method,
        body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body,
        headers: request.headersList.entries,
        maxRedirections: 0,
        upgrade: request.mode === 'websocket' ? 'websocket' : undefined
      },
      {
        body: null,
        abort: null,

        onConnect (abort) {
          // TODO (fix): Do we need connection here?
          const { connection } = fetchParams.controller

          if (connection.destroyed) {
            abort(new DOMException('The operation was aborted.', 'AbortError'))
          } else {
            fetchParams.controller.on('terminated', abort)
            this.abort = connection.abort = abort
          }
        },

        onHeaders (status, headersList, resume, statusText) {
          // Informational (1xx) responses are ignored per the spec.
          if (status < 200) {
            return
          }

          let codings = []
          let location = ''

          const headers = new Headers()

          // For H2, the headers are a plain JS object
          // We distinguish between them and iterate accordingly
          if (Array.isArray(headersList)) {
            for (let n = 0; n < headersList.length; n += 2) {
              const key = headersList[n + 0].toString('latin1')
              const val = headersList[n + 1].toString('latin1')
              if (key.toLowerCase() === 'content-encoding') {
                // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
                // "All content-coding values are case-insensitive..."
                // FIX: codings are listed in the order they were applied
                // (RFC 9110 §8.4), so decoders must run in reverse order —
                // mirroring the HTTP/2 branch below. Previously this branch
                // did not reverse, decoding multi-coding responses in the
                // wrong order.
                codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()
              } else if (key.toLowerCase() === 'location') {
                location = val
              }

              headers[kHeadersList].append(key, val)
            }
          } else {
            const keys = Object.keys(headersList)
            for (const key of keys) {
              const val = headersList[key]
              if (key.toLowerCase() === 'content-encoding') {
                // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
                // "All content-coding values are case-insensitive..."
                codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()
              } else if (key.toLowerCase() === 'location') {
                location = val
              }

              headers[kHeadersList].append(key, val)
            }
          }

          this.body = new Readable({ read: resume })

          const decoders = []

          const willFollow = request.redirect === 'follow' &&
            location &&
            redirectStatusSet.has(status)

          // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
          if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
            for (const coding of codings) {
              // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
              if (coding === 'x-gzip' || coding === 'gzip') {
                decoders.push(zlib.createGunzip({
                  // Be less strict when decoding compressed responses, since sometimes
                  // servers send slightly invalid responses that are still accepted
                  // by common browsers.
                  // Always using Z_SYNC_FLUSH is what cURL does.
                  flush: zlib.constants.Z_SYNC_FLUSH,
                  finishFlush: zlib.constants.Z_SYNC_FLUSH
                }))
              } else if (coding === 'deflate') {
                decoders.push(zlib.createInflate())
              } else if (coding === 'br') {
                decoders.push(zlib.createBrotliDecompress())
              } else {
                // Unknown coding: deliver the body undecoded.
                decoders.length = 0
                break
              }
            }
          }

          resolve({
            status,
            statusText,
            headersList: headers[kHeadersList],
            body: decoders.length
              ? pipeline(this.body, ...decoders, () => { })
              : this.body.on('error', () => { })
          })

          return true
        },

        onData (chunk) {
          if (fetchParams.controller.dump) {
            return
          }

          // 1. If one or more bytes have been transmitted from response’s
          // message body, then:

          //  1. Let bytes be the transmitted bytes.
          const bytes = chunk

          //  2. Let codings be the result of extracting header list values
          //  given `Content-Encoding` and response’s header list.
          //  See pullAlgorithm.

          //  3. Increase timingInfo’s encoded body size by bytes’s length.
          timingInfo.encodedBodySize += bytes.byteLength

          //  4. See pullAlgorithm...
          return this.body.push(bytes)
        },

        onComplete () {
          if (this.abort) {
            fetchParams.controller.off('terminated', this.abort)
          }

          fetchParams.controller.ended = true

          this.body.push(null)
        },

        onError (error) {
          if (this.abort) {
            fetchParams.controller.off('terminated', this.abort)
          }

          this.body?.destroy(error)

          fetchParams.controller.terminate(error)

          reject(error)
        },

        onUpgrade (status, headersList, socket) {
          if (status !== 101) {
            return
          }

          const headers = new Headers()

          for (let n = 0; n < headersList.length; n += 2) {
            const key = headersList[n + 0].toString('latin1')
            const val = headersList[n + 1].toString('latin1')

            headers[kHeadersList].append(key, val)
          }

          resolve({
            status,
            statusText: STATUS_CODES[status],
            headersList: headers[kHeadersList],
            socket
          })

          return true
        }
      }
    ))
  }
}
module . exports = {
fetch ,
Fetch ,
fetching ,
finalizeAndReportTiming
}
/***/ } ) ,
/***/ 8359 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
/* globals AbortController */
const { extractBody , mixinBody , cloneBody } = _ _nccwpck _require _ _ ( 9990 )
const { Headers , fill : fillHeaders , HeadersList } = _ _nccwpck _require _ _ ( 554 )
const { FinalizationRegistry } = _ _nccwpck _require _ _ ( 6436 ) ( )
const util = _ _nccwpck _require _ _ ( 3983 )
const {
isValidHTTPToken ,
sameOrigin ,
normalizeMethod ,
makePolicyContainer ,
normalizeMethodRecord
} = _ _nccwpck _require _ _ ( 2538 )
const {
forbiddenMethodsSet ,
corsSafeListedMethodsSet ,
referrerPolicy ,
requestRedirect ,
requestMode ,
requestCredentials ,
requestCache ,
requestDuplex
} = _ _nccwpck _require _ _ ( 1037 )
const { kEnumerableProperty } = util
const { kHeaders , kSignal , kState , kGuard , kRealm } = _ _nccwpck _require _ _ ( 5861 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { getGlobalOrigin } = _ _nccwpck _require _ _ ( 1246 )
const { URLSerializer } = _ _nccwpck _require _ _ ( 685 )
const { kHeadersList , kConstruct } = _ _nccwpck _require _ _ ( 2785 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { getMaxListeners , setMaxListeners , getEventListeners , defaultMaxListeners } = _ _nccwpck _require _ _ ( 2361 )
let TransformStream = globalThis . TransformStream
const kAbortController = Symbol ( 'abortController' )
const requestFinalizer = new FinalizationRegistry ( ( { signal , abort } ) => {
signal . removeEventListener ( 'abort' , abort )
} )
// https://fetch.spec.whatwg.org/#request-class
class Request {
  // https://fetch.spec.whatwg.org/#dom-request
  constructor (input, init = {}) {
    // Internal construction path (e.g. clone()) — skip all init steps.
    if (input === kConstruct) {
      return
    }

    webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' })

    input = webidl.converters.RequestInfo(input)
    init = webidl.converters.RequestInit(init)

    // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object
    this[kRealm] = {
      settingsObject: {
        baseUrl: getGlobalOrigin(),
        get origin () {
          return this.baseUrl?.origin
        },
        policyContainer: makePolicyContainer()
      }
    }

    // 1. Let request be null.
    let request = null

    // 2. Let fallbackMode be null.
    let fallbackMode = null

    // 3. Let baseURL be this’s relevant settings object’s API base URL.
    const baseUrl = this[kRealm].settingsObject.baseUrl

    // 4. Let signal be null.
    let signal = null

    // 5. If input is a string, then:
    if (typeof input === 'string') {
      // 1. Let parsedURL be the result of parsing input with baseURL.
      // 2. If parsedURL is failure, then throw a TypeError.
      let parsedURL
      try {
        parsedURL = new URL(input, baseUrl)
      } catch (err) {
        throw new TypeError('Failed to parse URL from ' + input, { cause: err })
      }

      // 3. If parsedURL includes credentials, then throw a TypeError.
      if (parsedURL.username || parsedURL.password) {
        throw new TypeError(
          'Request cannot be constructed from a URL that includes credentials: ' +
            input
        )
      }

      // 4. Set request to a new request whose URL is parsedURL.
      request = makeRequest({ urlList: [parsedURL] })

      // 5. Set fallbackMode to "cors".
      fallbackMode = 'cors'
    } else {
      // 6. Otherwise:

      // 7. Assert: input is a Request object.
      assert(input instanceof Request)

      // 8. Set request to input’s request.
      request = input[kState]

      // 9. Set signal to input’s signal.
      signal = input[kSignal]
    }

    // 7. Let origin be this’s relevant settings object’s origin.
    const origin = this[kRealm].settingsObject.origin

    // 8. Let window be "client".
    let window = 'client'

    // 9. If request’s window is an environment settings object and its origin
    // is same origin with origin, then set window to request’s window.
    if (
      request.window?.constructor?.name === 'EnvironmentSettingsObject' &&
      sameOrigin(request.window, origin)
    ) {
      window = request.window
    }

    // 10. If init["window"] exists and is non-null, then throw a TypeError.
    if (init.window != null) {
      throw new TypeError(`'window' option '${window}' must be null`)
    }

    // 11. If init["window"] exists, then set window to "no-window".
    if ('window' in init) {
      window = 'no-window'
    }

    // 12. Set request to a new request with the following properties:
    request = makeRequest({
      // URL request’s URL.
      // undici implementation note: this is set as the first item in request's urlList in makeRequest
      // method request’s method.
      method: request.method,
      // header list A copy of request’s header list.
      // undici implementation note: headersList is cloned in makeRequest
      headersList: request.headersList,
      // unsafe-request flag Set.
      unsafeRequest: request.unsafeRequest,
      // client This’s relevant settings object.
      client: this[kRealm].settingsObject,
      // window window.
      window,
      // priority request’s priority.
      priority: request.priority,
      // origin request’s origin. The propagation of the origin is only significant for navigation requests
      // being handled by a service worker. In this scenario a request can have an origin that is different
      // from the current client.
      origin: request.origin,
      // referrer request’s referrer.
      referrer: request.referrer,
      // referrer policy request’s referrer policy.
      referrerPolicy: request.referrerPolicy,
      // mode request’s mode.
      mode: request.mode,
      // credentials mode request’s credentials mode.
      credentials: request.credentials,
      // cache mode request’s cache mode.
      cache: request.cache,
      // redirect mode request’s redirect mode.
      redirect: request.redirect,
      // integrity metadata request’s integrity metadata.
      integrity: request.integrity,
      // keepalive request’s keepalive.
      keepalive: request.keepalive,
      // reload-navigation flag request’s reload-navigation flag.
      reloadNavigation: request.reloadNavigation,
      // history-navigation flag request’s history-navigation flag.
      historyNavigation: request.historyNavigation,
      // URL list A clone of request’s URL list.
      urlList: [...request.urlList]
    })

    const initHasKey = Object.keys(init).length !== 0

    // 13. If init is not empty, then:
    if (initHasKey) {
      // 1. If request’s mode is "navigate", then set it to "same-origin".
      if (request.mode === 'navigate') {
        request.mode = 'same-origin'
      }

      // 2. Unset request’s reload-navigation flag.
      request.reloadNavigation = false

      // 3. Unset request’s history-navigation flag.
      request.historyNavigation = false

      // 4. Set request’s origin to "client".
      request.origin = 'client'

      // 5. Set request’s referrer to "client"
      request.referrer = 'client'

      // 6. Set request’s referrer policy to the empty string.
      request.referrerPolicy = ''

      // 7. Set request’s URL to request’s current URL.
      request.url = request.urlList[request.urlList.length - 1]

      // 8. Set request’s URL list to « request’s URL ».
      request.urlList = [request.url]
    }

    // 14. If init["referrer"] exists, then:
    if (init.referrer !== undefined) {
      // 1. Let referrer be init["referrer"].
      const referrer = init.referrer

      // 2. If referrer is the empty string, then set request’s referrer to "no-referrer".
      if (referrer === '') {
        request.referrer = 'no-referrer'
      } else {
        // 1. Let parsedReferrer be the result of parsing referrer with
        // baseURL.
        // 2. If parsedReferrer is failure, then throw a TypeError.
        let parsedReferrer
        try {
          parsedReferrer = new URL(referrer, baseUrl)
        } catch (err) {
          throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err })
        }

        // 3. If one of the following is true
        // - parsedReferrer’s scheme is "about" and path is the string "client"
        // - parsedReferrer’s origin is not same origin with origin
        // then set request’s referrer to "client".
        if (
          (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') ||
          (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl))
        ) {
          request.referrer = 'client'
        } else {
          // 4. Otherwise, set request’s referrer to parsedReferrer.
          request.referrer = parsedReferrer
        }
      }
    }

    // 15. If init["referrerPolicy"] exists, then set request’s referrer policy
    // to it.
    if (init.referrerPolicy !== undefined) {
      request.referrerPolicy = init.referrerPolicy
    }

    // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise.
    let mode
    if (init.mode !== undefined) {
      mode = init.mode
    } else {
      mode = fallbackMode
    }

    // 17. If mode is "navigate", then throw a TypeError.
    if (mode === 'navigate') {
      throw webidl.errors.exception({
        header: 'Request constructor',
        message: 'invalid request mode navigate.'
      })
    }

    // 18. If mode is non-null, set request’s mode to mode.
    if (mode != null) {
      request.mode = mode
    }

    // 19. If init["credentials"] exists, then set request’s credentials mode
    // to it.
    if (init.credentials !== undefined) {
      request.credentials = init.credentials
    }

    // 18. If init["cache"] exists, then set request’s cache mode to it.
    if (init.cache !== undefined) {
      request.cache = init.cache
    }

    // 21. If request’s cache mode is "only-if-cached" and request’s mode is
    // not "same-origin", then throw a TypeError.
    if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {
      throw new TypeError(
        "'only-if-cached' can be set only with 'same-origin' mode"
      )
    }

    // 22. If init["redirect"] exists, then set request’s redirect mode to it.
    if (init.redirect !== undefined) {
      request.redirect = init.redirect
    }

    // 23. If init["integrity"] exists, then set request’s integrity metadata to it.
    if (init.integrity != null) {
      request.integrity = String(init.integrity)
    }

    // 24. If init["keepalive"] exists, then set request’s keepalive to it.
    if (init.keepalive !== undefined) {
      request.keepalive = Boolean(init.keepalive)
    }

    // 25. If init["method"] exists, then:
    if (init.method !== undefined) {
      // 1. Let method be init["method"].
      let method = init.method

      // 2. If method is not a method or method is a forbidden method, then
      // throw a TypeError.
      if (!isValidHTTPToken(method)) {
        throw new TypeError(`'${method}' is not a valid HTTP method.`)
      }

      if (forbiddenMethodsSet.has(method.toUpperCase())) {
        throw new TypeError(`'${method}' HTTP method is unsupported.`)
      }

      // 3. Normalize method.
      method = normalizeMethodRecord[method] ?? normalizeMethod(method)

      // 4. Set request’s method to method.
      request.method = method
    }

    // 26. If init["signal"] exists, then set signal to it.
    if (init.signal !== undefined) {
      signal = init.signal
    }

    // 27. Set this’s request to request.
    this[kState] = request

    // 28. Set this’s signal to a new AbortSignal object with this’s relevant
    // Realm.
    // TODO: could this be simplified with AbortSignal.any
    // (https://dom.spec.whatwg.org/#dom-abortsignal-any)
    const ac = new AbortController()
    this[kSignal] = ac.signal
    this[kSignal][kRealm] = this[kRealm]

    // 29. If signal is not null, then make this’s signal follow signal.
    if (signal != null) {
      if (
        !signal ||
        typeof signal.aborted !== 'boolean' ||
        typeof signal.addEventListener !== 'function'
      ) {
        throw new TypeError(
          "Failed to construct 'Request': member signal is not of type AbortSignal."
        )
      }

      if (signal.aborted) {
        ac.abort(signal.reason)
      } else {
        // Keep a strong ref to ac while request object
        // is alive. This is needed to prevent AbortController
        // from being prematurely garbage collected.
        // See, https://github.com/nodejs/undici/issues/1926.
        this[kAbortController] = ac

        const acRef = new WeakRef(ac)
        const abort = function () {
          const ac = acRef.deref()
          if (ac !== undefined) {
            ac.abort(this.reason)
          }
        }

        // Third-party AbortControllers may not work with these.
        // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619.
        try {
          // If the max amount of listeners is equal to the default, increase it
          // This is only available in node >= v19.9.0
          if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) {
            setMaxListeners(100, signal)
          } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) {
            setMaxListeners(100, signal)
          }
        } catch {}

        util.addAbortListener(signal, abort)
        // The finalizer removes `abort` from `signal` once `ac` is collected.
        requestFinalizer.register(ac, { signal, abort })
      }
    }

    // 30. Set this’s headers to a new Headers object with this’s relevant
    // Realm, whose header list is request’s header list and guard is
    // "request".
    this[kHeaders] = new Headers(kConstruct)
    this[kHeaders][kHeadersList] = request.headersList
    this[kHeaders][kGuard] = 'request'
    this[kHeaders][kRealm] = this[kRealm]

    // 31. If this’s request’s mode is "no-cors", then:
    if (mode === 'no-cors') {
      // 1. If this’s request’s method is not a CORS-safelisted method,
      // then throw a TypeError.
      if (!corsSafeListedMethodsSet.has(request.method)) {
        // (fixed: the quote around the method name was previously unbalanced)
        throw new TypeError(
          `'${request.method}' is unsupported in no-cors mode.`
        )
      }

      // 2. Set this’s headers’s guard to "request-no-cors".
      this[kHeaders][kGuard] = 'request-no-cors'
    }

    // 32. If init is not empty, then:
    if (initHasKey) {
      /** @type {HeadersList} */
      const headersList = this[kHeaders][kHeadersList]
      // 1. Let headers be a copy of this’s headers and its associated header
      // list.
      // 2. If init["headers"] exists, then set headers to init["headers"].
      const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList)

      // 3. Empty this’s headers’s header list.
      headersList.clear()

      // 4. If headers is a Headers object, then for each header in its header
      // list, append header’s name/header’s value to this’s headers.
      if (headers instanceof HeadersList) {
        for (const [key, val] of headers) {
          headersList.append(key, val)
        }
        // Note: Copy the `set-cookie` meta-data.
        headersList.cookies = headers.cookies
      } else {
        // 5. Otherwise, fill this’s headers with headers.
        fillHeaders(this[kHeaders], headers)
      }
    }

    // 33. Let inputBody be input’s request’s body if input is a Request
    // object; otherwise null.
    const inputBody = input instanceof Request ? input[kState].body : null

    // 34. If either init["body"] exists and is non-null or inputBody is
    // non-null, and request’s method is `GET` or `HEAD`, then throw a
    // TypeError.
    if (
      (init.body != null || inputBody != null) &&
      (request.method === 'GET' || request.method === 'HEAD')
    ) {
      throw new TypeError('Request with GET/HEAD method cannot have body.')
    }

    // 35. Let initBody be null.
    let initBody = null

    // 36. If init["body"] exists and is non-null, then:
    if (init.body != null) {
      // 1. Let Content-Type be null.
      // 2. Set initBody and Content-Type to the result of extracting
      // init["body"], with keepalive set to request’s keepalive.
      const [extractedBody, contentType] = extractBody(
        init.body,
        request.keepalive
      )
      initBody = extractedBody

      // 3, If Content-Type is non-null and this’s headers’s header list does
      // not contain `Content-Type`, then append `Content-Type`/Content-Type to
      // this’s headers.
      if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) {
        this[kHeaders].append('content-type', contentType)
      }
    }

    // 37. Let inputOrInitBody be initBody if it is non-null; otherwise
    // inputBody.
    const inputOrInitBody = initBody ?? inputBody

    // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is
    // null, then:
    if (inputOrInitBody != null && inputOrInitBody.source == null) {
      // 1. If initBody is non-null and init["duplex"] does not exist,
      // then throw a TypeError.
      if (initBody != null && init.duplex == null) {
        throw new TypeError('RequestInit: duplex option is required when sending a body.')
      }

      // 2. If this’s request’s mode is neither "same-origin" nor "cors",
      // then throw a TypeError.
      if (request.mode !== 'same-origin' && request.mode !== 'cors') {
        throw new TypeError(
          'If request is made from ReadableStream, mode should be "same-origin" or "cors"'
        )
      }

      // 3. Set this’s request’s use-CORS-preflight flag.
      request.useCORSPreflightFlag = true
    }

    // 39. Let finalBody be inputOrInitBody.
    let finalBody = inputOrInitBody

    // 40. If initBody is null and inputBody is non-null, then:
    if (initBody == null && inputBody != null) {
      // 1. If input is unusable, then throw a TypeError.
      if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) {
        throw new TypeError(
          'Cannot construct a Request with a Request object that has already been used.'
        )
      }

      // 2. Set finalBody to the result of creating a proxy for inputBody.
      if (!TransformStream) {
        TransformStream = (__nccwpck_require__(5356).TransformStream)
      }

      // https://streams.spec.whatwg.org/#readablestream-create-a-proxy
      const identityTransform = new TransformStream()
      inputBody.stream.pipeThrough(identityTransform)
      finalBody = {
        source: inputBody.source,
        length: inputBody.length,
        stream: identityTransform.readable
      }
    }

    // 41. Set this’s request’s body to finalBody.
    this[kState].body = finalBody
  }

  // Returns request’s HTTP method, which is "GET" by default.
  get method () {
    webidl.brandCheck(this, Request)

    // The method getter steps are to return this’s request’s method.
    return this[kState].method
  }

  // Returns the URL of request as a string.
  get url () {
    webidl.brandCheck(this, Request)

    // The url getter steps are to return this’s request’s URL, serialized.
    return URLSerializer(this[kState].url)
  }

  // Returns a Headers object consisting of the headers associated with request.
  // Note that headers added in the network layer by the user agent will not
  // be accounted for in this object, e.g., the "Host" header.
  get headers () {
    webidl.brandCheck(this, Request)

    // The headers getter steps are to return this’s headers.
    return this[kHeaders]
  }

  // Returns the kind of resource requested by request, e.g., "document"
  // or "script".
  get destination () {
    webidl.brandCheck(this, Request)

    // The destination getter are to return this’s request’s destination.
    return this[kState].destination
  }

  // Returns the referrer of request. Its value can be a same-origin URL if
  // explicitly set in init, the empty string to indicate no referrer, and
  // "about:client" when defaulting to the global’s default. This is used
  // during fetching to determine the value of the `Referer` header of the
  // request being made.
  get referrer () {
    webidl.brandCheck(this, Request)

    // 1. If this’s request’s referrer is "no-referrer", then return the
    // empty string.
    if (this[kState].referrer === 'no-referrer') {
      return ''
    }

    // 2. If this’s request’s referrer is "client", then return
    // "about:client".
    if (this[kState].referrer === 'client') {
      return 'about:client'
    }

    // Return this’s request’s referrer, serialized.
    return this[kState].referrer.toString()
  }

  // Returns the referrer policy associated with request.
  // This is used during fetching to compute the value of the request’s
  // referrer.
  get referrerPolicy () {
    webidl.brandCheck(this, Request)

    // The referrerPolicy getter steps are to return this’s request’s referrer policy.
    return this[kState].referrerPolicy
  }

  // Returns the mode associated with request, which is a string indicating
  // whether the request will use CORS, or will be restricted to same-origin
  // URLs.
  get mode () {
    webidl.brandCheck(this, Request)

    // The mode getter steps are to return this’s request’s mode.
    return this[kState].mode
  }

  // Returns the credentials mode associated with request,
  // which is a string indicating whether credentials will be sent with the
  // request always, never, or only when sent to a same-origin URL.
  get credentials () {
    // (fixed: brand check added for consistency with every other getter)
    webidl.brandCheck(this, Request)

    // The credentials getter steps are to return this’s request’s credentials mode.
    return this[kState].credentials
  }

  // Returns the cache mode associated with request,
  // which is a string indicating how the request will
  // interact with the browser’s cache when fetching.
  get cache () {
    webidl.brandCheck(this, Request)

    // The cache getter steps are to return this’s request’s cache mode.
    return this[kState].cache
  }

  // Returns the redirect mode associated with request,
  // which is a string indicating how redirects for the
  // request will be handled during fetching. A request
  // will follow redirects by default.
  get redirect () {
    webidl.brandCheck(this, Request)

    // The redirect getter steps are to return this’s request’s redirect mode.
    return this[kState].redirect
  }

  // Returns request’s subresource integrity metadata, which is a
  // cryptographic hash of the resource being fetched. Its value
  // consists of multiple hashes separated by whitespace. [SRI]
  get integrity () {
    webidl.brandCheck(this, Request)

    // The integrity getter steps are to return this’s request’s integrity
    // metadata.
    return this[kState].integrity
  }

  // Returns a boolean indicating whether or not request can outlive the
  // global in which it was created.
  get keepalive () {
    webidl.brandCheck(this, Request)

    // The keepalive getter steps are to return this’s request’s keepalive.
    return this[kState].keepalive
  }

  // Returns a boolean indicating whether or not request is for a reload
  // navigation.
  get isReloadNavigation () {
    webidl.brandCheck(this, Request)

    // The isReloadNavigation getter steps are to return true if this’s
    // request’s reload-navigation flag is set; otherwise false.
    return this[kState].reloadNavigation
  }

  // Returns a boolean indicating whether or not request is for a history
  // navigation (a.k.a. back-foward navigation).
  get isHistoryNavigation () {
    webidl.brandCheck(this, Request)

    // The isHistoryNavigation getter steps are to return true if this’s request’s
    // history-navigation flag is set; otherwise false.
    return this[kState].historyNavigation
  }

  // Returns the signal associated with request, which is an AbortSignal
  // object indicating whether or not request has been aborted, and its
  // abort event handler.
  get signal () {
    webidl.brandCheck(this, Request)

    // The signal getter steps are to return this’s signal.
    return this[kSignal]
  }

  get body () {
    webidl.brandCheck(this, Request)

    return this[kState].body ? this[kState].body.stream : null
  }

  get bodyUsed () {
    webidl.brandCheck(this, Request)

    return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
  }

  get duplex () {
    webidl.brandCheck(this, Request)

    return 'half'
  }

  // Returns a clone of request.
  clone () {
    webidl.brandCheck(this, Request)

    // 1. If this is unusable, then throw a TypeError.
    if (this.bodyUsed || this.body?.locked) {
      throw new TypeError('unusable')
    }

    // 2. Let clonedRequest be the result of cloning this’s request.
    const clonedRequest = cloneRequest(this[kState])

    // 3. Let clonedRequestObject be the result of creating a Request object,
    // given clonedRequest, this’s headers’s guard, and this’s relevant Realm.
    const clonedRequestObject = new Request(kConstruct)
    clonedRequestObject[kState] = clonedRequest
    clonedRequestObject[kRealm] = this[kRealm]
    clonedRequestObject[kHeaders] = new Headers(kConstruct)
    clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList
    clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]
    clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]

    // 4. Make clonedRequestObject’s signal follow this’s signal.
    const ac = new AbortController()
    if (this.signal.aborted) {
      ac.abort(this.signal.reason)
    } else {
      util.addAbortListener(
        this.signal,
        () => {
          ac.abort(this.signal.reason)
        }
      )
    }
    clonedRequestObject[kSignal] = ac.signal

    // 4. Return clonedRequestObject.
    return clonedRequestObject
  }
}
mixinBody ( Request )
// Builds a new internal request record.
// https://fetch.spec.whatwg.org/#requests
function makeRequest (init) {
  // Baseline state of a freshly created request per the Fetch spec; any
  // field supplied in `init` overrides the corresponding default below.
  const defaults = {
    method: 'GET',
    localURLsOnly: false,
    unsafeRequest: false,
    body: null,
    client: null,
    reservedClient: null,
    replacesClientId: '',
    window: 'client',
    keepalive: false,
    serviceWorkers: 'all',
    initiator: '',
    destination: '',
    priority: null,
    origin: 'client',
    policyContainer: 'client',
    referrer: 'client',
    referrerPolicy: '',
    mode: 'no-cors',
    useCORSPreflightFlag: false,
    credentials: 'same-origin',
    useCredentials: false,
    cache: 'default',
    redirect: 'follow',
    integrity: '',
    cryptoGraphicsNonceMetadata: '',
    parserMetadata: '',
    reloadNavigation: false,
    historyNavigation: false,
    userActivation: false,
    taintedOrigin: false,
    redirectCount: 0,
    responseTainting: 'basic',
    preventNoCacheCacheControlHeaderModification: false,
    done: false,
    timingAllowFailed: false
  }

  const request = {
    ...defaults,
    ...init,
    // The header list is always a fresh HeadersList: cloned from the one in
    // `init` when given, so the caller's list is never shared.
    headersList: init.headersList
      ? new HeadersList(init.headersList)
      : new HeadersList()
  }

  // A request's URL is the first entry of its URL list.
  request.url = request.urlList[0]
  return request
}
// https://fetch.spec.whatwg.org/#concept-request-clone
function cloneRequest (request) {
  // 1. Copy every field of `request` except its body.
  const newRequest = makeRequest({ ...request, body: null })

  // 2-3. A null body needs no further work; otherwise the body itself must
  // be cloned rather than shared between the two requests.
  if (request.body == null) {
    return newRequest
  }

  newRequest.body = cloneBody(request.body)
  return newRequest
}
Object . defineProperties ( Request . prototype , {
method : kEnumerableProperty ,
url : kEnumerableProperty ,
headers : kEnumerableProperty ,
redirect : kEnumerableProperty ,
clone : kEnumerableProperty ,
signal : kEnumerableProperty ,
duplex : kEnumerableProperty ,
destination : kEnumerableProperty ,
body : kEnumerableProperty ,
bodyUsed : kEnumerableProperty ,
isHistoryNavigation : kEnumerableProperty ,
isReloadNavigation : kEnumerableProperty ,
keepalive : kEnumerableProperty ,
integrity : kEnumerableProperty ,
cache : kEnumerableProperty ,
credentials : kEnumerableProperty ,
attribute : kEnumerableProperty ,
referrerPolicy : kEnumerableProperty ,
referrer : kEnumerableProperty ,
mode : kEnumerableProperty ,
[ Symbol . toStringTag ] : {
value : 'Request' ,
configurable : true
}
} )
webidl . converters . Request = webidl . interfaceConverter (
Request
)
// https://fetch.spec.whatwg.org/#requestinfo
webidl . converters . RequestInfo = function ( V ) {
if ( typeof V === 'string' ) {
return webidl . converters . USVString ( V )
}
if ( V instanceof Request ) {
return webidl . converters . Request ( V )
}
return webidl . converters . USVString ( V )
}
webidl . converters . AbortSignal = webidl . interfaceConverter (
AbortSignal
)
// https://fetch.spec.whatwg.org/#requestinit
webidl . converters . RequestInit = webidl . dictionaryConverter ( [
{
key : 'method' ,
converter : webidl . converters . ByteString
} ,
{
key : 'headers' ,
converter : webidl . converters . HeadersInit
} ,
{
key : 'body' ,
converter : webidl . nullableConverter (
webidl . converters . BodyInit
)
} ,
{
key : 'referrer' ,
converter : webidl . converters . USVString
} ,
{
key : 'referrerPolicy' ,
converter : webidl . converters . DOMString ,
// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy
allowedValues : referrerPolicy
} ,
{
key : 'mode' ,
converter : webidl . converters . DOMString ,
// https://fetch.spec.whatwg.org/#concept-request-mode
allowedValues : requestMode
} ,
{
key : 'credentials' ,
converter : webidl . converters . DOMString ,
// https://fetch.spec.whatwg.org/#requestcredentials
allowedValues : requestCredentials
} ,
{
key : 'cache' ,
converter : webidl . converters . DOMString ,
// https://fetch.spec.whatwg.org/#requestcache
allowedValues : requestCache
} ,
{
key : 'redirect' ,
converter : webidl . converters . DOMString ,
// https://fetch.spec.whatwg.org/#requestredirect
allowedValues : requestRedirect
} ,
{
key : 'integrity' ,
converter : webidl . converters . DOMString
} ,
{
key : 'keepalive' ,
converter : webidl . converters . boolean
} ,
{
key : 'signal' ,
converter : webidl . nullableConverter (
( signal ) => webidl . converters . AbortSignal (
signal ,
{ strict : false }
)
)
} ,
{
key : 'window' ,
converter : webidl . converters . any
} ,
{
key : 'duplex' ,
converter : webidl . converters . DOMString ,
allowedValues : requestDuplex
}
] )
module . exports = { Request , makeRequest }
/***/ } ) ,
/***/ 7823 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { Headers , HeadersList , fill } = _ _nccwpck _require _ _ ( 554 )
const { extractBody , cloneBody , mixinBody } = _ _nccwpck _require _ _ ( 9990 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { kEnumerableProperty } = util
const {
isValidReasonPhrase ,
isCancelled ,
isAborted ,
isBlobLike ,
serializeJavascriptValueToJSONString ,
isErrorLike ,
isomorphicEncode
} = _ _nccwpck _require _ _ ( 2538 )
const {
redirectStatusSet ,
nullBodyStatus ,
DOMException
} = _ _nccwpck _require _ _ ( 1037 )
const { kState , kHeaders , kGuard , kRealm } = _ _nccwpck _require _ _ ( 5861 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { FormData } = _ _nccwpck _require _ _ ( 2015 )
const { getGlobalOrigin } = _ _nccwpck _require _ _ ( 1246 )
const { URLSerializer } = _ _nccwpck _require _ _ ( 685 )
const { kHeadersList , kConstruct } = _ _nccwpck _require _ _ ( 2785 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { types } = _ _nccwpck _require _ _ ( 3837 )
// Prefer the platform's global ReadableStream; only pull in the bundled
// web-streams implementation when the global is missing.
const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(5356).ReadableStream)
// TextEncoder always produces UTF-8 and its constructor takes no arguments;
// the 'utf-8' argument previously passed here was silently ignored.
const textEncoder = new TextEncoder()
// https://fetch.spec.whatwg.org/#response-class
class Response {
// Creates network error Response.
static error ( ) {
// TODO
const relevantRealm = { settingsObject : { } }
// The static error() method steps are to return the result of creating a
// Response object, given a new network error, "immutable", and this’ s
// relevant Realm.
const responseObject = new Response ( )
responseObject [ kState ] = makeNetworkError ( )
responseObject [ kRealm ] = relevantRealm
responseObject [ kHeaders ] [ kHeadersList ] = responseObject [ kState ] . headersList
responseObject [ kHeaders ] [ kGuard ] = 'immutable'
responseObject [ kHeaders ] [ kRealm ] = relevantRealm
return responseObject
}
// https://fetch.spec.whatwg.org/#dom-response-json
static json ( data , init = { } ) {
webidl . argumentLengthCheck ( arguments , 1 , { header : 'Response.json' } )
if ( init !== null ) {
init = webidl . converters . ResponseInit ( init )
}
// 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data.
const bytes = textEncoder . encode (
serializeJavascriptValueToJSONString ( data )
)
// 2. Let body be the result of extracting bytes.
const body = extractBody ( bytes )
// 3. Let responseObject be the result of creating a Response object, given a new response,
// "response", and this’ s relevant Realm.
const relevantRealm = { settingsObject : { } }
const responseObject = new Response ( )
responseObject [ kRealm ] = relevantRealm
responseObject [ kHeaders ] [ kGuard ] = 'response'
responseObject [ kHeaders ] [ kRealm ] = relevantRealm
// 4. Perform initialize a response given responseObject, init, and (body, "application/json").
initializeResponse ( responseObject , init , { body : body [ 0 ] , type : 'application/json' } )
// 5. Return responseObject.
return responseObject
}
// Creates a redirect Response that redirects to url with status status.
static redirect ( url , status = 302 ) {
const relevantRealm = { settingsObject : { } }
webidl . argumentLengthCheck ( arguments , 1 , { header : 'Response.redirect' } )
url = webidl . converters . USVString ( url )
status = webidl . converters [ 'unsigned short' ] ( status )
// 1. Let parsedURL be the result of parsing url with current settings
// object’ s API base URL.
// 2. If parsedURL is failure, then throw a TypeError.
// TODO: base-URL?
let parsedURL
try {
parsedURL = new URL ( url , getGlobalOrigin ( ) )
} catch ( err ) {
throw Object . assign ( new TypeError ( 'Failed to parse URL from ' + url ) , {
cause : err
} )
}
// 3. If status is not a redirect status, then throw a RangeError.
if ( ! redirectStatusSet . has ( status ) ) {
throw new RangeError ( 'Invalid status code ' + status )
}
// 4. Let responseObject be the result of creating a Response object,
// given a new response, "immutable", and this’ s relevant Realm.
const responseObject = new Response ( )
responseObject [ kRealm ] = relevantRealm
responseObject [ kHeaders ] [ kGuard ] = 'immutable'
responseObject [ kHeaders ] [ kRealm ] = relevantRealm
// 5. Set responseObject’ s response’ s status to status.
responseObject [ kState ] . status = status
// 6. Let value be parsedURL, serialized and isomorphic encoded.
const value = isomorphicEncode ( URLSerializer ( parsedURL ) )
// 7. Append `Location`/value to responseObject’ s response’ s header list.
responseObject [ kState ] . headersList . append ( 'location' , value )
// 8. Return responseObject.
return responseObject
}
// https://fetch.spec.whatwg.org/#dom-response
constructor ( body = null , init = { } ) {
if ( body !== null ) {
body = webidl . converters . BodyInit ( body )
}
init = webidl . converters . ResponseInit ( init )
// TODO
this [ kRealm ] = { settingsObject : { } }
// 1. Set this’ s response to a new response.
this [ kState ] = makeResponse ( { } )
// 2. Set this’ s headers to a new Headers object with this’ s relevant
// Realm, whose header list is this’ s response’ s header list and guard
// is "response".
this [ kHeaders ] = new Headers ( kConstruct )
this [ kHeaders ] [ kGuard ] = 'response'
this [ kHeaders ] [ kHeadersList ] = this [ kState ] . headersList
this [ kHeaders ] [ kRealm ] = this [ kRealm ]
// 3. Let bodyWithType be null.
let bodyWithType = null
// 4. If body is non-null, then set bodyWithType to the result of extracting body.
if ( body != null ) {
const [ extractedBody , type ] = extractBody ( body )
bodyWithType = { body : extractedBody , type }
}
// 5. Perform initialize a response given this, init, and bodyWithType.
initializeResponse ( this , init , bodyWithType )
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Returns response’ s type, e.g., "cors".
get type ( ) {
webidl . brandCheck ( this , Response )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// The type getter steps are to return this’ s response’ s type.
return this [ kState ] . type
2019-12-03 10:28:59 -05:00
}
2024-04-24 12:04:10 -04:00
// Returns response’ s URL, if it has one; otherwise the empty string.
get url ( ) {
webidl . brandCheck ( this , Response )
const urlList = this [ kState ] . urlList
// The url getter steps are to return the empty string if this’ s
// response’ s URL is null; otherwise this’ s response’ s URL,
// serialized with exclude fragment set to true.
const url = urlList [ urlList . length - 1 ] ? ? null
if ( url === null ) {
return ''
2023-03-09 17:42:29 +01:00
}
2024-04-24 12:04:10 -04:00
return URLSerializer ( url , true )
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Returns whether response was obtained through a redirect.
get redirected ( ) {
webidl . brandCheck ( this , Response )
// The redirected getter steps are to return true if this’ s response’ s URL
// list has more than one item; otherwise false.
return this [ kState ] . urlList . length > 1
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Returns response’ s status.
get status ( ) {
webidl . brandCheck ( this , Response )
// The status getter steps are to return this’ s response’ s status.
return this [ kState ] . status
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Returns whether response’ s status is an ok status.
get ok ( ) {
webidl . brandCheck ( this , Response )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// The ok getter steps are to return true if this’ s response’ s status is an
// ok status; otherwise false.
return this [ kState ] . status >= 200 && this [ kState ] . status <= 299
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Returns response’ s status message.
get statusText ( ) {
webidl . brandCheck ( this , Response )
// The statusText getter steps are to return this’ s response’ s status
// message.
return this [ kState ] . statusText
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Returns response’ s headers as Headers.
get headers ( ) {
webidl . brandCheck ( this , Response )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// The headers getter steps are to return this’ s headers.
return this [ kHeaders ]
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
get body ( ) {
webidl . brandCheck ( this , Response )
return this [ kState ] . body ? this [ kState ] . body . stream : null
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
get bodyUsed ( ) {
webidl . brandCheck ( this , Response )
return ! ! this [ kState ] . body && util . isDisturbed ( this [ kState ] . body . stream )
2019-12-03 10:28:59 -05:00
}
2024-04-24 12:04:10 -04:00
// Returns a clone of response.
clone ( ) {
webidl . brandCheck ( this , Response )
// 1. If this is unusable, then throw a TypeError.
if ( this . bodyUsed || ( this . body && this . body . locked ) ) {
throw webidl . errors . exception ( {
header : 'Response.clone' ,
message : 'Body has already been consumed.'
} )
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 2. Let clonedResponse be the result of cloning this’ s response.
const clonedResponse = cloneResponse ( this [ kState ] )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 3. Return the result of creating a Response object, given
// clonedResponse, this’ s headers’ s guard, and this’ s relevant Realm.
const clonedResponseObject = new Response ( )
clonedResponseObject [ kState ] = clonedResponse
clonedResponseObject [ kRealm ] = this [ kRealm ]
clonedResponseObject [ kHeaders ] [ kHeadersList ] = clonedResponse . headersList
clonedResponseObject [ kHeaders ] [ kGuard ] = this [ kHeaders ] [ kGuard ]
clonedResponseObject [ kHeaders ] [ kRealm ] = this [ kHeaders ] [ kRealm ]
return clonedResponseObject
2023-03-09 17:42:29 +01:00
}
2024-04-24 12:04:10 -04:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
mixinBody(Response)

// Make the public accessors and methods enumerable as WebIDL requires, and
// give the prototype its spec-mandated string tag.
Object.defineProperties(Response.prototype, {
  type: kEnumerableProperty,
  url: kEnumerableProperty,
  status: kEnumerableProperty,
  ok: kEnumerableProperty,
  redirected: kEnumerableProperty,
  statusText: kEnumerableProperty,
  headers: kEnumerableProperty,
  clone: kEnumerableProperty,
  body: kEnumerableProperty,
  bodyUsed: kEnumerableProperty,
  [Symbol.toStringTag]: {
    value: 'Response',
    configurable: true
  }
})

// Static factories are enumerable too.
Object.defineProperties(Response, {
  json: kEnumerableProperty,
  redirect: kEnumerableProperty,
  error: kEnumerableProperty
})
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://fetch.spec.whatwg.org/#concept-response-clone
// Deep-copies an internal response record; the body (if any) is cloned via
// cloneBody so both copies can be read independently.
function cloneResponse (response) {
  // 1. If response is a filtered response, then return a new identical
  //    filtered response whose internal response is a clone of response’s
  //    internal response.
  if (response.internalResponse) {
    return filterResponse(
      cloneResponse(response.internalResponse),
      response.type
    )
  }

  // 2. Let newResponse be a copy of response, except for its body.
  const newResponse = makeResponse({ ...response, body: null })

  // 3. If response’s body is non-null, then set newResponse’s body to the
  //    result of cloning response’s body.
  if (response.body != null) {
    newResponse.body = cloneBody(response.body)
  }

  // 4. Return newResponse.
  return newResponse
}
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
// Builds an internal response record with spec defaults, overridden by init.
// headersList and urlList are always freshly copied so the caller's objects
// are never shared with the new record.
function makeResponse (init) {
  return {
    aborted: false,
    rangeRequested: false,
    timingAllowPassed: false,
    requestIncludesCredentials: false,
    type: 'default',
    status: 200,
    timingInfo: null,
    cacheState: '',
    statusText: '',
    ...init,
    headersList: init.headersList
      ? new HeadersList(init.headersList)
      : new HeadersList(),
    urlList: init.urlList ? [...init.urlList] : []
  }
}
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
// Builds a network-error response (type "error", status 0) from an arbitrary
// rejection reason; non-Error reasons are wrapped in an Error.
function makeNetworkError (reason) {
  const isError = isErrorLike(reason)
  return makeResponse({
    type: 'error',
    status: 0,
    error: isError
      ? reason
      : new Error(reason ? String(reason) : reason),
    aborted: reason && reason.name === 'AbortError'
  })
}
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
function makeFilteredResponse ( response , state ) {
state = {
internalResponse : response ,
... state
}
return new Proxy ( response , {
get ( target , p ) {
return p in state ? state [ p ] : target [ p ]
} ,
set ( target , p , value ) {
assert ( ! ( p in state ) )
target [ p ] = value
return true
}
} )
2023-03-09 17:42:29 +01:00
}
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
// https://fetch.spec.whatwg.org/#concept-filtered-response
// Returns a filtered view of response for the given response-tainting type.
// Throws (via assert) on an unknown type.
function filterResponse (response, type) {
  // Set response to the following filtered response with response as its
  // internal response, depending on request’s response tainting:
  if (type === 'basic') {
    // A basic filtered response is a filtered response whose type is "basic"
    // and header list excludes any headers in internal response’s header list
    // whose name is a forbidden response-header name.

    // Note: undici does not implement forbidden response-header names
    return makeFilteredResponse(response, {
      type: 'basic',
      headersList: response.headersList
    })
  } else if (type === 'cors') {
    // A CORS filtered response is a filtered response whose type is "cors"
    // and header list excludes any headers in internal response’s header
    // list whose name is not a CORS-safelisted response-header name, given
    // internal response’s CORS-exposed header-name list.

    // Note: undici does not implement CORS-safelisted response-header names
    return makeFilteredResponse(response, {
      type: 'cors',
      headersList: response.headersList
    })
  } else if (type === 'opaque') {
    // An opaque filtered response is a filtered response whose type is
    // "opaque", URL list is the empty list, status is 0, status message
    // is the empty byte sequence, header list is empty, and body is null.
    return makeFilteredResponse(response, {
      type: 'opaque',
      urlList: Object.freeze([]),
      status: 0,
      statusText: '',
      body: null
    })
  } else if (type === 'opaqueredirect') {
    // An opaque-redirect filtered response is a filtered response whose type
    // is "opaqueredirect", status is 0, status message is the empty byte
    // sequence, header list is empty, and body is null.
    return makeFilteredResponse(response, {
      type: 'opaqueredirect',
      status: 0,
      statusText: '',
      headersList: [],
      body: null
    })
  } else {
    assert(false)
  }
}
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
// https://fetch.spec.whatwg.org/#appropriate-network-error
// Builds the network error matching why the fetch ended: an AbortError-based
// error when fetchParams was aborted, otherwise a cancellation error. The
// original error (if any) is preserved as `cause`.
function makeAppropriateNetworkError (fetchParams, err = null) {
  // 1. Assert: fetchParams is canceled.
  assert(isCancelled(fetchParams))

  // 2. Return an aborted network error if fetchParams is aborted;
  //    otherwise return a network error.
  return isAborted(fetchParams)
    ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err }))
    : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err }))
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://whatpr.org/fetch/1392.html#initialize-a-response
// Applies a ResponseInit dictionary (status, statusText, headers) and an
// optional extracted body to a freshly constructed Response object.
// Throws RangeError for out-of-range statuses and TypeError for invalid
// status text or a body paired with a null-body status.
function initializeResponse (response, init, body) {
  // 1. If init["status"] is not in the range 200 to 599, inclusive, then
  //    throw a RangeError.
  if (init.status !== null && (init.status < 200 || init.status > 599)) {
    throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.')
  }

  // 2. If init["statusText"] does not match the reason-phrase token production,
  //    then throw a TypeError.
  if ('statusText' in init && init.statusText != null) {
    // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2:
    //   reason-phrase  = *( HTAB / SP / VCHAR / obs-text )
    if (!isValidReasonPhrase(String(init.statusText))) {
      throw new TypeError('Invalid statusText')
    }
  }

  // 3. Set response’s response’s status to init["status"].
  if ('status' in init && init.status != null) {
    response[kState].status = init.status
  }

  // 4. Set response’s response’s status message to init["statusText"].
  if ('statusText' in init && init.statusText != null) {
    response[kState].statusText = init.statusText
  }

  // 5. If init["headers"] exists, then fill response’s headers with init["headers"].
  if ('headers' in init && init.headers != null) {
    fill(response[kHeaders], init.headers)
  }

  // 6. If body was given, then:
  if (body) {
    // 1. If response's status is a null body status, then throw a TypeError.
    if (nullBodyStatus.includes(response.status)) {
      throw webidl.errors.exception({
        header: 'Response constructor',
        message: 'Invalid response status code ' + response.status
      })
    }

    // 2. Set response's body to body's body.
    response[kState].body = body.body

    // 3. If body's type is non-null and response's header list does not contain
    //    `Content-Type`, then append (`Content-Type`, body's type) to response's header list.
    if (body.type != null && !response[kState].headersList.contains('Content-Type')) {
      response[kState].headersList.append('content-type', body.type)
    }
  }
}
2024-04-24 12:04:10 -04:00
webidl.converters.ReadableStream = webidl.interfaceConverter(
  ReadableStream
)

webidl.converters.FormData = webidl.interfaceConverter(
  FormData
)

webidl.converters.URLSearchParams = webidl.interfaceConverter(
  URLSearchParams
)

// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit
// Converts a candidate body value by duck-typing it against each member of
// the XMLHttpRequestBodyInit union, falling back to DOMString.
webidl.converters.XMLHttpRequestBodyInit = function (V) {
  if (typeof V === 'string') {
    return webidl.converters.USVString(V)
  }

  if (isBlobLike(V)) {
    return webidl.converters.Blob(V, { strict: false })
  }

  if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) {
    return webidl.converters.BufferSource(V)
  }

  if (util.isFormDataLike(V)) {
    return webidl.converters.FormData(V, { strict: false })
  }

  if (V instanceof URLSearchParams) {
    return webidl.converters.URLSearchParams(V)
  }

  return webidl.converters.DOMString(V)
}

// https://fetch.spec.whatwg.org/#bodyinit
webidl.converters.BodyInit = function (V) {
  if (V instanceof ReadableStream) {
    return webidl.converters.ReadableStream(V)
  }

  // Note: the spec doesn't include async iterables,
  // this is an undici extension.
  if (V?.[Symbol.asyncIterator]) {
    return V
  }

  return webidl.converters.XMLHttpRequestBodyInit(V)
}

webidl.converters.ResponseInit = webidl.dictionaryConverter([
  {
    key: 'status',
    converter: webidl.converters['unsigned short'],
    defaultValue: 200
  },
  {
    key: 'statusText',
    converter: webidl.converters.ByteString,
    defaultValue: ''
  },
  {
    key: 'headers',
    converter: webidl.converters.HeadersInit
  }
])
module . exports = {
makeNetworkError ,
makeResponse ,
makeAppropriateNetworkError ,
filterResponse ,
Response ,
cloneResponse
2023-03-09 17:42:29 +01:00
}
2022-10-03 18:04:49 +01:00
2019-12-03 10:28:59 -05:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 5861 :
/***/ ( ( module ) => {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
module . exports = {
kUrl : Symbol ( 'url' ) ,
kHeaders : Symbol ( 'headers' ) ,
kSignal : Symbol ( 'signal' ) ,
kState : Symbol ( 'state' ) ,
kGuard : Symbol ( 'guard' ) ,
kRealm : Symbol ( 'realm' )
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
/***/ 2538 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { redirectStatusSet , referrerPolicySet : referrerPolicyTokens , badPortsSet } = _ _nccwpck _require _ _ ( 1037 )
const { getGlobalOrigin } = _ _nccwpck _require _ _ ( 1246 )
const { performance } = _ _nccwpck _require _ _ ( 4074 )
const { isBlobLike , toUSVString , ReadableStreamFrom } = _ _nccwpck _require _ _ ( 3983 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { isUint8Array } = _ _nccwpck _require _ _ ( 9830 )
let supportedHashes = [ ]
// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')|undefined} */
let crypto
try {
crypto = _ _nccwpck _require _ _ ( 6113 )
const possibleRelevantHashes = [ 'sha256' , 'sha384' , 'sha512' ]
supportedHashes = crypto . getHashes ( ) . filter ( ( hash ) => possibleRelevantHashes . includes ( hash ) )
/* c8 ignore next 3 */
} catch {
}
// https://fetch.spec.whatwg.org/#responses
// A response's URL is the last URL in its URL list; null when the list is
// empty. The URL is returned serialized as a string.
function responseURL (response) {
  const { urlList } = response
  if (urlList.length === 0) {
    return null
  }
  return urlList[urlList.length - 1].toString()
}
// https://fetch.spec.whatwg.org/#concept-response-location-url
function responseLocationURL ( response , requestFragment ) {
// 1. If response’ s status is not a redirect status, then return null.
if ( ! redirectStatusSet . has ( response . status ) ) {
return null
}
// 2. Let location be the result of extracting header list values given
// `Location` and response’ s header list.
let location = response . headersList . get ( 'location' )
// 3. If location is a header value, then set location to the result of
// parsing location with response’ s URL.
if ( location !== null && isValidHeaderValue ( location ) ) {
location = new URL ( location , responseURL ( response ) )
}
// 4. If location is a URL whose fragment is null, then set location’ s
// fragment to requestFragment.
if ( location && ! location . hash ) {
location . hash = requestFragment
}
// 5. Return location.
return location
}
/** @returns {URL} */
function requestCurrentURL ( request ) {
return request . urlList [ request . urlList . length - 1 ]
}
function requestBadPort ( request ) {
// 1. Let url be request’ s current URL.
const url = requestCurrentURL ( request )
// 2. If url’ s scheme is an HTTP(S) scheme and url’ s port is a bad port,
// then return blocked.
if ( urlIsHttpHttpsScheme ( url ) && badPortsSet . has ( url . port ) ) {
return 'blocked'
}
// 3. Return allowed.
return 'allowed'
}
// True when object is an Error instance, or quacks like one by having a
// constructor named 'Error' or 'DOMException' (covers cross-realm errors).
function isErrorLike (object) {
  if (object instanceof Error) {
    return true
  }
  const ctorName = object?.constructor?.name
  return ctorName === 'Error' || ctorName === 'DOMException'
}
// Check whether |statusText| is a ByteString and
// matches the Reason-Phrase token production.
// RFC 2616: https://tools.ietf.org/html/rfc2616
// RFC 7230: https://tools.ietf.org/html/rfc7230
// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )"
// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116
// Returns true for the empty string (zero repetitions are allowed).
function isValidReasonPhrase (statusText) {
  for (let i = 0; i < statusText.length; ++i) {
    const c = statusText.charCodeAt(i)
    if (
      !(
        (
          c === 0x09 || // HTAB
          (c >= 0x20 && c <= 0x7e) || // SP / VCHAR
          (c >= 0x80 && c <= 0xff) // obs-text
        )
      )
    ) {
      return false
    }
  }
  return true
}
2023-03-09 17:42:29 +01:00
/**
 * Decides whether a char code is an HTTP token character (tchar):
 * any VCHAR except the delimiters DQUOTE and "(),/:;<=>?@[\]{}".
 * @see https://tools.ietf.org/html/rfc7230#section-3.2.6
 * @param {number} c
 * @returns {boolean}
 */
function isTokenCharCode (c) {
  switch (c) {
    case 0x22:
    case 0x28:
    case 0x29:
    case 0x2c:
    case 0x2f:
    case 0x3a:
    case 0x3b:
    case 0x3c:
    case 0x3d:
    case 0x3e:
    case 0x3f:
    case 0x40:
    case 0x5b:
    case 0x5c:
    case 0x5d:
    case 0x7b:
    case 0x7d:
      // DQUOTE and "(),/:;<=>?@[\]{}"
      return false
    default:
      // VCHAR %x21-7E
      return c >= 0x21 && c <= 0x7e
  }
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * True when characters is a non-empty string consisting solely of HTTP
 * token characters (see isTokenCharCode).
 * @param {string} characters
 * @returns {boolean}
 */
function isValidHTTPToken (characters) {
  if (characters.length === 0) {
    return false
  }
  for (let i = 0; i < characters.length; ++i) {
    if (!isTokenCharCode(characters.charCodeAt(i))) {
      return false
    }
  }
  return true
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * A header name is valid exactly when it is a non-empty HTTP token.
 * @see https://fetch.spec.whatwg.org/#header-name
 * @param {string} potentialValue
 * @returns {boolean}
 */
function isValidHeaderName (potentialValue) {
  return isValidHTTPToken(potentialValue)
}
2022-10-03 18:04:49 +01:00
2024-04-24 12:04:10 -04:00
/**
 * Validates a header value per the Fetch spec:
 * - no leading or trailing HTTP tab or space bytes, and
 * - no 0x00 (NUL) or HTTP newline (CR/LF) bytes anywhere.
 * The empty string is valid.
 * @see https://fetch.spec.whatwg.org/#header-value
 * @param {string} potentialValue
 * @returns {boolean}
 */
function isValidHeaderValue (potentialValue) {
  if (
    potentialValue.startsWith('\t') ||
    potentialValue.startsWith(' ') ||
    potentialValue.endsWith('\t') ||
    potentialValue.endsWith(' ')
  ) {
    return false
  }

  if (
    potentialValue.includes('\0') ||
    potentialValue.includes('\r') ||
    potentialValue.includes('\n')
  ) {
    return false
  }

  return true
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect
// Given a request and a redirect response, updates request.referrerPolicy
// from the response's Referrer-Policy header. When the header lists several
// comma-separated policies, the right-most recognized token wins.
function setRequestReferrerPolicyOnRedirect (request, actualResponse) {
  // 1. Let policy be the result of executing § 8.1 Parse a referrer policy
  //    from a Referrer-Policy header on actualResponse.

  // 8.1 Parse a referrer policy from a Referrer-Policy header
  // 1. Let policy-tokens be the result of extracting header list values
  //    given `Referrer-Policy` and response’s header list.
  const { headersList } = actualResponse

  // 2. Let policy be the empty string.
  // 3. For each token in policy-tokens, if token is a referrer policy and
  //    token is not the empty string, then set policy to token.
  // 4. Return policy.
  const policyHeader = (headersList.get('referrer-policy') ?? '').split(',')

  // Note: As the referrer-policy can contain multiple policies
  // separated by comma, we need to loop through all of them
  // and pick the first valid one.
  // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy
  let policy = ''
  if (policyHeader.length > 0) {
    // The right-most policy takes precedence.
    // The left-most policy is the fallback.
    for (let i = policyHeader.length; i !== 0; i--) {
      const token = policyHeader[i - 1].trim()
      if (referrerPolicyTokens.has(token)) {
        policy = token
        break
      }
    }
  }

  // 2. If policy is not the empty string, then set request’s referrer policy to policy.
  if (policy !== '') {
    request.referrerPolicy = policy
  }
}
2019-12-12 13:16:16 -05:00
2024-04-24 12:04:10 -04:00
// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check
function crossOriginResourcePolicyCheck () {
  // TODO: implement the CORP algorithm; undici currently permits all
  // responses unconditionally.
  return 'allowed'
}
2019-12-12 13:16:16 -05:00
2024-04-24 12:04:10 -04:00
// https://fetch.spec.whatwg.org/#concept-cors-check
function corsCheck () {
  // TODO: implement the CORS check; undici currently reports success
  // unconditionally.
  return 'success'
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://fetch.spec.whatwg.org/#concept-tao-check
function TAOCheck () {
  // TODO: implement the timing-allow-origin check; undici currently
  // reports success unconditionally.
  return 'success'
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// Writes the `Sec-Fetch-Mode` metadata header onto httpRequest's header
// list from its mode. The other Sec-Fetch-* headers are not yet implemented.
function appendFetchMetadata (httpRequest) {
  // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header
  // TODO

  // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header

  // 1. Assert: r’s url is a potentially trustworthy URL.
  // TODO

  // 2. Let header be a Structured Header whose value is a token.
  // 3. Set header’s value to r’s mode.
  const header = httpRequest.mode

  // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list.
  httpRequest.headersList.set('sec-fetch-mode', header)

  // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header
  // TODO

  // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header
  // TODO
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// https://fetch.spec.whatwg.org/#append-a-request-origin-header
// Appends an `Origin` header to request's header list when the response
// tainting, mode, method and referrer policy call for one.
function appendRequestOriginHeader (request) {
  // 1. Let serializedOrigin be the result of byte-serializing a request origin with request.
  let serializedOrigin = request.origin

  // 2. If request’s response tainting is "cors" or request’s mode is
  //    "websocket", then append (`Origin`, serializedOrigin) to request’s
  //    header list.
  if (request.responseTainting === 'cors' || request.mode === 'websocket') {
    if (serializedOrigin) {
      request.headersList.append('origin', serializedOrigin)
    }

  // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then:
  } else if (request.method !== 'GET' && request.method !== 'HEAD') {
    // 1. Switch on request’s referrer policy:
    switch (request.referrerPolicy) {
      case 'no-referrer':
        // Set serializedOrigin to `null`.
        serializedOrigin = null
        break
      case 'no-referrer-when-downgrade':
      case 'strict-origin':
      case 'strict-origin-when-cross-origin':
        // If request’s origin is a tuple origin, its scheme is "https", and
        // request’s current URL’s scheme is not "https", then set
        // serializedOrigin to `null`.
        if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) {
          serializedOrigin = null
        }
        break
      case 'same-origin':
        // If request’s origin is not same origin with request’s current
        // URL’s origin, then set serializedOrigin to `null`.
        if (!sameOrigin(request, requestCurrentURL(request))) {
          serializedOrigin = null
        }
        break
      default:
        // Do nothing.
    }

    if (serializedOrigin) {
      // 2. Append (`Origin`, serializedOrigin) to request’s header list.
      request.headersList.append('origin', serializedOrigin)
    }
  }
}
2024-04-24 12:04:10 -04:00
// Returns the current high-resolution time. The spec calls for coarsening
// based on cross-origin isolation; that is not implemented yet (TODO), so
// the clock value is returned as-is.
function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) {
  return performance.now()
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info
// Builds a fetch timing-info record whose only populated fields are the
// start times; everything else is zeroed/nulled (opaque).
function createOpaqueTimingInfo (timingInfo) {
  const start = timingInfo.startTime ?? 0
  return {
    startTime: start,
    redirectStartTime: 0,
    redirectEndTime: 0,
    postRedirectStartTime: start,
    finalServiceWorkerStartTime: 0,
    finalNetworkResponseStartTime: 0,
    finalNetworkRequestStartTime: 0,
    endTime: 0,
    encodedBodySize: 0,
    decodedBodySize: 0,
    finalConnectionTimingInfo: null
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// https://html.spec.whatwg.org/multipage/origin.html#policy-container
// Note: the fetch spec doesn't make use of embedder policy or CSP list,
// so a policy container here only carries the default referrer policy.
function makePolicyContainer () {
  const container = {
    referrerPolicy: 'strict-origin-when-cross-origin'
  }
  return container
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container
// Shallow-copies the single tracked field of a policy container.
function clonePolicyContainer (policyContainer) {
  const { referrerPolicy } = policyContainer
  return { referrerPolicy }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
// Computes the value to send as the Referer header for request, honoring
// its referrer policy. Returns a URL, or the string 'no-referrer'.
function determineRequestsReferrer (request) {
  // 1. Let policy be request's referrer policy.
  const policy = request.referrerPolicy

  // Note: policy cannot (shouldn't) be null or an empty string.
  assert(policy)

  // 2. Let environment be request’s client.

  let referrerSource = null

  // 3. Switch on request’s referrer:
  if (request.referrer === 'client') {
    // Note: node isn't a browser and doesn't implement document/iframes,
    // so we bypass this step and replace it with our own.

    const globalOrigin = getGlobalOrigin()

    if (!globalOrigin || globalOrigin.origin === 'null') {
      return 'no-referrer'
    }

    // note: we need to clone it as it's mutated
    referrerSource = new URL(globalOrigin)
  } else if (request.referrer instanceof URL) {
    // Let referrerSource be request’s referrer.
    referrerSource = request.referrer
  }

  // 4. Let request’s referrerURL be the result of stripping referrerSource for
  //    use as a referrer.
  let referrerURL = stripURLForReferrer(referrerSource)

  // 5. Let referrerOrigin be the result of stripping referrerSource for use as
  //    a referrer, with the origin-only flag set to true.
  const referrerOrigin = stripURLForReferrer(referrerSource, true)

  // 6. If the result of serializing referrerURL is a string whose length is
  //    greater than 4096, set referrerURL to referrerOrigin.
  if (referrerURL.toString().length > 4096) {
    referrerURL = referrerOrigin
  }

  const areSameOrigin = sameOrigin(request, referrerURL)
  // NOTE(review): despite the name, this is true in the *downgrade* case —
  // the referrer is potentially trustworthy while the request URL is not.
  const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) &&
    !isURLPotentiallyTrustworthy(request.url)

  // 8. Execute the switch statements corresponding to the value of policy:
  switch (policy) {
    case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true)
    case 'unsafe-url': return referrerURL
    case 'same-origin':
      return areSameOrigin ? referrerOrigin : 'no-referrer'
    case 'origin-when-cross-origin':
      return areSameOrigin ? referrerURL : referrerOrigin
    case 'strict-origin-when-cross-origin': {
      const currentURL = requestCurrentURL(request)

      // 1. If the origin of referrerURL and the origin of request’s current
      //    URL are the same, then return referrerURL.
      if (sameOrigin(referrerURL, currentURL)) {
        return referrerURL
      }

      // 2. If referrerURL is a potentially trustworthy URL and request’s
      //    current URL is not a potentially trustworthy URL, then return no
      //    referrer.
      if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {
        return 'no-referrer'
      }

      // 3. Return referrerOrigin.
      return referrerOrigin
    }
    case 'strict-origin': // eslint-disable-line
      /**
       * 1. If referrerURL is a potentially trustworthy URL and
       *    request’s current URL is not a potentially trustworthy URL,
       *    then return no referrer.
       * 2. Return referrerOrigin
       */
    case 'no-referrer-when-downgrade': // eslint-disable-line
      /**
       * 1. If referrerURL is a potentially trustworthy URL and
       *    request’s current URL is not a potentially trustworthy URL,
       *    then return no referrer.
       * 2. Return referrerOrigin
       */
    default: // eslint-disable-line
      return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * Strips a URL down to what is safe to expose as a referrer.
 * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url
 * @param {URL} url mutated in place
 * @param {boolean|undefined} originOnly when true, path and query are removed too
 * @returns {URL|'no-referrer'}
 */
function stripURLForReferrer (url, originOnly) {
  // 1. Assert: url is a URL.
  assert(url instanceof URL)

  // 2. If url's scheme is a local scheme, then return no referrer.
  const localSchemes = ['file:', 'about:', 'blank:']
  if (localSchemes.includes(url.protocol)) {
    return 'no-referrer'
  }

  // 3./4. Remove any credentials embedded in the URL.
  url.username = ''
  url.password = ''

  // 5. Set url's fragment to null.
  url.hash = ''

  // 6. If the origin-only flag is true, drop path and query as well.
  if (originOnly) {
    url.pathname = ''
    url.search = ''
  }

  // 7. Return url.
  return url
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * Whether a URL is "potentially trustworthy" per the Secure Contexts spec:
 * about:blank/srcdoc, data:, file:, secure schemes, and loopback/localhost.
 * @param {unknown} url
 * @returns {boolean} false for anything that is not a URL instance
 */
function isURLPotentiallyTrustworthy (url) {
  // Non-URL values can never be trustworthy.
  if (!(url instanceof URL)) {
    return false
  }

  // about:blank and about:srcdoc inherit trust from their embedder.
  if (url.href === 'about:blank' || url.href === 'about:srcdoc') {
    return true
  }

  // data: and file: URLs are considered trustworthy.
  if (url.protocol === 'data:' || url.protocol === 'file:') {
    return true
  }

  return isOriginPotentiallyTrustworthy(url.origin)

  function isOriginPotentiallyTrustworthy (origin) {
    // Opaque ("null") origins are untrusted.
    if (origin == null || origin === 'null') {
      return false
    }

    const originAsURL = new URL(origin)

    // Secure schemes are trusted.
    if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') {
      return true
    }

    // Loopback IPs (127.x.x.x / [::1]) and localhost variants are trusted.
    const isLoopback = /^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname)
    const isLocalhost =
      originAsURL.hostname === 'localhost' ||
      originAsURL.hostname.includes('localhost.') ||
      originAsURL.hostname.endsWith('.localhost')

    return isLoopback || isLocalhost
  }
}
2024-04-24 12:04:10 -04:00
/**
 * Verifies Subresource Integrity: checks whether `bytes` hash to one of the
 * digests in `metadataList` (e.g. "sha384-<base64>"). Per spec, returns true
 * when the metadata is empty/unparseable or integrity cannot be checked.
 * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
 * @param {Uint8Array} bytes
 * @param {string} metadataList
 * @returns {boolean}
 */
function bytesMatch (bytes, metadataList) {
  // If node is not built with OpenSSL support, we cannot check
  // a request's integrity, so allow it by default (the spec will
  // allow requests if an invalid hash is given, as precedence).
  /* istanbul ignore if: only if node is built with --without-ssl */
  if (crypto === undefined) {
    return true
  }

  // 1. Let parsedMetadata be the result of parsing metadataList.
  const parsedMetadata = parseMetadata(metadataList)

  // 2. If parsedMetadata is no metadata, return true.
  if (parsedMetadata === 'no metadata') {
    return true
  }

  // 3. If response is not eligible for integrity validation, return false.
  // TODO

  // 4. If parsedMetadata is the empty set, return true.
  if (parsedMetadata.length === 0) {
    return true
  }

  // 5. Let metadata be the result of getting the strongest
  //    metadata from parsedMetadata (only the strongest algorithm's
  //    entries are compared).
  const strongest = getStrongestMetadata(parsedMetadata)
  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)

  // 6. For each item in metadata:
  for (const item of metadata) {
    // 1. Let algorithm be the alg component of item.
    const algorithm = item.algo

    // 2. Let expectedValue be the val component of item.
    const expectedValue = item.hash

    // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
    // "be liberal with padding". This is annoying, and it's not even in the spec.

    // 3. Let actualValue be the result of applying algorithm to bytes.
    let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')

    // Strip base64 '=' padding so padded and unpadded digests compare equal.
    if (actualValue[actualValue.length - 1] === '=') {
      if (actualValue[actualValue.length - 2] === '=') {
        actualValue = actualValue.slice(0, -2)
      } else {
        actualValue = actualValue.slice(0, -1)
      }
    }

    // 4. If actualValue is a case-sensitive match for expectedValue
    //    (allowing the base64url alphabet in expectedValue), return true.
    if (compareBase64Mixed(actualValue, expectedValue)) {
      return true
    }
  }

  // 7. Return false.
  return false
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i

/**
 * Parses an SRI metadata string into a list of { algo, hash } entries,
 * keeping only algorithms the user agent supports.
 * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
 * @param {string} metadata
 * @returns {{ algo: string, hash: string }[]|'no metadata'}
 */
function parseMetadata (metadata) {
  // 1. Let result be the empty set.
  /** @type {{ algo: string, hash: string }[]} */
  const result = []

  // 2. Let empty be equal to true.
  let empty = true

  // 3. For each token returned by splitting metadata on spaces:
  for (const token of metadata.split(' ')) {
    // 1. Set empty to false.
    empty = false

    // 2. Parse token as a hash-with-options.
    const parsedToken = parseHashWithOptions.exec(token)

    // 3. If token does not parse, skip it. (Chromium blocks the request at
    //    this point, Firefox only warns; ignoring is the correct behavior.)
    const groups = parsedToken?.groups
    if (groups === undefined || groups.algo === undefined) {
      continue
    }

    // 4./5. Keep the token when its algorithm is recognized.
    const algorithm = groups.algo.toLowerCase()
    if (supportedHashes.includes(algorithm)) {
      result.push(groups)
    }
  }

  // 4. Return no metadata if empty is true, otherwise return result.
  return empty === true ? 'no metadata' : result
}
2020-03-02 11:33:30 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Returns the strongest hash algorithm present in the list.
 * Strength order: sha512 > sha384 > sha256. Only the 4th character of the
 * algorithm name is inspected ('5', '3', '2').
 * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList non-empty
 * @returns {string}
 */
function getStrongestMetadata (metadataList) {
  let strongest = metadataList[0].algo
  // sha512 can never be beaten; bail out right away.
  if (strongest[3] === '5') {
    return strongest
  }

  for (let i = 1; i < metadataList.length; ++i) {
    const candidate = metadataList[i].algo
    if (candidate[3] === '5') {
      // Nothing outranks sha512.
      return 'sha512'
    }
    if (strongest[3] !== '3' && candidate[3] === '3') {
      // Upgrade sha256 -> sha384.
      strongest = 'sha384'
    }
  }

  return strongest
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * Keeps only the entries whose `algo` matches `algorithm`, compacting the
 * array in place. A single-entry list is returned untouched.
 * @param {{ algo: string }[]} metadataList mutated in place
 * @param {string} algorithm
 * @returns {{ algo: string }[]} the same (filtered) array
 */
function filterMetadataListByAlgorithm (metadataList, algorithm) {
  if (metadataList.length === 1) {
    return metadataList
  }

  // Stable in-place compaction: shift matches to the front, then truncate.
  let writeIndex = 0
  for (const item of metadataList) {
    if (item.algo === algorithm) {
      metadataList[writeIndex++] = item
    }
  }
  metadataList.length = writeIndex

  return metadataList
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * Compares two base64 strings, additionally accepting the base64url
 * alphabet ('-' for '+', '_' for '/') in the second string only.
 * @param {string} actualValue always standard base64
 * @param {string} expectedValue base64 or base64url
 * @returns {boolean}
 */
function compareBase64Mixed (actualValue, expectedValue) {
  if (actualValue.length !== expectedValue.length) {
    return false
  }

  for (let i = 0; i < actualValue.length; ++i) {
    const a = actualValue[i]
    const e = expectedValue[i]
    if (a === e) {
      continue
    }
    // Only the base64url substitutions are tolerated, and only in expected.
    const isUrlSafePair = (a === '+' && e === '-') || (a === '/' && e === '_')
    if (!isUrlSafePair) {
      return false
    }
  }

  return true
}
2024-04-24 12:04:10 -04:00
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
// Intentionally a no-op: upgrading insecure requests is not implemented.
// Callers may invoke it unconditionally; `request` is left unmodified.
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
  // TODO
}
2020-03-02 11:33:30 -05:00
2024-04-24 12:04:10 -04:00
/**
 * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin}
 * @param {URL} A
 * @param {URL} B
 * @returns {boolean}
 */
function sameOrigin (A, B) {
  // 1. Two identical opaque ("null") origins are same-origin.
  if (A.origin === 'null' && B.origin === 'null') {
    return true
  }

  // 2./3. Tuple origins match exactly when scheme, host and port are
  //       all identical; otherwise the origins differ.
  return A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Creates a promise together with its externally-exposed settle functions.
 * @returns {{ promise: Promise, resolve: Function, reject: Function }}
 */
function createDeferredPromise () {
  const deferred = {}
  deferred.promise = new Promise((resolve, reject) => {
    // The executor runs synchronously, so both fields are set before return.
    deferred.resolve = resolve
    deferred.reject = reject
  })
  return deferred
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * True when the fetch has been aborted by its controller.
 * @param {{ controller: { state: string } }} fetchParams
 * @returns {boolean}
 */
function isAborted (fetchParams) {
  const { state } = fetchParams.controller
  return state === 'aborted'
}
2024-04-24 12:04:10 -04:00
/**
 * True when the fetch was aborted or terminated.
 * @param {{ controller: { state: string } }} fetchParams
 * @returns {boolean}
 */
function isCancelled (fetchParams) {
  const { state } = fetchParams.controller
  return state === 'aborted' || state === 'terminated'
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Lookup table mapping lower/upper-case spellings of the normalizable HTTP
 * methods to their canonical upper-case form. Built with a null prototype so
 * object prototype members (e.g. `Object#hasOwnProperty`) cannot be reached.
 */
const normalizeMethodRecord = Object.assign(Object.create(null), {
  delete: 'DELETE',
  DELETE: 'DELETE',
  get: 'GET',
  GET: 'GET',
  head: 'HEAD',
  HEAD: 'HEAD',
  options: 'OPTIONS',
  OPTIONS: 'OPTIONS',
  post: 'POST',
  POST: 'POST',
  put: 'PUT',
  PUT: 'PUT'
})

/**
 * Normalizes an HTTP method name to upper case when it is one of the
 * methods in the table above; other methods are returned unchanged.
 * @see https://fetch.spec.whatwg.org/#concept-method-normalize
 * @param {string} method
 * @returns {string}
 */
function normalizeMethod (method) {
  return normalizeMethodRecord[method.toLowerCase()] ?? method
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string
/**
 * JSON-stringifies `value`, throwing instead of yielding `undefined` for
 * values without a JSON representation (functions, symbols, bare undefined).
 * @param {*} value
 * @returns {string}
 * @throws {TypeError} when the value is not JSON serializable
 */
function serializeJavascriptValueToJSONString (value) {
  // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »).
  const result = JSON.stringify(value)

  // 2. JSON.stringify returns undefined (not a string) exactly when the
  //    value has no JSON representation; the spec requires a TypeError.
  if (typeof result !== 'string') {
    throw new TypeError('Value is not JSON serializable')
  }

  // 3./4. result is guaranteed to be a string here; return it.
  return result
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object
// Grab %IteratorPrototype% from a live array iterator so the iterators we
// build chain onto the same prototype as built-in iterators.
const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))

/**
 * Builds a WebIDL default iterator object over the entries produced by
 * `iterator()`. The target callback is re-invoked on every next() call so
 * the iterator always sees the collection's current entries.
 * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object
 * @param {() => unknown[]} iterator
 * @param {string} name name of the instance
 * @param {'key'|'value'|'key+value'} kind
 */
function makeIterator (iterator, name, kind) {
  // Per-iterator mutable state, captured by the next() closure below.
  const object = {
    index: 0,
    kind,
    target: iterator
  }

  const i = {
    next () {
      // 1. Let interface be the interface for which the iterator prototype object exists.

      // 2. Let thisValue be the this value.

      // 3. Let object be ? ToObject(thisValue).

      // 4. If object is a platform object, then perform a security
      //    check, passing:

      // 5. If object is not a default iterator object for interface,
      //    then throw a TypeError.
      if (Object.getPrototypeOf(this) !== i) {
        throw new TypeError(
          `'next' called on an object that does not implement interface ${name} Iterator.`
        )
      }

      // 6. Let index be object's index.
      // 7. Let kind be object's kind.
      // 8. Let values be object's target's value pairs to iterate over.
      const { index, kind, target } = object
      const values = target()

      // 9. Let len be the length of values.
      const len = values.length

      // 10. If index is greater than or equal to len, then return
      //     CreateIterResultObject(undefined, true).
      if (index >= len) {
        return { value: undefined, done: true }
      }

      // 11. Let pair be the entry in values at index index.
      const pair = values[index]

      // 12. Set object's index to index + 1.
      object.index = index + 1

      // 13. Return the iterator result for pair and kind.
      return iteratorResult(pair, kind)
    },
    // The class string of an iterator prototype object for a given interface is the
    // result of concatenating the identifier of the interface and the string " Iterator".
    [Symbol.toStringTag]: `${name} Iterator`
  }

  // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%.
  Object.setPrototypeOf(i, esIteratorPrototype)
  // esIteratorPrototype needs to be the prototype of i
  // which is the prototype of an empty object. Yes, it's confusing.
  return Object.setPrototypeOf({}, i)
}
// https://webidl.spec.whatwg.org/#iterator-result
/**
 * Builds a non-done IteratorResult for a [key, value] pair according to
 * the requested iteration kind.
 * @param {[unknown, unknown]} pair
 * @param {'key'|'value'|'key+value'} kind
 * @returns {{ value: unknown, done: false }}
 */
function iteratorResult (pair, kind) {
  // 1. result is the key, the value, or the whole pair, depending on kind.
  let result
  if (kind === 'key') {
    result = pair[0]
  } else if (kind === 'value') {
    result = pair[1]
  } else if (kind === 'key+value') {
    result = pair
  }

  // 2. Return CreateIterResultObject(result, false).
  return { value: result, done: false }
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Fully drains `body.stream` and hands the concatenated bytes to
 * `processBody`; any failure (getting the reader, or reading) is routed to
 * `processBodyError` instead of being thrown to the caller.
 * @see https://fetch.spec.whatwg.org/#body-fully-read
 */
async function fullyReadBody (body, processBody, processBodyError) {
  // 1. If taskDestination is null, then set taskDestination to
  //    the result of starting a new parallel queue.

  // 2. Let successSteps given a byte sequence bytes be to queue a
  //    fetch task to run processBody given bytes, with taskDestination.
  const successSteps = processBody

  // 3. Let errorSteps be to queue a fetch task to run processBodyError,
  //    with taskDestination.
  const errorSteps = processBodyError

  // 4. Let reader be the result of getting a reader for body's stream.
  //    If that threw an exception, then run errorSteps with that
  //    exception and return.
  let reader

  try {
    reader = body.stream.getReader()
  } catch (e) {
    errorSteps(e)
    return
  }

  // 5. Read all bytes from reader, given successSteps and errorSteps.
  try {
    const result = await readAllBytes(reader)
    successSteps(result)
  } catch (e) {
    errorSteps(e)
  }
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/** @type {ReadableStream} */
let ReadableStream = globalThis.ReadableStream

/**
 * Duck-types a value as a (possibly foreign-realm) ReadableStream: a real
 * instance, or an object with the right Symbol.toStringTag and a tee method.
 * @param {unknown} stream
 * @returns {boolean}
 */
function isReadableStreamLike (stream) {
  // Lazily load the implementation on Node versions without the global.
  if (!ReadableStream) {
    ReadableStream = (__nccwpck_require__(5356).ReadableStream)
  }

  if (stream instanceof ReadableStream) {
    return true
  }
  return stream[Symbol.toStringTag] === 'ReadableStream' &&
    typeof stream.tee === 'function'
}
2024-04-24 12:04:10 -04:00
// Spreading more arguments than this into one call risks exceeding the
// engine's maximum argument count, so longer inputs are decoded iteratively.
const MAXIMUM_ARGUMENT_LENGTH = 65535

/**
 * Maps each byte to the code point of the same value (latin1 decode): the
 * output string's code point length equals the input's byte length.
 * @see https://infra.spec.whatwg.org/#isomorphic-decode
 * @param {number[]|Uint8Array} input
 * @returns {string}
 */
function isomorphicDecode (input) {
  if (input.length < MAXIMUM_ARGUMENT_LENGTH) {
    // Fast path: a single fromCharCode call for short inputs.
    return String.fromCharCode(...input)
  }

  // Slow path: build the string byte by byte.
  let result = ''
  for (let i = 0; i < input.length; ++i) {
    result += String.fromCharCode(input[i])
  }
  return result
}
2024-04-24 12:04:10 -04:00
/**
 * Closes a stream controller, tolerating the case where the controller has
 * already been closed (which close() reports by throwing).
 * @param {ReadableStreamController<Uint8Array>} controller
 */
function readableStreamClose (controller) {
  try {
    controller.close()
  } catch (err) {
    // Swallow only the "already closed" error; anything else is unexpected
    // and must propagate.
    if (!err.message.includes('Controller is already closed')) {
      throw err
    }
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * Asserts the string is latin1-safe (no code point above U+00FF) and returns
 * it unchanged; each code point then maps 1:1 to a byte of the same value.
 * @see https://infra.spec.whatwg.org/#isomorphic-encode
 * @param {string} input
 * @returns {string}
 */
function isomorphicEncode (input) {
  // 1. Assert: input contains no code points greater than U+00FF.
  let i = 0
  while (i < input.length) {
    assert(input.charCodeAt(i) <= 0xFF)
    ++i
  }

  // 2. The byte sequence is the string itself.
  return input
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Reads a stream to completion and returns all chunks concatenated into a
 * single Buffer. Throws a TypeError if any chunk is not a Uint8Array.
 * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes
 * @see https://streams.spec.whatwg.org/#read-loop
 * @param {ReadableStreamDefaultReader} reader
 * @returns {Promise<Buffer>}
 */
async function readAllBytes (reader) {
  const bytes = []
  // Track total length so Buffer.concat can allocate once.
  let byteLength = 0
  while (true) {
    const { done, value: chunk } = await reader.read()
    if (done) {
      // 1. Call successSteps with bytes.
      return Buffer.concat(bytes, byteLength)
    }

    // 1. If chunk is not a Uint8Array object, call failureSteps
    //    with a TypeError and abort these steps.
    if (!isUint8Array(chunk)) {
      throw new TypeError('Received non-Uint8Array chunk')
    }

    // 2. Append the bytes represented by chunk to bytes.
    bytes.push(chunk)
    byteLength += chunk.length

    // 3. Read-loop given reader, bytes, successSteps, and failureSteps.
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * Whether the URL uses a "local" scheme (about:, blob: or data:).
 * @see https://fetch.spec.whatwg.org/#is-local
 * @param {URL} url
 * @returns {boolean}
 */
function urlIsLocal (url) {
  assert('protocol' in url) // ensure it's a url object

  switch (url.protocol) {
    case 'about:':
    case 'blob:':
    case 'data:':
      return true
    default:
      return false
  }
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Whether the URL (string or URL object) uses the https scheme.
 * @param {string|URL} url
 * @returns {boolean}
 */
function urlHasHttpsScheme (url) {
  const isString = typeof url === 'string'
  return isString
    ? url.startsWith('https:')
    : url.protocol === 'https:'
}
2024-04-24 12:04:10 -04:00
/**
 * Whether the URL uses an HTTP(S) scheme.
 * @see https://fetch.spec.whatwg.org/#http-scheme
 * @param {URL} url
 * @returns {boolean}
 */
function urlIsHttpHttpsScheme (url) {
  assert('protocol' in url) // ensure it's a url object

  switch (url.protocol) {
    case 'http:':
    case 'https:':
      return true
    default:
      return false
  }
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0;
 * fall back to an uncurried Object.prototype.hasOwnProperty.
 */
const hasOwn = Object.hasOwn || function (dict, key) {
  return Object.prototype.hasOwnProperty.call(dict, key)
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
module . exports = {
isAborted ,
isCancelled ,
createDeferredPromise ,
ReadableStreamFrom ,
toUSVString ,
tryUpgradeRequestToAPotentiallyTrustworthyURL ,
coarsenedSharedCurrentTime ,
determineRequestsReferrer ,
makePolicyContainer ,
clonePolicyContainer ,
appendFetchMetadata ,
appendRequestOriginHeader ,
TAOCheck ,
corsCheck ,
crossOriginResourcePolicyCheck ,
createOpaqueTimingInfo ,
setRequestReferrerPolicyOnRedirect ,
isValidHTTPToken ,
requestBadPort ,
requestCurrentURL ,
responseURL ,
responseLocationURL ,
isBlobLike ,
isURLPotentiallyTrustworthy ,
isValidReasonPhrase ,
sameOrigin ,
normalizeMethod ,
serializeJavascriptValueToJSONString ,
makeIterator ,
isValidHeaderName ,
isValidHeaderValue ,
hasOwn ,
isErrorLike ,
fullyReadBody ,
bytesMatch ,
isReadableStreamLike ,
readableStreamClose ,
isomorphicEncode ,
isomorphicDecode ,
urlIsLocal ,
urlHasHttpsScheme ,
urlIsHttpHttpsScheme ,
readAllBytes ,
normalizeMethodRecord ,
parseMetadata
}
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 1744 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
const { types } = _ _nccwpck _require _ _ ( 3837 )
const { hasOwn , toUSVString } = _ _nccwpck _require _ _ ( 2538 )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/** @type {import('../../types/webidl').Webidl} */
const webidl = {}
webidl.converters = {}
webidl.util = {}
webidl.errors = {}

/**
 * Builds a TypeError formatted as "<header>: <message>".
 * @param {{ header: string, message: string }} message
 * @returns {TypeError}
 */
webidl.errors.exception = function (message) {
  const { header, message: body } = message
  return new TypeError(`${header}: ${body}`)
}

/**
 * Error for a value that could not be converted to any of the given types.
 * @param {{ prefix: string, argument: string, types: string[] }} context
 * @returns {TypeError}
 */
webidl.errors.conversionFailed = function (context) {
  const { prefix, argument, types } = context
  // " one of" is only added when there is more than one candidate type.
  const plural = types.length === 1 ? '' : ' one of'
  const message = `${argument} could not be converted to${plural}: ${types.join(', ')}.`

  return webidl.errors.exception({ header: prefix, message })
}

/**
 * Error for a single invalid argument value.
 * @param {{ prefix: string, value: string, type: string }} context
 * @returns {TypeError}
 */
webidl.errors.invalidArgument = function (context) {
  return webidl.errors.exception({
    header: context.prefix,
    message: `"${context.value}" is an invalid ${context.type}.`
  })
}
2024-04-24 12:04:10 -04:00
// https://webidl.spec.whatwg.org/#implements
/**
 * Brand-checks that V is an instance of interface I. In strict mode (the
 * default) a failed instanceof check throws "Illegal invocation"; with
 * { strict: false } only the Symbol.toStringTag brands are compared and the
 * boolean result is returned.
 */
webidl.brandCheck = function (V, I, opts = undefined) {
  // Guard clause: strict mode requires a real instance.
  if (opts?.strict !== false && !(V instanceof I)) {
    throw new TypeError('Illegal invocation')
  }
  return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag]
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Throws when fewer than `min` arguments were supplied.
 * @param {{ length: number }} args the callee's `arguments` object
 * @param {number} min minimum required argument count
 * @param {object} ctx extra fields spread into the error (e.g. { header })
 */
webidl.argumentLengthCheck = function ({ length }, min, ctx) {
  if (length >= min) {
    return
  }
  throw webidl.errors.exception({
    message: `${min} argument${min !== 1 ? 's' : ''} required, ` +
      `but${length ? ' only' : ''} ${length} found.`,
    ...ctx
  })
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * Always throws "TypeError: Illegal constructor"; installed as the
 * constructor of non-constructible interfaces.
 */
webidl.illegalConstructor = function () {
  const message = 'Illegal constructor'
  throw webidl.errors.exception({ header: 'TypeError', message })
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values
/**
 * Returns the ECMAScript language type name of V: 'Undefined', 'Null',
 * 'Boolean', 'String', 'Symbol', 'Number', 'BigInt' or 'Object'
 * (functions count as 'Object').
 */
webidl.util.Type = function (V) {
  // typeof null === 'object', so null needs an explicit check first.
  if (V === null) {
    return 'Null'
  }
  switch (typeof V) {
    case 'undefined': return 'Undefined'
    case 'boolean': return 'Boolean'
    case 'string': return 'String'
    case 'symbol': return 'Symbol'
    case 'number': return 'Number'
    case 'bigint': return 'BigInt'
    default: return 'Object'
  }
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint
/**
 * Converts V to an integer of the given bit length and signedness.
 * @param {*} V value to convert
 * @param {number} bitLength 8, 16, 32 or 64
 * @param {'signed'|'unsigned'} signedness
 * @param {{ enforceRange?: boolean, clamp?: boolean }} [opts]
 * @returns {number}
 * @throws {TypeError} with [EnforceRange] when out of range or not finite
 */
webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) {
  let upperBound
  let lowerBound

  // 1. If bitLength is 64, the bounds are clamped to the safe-integer range.
  if (bitLength === 64) {
    // 1. Let upperBound be 2^53 − 1.
    upperBound = Math.pow(2, 53) - 1

    // 2. If signedness is "unsigned", then let lowerBound be 0.
    //    Otherwise let lowerBound be −2^53 + 1.
    if (signedness === 'unsigned') {
      lowerBound = 0
    } else {
      lowerBound = Math.pow(-2, 53) + 1
    }
  } else if (signedness === 'unsigned') {
    // 2. Otherwise, if signedness is "unsigned", then:

    // 1. Let lowerBound be 0.
    lowerBound = 0

    // 2. Let upperBound be 2^bitLength − 1.
    upperBound = Math.pow(2, bitLength) - 1
  } else {
    // 3. Otherwise (signed):

    // 1. Let lowerBound be −2^(bitLength − 1).
    // Fix: was `Math.pow(-2, bitLength) - 1`, which yields a positive bound
    // for even bit lengths and does not match the WebIDL algorithm.
    lowerBound = -Math.pow(2, bitLength - 1)

    // 2. Let upperBound be 2^(bitLength − 1) − 1.
    upperBound = Math.pow(2, bitLength - 1) - 1
  }

  // 4. Let x be ? ToNumber(V).
  let x = Number(V)

  // 5. If x is −0, then set x to +0 (assigning the +0 literal suffices,
  //    since -0 === 0).
  if (x === 0) {
    x = 0
  }

  // 6. [EnforceRange]: reject non-finite and out-of-range values.
  if (opts.enforceRange === true) {
    // 1. If x is NaN, +∞, or −∞, then throw a TypeError.
    if (
      Number.isNaN(x) ||
      x === Number.POSITIVE_INFINITY ||
      x === Number.NEGATIVE_INFINITY
    ) {
      throw webidl.errors.exception({
        header: 'Integer conversion',
        message: `Could not convert ${V} to an integer.`
      })
    }

    // 2. Set x to IntegerPart(x).
    x = webidl.util.IntegerPart(x)

    // 3. If x < lowerBound or x > upperBound, then throw a TypeError.
    if (x < lowerBound || x > upperBound) {
      throw webidl.errors.exception({
        header: 'Integer conversion',
        message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.`
      })
    }

    // 4. Return x.
    return x
  }

  // 7. [Clamp]: clamp into range, then round.
  if (!Number.isNaN(x) && opts.clamp === true) {
    // 1. Set x to min(max(x, lowerBound), upperBound).
    x = Math.min(Math.max(x, lowerBound), upperBound)

    // 2. Round x to the nearest integer, choosing the even integer if it
    //    lies halfway between two, and choosing +0 rather than −0.
    // Fix: the previous floor/ceil-by-parity logic rounded e.g. 3.2 up to 4;
    // the spec requires nearest-integer rounding with ties-to-even.
    const floor = Math.floor(x)
    if (x - floor === 0.5) {
      x = floor % 2 === 0 ? floor : floor + 1
    } else {
      x = Math.round(x)
    }

    // 3. Return x.
    return x
  }

  // 8. If x is NaN, +0, +∞, or −∞, then return +0.
  if (
    Number.isNaN(x) ||
    (x === 0 && Object.is(0, x)) ||
    x === Number.POSITIVE_INFINITY ||
    x === Number.NEGATIVE_INFINITY
  ) {
    return 0
  }

  // 9. Set x to IntegerPart(x).
  x = webidl.util.IntegerPart(x)

  // 10. Set x to x modulo 2^bitLength.
  // Fix: the spec's modulo is always non-negative, but JS `%` keeps the
  // dividend's sign, so negative inputs must be shifted into range.
  const twoToBitLength = Math.pow(2, bitLength)
  x = x % twoToBitLength
  if (x < 0) {
    x += twoToBitLength
  }

  // 11. If signedness is "signed" and x ≥ 2^(bitLength − 1), then return
  //     x − 2^bitLength (wrap into two's-complement range).
  // Fix: the threshold was `Math.pow(2, bitLength) - 1`, which never wrapped
  // values in [2^(bitLength−1), 2^bitLength) to their negative counterparts.
  if (signedness === 'signed' && x >= Math.pow(2, bitLength - 1)) {
    return x - twoToBitLength
  }

  // 12. Otherwise, return x.
  return x
}
2024-04-24 12:04:10 -04:00
// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart
/**
 * IntegerPart(n): floor(abs(n)), negated when n is negative — i.e.
 * truncation toward zero.
 */
webidl.util.IntegerPart = function (n) {
  // 1. Let r be floor(abs(n)).
  const r = Math.floor(Math.abs(n))

  // 2./3. Restore the sign for negative inputs.
  return n < 0 ? -1 * r : r
}
// https://webidl.spec.whatwg.org/#es-sequence
webidl . sequenceConverter = function ( converter ) {
return ( V ) => {
// 1. If Type(V) is not Object, throw a TypeError.
if ( webidl . util . Type ( V ) !== 'Object' ) {
throw webidl . errors . exception ( {
header : 'Sequence' ,
message : ` Value of type ${ webidl . util . Type ( V ) } is not an Object. `
} )
}
// 2. Let method be ? GetMethod(V, @@iterator).
/** @type {Generator} */
const method = V ? . [ Symbol . iterator ] ? . ( )
const seq = [ ]
// 3. If method is undefined, throw a TypeError.
if (
method === undefined ||
typeof method . next !== 'function'
) {
throw webidl . errors . exception ( {
header : 'Sequence' ,
message : 'Object is not an iterator.'
} )
}
// https://webidl.spec.whatwg.org/#create-sequence-from-iterable
while ( true ) {
const { done , value } = method . next ( )
if ( done ) {
break
}
seq . push ( converter ( value ) )
}
return seq
}
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// https://webidl.spec.whatwg.org/#es-to-record
webidl . recordConverter = function ( keyConverter , valueConverter ) {
return ( O ) => {
// 1. If Type(O) is not Object, throw a TypeError.
if ( webidl . util . Type ( O ) !== 'Object' ) {
throw webidl . errors . exception ( {
header : 'Record' ,
message : ` Value of type ${ webidl . util . Type ( O ) } is not an Object. `
} )
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 2. Let result be a new empty instance of record<K, V>.
const result = { }
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
if ( ! types . isProxy ( O ) ) {
// Object.keys only returns enumerable properties
const keys = Object . keys ( O )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
for ( const key of keys ) {
// 1. Let typedKey be key converted to an IDL value of type K.
const typedKey = keyConverter ( key )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 2. Let value be ? Get(O, key).
// 3. Let typedValue be value converted to an IDL value of type V.
const typedValue = valueConverter ( O [ key ] )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 4. Set result[typedKey] to typedValue.
result [ typedKey ] = typedValue
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 5. Return result.
return result
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 3. Let keys be ? O.[[OwnPropertyKeys]]().
const keys = Reflect . ownKeys ( O )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 4. For each key of keys.
for ( const key of keys ) {
// 1. Let desc be ? O.[[GetOwnProperty]](key).
const desc = Reflect . getOwnPropertyDescriptor ( O , key )
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 2. If desc is not undefined and desc.[[Enumerable]] is true:
if ( desc ? . enumerable ) {
// 1. Let typedKey be key converted to an IDL value of type K.
const typedKey = keyConverter ( key )
// 2. Let value be ? Get(O, key).
// 3. Let typedValue be value converted to an IDL value of type V.
const typedValue = valueConverter ( O [ key ] )
// 4. Set result[typedKey] to typedValue.
result [ typedKey ] = typedValue
}
2023-04-12 19:55:27 +08:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
// 5. Return result.
return result
2023-04-12 19:55:27 +08:00
}
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
webidl . interfaceConverter = function ( i ) {
return ( V , opts = { } ) => {
if ( opts . strict !== false && ! ( V instanceof i ) ) {
throw webidl . errors . exception ( {
header : i . name ,
message : ` Expected ${ V } to be an instance of ${ i . name } . `
} )
2023-04-12 19:55:27 +08:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
return V
}
2023-04-12 19:55:27 +08:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
webidl . dictionaryConverter = function ( converters ) {
return ( dictionary ) => {
const type = webidl . util . Type ( dictionary )
const dict = { }
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
if ( type === 'Null' || type === 'Undefined' ) {
return dict
} else if ( type !== 'Object' ) {
throw webidl . errors . exception ( {
header : 'Dictionary' ,
message : ` Expected ${ dictionary } to be one of: Null, Undefined, Object. `
} )
}
2020-05-07 12:11:11 -04:00
2024-04-24 12:04:10 -04:00
for ( const options of converters ) {
const { key , defaultValue , required , converter } = options
if ( required === true ) {
if ( ! hasOwn ( dictionary , key ) ) {
throw webidl . errors . exception ( {
header : 'Dictionary' ,
message : ` Missing required key " ${ key } ". `
} )
}
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
let value = dictionary [ key ]
const hasDefault = hasOwn ( options , 'defaultValue' )
// Only use defaultValue if value is undefined and
// a defaultValue options was provided.
if ( hasDefault && value !== null ) {
value = value ? ? defaultValue
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
// A key can be optional and have no default value.
// When this happens, do not perform a conversion,
// and do not assign the key a value.
if ( required || hasDefault || value !== undefined ) {
value = converter ( value )
if (
options . allowedValues &&
! options . allowedValues . includes ( value )
) {
throw webidl . errors . exception ( {
header : 'Dictionary' ,
message : ` ${ value } is not an accepted type. Expected one of ${ options . allowedValues . join ( ', ' ) } . `
} )
}
dict [ key ] = value
}
}
return dict
}
}
webidl . nullableConverter = function ( converter ) {
return ( V ) => {
if ( V === null ) {
return V
}
return converter ( V )
}
}
// https://webidl.spec.whatwg.org/#es-DOMString
webidl . converters . DOMString = function ( V , opts = { } ) {
// 1. If V is null and the conversion is to an IDL type
// associated with the [LegacyNullToEmptyString]
// extended attribute, then return the DOMString value
// that represents the empty string.
if ( V === null && opts . legacyNullToEmptyString ) {
return ''
}
// 2. Let x be ? ToString(V).
if ( typeof V === 'symbol' ) {
throw new TypeError ( 'Could not convert argument of type symbol to string.' )
}
// 3. Return the IDL DOMString value that represents the
// same sequence of code units as the one the
// ECMAScript String value x represents.
return String ( V )
}
// https://webidl.spec.whatwg.org/#es-ByteString
webidl . converters . ByteString = function ( V ) {
// 1. Let x be ? ToString(V).
// Note: DOMString converter perform ? ToString(V)
const x = webidl . converters . DOMString ( V )
// 2. If the value of any element of x is greater than
// 255, then throw a TypeError.
for ( let index = 0 ; index < x . length ; index ++ ) {
if ( x . charCodeAt ( index ) > 255 ) {
throw new TypeError (
'Cannot convert argument to a ByteString because the character at ' +
` index ${ index } has a value of ${ x . charCodeAt ( index ) } which is greater than 255. `
)
}
}
// 3. Return an IDL ByteString value whose length is the
// length of x, and where the value of each element is
// the value of the corresponding element of x.
return x
}
// https://webidl.spec.whatwg.org/#es-USVString
webidl . converters . USVString = toUSVString
// https://webidl.spec.whatwg.org/#es-boolean
webidl . converters . boolean = function ( V ) {
// 1. Let x be the result of computing ToBoolean(V).
const x = Boolean ( V )
// 2. Return the IDL boolean value that is the one that represents
// the same truth value as the ECMAScript Boolean value x.
return x
}
// https://webidl.spec.whatwg.org/#es-any
webidl . converters . any = function ( V ) {
return V
}
// https://webidl.spec.whatwg.org/#es-long-long
webidl . converters [ 'long long' ] = function ( V ) {
// 1. Let x be ? ConvertToInt(V, 64, "signed").
const x = webidl . util . ConvertToInt ( V , 64 , 'signed' )
// 2. Return the IDL long long value that represents
// the same numeric value as x.
return x
}
// https://webidl.spec.whatwg.org/#es-unsigned-long-long
webidl . converters [ 'unsigned long long' ] = function ( V ) {
// 1. Let x be ? ConvertToInt(V, 64, "unsigned").
const x = webidl . util . ConvertToInt ( V , 64 , 'unsigned' )
// 2. Return the IDL unsigned long long value that
// represents the same numeric value as x.
return x
}
// https://webidl.spec.whatwg.org/#es-unsigned-long
webidl . converters [ 'unsigned long' ] = function ( V ) {
// 1. Let x be ? ConvertToInt(V, 32, "unsigned").
const x = webidl . util . ConvertToInt ( V , 32 , 'unsigned' )
// 2. Return the IDL unsigned long value that
// represents the same numeric value as x.
return x
}
// https://webidl.spec.whatwg.org/#es-unsigned-short
webidl . converters [ 'unsigned short' ] = function ( V , opts ) {
// 1. Let x be ? ConvertToInt(V, 16, "unsigned").
const x = webidl . util . ConvertToInt ( V , 16 , 'unsigned' , opts )
// 2. Return the IDL unsigned short value that represents
// the same numeric value as x.
return x
}
// https://webidl.spec.whatwg.org/#idl-ArrayBuffer
webidl . converters . ArrayBuffer = function ( V , opts = { } ) {
// 1. If Type(V) is not Object, or V does not have an
// [[ArrayBufferData]] internal slot, then throw a
// TypeError.
// see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances
// see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances
if (
webidl . util . Type ( V ) !== 'Object' ||
! types . isAnyArrayBuffer ( V )
) {
throw webidl . errors . conversionFailed ( {
prefix : ` ${ V } ` ,
argument : ` ${ V } ` ,
types : [ 'ArrayBuffer' ]
} )
}
// 2. If the conversion is not to an IDL type associated
// with the [AllowShared] extended attribute, and
// IsSharedArrayBuffer(V) is true, then throw a
// TypeError.
if ( opts . allowShared === false && types . isSharedArrayBuffer ( V ) ) {
throw webidl . errors . exception ( {
header : 'ArrayBuffer' ,
message : 'SharedArrayBuffer is not allowed.'
} )
}
// 3. If the conversion is not to an IDL type associated
// with the [AllowResizable] extended attribute, and
// IsResizableArrayBuffer(V) is true, then throw a
// TypeError.
// Note: resizable ArrayBuffers are currently a proposal.
// 4. Return the IDL ArrayBuffer value that is a
// reference to the same object as V.
return V
}
webidl . converters . TypedArray = function ( V , T , opts = { } ) {
// 1. Let T be the IDL type V is being converted to.
// 2. If Type(V) is not Object, or V does not have a
// [[TypedArrayName]] internal slot with a value
// equal to T’ s name, then throw a TypeError.
if (
webidl . util . Type ( V ) !== 'Object' ||
! types . isTypedArray ( V ) ||
V . constructor . name !== T . name
) {
throw webidl . errors . conversionFailed ( {
prefix : ` ${ T . name } ` ,
argument : ` ${ V } ` ,
types : [ T . name ]
} )
}
// 3. If the conversion is not to an IDL type associated
// with the [AllowShared] extended attribute, and
// IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is
// true, then throw a TypeError.
if ( opts . allowShared === false && types . isSharedArrayBuffer ( V . buffer ) ) {
throw webidl . errors . exception ( {
header : 'ArrayBuffer' ,
message : 'SharedArrayBuffer is not allowed.'
} )
}
// 4. If the conversion is not to an IDL type associated
// with the [AllowResizable] extended attribute, and
// IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
// true, then throw a TypeError.
// Note: resizable array buffers are currently a proposal
// 5. Return the IDL value of type T that is a reference
// to the same object as V.
return V
}
webidl . converters . DataView = function ( V , opts = { } ) {
// 1. If Type(V) is not Object, or V does not have a
// [[DataView]] internal slot, then throw a TypeError.
if ( webidl . util . Type ( V ) !== 'Object' || ! types . isDataView ( V ) ) {
throw webidl . errors . exception ( {
header : 'DataView' ,
message : 'Object is not a DataView.'
} )
}
// 2. If the conversion is not to an IDL type associated
// with the [AllowShared] extended attribute, and
// IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true,
// then throw a TypeError.
if ( opts . allowShared === false && types . isSharedArrayBuffer ( V . buffer ) ) {
throw webidl . errors . exception ( {
header : 'ArrayBuffer' ,
message : 'SharedArrayBuffer is not allowed.'
} )
}
// 3. If the conversion is not to an IDL type associated
// with the [AllowResizable] extended attribute, and
// IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
// true, then throw a TypeError.
// Note: resizable ArrayBuffers are currently a proposal
// 4. Return the IDL DataView value that is a reference
// to the same object as V.
return V
}
// https://webidl.spec.whatwg.org/#BufferSource
webidl . converters . BufferSource = function ( V , opts = { } ) {
if ( types . isAnyArrayBuffer ( V ) ) {
return webidl . converters . ArrayBuffer ( V , opts )
}
if ( types . isTypedArray ( V ) ) {
return webidl . converters . TypedArray ( V , V . constructor )
}
if ( types . isDataView ( V ) ) {
return webidl . converters . DataView ( V , opts )
}
throw new TypeError ( ` Could not convert ${ V } to a BufferSource. ` )
}
webidl . converters [ 'sequence<ByteString>' ] = webidl . sequenceConverter (
webidl . converters . ByteString
)
webidl . converters [ 'sequence<sequence<ByteString>>' ] = webidl . sequenceConverter (
webidl . converters [ 'sequence<ByteString>' ]
)
webidl . converters [ 'record<ByteString, ByteString>' ] = webidl . recordConverter (
webidl . converters . ByteString ,
webidl . converters . ByteString
)
module . exports = {
webidl
}
/***/ } ) ,
/***/ 4854 :
/***/ ( ( module ) => {
"use strict" ;
/ * *
* @ see https : //encoding.spec.whatwg.org/#concept-encoding-get
* @ param { string | undefined } label
* /
function getEncoding ( label ) {
if ( ! label ) {
return 'failure'
}
// 1. Remove any leading and trailing ASCII whitespace from label.
// 2. If label is an ASCII case-insensitive match for any of the
// labels listed in the table below, then return the
// corresponding encoding; otherwise return failure.
switch ( label . trim ( ) . toLowerCase ( ) ) {
case 'unicode-1-1-utf-8' :
case 'unicode11utf8' :
case 'unicode20utf8' :
case 'utf-8' :
case 'utf8' :
case 'x-unicode20utf8' :
return 'UTF-8'
case '866' :
case 'cp866' :
case 'csibm866' :
case 'ibm866' :
return 'IBM866'
case 'csisolatin2' :
case 'iso-8859-2' :
case 'iso-ir-101' :
case 'iso8859-2' :
case 'iso88592' :
case 'iso_8859-2' :
case 'iso_8859-2:1987' :
case 'l2' :
case 'latin2' :
return 'ISO-8859-2'
case 'csisolatin3' :
case 'iso-8859-3' :
case 'iso-ir-109' :
case 'iso8859-3' :
case 'iso88593' :
case 'iso_8859-3' :
case 'iso_8859-3:1988' :
case 'l3' :
case 'latin3' :
return 'ISO-8859-3'
case 'csisolatin4' :
case 'iso-8859-4' :
case 'iso-ir-110' :
case 'iso8859-4' :
case 'iso88594' :
case 'iso_8859-4' :
case 'iso_8859-4:1988' :
case 'l4' :
case 'latin4' :
return 'ISO-8859-4'
case 'csisolatincyrillic' :
case 'cyrillic' :
case 'iso-8859-5' :
case 'iso-ir-144' :
case 'iso8859-5' :
case 'iso88595' :
case 'iso_8859-5' :
case 'iso_8859-5:1988' :
return 'ISO-8859-5'
case 'arabic' :
case 'asmo-708' :
case 'csiso88596e' :
case 'csiso88596i' :
case 'csisolatinarabic' :
case 'ecma-114' :
case 'iso-8859-6' :
case 'iso-8859-6-e' :
case 'iso-8859-6-i' :
case 'iso-ir-127' :
case 'iso8859-6' :
case 'iso88596' :
case 'iso_8859-6' :
case 'iso_8859-6:1987' :
return 'ISO-8859-6'
case 'csisolatingreek' :
case 'ecma-118' :
case 'elot_928' :
case 'greek' :
case 'greek8' :
case 'iso-8859-7' :
case 'iso-ir-126' :
case 'iso8859-7' :
case 'iso88597' :
case 'iso_8859-7' :
case 'iso_8859-7:1987' :
case 'sun_eu_greek' :
return 'ISO-8859-7'
case 'csiso88598e' :
case 'csisolatinhebrew' :
case 'hebrew' :
case 'iso-8859-8' :
case 'iso-8859-8-e' :
case 'iso-ir-138' :
case 'iso8859-8' :
case 'iso88598' :
case 'iso_8859-8' :
case 'iso_8859-8:1988' :
case 'visual' :
return 'ISO-8859-8'
case 'csiso88598i' :
case 'iso-8859-8-i' :
case 'logical' :
return 'ISO-8859-8-I'
case 'csisolatin6' :
case 'iso-8859-10' :
case 'iso-ir-157' :
case 'iso8859-10' :
case 'iso885910' :
case 'l6' :
case 'latin6' :
return 'ISO-8859-10'
case 'iso-8859-13' :
case 'iso8859-13' :
case 'iso885913' :
return 'ISO-8859-13'
case 'iso-8859-14' :
case 'iso8859-14' :
case 'iso885914' :
return 'ISO-8859-14'
case 'csisolatin9' :
case 'iso-8859-15' :
case 'iso8859-15' :
case 'iso885915' :
case 'iso_8859-15' :
case 'l9' :
return 'ISO-8859-15'
case 'iso-8859-16' :
return 'ISO-8859-16'
case 'cskoi8r' :
case 'koi' :
case 'koi8' :
case 'koi8-r' :
case 'koi8_r' :
return 'KOI8-R'
case 'koi8-ru' :
case 'koi8-u' :
return 'KOI8-U'
case 'csmacintosh' :
case 'mac' :
case 'macintosh' :
case 'x-mac-roman' :
return 'macintosh'
case 'iso-8859-11' :
case 'iso8859-11' :
case 'iso885911' :
case 'tis-620' :
case 'windows-874' :
return 'windows-874'
case 'cp1250' :
case 'windows-1250' :
case 'x-cp1250' :
return 'windows-1250'
case 'cp1251' :
case 'windows-1251' :
case 'x-cp1251' :
return 'windows-1251'
case 'ansi_x3.4-1968' :
case 'ascii' :
case 'cp1252' :
case 'cp819' :
case 'csisolatin1' :
case 'ibm819' :
case 'iso-8859-1' :
case 'iso-ir-100' :
case 'iso8859-1' :
case 'iso88591' :
case 'iso_8859-1' :
case 'iso_8859-1:1987' :
case 'l1' :
case 'latin1' :
case 'us-ascii' :
case 'windows-1252' :
case 'x-cp1252' :
return 'windows-1252'
case 'cp1253' :
case 'windows-1253' :
case 'x-cp1253' :
return 'windows-1253'
case 'cp1254' :
case 'csisolatin5' :
case 'iso-8859-9' :
case 'iso-ir-148' :
case 'iso8859-9' :
case 'iso88599' :
case 'iso_8859-9' :
case 'iso_8859-9:1989' :
case 'l5' :
case 'latin5' :
case 'windows-1254' :
case 'x-cp1254' :
return 'windows-1254'
case 'cp1255' :
case 'windows-1255' :
case 'x-cp1255' :
return 'windows-1255'
case 'cp1256' :
case 'windows-1256' :
case 'x-cp1256' :
return 'windows-1256'
case 'cp1257' :
case 'windows-1257' :
case 'x-cp1257' :
return 'windows-1257'
case 'cp1258' :
case 'windows-1258' :
case 'x-cp1258' :
return 'windows-1258'
case 'x-mac-cyrillic' :
case 'x-mac-ukrainian' :
return 'x-mac-cyrillic'
case 'chinese' :
case 'csgb2312' :
case 'csiso58gb231280' :
case 'gb2312' :
case 'gb_2312' :
case 'gb_2312-80' :
case 'gbk' :
case 'iso-ir-58' :
case 'x-gbk' :
return 'GBK'
case 'gb18030' :
return 'gb18030'
case 'big5' :
case 'big5-hkscs' :
case 'cn-big5' :
case 'csbig5' :
case 'x-x-big5' :
return 'Big5'
case 'cseucpkdfmtjapanese' :
case 'euc-jp' :
case 'x-euc-jp' :
return 'EUC-JP'
case 'csiso2022jp' :
case 'iso-2022-jp' :
return 'ISO-2022-JP'
case 'csshiftjis' :
case 'ms932' :
case 'ms_kanji' :
case 'shift-jis' :
case 'shift_jis' :
case 'sjis' :
case 'windows-31j' :
case 'x-sjis' :
return 'Shift_JIS'
case 'cseuckr' :
case 'csksc56011987' :
case 'euc-kr' :
case 'iso-ir-149' :
case 'korean' :
case 'ks_c_5601-1987' :
case 'ks_c_5601-1989' :
case 'ksc5601' :
case 'ksc_5601' :
case 'windows-949' :
return 'EUC-KR'
case 'csiso2022kr' :
case 'hz-gb-2312' :
case 'iso-2022-cn' :
case 'iso-2022-cn-ext' :
case 'iso-2022-kr' :
case 'replacement' :
return 'replacement'
case 'unicodefffe' :
case 'utf-16be' :
return 'UTF-16BE'
case 'csunicode' :
case 'iso-10646-ucs-2' :
case 'ucs-2' :
case 'unicode' :
case 'unicodefeff' :
case 'utf-16' :
case 'utf-16le' :
return 'UTF-16LE'
case 'x-user-defined' :
return 'x-user-defined'
default : return 'failure'
}
}
module . exports = {
getEncoding
}
/***/ } ) ,
/***/ 1446 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
staticPropertyDescriptors ,
readOperation ,
fireAProgressEvent
} = _ _nccwpck _require _ _ ( 7530 )
const {
kState ,
kError ,
kResult ,
kEvents ,
kAborted
} = _ _nccwpck _require _ _ ( 9054 )
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
class FileReader extends EventTarget {
constructor ( ) {
super ( )
this [ kState ] = 'empty'
this [ kResult ] = null
this [ kError ] = null
this [ kEvents ] = {
loadend : null ,
error : null ,
abort : null ,
load : null ,
progress : null ,
loadstart : null
}
}
/ * *
* @ see https : //w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
* @ param { import ( 'buffer' ) . Blob } blob
* /
readAsArrayBuffer ( blob ) {
webidl . brandCheck ( this , FileReader )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'FileReader.readAsArrayBuffer' } )
blob = webidl . converters . Blob ( blob , { strict : false } )
// The readAsArrayBuffer(blob) method, when invoked,
// must initiate a read operation for blob with ArrayBuffer.
readOperation ( this , blob , 'ArrayBuffer' )
}
/ * *
* @ see https : //w3c.github.io/FileAPI/#readAsBinaryString
* @ param { import ( 'buffer' ) . Blob } blob
* /
readAsBinaryString ( blob ) {
webidl . brandCheck ( this , FileReader )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'FileReader.readAsBinaryString' } )
blob = webidl . converters . Blob ( blob , { strict : false } )
// The readAsBinaryString(blob) method, when invoked,
// must initiate a read operation for blob with BinaryString.
readOperation ( this , blob , 'BinaryString' )
}
/ * *
* @ see https : //w3c.github.io/FileAPI/#readAsDataText
* @ param { import ( 'buffer' ) . Blob } blob
* @ param { string ? } encoding
* /
readAsText ( blob , encoding = undefined ) {
webidl . brandCheck ( this , FileReader )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'FileReader.readAsText' } )
blob = webidl . converters . Blob ( blob , { strict : false } )
if ( encoding !== undefined ) {
encoding = webidl . converters . DOMString ( encoding )
}
// The readAsText(blob, encoding) method, when invoked,
// must initiate a read operation for blob with Text and encoding.
readOperation ( this , blob , 'Text' , encoding )
}
/ * *
* @ see https : //w3c.github.io/FileAPI/#dfn-readAsDataURL
* @ param { import ( 'buffer' ) . Blob } blob
* /
readAsDataURL ( blob ) {
webidl . brandCheck ( this , FileReader )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'FileReader.readAsDataURL' } )
blob = webidl . converters . Blob ( blob , { strict : false } )
// The readAsDataURL(blob) method, when invoked, must
// initiate a read operation for blob with DataURL.
readOperation ( this , blob , 'DataURL' )
}
/ * *
* @ see https : //w3c.github.io/FileAPI/#dfn-abort
* /
abort ( ) {
// 1. If this's state is "empty" or if this's state is
// "done" set this's result to null and terminate
// this algorithm.
if ( this [ kState ] === 'empty' || this [ kState ] === 'done' ) {
this [ kResult ] = null
return
}
// 2. If this's state is "loading" set this's state to
// "done" and set this's result to null.
if ( this [ kState ] === 'loading' ) {
this [ kState ] = 'done'
this [ kResult ] = null
}
// 3. If there are any tasks from this on the file reading
// task source in an affiliated task queue, then remove
// those tasks from that task queue.
this [ kAborted ] = true
// 4. Terminate the algorithm for the read method being processed.
// TODO
// 5. Fire a progress event called abort at this.
fireAProgressEvent ( 'abort' , this )
// 6. If this's state is not "loading", fire a progress
// event called loadend at this.
if ( this [ kState ] !== 'loading' ) {
fireAProgressEvent ( 'loadend' , this )
}
}
/ * *
* @ see https : //w3c.github.io/FileAPI/#dom-filereader-readystate
* /
get readyState ( ) {
webidl . brandCheck ( this , FileReader )
switch ( this [ kState ] ) {
case 'empty' : return this . EMPTY
case 'loading' : return this . LOADING
case 'done' : return this . DONE
}
}
/ * *
* @ see https : //w3c.github.io/FileAPI/#dom-filereader-result
* /
get result ( ) {
webidl . brandCheck ( this , FileReader )
// The result attribute’ s getter, when invoked, must return
// this's result.
return this [ kResult ]
}
/ * *
* @ see https : //w3c.github.io/FileAPI/#dom-filereader-error
* /
get error ( ) {
webidl . brandCheck ( this , FileReader )
// The error attribute’ s getter, when invoked, must return
// this's error.
return this [ kError ]
}
get onloadend ( ) {
webidl . brandCheck ( this , FileReader )
return this [ kEvents ] . loadend
}
set onloadend ( fn ) {
webidl . brandCheck ( this , FileReader )
if ( this [ kEvents ] . loadend ) {
this . removeEventListener ( 'loadend' , this [ kEvents ] . loadend )
}
if ( typeof fn === 'function' ) {
this [ kEvents ] . loadend = fn
this . addEventListener ( 'loadend' , fn )
} else {
this [ kEvents ] . loadend = null
}
}
get onerror ( ) {
webidl . brandCheck ( this , FileReader )
return this [ kEvents ] . error
}
set onerror ( fn ) {
webidl . brandCheck ( this , FileReader )
if ( this [ kEvents ] . error ) {
this . removeEventListener ( 'error' , this [ kEvents ] . error )
}
if ( typeof fn === 'function' ) {
this [ kEvents ] . error = fn
this . addEventListener ( 'error' , fn )
} else {
this [ kEvents ] . error = null
}
}
get onloadstart ( ) {
webidl . brandCheck ( this , FileReader )
return this [ kEvents ] . loadstart
}
set onloadstart ( fn ) {
webidl . brandCheck ( this , FileReader )
if ( this [ kEvents ] . loadstart ) {
this . removeEventListener ( 'loadstart' , this [ kEvents ] . loadstart )
}
if ( typeof fn === 'function' ) {
this [ kEvents ] . loadstart = fn
this . addEventListener ( 'loadstart' , fn )
} else {
this [ kEvents ] . loadstart = null
}
}
get onprogress ( ) {
webidl . brandCheck ( this , FileReader )
return this [ kEvents ] . progress
}
set onprogress ( fn ) {
webidl . brandCheck ( this , FileReader )
if ( this [ kEvents ] . progress ) {
this . removeEventListener ( 'progress' , this [ kEvents ] . progress )
}
if ( typeof fn === 'function' ) {
this [ kEvents ] . progress = fn
this . addEventListener ( 'progress' , fn )
} else {
this [ kEvents ] . progress = null
}
}
get onload ( ) {
webidl . brandCheck ( this , FileReader )
return this [ kEvents ] . load
}
set onload ( fn ) {
webidl . brandCheck ( this , FileReader )
if ( this [ kEvents ] . load ) {
this . removeEventListener ( 'load' , this [ kEvents ] . load )
}
if ( typeof fn === 'function' ) {
this [ kEvents ] . load = fn
this . addEventListener ( 'load' , fn )
} else {
this [ kEvents ] . load = null
}
}
get onabort ( ) {
webidl . brandCheck ( this , FileReader )
return this [ kEvents ] . abort
}
set onabort ( fn ) {
webidl . brandCheck ( this , FileReader )
if ( this [ kEvents ] . abort ) {
this . removeEventListener ( 'abort' , this [ kEvents ] . abort )
}
if ( typeof fn === 'function' ) {
this [ kEvents ] . abort = fn
this . addEventListener ( 'abort' , fn )
} else {
this [ kEvents ] . abort = null
}
}
}
// https://w3c.github.io/FileAPI/#dom-filereader-empty
FileReader . EMPTY = FileReader . prototype . EMPTY = 0
// https://w3c.github.io/FileAPI/#dom-filereader-loading
FileReader . LOADING = FileReader . prototype . LOADING = 1
// https://w3c.github.io/FileAPI/#dom-filereader-done
FileReader . DONE = FileReader . prototype . DONE = 2
Object . defineProperties ( FileReader . prototype , {
EMPTY : staticPropertyDescriptors ,
LOADING : staticPropertyDescriptors ,
DONE : staticPropertyDescriptors ,
readAsArrayBuffer : kEnumerableProperty ,
readAsBinaryString : kEnumerableProperty ,
readAsText : kEnumerableProperty ,
readAsDataURL : kEnumerableProperty ,
abort : kEnumerableProperty ,
readyState : kEnumerableProperty ,
result : kEnumerableProperty ,
error : kEnumerableProperty ,
onloadstart : kEnumerableProperty ,
onprogress : kEnumerableProperty ,
onload : kEnumerableProperty ,
onabort : kEnumerableProperty ,
onerror : kEnumerableProperty ,
onloadend : kEnumerableProperty ,
[ Symbol . toStringTag ] : {
value : 'FileReader' ,
writable : false ,
enumerable : false ,
configurable : true
}
} )
Object . defineProperties ( FileReader , {
EMPTY : staticPropertyDescriptors ,
LOADING : staticPropertyDescriptors ,
DONE : staticPropertyDescriptors
} )
module . exports = {
FileReader
}
/***/ } ) ,
/***/ 5504 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const kState = Symbol ( 'ProgressEvent state' )
/ * *
* @ see https : //xhr.spec.whatwg.org/#progressevent
* /
class ProgressEvent extends Event {
constructor ( type , eventInitDict = { } ) {
type = webidl . converters . DOMString ( type )
eventInitDict = webidl . converters . ProgressEventInit ( eventInitDict ? ? { } )
super ( type , eventInitDict )
this [ kState ] = {
lengthComputable : eventInitDict . lengthComputable ,
loaded : eventInitDict . loaded ,
total : eventInitDict . total
}
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
get lengthComputable ( ) {
webidl . brandCheck ( this , ProgressEvent )
return this [ kState ] . lengthComputable
}
get loaded ( ) {
webidl . brandCheck ( this , ProgressEvent )
return this [ kState ] . loaded
}
get total ( ) {
webidl . brandCheck ( this , ProgressEvent )
return this [ kState ] . total
}
}
webidl . converters . ProgressEventInit = webidl . dictionaryConverter ( [
{
key : 'lengthComputable' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'loaded' ,
converter : webidl . converters [ 'unsigned long long' ] ,
defaultValue : 0
} ,
{
key : 'total' ,
converter : webidl . converters [ 'unsigned long long' ] ,
defaultValue : 0
} ,
{
key : 'bubbles' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'cancelable' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'composed' ,
converter : webidl . converters . boolean ,
defaultValue : false
}
] )
module . exports = {
ProgressEvent
}
/***/ } ) ,
/***/ 9054 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = {
kState : Symbol ( 'FileReader state' ) ,
kResult : Symbol ( 'FileReader result' ) ,
kError : Symbol ( 'FileReader error' ) ,
kLastProgressEventFired : Symbol ( 'FileReader last progress event fired timestamp' ) ,
kEvents : Symbol ( 'FileReader events' ) ,
kAborted : Symbol ( 'FileReader aborted' )
}
/***/ } ) ,
/***/ 7530 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const {
kState ,
kError ,
kResult ,
kAborted ,
kLastProgressEventFired
} = _ _nccwpck _require _ _ ( 9054 )
const { ProgressEvent } = _ _nccwpck _require _ _ ( 5504 )
const { getEncoding } = _ _nccwpck _require _ _ ( 4854 )
const { DOMException } = _ _nccwpck _require _ _ ( 1037 )
const { serializeAMimeType , parseMIMEType } = _ _nccwpck _require _ _ ( 685 )
const { types } = _ _nccwpck _require _ _ ( 3837 )
const { StringDecoder } = _ _nccwpck _require _ _ ( 1576 )
const { btoa } = _ _nccwpck _require _ _ ( 4300 )
/** @type {PropertyDescriptor} */
const staticPropertyDescriptors = {
enumerable : true ,
writable : false ,
configurable : false
}
/**
 * Starts an asynchronous read of `blob` and reflects progress/result/error
 * onto the FileReader instance `fr` via its symbol-keyed slots
 * (kState/kResult/kError) and DOM progress events.
 *
 * @see https://w3c.github.io/FileAPI/#readOperation
 * @param {import('./filereader').FileReader} fr
 * @param {import('buffer').Blob} blob
 * @param {string} type one of 'DataURL' | 'Text' | 'ArrayBuffer' | 'BinaryString'
 * @param {string?} encodingName label used for the 'Text' packaging step
 * @throws {DOMException} InvalidStateError when a read is already in flight
 */
function readOperation (fr, blob, type, encodingName) {
  // 1. If fr’s state is "loading", throw an InvalidStateError
  //    DOMException.
  if (fr[kState] === 'loading') {
    throw new DOMException('Invalid state', 'InvalidStateError')
  }

  // 2. Set fr’s state to "loading".
  fr[kState] = 'loading'

  // 3. Set fr’s result to null.
  fr[kResult] = null

  // 4. Set fr’s error to null.
  fr[kError] = null

  // 5. Let stream be the result of calling get stream on blob.
  /** @type {import('stream/web').ReadableStream} */
  const stream = blob.stream()

  // 6. Let reader be the result of getting a reader from stream.
  const reader = stream.getReader()

  // 7. Let bytes be an empty byte sequence.
  /** @type {Uint8Array[]} */
  const bytes = []

  // 8. Let chunkPromise be the result of reading a chunk from
  //    stream with reader.
  let chunkPromise = reader.read()

  // 9. Let isFirstChunk be true.
  let isFirstChunk = true

  // 10. In parallel, while true:
  // Note: "In parallel" just means non-blocking
  // Note 2: readOperation itself cannot be async as double
  //         reading the body would then reject the promise, instead
  //         of throwing an error.
  ;(async () => {
    // Every iteration re-checks kAborted so an abort() between chunks
    // stops the loop without firing further events.
    while (!fr[kAborted]) {
      // 1. Wait for chunkPromise to be fulfilled or rejected.
      try {
        const { done, value } = await chunkPromise

        // 2. If chunkPromise is fulfilled, and isFirstChunk is
        //    true, queue a task to fire a progress event called
        //    loadstart at fr.
        if (isFirstChunk && !fr[kAborted]) {
          queueMicrotask(() => {
            fireAProgressEvent('loadstart', fr)
          })
        }

        // 3. Set isFirstChunk to false.
        isFirstChunk = false

        // 4. If chunkPromise is fulfilled with an object whose
        //    done property is false and whose value property is
        //    a Uint8Array object, run these steps:
        if (!done && types.isUint8Array(value)) {
          // 1. Let bs be the byte sequence represented by the
          //    Uint8Array object.

          // 2. Append bs to bytes.
          bytes.push(value)

          // 3. If roughly 50ms have passed since these steps
          //    were last invoked, queue a task to fire a
          //    progress event called progress at fr.
          //    (Throttled via the kLastProgressEventFired timestamp.)
          if (
            (
              fr[kLastProgressEventFired] === undefined ||
              Date.now() - fr[kLastProgressEventFired] >= 50
            ) &&
            !fr[kAborted]
          ) {
            fr[kLastProgressEventFired] = Date.now()
            queueMicrotask(() => {
              fireAProgressEvent('progress', fr)
            })
          }

          // 4. Set chunkPromise to the result of reading a
          //    chunk from stream with reader.
          chunkPromise = reader.read()
        } else if (done) {
          // 5. Otherwise, if chunkPromise is fulfilled with an
          //    object whose done property is true, queue a task
          //    to run the following steps and abort this algorithm:
          queueMicrotask(() => {
            // 1. Set fr’s state to "done".
            fr[kState] = 'done'

            // 2. Let result be the result of package data given
            //    bytes, type, blob’s type, and encodingName.
            try {
              const result = packageData(bytes, type, blob.type, encodingName)

              // 4. Else:
              // An abort that lands between stream completion and this
              // microtask must suppress the load event.
              if (fr[kAborted]) {
                return
              }

              // 1. Set fr’s result to result.
              fr[kResult] = result

              // 2. Fire a progress event called load at the fr.
              fireAProgressEvent('load', fr)
            } catch (error) {
              // 3. If package data threw an exception error:

              // 1. Set fr’s error to error.
              fr[kError] = error

              // 2. Fire a progress event called error at fr.
              fireAProgressEvent('error', fr)
            }

            // 5. If fr’s state is not "loading", fire a progress
            //    event called loadend at the fr.
            if (fr[kState] !== 'loading') {
              fireAProgressEvent('loadend', fr)
            }
          })

          break
        }
      } catch (error) {
        if (fr[kAborted]) {
          return
        }

        // 6. Otherwise, if chunkPromise is rejected with an
        //    error error, queue a task to run the following
        //    steps and abort this algorithm:
        queueMicrotask(() => {
          // 1. Set fr’s state to "done".
          fr[kState] = 'done'

          // 2. Set fr’s error to error.
          fr[kError] = error

          // 3. Fire a progress event called error at fr.
          fireAProgressEvent('error', fr)

          // 4. If fr’s state is not "loading", fire a progress
          //    event called loadend at fr.
          if (fr[kState] !== 'loading') {
            fireAProgressEvent('loadend', fr)
          }
        })

        break
      }
    }
  })()
}
/**
 * Dispatches a ProgressEvent named `e` on `reader`.
 * @see https://w3c.github.io/FileAPI/#fire-a-progress-event
 * @see https://dom.spec.whatwg.org/#concept-event-fire
 * @param {string} e The name of the event
 * @param {import('./filereader').FileReader} reader
 */
function fireAProgressEvent (e, reader) {
  // Per spec, progress events neither bubble nor are cancelable.
  const init = { bubbles: false, cancelable: false }
  reader.dispatchEvent(new ProgressEvent(e, init))
}
/**
 * Converts the collected byte chunks into the representation a read
 * operation was asked for.
 * @see https://w3c.github.io/FileAPI/#blob-package-data
 * @param {Uint8Array[]} bytes
 * @param {string} type 'DataURL' | 'Text' | 'ArrayBuffer' | 'BinaryString'
 * @param {string?} mimeType the blob's type, if any
 * @param {string?} encodingName preferred text encoding label
 */
function packageData (bytes, type, mimeType, encodingName) {
  switch (type) {
    case 'DataURL': {
      // RFC 2397: dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
      // Fall back to application/octet-stream; omit the mediatype entirely
      // if even the fallback fails to parse.
      const parsed = parseMIMEType(mimeType || 'application/octet-stream')
      const mediatype = parsed === 'failure' ? '' : serializeAMimeType(parsed)

      // latin1 maps bytes 1:1 to code units, so each flushed chunk can be
      // base64-encoded independently and concatenated.
      const decoder = new StringDecoder('latin1')
      let data = ''
      for (const chunk of bytes) {
        data += btoa(decoder.write(chunk))
      }
      data += btoa(decoder.end())

      return 'data:' + mediatype + ';base64,' + data
    }
    case 'Text': {
      // Prefer the explicit encoding label, then the charset parameter of
      // the MIME type, then UTF-8.
      let encoding = encodingName ? getEncoding(encodingName) : 'failure'

      if (encoding === 'failure' && mimeType) {
        const parsedType = parseMIMEType(mimeType)
        if (parsedType !== 'failure') {
          encoding = getEncoding(parsedType.parameters.get('charset'))
        }
      }

      if (encoding === 'failure') {
        encoding = 'UTF-8'
      }

      return decode(bytes, encoding)
    }
    case 'ArrayBuffer': {
      // Return a new ArrayBuffer whose contents are bytes.
      return combineByteSequences(bytes).buffer
    }
    case 'BinaryString': {
      // Every byte becomes a code unit of equal value [0..255].
      const decoder = new StringDecoder('latin1')
      let binaryString = ''
      for (const chunk of bytes) {
        binaryString += decoder.write(chunk)
      }
      return binaryString + decoder.end()
    }
  }
}
/**
 * Decodes a byte-chunk queue into a string, letting a BOM override the
 * supplied encoding label.
 * @see https://encoding.spec.whatwg.org/#decode
 * @param {Uint8Array[]} ioQueue
 * @param {string} encoding fallback encoding label
 * @returns {string}
 */
function decode (ioQueue, encoding) {
  const bytes = combineByteSequences(ioQueue)

  // A detected BOM wins over the caller-supplied encoding.
  const sniffed = BOMSniffing(bytes)

  let start = 0
  if (sniffed !== null) {
    encoding = sniffed
    // Skip the BOM itself: 3 bytes for UTF-8, 2 for UTF-16(BE|LE).
    start = sniffed === 'UTF-8' ? 3 : 2
  }

  return new TextDecoder(encoding).decode(bytes.slice(start))
}
/**
 * Inspects the first bytes of `ioQueue` for a byte-order mark.
 * @see https://encoding.spec.whatwg.org/#bom-sniff
 * @param {Uint8Array} ioQueue
 * @returns {'UTF-8'|'UTF-16BE'|'UTF-16LE'|null} detected encoding, or null
 */
function BOMSniffing (ioQueue) {
  const first = ioQueue[0]
  const second = ioQueue[1]
  const third = ioQueue[2]

  // Checked in spec order: the 3-byte UTF-8 BOM first, then the two
  // 2-byte UTF-16 BOMs.
  if (first === 0xEF && second === 0xBB && third === 0xBF) {
    return 'UTF-8'
  }
  if (first === 0xFE && second === 0xFF) {
    return 'UTF-16BE'
  }
  if (first === 0xFF && second === 0xFE) {
    return 'UTF-16LE'
  }

  return null
}
/**
 * Concatenates a list of byte chunks into one contiguous Uint8Array.
 * @param {Uint8Array[]} sequences
 * @returns {Uint8Array}
 */
function combineByteSequences (sequences) {
  let total = 0
  for (const seq of sequences) {
    total += seq.byteLength
  }

  const combined = new Uint8Array(total)
  let cursor = 0
  for (const seq of sequences) {
    combined.set(seq, cursor)
    cursor += seq.byteLength
  }

  return combined
}
module . exports = {
staticPropertyDescriptors ,
readOperation ,
fireAProgressEvent
}
/***/ } ) ,
/***/ 1892 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict";

// We include a version number for the Dispatcher API. In case of breaking changes,
// this version number must be increased to avoid conflicts.
const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
const { InvalidArgumentError } = __nccwpck_require__(8045)
const Agent = __nccwpck_require__(7890)

// Install a default Agent on first evaluation. This call relies on
// function hoisting: both functions are declared further down.
if (getGlobalDispatcher() === undefined) {
  setGlobalDispatcher(new Agent())
}

/**
 * Stores `agent` as the process-wide dispatcher on globalThis.
 * @param {object} agent must expose a dispatch() function
 * @throws {InvalidArgumentError} when agent lacks dispatch()
 */
function setGlobalDispatcher (agent) {
  if (!agent || typeof agent.dispatch !== 'function') {
    throw new InvalidArgumentError('Argument agent must implement Agent')
  }
  // writable:true lets a later setGlobalDispatcher call replace the value;
  // configurable:false prevents deleting or redefining the slot itself.
  Object.defineProperty(globalThis, globalDispatcher, {
    value: agent,
    writable: true,
    enumerable: false,
    configurable: false
  })
}

/** Returns the process-wide dispatcher (undefined before first set). */
function getGlobalDispatcher () {
  return globalThis[globalDispatcher]
}

module.exports = {
  setGlobalDispatcher,
  getGlobalDispatcher
}
/***/ } ) ,
/***/ 6930 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = class DecoratorHandler {
constructor ( handler ) {
this . handler = handler
}
onConnect ( ... args ) {
return this . handler . onConnect ( ... args )
}
onError ( ... args ) {
return this . handler . onError ( ... args )
}
onUpgrade ( ... args ) {
return this . handler . onUpgrade ( ... args )
}
onHeaders ( ... args ) {
return this . handler . onHeaders ( ... args )
}
onData ( ... args ) {
return this . handler . onData ( ... args )
}
onComplete ( ... args ) {
return this . handler . onComplete ( ... args )
}
onBodySent ( ... args ) {
return this . handler . onBodySent ( ... args )
}
}
/***/ } ) ,
/***/ 2860 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const util = _ _nccwpck _require _ _ ( 3983 )
const { kBodyUsed } = _ _nccwpck _require _ _ ( 2785 )
const assert = _ _nccwpck _require _ _ ( 9491 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const EE = _ _nccwpck _require _ _ ( 2361 )
const redirectableStatusCodes = [ 300 , 301 , 302 , 303 , 307 , 308 ]
const kBody = Symbol ( 'body' )
class BodyAsyncIterable {
constructor ( body ) {
this [ kBody ] = body
this [ kBodyUsed ] = false
}
async * [ Symbol . asyncIterator ] ( ) {
assert ( ! this [ kBodyUsed ] , 'disturbed' )
this [ kBodyUsed ] = true
yield * this [ kBody ]
}
}
/**
 * Dispatch handler that transparently follows 3xx redirects: on a
 * redirect response it rewrites the (copied) request options and
 * re-dispatches in onComplete, until the redirect budget is spent or a
 * non-redirect response arrives.
 */
class RedirectHandler {
  constructor (dispatch, maxRedirections, opts, handler) {
    if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    util.validateHandler(handler, opts.method, opts.upgrade)

    this.dispatch = dispatch
    this.location = null
    this.abort = null
    this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy
    this.maxRedirections = maxRedirections
    this.handler = handler
    this.history = []

    if (util.isStream(this.opts.body)) {
      // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
      // so that it can be dispatched again?
      // TODO (fix): Do we need 100-expect support to provide a way to do this properly?
      if (util.bodyLength(this.opts.body) === 0) {
        this.opts.body
          .on('data', function () {
            assert(false)
          })
      }

      // Track whether a stream body has been read so a redirect won't
      // silently re-send a disturbed stream.
      if (typeof this.opts.body.readableDidRead !== 'boolean') {
        this.opts.body[kBodyUsed] = false
        EE.prototype.on.call(this.opts.body, 'data', function () {
          this[kBodyUsed] = true
        })
      }
    } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {
      // TODO (fix): We can't access ReadableStream internal state
      // to determine whether or not it has been disturbed. This is just
      // a workaround.
      this.opts.body = new BodyAsyncIterable(this.opts.body)
    } else if (
      this.opts.body &&
      typeof this.opts.body !== 'string' &&
      !ArrayBuffer.isView(this.opts.body) &&
      util.isIterable(this.opts.body)
    ) {
      // TODO: Should we allow re-using iterable if !this.opts.idempotent
      // or through some other flag?
      this.opts.body = new BodyAsyncIterable(this.opts.body)
    }
  }

  onConnect (abort) {
    this.abort = abort
    this.handler.onConnect(abort, { history: this.history })
  }

  onUpgrade (statusCode, headers, socket) {
    this.handler.onUpgrade(statusCode, headers, socket)
  }

  onError (error) {
    this.handler.onError(error)
  }

  onHeaders (statusCode, headers, resume, statusText) {
    // Only follow the redirect while budget remains and the body is
    // still replayable; otherwise deliver the 3xx response as-is.
    this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)
      ? null
      : parseLocation(statusCode, headers)

    if (this.opts.origin) {
      this.history.push(new URL(this.opts.path, this.opts.origin))
    }

    if (!this.location) {
      return this.handler.onHeaders(statusCode, headers, resume, statusText)
    }

    // Resolve the (possibly relative) Location against the current URL.
    const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
    const path = search ? `${pathname}${search}` : pathname

    // Remove headers referring to the original URL.
    // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers.
    // https://tools.ietf.org/html/rfc7231#section-6.4
    this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)
    this.opts.path = path
    this.opts.origin = origin
    this.opts.maxRedirections = 0
    this.opts.query = null

    // https://tools.ietf.org/html/rfc7231#section-6.4.4
    // In case of HTTP 303, always replace method to be either HEAD or GET
    if (statusCode === 303 && this.opts.method !== 'HEAD') {
      this.opts.method = 'GET'
      this.opts.body = null
    }
  }

  onData (chunk) {
    if (this.location) {
      /*
        https://tools.ietf.org/html/rfc7231#section-6.4

        TLDR: undici always ignores 3xx response bodies.

        Redirection is used to serve the requested resource from another URL, so it is assumes that
        no body is generated (and thus can be ignored). Even though generating a body is not prohibited.

        For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually
        (which means it's optional and not mandated) contain just an hyperlink to the value of
        the Location response header, so the body can be ignored safely.

        For status 300, which is "Multiple Choices", the spec mentions both generating a Location
        response header AND a response body with the other possible location to follow.
        Since the spec explicitily chooses not to specify a format for such body and leave it to
        servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it.
      */
    } else {
      return this.handler.onData(chunk)
    }
  }

  onComplete (trailers) {
    if (this.location) {
      /*
        https://tools.ietf.org/html/rfc7231#section-6.4

        TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections
        and neither are useful if present.

        See comment on onData method above for more detailed informations.
      */

      this.location = null
      this.abort = null

      // Re-dispatch the rewritten request; this instance stays the handler
      // so nested redirects keep accumulating in this.history.
      this.dispatch(this.opts, this)
    } else {
      this.handler.onComplete(trailers)
    }
  }

  onBodySent (chunk) {
    if (this.handler.onBodySent) {
      this.handler.onBodySent(chunk)
    }
  }
}
/**
 * Returns the Location header value for a redirectable 3xx response, or
 * null when the status code is not redirectable. `headers` is the raw
 * flat [name, value, name, value, ...] array; returns undefined when a
 * redirectable response carries no Location header.
 */
function parseLocation (statusCode, headers) {
  if (!redirectableStatusCodes.includes(statusCode)) {
    return null
  }

  for (let i = 0; i < headers.length; i += 2) {
    if (headers[i].toString().toLowerCase() === 'location') {
      return headers[i + 1]
    }
  }
}
// https://tools.ietf.org/html/rfc7231#section-6.4.4
/**
 * Decides whether a request header must be dropped when following a
 * redirect. The length checks are a cheap pre-filter before the string
 * comparison: host (4), cookie (6), authorization (13),
 * proxy-authorization (19).
 * @param {string|Buffer} header header name
 * @param {boolean} removeContent drop all content-* headers (303 case)
 * @param {boolean} unknownOrigin redirect crosses origins
 * @returns {boolean}
 */
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
  if (header.length === 4) {
    return util.headerNameToString(header) === 'host'
  }
  if (removeContent && util.headerNameToString(header).startsWith('content-')) {
    return true
  }
  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
    const name = util.headerNameToString(header)
    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
  }
  return false
}
// https://tools.ietf.org/html/rfc7231#section-6.4
/**
 * Returns a flat [name, value, ...] array with redirect-sensitive
 * headers removed. Accepts either the raw flat-array form or an object
 * map; anything else must be null/undefined.
 * @param {string[]|Object|null} headers
 * @param {boolean} removeContent drop content-* headers (303 case)
 * @param {boolean} unknownOrigin redirect crosses origins
 * @returns {string[]}
 */
function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
  const ret = []
  if (Array.isArray(headers)) {
    for (let i = 0; i < headers.length; i += 2) {
      if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) {
        ret.push(headers[i], headers[i + 1])
      }
    }
  } else if (headers && typeof headers === 'object') {
    for (const key of Object.keys(headers)) {
      if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {
        ret.push(key, headers[key])
      }
    }
  } else {
    assert(headers == null, 'headers must be an object or an array')
  }
  return ret
}
module.exports = RedirectHandler
/***/ } ) ,
/***/ 2286 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
const assert = _ _nccwpck _require _ _ ( 9491 )
const { kRetryHandlerDefaultRetry } = _ _nccwpck _require _ _ ( 2785 )
const { RequestRetryError } = _ _nccwpck _require _ _ ( 8045 )
const { isDisturbed , parseHeaders , parseRangeHeader } = _ _nccwpck _require _ _ ( 3983 )
/**
 * Milliseconds remaining until an HTTP-date Retry-After value elapses
 * (negative when the date is already in the past, NaN when unparseable).
 * @param {string|number|Date} retryAfter value accepted by the Date constructor
 * @returns {number}
 */
function calculateRetryAfterHeader (retryAfter) {
  return new Date(retryAfter).getTime() - Date.now()
}
/**
 * Dispatch handler that retries failed requests according to
 * `opts.retryOptions`, and resumes partial downloads by checkpointing
 * the byte offset and re-requesting with a Range header.
 */
class RetryHandler {
  constructor (opts, handlers) {
    const { retryOptions, ...dispatchOpts } = opts
    const {
      // Retry scoped
      retry: retryFn,
      maxRetries,
      maxTimeout,
      minTimeout,
      timeoutFactor,
      // Response scoped
      methods,
      errorCodes,
      retryAfter,
      statusCodes
    } = retryOptions ?? {}

    this.dispatch = handlers.dispatch
    this.handler = handlers.handler
    this.opts = dispatchOpts
    this.abort = null
    this.aborted = false
    this.retryOpts = {
      retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
      retryAfter: retryAfter ?? true,
      maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
      timeout: minTimeout ?? 500, // .5s
      timeoutFactor: timeoutFactor ?? 2,
      maxRetries: maxRetries ?? 5,
      // What errors we should retry
      methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
      // Indicates which errors to retry
      statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
      // List of errors to retry
      errorCodes: errorCodes ?? [
        'ECONNRESET',
        'ECONNREFUSED',
        'ENOTFOUND',
        'ENETDOWN',
        'ENETUNREACH',
        'EHOSTDOWN',
        'EHOSTUNREACH',
        'EPIPE'
      ]
    }

    this.retryCount = 0
    this.start = 0 // byte offset already delivered downstream
    this.end = null
    this.etag = null
    this.resume = null

    // Handle possible onConnect duplication
    this.handler.onConnect(reason => {
      this.aborted = true
      if (this.abort) {
        this.abort(reason)
      } else {
        this.reason = reason
      }
    })
  }

  onRequestSent () {
    if (this.handler.onRequestSent) {
      this.handler.onRequestSent()
    }
  }

  onUpgrade (statusCode, headers, socket) {
    if (this.handler.onUpgrade) {
      this.handler.onUpgrade(statusCode, headers, socket)
    }
  }

  onConnect (abort) {
    if (this.aborted) {
      abort(this.reason)
    } else {
      this.abort = abort
    }
  }

  onBodySent (chunk) {
    if (this.handler.onBodySent) return this.handler.onBodySent(chunk)
  }

  /**
   * Default retry decision: calls cb(err) to give up, or schedules
   * cb(null) after the computed backoff to retry.
   */
  static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
    const { statusCode, code, headers } = err
    const { method, retryOptions } = opts
    const {
      maxRetries,
      timeout,
      maxTimeout,
      timeoutFactor,
      statusCodes,
      errorCodes,
      methods
    } = retryOptions
    let { counter, currentTimeout } = state

    currentTimeout =
      currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout

    // Any code that is not a Undici's originated and allowed to retry
    if (
      code &&
      code !== 'UND_ERR_REQ_RETRY' &&
      code !== 'UND_ERR_SOCKET' &&
      !errorCodes.includes(code)
    ) {
      cb(err)
      return
    }

    // If a set of method are provided and the current method is not in the list
    if (Array.isArray(methods) && !methods.includes(method)) {
      cb(err)
      return
    }

    // If a set of status code are provided and the current status code is not in the list
    if (
      statusCode != null &&
      Array.isArray(statusCodes) &&
      !statusCodes.includes(statusCode)
    ) {
      cb(err)
      return
    }

    // If we reached the max number of retries
    if (counter > maxRetries) {
      cb(err)
      return
    }

    // Retry-After may be delay-seconds or an HTTP-date (RFC 9110 §10.2.3).
    // Fix: the HTTP-date branch previously received the already-NaN result
    // of Number(...) instead of the original header value, so date-form
    // Retry-After was never honored.
    let retryAfterHeader = headers != null && headers['retry-after']
    if (retryAfterHeader) {
      const asSeconds = Number(retryAfterHeader)
      retryAfterHeader = Number.isNaN(asSeconds)
        ? calculateRetryAfterHeader(retryAfterHeader) // HTTP-date → ms until then
        : asSeconds * 1e3 // Retry-After is in seconds
    }

    const retryTimeout =
      retryAfterHeader > 0
        ? Math.min(retryAfterHeader, maxTimeout)
        : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)

    state.currentTimeout = retryTimeout

    setTimeout(() => cb(null), retryTimeout)
  }

  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    const headers = parseHeaders(rawHeaders)

    this.retryCount += 1

    if (statusCode >= 300) {
      // Surface the failure through abort; onError will decide on retry.
      this.abort(
        new RequestRetryError('Request failed', statusCode, {
          headers,
          count: this.retryCount
        })
      )
      return false
    }

    // Checkpoint for resume from where we left it
    if (this.resume != null) {
      this.resume = null

      if (statusCode !== 206) {
        return true
      }

      const contentRange = parseRangeHeader(headers['content-range'])
      // If no content range
      if (!contentRange) {
        this.abort(
          new RequestRetryError('Content-Range mismatch', statusCode, {
            headers,
            count: this.retryCount
          })
        )
        return false
      }

      // Let's start with a weak etag check
      if (this.etag != null && this.etag !== headers.etag) {
        this.abort(
          new RequestRetryError('ETag mismatch', statusCode, {
            headers,
            count: this.retryCount
          })
        )
        return false
      }

      const { start, size, end = size } = contentRange

      assert(this.start === start, 'content-range mismatch')
      assert(this.end == null || this.end === end, 'content-range mismatch')

      this.resume = resume
      return true
    }

    if (this.end == null) {
      if (statusCode === 206) {
        // First time we receive 206
        const range = parseRangeHeader(headers['content-range'])

        if (range == null) {
          return this.handler.onHeaders(
            statusCode,
            rawHeaders,
            resume,
            statusMessage
          )
        }

        const { start, size, end = size } = range
        // NOTE(review): these asserts use `!==`, so they only pass when the
        // server's range does NOT match the checkpoint — the inverse of the
        // `===` checks in the resume branch above. Preserved as-is to avoid
        // a behavior change; looks like an upstream bug — TODO confirm.
        assert(
          start != null && Number.isFinite(start) && this.start !== start,
          'content-range mismatch'
        )
        assert(Number.isFinite(start))
        assert(
          end != null && Number.isFinite(end) && this.end !== end,
          'invalid content-length'
        )

        this.start = start
        this.end = end
      }

      // We make our best to checkpoint the body for further range headers
      if (this.end == null) {
        const contentLength = headers['content-length']
        this.end = contentLength != null ? Number(contentLength) : null
      }

      assert(Number.isFinite(this.start))
      assert(
        this.end == null || Number.isFinite(this.end),
        'invalid content-length'
      )

      this.resume = resume
      this.etag = headers.etag != null ? headers.etag : null

      return this.handler.onHeaders(
        statusCode,
        rawHeaders,
        resume,
        statusMessage
      )
    }

    const err = new RequestRetryError('Request failed', statusCode, {
      headers,
      count: this.retryCount
    })

    this.abort(err)

    return false
  }

  onData (chunk) {
    this.start += chunk.length

    return this.handler.onData(chunk)
  }

  onComplete (rawTrailers) {
    this.retryCount = 0
    return this.handler.onComplete(rawTrailers)
  }

  onError (err) {
    if (this.aborted || isDisturbed(this.opts.body)) {
      return this.handler.onError(err)
    }

    this.retryOpts.retry(
      err,
      {
        // NOTE(review): `this.retryAfter` is never assigned in this class,
        // so currentTimeout is always undefined here and the default retry
        // fn falls back to retryOpts.timeout. Preserved as-is — TODO confirm.
        state: { counter: this.retryCount++, currentTimeout: this.retryAfter },
        opts: { retryOptions: this.retryOpts, ...this.opts }
      },
      onRetry.bind(this)
    )

    // Bound to `this` above; err == null means "retry now".
    function onRetry (err) {
      if (err != null || this.aborted || isDisturbed(this.opts.body)) {
        return this.handler.onError(err)
      }

      // Resume from the last delivered byte when some data already flowed.
      if (this.start !== 0) {
        this.opts = {
          ...this.opts,
          headers: {
            ...this.opts.headers,
            range: `bytes=${this.start}-${this.end ?? ''}`
          }
        }
      }

      try {
        this.dispatch(this.opts, this)
      } catch (err) {
        this.handler.onError(err)
      }
    }
  }
}

module.exports = RetryHandler
/***/ } ) ,
/***/ 8861 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const RedirectHandler = _ _nccwpck _require _ _ ( 2860 )
/**
 * Builds a dispatcher interceptor that follows HTTP redirects.
 * @param {{ maxRedirections?: number }} param0 default redirect budget used
 *   when a request does not carry its own `maxRedirections`.
 * @returns {(dispatch: Function) => Function} interceptor factory
 */
function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {
  return (dispatch) => {
    return function Intercept (opts, handler) {
      const { maxRedirections = defaultMaxRedirections } = opts

      // No budget (0/undefined) → plain dispatch, no redirect handling.
      if (!maxRedirections) {
        return dispatch(opts, handler)
      }

      const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler)
      opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
      return dispatch(opts, redirectHandler)
    }
  }
}
module.exports = createRedirectInterceptor
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 953 :
2023-04-12 19:55:27 +08:00
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
2024-04-24 12:04:10 -04:00
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Compiled TypeScript output (appears to be llhttp's constants module —
// the enum IIFEs below are standard tsc enum emission with numeric
// reverse mappings). Generated code: do not hand-edit the values.
exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
const utils_1 = __nccwpck_require__(1891);
// C headers
// Parser error codes (mirrors the C enum one-to-one).
var ERROR;
(function (ERROR) {
    ERROR[ERROR["OK"] = 0] = "OK";
    ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL";
    ERROR[ERROR["STRICT"] = 2] = "STRICT";
    ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED";
    ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH";
    ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION";
    ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD";
    ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL";
    ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT";
    ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION";
    ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN";
    ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH";
    ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE";
    ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS";
    ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE";
    ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING";
    ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN";
    ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE";
    ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE";
    ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER";
    ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE";
    ERROR[ERROR["PAUSED"] = 21] = "PAUSED";
    ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE";
    ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE";
    ERROR[ERROR["USER"] = 24] = "USER";
})(ERROR = exports.ERROR || (exports.ERROR = {}));
// Parser mode: request-only, response-only, or both.
var TYPE;
(function (TYPE) {
    TYPE[TYPE["BOTH"] = 0] = "BOTH";
    TYPE[TYPE["REQUEST"] = 1] = "REQUEST";
    TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE";
})(TYPE = exports.TYPE || (exports.TYPE = {}));
// Bit flags describing message framing state.
var FLAGS;
(function (FLAGS) {
    FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE";
    FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE";
    FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE";
    FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED";
    FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE";
    FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH";
    FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY";
    FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING";
    // 1 << 8 is unused
    FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING";
})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));
// Bit flags selecting which strict checks the parser relaxes.
var LENIENT_FLAGS;
(function (LENIENT_FLAGS) {
    LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS";
    LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH";
    LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE";
})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));
var METHODS ;
( function ( METHODS ) {
METHODS [ METHODS [ "DELETE" ] = 0 ] = "DELETE" ;
METHODS [ METHODS [ "GET" ] = 1 ] = "GET" ;
METHODS [ METHODS [ "HEAD" ] = 2 ] = "HEAD" ;
METHODS [ METHODS [ "POST" ] = 3 ] = "POST" ;
METHODS [ METHODS [ "PUT" ] = 4 ] = "PUT" ;
/* pathological */
METHODS [ METHODS [ "CONNECT" ] = 5 ] = "CONNECT" ;
METHODS [ METHODS [ "OPTIONS" ] = 6 ] = "OPTIONS" ;
METHODS [ METHODS [ "TRACE" ] = 7 ] = "TRACE" ;
/* WebDAV */
METHODS [ METHODS [ "COPY" ] = 8 ] = "COPY" ;
METHODS [ METHODS [ "LOCK" ] = 9 ] = "LOCK" ;
METHODS [ METHODS [ "MKCOL" ] = 10 ] = "MKCOL" ;
METHODS [ METHODS [ "MOVE" ] = 11 ] = "MOVE" ;
METHODS [ METHODS [ "PROPFIND" ] = 12 ] = "PROPFIND" ;
METHODS [ METHODS [ "PROPPATCH" ] = 13 ] = "PROPPATCH" ;
METHODS [ METHODS [ "SEARCH" ] = 14 ] = "SEARCH" ;
METHODS [ METHODS [ "UNLOCK" ] = 15 ] = "UNLOCK" ;
METHODS [ METHODS [ "BIND" ] = 16 ] = "BIND" ;
METHODS [ METHODS [ "REBIND" ] = 17 ] = "REBIND" ;
METHODS [ METHODS [ "UNBIND" ] = 18 ] = "UNBIND" ;
METHODS [ METHODS [ "ACL" ] = 19 ] = "ACL" ;
/* subversion */
METHODS [ METHODS [ "REPORT" ] = 20 ] = "REPORT" ;
METHODS [ METHODS [ "MKACTIVITY" ] = 21 ] = "MKACTIVITY" ;
METHODS [ METHODS [ "CHECKOUT" ] = 22 ] = "CHECKOUT" ;
METHODS [ METHODS [ "MERGE" ] = 23 ] = "MERGE" ;
/* upnp */
METHODS [ METHODS [ "M-SEARCH" ] = 24 ] = "M-SEARCH" ;
METHODS [ METHODS [ "NOTIFY" ] = 25 ] = "NOTIFY" ;
METHODS [ METHODS [ "SUBSCRIBE" ] = 26 ] = "SUBSCRIBE" ;
METHODS [ METHODS [ "UNSUBSCRIBE" ] = 27 ] = "UNSUBSCRIBE" ;
/* RFC-5789 */
METHODS [ METHODS [ "PATCH" ] = 28 ] = "PATCH" ;
METHODS [ METHODS [ "PURGE" ] = 29 ] = "PURGE" ;
/* CalDAV */
METHODS [ METHODS [ "MKCALENDAR" ] = 30 ] = "MKCALENDAR" ;
/* RFC-2068, section 19.6.1.2 */
METHODS [ METHODS [ "LINK" ] = 31 ] = "LINK" ;
METHODS [ METHODS [ "UNLINK" ] = 32 ] = "UNLINK" ;
/* icecast */
METHODS [ METHODS [ "SOURCE" ] = 33 ] = "SOURCE" ;
/* RFC-7540, section 11.6 */
METHODS [ METHODS [ "PRI" ] = 34 ] = "PRI" ;
/* RFC-2326 RTSP */
METHODS [ METHODS [ "DESCRIBE" ] = 35 ] = "DESCRIBE" ;
METHODS [ METHODS [ "ANNOUNCE" ] = 36 ] = "ANNOUNCE" ;
METHODS [ METHODS [ "SETUP" ] = 37 ] = "SETUP" ;
METHODS [ METHODS [ "PLAY" ] = 38 ] = "PLAY" ;
METHODS [ METHODS [ "PAUSE" ] = 39 ] = "PAUSE" ;
METHODS [ METHODS [ "TEARDOWN" ] = 40 ] = "TEARDOWN" ;
METHODS [ METHODS [ "GET_PARAMETER" ] = 41 ] = "GET_PARAMETER" ;
METHODS [ METHODS [ "SET_PARAMETER" ] = 42 ] = "SET_PARAMETER" ;
METHODS [ METHODS [ "REDIRECT" ] = 43 ] = "REDIRECT" ;
METHODS [ METHODS [ "RECORD" ] = 44 ] = "RECORD" ;
/* RAOP */
METHODS [ METHODS [ "FLUSH" ] = 45 ] = "FLUSH" ;
} ) ( METHODS = exports . METHODS || ( exports . METHODS = { } ) ) ;
exports . METHODS _HTTP = [
METHODS . DELETE ,
METHODS . GET ,
METHODS . HEAD ,
METHODS . POST ,
METHODS . PUT ,
METHODS . CONNECT ,
METHODS . OPTIONS ,
METHODS . TRACE ,
METHODS . COPY ,
METHODS . LOCK ,
METHODS . MKCOL ,
METHODS . MOVE ,
METHODS . PROPFIND ,
METHODS . PROPPATCH ,
METHODS . SEARCH ,
METHODS . UNLOCK ,
METHODS . BIND ,
METHODS . REBIND ,
METHODS . UNBIND ,
METHODS . ACL ,
METHODS . REPORT ,
METHODS . MKACTIVITY ,
METHODS . CHECKOUT ,
METHODS . MERGE ,
METHODS [ 'M-SEARCH' ] ,
METHODS . NOTIFY ,
METHODS . SUBSCRIBE ,
METHODS . UNSUBSCRIBE ,
METHODS . PATCH ,
METHODS . PURGE ,
METHODS . MKCALENDAR ,
METHODS . LINK ,
METHODS . UNLINK ,
METHODS . PRI ,
// TODO(indutny): should we allow it with HTTP?
METHODS . SOURCE ,
] ;
exports . METHODS _ICE = [
METHODS . SOURCE ,
] ;
exports . METHODS _RTSP = [
METHODS . OPTIONS ,
METHODS . DESCRIBE ,
METHODS . ANNOUNCE ,
METHODS . SETUP ,
METHODS . PLAY ,
METHODS . PAUSE ,
METHODS . TEARDOWN ,
METHODS . GET _PARAMETER ,
METHODS . SET _PARAMETER ,
METHODS . REDIRECT ,
METHODS . RECORD ,
METHODS . FLUSH ,
// For AirPlay
METHODS . GET ,
METHODS . POST ,
] ;
exports . METHOD _MAP = utils _1 . enumToMap ( METHODS ) ;
exports . H _METHOD _MAP = { } ;
Object . keys ( exports . METHOD _MAP ) . forEach ( ( key ) => {
if ( /^H/ . test ( key ) ) {
exports . H _METHOD _MAP [ key ] = exports . METHOD _MAP [ key ] ;
}
} ) ;
var FINISH ;
( function ( FINISH ) {
FINISH [ FINISH [ "SAFE" ] = 0 ] = "SAFE" ;
FINISH [ FINISH [ "SAFE_WITH_CB" ] = 1 ] = "SAFE_WITH_CB" ;
FINISH [ FINISH [ "UNSAFE" ] = 2 ] = "UNSAFE" ;
} ) ( FINISH = exports . FINISH || ( exports . FINISH = { } ) ) ;
exports . ALPHA = [ ] ;
for ( let i = 'A' . charCodeAt ( 0 ) ; i <= 'Z' . charCodeAt ( 0 ) ; i ++ ) {
// Upper case
exports . ALPHA . push ( String . fromCharCode ( i ) ) ;
// Lower case
exports . ALPHA . push ( String . fromCharCode ( i + 0x20 ) ) ;
}
exports . NUM _MAP = {
0 : 0 , 1 : 1 , 2 : 2 , 3 : 3 , 4 : 4 ,
5 : 5 , 6 : 6 , 7 : 7 , 8 : 8 , 9 : 9 ,
} ;
exports . HEX _MAP = {
0 : 0 , 1 : 1 , 2 : 2 , 3 : 3 , 4 : 4 ,
5 : 5 , 6 : 6 , 7 : 7 , 8 : 8 , 9 : 9 ,
A : 0XA , B : 0XB , C : 0XC , D : 0XD , E : 0XE , F : 0XF ,
a : 0xa , b : 0xb , c : 0xc , d : 0xd , e : 0xe , f : 0xf ,
} ;
exports . NUM = [
'0' , '1' , '2' , '3' , '4' , '5' , '6' , '7' , '8' , '9' ,
] ;
exports . ALPHANUM = exports . ALPHA . concat ( exports . NUM ) ;
exports . MARK = [ '-' , '_' , '.' , '!' , '~' , '*' , '\'' , '(' , ')' ] ;
exports . USERINFO _CHARS = exports . ALPHANUM
. concat ( exports . MARK )
. concat ( [ '%' , ';' , ':' , '&' , '=' , '+' , '$' , ',' ] ) ;
// TODO(indutny): use RFC
exports . STRICT _URL _CHAR = [
'!' , '"' , '$' , '%' , '&' , '\'' ,
'(' , ')' , '*' , '+' , ',' , '-' , '.' , '/' ,
':' , ';' , '<' , '=' , '>' ,
'@' , '[' , '\\' , ']' , '^' , '_' ,
'`' ,
'{' , '|' , '}' , '~' ,
] . concat ( exports . ALPHANUM ) ;
exports . URL _CHAR = exports . STRICT _URL _CHAR
. concat ( [ '\t' , '\f' ] ) ;
// All characters with 0x80 bit set to 1
for ( let i = 0x80 ; i <= 0xff ; i ++ ) {
exports . URL _CHAR . push ( i ) ;
}
exports . HEX = exports . NUM . concat ( [ 'a' , 'b' , 'c' , 'd' , 'e' , 'f' , 'A' , 'B' , 'C' , 'D' , 'E' , 'F' ] ) ;
/ * T o k e n s a s d e f i n e d b y r f c 2 6 1 6 . A l s o l o w e r c a s e s t h e m .
* token = 1 * < any CHAR except CTLs or separators >
* separators = "(" | ")" | "<" | ">" | "@"
* | "," | ";" | ":" | "\" | <" >
* | "/" | "[" | "]" | "?" | "="
* | "{" | "}" | SP | HT
* /
exports . STRICT _TOKEN = [
'!' , '#' , '$' , '%' , '&' , '\'' ,
'*' , '+' , '-' , '.' ,
'^' , '_' , '`' ,
'|' , '~' ,
] . concat ( exports . ALPHANUM ) ;
exports . TOKEN = exports . STRICT _TOKEN . concat ( [ ' ' ] ) ;
/ *
* Verify that a char is a valid visible ( printable ) US - ASCII
* character or % x80 - FF
* /
exports . HEADER _CHARS = [ '\t' ] ;
for ( let i = 32 ; i <= 255 ; i ++ ) {
if ( i !== 127 ) {
exports . HEADER _CHARS . push ( i ) ;
}
}
// ',' = \x44
exports . CONNECTION _TOKEN _CHARS = exports . HEADER _CHARS . filter ( ( c ) => c !== 44 ) ;
exports . MAJOR = exports . NUM _MAP ;
exports . MINOR = exports . MAJOR ;
var HEADER _STATE ;
( function ( HEADER _STATE ) {
HEADER _STATE [ HEADER _STATE [ "GENERAL" ] = 0 ] = "GENERAL" ;
HEADER _STATE [ HEADER _STATE [ "CONNECTION" ] = 1 ] = "CONNECTION" ;
HEADER _STATE [ HEADER _STATE [ "CONTENT_LENGTH" ] = 2 ] = "CONTENT_LENGTH" ;
HEADER _STATE [ HEADER _STATE [ "TRANSFER_ENCODING" ] = 3 ] = "TRANSFER_ENCODING" ;
HEADER _STATE [ HEADER _STATE [ "UPGRADE" ] = 4 ] = "UPGRADE" ;
HEADER _STATE [ HEADER _STATE [ "CONNECTION_KEEP_ALIVE" ] = 5 ] = "CONNECTION_KEEP_ALIVE" ;
HEADER _STATE [ HEADER _STATE [ "CONNECTION_CLOSE" ] = 6 ] = "CONNECTION_CLOSE" ;
HEADER _STATE [ HEADER _STATE [ "CONNECTION_UPGRADE" ] = 7 ] = "CONNECTION_UPGRADE" ;
HEADER _STATE [ HEADER _STATE [ "TRANSFER_ENCODING_CHUNKED" ] = 8 ] = "TRANSFER_ENCODING_CHUNKED" ;
} ) ( HEADER _STATE = exports . HEADER _STATE || ( exports . HEADER _STATE = { } ) ) ;
exports . SPECIAL _HEADERS = {
'connection' : HEADER _STATE . CONNECTION ,
'content-length' : HEADER _STATE . CONTENT _LENGTH ,
'proxy-connection' : HEADER _STATE . CONNECTION ,
'transfer-encoding' : HEADER _STATE . TRANSFER _ENCODING ,
'upgrade' : HEADER _STATE . UPGRADE ,
} ;
//# sourceMappingURL=constants.js.map
/***/ } ) ,
/***/ 1145 :
/***/ ( ( module ) => {
module . exports = ' AGFzbQEAAAABMAhgAX8Bf2ADf39 / AX9gBH9 / f38Bf2AAAGADf39 / AGABfwBgAn9 / AGAGf39 / f39 / AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcO
WgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0 + P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H + sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75 + 
AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja + AgAAPC0HqoYCAAA8LQbStgIAADwtB0q + AgAAPC0HfsoCAAA8LQdKygIAAD
/***/ } ) ,
/***/ 5627 :
/***/ ( ( module ) => {
module . exports = ' AGFzbQEAAAABMAhgAX8Bf2ADf39 / AX9gBH9 / f38Bf2AAAGADf39 / AGABfwBgAn9 / AGAGf39 / f39 / AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcO
WgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0 + P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H + sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75 + 
AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja + AgAAPC0HqoYCAAA8LQbStgIAADwtB0q + AgAAPC0HfsoCAAA8LQdKygIAAD
/***/ } ) ,
/***/ 1891 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
2023-04-12 19:55:27 +08:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2024-04-24 12:04:10 -04:00
exports . enumToMap = void 0 ;
function enumToMap ( obj ) {
const res = { } ;
Object . keys ( obj ) . forEach ( ( key ) => {
const value = obj [ key ] ;
if ( typeof value === 'number' ) {
res [ key ] = value ;
}
} ) ;
return res ;
}
exports . enumToMap = enumToMap ;
//# sourceMappingURL=utils.js.map
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ 6771 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const { kClients } = _ _nccwpck _require _ _ ( 2785 )
const Agent = _ _nccwpck _require _ _ ( 7890 )
const {
kAgent ,
kMockAgentSet ,
kMockAgentGet ,
kDispatches ,
kIsMockActive ,
kNetConnect ,
kGetNetConnect ,
kOptions ,
kFactory
} = _ _nccwpck _require _ _ ( 4347 )
const MockClient = _ _nccwpck _require _ _ ( 8687 )
const MockPool = _ _nccwpck _require _ _ ( 6193 )
const { matchValue , buildMockOptions } = _ _nccwpck _require _ _ ( 9323 )
const { InvalidArgumentError , UndiciError } = _ _nccwpck _require _ _ ( 8045 )
const Dispatcher = _ _nccwpck _require _ _ ( 412 )
const Pluralizer = _ _nccwpck _require _ _ ( 8891 )
const PendingInterceptorsFormatter = _ _nccwpck _require _ _ ( 6823 )
class FakeWeakRef {
constructor ( value ) {
this . value = value
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
deref ( ) {
return this . value
}
}
class MockAgent extends Dispatcher {
constructor ( opts ) {
super ( opts )
this [ kNetConnect ] = true
this [ kIsMockActive ] = true
// Instantiate Agent and encapsulate
if ( ( opts && opts . agent && typeof opts . agent . dispatch !== 'function' ) ) {
throw new InvalidArgumentError ( 'Argument opts.agent must implement Agent' )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
const agent = opts && opts . agent ? opts . agent : new Agent ( opts )
this [ kAgent ] = agent
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
this [ kClients ] = agent [ kClients ]
this [ kOptions ] = buildMockOptions ( opts )
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
get ( origin ) {
let dispatcher = this [ kMockAgentGet ] ( origin )
if ( ! dispatcher ) {
dispatcher = this [ kFactory ] ( origin )
this [ kMockAgentSet ] ( origin , dispatcher )
}
return dispatcher
}
dispatch ( opts , handler ) {
// Call MockAgent.get to perform additional setup before dispatching as normal
this . get ( opts . origin )
return this [ kAgent ] . dispatch ( opts , handler )
}
async close ( ) {
await this [ kAgent ] . close ( )
this [ kClients ] . clear ( )
}
deactivate ( ) {
this [ kIsMockActive ] = false
}
activate ( ) {
this [ kIsMockActive ] = true
}
enableNetConnect ( matcher ) {
if ( typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp ) {
if ( Array . isArray ( this [ kNetConnect ] ) ) {
this [ kNetConnect ] . push ( matcher )
} else {
this [ kNetConnect ] = [ matcher ]
}
} else if ( typeof matcher === 'undefined' ) {
this [ kNetConnect ] = true
} else {
throw new InvalidArgumentError ( 'Unsupported matcher. Must be one of String|Function|RegExp.' )
}
}
disableNetConnect ( ) {
this [ kNetConnect ] = false
}
// This is required to bypass issues caused by using global symbols - see:
// https://github.com/nodejs/undici/issues/1447
get isMockActive ( ) {
return this [ kIsMockActive ]
}
[ kMockAgentSet ] ( origin , dispatcher ) {
this [ kClients ] . set ( origin , new FakeWeakRef ( dispatcher ) )
}
[ kFactory ] ( origin ) {
const mockOptions = Object . assign ( { agent : this } , this [ kOptions ] )
return this [ kOptions ] && this [ kOptions ] . connections === 1
? new MockClient ( origin , mockOptions )
: new MockPool ( origin , mockOptions )
}
[ kMockAgentGet ] ( origin ) {
// First check if we can immediately find it
const ref = this [ kClients ] . get ( origin )
if ( ref ) {
return ref . deref ( )
}
// If the origin is not a string create a dummy parent pool and return to user
if ( typeof origin !== 'string' ) {
const dispatcher = this [ kFactory ] ( 'http://localhost:9999' )
this [ kMockAgentSet ] ( origin , dispatcher )
return dispatcher
}
// If we match, create a pool and assign the same dispatches
for ( const [ keyMatcher , nonExplicitRef ] of Array . from ( this [ kClients ] ) ) {
const nonExplicitDispatcher = nonExplicitRef . deref ( )
if ( nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue ( keyMatcher , origin ) ) {
const dispatcher = this [ kFactory ] ( origin )
this [ kMockAgentSet ] ( origin , dispatcher )
dispatcher [ kDispatches ] = nonExplicitDispatcher [ kDispatches ]
return dispatcher
}
}
}
[ kGetNetConnect ] ( ) {
return this [ kNetConnect ]
}
pendingInterceptors ( ) {
const mockAgentClients = this [ kClients ]
return Array . from ( mockAgentClients . entries ( ) )
. flatMap ( ( [ origin , scope ] ) => scope . deref ( ) [ kDispatches ] . map ( dispatch => ( { ... dispatch , origin } ) ) )
. filter ( ( { pending } ) => pending )
}
assertNoPendingInterceptors ( { pendingInterceptorsFormatter = new PendingInterceptorsFormatter ( ) } = { } ) {
const pending = this . pendingInterceptors ( )
if ( pending . length === 0 ) {
return
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
const pluralizer = new Pluralizer ( 'interceptor' , 'interceptors' ) . pluralize ( pending . length )
throw new UndiciError ( `
$ { pluralizer . count } $ { pluralizer . noun } $ { pluralizer . is } pending :
$ { pendingInterceptorsFormatter . format ( pending ) }
` .trim())
}
}
module . exports = MockAgent
/***/ } ) ,
/***/ 8687 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const { promisify } = _ _nccwpck _require _ _ ( 3837 )
const Client = _ _nccwpck _require _ _ ( 3598 )
const { buildMockDispatch } = _ _nccwpck _require _ _ ( 9323 )
const {
kDispatches ,
kMockAgent ,
kClose ,
kOriginalClose ,
kOrigin ,
kOriginalDispatch ,
kConnected
} = _ _nccwpck _require _ _ ( 4347 )
const { MockInterceptor } = _ _nccwpck _require _ _ ( 410 )
const Symbols = _ _nccwpck _require _ _ ( 2785 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/ * *
* MockClient provides an API that extends the Client to influence the mockDispatches .
* /
class MockClient extends Client {
constructor ( origin , opts ) {
super ( origin , opts )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
if ( ! opts || ! opts . agent || typeof opts . agent . dispatch !== 'function' ) {
throw new InvalidArgumentError ( 'Argument opts.agent must implement Agent' )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
this [ kMockAgent ] = opts . agent
this [ kOrigin ] = origin
this [ kDispatches ] = [ ]
this [ kConnected ] = 1
this [ kOriginalDispatch ] = this . dispatch
this [ kOriginalClose ] = this . close . bind ( this )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
this . dispatch = buildMockDispatch . call ( this )
this . close = this [ kClose ]
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
get [ Symbols . kConnected ] ( ) {
return this [ kConnected ]
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/ * *
* Sets up the base interceptor for mocking replies from undici .
* /
intercept ( opts ) {
return new MockInterceptor ( opts , this [ kDispatches ] )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
async [ kClose ] ( ) {
await promisify ( this [ kOriginalClose ] ) ( )
this [ kConnected ] = 0
this [ kMockAgent ] [ Symbols . kClients ] . delete ( this [ kOrigin ] )
}
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
module . exports = MockClient
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 888 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-04-12 19:55:27 +08:00
"use strict" ;
2024-04-24 12:04:10 -04:00
const { UndiciError } = _ _nccwpck _require _ _ ( 8045 )
class MockNotMatchedError extends UndiciError {
constructor ( message ) {
super ( message )
Error . captureStackTrace ( this , MockNotMatchedError )
this . name = 'MockNotMatchedError'
this . message = message || 'The request does not match any registered mock dispatches'
this . code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
}
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
module . exports = {
MockNotMatchedError
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ 410 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const { getResponseData , buildKey , addMockDispatch } = _ _nccwpck _require _ _ ( 9323 )
const {
kDispatches ,
kDispatchKey ,
kDefaultHeaders ,
kDefaultTrailers ,
kContentLength ,
kMockDispatch
} = _ _nccwpck _require _ _ ( 4347 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
const { buildURL } = _ _nccwpck _require _ _ ( 3983 )
/ * *
* Defines the scope API for an interceptor reply
* /
class MockScope {
constructor ( mockDispatch ) {
this [ kMockDispatch ] = mockDispatch
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
/ * *
* Delay a reply by a set amount in ms .
* /
delay ( waitInMs ) {
if ( typeof waitInMs !== 'number' || ! Number . isInteger ( waitInMs ) || waitInMs <= 0 ) {
throw new InvalidArgumentError ( 'waitInMs must be a valid integer > 0' )
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
this [ kMockDispatch ] . delay = waitInMs
return this
}
/ * *
* For a defined reply , never mark as consumed .
* /
persist ( ) {
this [ kMockDispatch ] . persist = true
return this
}
/ * *
* Allow one to define a reply for a set amount of matching requests .
* /
times ( repeatTimes ) {
if ( typeof repeatTimes !== 'number' || ! Number . isInteger ( repeatTimes ) || repeatTimes <= 0 ) {
throw new InvalidArgumentError ( 'repeatTimes must be a valid integer > 0' )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
this [ kMockDispatch ] . times = repeatTimes
return this
}
}
/ * *
* Defines an interceptor for a Mock
* /
class MockInterceptor {
constructor ( opts , mockDispatches ) {
if ( typeof opts !== 'object' ) {
throw new InvalidArgumentError ( 'opts must be an object' )
}
if ( typeof opts . path === 'undefined' ) {
throw new InvalidArgumentError ( 'opts.path must be defined' )
}
if ( typeof opts . method === 'undefined' ) {
opts . method = 'GET'
}
// See https://github.com/nodejs/undici/issues/1245
// As per RFC 3986, clients are not supposed to send URI
// fragments to servers when they retrieve a document,
if ( typeof opts . path === 'string' ) {
if ( opts . query ) {
opts . path = buildURL ( opts . path , opts . query )
} else {
// Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811
const parsedURL = new URL ( opts . path , 'data://' )
opts . path = parsedURL . pathname + parsedURL . search
}
}
if ( typeof opts . method === 'string' ) {
opts . method = opts . method . toUpperCase ( )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
this [ kDispatchKey ] = buildKey ( opts )
this [ kDispatches ] = mockDispatches
this [ kDefaultHeaders ] = { }
this [ kDefaultTrailers ] = { }
this [ kContentLength ] = false
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
createMockScopeDispatchData ( statusCode , data , responseOptions = { } ) {
const responseData = getResponseData ( data )
const contentLength = this [ kContentLength ] ? { 'content-length' : responseData . length } : { }
const headers = { ... this [ kDefaultHeaders ] , ... contentLength , ... responseOptions . headers }
const trailers = { ... this [ kDefaultTrailers ] , ... responseOptions . trailers }
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
return { statusCode , data , headers , trailers }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
validateReplyParameters ( statusCode , data , responseOptions ) {
if ( typeof statusCode === 'undefined' ) {
throw new InvalidArgumentError ( 'statusCode must be defined' )
}
if ( typeof data === 'undefined' ) {
throw new InvalidArgumentError ( 'data must be defined' )
}
if ( typeof responseOptions !== 'object' ) {
throw new InvalidArgumentError ( 'responseOptions must be an object' )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/ * *
* Mock an undici request with a defined reply .
* /
reply ( replyData ) {
// Values of reply aren't available right now as they
// can only be available when the reply callback is invoked.
if ( typeof replyData === 'function' ) {
// We'll first wrap the provided callback in another function,
// this function will properly resolve the data from the callback
// when invoked.
const wrappedDefaultsCallback = ( opts ) => {
// Our reply options callback contains the parameter for statusCode, data and options.
const resolvedData = replyData ( opts )
// Check if it is in the right format
if ( typeof resolvedData !== 'object' ) {
throw new InvalidArgumentError ( 'reply options callback must return an object' )
}
const { statusCode , data = '' , responseOptions = { } } = resolvedData
this . validateReplyParameters ( statusCode , data , responseOptions )
// Since the values can be obtained immediately we return them
// from this higher order function that will be resolved later.
return {
... this . createMockScopeDispatchData ( statusCode , data , responseOptions )
}
}
// Add usual dispatch data, but this time set the data parameter to function that will eventually provide data.
const newMockDispatch = addMockDispatch ( this [ kDispatches ] , this [ kDispatchKey ] , wrappedDefaultsCallback )
return new MockScope ( newMockDispatch )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
// We can have either one or three parameters, if we get here,
// we should have 1-3 parameters. So we spread the arguments of
// this function to obtain the parameters, since replyData will always
// just be the statusCode.
const [ statusCode , data = '' , responseOptions = { } ] = [ ... arguments ]
this . validateReplyParameters ( statusCode , data , responseOptions )
// Send in-already provided data like usual
const dispatchData = this . createMockScopeDispatchData ( statusCode , data , responseOptions )
const newMockDispatch = addMockDispatch ( this [ kDispatches ] , this [ kDispatchKey ] , dispatchData )
return new MockScope ( newMockDispatch )
}
/ * *
* Mock an undici request with a defined error .
* /
replyWithError ( error ) {
if ( typeof error === 'undefined' ) {
throw new InvalidArgumentError ( 'error must be defined' )
2020-05-07 12:11:11 -04:00
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const newMockDispatch = addMockDispatch ( this [ kDispatches ] , this [ kDispatchKey ] , { error } )
return new MockScope ( newMockDispatch )
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/ * *
* Set default reply headers on the interceptor for subsequent replies
* /
defaultReplyHeaders ( headers ) {
if ( typeof headers === 'undefined' ) {
throw new InvalidArgumentError ( 'headers must be defined' )
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
this [ kDefaultHeaders ] = headers
return this
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
/ * *
* Set default reply trailers on the interceptor for subsequent replies
* /
defaultReplyTrailers ( trailers ) {
if ( typeof trailers === 'undefined' ) {
throw new InvalidArgumentError ( 'trailers must be defined' )
}
this [ kDefaultTrailers ] = trailers
return this
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
/ * *
* Set reply content length header for replies on the interceptor
* /
replyContentLength ( ) {
this [ kContentLength ] = true
return this
}
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
module . exports . MockInterceptor = MockInterceptor
module . exports . MockScope = MockScope
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
/***/ 6193 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const { promisify } = _ _nccwpck _require _ _ ( 3837 )
const Pool = _ _nccwpck _require _ _ ( 4634 )
const { buildMockDispatch } = _ _nccwpck _require _ _ ( 9323 )
const {
kDispatches ,
kMockAgent ,
kClose ,
kOriginalClose ,
kOrigin ,
kOriginalDispatch ,
kConnected
} = _ _nccwpck _require _ _ ( 4347 )
const { MockInterceptor } = _ _nccwpck _require _ _ ( 410 )
const Symbols = _ _nccwpck _require _ _ ( 2785 )
const { InvalidArgumentError } = _ _nccwpck _require _ _ ( 8045 )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * MockPool provides an API that extends the Pool to influence the mockDispatches.
 */
class MockPool extends Pool {
  constructor (origin, opts) {
    super(origin, opts)

    // A MockPool is only usable when driven by a MockAgent, which owns the
    // client registry and net-connect policy.
    if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
    }

    this[kMockAgent] = opts.agent
    this[kOrigin] = origin
    this[kDispatches] = [] // mock dispatches registered for this origin
    this[kConnected] = 1 // reported as connected until [kClose] runs
    this[kOriginalDispatch] = this.dispatch
    this[kOriginalClose] = this.close.bind(this)

    // Swap in the mock implementations; the originals are kept under
    // symbols so real dispatches can be forwarded and close can complete.
    this.dispatch = buildMockDispatch.call(this)
    this.close = this[kClose]
  }

  get [Symbols.kConnected] () {
    return this[kConnected]
  }

  /**
   * Sets up the base interceptor for mocking replies from undici.
   */
  intercept (opts) {
    return new MockInterceptor(opts, this[kDispatches])
  }

  // Closes the underlying pool, marks this mock pool disconnected, and
  // removes it from the owning MockAgent's client map.
  async [kClose] () {
    await promisify(this[kOriginalClose])()
    this[kConnected] = 0
    this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
  }
}
module.exports = MockPool
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 4347 :
/***/ ( ( module ) => {
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Symbol keys used to store mock state on agents/pools/clients without
// exposing it as public, enumerable properties.
module.exports = {
  kAgent: Symbol('agent'),
  kOptions: Symbol('options'),
  kFactory: Symbol('factory'),
  kDispatches: Symbol('dispatches'),
  kDispatchKey: Symbol('dispatch key'),
  kDefaultHeaders: Symbol('default headers'),
  kDefaultTrailers: Symbol('default trailers'),
  kContentLength: Symbol('content length'),
  kMockAgent: Symbol('mock agent'),
  kMockAgentSet: Symbol('mock agent set'),
  kMockAgentGet: Symbol('mock agent get'),
  kMockDispatch: Symbol('mock dispatch'),
  kClose: Symbol('close'),
  kOriginalClose: Symbol('original agent close'),
  kOrigin: Symbol('origin'),
  kIsMockActive: Symbol('is mock active'),
  kNetConnect: Symbol('net connect'),
  kGetNetConnect: Symbol('get net connect'),
  kConnected: Symbol('connected')
}
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ 9323 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const { MockNotMatchedError } = _ _nccwpck _require _ _ ( 888 )
const {
kDispatches ,
kMockAgent ,
kOriginalDispatch ,
kOrigin ,
kGetNetConnect
} = _ _nccwpck _require _ _ ( 4347 )
const { buildURL , nop } = _ _nccwpck _require _ _ ( 3983 )
const { STATUS _CODES } = _ _nccwpck _require _ _ ( 3685 )
const {
types : {
isPromise
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
} = _ _nccwpck _require _ _ ( 3837 )
/**
 * Checks a mock matcher against a concrete value. A matcher may be an
 * exact string, a RegExp, or a predicate that must return exactly `true`.
 */
function matchValue (match, value) {
  if (typeof match === 'string') return match === value
  if (match instanceof RegExp) return match.test(value)
  if (typeof match === 'function') return match(value) === true
  return false
}
2024-04-24 12:04:10 -04:00
// Returns a copy of a headers record with every header name lower-cased
// (values untouched). Later duplicate names overwrite earlier ones.
function lowerCaseEntries (headers) {
  const normalized = {}
  for (const [name, value] of Object.entries(headers)) {
    normalized[name.toLocaleLowerCase()] = value
  }
  return normalized
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * @param {import('../../index').Headers|string[]|Record<string, string>} headers
 * @param {string} key
 * Case-insensitive header lookup over the three header shapes used here:
 * a flat [name, value, ...] array, a Headers-like object with .get, or a
 * plain record.
 */
function getHeaderByName (headers, key) {
  const wanted = key.toLocaleLowerCase()

  if (Array.isArray(headers)) {
    for (let i = 0; i < headers.length; i += 2) {
      if (headers[i].toLocaleLowerCase() === wanted) {
        return headers[i + 1]
      }
    }
    return undefined
  }

  if (typeof headers.get === 'function') {
    return headers.get(key)
  }

  // Plain record: lower-case all names first so later duplicates win,
  // then index by the lower-cased key.
  const lowered = {}
  for (const [name, value] of Object.entries(headers)) {
    lowered[name.toLocaleLowerCase()] = value
  }
  return lowered[wanted]
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/** @param {string[]} headers */
function buildHeadersFromArray (headers) { // fetch HeadersList
  // Flat [name, value, name, value, ...] list -> record; later duplicate
  // names overwrite earlier ones, matching Object.fromEntries semantics.
  const record = {}
  for (let i = 0; i < headers.length; i += 2) {
    record[headers[i]] = headers[i + 1]
  }
  return record
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Decides whether the request `headers` satisfy the interceptor's header
// matcher: a predicate function, absent (always matches), or a record of
// per-header matchers that must all pass.
function matchHeaders (mockDispatch, headers) {
  if (typeof mockDispatch.headers === 'function') {
    if (Array.isArray(headers)) { // fetch HeadersList
      headers = buildHeadersFromArray(headers)
    }
    // Call through mockDispatch so a `this`-using matcher keeps working.
    return mockDispatch.headers(headers ? lowerCaseEntries(headers) : {})
  }

  if (typeof mockDispatch.headers === 'undefined') {
    return true
  }

  if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') {
    return false
  }

  return Object.entries(mockDispatch.headers).every(([name, expected]) => (
    matchValue(expected, getHeaderByName(headers, name))
  ))
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Normalizes a path containing exactly one query string by sorting its
// query parameters, so equivalent URLs compare equal. Anything else
// (non-strings, no '?', multiple '?') is returned unchanged.
function safeUrl (path) {
  if (typeof path !== 'string') {
    return path
  }

  const segments = path.split('?')
  if (segments.length !== 2) {
    return path
  }

  const params = new URLSearchParams(segments[1])
  params.sort()
  return `${segments[0]}?${params.toString()}`
}
2024-04-24 12:04:10 -04:00
// True when a stored dispatch matches a request key on path, method,
// body (only if the dispatch declared one) and headers. All four checks
// are evaluated unconditionally, mirroring the historical behavior.
function matchKey (mockDispatch, { path, method, body, headers }) {
  const matchesPath = matchValue(mockDispatch.path, path)
  const matchesMethod = matchValue(mockDispatch.method, method)
  const matchesBody = mockDispatch.body !== undefined ? matchValue(mockDispatch.body, body) : true
  const matchesHeaders = matchHeaders(mockDispatch, headers)
  return matchesPath && matchesMethod && matchesBody && matchesHeaders
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Coerces mock reply data to something writable: Buffers pass through,
// objects are JSON-serialized, everything else is stringified.
function getResponseData (data) {
  if (Buffer.isBuffer(data)) {
    return data
  }
  return typeof data === 'object' ? JSON.stringify(data) : data.toString()
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Finds the first unconsumed dispatch matching the request key, narrowing
// candidates one criterion at a time so the thrown MockNotMatchedError can
// name the criterion that eliminated the last candidate.
function getMockDispatch (mockDispatches, key) {
  const basePath = key.query ? buildURL(key.path, key.query) : key.path
  const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath

  // Match path
  let candidates = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))
  if (candidates.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
  }

  // Match method
  candidates = candidates.filter(({ method }) => matchValue(method, key.method))
  if (candidates.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)
  }

  // Match body
  candidates = candidates.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
  if (candidates.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)
  }

  // Match headers
  candidates = candidates.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
  if (candidates.length === 0) {
    throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)
  }

  return candidates[0]
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Registers a new mock dispatch built from the routing key plus reply
// data (a callback is stored under `callback` and resolved at dispatch
// time). Returns the stored entry so callers can tweak times/persist.
function addMockDispatch (mockDispatches, key, data) {
  const replyData = typeof data === 'function' ? { callback: data } : { ...data }
  const newMockDispatch = {
    timesInvoked: 0,
    times: 1,
    persist: false,
    consumed: false,
    ...key,
    pending: true,
    data: { error: null, ...replyData }
  }
  mockDispatches.push(newMockDispatch)
  return newMockDispatch
}
2024-04-24 12:04:10 -04:00
// Removes the first already-consumed dispatch matching the key; a no-op
// when nothing consumed matches.
function deleteMockDispatch (mockDispatches, key) {
  const index = mockDispatches.findIndex((dispatch) => dispatch.consumed && matchKey(dispatch, key))
  if (index !== -1) {
    mockDispatches.splice(index, 1)
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Projects dispatch options down to the fields used for mock matching.
function buildKey (opts) {
  const { path, method, body, headers, query } = opts
  return { path, method, body, headers, query }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * Flattens a headers record into the [key, value, key, value, ...] Buffer
 * list shape undici handlers receive. Multi-value headers (array values)
 * keep an array of Buffers in the value slot.
 *
 * @param {Record<string, string | string[]>} data
 * @returns {Array<Buffer | Buffer[]>}
 */
function generateKeyValues (data) {
  // flatMap builds the list in one pass; the previous spread-in-reduce
  // re-copied the accumulator on every entry (accidentally O(n²)).
  return Object.entries(data).flatMap(([key, value]) => [
    Buffer.from(`${key}`),
    Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)
  ])
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
 * @param {number} statusCode
 * @returns {string} the standard reason phrase, or 'unknown'
 */
function getStatusText (statusCode) {
  const text = STATUS_CODES[statusCode]
  return text === undefined ? 'unknown' : text
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Drains an async-iterable body and decodes the concatenated chunks as
// UTF-8 text.
async function getResponse (body) {
  const chunks = []
  for await (const chunk of body) {
    chunks.push(chunk)
  }
  return Buffer.concat(chunks).toString('utf8')
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * Mock dispatch function used to simulate undici dispatches
 */
function mockDispatch (opts, handler) {
  // Get mock dispatch from built key (throws MockNotMatchedError via
  // getMockDispatch when no registered dispatch matches).
  const key = buildKey(opts)
  const mockDispatch = getMockDispatch(this[kDispatches], key)

  mockDispatch.timesInvoked++

  // Here's where we resolve a callback if a callback is present for the dispatch data.
  if (mockDispatch.data.callback) {
    mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) }
  }

  // Parse mockDispatch data
  const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch
  const { timesInvoked, times } = mockDispatch

  // If it's used up and not persistent, mark as consumed
  mockDispatch.consumed = !persist && timesInvoked >= times
  mockDispatch.pending = timesInvoked < times

  // If specified, trigger dispatch error
  if (error !== null) {
    deleteMockDispatch(this[kDispatches], key)
    handler.onError(error)
    return true
  }

  // Handle the request with a delay if necessary
  if (typeof delay === 'number' && delay > 0) {
    setTimeout(() => {
      handleReply(this[kDispatches])
    }, delay)
  } else {
    handleReply(this[kDispatches])
  }

  // Emits the mocked reply through the handler callbacks
  // (onHeaders -> onData -> onComplete), then deletes the consumed entry.
  function handleReply (mockDispatches, _data = data) {
    // fetch's HeadersList is a 1D string array
    const optsHeaders = Array.isArray(opts.headers)
      ? buildHeadersFromArray(opts.headers)
      : opts.headers
    // Reply data may itself be a function of the request options.
    const body = typeof _data === 'function'
      ? _data({ ...opts, headers: optsHeaders })
      : _data
    // util.types.isPromise is likely needed for jest.
    if (isPromise(body)) {
      // If handleReply is asynchronous, throwing an error
      // in the callback will reject the promise, rather than
      // synchronously throw the error, which breaks some tests.
      // Rather, we wait for the callback to resolve if it is a
      // promise, and then re-run handleReply with the new body.
      body.then((newData) => handleReply(mockDispatches, newData))
      return
    }
    const responseData = getResponseData(body)
    const responseHeaders = generateKeyValues(headers)
    const responseTrailers = generateKeyValues(trailers)
    handler.abort = nop
    handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode))
    handler.onData(Buffer.from(responseData))
    handler.onComplete(responseTrailers)
    deleteMockDispatch(mockDispatches, key)
  }

  // No-op resume callback passed to onHeaders; mock replies are never
  // backpressured.
  function resume () {}

  return true
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Builds the dispatch function installed on a mock client/pool. While the
// agent's mocking is active it tries the mock dispatches first and only
// falls back to the real dispatch when net-connect policy allows it.
function buildMockDispatch () {
  const agent = this[kMockAgent]
  const origin = this[kOrigin]
  const originalDispatch = this[kOriginalDispatch]

  return function dispatch (opts, handler) {
    if (!agent.isMockActive) {
      originalDispatch.call(this, opts, handler)
      return
    }

    try {
      mockDispatch.call(this, opts, handler)
    } catch (error) {
      if (!(error instanceof MockNotMatchedError)) {
        throw error
      }

      const netConnect = agent[kGetNetConnect]()
      if (netConnect === false) {
        throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)
      }
      if (checkNetConnect(netConnect, origin)) {
        originalDispatch.call(this, opts, handler)
      } else {
        throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)
      }
    }
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// True when real network connections to `origin` are permitted: either
// net-connect is enabled globally (true) or one of the configured
// matchers matches the origin's host.
function checkNetConnect (netConnect, origin) {
  const url = new URL(origin)
  if (netConnect === true) {
    return true
  }
  return Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Strips the `agent` property from dispatcher options; returns undefined
// when no options were given.
function buildMockOptions (opts) {
  if (!opts) {
    return undefined
  }
  const { agent, ...mockOptions } = opts
  return mockOptions
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
module . exports = {
getResponseData ,
getMockDispatch ,
addMockDispatch ,
deleteMockDispatch ,
buildKey ,
generateKeyValues ,
matchValue ,
getResponse ,
getStatusText ,
mockDispatch ,
buildMockDispatch ,
checkNetConnect ,
buildMockOptions ,
getHeaderByName
}
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 6823 :
2023-04-12 19:55:27 +08:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
2024-04-24 12:04:10 -04:00
const { Transform } = _ _nccwpck _require _ _ ( 2781 )
const { Console } = _ _nccwpck _require _ _ ( 6206 )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/ * *
* Gets the output of ` console.table(…) ` as a string .
* /
module . exports = class PendingInterceptorsFormatter {
constructor ( { disableColors } = { } ) {
this . transform = new Transform ( {
transform ( chunk , _enc , cb ) {
cb ( null , chunk )
}
} )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
this . logger = new Console ( {
stdout : this . transform ,
inspectOptions : {
colors : ! disableColors && ! process . env . CI
}
} )
}
format ( pendingInterceptors ) {
const withPrettyHeaders = pendingInterceptors . map (
( { method , path , data : { statusCode } , persist , times , timesInvoked , origin } ) => ( {
Method : method ,
Origin : origin ,
Path : path ,
'Status code' : statusCode ,
Persistent : persist ? '✅' : '❌' ,
Invocations : timesInvoked ,
Remaining : persist ? Infinity : times - timesInvoked
} ) )
this . logger . table ( withPrettyHeaders )
return this . transform . read ( ) . toString ( )
}
2023-04-12 19:55:27 +08:00
}
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 8891 :
/***/ ( ( module ) => {
2023-04-12 19:55:27 +08:00
"use strict" ;
2024-04-24 12:04:10 -04:00
const singulars = {
pronoun : 'it' ,
is : 'is' ,
was : 'was' ,
this : 'this'
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const plurals = {
pronoun : 'they' ,
is : 'are' ,
was : 'were' ,
this : 'these'
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
module . exports = class Pluralizer {
constructor ( singular , plural ) {
this . singular = singular
this . plural = plural
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
pluralize ( count ) {
const one = count === 1
const keys = one ? singulars : plurals
const noun = one ? this . singular : this . plural
return { ... keys , count , noun }
}
2023-04-12 19:55:27 +08:00
}
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 8266 :
2023-04-12 19:55:27 +08:00
/***/ ( ( module ) => {
"use strict" ;
2024-04-24 12:04:10 -04:00
/* eslint-disable */
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Extracted from node/lib/internal/fixed_queue.js
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.
const kSize = 2048;
const kMask = kSize - 1;

// The FixedQueue is implemented as a singly-linked list of fixed-size
// circular buffers. Adding a value moves `top` forward by one; removing
// moves `bottom` forward by one; both wrap around at kSize using the
// bitmask. When `top === bottom` the buffer is empty and when
// `top + 1 === bottom` (mod kSize) it is full — one slot is deliberately
// wasted so empty/full can be distinguished cheaply. When a buffer fills
// up, FixedQueue links a fresh buffer after it via `next`.
class FixedCircularBuffer {
  constructor() {
    this.bottom = 0;
    this.top = 0;
    this.list = new Array(kSize);
    this.next = null;
  }

  isEmpty() {
    return this.top === this.bottom;
  }

  isFull() {
    return ((this.top + 1) & kMask) === this.bottom;
  }

  push(data) {
    this.list[this.top] = data;
    this.top = (this.top + 1) & kMask;
  }

  shift() {
    const item = this.list[this.bottom];
    if (item === undefined) {
      return null;
    }
    this.list[this.bottom] = undefined;
    this.bottom = (this.bottom + 1) & kMask;
    return item;
  }
}
module . exports = class FixedQueue {
constructor ( ) {
this . head = this . tail = new FixedCircularBuffer ( ) ;
}
isEmpty ( ) {
return this . head . isEmpty ( ) ;
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
push ( data ) {
if ( this . head . isFull ( ) ) {
// Head is full: Creates a new queue, sets the old queue's `.next` to it,
// and sets it as the new main queue.
this . head = this . head . next = new FixedCircularBuffer ( ) ;
2020-05-07 12:11:11 -04:00
}
2024-04-24 12:04:10 -04:00
this . head . push ( data ) ;
}
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
shift ( ) {
const tail = this . tail ;
const next = tail . shift ( ) ;
if ( tail . isEmpty ( ) && tail . next !== null ) {
// If there is another queue, it forms the new tail.
this . tail = tail . next ;
}
return next ;
}
2023-04-12 19:55:27 +08:00
} ;
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 3198 :
2023-04-12 19:55:27 +08:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
2024-04-24 12:04:10 -04:00
const DispatcherBase = _ _nccwpck _require _ _ ( 4839 )
const FixedQueue = _ _nccwpck _require _ _ ( 8266 )
const { kConnected , kSize , kRunning , kPending , kQueued , kBusy , kFree , kUrl , kClose , kDestroy , kDispatch } = _ _nccwpck _require _ _ ( 2785 )
const PoolStats = _ _nccwpck _require _ _ ( 9689 )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const kClients = Symbol ( 'clients' )
const kNeedDrain = Symbol ( 'needDrain' )
const kQueue = Symbol ( 'queue' )
const kClosedResolve = Symbol ( 'closed resolve' )
const kOnDrain = Symbol ( 'onDrain' )
const kOnConnect = Symbol ( 'onConnect' )
const kOnDisconnect = Symbol ( 'onDisconnect' )
const kOnConnectionError = Symbol ( 'onConnectionError' )
const kGetDispatcher = Symbol ( 'get dispatcher' )
const kAddClient = Symbol ( 'add client' )
const kRemoveClient = Symbol ( 'remove client' )
const kStats = Symbol ( 'stats' )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Shared base for Pool/BalancedPool: keeps a list of client dispatchers,
// queues work when every client is saturated, and re-drains the queue as
// clients free up. Subclasses provide [kGetDispatcher].
class PoolBase extends DispatcherBase {
  constructor () {
    super()

    this[kQueue] = new FixedQueue() // requests waiting for a free client
    this[kClients] = []
    this[kQueued] = 0 // number of items currently in kQueue

    const pool = this

    // Drain handler attached to each client ('this' is the client that
    // emitted 'drain'; 'pool' closes over the owning pool).
    this[kOnDrain] = function onDrain (origin, targets) {
      const queue = pool[kQueue]

      let needDrain = false

      // Feed queued requests to this client until it refuses one.
      while (!needDrain) {
        const item = queue.shift()
        if (!item) {
          break
        }
        pool[kQueued]--
        needDrain = !this.dispatch(item.opts, item.handler)
      }

      this[kNeedDrain] = needDrain

      // Pool became drainable again: propagate the 'drain' event upward.
      if (!this[kNeedDrain] && pool[kNeedDrain]) {
        pool[kNeedDrain] = false
        pool.emit('drain', origin, [pool, ...targets])
      }

      // A close() is pending and the queue just emptied: finish it.
      if (pool[kClosedResolve] && queue.isEmpty()) {
        Promise
          .all(pool[kClients].map(c => c.close()))
          .then(pool[kClosedResolve])
      }
    }

    this[kOnConnect] = (origin, targets) => {
      pool.emit('connect', origin, [pool, ...targets])
    }

    this[kOnDisconnect] = (origin, targets, err) => {
      pool.emit('disconnect', origin, [pool, ...targets], err)
    }

    this[kOnConnectionError] = (origin, targets, err) => {
      pool.emit('connectionError', origin, [pool, ...targets], err)
    }

    this[kStats] = new PoolStats(this)
  }

  get [kBusy] () {
    return this[kNeedDrain]
  }

  get [kConnected] () {
    return this[kClients].filter(client => client[kConnected]).length
  }

  get [kFree] () {
    return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length
  }

  // Pending = locally queued items plus each client's own pending count.
  get [kPending] () {
    let ret = this[kQueued]
    for (const { [kPending]: pending } of this[kClients]) {
      ret += pending
    }
    return ret
  }

  get [kRunning] () {
    let ret = 0
    for (const { [kRunning]: running } of this[kClients]) {
      ret += running
    }
    return ret
  }

  get [kSize] () {
    let ret = this[kQueued]
    for (const { [kSize]: size } of this[kClients]) {
      ret += size
    }
    return ret
  }

  get stats () {
    return this[kStats]
  }

  // Graceful close: immediate when no work is queued; otherwise resolved
  // later by the drain handler once the queue empties.
  async [kClose] () {
    if (this[kQueue].isEmpty()) {
      return Promise.all(this[kClients].map(c => c.close()))
    } else {
      return new Promise((resolve) => {
        this[kClosedResolve] = resolve
      })
    }
  }

  // Forced teardown: fail every queued request, then destroy all clients.
  async [kDestroy] (err) {
    while (true) {
      const item = this[kQueue].shift()
      if (!item) {
        break
      }
      item.handler.onError(err)
    }

    return Promise.all(this[kClients].map(c => c.destroy(err)))
  }

  // Returns false (needs drain) when the request had to be queued or the
  // chosen dispatcher became saturated with no alternative available.
  [kDispatch] (opts, handler) {
    const dispatcher = this[kGetDispatcher]()

    if (!dispatcher) {
      this[kNeedDrain] = true
      this[kQueue].push({ opts, handler })
      this[kQueued]++
    } else if (!dispatcher.dispatch(opts, handler)) {
      dispatcher[kNeedDrain] = true
      this[kNeedDrain] = !this[kGetDispatcher]()
    }

    return !this[kNeedDrain]
  }

  [kAddClient] (client) {
    client
      .on('drain', this[kOnDrain])
      .on('connect', this[kOnConnect])
      .on('disconnect', this[kOnDisconnect])
      .on('connectionError', this[kOnConnectionError])

    this[kClients].push(client)

    // If work is already queued, try draining into the new client on the
    // next tick (re-checked in case the state changed meanwhile).
    if (this[kNeedDrain]) {
      process.nextTick(() => {
        if (this[kNeedDrain]) {
          this[kOnDrain](client[kUrl], [this, client])
        }
      })
    }

    return this
  }

  [kRemoveClient] (client) {
    client.close(() => {
      const idx = this[kClients].indexOf(client)
      if (idx !== -1) {
        this[kClients].splice(idx, 1)
      }
    })

    // Recompute drain state from the remaining usable clients.
    this[kNeedDrain] = this[kClients].some(dispatcher => (
      !dispatcher[kNeedDrain] &&
      dispatcher.closed !== true &&
      dispatcher.destroyed !== true
    ))
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
module . exports = {
PoolBase ,
kClients ,
kNeedDrain ,
kAddClient ,
kRemoveClient ,
kGetDispatcher
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ 9689 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const { kFree , kConnected , kPending , kQueued , kRunning , kSize } = _ _nccwpck _require _ _ ( 2785 )
const kPool = Symbol('pool')

// Read-only snapshot view over a pool's aggregate counters (exposed as
// `pool.stats`); each getter delegates to the corresponding symbol-keyed
// getter on the wrapped pool.
class PoolStats {
  constructor (pool) {
    this[kPool] = pool
  }

  get connected () {
    return this[kPool][kConnected]
  }

  get free () {
    return this[kPool][kFree]
  }

  get pending () {
    return this[kPool][kPending]
  }

  get queued () {
    return this[kPool][kQueued]
  }

  get running () {
    return this[kPool][kRunning]
  }

  get size () {
    return this[kPool][kSize]
  }
}

module.exports = PoolStats
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 4634 :
2023-04-12 19:55:27 +08:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
2024-04-24 12:04:10 -04:00
const {
PoolBase ,
kClients ,
kNeedDrain ,
kAddClient ,
kGetDispatcher
} = _ _nccwpck _require _ _ ( 3198 )
const Client = _ _nccwpck _require _ _ ( 3598 )
const {
InvalidArgumentError
} = _ _nccwpck _require _ _ ( 8045 )
const util = _ _nccwpck _require _ _ ( 3983 )
const { kUrl , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const buildConnector = _ _nccwpck _require _ _ ( 2067 )
const kOptions = Symbol('options')
const kConnections = Symbol('connections')
const kFactory = Symbol('factory')

// Default factory used to create a Client for each pool slot.
function defaultFactory (origin, opts) {
  return new Client(origin, opts)
}

// Pool of client dispatchers for a single origin. Validates construction
// options, builds the connector used for new sockets, and creates clients
// lazily through [kGetDispatcher] up to the configured connection limit.
class Pool extends PoolBase {
  constructor (origin, {
    connections,
    factory = defaultFactory,
    connect,
    connectTimeout,
    tls,
    maxCachedSessions,
    socketPath,
    autoSelectFamily,
    autoSelectFamilyAttemptTimeout,
    allowH2,
    ...options
  } = {}) {
    super()

    if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
      throw new InvalidArgumentError('invalid connections')
    }

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    // A non-function `connect` (options object or undefined) is wrapped in
    // a connector function; a user-provided function is used as-is.
    if (typeof connect !== 'function') {
      connect = buildConnector({
        ...tls,
        maxCachedSessions,
        allowH2,
        socketPath,
        timeout: connectTimeout,
        ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
        ...connect
      })
    }

    this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool)
      ? options.interceptors.Pool
      : []
    this[kConnections] = connections || null
    this[kUrl] = util.parseOrigin(origin)
    // Deep-clone the remaining options so later caller mutations cannot
    // affect clients created from them.
    this[kOptions] = { ...util.deepClone(options), connect, allowH2 }
    this[kOptions].interceptors = options.interceptors
      ? { ...options.interceptors }
      : undefined
    this[kFactory] = factory
  }

  // Returns a client able to accept work, creating one if under the
  // connection limit; undefined when the pool is saturated.
  [kGetDispatcher] () {
    let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain])

    if (dispatcher) {
      return dispatcher
    }

    if (!this[kConnections] || this[kClients].length < this[kConnections]) {
      dispatcher = this[kFactory](this[kUrl], this[kOptions])
      this[kAddClient](dispatcher)
    }

    return dispatcher
  }
}

module.exports = Pool
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 7858 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-04-12 19:55:27 +08:00
"use strict" ;
2024-04-24 12:04:10 -04:00
const { kProxy , kClose , kDestroy , kInterceptors } = _ _nccwpck _require _ _ ( 2785 )
const { URL } = _ _nccwpck _require _ _ ( 7310 )
const Agent = _ _nccwpck _require _ _ ( 7890 )
const Pool = _ _nccwpck _require _ _ ( 4634 )
const DispatcherBase = _ _nccwpck _require _ _ ( 4839 )
const { InvalidArgumentError , RequestAbortedError } = _ _nccwpck _require _ _ ( 8045 )
const buildConnector = _ _nccwpck _require _ _ ( 2067 )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const kAgent = Symbol ( 'proxy agent' )
const kClient = Symbol ( 'proxy client' )
const kProxyHeaders = Symbol ( 'proxy headers' )
const kRequestTls = Symbol ( 'request tls settings' )
const kProxyTls = Symbol ( 'proxy tls settings' )
const kConnectEndpoint = Symbol ( 'connect endpoint function' )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/**
 * Default TCP port implied by a URL protocol.
 * @param {string} protocol - protocol including the trailing colon, e.g. 'https:'
 * @returns {number} 443 for 'https:', 80 for anything else
 */
function defaultProtocolPort (protocol) {
  if (protocol === 'https:') {
    return 443
  }
  return 80
}
2024-04-24 12:04:10 -04:00
/**
 * Normalise ProxyAgent options into a `{ uri, protocol }` record.
 *
 * Accepts either a plain URI string or an options object; the URI is
 * mandatory in both forms.
 *
 * @param {string | { uri?: string, protocol?: string }} opts
 * @returns {{ uri: string, protocol: string }}
 * @throws {InvalidArgumentError} when no uri is provided
 */
function buildProxyOptions (opts) {
  const normalized = typeof opts === 'string' ? { uri: opts } : opts

  if (!normalized || !normalized.uri) {
    throw new InvalidArgumentError('Proxy opts.uri is mandatory')
  }

  return {
    uri: normalized.uri,
    protocol: normalized.protocol || 'https'
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Default client factory used by ProxyAgent: one Pool per proxied origin.
function defaultFactory (origin, opts) {
  return new Pool(origin, opts)
}
2024-04-24 12:04:10 -04:00
/**
 * Dispatcher that tunnels every request through an HTTP proxy via the
 * CONNECT method, optionally upgrading the tunnelled socket to TLS for
 * https targets.
 */
class ProxyAgent extends DispatcherBase {
  constructor (opts) {
    super(opts)
    this[kProxy] = buildProxyOptions(opts)
    this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
      ? opts.interceptors.ProxyAgent
      : []

    // Accept a bare URI string as shorthand for `{ uri }`.
    if (typeof opts === 'string') {
      opts = { uri: opts }
    }

    if (!opts || !opts.uri) {
      throw new InvalidArgumentError('Proxy opts.uri is mandatory')
    }

    const { clientFactory = defaultFactory } = opts

    if (typeof clientFactory !== 'function') {
      throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
    }

    this[kRequestTls] = opts.requestTls
    this[kProxyTls] = opts.proxyTls
    this[kProxyHeaders] = opts.headers || {}

    const resolvedUrl = new URL(opts.uri)
    const { origin, port, host, username, password } = resolvedUrl

    if (opts.auth && opts.token) {
      throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
    } else if (opts.auth) {
      /* @deprecated in favour of opts.token */
      this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
    } else if (opts.token) {
      this[kProxyHeaders]['proxy-authorization'] = opts.token
    } else if (username && password) {
      // Credentials embedded in the proxy URI itself.
      this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
    }

    const connect = buildConnector({ ...opts.proxyTls })
    this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
    this[kClient] = clientFactory(resolvedUrl, { connect })
    // fix: previously an extra `new Agent(opts)` was created above and
    // immediately overwritten here, leaking the first agent's resources.
    this[kAgent] = new Agent({
      ...opts,
      connect: async (opts, callback) => {
        let requestedHost = opts.host
        if (!opts.port) {
          requestedHost += `:${defaultProtocolPort(opts.protocol)}`
        }
        try {
          const { socket, statusCode } = await this[kClient].connect({
            origin,
            port,
            path: requestedHost,
            signal: opts.signal,
            headers: {
              ...this[kProxyHeaders],
              host
            }
          })
          if (statusCode !== 200) {
            socket.on('error', () => {}).destroy()
            callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
            // fix: bail out so `callback` is not invoked a second time below
            // with the already-destroyed socket.
            return
          }
          // Plain http target: hand the raw tunnelled socket back.
          if (opts.protocol !== 'https:') {
            callback(null, socket)
            return
          }
          // https target: upgrade the tunnelled socket to TLS.
          let servername
          if (this[kRequestTls]) {
            servername = this[kRequestTls].servername
          } else {
            servername = opts.servername
          }
          this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
        } catch (err) {
          callback(err)
        }
      }
    })
  }

  /**
   * Dispatch a request through the tunnelling agent, rewriting the Host
   * header to the target origin. Rejects requests carrying their own
   * Proxy-Authorization header.
   */
  dispatch (opts, handler) {
    const { host } = new URL(opts.origin)
    const headers = buildHeaders(opts.headers)
    throwIfProxyAuthIsSent(headers)
    return this[kAgent].dispatch(
      {
        ...opts,
        headers: {
          ...headers,
          host
        }
      },
      handler
    )
  }

  async [kClose] () {
    await this[kAgent].close()
    await this[kClient].close()
  }

  async [kDestroy] () {
    await this[kAgent].destroy()
    await this[kClient].destroy()
  }
}
2023-04-12 19:55:27 +08:00
/ * *
2024-04-24 12:04:10 -04:00
* @ param { string [ ] | Record < string , string > } headers
* @ returns { Record < string , string > }
2023-04-12 19:55:27 +08:00
* /
2024-04-24 12:04:10 -04:00
function buildHeaders ( headers ) {
// When using undici.fetch, the headers list is stored
// as an array.
if ( Array . isArray ( headers ) ) {
/** @type {Record<string, string>} */
const headersPair = { }
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
for ( let i = 0 ; i < headers . length ; i += 2 ) {
headersPair [ headers [ i ] ] = headers [ i + 1 ]
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
return headersPair
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
return headers
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
/ * *
* @ param { Record < string , string > } headers
*
* Previous versions of ProxyAgent suggests the Proxy - Authorization in request headers
* Nevertheless , it was changed and to avoid a security vulnerability by end users
* this check was created .
* It should be removed in the next major version for performance reasons
* /
function throwIfProxyAuthIsSent ( headers ) {
const existProxyAuth = headers && Object . keys ( headers )
. find ( ( key ) => key . toLowerCase ( ) === 'proxy-authorization' )
if ( existProxyAuth ) {
throw new InvalidArgumentError ( 'Proxy-Authorization should be sent in ProxyAgent constructor' )
}
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
module . exports = ProxyAgent
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ 9459 :
/***/ ( ( module ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// Coarse shared clock, refreshed once per tick by onTimeout().
let fastNow = Date.now()
// Single native timer driving every Timeout instance in this module.
let fastNowTimeout

// Active Timeout instances (unsorted; scanned linearly on each tick).
const fastTimers = []
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// Timer tick: advance the shared clock, fire due timers, and evict
// inactive ones from the list.
function onTimeout () {
  fastNow = Date.now()

  let len = fastTimers.length
  let idx = 0
  while (idx < len) {
    const timer = fastTimers[idx]

    if (timer.state === 0) {
      // First tick after refresh(): arm with an absolute deadline.
      timer.state = fastNow + timer.delay
    } else if (timer.state > 0 && fastNow >= timer.state) {
      // Deadline reached: mark inactive, then run the callback.
      timer.state = -1
      timer.callback(timer.opaque)
    }

    if (timer.state === -1) {
      // Evict via swap-with-last (O(1); list order is not preserved).
      timer.state = -2
      if (idx !== len - 1) {
        fastTimers[idx] = fastTimers.pop()
      } else {
        fastTimers.pop()
      }
      len -= 1
    } else {
      idx += 1
    }
  }

  // Keep ticking only while there are live timers.
  if (fastTimers.length > 0) {
    refreshTimeout()
  }
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// (Re)start the single native timer that drives the tick.
function refreshTimeout () {
  // Prefer refreshing the existing handle when the runtime supports it.
  if (fastNowTimeout?.refresh) {
    fastNowTimeout.refresh()
    return
  }

  clearTimeout(fastNowTimeout)
  fastNowTimeout = setTimeout(onTimeout, 1e3)
  // Don't keep the event loop alive just for this bookkeeping timer.
  if (fastNowTimeout.unref) {
    fastNowTimeout.unref()
  }
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * Cheap timer backed by the module-wide once-per-second tick rather than a
 * native timer per instance.
 */
class Timeout {
  constructor (callback, delay, opaque) {
    this.callback = callback
    this.delay = delay
    this.opaque = opaque

    // -2 not in timer list
    // -1 in timer list but inactive
    //  0 in timer list waiting for time
    // > 0 in timer list waiting for time to expire
    this.state = -2

    this.refresh()
  }

  // (Re)arm the timer; the absolute deadline is computed on the next tick.
  refresh () {
    const detached = this.state === -2
    if (detached) {
      fastTimers.push(this)
      const mustStartTick = !fastNowTimeout || fastTimers.length === 1
      if (mustStartTick) {
        refreshTimeout()
      }
    }

    this.state = 0
  }

  // Deactivate; actual removal from the list happens lazily on the next tick.
  clear () {
    this.state = -1
  }
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
module.exports = {
  // Sub-second delays fall through to the precise native timer; longer
  // delays use the coarse, cheaper Timeout above.
  setTimeout (callback, delay, opaque) {
    return delay < 1e3
      ? setTimeout(callback, delay, opaque)
      : new Timeout(callback, delay, opaque)
  },
  clearTimeout (timeout) {
    if (timeout instanceof Timeout) {
      timeout.clear()
    } else {
      // Not one of ours: delegate to the GLOBAL clearTimeout (object
      // methods are not in scope here, so this is not recursive).
      clearTimeout(timeout)
    }
  }
}
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 5354 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
const diagnosticsChannel = _ _nccwpck _require _ _ ( 7643 )
const { uid , states } = _ _nccwpck _require _ _ ( 9188 )
const {
kReadyState ,
kSentClose ,
kByteParser ,
kReceivedClose
} = _ _nccwpck _require _ _ ( 7578 )
const { fireEvent , failWebsocketConnection } = _ _nccwpck _require _ _ ( 5515 )
const { CloseEvent } = _ _nccwpck _require _ _ ( 2611 )
const { makeRequest } = _ _nccwpck _require _ _ ( 8359 )
const { fetching } = _ _nccwpck _require _ _ ( 4881 )
const { Headers } = _ _nccwpck _require _ _ ( 554 )
const { getGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
const { kHeadersList } = _ _nccwpck _require _ _ ( 2785 )
// Diagnostics channels published on websocket lifecycle events.
const channels = {}
channels.open = diagnosticsChannel.channel('undici:websocket:open')
channels.close = diagnosticsChannel.channel('undici:websocket:close')
channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error')

/** @type {import('crypto')} */
let crypto
try {
  crypto = __nccwpck_require__(6113)
} catch {
  // Node built without crypto support; the handshake below will fail if used.
}
2023-04-12 19:55:27 +08:00
/ * *
2024-04-24 12:04:10 -04:00
* @ see https : //websockets.spec.whatwg.org/#concept-websocket-establish
* @ param { URL } url
* @ param { string | string [ ] } protocols
* @ param { import ( './websocket' ) . WebSocket } ws
* @ param { ( response : any ) => void } onEstablish
* @ param { Partial < import ( '../../types/websocket' ) . WebSocketInit > } options
2023-04-12 19:55:27 +08:00
* /
2024-04-24 12:04:10 -04:00
function establishWebSocketConnection (url, protocols, ws, onEstablish, options) {
  // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s
  //    scheme is "ws", and to "https" otherwise.
  // NOTE(review): `requestURL` aliases `url` rather than copying it, so the
  // caller's URL object has its protocol rewritten in place — presumably the
  // caller always passes in a private copy; confirm at the call site.
  const requestURL = url
  requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:'

  // 2. Let request be a new request, whose URL is requestURL, client is client,
  //    service-workers mode is "none", referrer is "no-referrer", mode is
  //    "websocket", credentials mode is "include", cache mode is "no-store" ,
  //    and redirect mode is "error".
  const request = makeRequest({
    urlList: [requestURL],
    serviceWorkers: 'none',
    referrer: 'no-referrer',
    mode: 'websocket',
    credentials: 'include',
    cache: 'no-store',
    redirect: 'error'
  })

  // Note: undici extension, allow setting custom headers.
  if (options.headers) {
    const headersList = new Headers(options.headers)[kHeadersList]

    request.headersList = headersList
  }

  // 3. Append (`Upgrade`, `websocket`) to request’s header list.
  // 4. Append (`Connection`, `Upgrade`) to request’s header list.
  // Note: both of these are handled by undici currently.
  // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397

  // 5. Let keyValue be a nonce consisting of a randomly selected
  //    16-byte value that has been forgiving-base64-encoded and
  //    isomorphic encoded.
  const keyValue = crypto.randomBytes(16).toString('base64')

  // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s
  //    header list.
  request.headersList.append('sec-websocket-key', keyValue)

  // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s
  //    header list.
  request.headersList.append('sec-websocket-version', '13')

  // 8. For each protocol in protocols, combine
  //    (`Sec-WebSocket-Protocol`, protocol) in request’s header
  //    list.
  for (const protocol of protocols) {
    request.headersList.append('sec-websocket-protocol', protocol)
  }

  // 9. Let permessageDeflate be a user-agent defined
  //    "permessage-deflate" extension header value.
  // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673
  // TODO: enable once permessage-deflate is supported
  const permessageDeflate = '' // 'permessage-deflate; 15'

  // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to
  //     request’s header list.
  // request.headersList.append('sec-websocket-extensions', permessageDeflate)

  // 11. Fetch request with useParallelQueue set to true, and
  //     processResponse given response being these steps:
  const controller = fetching({
    request,
    useParallelQueue: true,
    dispatcher: options.dispatcher ?? getGlobalDispatcher(),
    processResponse (response) {
      // 1. If response is a network error or its status is not 101,
      //    fail the WebSocket connection.
      if (response.type === 'error' || response.status !== 101) {
        failWebsocketConnection(ws, 'Received network error or non-101 status code.')
        return
      }

      // 2. If protocols is not the empty list and extracting header
      //    list values given `Sec-WebSocket-Protocol` and response’s
      //    header list results in null, failure, or the empty byte
      //    sequence, then fail the WebSocket connection.
      if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) {
        failWebsocketConnection(ws, 'Server did not respond with sent protocols.')
        return
      }

      // 3. Follow the requirements stated step 2 to step 6, inclusive,
      //    of the last set of steps in section 4.1 of The WebSocket
      //    Protocol to validate response. This either results in fail
      //    the WebSocket connection or the WebSocket connection is
      //    established.

      // 2. If the response lacks an |Upgrade| header field or the |Upgrade|
      //    header field contains a value that is not an ASCII case-
      //    insensitive match for the value "websocket", the client MUST
      //    _Fail the WebSocket Connection_.
      if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') {
        failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".')
        return
      }

      // 3. If the response lacks a |Connection| header field or the
      //    |Connection| header field doesn't contain a token that is an
      //    ASCII case-insensitive match for the value "Upgrade", the client
      //    MUST _Fail the WebSocket Connection_.
      if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') {
        failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".')
        return
      }

      // 4. If the response lacks a |Sec-WebSocket-Accept| header field or
      //    the |Sec-WebSocket-Accept| contains a value other than the
      //    base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket-
      //    Key| (as a string, not base64-decoded) with the string "258EAFA5-
      //    E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and
      //    trailing whitespace, the client MUST _Fail the WebSocket
      //    Connection_.
      const secWSAccept = response.headersList.get('Sec-WebSocket-Accept')
      const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64')
      if (secWSAccept !== digest) {
        failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.')
        return
      }

      // 5. If the response includes a |Sec-WebSocket-Extensions| header
      //    field and this header field indicates the use of an extension
      //    that was not present in the client's handshake (the server has
      //    indicated an extension not requested by the client), the client
      //    MUST _Fail the WebSocket Connection_. (The parsing of this
      //    header field to determine which extensions are requested is
      //    discussed in Section 9.1.)
      const secExtension = response.headersList.get('Sec-WebSocket-Extensions')

      if (secExtension !== null && secExtension !== permessageDeflate) {
        failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.')
        return
      }

      // 6. If the response includes a |Sec-WebSocket-Protocol| header field
      //    and this header field indicates the use of a subprotocol that was
      //    not present in the client's handshake (the server has indicated a
      //    subprotocol not requested by the client), the client MUST _Fail
      //    the WebSocket Connection_.
      const secProtocol = response.headersList.get('Sec-WebSocket-Protocol')

      if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) {
        failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.')
        return
      }

      // Handshake validated: wire the raw socket to the data/close/error
      // handlers defined below.
      response.socket.on('data', onSocketData)
      response.socket.on('close', onSocketClose)
      response.socket.on('error', onSocketError)

      if (channels.open.hasSubscribers) {
        channels.open.publish({
          address: response.socket.address(),
          protocol: secProtocol,
          extensions: secExtension
        })
      }

      onEstablish(response)
    }
  })

  return controller
}
2023-03-09 17:42:29 +01:00
2023-04-12 19:55:27 +08:00
/ * *
2024-04-24 12:04:10 -04:00
* @ param { Buffer } chunk
2023-04-12 19:55:27 +08:00
* /
2024-04-24 12:04:10 -04:00
function onSocketData (chunk) {
  const accepted = this.ws[kByteParser].write(chunk)
  if (!accepted) {
    // Parser backpressure: stop reading until it drains.
    this.pause()
  }
}
2023-03-09 17:42:29 +01:00
2023-04-12 19:55:27 +08:00
/ * *
2024-04-24 12:04:10 -04:00
* @ see https : //websockets.spec.whatwg.org/#feedback-from-the-protocol
* @ see https : //datatracker.ietf.org/doc/html/rfc6455#section-7.1.4
2023-04-12 19:55:27 +08:00
* /
2024-04-24 12:04:10 -04:00
function onSocketClose () {
  const { ws } = this

  // Clean close: both sides completed the closing handshake before the
  // TCP connection went away.
  const wasClean = ws[kSentClose] && ws[kReceivedClose]

  let code = 1005
  let reason = ''
  const closingInfo = ws[kByteParser].closingInfo

  if (closingInfo) {
    code = closingInfo.code ?? 1005
    reason = closingInfo.reason
  } else if (!ws[kSentClose]) {
    // No Close control frame was exchanged (e.g. the underlying transport
    // was lost): the connection close code is considered to be 1006.
    code = 1006
  }

  // 1. Change the ready state to CLOSED (3).
  ws[kReadyState] = states.CLOSED

  // 2. If the user agent was required to fail the WebSocket connection, or
  //    if it was closed after being flagged as full, fire 'error'. TODO

  // 3. Fire a CloseEvent describing how the connection ended.
  fireEvent('close', ws, CloseEvent, {
    wasClean, code, reason
  })

  if (channels.close.hasSubscribers) {
    channels.close.publish({
      websocket: ws,
      code,
      reason
    })
  }
}
2024-04-24 12:04:10 -04:00
// Socket 'error' handler: mark the websocket as closing, surface the error
// on the diagnostics channel, then tear the socket down.
function onSocketError (error) {
  const ws = this.ws

  ws[kReadyState] = states.CLOSING

  const { socketError } = channels
  if (socketError.hasSubscribers) {
    socketError.publish(error)
  }

  this.destroy()
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
module . exports = {
establishWebSocketConnection
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 9188 :
/***/ ( ( module ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// This is a Globally Unique Identifier unique used
// to validate that the endpoint accepts websocket
// connections.
// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3
const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'

/** @type {PropertyDescriptor} */
// Shared descriptor for the readyState constants exposed on WebSocket.
const staticPropertyDescriptors = {
  enumerable: true,
  writable: false,
  configurable: false
}

// WebSocket readyState values.
const states = {
  CONNECTING: 0,
  OPEN: 1,
  CLOSING: 2,
  CLOSED: 3
}

// Frame opcodes (RFC 6455 §5.2).
const opcodes = {
  CONTINUATION: 0x0,
  TEXT: 0x1,
  BINARY: 0x2,
  CLOSE: 0x8,
  PING: 0x9,
  PONG: 0xA
}

const maxUnsigned16Bit = 2 ** 16 - 1 // 65535

// Byte-parser state machine states.
const parserStates = {
  INFO: 0,
  PAYLOADLENGTH_16: 2,
  PAYLOADLENGTH_64: 3,
  READ_DATA: 4
}

// Reusable zero-length buffer (allocUnsafe is safe here: never written to).
const emptyBuffer = Buffer.allocUnsafe(0)

module.exports = {
  uid,
  staticPropertyDescriptors,
  states,
  opcodes,
  maxUnsigned16Bit,
  parserStates,
  emptyBuffer
}
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 2611 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { kEnumerableProperty } = _ _nccwpck _require _ _ ( 3983 )
const { MessagePort } = _ _nccwpck _require _ _ ( 1267 )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/ * *
* @ see https : //html.spec.whatwg.org/multipage/comms.html#messageevent
* /
class MessageEvent extends Event {
# eventInit
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
constructor ( type , eventInitDict = { } ) {
webidl . argumentLengthCheck ( arguments , 1 , { header : 'MessageEvent constructor' } )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
type = webidl . converters . DOMString ( type )
eventInitDict = webidl . converters . MessageEventInit ( eventInitDict )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
super ( type , eventInitDict )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
this . # eventInit = eventInitDict
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get data ( ) {
webidl . brandCheck ( this , MessageEvent )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # eventInit . data
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get origin ( ) {
webidl . brandCheck ( this , MessageEvent )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # eventInit . origin
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get lastEventId ( ) {
webidl . brandCheck ( this , MessageEvent )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # eventInit . lastEventId
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get source ( ) {
webidl . brandCheck ( this , MessageEvent )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # eventInit . source
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get ports ( ) {
webidl . brandCheck ( this , MessageEvent )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( ! Object . isFrozen ( this . # eventInit . ports ) ) {
Object . freeze ( this . # eventInit . ports )
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # eventInit . ports
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
initMessageEvent (
type ,
bubbles = false ,
cancelable = false ,
data = null ,
origin = '' ,
lastEventId = '' ,
source = null ,
ports = [ ]
) {
webidl . brandCheck ( this , MessageEvent )
webidl . argumentLengthCheck ( arguments , 1 , { header : 'MessageEvent.initMessageEvent' } )
return new MessageEvent ( type , {
bubbles , cancelable , data , origin , lastEventId , source , ports
} )
}
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/ * *
* @ see https : //websockets.spec.whatwg.org/#the-closeevent-interface
* /
class CloseEvent extends Event {
# eventInit
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
constructor ( type , eventInitDict = { } ) {
webidl . argumentLengthCheck ( arguments , 1 , { header : 'CloseEvent constructor' } )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
type = webidl . converters . DOMString ( type )
eventInitDict = webidl . converters . CloseEventInit ( eventInitDict )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
super ( type , eventInitDict )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
this . # eventInit = eventInitDict
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get wasClean ( ) {
webidl . brandCheck ( this , CloseEvent )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # eventInit . wasClean
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get code ( ) {
webidl . brandCheck ( this , CloseEvent )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # eventInit . code
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get reason ( ) {
webidl . brandCheck ( this , CloseEvent )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # eventInit . reason
}
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface
class ErrorEvent extends Event {
  // Converted ErrorEventInit dictionary backing the getters below.
  #eventInit

  constructor (type, eventInitDict) {
    webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' })

    // fix: run the webidl conversions BEFORE calling super(), matching
    // MessageEvent/CloseEvent above; previously the raw, unconverted
    // values were handed to the Event constructor.
    type = webidl.converters.DOMString(type)
    eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {})

    super(type, eventInitDict)

    this.#eventInit = eventInitDict
  }

  get message () {
    webidl.brandCheck(this, ErrorEvent)
    return this.#eventInit.message
  }

  get filename () {
    webidl.brandCheck(this, ErrorEvent)
    return this.#eventInit.filename
  }

  get lineno () {
    webidl.brandCheck(this, ErrorEvent)
    return this.#eventInit.lineno
  }

  get colno () {
    webidl.brandCheck(this, ErrorEvent)
    return this.#eventInit.colno
  }

  get error () {
    webidl.brandCheck(this, ErrorEvent)
    return this.#eventInit.error
  }
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// Spec-mandated @@toStringTag plus enumerable accessor properties on each
// event prototype.
Object.defineProperties(MessageEvent.prototype, {
  [Symbol.toStringTag]: {
    value: 'MessageEvent',
    configurable: true
  },
  data: kEnumerableProperty,
  origin: kEnumerableProperty,
  lastEventId: kEnumerableProperty,
  source: kEnumerableProperty,
  ports: kEnumerableProperty,
  initMessageEvent: kEnumerableProperty
})

Object.defineProperties(CloseEvent.prototype, {
  [Symbol.toStringTag]: {
    value: 'CloseEvent',
    configurable: true
  },
  reason: kEnumerableProperty,
  code: kEnumerableProperty,
  wasClean: kEnumerableProperty
})

Object.defineProperties(ErrorEvent.prototype, {
  [Symbol.toStringTag]: {
    value: 'ErrorEvent',
    configurable: true
  },
  message: kEnumerableProperty,
  filename: kEnumerableProperty,
  lineno: kEnumerableProperty,
  colno: kEnumerableProperty,
  error: kEnumerableProperty
})

// webidl converters for MessagePort values and sequences thereof.
webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort)

webidl.converters['sequence<MessagePort>'] = webidl.sequenceConverter(
  webidl.converters.MessagePort
)
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
const eventInit = [
{
key : 'bubbles' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'cancelable' ,
converter : webidl . converters . boolean ,
defaultValue : false
} ,
{
key : 'composed' ,
converter : webidl . converters . boolean ,
defaultValue : false
}
]
// Dictionary converter for MessageEventInit: the shared EventInit flags
// plus the MessageEvent-specific members.
webidl.converters.MessageEventInit = webidl.dictionaryConverter([
  ...eventInit,
  { key: 'data', converter: webidl.converters.any, defaultValue: null },
  { key: 'origin', converter: webidl.converters.USVString, defaultValue: '' },
  { key: 'lastEventId', converter: webidl.converters.DOMString, defaultValue: '' },
  {
    key: 'source',
    // Node doesn't implement WindowProxy or ServiceWorker, so the only
    // valid value for source is a MessagePort.
    converter: webidl.nullableConverter(webidl.converters.MessagePort),
    defaultValue: null
  },
  {
    key: 'ports',
    converter: webidl.converters['sequence<MessagePort>'],
    // A getter so each converted dictionary receives its own fresh array.
    get defaultValue () {
      return []
    }
  }
])
// Dictionary converter for CloseEventInit: the shared EventInit flags
// plus the CloseEvent-specific members.
webidl.converters.CloseEventInit = webidl.dictionaryConverter([
  ...eventInit,
  { key: 'wasClean', converter: webidl.converters.boolean, defaultValue: false },
  { key: 'code', converter: webidl.converters['unsigned short'], defaultValue: 0 },
  { key: 'reason', converter: webidl.converters.USVString, defaultValue: '' }
])
// Dictionary converter for ErrorEventInit: the shared EventInit flags
// plus the ErrorEvent-specific members.
webidl.converters.ErrorEventInit = webidl.dictionaryConverter([
  ...eventInit,
  { key: 'message', converter: webidl.converters.DOMString, defaultValue: '' },
  { key: 'filename', converter: webidl.converters.USVString, defaultValue: '' },
  { key: 'lineno', converter: webidl.converters['unsigned long'], defaultValue: 0 },
  { key: 'colno', converter: webidl.converters['unsigned long'], defaultValue: 0 },
  // `error` deliberately has no defaultValue: its absence is observable.
  { key: 'error', converter: webidl.converters.any }
])
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
module . exports = {
MessageEvent ,
CloseEvent ,
ErrorEvent
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 5444 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
const { maxUnsigned16Bit } = _ _nccwpck _require _ _ ( 9188 )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/** @type {import('crypto')} */
let crypto
try {
  crypto = __nccwpck_require__(6113)
} catch {
  // Deliberately ignored: some Node builds ship without the crypto
  // module, in which case `crypto` stays undefined.
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// Serializes client->server WebSocket frames (RFC 6455 section 5.2).
// Client frames are always masked; a fresh 4-byte mask key is drawn
// per instance.
class WebsocketFrameSend {
  /**
   * @param {Buffer|undefined} data frame payload; undefined means an empty body
   */
  constructor (data) {
    this.frameData = data
    // NOTE(review): `crypto` comes from a try/catch'd require above and
    // may be undefined in builds without the crypto module, which would
    // throw here — confirm callers only build frames when crypto exists.
    this.maskKey = crypto.randomBytes(4)
  }

  /**
   * Builds one complete masked frame with the FIN bit set.
   * @param {number} opcode one of the `opcodes` constants (low 4 bits of byte 0)
   * @returns {Buffer} header + mask key + masked payload
   */
  createFrame (opcode) {
    const bodyLength = this.frameData?.byteLength ?? 0

    /** @type {number} */
    let payloadLength = bodyLength // 0-125
    let offset = 6 // 2 header bytes + 4 mask-key bytes

    if (bodyLength > maxUnsigned16Bit) {
      offset += 8 // payload length is next 8 bytes
      payloadLength = 127
    } else if (bodyLength > 125) {
      offset += 2 // payload length is next 2 bytes
      payloadLength = 126
    }

    const buffer = Buffer.allocUnsafe(bodyLength + offset)

    // Clear first 2 bytes, everything else is overwritten
    buffer[0] = buffer[1] = 0
    buffer[0] |= 0x80 // FIN
    buffer[0] = (buffer[0] & 0xF0) + opcode // opcode

    /*! ws. MIT License. Einar Otto Stangvik <einaros@gmail.com> */
    // The mask key occupies the 4 bytes immediately before the payload.
    buffer[offset - 4] = this.maskKey[0]
    buffer[offset - 3] = this.maskKey[1]
    buffer[offset - 2] = this.maskKey[2]
    buffer[offset - 1] = this.maskKey[3]

    buffer[1] = payloadLength

    if (payloadLength === 126) {
      buffer.writeUInt16BE(bodyLength, 2)
    } else if (payloadLength === 127) {
      // Clear extended payload length
      buffer[2] = buffer[3] = 0
      // Write the low 6 bytes of the 64-bit length; writeUIntBE caps at
      // 2^48-1, which bounds the largest representable body here.
      buffer.writeUIntBE(bodyLength, 4, 6)
    }

    buffer[1] |= 0x80 // MASK

    // mask body: XOR each payload byte with the cycling 4-byte key
    for (let i = 0; i < bodyLength; i++) {
      buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4]
    }

    return buffer
  }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
module . exports = {
WebsocketFrameSend
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 1688 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
const { Writable } = _ _nccwpck _require _ _ ( 2781 )
const diagnosticsChannel = _ _nccwpck _require _ _ ( 7643 )
const { parserStates , opcodes , states , emptyBuffer } = _ _nccwpck _require _ _ ( 9188 )
const { kReadyState , kSentClose , kResponse , kReceivedClose } = _ _nccwpck _require _ _ ( 7578 )
const { isValidStatusCode , failWebsocketConnection , websocketMessageReceived } = _ _nccwpck _require _ _ ( 5515 )
const { WebsocketFrameSend } = _ _nccwpck _require _ _ ( 5444 )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// This code was influenced by ws released under the MIT license.
// Copyright (c) 2011 Einar Otto Stangvik <einaros@gmail.com>
// Copyright (c) 2013 Arnout Kazemier and contributors
// Copyright (c) 2016 Luigi Pinca and contributors
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
const channels = { }
channels . ping = diagnosticsChannel . channel ( 'undici:websocket:ping' )
channels . pong = diagnosticsChannel . channel ( 'undici:websocket:pong' )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Streaming parser for incoming WebSocket frames (RFC 6455 section 5).
// Chunks written to this Writable are buffered and parsed by a small
// state machine: INFO (2-byte header) -> optional 16/64-bit extended
// payload length -> READ_DATA.
class ByteParser extends Writable {
  #buffers = [] // unread chunks, in arrival order
  #byteOffset = 0 // total unread bytes across #buffers

  #state = parserStates.INFO

  #info = {} // header fields of the frame currently being parsed
  #fragments = [] // payloads of an in-progress fragmented message

  constructor (ws) {
    super()

    this.ws = ws
  }

  /**
   * Buffers an incoming chunk and resumes parsing.
   * @param {Buffer} chunk
   * @param {() => void} callback
   */
  _write (chunk, _, callback) {
    this.#buffers.push(chunk)
    this.#byteOffset += chunk.length

    this.run(callback)
  }

  /**
   * Runs whenever a new chunk is received.
   * Callback is called whenever there are no more chunks buffering,
   * or not enough bytes are buffered to parse.
   */
  run (callback) {
    while (true) {
      if (this.#state === parserStates.INFO) {
        // If there aren't enough bytes to parse the payload length, etc.
        if (this.#byteOffset < 2) {
          return callback()
        }

        const buffer = this.consume(2)

        this.#info.fin = (buffer[0] & 0x80) !== 0
        this.#info.opcode = buffer[0] & 0x0F

        // If we receive a fragmented message, we use the type of the first
        // frame to parse the full message as binary/text, when it's terminated
        this.#info.originalOpcode ??= this.#info.opcode

        this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION

        if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) {
          // Only text and binary frames can be fragmented
          failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.')
          return
        }

        const payloadLength = buffer[1] & 0x7F

        if (payloadLength <= 125) {
          this.#info.payloadLength = payloadLength
          this.#state = parserStates.READ_DATA
        } else if (payloadLength === 126) {
          this.#state = parserStates.PAYLOADLENGTH_16
        } else if (payloadLength === 127) {
          this.#state = parserStates.PAYLOADLENGTH_64
        }

        if (this.#info.fragmented && payloadLength > 125) {
          // A fragmented frame can't be fragmented itself
          failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.')
          return
        } else if (
          (this.#info.opcode === opcodes.PING ||
            this.#info.opcode === opcodes.PONG ||
            this.#info.opcode === opcodes.CLOSE) &&
          payloadLength > 125
        ) {
          // Control frames can have a payload length of 125 bytes MAX
          failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.')
          return
        } else if (this.#info.opcode === opcodes.CLOSE) {
          if (payloadLength === 1) {
            // A close body is either empty or starts with a 2-byte code.
            failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.')
            return
          }

          // NOTE(review): assumes the whole close body (<= 125 bytes) is
          // already buffered; consume() returns null otherwise — confirm
          // upstream always delivers control frames in one chunk.
          const body = this.consume(payloadLength)

          this.#info.closeInfo = this.parseCloseBody(false, body)

          if (!this.ws[kSentClose]) {
            // If an endpoint receives a Close frame and did not previously send a
            // Close frame, the endpoint MUST send a Close frame in response. (When
            // sending a Close frame in response, the endpoint typically echos the
            // status code it received.)
            const body = Buffer.allocUnsafe(2)
            body.writeUInt16BE(this.#info.closeInfo.code, 0)
            const closeFrame = new WebsocketFrameSend(body)

            this.ws[kResponse].socket.write(
              closeFrame.createFrame(opcodes.CLOSE),
              (err) => {
                if (!err) {
                  this.ws[kSentClose] = true
                }
              }
            )
          }

          // Upon either sending or receiving a Close control frame, it is said
          // that _The WebSocket Closing Handshake is Started_ and that the
          // WebSocket connection is in the CLOSING state.
          this.ws[kReadyState] = states.CLOSING
          this.ws[kReceivedClose] = true

          this.end()

          return
        } else if (this.#info.opcode === opcodes.PING) {
          // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in
          // response, unless it already received a Close frame.
          // A Pong frame sent in response to a Ping frame must have identical
          // "Application data"

          const body = this.consume(payloadLength)

          if (!this.ws[kReceivedClose]) {
            const frame = new WebsocketFrameSend(body)

            this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG))

            if (channels.ping.hasSubscribers) {
              channels.ping.publish({
                payload: body
              })
            }
          }

          this.#state = parserStates.INFO

          if (this.#byteOffset > 0) {
            continue
          } else {
            callback()
            return
          }
        } else if (this.#info.opcode === opcodes.PONG) {
          // A Pong frame MAY be sent unsolicited. This serves as a
          // unidirectional heartbeat. A response to an unsolicited Pong frame is
          // not expected.

          const body = this.consume(payloadLength)

          if (channels.pong.hasSubscribers) {
            channels.pong.publish({
              payload: body
            })
          }

          if (this.#byteOffset > 0) {
            continue
          } else {
            callback()
            return
          }
        }
      } else if (this.#state === parserStates.PAYLOADLENGTH_16) {
        if (this.#byteOffset < 2) {
          return callback()
        }

        const buffer = this.consume(2)

        this.#info.payloadLength = buffer.readUInt16BE(0)
        this.#state = parserStates.READ_DATA
      } else if (this.#state === parserStates.PAYLOADLENGTH_64) {
        if (this.#byteOffset < 8) {
          return callback()
        }

        const buffer = this.consume(8)
        const upper = buffer.readUInt32BE(0)

        // 2^31 is the maximum bytes an arraybuffer can contain
        // on 32-bit systems. Although, on 64-bit systems, this is
        // 2^53-1 bytes.
        // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length
        // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275
        // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e
        if (upper > 2 ** 31 - 1) {
          failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.')
          return
        }

        const lower = buffer.readUInt32BE(4)

        // Combine the two big-endian 32-bit halves of the 64-bit length.
        // The high word must be scaled by 2^32; the previous `upper << 8`
        // computed a far-too-small length for any payload >= 4 GiB and
        // desynchronized the parser from the frame stream.
        this.#info.payloadLength = (upper * 2 ** 32) + lower
        this.#state = parserStates.READ_DATA
      } else if (this.#state === parserStates.READ_DATA) {
        if (this.#byteOffset < this.#info.payloadLength) {
          // If there is still more data in this chunk that needs to be read
          return callback()
        } else if (this.#byteOffset >= this.#info.payloadLength) {
          // If the server sent multiple frames in a single chunk

          const body = this.consume(this.#info.payloadLength)

          this.#fragments.push(body)

          // If the frame is unfragmented, or a fragmented frame was terminated,
          // a message was received
          if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) {
            const fullMessage = Buffer.concat(this.#fragments)

            websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage)

            this.#info = {}
            this.#fragments.length = 0
          }

          this.#state = parserStates.INFO
        }
      }

      if (this.#byteOffset > 0) {
        continue
      } else {
        callback()
        break
      }
    }
  }

  /**
   * Take n bytes from the buffered Buffers
   * @param {number} n
   * @returns {Buffer|null} null when fewer than n bytes are buffered
   */
  consume (n) {
    if (n > this.#byteOffset) {
      return null
    } else if (n === 0) {
      return emptyBuffer
    }

    // Fast path: the first buffer holds exactly n bytes.
    if (this.#buffers[0].length === n) {
      this.#byteOffset -= this.#buffers[0].length
      return this.#buffers.shift()
    }

    const buffer = Buffer.allocUnsafe(n)
    let offset = 0

    while (offset !== n) {
      const next = this.#buffers[0]
      const { length } = next

      if (length + offset === n) {
        buffer.set(this.#buffers.shift(), offset)
        break
      } else if (length + offset > n) {
        // Split this buffer: copy what we need, keep the unread tail.
        buffer.set(next.subarray(0, n - offset), offset)
        this.#buffers[0] = next.subarray(n - offset)
        break
      } else {
        buffer.set(this.#buffers.shift(), offset)
        offset += next.length
      }
    }
    this.#byteOffset -= n

    return buffer
  }

  /**
   * Parses a close frame body into its status code and reason.
   * @param {boolean} onlyCode when true, skip reason decoding
   * @param {Buffer} data
   * @returns {{ code?: number, reason?: string } | null} null when invalid
   */
  parseCloseBody (onlyCode, data) {
    // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5
    /** @type {number|undefined} */
    let code

    if (data.length >= 2) {
      // _The WebSocket Connection Close Code_ is
      // defined as the status code (Section 7.4) contained in the first Close
      // control frame received by the application
      code = data.readUInt16BE(0)
    }

    if (onlyCode) {
      if (!isValidStatusCode(code)) {
        return null
      }

      return { code }
    }

    // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6
    /** @type {Buffer} */
    let reason = data.subarray(2)

    // Remove BOM
    if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) {
      reason = reason.subarray(3)
    }

    if (code !== undefined && !isValidStatusCode(code)) {
      return null
    }

    try {
      // TODO: optimize this
      reason = new TextDecoder('utf-8', { fatal: true }).decode(reason)
    } catch {
      return null
    }

    return { code, reason }
  }

  // Close info ({ code, reason }) of the most recent close frame, if any.
  get closingInfo () {
    return this.#info.closeInfo
  }
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
module . exports = {
ByteParser
}
2023-03-09 17:42:29 +01:00
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 7578 :
/***/ ( ( module ) => {
2023-03-09 17:42:29 +01:00
2023-04-12 19:55:27 +08:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
module . exports = {
kWebSocketURL : Symbol ( 'url' ) ,
kReadyState : Symbol ( 'ready state' ) ,
kController : Symbol ( 'controller' ) ,
kResponse : Symbol ( 'response' ) ,
kBinaryType : Symbol ( 'binary type' ) ,
kSentClose : Symbol ( 'sent close' ) ,
kReceivedClose : Symbol ( 'received close' ) ,
kByteParser : Symbol ( 'byte parser' )
}
2023-03-09 17:42:29 +01:00
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 5515 :
2023-04-12 19:55:27 +08:00
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
2023-04-12 19:55:27 +08:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
const { kReadyState , kController , kResponse , kBinaryType , kWebSocketURL } = _ _nccwpck _require _ _ ( 7578 )
const { states , opcodes } = _ _nccwpck _require _ _ ( 9188 )
const { MessageEvent , ErrorEvent } = _ _nccwpck _require _ _ ( 2611 )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/* globals Blob */
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * True once the opening handshake has completed and the connection is
 * in the OPEN state.
 * @param {import('./websocket').WebSocket} ws
 */
function isEstablished (ws) {
  // If the server's response is validated as provided for above, it is
  // said that _The WebSocket Connection is Established_ and that the
  // WebSocket Connection is in the OPEN state.
  const readyState = ws[kReadyState]
  return readyState === states.OPEN
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * True once the closing handshake has started (a close frame was sent
 * or received) and the connection is in the CLOSING state.
 * @param {import('./websocket').WebSocket} ws
 */
function isClosing (ws) {
  // Upon either sending or receiving a Close control frame, it is said
  // that _The WebSocket Closing Handshake is Started_ and that the
  // WebSocket connection is in the CLOSING state.
  const readyState = ws[kReadyState]
  return readyState === states.CLOSING
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * True once the connection has fully closed (CLOSED state).
 * @param {import('./websocket').WebSocket} ws
 */
function isClosed (ws) {
  const readyState = ws[kReadyState]
  return readyState === states.CLOSED
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * Creates an event with the given constructor and init dictionary, then
 * dispatches it at `target`.
 * @see https://dom.spec.whatwg.org/#concept-event-fire
 * @param {string} e event type
 * @param {EventTarget} target
 * @param {EventInit | undefined} eventInitDict
 */
function fireEvent (e, target, eventConstructor = Event, eventInitDict) {
  // Create the event in the relevant realm of target with its type
  // attribute initialized to `e` (steps 1-4 of "fire an event") ...
  const Ctor = eventConstructor
  const event = new Ctor(e, eventInitDict)

  // ... then dispatch it at target (step 5).
  target.dispatchEvent(event)
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * Handles a fully-received message: converts the payload according to
 * the frame type and the socket's binaryType, then fires a "message"
 * event at the WebSocket.
 * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
 * @param {import('./websocket').WebSocket} ws
 * @param {number} type Opcode
 * @param {Buffer} data application data
 */
function websocketMessageReceived (ws, type, data) {
  // 1. If ready state is not OPEN (1), then return.
  if (ws[kReadyState] !== states.OPEN) {
    return
  }

  // 2. Let dataForEvent be determined by switching on type and binary type:
  let dataForEvent

  if (type === opcodes.TEXT) {
    // Text frames become a DOMString; invalid UTF-8 fails the connection.
    try {
      dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data)
    } catch {
      failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.')
      return
    }
  } else if (type === opcodes.BINARY) {
    dataForEvent = ws[kBinaryType] === 'blob'
      // binary type "blob": a new Blob wrapping the raw data
      ? new Blob([data])
      // binary type "arraybuffer": a fresh ArrayBuffer copy of the data
      : new Uint8Array(data).buffer
  }

  // 3. Fire an event named message at the WebSocket object, using MessageEvent,
  // with the origin attribute initialized to the serialization of the WebSocket
  // object's url's origin, and the data attribute initialized to dataForEvent.
  fireEvent('message', ws, MessageEvent, {
    origin: ws[kWebSocketURL].origin,
    data: dataForEvent
  })
}
/**
 * Validates a single `Sec-WebSocket-Protocol` token: a non-empty string
 * of visible US-ASCII characters (U+0021 to U+007E) excluding the
 * RFC 2616 separator characters.
 * @see https://datatracker.ietf.org/doc/html/rfc6455
 * @see https://datatracker.ietf.org/doc/html/rfc2616
 * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407
 * @param {string} protocol
 */
function isValidSubprotocol (protocol) {
  // Subprotocol tokens MUST be non-empty strings.
  if (protocol.length === 0) {
    return false
  }

  // RFC 2616 separators. SP (32) and HT (9) are also rejected by the
  // visible-character range check below, so they need no special case.
  const separators = '()<>@,;:\\"/[]?={}'

  for (const char of protocol) {
    const code = char.charCodeAt(0)

    if (code < 0x21 || code > 0x7E || separators.includes(char)) {
      return false
    }
  }

  return true
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * Checks whether a close status code may legally appear on the wire.
 * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.4
 * @param {number} code
 */
function isValidStatusCode (code) {
  // 3000-4999 are registered/private-use codes and always acceptable.
  if (code >= 3000 && code <= 4999) {
    return true
  }

  // 1000-1014 are protocol-defined, minus the codes that must never be
  // sent: 1004 (reserved), 1005 and 1006 ("MUST NOT be set as a status
  // code").
  return (
    code >= 1000 &&
    code < 1015 &&
    code !== 1004 &&
    code !== 1005 &&
    code !== 1006
  )
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/**
 * Fails the WebSocket connection: aborts the connection attempt,
 * destroys the underlying socket and, when a reason is given, fires an
 * "error" event at the WebSocket.
 * @param {import('./websocket').WebSocket} ws
 * @param {string|undefined} reason
 */
function failWebsocketConnection (ws, reason) {
  const controller = ws[kController]
  const response = ws[kResponse]

  controller.abort()

  const socket = response?.socket
  if (socket && !socket.destroyed) {
    socket.destroy()
  }

  if (reason) {
    fireEvent('error', ws, ErrorEvent, {
      error: new Error(reason)
    })
  }
}
2024-04-24 12:04:10 -04:00
// Shared WebSocket helpers: ready-state predicates, event dispatch,
// validation and connection-failure handling.
module.exports = {
  isEstablished,
  isClosing,
  isClosed,
  fireEvent,
  isValidSubprotocol,
  isValidStatusCode,
  failWebsocketConnection,
  websocketMessageReceived
}
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 4284 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
const { webidl } = _ _nccwpck _require _ _ ( 1744 )
const { DOMException } = _ _nccwpck _require _ _ ( 1037 )
const { URLSerializer } = _ _nccwpck _require _ _ ( 685 )
const { getGlobalOrigin } = _ _nccwpck _require _ _ ( 1246 )
const { staticPropertyDescriptors , states , opcodes , emptyBuffer } = _ _nccwpck _require _ _ ( 9188 )
const {
kWebSocketURL ,
kReadyState ,
kController ,
kBinaryType ,
kResponse ,
kSentClose ,
kByteParser
} = _ _nccwpck _require _ _ ( 7578 )
const { isEstablished , isClosing , isValidSubprotocol , failWebsocketConnection , fireEvent } = _ _nccwpck _require _ _ ( 5515 )
const { establishWebSocketConnection } = _ _nccwpck _require _ _ ( 5354 )
const { WebsocketFrameSend } = _ _nccwpck _require _ _ ( 5444 )
const { ByteParser } = _ _nccwpck _require _ _ ( 1688 )
const { kEnumerableProperty , isBlobLike } = _ _nccwpck _require _ _ ( 3983 )
const { getGlobalDispatcher } = _ _nccwpck _require _ _ ( 1892 )
const { types } = _ _nccwpck _require _ _ ( 3837 )
let experimentalWarned = false
// https://websockets.spec.whatwg.org/#interface-definition
class WebSocket extends EventTarget {
# events = {
open : null ,
error : null ,
close : null ,
message : null
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
# bufferedAmount = 0
# protocol = ''
# extensions = ''
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/ * *
* @ param { string } url
* @ param { string | string [ ] } protocols
* /
constructor ( url , protocols = [ ] ) {
super ( )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
webidl . argumentLengthCheck ( arguments , 1 , { header : 'WebSocket constructor' } )
if ( ! experimentalWarned ) {
experimentalWarned = true
process . emitWarning ( 'WebSockets are experimental, expect them to change at any time.' , {
code : 'UNDICI-WS'
} )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
const options = webidl . converters [ 'DOMString or sequence<DOMString> or WebSocketInit' ] ( protocols )
url = webidl . converters . USVString ( url )
protocols = options . protocols
// 1. Let baseURL be this's relevant settings object's API base URL.
const baseURL = getGlobalOrigin ( )
// 1. Let urlRecord be the result of applying the URL parser to url with baseURL.
let urlRecord
2023-04-12 19:55:27 +08:00
try {
2024-04-24 12:04:10 -04:00
urlRecord = new URL ( url , baseURL )
} catch ( e ) {
// 3. If urlRecord is failure, then throw a "SyntaxError" DOMException.
throw new DOMException ( e , 'SyntaxError' )
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 4. If urlRecord’ s scheme is "http", then set urlRecord’ s scheme to "ws".
if ( urlRecord . protocol === 'http:' ) {
urlRecord . protocol = 'ws:'
} else if ( urlRecord . protocol === 'https:' ) {
// 5. Otherwise, if urlRecord’ s scheme is "https", set urlRecord’ s scheme to "wss".
urlRecord . protocol = 'wss:'
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 6. If urlRecord’ s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException.
if ( urlRecord . protocol !== 'ws:' && urlRecord . protocol !== 'wss:' ) {
throw new DOMException (
` Expected a ws: or wss: protocol, got ${ urlRecord . protocol } ` ,
'SyntaxError'
)
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 7. If urlRecord’ s fragment is non-null, then throw a "SyntaxError"
// DOMException.
if ( urlRecord . hash || urlRecord . href . endsWith ( '#' ) ) {
throw new DOMException ( 'Got fragment' , 'SyntaxError' )
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 8. If protocols is a string, set protocols to a sequence consisting
// of just that string.
if ( typeof protocols === 'string' ) {
protocols = [ protocols ]
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 9. If any of the values in protocols occur more than once or otherwise
// fail to match the requirements for elements that comprise the value
// of `Sec-WebSocket-Protocol` fields as defined by The WebSocket
// protocol, then throw a "SyntaxError" DOMException.
if ( protocols . length !== new Set ( protocols . map ( p => p . toLowerCase ( ) ) ) . size ) {
throw new DOMException ( 'Invalid Sec-WebSocket-Protocol value' , 'SyntaxError' )
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( protocols . length > 0 && ! protocols . every ( p => isValidSubprotocol ( p ) ) ) {
throw new DOMException ( 'Invalid Sec-WebSocket-Protocol value' , 'SyntaxError' )
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 10. Set this's url to urlRecord.
this [ kWebSocketURL ] = new URL ( urlRecord . href )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 11. Let client be this's relevant settings object.
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 12. Run this step in parallel:
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 1. Establish a WebSocket connection given urlRecord, protocols,
// and client.
this [ kController ] = establishWebSocketConnection (
urlRecord ,
protocols ,
this ,
( response ) => this . # onConnectionEstablished ( response ) ,
options
)
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// Each WebSocket object has an associated ready state, which is a
// number representing the state of the connection. Initially it must
// be CONNECTING (0).
this [ kReadyState ] = WebSocket . CONNECTING
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// The extensions attribute must initially return the empty string.
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// The protocol attribute must initially return the empty string.
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Each WebSocket object has an associated binary type, which is a
// BinaryType. Initially it must be "blob".
this [ kBinaryType ] = 'blob'
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/ * *
* @ see https : //websockets.spec.whatwg.org/#dom-websocket-close
* @ param { number | undefined } code
* @ param { string | undefined } reason
* /
close ( code = undefined , reason = undefined ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( code !== undefined ) {
code = webidl . converters [ 'unsigned short' ] ( code , { clamp : true } )
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
if ( reason !== undefined ) {
reason = webidl . converters . USVString ( reason )
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// 1. If code is present, but is neither an integer equal to 1000 nor an
// integer in the range 3000 to 4999, inclusive, throw an
// "InvalidAccessError" DOMException.
if ( code !== undefined ) {
if ( code !== 1000 && ( code < 3000 || code > 4999 ) ) {
throw new DOMException ( 'invalid code' , 'InvalidAccessError' )
}
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
let reasonByteLength = 0
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// 2. If reason is present, then run these substeps:
if ( reason !== undefined ) {
// 1. Let reasonBytes be the result of encoding reason.
// 2. If reasonBytes is longer than 123 bytes, then throw a
// "SyntaxError" DOMException.
reasonByteLength = Buffer . byteLength ( reason )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
if ( reasonByteLength > 123 ) {
throw new DOMException (
` Reason must be less than 123 bytes; received ${ reasonByteLength } ` ,
'SyntaxError'
)
}
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// 3. Run the first matching steps from the following list:
if ( this [ kReadyState ] === WebSocket . CLOSING || this [ kReadyState ] === WebSocket . CLOSED ) {
// If this's ready state is CLOSING (2) or CLOSED (3)
// Do nothing.
} else if ( ! isEstablished ( this ) ) {
// If the WebSocket connection is not yet established
// Fail the WebSocket connection and set this's ready state
// to CLOSING (2).
failWebsocketConnection ( this , 'Connection was closed before it was established.' )
this [ kReadyState ] = WebSocket . CLOSING
} else if ( ! isClosing ( this ) ) {
// If the WebSocket closing handshake has not yet been started
// Start the WebSocket closing handshake and set this's ready
// state to CLOSING (2).
// - If neither code nor reason is present, the WebSocket Close
// message must not have a body.
// - If code is present, then the status code to use in the
// WebSocket Close message must be the integer given by code.
// - If reason is also present, then reasonBytes must be
// provided in the Close message after the status code.
const frame = new WebsocketFrameSend ( )
// If neither code nor reason is present, the WebSocket Close
// message must not have a body.
// If code is present, then the status code to use in the
// WebSocket Close message must be the integer given by code.
if ( code !== undefined && reason === undefined ) {
frame . frameData = Buffer . allocUnsafe ( 2 )
frame . frameData . writeUInt16BE ( code , 0 )
} else if ( code !== undefined && reason !== undefined ) {
// If reason is also present, then reasonBytes must be
// provided in the Close message after the status code.
frame . frameData = Buffer . allocUnsafe ( 2 + reasonByteLength )
frame . frameData . writeUInt16BE ( code , 0 )
// the body MAY contain UTF-8-encoded data with value /reason/
frame . frameData . write ( reason , 2 , 'utf-8' )
} else {
frame . frameData = emptyBuffer
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/** @type {import('stream').Duplex} */
const socket = this [ kResponse ] . socket
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
socket . write ( frame . createFrame ( opcodes . CLOSE ) , ( err ) => {
if ( ! err ) {
this [ kSentClose ] = true
}
} )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// Upon either sending or receiving a Close control frame, it is said
// that _The WebSocket Closing Handshake is Started_ and that the
// WebSocket connection is in the CLOSING state.
this [ kReadyState ] = states . CLOSING
} else {
// Otherwise
// Set this's ready state to CLOSING (2).
this [ kReadyState ] = WebSocket . CLOSING
}
2023-03-09 17:42:29 +01:00
}
2024-04-24 12:04:10 -04:00
/ * *
* @ see https : //websockets.spec.whatwg.org/#dom-websocket-send
* @ param { NodeJS . TypedArray | ArrayBuffer | Blob | string } data
* /
send ( data ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
webidl . argumentLengthCheck ( arguments , 1 , { header : 'WebSocket.send' } )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
data = webidl . converters . WebSocketSendData ( data )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 1. If this's ready state is CONNECTING, then throw an
// "InvalidStateError" DOMException.
if ( this [ kReadyState ] === WebSocket . CONNECTING ) {
throw new DOMException ( 'Sent before connected.' , 'InvalidStateError' )
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 2. Run the appropriate set of steps from the following list:
// https://datatracker.ietf.org/doc/html/rfc6455#section-6.1
// https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( ! isEstablished ( this ) || isClosing ( this ) ) {
return
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/** @type {import('stream').Duplex} */
const socket = this [ kResponse ] . socket
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// If data is a string
if ( typeof data === 'string' ) {
// If the WebSocket connection is established and the WebSocket
// closing handshake has not yet started, then the user agent
// must send a WebSocket Message comprised of the data argument
// using a text frame opcode; if the data cannot be sent, e.g.
// because it would need to be buffered but the buffer is full,
// the user agent must flag the WebSocket as full and then close
// the WebSocket connection. Any invocation of this method with a
// string argument that does not throw an exception must increase
// the bufferedAmount attribute by the number of bytes needed to
// express the argument as UTF-8.
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
const value = Buffer . from ( data )
const frame = new WebsocketFrameSend ( value )
const buffer = frame . createFrame ( opcodes . TEXT )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
this . # bufferedAmount += value . byteLength
socket . write ( buffer , ( ) => {
this . # bufferedAmount -= value . byteLength
} )
} else if ( types . isArrayBuffer ( data ) ) {
// If the WebSocket connection is established, and the WebSocket
// closing handshake has not yet started, then the user agent must
// send a WebSocket Message comprised of data using a binary frame
// opcode; if the data cannot be sent, e.g. because it would need
// to be buffered but the buffer is full, the user agent must flag
// the WebSocket as full and then close the WebSocket connection.
// The data to be sent is the data stored in the buffer described
// by the ArrayBuffer object. Any invocation of this method with an
// ArrayBuffer argument that does not throw an exception must
// increase the bufferedAmount attribute by the length of the
// ArrayBuffer in bytes.
const value = Buffer . from ( data )
const frame = new WebsocketFrameSend ( value )
const buffer = frame . createFrame ( opcodes . BINARY )
this . # bufferedAmount += value . byteLength
socket . write ( buffer , ( ) => {
this . # bufferedAmount -= value . byteLength
} )
} else if ( ArrayBuffer . isView ( data ) ) {
// If the WebSocket connection is established, and the WebSocket
// closing handshake has not yet started, then the user agent must
// send a WebSocket Message comprised of data using a binary frame
// opcode; if the data cannot be sent, e.g. because it would need to
// be buffered but the buffer is full, the user agent must flag the
// WebSocket as full and then close the WebSocket connection. The
// data to be sent is the data stored in the section of the buffer
// described by the ArrayBuffer object that data references. Any
// invocation of this method with this kind of argument that does
// not throw an exception must increase the bufferedAmount attribute
// by the length of data’ s buffer in bytes.
const ab = Buffer . from ( data , data . byteOffset , data . byteLength )
const frame = new WebsocketFrameSend ( ab )
const buffer = frame . createFrame ( opcodes . BINARY )
this . # bufferedAmount += ab . byteLength
socket . write ( buffer , ( ) => {
this . # bufferedAmount -= ab . byteLength
} )
} else if ( isBlobLike ( data ) ) {
// If the WebSocket connection is established, and the WebSocket
// closing handshake has not yet started, then the user agent must
// send a WebSocket Message comprised of data using a binary frame
// opcode; if the data cannot be sent, e.g. because it would need to
// be buffered but the buffer is full, the user agent must flag the
// WebSocket as full and then close the WebSocket connection. The data
// to be sent is the raw data represented by the Blob object. Any
// invocation of this method with a Blob argument that does not throw
// an exception must increase the bufferedAmount attribute by the size
// of the Blob object’ s raw data, in bytes.
const frame = new WebsocketFrameSend ( )
data . arrayBuffer ( ) . then ( ( ab ) => {
const value = Buffer . from ( ab )
frame . frameData = value
const buffer = frame . createFrame ( opcodes . BINARY )
this . # bufferedAmount += value . byteLength
socket . write ( buffer , ( ) => {
this . # bufferedAmount -= value . byteLength
} )
} )
}
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get readyState ( ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// The readyState getter steps are to return this's ready state.
return this [ kReadyState ]
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get bufferedAmount ( ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # bufferedAmount
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get url ( ) {
webidl . brandCheck ( this , WebSocket )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
// The url getter steps are to return this's url, serialized.
return URLSerializer ( this [ kWebSocketURL ] )
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
get extensions ( ) {
webidl . brandCheck ( this , WebSocket )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
return this . # extensions
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
get protocol ( ) {
webidl . brandCheck ( this , WebSocket )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
return this . # protocol
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
get onopen ( ) {
webidl . brandCheck ( this , WebSocket )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
return this . # events . open
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
set onopen ( fn ) {
webidl . brandCheck ( this , WebSocket )
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
if ( this . # events . open ) {
this . removeEventListener ( 'open' , this . # events . open )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
if ( typeof fn === 'function' ) {
this . # events . open = fn
this . addEventListener ( 'open' , fn )
} else {
this . # events . open = null
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get onerror ( ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # events . error
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
set onerror ( fn ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( this . # events . error ) {
this . removeEventListener ( 'error' , this . # events . error )
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( typeof fn === 'function' ) {
this . # events . error = fn
this . addEventListener ( 'error' , fn )
} else {
this . # events . error = null
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get onclose ( ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # events . close
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
set onclose ( fn ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( this . # events . close ) {
this . removeEventListener ( 'close' , this . # events . close )
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( typeof fn === 'function' ) {
this . # events . close = fn
this . addEventListener ( 'close' , fn )
} else {
this . # events . close = null
2023-03-09 17:42:29 +01:00
}
2024-04-24 12:04:10 -04:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get onmessage ( ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this . # events . message
}
set onmessage ( fn ) {
webidl . brandCheck ( this , WebSocket )
if ( this . # events . message ) {
this . removeEventListener ( 'message' , this . # events . message )
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( typeof fn === 'function' ) {
this . # events . message = fn
this . addEventListener ( 'message' , fn )
} else {
this . # events . message = null
}
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
get binaryType ( ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
return this [ kBinaryType ]
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
set binaryType ( type ) {
webidl . brandCheck ( this , WebSocket )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( type !== 'blob' && type !== 'arraybuffer' ) {
this [ kBinaryType ] = 'blob'
} else {
this [ kBinaryType ] = type
}
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/ * *
* @ see https : //websockets.spec.whatwg.org/#feedback-from-the-protocol
* /
# onConnectionEstablished ( response ) {
// processResponse is called when the "response’ s header list has been received and initialized."
// once this happens, the connection is open
this [ kResponse ] = response
const parser = new ByteParser ( this )
parser . on ( 'drain' , function onParserDrain ( ) {
this . ws [ kResponse ] . socket . resume ( )
} )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
response . socket . ws = this
this [ kByteParser ] = parser
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 1. Change the ready state to OPEN (1).
this [ kReadyState ] = states . OPEN
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 2. Change the extensions attribute’ s value to the extensions in use, if
// it is not the null value.
// https://datatracker.ietf.org/doc/html/rfc6455#section-9.1
const extensions = response . headersList . get ( 'sec-websocket-extensions' )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( extensions !== null ) {
this . # extensions = extensions
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 3. Change the protocol attribute’ s value to the subprotocol in use, if
// it is not the null value.
// https://datatracker.ietf.org/doc/html/rfc6455#section-1.9
const protocol = response . headersList . get ( 'sec-websocket-protocol' )
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
if ( protocol !== null ) {
this . # protocol = protocol
2023-04-12 19:55:27 +08:00
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// 4. Fire an event named open at the WebSocket object.
fireEvent ( 'open' , this )
}
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
// Ready-state constants are exposed both on the constructor and on
// instances, per https://websockets.spec.whatwg.org/#interface-definition
for (const state of ['CONNECTING', 'OPEN', 'CLOSING', 'CLOSED']) {
  WebSocket[state] = WebSocket.prototype[state] = states[state]
}

// The same descriptors are applied to the prototype and the constructor.
const readyStateDescriptors = {
  CONNECTING: staticPropertyDescriptors,
  OPEN: staticPropertyDescriptors,
  CLOSING: staticPropertyDescriptors,
  CLOSED: staticPropertyDescriptors
}

Object.defineProperties(WebSocket.prototype, {
  ...readyStateDescriptors,
  url: kEnumerableProperty,
  readyState: kEnumerableProperty,
  bufferedAmount: kEnumerableProperty,
  onopen: kEnumerableProperty,
  onerror: kEnumerableProperty,
  onclose: kEnumerableProperty,
  close: kEnumerableProperty,
  onmessage: kEnumerableProperty,
  binaryType: kEnumerableProperty,
  send: kEnumerableProperty,
  extensions: kEnumerableProperty,
  protocol: kEnumerableProperty,
  [Symbol.toStringTag]: {
    value: 'WebSocket',
    writable: false,
    enumerable: false,
    configurable: true
  }
})

Object.defineProperties(WebSocket, readyStateDescriptors)
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
webidl.converters['sequence<DOMString>'] = webidl.sequenceConverter(
  webidl.converters.DOMString
)

// Iterable objects are treated as a sequence; everything else as a DOMString.
webidl.converters['DOMString or sequence<DOMString>'] = function (V) {
  const isIterableObject = webidl.util.Type(V) === 'Object' && Symbol.iterator in V

  return isIterableObject
    ? webidl.converters['sequence<DOMString>'](V)
    : webidl.converters.DOMString(V)
}

// This implements the proposal made in https://github.com/whatwg/websockets/issues/42
webidl.converters.WebSocketInit = webidl.dictionaryConverter([
  {
    key: 'protocols',
    converter: webidl.converters['DOMString or sequence<DOMString>'],
    get defaultValue () {
      return []
    }
  },
  {
    key: 'dispatcher',
    // Passed through unchanged; defaults to the global dispatcher.
    converter: (V) => V,
    get defaultValue () {
      return getGlobalDispatcher()
    }
  },
  {
    key: 'headers',
    converter: webidl.nullableConverter(webidl.converters.HeadersInit)
  }
])

// Plain (non-iterable) objects are a WebSocketInit dictionary; anything
// else is interpreted as the protocols argument.
webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'] = function (V) {
  if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) {
    return webidl.converters.WebSocketInit(V)
  }

  return { protocols: webidl.converters['DOMString or sequence<DOMString>'](V) }
}
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
webidl . converters . WebSocketSendData = function ( V ) {
if ( webidl . util . Type ( V ) === 'Object' ) {
if ( isBlobLike ( V ) ) {
return webidl . converters . Blob ( V , { strict : false } )
2023-03-09 17:42:29 +01:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
if ( ArrayBuffer . isView ( V ) || types . isAnyArrayBuffer ( V ) ) {
return webidl . converters . BufferSource ( V )
2023-04-12 19:55:27 +08:00
}
2024-04-24 12:04:10 -04:00
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
return webidl . converters . USVString ( V )
}
module . exports = {
WebSocket
}
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 5030 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
2023-03-09 17:42:29 +01:00
2023-04-12 19:55:27 +08:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2020-01-27 10:21:50 -05:00
2024-04-24 12:04:10 -04:00
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
2020-01-27 10:21:50 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Best-effort user-agent string for the current JavaScript runtime.
 *
 * @returns {string} `navigator.userAgent` when available, otherwise a
 *   synthesized `Node.js/<version> (<platform>; <arch>)` string, otherwise
 *   the literal "<environment undetectable>".
 */
function getUserAgent() {
  if (typeof navigator === "object" && "userAgent" in navigator) {
    return navigator.userAgent;
  }

  if (typeof process === "object" && process.version !== undefined) {
    // slice(1) drops the leading "v" of process.version (e.g. "v20.1.0").
    // (slice replaces the deprecated Annex-B String.prototype.substr.)
    return `Node.js/${process.version.slice(1)} (${process.platform}; ${process.arch})`;
  }

  return "<environment undetectable>";
}
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
exports . getUserAgent = getUserAgent ;
//# sourceMappingURL=index.js.map
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
/***/ 5840 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object.defineProperty(exports, "__esModule", {
  value: true
});

// Lazily re-export the public uuid API. The getters close over the module
// vars declared below; `var` hoisting makes the forward references safe,
// and defining the exports before requiring preserves the original
// behavior under circular requires.
const lazyExports = {
  NIL: () => _nil.default,
  parse: () => _parse.default,
  stringify: () => _stringify.default,
  v1: () => _v.default,
  v3: () => _v2.default,
  v4: () => _v3.default,
  v5: () => _v4.default,
  validate: () => _validate.default,
  version: () => _version.default
};

for (const name of Object.keys(lazyExports)) {
  Object.defineProperty(exports, name, {
    enumerable: true,
    get: lazyExports[name]
  });
}

var _v = _interopRequireDefault(__nccwpck_require__(8628));

var _v2 = _interopRequireDefault(__nccwpck_require__(6409));

var _v3 = _interopRequireDefault(__nccwpck_require__(5122));

var _v4 = _interopRequireDefault(__nccwpck_require__(9120));

var _nil = _interopRequireDefault(__nccwpck_require__(5332));

var _version = _interopRequireDefault(__nccwpck_require__(2414));

var _validate = _interopRequireDefault(__nccwpck_require__(6900));

var _stringify = _interopRequireDefault(__nccwpck_require__(8950));

var _parse = _interopRequireDefault(__nccwpck_require__(2746));

// Babel CJS interop: wrap plain CommonJS exports in a { default } shell.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 4569 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-04-12 19:55:27 +08:00
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

var _crypto = _interopRequireDefault(__nccwpck_require__(6113));

// Babel CJS interop: wrap plain CommonJS exports in a { default } shell.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

/**
 * Compute the MD5 digest of the given bytes.
 *
 * @param {Array<number>|string|Buffer|Uint8Array} bytes - byte array, UTF-8
 *   string, or buffer-like input
 * @returns {Buffer} 16-byte MD5 digest
 */
function md5(bytes) {
  let input = bytes;

  if (Array.isArray(input)) {
    input = Buffer.from(input);
  } else if (typeof input === 'string') {
    input = Buffer.from(input, 'utf8');
  }

  return _crypto.default.createHash('md5').update(input).digest();
}

var _default = md5;
exports["default"] = _default;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
/***/ 2054 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

var _crypto = _interopRequireDefault(__nccwpck_require__(6113));

// Babel CJS interop: wrap plain CommonJS exports in a { default } shell.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Expose the runtime's native crypto.randomUUID (may be undefined on old
// Node versions; callers are expected to check before using it).
var _default = {
  randomUUID: _crypto.default.randomUUID
};
exports["default"] = _default;
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 5332 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
2019-12-03 10:28:59 -05:00
2023-04-12 19:55:27 +08:00
"use strict" ;
2024-04-24 12:04:10 -04:00
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

// The "nil" UUID: all 128 bits zero.
var _default = '00000000-0000-0000-0000-000000000000';
exports["default"] = _default;
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2023-04-12 19:55:27 +08:00
2024-04-24 12:04:10 -04:00
/***/ 2746 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
2024-04-24 12:04:10 -04:00
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

var _validate = _interopRequireDefault(__nccwpck_require__(6900));

// Babel CJS interop: wrap plain CommonJS exports in a { default } shell.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

/**
 * Parse a canonical UUID string into its 16-byte form.
 *
 * @param {string} uuid - UUID string; validated before parsing
 * @returns {Uint8Array} the 16 bytes of the UUID, big-endian field order
 * @throws {TypeError} when the input is not a valid UUID
 */
function parse(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }

  let v;
  const arr = new Uint8Array(16);

  // Parse ########-....-....-....-............
  arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
  arr[1] = v >>> 16 & 0xff;
  arr[2] = v >>> 8 & 0xff;
  arr[3] = v & 0xff;

  // Parse ........-####-....-....-............
  arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
  arr[5] = v & 0xff;

  // Parse ........-....-####-....-............
  arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
  arr[7] = v & 0xff;

  // Parse ........-....-....-####-............
  arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
  arr[9] = v & 0xff;

  // Parse ........-....-....-....-############
  // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
  arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
  arr[11] = v / 0x100000000 & 0xff;
  arr[12] = v >>> 24 & 0xff;
  arr[13] = v >>> 16 & 0xff;
  arr[14] = v >>> 8 & 0xff;
  arr[15] = v & 0xff;

  return arr;
}

var _default = parse;
exports["default"] = _default;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 814 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

// Matches a canonical UUID: version nibble 1-5, variant nibble 8/9/a/b,
// or the all-zero nil UUID; case-insensitive.
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
exports["default"] = _default;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 807 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = rng;

var _crypto = _interopRequireDefault(__nccwpck_require__(6113));

// Babel CJS interop: wrap plain CommonJS exports in a { default } shell.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Pool of pre-generated random bytes, refilled in 256-byte batches so each
// rng() call doesn't pay the cost of a crypto syscall.
const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate

let poolPtr = rnds8Pool.length; // start exhausted so the first call refills

/**
 * Returns 16 cryptographically strong random bytes.
 *
 * @returns {Uint8Array} a fresh 16-byte slice (copy) of the pool
 */
function rng() {
  if (poolPtr > rnds8Pool.length - 16) {
    _crypto.default.randomFillSync(rnds8Pool);

    poolPtr = 0;
  }

  return rnds8Pool.slice(poolPtr, poolPtr += 16);
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 5274 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

var _crypto = _interopRequireDefault(__nccwpck_require__(6113));

// Babel CJS interop: wrap plain CommonJS exports in a { default } shell.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

/**
 * Compute the SHA-1 digest of the given bytes.
 *
 * @param {Array<number>|string|Buffer|Uint8Array} bytes - byte array, UTF-8
 *   string, or buffer-like input
 * @returns {Buffer} 20-byte SHA-1 digest
 */
function sha1(bytes) {
  let input = bytes;

  if (Array.isArray(input)) {
    input = Buffer.from(input);
  } else if (typeof input === 'string') {
    input = Buffer.from(input, 'utf8');
  }

  return _crypto.default.createHash('sha1').update(input).digest();
}

var _default = sha1;
exports["default"] = _default;
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/***/ 8950 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2019-12-03 10:28:59 -05:00
2023-03-09 17:42:29 +01:00
"use strict" ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
exports . unsafeStringify = unsafeStringify ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Hex lookup table: byteToHex[b] is the two-character lowercase hex string
 * for byte value b (0-255).
 */
const byteToHex = Array.from({ length: 256 }, (_, i) => (i + 0x100).toString(16).slice(1));

/**
 * Convert 16 bytes starting at `offset` into UUID string format of the
 * form XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX. Performs no validation of
 * the produced string (hence "unsafe").
 *
 * @param {ArrayLike<number>} arr - byte source
 * @param {number} [offset=0] - index of the first byte to read
 * @returns {string} lowercase UUID-shaped string
 */
function unsafeStringify(arr, offset = 0) {
  // Note: Be careful editing this code! It's been tuned for performance
  // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
  return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]];
}
2019-12-03 10:28:59 -05:00
2024-04-24 12:04:10 -04:00
/**
 * Safe variant of unsafeStringify(): serializes 16 bytes to UUID text and
 * validates the result before returning it.
 *
 * Fix: removed stray VCS timestamp lines that corrupted the bundle.
 *
 * @param {ArrayLike<number>} arr - source bytes
 * @param {number} [offset=0] - index of the first byte to read
 * @returns {string} validated lowercase UUID string
 * @throws {TypeError} if the serialized text is not a valid UUID
 */
function stringify(arr, offset = 0) {
  const uuid = unsafeStringify(arr, offset);

  // Consistency check for valid UUID. If this throws, it's likely due to one
  // of the following:
  // - One or more input array values don't map to a hex octet (leading to
  //   "undefined" in the uuid)
  // - Invalid input values for the RFC `version` or `variant` fields
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Stringified UUID is invalid');
  }

  return uuid;
}

var _default = stringify;
exports["default"] = _default;
2019-12-03 10:28:59 -05:00
2021-10-19 10:05:28 -05:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 8628 :
2023-03-09 17:42:29 +01:00
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2021-10-19 10:05:28 -05:00
"use strict" ;
2024-04-24 12:04:10 -04:00
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 807 ) ) ;
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
var _stringify = _ _nccwpck _require _ _ ( 8950 ) ;
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
2021-10-19 10:05:28 -05:00
2024-04-24 12:04:10 -04:00
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
//
// Fix: removed stray VCS timestamp lines that corrupted the bundle.

// Lazily-seeded module state shared across calls:
// 48-bit node id and 14-bit clock sequence (both random unless supplied).
let _nodeId;
let _clockseq;

// Previous uuid creation time: integer milliseconds since the unix epoch
// plus a 100ns-unit counter used to disambiguate same-millisecond calls.
let _lastMSecs = 0;
let _lastNSecs = 0;

// See https://github.com/uuidjs/uuid for API details
function v1(options, buf, offset) {
  let i = buf && offset || 0;
  const b = buf || new Array(16);
  options = options || {};
  let node = options.node || _nodeId;
  let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq;

  // node and clockseq need to be initialized to random values if they're not
  // specified. We do this lazily to minimize issues related to insufficient
  // system entropy. See #189
  if (node == null || clockseq == null) {
    const seedBytes = options.random || (options.rng || _rng.default)();

    if (node == null) {
      // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
      node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
    }

    if (clockseq == null) {
      // Per 4.2.2, randomize (14 bit) clockseq
      clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
    }
  }

  // UUID timestamps are 100 nano-second units since the Gregorian epoch,
  // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
  // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
  // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
  let msecs = options.msecs !== undefined ? options.msecs : Date.now();

  // Per 4.2.1.2, use count of uuid's generated during the current clock
  // cycle to simulate higher resolution clock
  let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1;

  // Time since last uuid creation (in msecs)
  const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000;

  // Per 4.2.1.2, Bump clockseq on clock regression
  if (dt < 0 && options.clockseq === undefined) {
    clockseq = clockseq + 1 & 0x3fff;
  }

  // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
  // time interval
  if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
    nsecs = 0;
  }

  // Per 4.2.1.2 Throw error if too many uuids are requested
  if (nsecs >= 10000) {
    throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
  }

  _lastMSecs = msecs;
  _lastNSecs = nsecs;
  _clockseq = clockseq;

  // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
  msecs += 12219292800000;

  // `time_low`
  const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
  b[i++] = tl >>> 24 & 0xff;
  b[i++] = tl >>> 16 & 0xff;
  b[i++] = tl >>> 8 & 0xff;
  b[i++] = tl & 0xff;

  // `time_mid`
  const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
  b[i++] = tmh >>> 8 & 0xff;
  b[i++] = tmh & 0xff;

  // `time_high_and_version`
  b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
  b[i++] = tmh >>> 16 & 0xff;

  // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
  b[i++] = clockseq >>> 8 | 0x80;

  // `clock_seq_low`
  b[i++] = clockseq & 0xff;

  // `node`
  for (let n = 0; n < 6; ++n) {
    b[i + n] = node[n];
  }

  return buf || (0, _stringify.unsafeStringify)(b);
}

var _default = v1;
exports["default"] = _default;
2019-12-03 10:28:59 -05:00
2022-10-03 18:04:49 +01:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 6409 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
2022-10-03 18:04:49 +01:00
"use strict" ;
2024-04-24 12:04:10 -04:00
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault(__nccwpck_require__(5998));
var _md = _interopRequireDefault(__nccwpck_require__(4569));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// v3 = name-based UUID built from the v35 factory with the MD5 hash
// implementation and version nibble 0x30.
const v3 = (0, _v.default)('v3', 0x30, _md.default);
var _default = v3;
exports["default"] = _default;
/***/ } ) ,
/***/ 5998 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports . URL = exports . DNS = void 0 ;
exports [ "default" ] = v35 ;
var _stringify = _ _nccwpck _require _ _ ( 8950 ) ;
var _parse = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2746 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * UTF-8 encode a string into an array of byte values.
 *
 * Fix (module-wide): removed stray VCS timestamp lines that corrupted the
 * bundle.
 *
 * @param {string} str
 * @returns {number[]} one byte value per UTF-8 code unit
 */
function stringToBytes(str) {
  str = unescape(encodeURIComponent(str)); // UTF8 escape
  const bytes = [];
  for (let i = 0; i < str.length; ++i) {
    bytes.push(str.charCodeAt(i));
  }
  return bytes;
}

// Well-known namespace UUIDs (DNS / URL).
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
exports.DNS = DNS;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
exports.URL = URL;

/**
 * Factory for the name-based UUID generators (v3/MD5, v5/SHA-1).
 *
 * @param {string} name - function name to expose ('v3' or 'v5')
 * @param {number} version - version nibble shifted into byte 6 (0x30 or 0x50)
 * @param {Function} hashfunc - hash of the concatenated namespace+name bytes
 * @returns {Function} generateUUID(value, namespace[, buf[, offset]])
 */
function v35(name, version, hashfunc) {
  function generateUUID(value, namespace, buf, offset) {
    var _namespace;
    if (typeof value === 'string') {
      value = stringToBytes(value);
    }
    if (typeof namespace === 'string') {
      namespace = (0, _parse.default)(namespace);
    }
    // Transpiled optional-chaining: namespace?.length !== 16
    if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) {
      throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
    }

    // Compute hash of namespace and value, Per 4.3
    // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
    // hashfunc([...namespace, ... value])`
    let bytes = new Uint8Array(16 + value.length);
    bytes.set(namespace);
    bytes.set(value, namespace.length);
    bytes = hashfunc(bytes);

    // Stamp the version nibble and the RFC 4122 variant bits.
    bytes[6] = bytes[6] & 0x0f | version;
    bytes[8] = bytes[8] & 0x3f | 0x80;

    if (buf) {
      offset = offset || 0;
      for (let i = 0; i < 16; ++i) {
        buf[offset + i] = bytes[i];
      }
      return buf;
    }
    return (0, _stringify.unsafeStringify)(bytes);
  }

  // Function#name is not settable on some platforms (#270)
  try {
    generateUUID.name = name; // eslint-disable-next-line no-empty
  } catch (err) {}

  // For CommonJS default export support
  generateUUID.DNS = DNS;
  generateUUID.URL = URL;
  return generateUUID;
}
/***/ } ) ,
/***/ 5122 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _native = _interopRequireDefault ( _ _nccwpck _require _ _ ( 2054 ) ) ;
var _rng = _interopRequireDefault ( _ _nccwpck _require _ _ ( 807 ) ) ;
var _stringify = _ _nccwpck _require _ _ ( 8950 ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Generate a random (version 4) UUID.
 *
 * Fast path: with no options and no output buffer, defer to the native
 * crypto.randomUUID when available. Otherwise fill 16 random bytes, stamp
 * the version/variant bits, and either copy into `buf` or return a string.
 *
 * Fix: removed stray VCS timestamp lines that corrupted the bundle.
 *
 * @param {object} [options] - { random: bytes } or { rng: fn } overrides
 * @param {Array|Uint8Array} [buf] - optional output buffer
 * @param {number} [offset] - write position within `buf`
 * @returns {string|Array|Uint8Array} UUID string, or `buf` when provided
 */
function v4(options, buf, offset) {
  if (_native.default.randomUUID && !buf && !options) {
    return _native.default.randomUUID();
  }
  options = options || {};
  const rnds = options.random || (options.rng || _rng.default)();

  // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
  rnds[6] = rnds[6] & 0x0f | 0x40;
  rnds[8] = rnds[8] & 0x3f | 0x80;

  // Copy bytes to buffer, if provided
  if (buf) {
    offset = offset || 0;
    for (let i = 0; i < 16; ++i) {
      buf[offset + i] = rnds[i];
    }
    return buf;
  }
  return (0, _stringify.unsafeStringify)(rnds);
}
var _default = v4;
exports["default"] = _default;
/***/ } ) ,
/***/ 9120 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _v = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5998 ) ) ;
var _sha = _interopRequireDefault ( _ _nccwpck _require _ _ ( 5274 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v5 = ( 0 , _v . default ) ( 'v5' , 0x50 , _sha . default ) ;
var _default = v5 ;
exports [ "default" ] = _default ;
/***/ } ) ,
/***/ 6900 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _regex = _interopRequireDefault ( _ _nccwpck _require _ _ ( 814 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Check whether a value is a valid UUID string.
 *
 * @param {*} uuid - candidate value (any type)
 * @returns {boolean} true iff `uuid` is a string matching the UUID pattern
 */
function validate(uuid) {
  if (typeof uuid !== 'string') {
    return false;
  }
  return _regex.default.test(uuid);
}
var _default = validate;
exports["default"] = _default;
/***/ } ) ,
/***/ 2414 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( {
value : true
} ) ) ;
exports [ "default" ] = void 0 ;
var _validate = _interopRequireDefault ( _ _nccwpck _require _ _ ( 6900 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Extract the version field from a UUID string.
 *
 * @param {string} uuid - UUID text
 * @returns {number} version digit (hex character at index 14)
 * @throws {TypeError} if `uuid` fails validation
 */
function version(uuid) {
  if (!(0, _validate.default)(uuid)) {
    throw TypeError('Invalid UUID');
  }
  // Index 14 is the first character after "xxxxxxxx-xxxx-", i.e. the
  // version nibble of the time_high_and_version field.
  return Number.parseInt(uuid.charAt(14), 16);
}
var _default = version;
exports["default"] = _default;
/***/ } ) ,
/***/ 2940 :
/***/ ( ( module ) => {
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module . exports = wrappy
/**
 * Wrap a callback-wrapping function `fn` such that:
 *  - own properties of `fn` are copied onto the returned wrapper, and
 *  - when `fn`'s return value is a new function, own properties of the
 *    last argument (the callback) are copied onto that return value.
 * This preserves decorations through the wrapping, per the module comment.
 *
 * Fix: removed stray VCS timestamp lines that corrupted the bundle.
 *
 * @param {Function} fn - the wrapper-producing function
 * @param {Function} [cb] - shorthand: wrappy(fn, cb) === wrappy(fn)(cb)
 * @returns {Function} decorated wrapper
 * @throws {TypeError} when `fn` is not a function
 */
function wrappy (fn, cb) {
  // Two-argument shorthand form.
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  // Carry fn's own decorations over to the wrapper.
  Object.keys(fn).forEach(function (k) {
    wrapper[k] = fn[k]
  })

  return wrapper

  function wrapper () {
    var args = new Array(arguments.length)
    for (var i = 0; i < args.length; i++) {
      args[i] = arguments[i]
    }
    var ret = fn.apply(this, args)
    var cb = args[args.length - 1]
    // If fn produced a new callback, copy the original callback's
    // decorations onto it.
    if (typeof ret === 'function' && ret !== cb) {
      Object.keys(cb).forEach(function (k) {
        ret[k] = cb[k]
      })
    }
    return ret
  }
}
/***/ } ) ,
/***/ 9491 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "assert" ) ;
/***/ } ) ,
/***/ 852 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "async_hooks" ) ;
/***/ } ) ,
/***/ 4300 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "buffer" ) ;
/***/ } ) ,
/***/ 2081 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "child_process" ) ;
/***/ } ) ,
/***/ 6206 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "console" ) ;
/***/ } ) ,
/***/ 6113 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "crypto" ) ;
/***/ } ) ,
/***/ 7643 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "diagnostics_channel" ) ;
/***/ } ) ,
/***/ 2361 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 7147 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 3685 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 5158 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "http2" ) ;
/***/ } ) ,
/***/ 5687 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 1808 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 5673 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "node:events" ) ;
/***/ } ) ,
/***/ 4492 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "node:stream" ) ;
/***/ } ) ,
/***/ 7261 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "node:util" ) ;
/***/ } ) ,
/***/ 2037 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 1017 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 4074 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "perf_hooks" ) ;
/***/ } ) ,
/***/ 3477 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "querystring" ) ;
/***/ } ) ,
/***/ 2781 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "stream" ) ;
/***/ } ) ,
/***/ 5356 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "stream/web" ) ;
/***/ } ) ,
/***/ 1576 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "string_decoder" ) ;
/***/ } ) ,
/***/ 9512 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "timers" ) ;
/***/ } ) ,
/***/ 4404 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "tls" ) ;
/***/ } ) ,
/***/ 7310 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "url" ) ;
/***/ } ) ,
/***/ 3837 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 9830 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "util/types" ) ;
/***/ } ) ,
/***/ 1267 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "worker_threads" ) ;
/***/ } ) ,
/***/ 9796 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = require ( "zlib" ) ;
/***/ } ) ,
/***/ 2960 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const WritableStream = (__nccwpck_require__(4492).Writable)
const inherits = (__nccwpck_require__(7261).inherits)
const StreamSearch = __nccwpck_require__(1142)
const PartStream = __nccwpck_require__(1620)
const HeaderParser = __nccwpck_require__(2032)
const DASH = 45 // ASCII code of '-'
const B_ONEDASH = Buffer.from('-') // single dash carried over between chunks
const B_CRLF = Buffer.from('\r\n')
const EMPTY_FN = function () {} // no-op error listener used when ignoring a part
function Dicer ( cfg ) {
if ( ! ( this instanceof Dicer ) ) { return new Dicer ( cfg ) }
WritableStream . call ( this , cfg )
if ( ! cfg || ( ! cfg . headerFirst && typeof cfg . boundary !== 'string' ) ) { throw new TypeError ( 'Boundary required' ) }
if ( typeof cfg . boundary === 'string' ) { this . setBoundary ( cfg . boundary ) } else { this . _bparser = undefined }
this . _headerFirst = cfg . headerFirst
this . _dashes = 0
this . _parts = 0
this . _finished = false
this . _realFinish = false
this . _isPreamble = true
this . _justMatched = false
this . _firstWrite = true
this . _inHeader = true
this . _part = undefined
this . _cb = undefined
this . _ignoreData = false
this . _partOpts = { highWaterMark : cfg . partHwm }
this . _pause = false
const self = this
this . _hparser = new HeaderParser ( cfg )
this . _hparser . on ( 'header' , function ( header ) {
self . _inHeader = false
self . _part . emit ( 'header' , header )
} )
}
inherits ( Dicer , WritableStream )
// Intercept 'finish': if the stream ends before the terminating boundary was
// seen, surface an 'Unexpected end of multipart data' error (and error out any
// in-flight part) instead of finishing silently. `_realFinish` gates the real
// 'finish' emission. All other events pass straight through.
//
// Fix: removed stray VCS timestamp lines that corrupted the bundle.
Dicer.prototype.emit = function (ev) {
  if (ev === 'finish' && !this._realFinish) {
    if (!this._finished) {
      const self = this
      process.nextTick(function () {
        self.emit('error', new Error('Unexpected end of multipart data'))
        if (self._part && !self._ignoreData) {
          const type = (self._isPreamble ? 'Preamble' : 'Part')
          self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
          self._part.push(null)
          process.nextTick(function () {
            self._realFinish = true
            self.emit('finish')
            self._realFinish = false
          })
          return
        }
        self._realFinish = true
        self.emit('finish')
        self._realFinish = false
      })
    }
  } else { WritableStream.prototype.emit.apply(this, arguments) }
}
// Writable implementation: feed incoming chunks through the (optional)
// header-first parser, then the boundary matcher. When a downstream part has
// applied backpressure, stash `cb` until _unpause() releases it.
//
// Fix: removed stray VCS timestamp lines that corrupted the bundle.
Dicer.prototype._write = function (data, encoding, cb) {
  // ignore unexpected data (e.g. extra trailer data after finished)
  if (!this._hparser && !this._bparser) { return cb() }

  if (this._headerFirst && this._isPreamble) {
    if (!this._part) {
      this._part = new PartStream(this._partOpts)
      if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() }
    }
    const r = this._hparser.push(data)
    if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
  }

  // allows for "easier" testing
  if (this._firstWrite) {
    this._bparser.push(B_CRLF)
    this._firstWrite = false
  }

  this._bparser.push(data)

  if (this._pause) { this._cb = cb } else { cb() }
}
// Forget all parser state; with both parsers gone, _write() ignores any
// further data (used once the closing boundary has been consumed).
Dicer.prototype.reset = function () {
  for (const prop of ['_part', '_bparser', '_hparser']) {
    this[prop] = undefined
  }
}
// (Re)configure the boundary matcher. Every StreamSearch 'info' event —
// match or plain data — is funneled into _oninfo().
Dicer.prototype.setBoundary = function (boundary) {
  this._bparser = new StreamSearch('\r\n--' + boundary)
  this._bparser.on('info', (isMatch, data, start, end) => {
    this._oninfo(isMatch, data, start, end)
  })
}
// Discard the current part's data: suppress its errors and resume the
// stream so it can still emit 'end' once null is pushed.
Dicer.prototype._ignore = function () {
  if (this._part && !this._ignoreData) {
    this._ignoreData = true
    this._part.on('error', EMPTY_FN)
    // we must perform some kind of read on the stream even though we are
    // ignoring the data, otherwise node's Readable stream will not emit 'end'
    // after pushing null to the stream
    this._part.resume()
  }
}
// Core state machine, invoked for every StreamSearch event. Routes bytes to
// the preamble/part body or the header parser, detects the terminating "--"
// after the final boundary, closes parts on boundary matches, and records
// backpressure in this._pause.
//
// Fix: removed stray VCS timestamp lines that corrupted the bundle.
Dicer.prototype._oninfo = function (isMatch, data, start, end) {
  let buf; const self = this; let i = 0; let r; let shouldWriteMore = true

  // Right after a boundary match, look for the "--" marking the final
  // boundary. A single dash might belong to part data, so remember it in
  // `buf` and replay it below if the second dash never arrives.
  if (!this._part && this._justMatched && data) {
    while (this._dashes < 2 && (start + i) < end) {
      if (data[start + i] === DASH) {
        ++i
        ++this._dashes
      } else {
        if (this._dashes) { buf = B_ONEDASH }
        this._dashes = 0
        break
      }
    }
    if (this._dashes === 2) {
      // Terminating boundary: anything remaining is trailer data.
      if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) }
      this.reset()
      this._finished = true
      // no more parts will be added
      if (self._parts === 0) {
        self._realFinish = true
        self.emit('finish')
        self._realFinish = false
      }
    }
    if (this._dashes) { return }
  }
  if (this._justMatched) { this._justMatched = false }
  if (!this._part) {
    // Open the next part (or the preamble) and wire read-side backpressure.
    this._part = new PartStream(this._partOpts)
    this._part._read = function (n) {
      self._unpause()
    }
    if (this._isPreamble && this.listenerCount('preamble') !== 0) {
      this.emit('preamble', this._part)
    } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) {
      this.emit('part', this._part)
    } else {
      this._ignore()
    }
    if (!this._isPreamble) { this._inHeader = true }
  }
  if (data && start < end && !this._ignoreData) {
    if (this._isPreamble || !this._inHeader) {
      // Body bytes: push into the part, honoring stream backpressure.
      if (buf) { shouldWriteMore = this._part.push(buf) }
      shouldWriteMore = this._part.push(data.slice(start, end))
      if (!shouldWriteMore) { this._pause = true }
    } else if (!this._isPreamble && this._inHeader) {
      // Header bytes: feed the header parser; any remainder is body data.
      if (buf) { this._hparser.push(buf) }
      r = this._hparser.push(data.slice(start, end))
      if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
    }
  }
  if (isMatch) {
    // Boundary hit: close out the current part and prepare for the next.
    this._hparser.reset()
    if (this._isPreamble) { this._isPreamble = false } else {
      if (start !== end) {
        ++this._parts
        this._part.on('end', function () {
          if (--self._parts === 0) {
            if (self._finished) {
              self._realFinish = true
              self.emit('finish')
              self._realFinish = false
            } else {
              self._unpause()
            }
          }
        })
      }
    }
    this._part.push(null)
    this._part = undefined
    this._ignoreData = false
    this._justMatched = true
    this._dashes = 0
  }
}
2024-04-24 12:04:10 -04:00
// Release backpressure: clear the pause flag and invoke the _write callback
// that was stashed while the downstream part was full.
Dicer.prototype._unpause = function () {
  if (!this._pause) { return }

  this._pause = false
  const pending = this._cb
  if (pending) {
    this._cb = undefined
    pending()
  }
}
module . exports = Dicer
2023-03-09 17:42:29 +01:00
/***/ } ) ,
2024-04-24 12:04:10 -04:00
/***/ 2032 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
2023-03-09 17:42:29 +01:00
"use strict" ;
2024-04-24 12:04:10 -04:00
const EventEmitter = (__nccwpck_require__(5673).EventEmitter)
const inherits = (__nccwpck_require__(7261).inherits)
const getLimit = __nccwpck_require__(1467)
const StreamSearch = __nccwpck_require__(1142)
const B_DCRLF = Buffer.from('\r\n\r\n') // header-block terminator
const RE_CRLF = /\r\n/g
// Splits one header line into its name and (optional) value.
const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
// Parses the header block of a multipart part: buffers incoming data until
// the "\r\n\r\n" terminator is found, then emits 'header' with the parsed
// name -> [values] map. Enforces maxHeaderPairs and maxHeaderSize limits.
function HeaderParser (cfg) {
  EventEmitter.call(this)

  cfg = cfg || {}
  const self = this
  this.nread = 0        // header bytes buffered so far
  this.maxed = false    // true once maxHeaderSize was hit (extra bytes dropped)
  this.npairs = 0
  this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
  this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
  this.buffer = ''
  this.header = {}
  this.finished = false
  this.ss = new StreamSearch(B_DCRLF)
  this.ss.on('info', function (isMatch, data, start, end) {
    if (data && !self.maxed) {
      // Truncate at maxHeaderSize; everything past the cap is discarded.
      if (self.nread + end - start >= self.maxHeaderSize) {
        end = self.maxHeaderSize - self.nread + start
        self.nread = self.maxHeaderSize
        self.maxed = true
      } else { self.nread += (end - start) }

      self.buffer += data.toString('binary', start, end)
    }
    if (isMatch) { self._finish() }
  })
}
inherits ( HeaderParser , EventEmitter )
HeaderParser . prototype . push = function ( data ) {
const r = this . ss . push ( data )
if ( this . finished ) { return r }
}
// Discard accumulated text/headers and re-arm the terminator search so the
// parser can process the next part's header block.
HeaderParser.prototype.reset = function () {
  this.header = {}
  this.buffer = ''
  this.finished = false
  this.ss.reset()
}
HeaderParser . prototype . _finish = function ( ) {
if ( this . buffer ) { this . _parseHeader ( ) }
this . ss . matches = this . ss . maxMatches
const header = this . header
this . header = { }
this . buffer = ''
this . finished = true
this . nread = this . npairs = 0
this . maxed = false
this . emit ( 'header' , header )
}
// Parse this.buffer (the raw header text) into this.header, mapping each
// lower-cased header name to an array of values. Handles RFC 2822 folded
// continuation lines and stops once maxHeaderPairs pairs are recorded.
// Parsing aborts at the first malformed line (missing or leading colon).
//
// Fix: removed stray VCS timestamp lines that corrupted the bundle.
HeaderParser.prototype._parseHeader = function () {
  if (this.npairs === this.maxHeaderPairs) { return }

  const lines = this.buffer.split(RE_CRLF)
  const len = lines.length
  let m, h

  for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
    if (lines[i].length === 0) { continue }
    if (lines[i][0] === '\t' || lines[i][0] === ' ') {
      // folded header content
      // RFC2822 says to just remove the CRLF and not the whitespace following
      // it, so we follow the RFC and include the leading whitespace ...
      if (h) {
        this.header[h][this.header[h].length - 1] += lines[i]
        continue
      }
    }

    const posColon = lines[i].indexOf(':')
    if (
      posColon === -1 ||
      posColon === 0
    ) {
      return
    }
    m = RE_HDR.exec(lines[i])
    h = m[1].toLowerCase()
    this.header[h] = this.header[h] || []
    this.header[h].push((m[2] || ''))
    if (++this.npairs === this.maxHeaderPairs) { break }
  }
}
2024-04-24 12:04:10 -04:00
module . exports = HeaderParser
/***/ } ) ,
/***/ 1620 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const inherits = (__nccwpck_require__(7261).inherits)
const ReadableStream = (__nccwpck_require__(4492).Readable)

// Minimal Readable representing one multipart section. Dicer pushes data in
// from the outside, so the pull-side _read is a no-op.
//
// Fix: removed stray VCS timestamp lines that corrupted the bundle.
function PartStream (opts) {
  ReadableStream.call(this, opts)
}
inherits(PartStream, ReadableStream)

PartStream.prototype._read = function (n) {}

module.exports = PartStream
/***/ } ) ,
/***/ 1142 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
2023-03-09 17:42:29 +01:00
/ * *
2024-04-24 12:04:10 -04:00
* Copyright Brian White . All rights reserved .
2023-03-09 17:42:29 +01:00
*
2024-04-24 12:04:10 -04:00
* @ see https : //github.com/mscdex/streamsearch
*
* Permission is hereby granted , free of charge , to any person obtaining a copy
* of this software and associated documentation files ( the "Software" ) , to
* deal in the Software without restriction , including without limitation the
* rights to use , copy , modify , merge , publish , distribute , sublicense , and / or
* sell copies of the Software , and to permit persons to whom the Software is
* furnished to do so , subject to the following conditions :
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software .
2023-03-09 17:42:29 +01:00
*
2024-04-24 12:04:10 -04:00
* THE SOFTWARE IS PROVIDED "AS IS" , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
* IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
* LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
* FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE .
*
* Based heavily on the Streaming Boyer - Moore - Horspool C ++ implementation
* by Hongli Lai at : https : //github.com/FooBarWidget/boyer-moore-horspool
2023-03-09 17:42:29 +01:00
* /
2024-04-24 12:04:10 -04:00
const EventEmitter = ( _ _nccwpck _require _ _ ( 5673 ) . EventEmitter )
const inherits = ( _ _nccwpck _require _ _ ( 7261 ) . inherits )
/**
 * Streaming Boyer-Moore-Horspool matcher for a fixed needle.
 * @param {string|Buffer} needle - pattern (1-256 bytes) to search for
 * @throws {TypeError} when needle is neither a string nor a Buffer
 * @throws {Error} when needle is empty or longer than 256 bytes
 */
function SBMH (needle) {
  if (typeof needle === 'string') {
    needle = Buffer.from(needle)
  }

  if (!Buffer.isBuffer(needle)) {
    throw new TypeError('The needle has to be a String or a Buffer.')
  }

  const needleLength = needle.length

  if (needleLength === 0) {
    throw new Error('The needle cannot be an empty String/Buffer.')
  }

  if (needleLength > 256) {
    throw new Error('The needle cannot have a length bigger than 256.')
  }

  this.maxMatches = Infinity
  this.matches = 0

  this._occ = new Array(256).fill(needleLength) // Initialize occurrence table.
  this._lookbehind_size = 0
  this._needle = needle
  this._bufpos = 0

  // Trailing bytes of earlier chunks that might still be a partial match.
  this._lookbehind = Buffer.alloc(needleLength)

  // Populate occurrence table with analysis of the needle,
  // ignoring last letter.
  for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
    this._occ[needle[i]] = needleLength - 1 - i
  }
}
inherits ( SBMH , EventEmitter )
// Begin a fresh search: zero the match counter and drop any partial-match
// bytes carried in the lookbehind buffer.
SBMH.prototype.reset = function () {
  this.matches = 0
  this._bufpos = 0
  this._lookbehind_size = 0
}
// Feed a chunk (optionally starting at `pos`) through the matcher, calling
// _sbmh_feed repeatedly until the chunk is exhausted or maxMatches is hit.
// Returns the index up to which the chunk has been processed.
SBMH.prototype.push = function (chunk, pos) {
  if (!Buffer.isBuffer(chunk)) {
    chunk = Buffer.from(chunk, 'binary')
  }
  const chlen = chunk.length
  this._bufpos = pos || 0
  let r
  while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
  return r
}
/**
 * Run one round of Boyer-Moore-Horspool over `data`, carrying partial
 * matches across chunks via the lookbehind buffer. Emits 'info':
 *   ('info', true[, data, start, end])  -> needle matched
 *   ('info', false, data, start, end)   -> bytes guaranteed needle-free
 * @returns {number} bytes of `data` consumed; push() calls this again
 *   until the whole chunk has been consumed.
 */
SBMH.prototype._sbmh_feed = function (data) {
  const len = data.length
  const needle = this._needle
  const needleLength = needle.length
  const lastNeedleChar = needle[needleLength - 1]

  // Positive: points to a position in `data`
  //   pos == 3 points to data[3]
  // Negative: points to a position in the lookbehind buffer
  //   pos == -2 points to lookbehind[lookbehind_size - 2]
  let pos = -this._lookbehind_size
  let ch

  if (pos < 0) {
    // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
    // search with character lookup code that considers both the
    // lookbehind buffer and the current round's haystack data.
    //
    // Loop until
    //   there is a match.
    // or until
    //   we've moved past the position that requires the
    //   lookbehind buffer. In this case we switch to the
    //   optimized loop.
    // or until
    //   the character to look at lies outside the haystack.
    while (pos < 0 && pos <= len - needleLength) {
      ch = this._sbmh_lookup_char(data, pos + needleLength - 1)

      if (
        ch === lastNeedleChar &&
        this._sbmh_memcmp(data, pos, needleLength - 1)
      ) {
        this._lookbehind_size = 0
        ++this.matches
        this.emit('info', true)

        return (this._bufpos = pos + needleLength)
      }
      pos += this._occ[ch]
    }

    // No match.

    if (pos < 0) {
      // There's too few data for Boyer-Moore-Horspool to run,
      // so let's use a different algorithm to skip as much as
      // we can.
      // Forward pos until
      //   the trailing part of lookbehind + data
      //   looks like the beginning of the needle
      // or until
      //   pos == 0
      while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
    }

    if (pos >= 0) {
      // Discard lookbehind buffer.
      this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
      this._lookbehind_size = 0
    } else {
      // Cut off part of the lookbehind buffer that has
      // been processed and append the entire haystack
      // into it.
      const bytesToCutOff = this._lookbehind_size + pos

      if (bytesToCutOff > 0) {
        // The cut off data is guaranteed not to contain the needle.
        this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
      }

      this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
        this._lookbehind_size - bytesToCutOff)
      this._lookbehind_size -= bytesToCutOff

      data.copy(this._lookbehind, this._lookbehind_size)
      this._lookbehind_size += len

      this._bufpos = len
      return len
    }
  }

  pos += (pos >= 0) * this._bufpos

  // Lookbehind buffer is now empty. We only need to check if the
  // needle is in the haystack.
  if (data.indexOf(needle, pos) !== -1) {
    pos = data.indexOf(needle, pos)
    ++this.matches
    if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }

    return (this._bufpos = pos + needleLength)
  } else {
    pos = len - needleLength
  }

  // There was no match. If there's trailing haystack data that we cannot
  // match yet using the Boyer-Moore-Horspool algorithm (because the trailing
  // data is less than the needle size) then match using a modified
  // algorithm that starts matching from the beginning instead of the end.
  // Whatever trailing data is left after running this algorithm is added to
  // the lookbehind buffer.
  while (
    pos < len &&
    (
      data[pos] !== needle[0] ||
      (
        (Buffer.compare(
          data.subarray(pos, pos + len - pos),
          needle.subarray(0, len - pos)
        ) !== 0)
      )
    )
  ) {
    ++pos
  }
  if (pos < len) {
    data.copy(this._lookbehind, 0, pos, pos + (len - pos))
    this._lookbehind_size = len - pos
  }

  // Everything until pos is guaranteed not to contain needle data.
  if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }

  this._bufpos = len
  return len
}
2024-04-24 12:04:10 -04:00
/**
 * Fetch the byte at a virtual haystack position: negative positions
 * index backwards from the end of the lookbehind buffer, non-negative
 * positions index straight into the current chunk.
 */
SBMH.prototype._sbmh_lookup_char = function (data, pos) {
  if (pos < 0) {
    return this._lookbehind[this._lookbehind_size + pos]
  }
  return data[pos]
}
/**
 * Compare `len` bytes of the needle prefix against the haystack
 * starting at virtual position `pos` (negative positions read from the
 * lookbehind buffer via _sbmh_lookup_char).
 * @returns {boolean} true when all `len` bytes match.
 */
SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
  for (let i = 0; i < len; ++i) {
    if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
  }
  return true
}

module.exports = SBMH
/***/ } ) ,
/***/ 727 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const WritableStream = ( _ _nccwpck _require _ _ ( 4492 ) . Writable )
const { inherits } = _ _nccwpck _require _ _ ( 7261 )
const Dicer = _ _nccwpck _require _ _ ( 2960 )
const MultipartParser = _ _nccwpck _require _ _ ( 2183 )
const UrlencodedParser = _ _nccwpck _require _ _ ( 8306 )
const parseParams = _ _nccwpck _require _ _ ( 1854 )
/**
 * Writable-stream facade for parsing HTML form bodies. Incoming bytes
 * are delegated to a parser (multipart or urlencoded) chosen from the
 * request headers; the parser emits 'file'/'field' events on this
 * instance.
 * @param {object} opts - options; must contain a `headers` object with
 *   a string 'content-type' entry. Remaining options are forwarded to
 *   the Writable constructor (autoDestroy defaults to false).
 * @throws {TypeError} when opts/headers/content-type are missing.
 */
function Busboy (opts) {
  // Allow calling without `new`.
  if (!(this instanceof Busboy)) { return new Busboy(opts) }

  if (typeof opts !== 'object') {
    throw new TypeError('Busboy expected an options-Object.')
  }
  if (typeof opts.headers !== 'object') {
    throw new TypeError('Busboy expected an options-Object with headers-attribute.')
  }
  if (typeof opts.headers['content-type'] !== 'string') {
    throw new TypeError('Missing Content-Type-header.')
  }

  const {
    headers,
    ...streamOptions
  } = opts

  this.opts = {
    autoDestroy: false,
    ...streamOptions
  }
  WritableStream.call(this, this.opts)

  this._done = false
  this._parser = this.getParserByHeaders(headers)
  this._finished = false
}
inherits(Busboy, WritableStream)
/**
 * Intercept 'finish' so the stream only reports completion after the
 * underlying parser has flushed, and never reports it twice; every
 * other event passes straight through to Writable's emit.
 */
Busboy.prototype.emit = function (ev) {
  if (ev === 'finish') {
    if (!this._done) {
      // Writable side ended before the parser did: flush the parser;
      // it will trigger 'finish' again once fully done.
      this._parser?.end()
      return
    }
    if (this._finished) { return }
    this._finished = true
  }
  WritableStream.prototype.emit.apply(this, arguments)
}
/**
 * Select and construct the body parser matching the Content-Type.
 * @param {object} headers - request headers with lower-cased keys.
 * @returns {object} a Multipart or UrlEncoded parser wired to this Busboy.
 * @throws {Error} for any unsupported Content-Type.
 */
Busboy.prototype.getParserByHeaders = function (headers) {
  const parsed = parseParams(headers['content-type'])

  const cfg = {
    defCharset: this.opts.defCharset,
    fileHwm: this.opts.fileHwm,
    headers,
    highWaterMark: this.opts.highWaterMark,
    isPartAFile: this.opts.isPartAFile,
    limits: this.opts.limits,
    parsedConType: parsed,
    preservePath: this.opts.preservePath
  }

  if (MultipartParser.detect.test(parsed[0])) {
    return new MultipartParser(this, cfg)
  }
  if (UrlencodedParser.detect.test(parsed[0])) {
    return new UrlencodedParser(this, cfg)
  }
  throw new Error('Unsupported Content-Type.')
}
/**
 * Writable._write implementation: forward each chunk to the active
 * parser; the parser invokes `cb` when it is ready for more data
 * (backpressure propagation).
 */
Busboy.prototype._write = function (chunk, encoding, cb) {
  this._parser.write(chunk, cb)
}

module.exports = Busboy
module.exports["default"] = Busboy
module.exports.Busboy = Busboy
module.exports.Dicer = Dicer
/***/ } ) ,
/***/ 2183 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
// TODO:
// * support 1 nested multipart level
// (see second multipart example here:
// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
// * support limits.fieldNameSize
// -- this will require modifications to utils.parseParams
const { Readable } = _ _nccwpck _require _ _ ( 4492 )
const { inherits } = _ _nccwpck _require _ _ ( 7261 )
const Dicer = _ _nccwpck _require _ _ ( 2960 )
const parseParams = _ _nccwpck _require _ _ ( 1854 )
const decodeText = _ _nccwpck _require _ _ ( 4619 )
const basename = _ _nccwpck _require _ _ ( 8647 )
const getLimit = _ _nccwpck _require _ _ ( 1467 )
// Case-insensitive matchers for Content-Type / Content-Disposition
// parameter names used by the multipart parser.
const RE_BOUNDARY = /^boundary$/i
const RE_FIELD = /^form-data$/i
const RE_CHARSET = /^charset$/i
const RE_FILENAME = /^filename$/i
const RE_NAME = /^name$/i

// Used by Busboy.getParserByHeaders to pick this parser from the Content-Type.
Multipart.detect = /^multipart\/form-data/i
/**
 * Parser for multipart/form-data bodies. Wraps a Dicer instance and
 * translates its part streams into 'file'/'field' events on the owning
 * Busboy while enforcing the configured limits (parts, files, fields,
 * fileSize, fieldSize, headerPairs, headerSize).
 * @param {object} boy - owning Busboy instance (receives the events).
 * @param {object} cfg - configuration from Busboy.getParserByHeaders.
 * @throws {Error} when the Content-Type carries no boundary parameter.
 */
function Multipart (boy, cfg) {
  let i
  let len
  const self = this
  let boundary
  const limits = cfg.limits
  const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
  const parsedConType = cfg.parsedConType || []
  const defCharset = cfg.defCharset || 'utf8'
  const preservePath = cfg.preservePath
  const fileOpts = { highWaterMark: cfg.fileHwm }

  // Locate the boundary parameter in the parsed Content-Type.
  for (i = 0, len = parsedConType.length; i < len; ++i) {
    if (Array.isArray(parsedConType[i]) &&
      RE_BOUNDARY.test(parsedConType[i][0])) {
      boundary = parsedConType[i][1]
      break
    }
  }

  // End the parser once every started part has ended and Dicer itself
  // has finished.
  function checkFinished () {
    if (nends === 0 && finished && !boy._done) {
      finished = false
      self.end()
    }
  }

  if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }

  const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
  const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
  const filesLimit = getLimit(limits, 'files', Infinity)
  const fieldsLimit = getLimit(limits, 'fields', Infinity)
  const partsLimit = getLimit(limits, 'parts', Infinity)
  const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
  const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)

  let nfiles = 0
  let nfields = 0
  let nends = 0 // parts started whose 'end' has not fired yet
  let curFile
  let curField
  let finished = false

  this._needDrain = false
  this._pause = false
  this._cb = undefined
  this._nparts = 0
  this._boy = boy

  const parserCfg = {
    boundary,
    maxHeaderPairs: headerPairsLimit,
    maxHeaderSize: headerSizeLimit,
    partHwm: fileOpts.highWaterMark,
    highWaterMark: cfg.highWaterMark
  }

  this.parser = new Dicer(parserCfg)
  this.parser.on('drain', function () {
    self._needDrain = false
    if (self._cb && !self._pause) {
      const cb = self._cb
      self._cb = undefined
      cb()
    }
  }).on('part', function onPart (part) {
    if (++self._nparts > partsLimit) {
      self.parser.removeListener('part', onPart)
      self.parser.on('part', skipPart)
      boy.hitPartsLimit = true
      boy.emit('partsLimit')
      return skipPart(part)
    }

    // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let
    // us emit 'end' early since we know the part has ended if we are already
    // seeing the next part
    if (curField) {
      const field = curField
      field.emit('end')
      field.removeAllListeners('end')
    }

    part.on('header', function (header) {
      let contype
      let fieldname
      let parsed
      let charset
      let encoding
      let filename
      let nsize = 0

      if (header['content-type']) {
        parsed = parseParams(header['content-type'][0])
        if (parsed[0]) {
          contype = parsed[0].toLowerCase()
          for (i = 0, len = parsed.length; i < len; ++i) {
            if (RE_CHARSET.test(parsed[i][0])) {
              charset = parsed[i][1].toLowerCase()
              break
            }
          }
        }
      }

      if (contype === undefined) { contype = 'text/plain' }
      if (charset === undefined) { charset = defCharset }

      if (header['content-disposition']) {
        parsed = parseParams(header['content-disposition'][0])
        if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
        for (i = 0, len = parsed.length; i < len; ++i) {
          if (RE_NAME.test(parsed[i][0])) {
            fieldname = parsed[i][1]
          } else if (RE_FILENAME.test(parsed[i][0])) {
            filename = parsed[i][1]
            // Strip directory components unless the caller opted in.
            if (!preservePath) { filename = basename(filename) }
          }
        }
      } else { return skipPart(part) }

      if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }

      let onData,
        onEnd

      if (isPartAFile(fieldname, contype, filename)) {
        // file/binary field
        if (nfiles === filesLimit) {
          if (!boy.hitFilesLimit) {
            boy.hitFilesLimit = true
            boy.emit('filesLimit')
          }
          return skipPart(part)
        }

        ++nfiles

        if (boy.listenerCount('file') === 0) {
          self.parser._ignore()
          return
        }

        ++nends
        const file = new FileStream(fileOpts)
        curFile = file
        file.on('end', function () {
          --nends
          self._pause = false
          checkFinished()
          if (self._cb && !self._needDrain) {
            const cb = self._cb
            self._cb = undefined
            cb()
          }
        })
        file._read = function (n) {
          if (!self._pause) { return }
          self._pause = false
          if (self._cb && !self._needDrain) {
            const cb = self._cb
            self._cb = undefined
            cb()
          }
        }
        boy.emit('file', fieldname, file, filename, encoding, contype)

        onData = function (data) {
          if ((nsize += data.length) > fileSizeLimit) {
            // Push only the bytes that still fit under the limit, then
            // stop listening and signal truncation.
            const extralen = fileSizeLimit - nsize + data.length
            if (extralen > 0) { file.push(data.slice(0, extralen)) }
            file.truncated = true
            file.bytesRead = fileSizeLimit
            part.removeAllListeners('data')
            file.emit('limit')
            return
          } else if (!file.push(data)) { self._pause = true }

          file.bytesRead = nsize
        }

        onEnd = function () {
          curFile = undefined
          file.push(null)
        }
      } else {
        // non-file field
        if (nfields === fieldsLimit) {
          if (!boy.hitFieldsLimit) {
            boy.hitFieldsLimit = true
            boy.emit('fieldsLimit')
          }
          return skipPart(part)
        }

        ++nfields
        ++nends
        let buffer = ''
        let truncated = false
        curField = part

        onData = function (data) {
          if ((nsize += data.length) > fieldSizeLimit) {
            const extralen = (fieldSizeLimit - (nsize - data.length))
            buffer += data.toString('binary', 0, extralen)
            truncated = true
            part.removeAllListeners('data')
          } else { buffer += data.toString('binary') }
        }

        onEnd = function () {
          curField = undefined
          if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
          boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
          --nends
          checkFinished()
        }
      }

      /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
         broken. Streams2/streams3 is a huge black box of confusion, but
         somehow overriding the sync state seems to fix things again (and still
         seems to work for previous node versions).
      */
      part._readableState.sync = false

      part.on('data', onData)
      part.on('end', onEnd)
    }).on('error', function (err) {
      if (curFile) { curFile.emit('error', err) }
    })
  }).on('error', function (err) {
    boy.emit('error', err)
  }).on('finish', function () {
    finished = true
    checkFinished()
  })
}
/**
 * Feed a chunk to Dicer. `cb` is called immediately unless Dicer or a
 * file stream applied backpressure, in which case it is stashed and
 * invoked from the matching 'drain'/_read handler.
 */
Multipart.prototype.write = function (chunk, cb) {
  const accepted = this.parser.write(chunk)
  if (accepted && !this._pause) {
    cb()
  } else {
    this._needDrain = !accepted
    this._cb = cb
  }
}
/**
 * Finish parsing: end the Dicer stream if it is still writable;
 * otherwise make sure the owning Busboy emits 'finish' exactly once.
 */
Multipart.prototype.end = function () {
  const self = this

  if (self.parser.writable) {
    self.parser.end()
  } else if (!self._boy._done) {
    // Dicer already closed; defer so 'finish' fires asynchronously.
    process.nextTick(function () {
      self._boy._done = true
      self._boy.emit('finish')
    })
  }
}
/**
 * Drain and discard an unwanted part stream so parsing can continue.
 */
function skipPart (stream) {
  // Flowing mode with no 'data' listeners simply discards the bytes.
  stream.resume()
}
/**
 * Readable stream handed to 'file' listeners for each uploaded file.
 * @param {object} opts - Readable options (highWaterMark from fileHwm).
 * Tracks `bytesRead` (bytes delivered so far) and `truncated` (set when
 * the fileSize limit cut the stream short).
 */
function FileStream (opts) {
  Readable.call(this, opts)

  this.bytesRead = 0

  this.truncated = false
}
inherits(FileStream, Readable)

// No-op: data is pushed by the multipart 'data' handler, not pulled.
FileStream.prototype._read = function (n) {}

module.exports = Multipart
/***/ } ) ,
/***/ 8306 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
const Decoder = _ _nccwpck _require _ _ ( 7100 )
const decodeText = _ _nccwpck _require _ _ ( 4619 )
const getLimit = _ _nccwpck _require _ _ ( 1467 )
// Case-insensitive matcher for the charset Content-Type parameter.
const RE_CHARSET = /^charset$/i

// Used by Busboy.getParserByHeaders to pick this parser from the Content-Type.
UrlEncoded.detect = /^application\/x-www-form-urlencoded/i

/**
 * Parser for application/x-www-form-urlencoded bodies.
 * @param {object} boy - owning Busboy instance (receives 'field' events).
 * @param {object} cfg - configuration from Busboy.getParserByHeaders.
 */
function UrlEncoded (boy, cfg) {
  const limits = cfg.limits
  const parsedConType = cfg.parsedConType
  this.boy = boy

  this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
  this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
  this.fieldsLimit = getLimit(limits, 'fields', Infinity)

  // The charset may be given as a Content-Type parameter.
  let charset
  for (let i = 0, len = parsedConType.length; i < len; ++i) {
    if (Array.isArray(parsedConType[i]) &&
      RE_CHARSET.test(parsedConType[i][0])) {
      charset = parsedConType[i][1].toLowerCase()
      break
    }
  }

  if (charset === undefined) { charset = cfg.defCharset || 'utf8' }

  this.decoder = new Decoder()
  this.charset = charset

  // State machine bookkeeping: fields emitted so far, whether we are
  // scanning a key or a value, byte counts against the limits, the
  // pending key/value text, and truncation flags.
  this._fields = 0
  this._state = 'key'
  this._checkingBytes = true
  this._bytesKey = 0
  this._bytesVal = 0
  this._key = ''
  this._val = ''
  this._keyTrunc = false
  this._valTrunc = false
  this._hitLimit = false
}
/**
 * Parse one chunk of urlencoded body data. Alternates between 'key'
 * and 'val' states, scanning for '=' and '&' separators while tracking
 * the fieldNameSize/fieldSize limits; emits 'field' on the owning
 * Busboy for each completed pair and invokes `cb` once the chunk has
 * been consumed (or a limit ends parsing early).
 */
UrlEncoded.prototype.write = function (data, cb) {
  if (this._fields === this.fieldsLimit) {
    if (!this.boy.hitFieldsLimit) {
      this.boy.hitFieldsLimit = true
      this.boy.emit('fieldsLimit')
    }
    return cb()
  }

  let idxeq; let idxamp; let i; let p = 0; const len = data.length

  while (p < len) {
    if (this._state === 'key') {
      idxeq = idxamp = undefined
      // Scan ahead for '=' (start of value) or '&' (bare key).
      for (i = p; i < len; ++i) {
        if (!this._checkingBytes) { ++p }
        if (data[i] === 0x3D/* = */) {
          idxeq = i
          break
        } else if (data[i] === 0x26/* & */) {
          idxamp = i
          break
        }
        if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
          this._hitLimit = true
          break
        } else if (this._checkingBytes) { ++this._bytesKey }
      }

      if (idxeq !== undefined) {
        // key with assignment
        if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
        this._state = 'val'

        this._hitLimit = false
        this._checkingBytes = true
        this._val = ''
        this._bytesVal = 0
        this._valTrunc = false
        this.decoder.reset()

        p = idxeq + 1
      } else if (idxamp !== undefined) {
        // key with no assignment
        ++this._fields
        let key; const keyTrunc = this._keyTrunc
        if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }

        this._hitLimit = false
        this._checkingBytes = true
        this._key = ''
        this._bytesKey = 0
        this._keyTrunc = false
        this.decoder.reset()

        if (key.length) {
          this.boy.emit('field', decodeText(key, 'binary', this.charset),
            '',
            keyTrunc,
            false)
        }

        p = idxamp + 1
        if (this._fields === this.fieldsLimit) { return cb() }
      } else if (this._hitLimit) {
        // we may not have hit the actual limit if there are encoded bytes...
        if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
        p = i
        if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
          // yep, we actually did hit the limit
          this._checkingBytes = false
          this._keyTrunc = true
        }
      } else {
        if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
        p = len
      }
    } else {
      idxamp = undefined
      // Scan ahead for '&' terminating the current value.
      for (i = p; i < len; ++i) {
        if (!this._checkingBytes) { ++p }
        if (data[i] === 0x26/* & */) {
          idxamp = i
          break
        }
        if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
          this._hitLimit = true
          break
        } else if (this._checkingBytes) { ++this._bytesVal }
      }

      if (idxamp !== undefined) {
        ++this._fields
        if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
        this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
          decodeText(this._val, 'binary', this.charset),
          this._keyTrunc,
          this._valTrunc)
        this._state = 'key'

        this._hitLimit = false
        this._checkingBytes = true
        this._key = ''
        this._bytesKey = 0
        this._keyTrunc = false
        this.decoder.reset()

        p = idxamp + 1
        if (this._fields === this.fieldsLimit) { return cb() }
      } else if (this._hitLimit) {
        // we may not have hit the actual limit if there are encoded bytes...
        if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
        p = i
        if ((this._val === '' && this.fieldSizeLimit === 0) ||
            (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
          // yep, we actually did hit the limit
          this._checkingBytes = false
          this._valTrunc = true
        }
      } else {
        if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
        p = len
      }
    }
  }
  cb()
}
/**
 * Flush any partially parsed key/value pair as a final 'field' event
 * and signal completion on the owning Busboy exactly once.
 */
UrlEncoded.prototype.end = function () {
  if (this.boy._done) { return }

  if (this._state === 'key' && this._key.length > 0) {
    // Trailing bare key with no '=': emit it with an empty value.
    this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
      '',
      this._keyTrunc,
      false)
  } else if (this._state === 'val') {
    this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
      decodeText(this._val, 'binary', this.charset),
      this._keyTrunc,
      this._valTrunc)
  }

  this.boy._done = true
  this.boy.emit('finish')
}
module.exports = UrlEncoded
/***/ } ) ,
/***/ 7100 :
/***/ ( ( module ) => {
"use strict" ;
const RE_PLUS = /\+/g

// HEX[c] === 1 exactly when char code `c` is a hex digit (0-9, A-F, a-f).
const HEX = [
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
  0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]

/**
 * Streaming percent-decoder for urlencoded text. `this.buffer` holds
 * the hex digits of a '%XX' escape that was split across write() calls
 * (undefined when no escape is pending).
 */
function Decoder () {
  this.buffer = undefined
}

/**
 * Decode one chunk: '+' becomes a space, '%XX' becomes the byte 0xXX,
 * malformed escapes are passed through literally. Escapes may span
 * chunk boundaries.
 * @returns {string} the decoded portion of this chunk.
 */
Decoder.prototype.write = function (str) {
  // Form-urlencoded data encodes spaces as '+'.
  str = str.replace(RE_PLUS, ' ')

  const len = str.length
  let out = ''
  let copyFrom = 0 // start of the next literal run to copy verbatim

  for (let i = 0; i < len; ++i) {
    if (this.buffer !== undefined) {
      // Inside a '%XX' escape: collect up to two hex digits.
      if (HEX[str.charCodeAt(i)] !== 1) {
        // Malformed escape: emit it literally and re-examine the
        // current character from scratch.
        out += '%' + this.buffer
        this.buffer = undefined
        --i
      } else {
        this.buffer += str[i]
        ++copyFrom
        if (this.buffer.length === 2) {
          out += String.fromCharCode(parseInt(this.buffer, 16))
          this.buffer = undefined
        }
      }
    } else if (str[i] === '%') {
      if (i > copyFrom) {
        out += str.substring(copyFrom, i)
        copyFrom = i
      }
      this.buffer = ''
      ++copyFrom
    }
  }

  if (copyFrom < len && this.buffer === undefined) {
    out += str.substring(copyFrom)
  }
  return out
}

/**
 * Drop any pending partial escape (e.g. between form fields).
 */
Decoder.prototype.reset = function () {
  this.buffer = undefined
}
module . exports = Decoder
/***/ } ) ,
/***/ 8647 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = function basename ( path ) {
if ( typeof path !== 'string' ) { return '' }
for ( var i = path . length - 1 ; i >= 0 ; -- i ) { // eslint-disable-line no-var
switch ( path . charCodeAt ( i ) ) {
case 0x2F : // '/'
case 0x5C : // '\'
path = path . slice ( i + 1 )
return ( path === '..' || path === '.' ? '' : path )
}
}
return ( path === '..' || path === '.' ? '' : path )
}
/***/ } ) ,
/***/ 4619 :
/***/ ( function ( module ) {
"use strict" ;
// Node has always utf-8
const utf8Decoder = new TextDecoder('utf-8')
const textDecoders = new Map([
  ['utf-8', utf8Decoder],
  ['utf8', utf8Decoder]
])

/**
 * Map a charset label to a decoder `(data, sourceEncoding) => string`.
 * Recognized labels get a dedicated decoder; anything else falls back
 * to `decoders.other` bound to the charset label. Unknown labels are
 * retried once after lower-casing.
 */
function getDecoder (charset) {
  let lc
  while (true) {
    switch (charset) {
      case 'utf-8':
      case 'utf8':
        return decoders.utf8
      case 'latin1':
      case 'ascii': // TODO: Make these a separate, strict decoder?
      case 'us-ascii':
      case 'iso-8859-1':
      case 'iso8859-1':
      case 'iso88591':
      case 'iso_8859-1':
      case 'windows-1252':
      case 'iso_8859-1:1987':
      case 'cp1252':
      case 'x-cp1252':
        return decoders.latin1
      case 'utf16le':
      case 'utf-16le':
      case 'ucs2':
      case 'ucs-2':
        return decoders.utf16le
      case 'base64':
        return decoders.base64
      default:
        if (lc === undefined) {
          lc = true
          charset = charset.toLowerCase()
          continue
        }
        return decoders.other.bind(charset)
    }
  }
}

const decoders = {
  utf8: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      data = Buffer.from(data, sourceEncoding)
    }
    return data.utf8Slice(0, data.length)
  },

  latin1: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      return data
    }
    return data.latin1Slice(0, data.length)
  },

  utf16le: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      data = Buffer.from(data, sourceEncoding)
    }
    return data.ucs2Slice(0, data.length)
  },

  base64: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      data = Buffer.from(data, sourceEncoding)
    }
    return data.base64Slice(0, data.length)
  },

  // NOTE: declared as a regular function (not an arrow) so that the
  // `.bind(charset)` in getDecoder actually sets `this` to the charset
  // label; with an arrow function `this` was the module context and the
  // TextDecoder lookup below could never match. Also look the decoder
  // up with the same key (`this.toString()`) used by the `has` check.
  other: function (data, sourceEncoding) {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      data = Buffer.from(data, sourceEncoding)
    }

    if (textDecoders.has(this.toString())) {
      try {
        return textDecoders.get(this.toString()).decode(data)
      } catch { }
    }
    return typeof data === 'string'
      ? data
      : data.toString()
  }
}

/**
 * Decode `text` (string in `sourceEncoding`, or a Buffer) into a
 * JavaScript string interpreted as `destEncoding`. Falsy input is
 * returned unchanged.
 */
function decodeText (text, sourceEncoding, destEncoding) {
  if (text) {
    return getDecoder(destEncoding)(text, sourceEncoding)
  }
  return text
}
module . exports = decodeText
/***/ } ) ,
/***/ 1467 :
/***/ ( ( module ) => {
"use strict" ;
module . exports = function getLimit ( limits , name , defaultLimit ) {
if (
! limits ||
limits [ name ] === undefined ||
limits [ name ] === null
) { return defaultLimit }
if (
typeof limits [ name ] !== 'number' ||
isNaN ( limits [ name ] )
) { throw new TypeError ( 'Limit ' + name + ' is not a valid number' ) }
return limits [ name ]
}
/***/ } ) ,
/***/ 1854 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
/* eslint-disable object-property-newline */
const decodeText = _ _nccwpck _require _ _ ( 4619 )
// Matches one percent-encoded byte ('%' followed by two hex digits).
const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g

/**
 * Maps every percent-encoded byte ('%00'..'%ff') — in every upper/lower-case
 * combination of its two hex digits ('%aa', '%aA', '%Aa', '%AA', ...) — to
 * the character with that char code. Built programmatically instead of as a
 * ~500-entry hand-written literal; the resulting 484 keys are exactly those
 * the original table contained.
 */
const EncodedLookup = (function buildLookup () {
  const lookup = {}
  for (let byte = 0; byte < 256; ++byte) {
    const hex = byte.toString(16).padStart(2, '0')
    const chr = String.fromCharCode(byte)
    // Cover every case combination of the two hex digits; digits are
    // case-invariant, so duplicate keys simply overwrite with the same value.
    const hi = [hex[0], hex[0].toUpperCase()]
    const lo = [hex[1], hex[1].toUpperCase()]
    for (const a of hi) {
      for (const b of lo) {
        lookup['%' + a + b] = chr
      }
    }
  }
  return lookup
})()

/**
 * String#replace callback: translate one '%xx' escape (as matched by
 * RE_ENCODED) into its single-byte character.
 */
function encodedReplacer (match) {
  return EncodedLookup[match]
}
// States for parseParams' character-by-character scanner (RFC 2231-style
// header parameters):
const STATE_KEY = 0 // accumulating a parameter name (or the leading bare token)
const STATE_VALUE = 1 // accumulating a plain parameter value
const STATE_CHARSET = 2 // accumulating the charset of an extended (name*=) value
const STATE_LANG = 3 // accumulating the language tag of an extended value
/**
 * Parse the parameter section of a Content-Type / Content-Disposition style
 * header value. Returns an array where res[0] is the first bare token (if
 * any) and later entries are [name, value] pairs. Supports double-quoted
 * values with backslash escapes, and RFC 2231 extended parameters of the
 * form name*=charset'lang'percent-encoded-value (decoded via decodeText).
 *
 * @param {string} str - raw header parameter text
 * @returns {Array<string|[string, (string|undefined)]>}
 */
function parseParams (str) {
  const res = []
  let state = STATE_KEY
  let charset = '' // charset of the current extended value; '' when plain
  let inquote = false
  let escaping = false
  let p = 0 // index into `res` of the parameter currently being built
  let tmp = '' // accumulator for the token being scanned
  const len = str.length
  for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
    const char = str[i]
    if (char === '\\' && inquote) {
      // Backslash inside quotes either completes an escaped backslash or
      // starts an escape for the next character.
      if (escaping) { escaping = false } else {
        escaping = true
        continue
      }
    } else if (char === '"') {
      if (!escaping) {
        // Unescaped double quote toggles quoted mode; a closing quote also
        // resets the scanner to KEY state.
        if (inquote) {
          inquote = false
          state = STATE_KEY
        } else { inquote = true }
        continue
      } else { escaping = false }
    } else {
      // A backslash only escapes inside quotes; otherwise re-emit it.
      if (escaping && inquote) { tmp += '\\' }
      escaping = false
      if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") {
        // Single quotes delimit the charset'lang' prefix of an extended value.
        if (state === STATE_CHARSET) {
          state = STATE_LANG
          // Strip the leading '=' that was accumulated right after the '*'.
          charset = tmp.substring(1)
        } else { state = STATE_VALUE }
        tmp = ''
        continue
      } else if (state === STATE_KEY &&
        (char === '*' || char === '=') &&
        res.length) {
        // '=' terminates a parameter name; '*' marks an RFC 2231 extended
        // parameter whose value carries a charset/language prefix.
        state = char === '*'
          ? STATE_CHARSET
          : STATE_VALUE
        res[p] = [tmp, undefined]
        tmp = ''
        continue
      } else if (!inquote && char === ';') {
        // ';' finishes the current parameter: decode and store its value.
        state = STATE_KEY
        if (charset) {
          if (tmp.length) {
            // Extended value: undo %xx escapes, then convert from charset.
            tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
              'binary',
              charset)
          }
          charset = ''
        } else if (tmp.length) {
          tmp = decodeText(tmp, 'binary', 'utf8')
        }
        if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
        tmp = ''
        ++p
        continue
      } else if (!inquote && (char === ' ' || char === '\t')) { continue }
    }
    tmp += char
  }
  // Flush the final parameter (same decode logic as the ';' branch above).
  if (charset && tmp.length) {
    tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
      'binary',
      charset)
  } else if (tmp) {
    tmp = decodeText(tmp, 'binary', 'utf8')
  }
  if (res[p] === undefined) {
    if (tmp) { res[p] = tmp }
  } else { res[p][1] = tmp }
  return res
}

module.exports = parseParams
// 2022-10-03 18:04:49 +01:00  -- stray VCS timestamp (not code); commented out
// 2019-12-03 10:28:59 -05:00  -- stray VCS timestamp (not code); commented out
/***/ } )
// 2023-03-09 17:42:29 +01:00  -- stray VCS timestamp (not code); commented out
/******/ } ) ;
/************************************************************************/
/******/ // The module cache
/******/ var __webpack_module_cache__ = {};
/******/
/******/ // The require function
/******/ function __nccwpck_require__(moduleId) {
/******/ // Check if module is in cache
/******/ var cachedModule = __webpack_module_cache__[moduleId];
/******/ if (cachedModule !== undefined) {
/******/ return cachedModule.exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = __webpack_module_cache__[moduleId] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ var threw = true;
/******/ try {
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
/******/ threw = false;
/******/ } finally {
/******/ // Evict the cache entry if the factory threw, so a later require
/******/ // can retry instead of returning a half-initialized module.
/******/ if (threw) delete __webpack_module_cache__[moduleId];
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
/******/
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
/******/
/************************************************************************/
/******/
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
/******/ var __webpack_exports__ = __nccwpck_require__(3109);
/******/ module.exports = __webpack_exports__;
/******/
/******/ } ) ( )
;