module.exports =
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({

/***/ 8527:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.configAuthentication = void 0;
const fs = __importStar(__nccwpck_require__(5747));
const os = __importStar(__nccwpck_require__(2087));
const path = __importStar(__nccwpck_require__(5622));
const core = __importStar(__nccwpck_require__(2186));
const github = __importStar(__nccwpck_require__(5438));
function configAuthentication(registryUrl, alwaysAuth) {
    const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc');
    if (!registryUrl.endsWith('/')) {
        registryUrl += '/';
    }
    writeRegistryToFile(registryUrl, npmrc, alwaysAuth);
}
exports.configAuthentication = configAuthentication;
function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) {
    let scope = core.getInput('scope');
    if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) {
        scope = github.context.repo.owner;
    }
    if (scope && scope[0] != '@') {
        scope = '@' + scope;
    }
    if (scope) {
        scope = scope.toLowerCase();
    }
    core.debug(`Setting auth in ${fileLocation}`);
    let newContents = '';
    if (fs.existsSync(fileLocation)) {
        const curContents = fs.readFileSync(fileLocation, 'utf8');
        curContents.split(os.EOL).forEach((line) => {
            // Add current contents unless they are setting the registry
            if (!line.toLowerCase().startsWith('registry')) {
                newContents += line + os.EOL;
            }
        });
    }
    // Remove http: or https: from front of registry.
    const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}';
    const registryString = scope
        ? `${scope}:registry=${registryUrl}`
        : `registry=${registryUrl}`;
    const alwaysAuthString = `always-auth=${alwaysAuth}`;
    newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`;
    fs.writeFileSync(fileLocation, newContents);
    core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation);
    // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it
    core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX');
}
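// Illustrative example (assumed values, not taken from the bundle): for a
// registryUrl of 'https://registry.npmjs.org/', a scope input of 'octocat'
// and always-auth 'false', the .npmrc written above would contain roughly:
//
//   //registry.npmjs.org/:_authToken=${NODE_AUTH_TOKEN}
//   @octocat:registry=https://registry.npmjs.org/
//   always-auth=false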
//# sourceMappingURL=authutil.js.map
/***/ }),

/***/ 1480:
/***/ (function(module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getVersionsFromDist = exports.getNode = void 0;
const os = __nccwpck_require__(2087);
const assert = __importStar(__nccwpck_require__(2357));
const core = __importStar(__nccwpck_require__(2186));
const hc = __importStar(__nccwpck_require__(9925));
const io = __importStar(__nccwpck_require__(7436));
const tc = __importStar(__nccwpck_require__(7784));
const path = __importStar(__nccwpck_require__(5622));
const semver = __importStar(__nccwpck_require__(5911));
const fs = __nccwpck_require__(5747);
function getNode(versionSpec, stable, checkLatest, auth, arch = os.arch()) {
    return __awaiter(this, void 0, void 0, function* () {
        let osPlat = os.platform();
        let osArch = translateArchToDistUrl(arch);
        if (checkLatest) {
            core.info('Attempt to resolve the latest version from manifest...');
            const resolvedVersion = yield resolveVersionFromManifest(versionSpec, stable, auth, osArch);
            if (resolvedVersion) {
                versionSpec = resolvedVersion;
                core.info(`Resolved as '${versionSpec}'`);
            }
            else {
                core.info(`Failed to resolve version ${versionSpec} from manifest`);
            }
        }
        // check cache
        let toolPath;
        toolPath = tc.find('node', versionSpec, osArch);
        // If not found in cache, download
        if (toolPath) {
            core.info(`Found in cache @ ${toolPath}`);
        }
        else {
            core.info(`Attempting to download ${versionSpec}...`);
            let downloadPath = '';
            let info = null;
            //
            // Try download from internal distribution (popular versions only)
            //
            try {
                info = yield getInfoFromManifest(versionSpec, stable, auth, osArch);
                if (info) {
                    core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`);
                    downloadPath = yield tc.downloadTool(info.downloadUrl, undefined, auth);
                }
                else {
                    core.info('Not found in manifest. Falling back to download directly from Node');
                }
            }
            catch (err) {
                // Rate limit?
                if (err instanceof tc.HTTPError &&
                    (err.httpStatusCode === 403 || err.httpStatusCode === 429)) {
                    core.info(`Received HTTP status code ${err.httpStatusCode}. This usually indicates the rate limit has been exceeded`);
                }
                else {
                    core.info(err.message);
                }
                core.debug(err.stack);
                core.info('Falling back to download directly from Node');
            }
            //
            // Download from nodejs.org
            //
            if (!downloadPath) {
                info = yield getInfoFromDist(versionSpec, arch);
                if (!info) {
                    throw new Error(`Unable to find Node version '${versionSpec}' for platform ${osPlat} and architecture ${osArch}.`);
                }
                core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`);
                try {
                    downloadPath = yield tc.downloadTool(info.downloadUrl);
                }
                catch (err) {
                    if (err instanceof tc.HTTPError && err.httpStatusCode == 404) {
                        return yield acquireNodeFromFallbackLocation(info.resolvedVersion, info.arch);
                    }
                    throw err;
                }
            }
            //
            // Extract
            //
            core.info('Extracting ...');
            let extPath;
            info = info || {}; // satisfy compiler, never null when reaches here
            if (osPlat == 'win32') {
                let _7zPath = path.join(__dirname, '..', 'externals', '7zr.exe');
                extPath = yield tc.extract7z(downloadPath, undefined, _7zPath);
                // 7z extracts to folder matching file name
                let nestedPath = path.join(extPath, path.basename(info.fileName, '.7z'));
                if (fs.existsSync(nestedPath)) {
                    extPath = nestedPath;
                }
            }
            else {
                extPath = yield tc.extractTar(downloadPath, undefined, [
                    'xz',
                    '--strip',
                    '1'
                ]);
            }
            //
            // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded
            //
            core.info('Adding to the cache ...');
            toolPath = yield tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch);
            core.info('Done');
        }
        //
        // a tool installer intimately knows details about the layout of that tool
        // for example, node binary is in the bin folder after the extract on Mac/Linux.
        // layouts could change by version, by platform etc... but that's the tool installer's job
        //
        if (osPlat != 'win32') {
            toolPath = path.join(toolPath, 'bin');
        }
        //
        // prepend the tools path. instructs the agent to prepend for future tasks
        core.addPath(toolPath);
    });
}
exports.getNode = getNode;
function getInfoFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch())) {
    return __awaiter(this, void 0, void 0, function* () {
        let info = null;
        const releases = yield tc.getManifestFromRepo('actions', 'node-versions', auth, 'main');
        const rel = yield tc.findFromManifest(versionSpec, stable, releases, osArch);
        if (rel && rel.files.length > 0) {
            info = {};
            info.resolvedVersion = rel.version;
            info.arch = rel.files[0].arch;
            info.downloadUrl = rel.files[0].download_url;
            info.fileName = rel.files[0].filename;
        }
        return info;
    });
}
function getInfoFromDist(versionSpec, arch = os.arch()) {
    return __awaiter(this, void 0, void 0, function* () {
        let osPlat = os.platform();
        let osArch = translateArchToDistUrl(arch);
        let version;
        version = yield queryDistForMatch(versionSpec, arch);
        if (!version) {
            return null;
        }
        //
        // Download - a tool installer intimately knows how to get the tool (and construct urls)
        //
        version = semver.clean(version) || '';
        let fileName = osPlat == 'win32'
            ? `node-v${version}-win-${osArch}`
            : `node-v${version}-${osPlat}-${osArch}`;
        let urlFileName = osPlat == 'win32' ? `${fileName}.7z` : `${fileName}.tar.gz`;
        let url = `https://nodejs.org/dist/v${version}/${urlFileName}`;
        return {
            downloadUrl: url,
            resolvedVersion: version,
            arch: arch,
            fileName: fileName
        };
    });
}
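// Illustrative example (assumed values): for a spec that resolves to '14.15.4'
// on linux/x64, the object returned above would carry fileName
// 'node-v14.15.4-linux-x64' and downloadUrl
// 'https://nodejs.org/dist/v14.15.4/node-v14.15.4-linux-x64.tar.gz'.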
function resolveVersionFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch())) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            const info = yield getInfoFromManifest(versionSpec, stable, auth, osArch);
            return info === null || info === void 0 ? void 0 : info.resolvedVersion;
        }
        catch (err) {
            core.info('Unable to resolve version from manifest...');
            core.debug(err.message);
        }
    });
}
// TODO - should we just export this from @actions/tool-cache? Lifted directly from there
function evaluateVersions(versions, versionSpec) {
    let version = '';
    core.debug(`evaluating ${versions.length} versions`);
    versions = versions.sort((a, b) => {
        if (semver.gt(a, b)) {
            return 1;
        }
        return -1;
    });
    for (let i = versions.length - 1; i >= 0; i--) {
        const potential = versions[i];
        const satisfied = semver.satisfies(potential, versionSpec);
        if (satisfied) {
            version = potential;
            break;
        }
    }
    if (version) {
        core.debug(`matched: ${version}`);
    }
    else {
        core.debug('match not found');
    }
    return version;
}
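// Illustrative example (assumed values): given versions
// ['12.20.1', '15.8.0', '14.15.4'] and versionSpec '14.x', evaluateVersions
// sorts ascending and returns '14.15.4', the highest version satisfying the spec.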
function queryDistForMatch(versionSpec, arch = os.arch()) {
    return __awaiter(this, void 0, void 0, function* () {
        let osPlat = os.platform();
        let osArch = translateArchToDistUrl(arch);
        // node offers a json list of versions
        let dataFileName;
        switch (osPlat) {
            case 'linux':
                dataFileName = `linux-${osArch}`;
                break;
            case 'darwin':
                dataFileName = `osx-${osArch}-tar`;
                break;
            case 'win32':
                dataFileName = `win-${osArch}-exe`;
                break;
            default:
                throw new Error(`Unexpected OS '${osPlat}'`);
        }
        let versions = [];
        let nodeVersions = yield module.exports.getVersionsFromDist();
        nodeVersions.forEach((nodeVersion) => {
            // ensure this version supports your os and platform
            if (nodeVersion.files.indexOf(dataFileName) >= 0) {
                versions.push(nodeVersion.version);
            }
        });
        // get the latest version that matches the version spec
        let version = evaluateVersions(versions, versionSpec);
        return version;
    });
}
function getVersionsFromDist() {
    return __awaiter(this, void 0, void 0, function* () {
        let dataUrl = 'https://nodejs.org/dist/index.json';
        let httpClient = new hc.HttpClient('setup-node', [], {
            allowRetries: true,
            maxRetries: 3
        });
        let response = yield httpClient.getJson(dataUrl);
        return response.result || [];
    });
}
exports.getVersionsFromDist = getVersionsFromDist;
// For non LTS versions of Node, the files we need (for Windows) are sometimes located
// in a different folder than they normally are for other versions.
// Normally the format is similar to: https://nodejs.org/dist/v5.10.1/node-v5.10.1-win-x64.7z
// In this case, there will be two files located at:
//      /dist/v5.10.1/win-x64/node.exe
//      /dist/v5.10.1/win-x64/node.lib
// If this is not the structure, there may also be two files located at:
//      /dist/v0.12.18/node.exe
//      /dist/v0.12.18/node.lib
// This method attempts to download and cache the resources from these alternative locations.
// Note also that the files are normally zipped but in this case they are just an exe
// and lib file in a folder, not zipped.
function acquireNodeFromFallbackLocation(version, arch = os.arch()) {
    return __awaiter(this, void 0, void 0, function* () {
        let osPlat = os.platform();
        let osArch = translateArchToDistUrl(arch);
        // Create temporary folder to download in to
        const tempDownloadFolder = 'temp_' + Math.floor(Math.random() * 2000000000);
        const tempDirectory = process.env['RUNNER_TEMP'] || '';
        assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
        const tempDir = path.join(tempDirectory, tempDownloadFolder);
        yield io.mkdirP(tempDir);
        let exeUrl;
        let libUrl;
        try {
            exeUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.exe`;
            libUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.lib`;
            core.info(`Downloading only node binary from ${exeUrl}`);
            const exePath = yield tc.downloadTool(exeUrl);
            yield io.cp(exePath, path.join(tempDir, 'node.exe'));
            const libPath = yield tc.downloadTool(libUrl);
            yield io.cp(libPath, path.join(tempDir, 'node.lib'));
        }
        catch (err) {
            if (err instanceof tc.HTTPError && err.httpStatusCode == 404) {
                exeUrl = `https://nodejs.org/dist/v${version}/node.exe`;
                libUrl = `https://nodejs.org/dist/v${version}/node.lib`;
                const exePath = yield tc.downloadTool(exeUrl);
                yield io.cp(exePath, path.join(tempDir, 'node.exe'));
                const libPath = yield tc.downloadTool(libUrl);
                yield io.cp(libPath, path.join(tempDir, 'node.lib'));
            }
            else {
                throw err;
            }
        }
        let toolPath = yield tc.cacheDir(tempDir, 'node', version, arch);
        core.addPath(toolPath);
        return toolPath;
    });
}
// os.arch does not always match the relative download url, e.g.
// os.arch == 'arm' != node-v12.13.1-linux-armv7l.tar.gz
// All other currently supported architectures match, e.g.:
// os.arch = arm64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-arm64.tar.gz
// os.arch = x64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-x64.tar.gz
function translateArchToDistUrl(arch) {
    switch (arch) {
        case 'arm':
            return 'armv7l';
        default:
            return arch;
    }
}
//# sourceMappingURL=installer.js.map

/***/ }),

/***/ 3109:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.run = void 0;
const core = __importStar(__nccwpck_require__(2186));
const installer = __importStar(__nccwpck_require__(1480));
const auth = __importStar(__nccwpck_require__(8527));
const path = __importStar(__nccwpck_require__(5622));
const url_1 = __nccwpck_require__(8835);
const os = __nccwpck_require__(2087);
function run() {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            //
            // Version is optional. If supplied, install / use from the tool cache
            // If not supplied then task is still used to setup proxy, auth, etc...
            //
            let version = core.getInput('node-version');
            if (!version) {
                version = core.getInput('version');
            }
            let arch = core.getInput('architecture');
            // if architecture is supplied but node-version is not, warn the user:
            // otherwise the already-installed x64 Node is used silently, which is probably not what they meant.
            if (arch && !version) {
                core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`');
            }
            if (!arch) {
                arch = os.arch();
            }
            if (version) {
                let token = core.getInput('token');
                let auth = !token || isGhes() ? undefined : `token ${token}`;
                let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE';
                const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE';
                yield installer.getNode(version, stable, checkLatest, auth, arch);
            }
            const registryUrl = core.getInput('registry-url');
            const alwaysAuth = core.getInput('always-auth');
            if (registryUrl) {
                auth.configAuthentication(registryUrl, alwaysAuth);
            }
            const matchersPath = path.join(__dirname, '..', '.github');
            core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`);
            core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`);
            core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`);
        }
        catch (error) {
            core.setFailed(error.message);
        }
    });
}
exports.run = run;
function isGhes() {
    const ghUrl = new url_1.URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
    return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}
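// Illustrative example (assumed values): with GITHUB_SERVER_URL unset or set to
// 'https://github.com', isGhes() returns false; for a GitHub Enterprise Server
// URL such as 'https://github.example.com' it returns true.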
//# sourceMappingURL=main.js.map

/***/ }),

/***/ 9367:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
const main_1 = __nccwpck_require__(3109);
main_1.run();
//# sourceMappingURL=setup-node.js.map

/***/ }),

/***/ 7351:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const os = __importStar(__nccwpck_require__(2087));
const utils_1 = __nccwpck_require__(5278);
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
let first = true ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
if ( first ) {
first = false ;
}
else {
cmdStr += ',' ;
}
cmdStr += ` ${ key } = ${ escapeProperty ( val ) } ` ;
}
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
function escapeData ( s ) {
return utils _1 . toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' ) ;
}
function escapeProperty ( s ) {
return utils _1 . toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /:/g , '%3A' )
. replace ( /,/g , '%2C' ) ;
}
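// Illustrative example (assumed values): escapeProperty('warn: 50%,\ndone')
// yields 'warn%3A 50%25%2C%0Adone', so property values survive the ::command:: syntax.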
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 2186 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const command _1 = _ _nccwpck _require _ _ ( 7351 ) ;
const file _command _1 = _ _nccwpck _require _ _ ( 717 ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2087 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 5622 ) ) ;
/ * *
* The code to exit an action
* /
var ExitCode ;
( function ( ExitCode ) {
/ * *
* A code indicating that the action was successful
* /
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/ * *
* A code indicating that the action was a failure
* /
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/ * *
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
* @ param val the value of the variable . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable ( name , val ) {
const convertedVal = utils _1 . toCommandValue ( val ) ;
process . env [ name ] = convertedVal ;
const filePath = process . env [ 'GITHUB_ENV' ] || '' ;
if ( filePath ) {
const delimiter = '_GitHubActionsFileCommandDelimeter_' ;
const commandValue = ` ${ name } << ${ delimiter } ${ os . EOL } ${ convertedVal } ${ os . EOL } ${ delimiter } ` ;
file _command _1 . issueCommand ( 'ENV' , commandValue ) ;
    }
    else {
        command_1.issueCommand('set-env', { name }, convertedVal);
    }
}
exports . exportVariable = exportVariable ;
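// Illustrative example (assumed values): when GITHUB_ENV is available, exporting
// MY_VAR='some value' appends the following heredoc-style block to that file:
//
//   MY_VAR<<_GitHubActionsFileCommandDelimeter_
//   some value
//   _GitHubActionsFileCommandDelimeter_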
/ * *
* Registers a secret which will get masked from logs
* @ param secret value of the secret
* /
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
}
exports . setSecret = setSecret ;
/ * *
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
* /
function addPath ( inputPath ) {
const filePath = process . env [ 'GITHUB_PATH' ] || '' ;
if ( filePath ) {
file _command _1 . issueCommand ( 'PATH' , inputPath ) ;
    }
    else {
        command_1.issueCommand('add-path', {}, inputPath);
    }
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
}
exports . addPath = addPath ;
/ * *
* Gets the value of an input . The value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
* /
function getInput ( name , options ) {
const val = process . env [ ` INPUT_ ${ name . replace ( / /g , '_' ) . toUpperCase ( ) } ` ] || '' ;
if ( options && options . required && ! val ) {
throw new Error ( ` Input required and not supplied: ${ name } ` ) ;
}
return val . trim ( ) ;
}
exports . getInput = getInput ;
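// Illustrative example (assumed values): core.getInput('node-version') reads
// process.env['INPUT_NODE-VERSION'] (only spaces are replaced by underscores)
// and returns the trimmed value, or '' when the input is not set.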
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput ( name , value ) {
command _1 . issueCommand ( 'set-output' , { name } , value ) ;
}
exports . setOutput = setOutput ;
/ * *
* Enables or disables the echoing of commands into stdout for the rest of the step .
* Echoing is disabled by default if ACTIONS _STEP _DEBUG is not set .
*
* /
function setCommandEcho ( enabled ) {
command _1 . issue ( 'echo' , enabled ? 'on' : 'off' ) ;
}
exports . setCommandEcho = setCommandEcho ;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/ * *
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message add error issue message
* /
function setFailed ( message ) {
process . exitCode = ExitCode . Failure ;
error ( message ) ;
}
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Gets whether Actions Step Debug is on or not
* /
function isDebug ( ) {
return process . env [ 'RUNNER_DEBUG' ] === '1' ;
}
exports . isDebug = isDebug ;
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
* @ param message error issue message . Errors will be converted to string via toString ( )
* /
function error ( message ) {
command _1 . issue ( 'error' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . error = error ;
/ * *
* Adds an warning issue
* @ param message warning issue message . Errors will be converted to string via toString ( )
* /
function warning ( message ) {
command _1 . issue ( 'warning' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . warning = warning ;
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/ * *
* Begin an output group .
*
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
* /
function startGroup ( name ) {
command _1 . issue ( 'group' , name ) ;
}
exports . startGroup = startGroup ;
/ * *
* End an output group .
* /
function endGroup ( ) {
command _1 . issue ( 'endgroup' ) ;
}
exports . endGroup = endGroup ;
/ * *
* Wrap an asynchronous function call in a group .
*
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
* /
function group ( name , fn ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
startGroup ( name ) ;
let result ;
try {
result = yield fn ( ) ;
        }
        finally {
            endGroup();
        }
        return result;
    });
}
exports . group = group ;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param     name     name of the state to store
 * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState ( name , value ) {
command _1 . issueCommand ( 'save-state' , { name } , value ) ;
}
exports . saveState = saveState ;
/ * *
* Gets the value of an state set by this action ' s main execution .
*
* @ param name name of the state to get
* @ returns string
* /
function getState ( name ) {
return process . env [ ` STATE_ ${ name } ` ] || '' ;
}
exports . getState = getState ;
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 717 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
// For internal use, subject to change.
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 5747 ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2087 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 5278 ) ;
function issueCommand ( command , message ) {
const filePath = process . env [ ` GITHUB_ ${ command } ` ] ;
if ( ! filePath ) {
throw new Error ( ` Unable to find environment variable for file command ${ command } ` ) ;
}
if ( ! fs . existsSync ( filePath ) ) {
throw new Error ( ` Missing file at path: ${ filePath } ` ) ;
}
fs . appendFileSync ( filePath , ` ${ utils _1 . toCommandValue ( message ) } ${ os . EOL } ` , {
encoding : 'utf8'
} ) ;
}
exports . issueCommand = issueCommand ;
//# sourceMappingURL=file-command.js.map
/***/ } ) ,
/***/ 5278 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
/ * *
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
* /
function toCommandValue ( input ) {
if ( input === null || input === undefined ) {
return '' ;
}
else if ( typeof input === 'string' || input instanceof String ) {
return input ;
}
return JSON . stringify ( input ) ;
}
exports . toCommandValue = toCommandValue ;
//# sourceMappingURL=utils.js.map

/***/ }),

/***/ 1514:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const tr = _ _importStar ( _ _nccwpck _require _ _ ( 8159 ) ) ;
/ * *
* Exec a command .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < number > exit code
* /
function exec ( commandLine , args , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const commandArgs = tr . argStringToArray ( commandLine ) ;
if ( commandArgs . length === 0 ) {
throw new Error ( ` Parameter 'commandLine' cannot be null or empty. ` ) ;
}
// Path to tool to execute should be first arg
const toolPath = commandArgs [ 0 ] ;
args = commandArgs . slice ( 1 ) . concat ( args || [ ] ) ;
const runner = new tr . ToolRunner ( toolPath , args , options ) ;
return runner . exec ( ) ;
} ) ;
}
exports . exec = exec ;
//# sourceMappingURL=exec.js.map

/***/ }),

/***/ 8159:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
    return result;
};
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2087 ) ) ;
const events = _ _importStar ( _ _nccwpck _require _ _ ( 8614 ) ) ;
const child = _ _importStar ( _ _nccwpck _require _ _ ( 3129 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 5622 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const ioUtil = _ _importStar ( _ _nccwpck _require _ _ ( 1962 ) ) ;
/* eslint-disable @typescript-eslint/unbound-method */
const IS _WINDOWS = process . platform === 'win32' ;
/ *
* Class for running command line tools . Handles quoting and arg parsing in a platform agnostic way .
* /
class ToolRunner extends events . EventEmitter {
constructor ( toolPath , args , options ) {
super ( ) ;
if ( ! toolPath ) {
throw new Error ( "Parameter 'toolPath' cannot be null or empty." ) ;
}
this . toolPath = toolPath ;
this . args = args || [ ] ;
this . options = options || { } ;
}
_debug ( message ) {
if ( this . options . listeners && this . options . listeners . debug ) {
this . options . listeners . debug ( message ) ;
}
}
_getCommandString ( options , noPrefix ) {
const toolPath = this . _getSpawnFileName ( ) ;
const args = this . _getSpawnArgs ( options ) ;
let cmd = noPrefix ? '' : '[command]' ; // omit prefix when piped to a second tool
if ( IS _WINDOWS ) {
// Windows + cmd file
if ( this . _isCmdFile ( ) ) {
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
// Windows + verbatim
else if ( options . windowsVerbatimArguments ) {
cmd += ` " ${ toolPath } " ` ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
// Windows (regular)
else {
cmd += this . _windowsQuoteCmdArg ( toolPath ) ;
for ( const a of args ) {
cmd += ` ${ this . _windowsQuoteCmdArg ( a ) } ` ;
}
}
}
else {
// OSX/Linux - this can likely be improved with some form of quoting.
// creating processes on Unix is fundamentally different than Windows.
// on Unix, execvp() takes an arg array.
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
return cmd ;
}
_processLineBuffer ( data , strBuffer , onLine ) {
try {
let s = strBuffer + data . toString ( ) ;
let n = s . indexOf ( os . EOL ) ;
while ( n > - 1 ) {
const line = s . substring ( 0 , n ) ;
onLine ( line ) ;
// the rest of the string ...
s = s . substring ( n + os . EOL . length ) ;
n = s . indexOf ( os . EOL ) ;
}
strBuffer = s ;
}
catch ( err ) {
// streaming lines to console is best effort. Don't fail a build.
this . _debug ( ` error processing line. Failed with error ${ err } ` ) ;
}
}
_getSpawnFileName ( ) {
if ( IS _WINDOWS ) {
if ( this . _isCmdFile ( ) ) {
return process . env [ 'COMSPEC' ] || 'cmd.exe' ;
}
}
return this . toolPath ;
}
_getSpawnArgs ( options ) {
if ( IS _WINDOWS ) {
if ( this . _isCmdFile ( ) ) {
let argline = ` /D /S /C " ${ this . _windowsQuoteCmdArg ( this . toolPath ) } ` ;
for ( const a of this . args ) {
argline += ' ' ;
argline += options . windowsVerbatimArguments
? a
: this . _windowsQuoteCmdArg ( a ) ;
}
argline += '"' ;
return [ argline ] ;
}
}
return this . args ;
}
_endsWith ( str , end ) {
return str . endsWith ( end ) ;
}
_isCmdFile ( ) {
const upperToolPath = this . toolPath . toUpperCase ( ) ;
return ( this . _endsWith ( upperToolPath , '.CMD' ) ||
this . _endsWith ( upperToolPath , '.BAT' ) ) ;
}
_windowsQuoteCmdArg ( arg ) {
// for .exe, apply the normal quoting rules that libuv applies
if ( ! this . _isCmdFile ( ) ) {
return this . _uvQuoteCmdArg ( arg ) ;
}
// otherwise apply quoting rules specific to the cmd.exe command line parser.
// the libuv rules are generic and are not designed specifically for cmd.exe
// command line parser.
//
// for a detailed description of the cmd.exe command line parser, refer to
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
// need quotes for empty arg
if ( ! arg ) {
return '""' ;
}
// determine whether the arg needs to be quoted
const cmdSpecialChars = [
' ' ,
'\t' ,
'&' ,
'(' ,
')' ,
'[' ,
']' ,
'{' ,
'}' ,
'^' ,
'=' ,
';' ,
'!' ,
"'" ,
'+' ,
',' ,
'`' ,
'~' ,
'|' ,
'<' ,
'>' ,
'"'
] ;
let needsQuotes = false ;
for ( const char of arg ) {
if ( cmdSpecialChars . some ( x => x === char ) ) {
needsQuotes = true ;
break ;
}
}
// short-circuit if quotes not needed
if ( ! needsQuotes ) {
return arg ;
}
// the following quoting rules are very similar to the rules that by libuv applies.
//
// 1) wrap the string in quotes
//
// 2) double-up quotes - i.e. " => ""
//
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
// doesn't work well with a cmd.exe command line.
//
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
// for example, the command line:
// foo.exe "myarg:""my val"""
// is parsed by a .NET console app into an arg array:
// [ "myarg:\"my val\"" ]
// which is the same end result when applying libuv quoting rules. although the actual
// command line from libuv quoting rules would look like:
// foo.exe "myarg:\"my val\""
//
// 3) double-up slashes that precede a quote,
// e.g. hello \world => "hello \world"
// hello\"world => "hello\\""world"
// hello\\"world => "hello\\\\""world"
// hello world\ => "hello world\\"
//
// technically this is not required for a cmd.exe command line, or the batch argument parser.
// the reasons for including this as a .cmd quoting rule are:
//
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
//
// b) it's what we've been doing previously (by deferring to node default behavior) and we
// haven't heard any complaints about that aspect.
//
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
// by using %%.
//
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
//
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
// to an external program.
//
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
// % can be escaped within a .cmd file.
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ; // double the slash
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '"' ; // double the quote
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_uvQuoteCmdArg ( arg ) {
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
// is used.
//
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
// pasting copyright notice from Node within this function:
//
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
if ( ! arg ) {
// Need double quotation for empty argument
return '""' ;
}
if ( ! arg . includes ( ' ' ) && ! arg . includes ( '\t' ) && ! arg . includes ( '"' ) ) {
// No quotation needed
return arg ;
}
if ( ! arg . includes ( '"' ) && ! arg . includes ( '\\' ) ) {
// No embedded double quotes or backslashes, so I can just wrap
// quote marks around the whole thing.
return ` " ${ arg } " ` ;
}
// Expected input/output:
// input : hello"world
// output: "hello\"world"
// input : hello""world
// output: "hello\"\"world"
// input : hello\world
// output: hello\world
// input : hello\\world
// output: hello\\world
// input : hello\"world
// output: "hello\\\"world"
// input : hello\\"world
// output: "hello\\\\\"world"
// input : hello world\
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
// but it appears the comment is wrong, it should be "hello world\\"
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ;
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '\\' ;
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_cloneExecOptions ( options ) {
options = options || { } ;
const result = {
cwd : options . cwd || process . cwd ( ) ,
env : options . env || process . env ,
silent : options . silent || false ,
windowsVerbatimArguments : options . windowsVerbatimArguments || false ,
failOnStdErr : options . failOnStdErr || false ,
ignoreReturnCode : options . ignoreReturnCode || false ,
delay : options . delay || 10000
} ;
result . outStream = options . outStream || process . stdout ;
result . errStream = options . errStream || process . stderr ;
return result ;
}
_getSpawnOptions ( options , toolPath ) {
options = options || { } ;
const result = { } ;
result . cwd = options . cwd ;
result . env = options . env ;
result [ 'windowsVerbatimArguments' ] =
options . windowsVerbatimArguments || this . _isCmdFile ( ) ;
if ( options . windowsVerbatimArguments ) {
result . argv0 = ` " ${ toolPath } " ` ;
}
return result ;
}
/ * *
* Exec a tool .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param tool path to tool to exec
* @ param options optional exec options . See ExecOptions
* @ returns number
* /
exec ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// root the tool path if it is unrooted and contains relative pathing
if ( ! ioUtil . isRooted ( this . toolPath ) &&
( this . toolPath . includes ( '/' ) ||
( IS _WINDOWS && this . toolPath . includes ( '\\' ) ) ) ) {
// prefer options.cwd if it is specified, however options.cwd may also need to be rooted
this . toolPath = path . resolve ( process . cwd ( ) , this . options . cwd || process . cwd ( ) , this . toolPath ) ;
}
// if the tool is only a file name, then resolve it from the PATH
// otherwise verify it exists (add extension on Windows if necessary)
this . toolPath = yield io . which ( this . toolPath , true ) ;
return new Promise ( ( resolve , reject ) => {
this . _debug ( ` exec tool: ${ this . toolPath } ` ) ;
this . _debug ( 'arguments:' ) ;
for ( const arg of this . args ) {
this . _debug ( ` ${ arg } ` ) ;
}
const optionsNonNull = this . _cloneExecOptions ( this . options ) ;
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( this . _getCommandString ( optionsNonNull ) + os . EOL ) ;
}
const state = new ExecState ( optionsNonNull , this . toolPath ) ;
state . on ( 'debug' , ( message ) => {
this . _debug ( message ) ;
} ) ;
const fileName = this . _getSpawnFileName ( ) ;
const cp = child . spawn ( fileName , this . _getSpawnArgs ( optionsNonNull ) , this . _getSpawnOptions ( this . options , fileName ) ) ;
const stdbuffer = '' ;
if ( cp . stdout ) {
cp . stdout . on ( 'data' , ( data ) => {
if ( this . options . listeners && this . options . listeners . stdout ) {
this . options . listeners . stdout ( data ) ;
}
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( data ) ;
}
this . _processLineBuffer ( data , stdbuffer , ( line ) => {
if ( this . options . listeners && this . options . listeners . stdline ) {
this . options . listeners . stdline ( line ) ;
}
} ) ;
} ) ;
}
const errbuffer = '' ;
if ( cp . stderr ) {
cp . stderr . on ( 'data' , ( data ) => {
state . processStderr = true ;
if ( this . options . listeners && this . options . listeners . stderr ) {
this . options . listeners . stderr ( data ) ;
}
if ( ! optionsNonNull . silent &&
optionsNonNull . errStream &&
optionsNonNull . outStream ) {
const s = optionsNonNull . failOnStdErr
? optionsNonNull . errStream
: optionsNonNull . outStream ;
s . write ( data ) ;
}
this . _processLineBuffer ( data , errbuffer , ( line ) => {
if ( this . options . listeners && this . options . listeners . errline ) {
this . options . listeners . errline ( line ) ;
}
} ) ;
} ) ;
}
cp . on ( 'error' , ( err ) => {
state . processError = err . message ;
state . processExited = true ;
state . processClosed = true ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'exit' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
this . _debug ( ` Exit code ${ code } received from tool ' ${ this . toolPath } ' ` ) ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'close' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
state . processClosed = true ;
this . _debug ( ` STDIO streams have closed for tool ' ${ this . toolPath } ' ` ) ;
state . CheckComplete ( ) ;
} ) ;
state . on ( 'done' , ( error , exitCode ) => {
if ( stdbuffer . length > 0 ) {
this . emit ( 'stdline' , stdbuffer ) ;
}
if ( errbuffer . length > 0 ) {
this . emit ( 'errline' , errbuffer ) ;
}
cp . removeAllListeners ( ) ;
if ( error ) {
reject ( error ) ;
}
else {
resolve ( exitCode ) ;
}
} ) ;
if ( this . options . input ) {
if ( ! cp . stdin ) {
throw new Error ( 'child process missing stdin' ) ;
}
cp . stdin . end ( this . options . input ) ;
}
} ) ;
} ) ;
}
}
exports . ToolRunner = ToolRunner ;
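// Illustrative usage sketch (not part of the bundled source): how the ToolRunner class
// exported above is typically driven. The tool name, arguments and listener are
// assumptions chosen for the example.
//
//   const runner = new ToolRunner('git', ['status', '--short'], {
//     cwd: process.cwd(),
//     listeners: { stdline: (line) => console.log(`git: ${line}`) }
//   });
//   const exitCode = await runner.exec(); // resolves with the process exit code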
/ * *
* Convert an arg string to an array of args . Handles escaping
*
* @ param argString string of arguments
* @ returns string [ ] array of arguments
* /
function argStringToArray ( argString ) {
const args = [ ] ;
let inQuotes = false ;
let escaped = false ;
let arg = '' ;
function append ( c ) {
// we only escape double quotes.
if ( escaped && c !== '"' ) {
arg += '\\' ;
}
arg += c ;
escaped = false ;
}
for ( let i = 0 ; i < argString . length ; i ++ ) {
const c = argString . charAt ( i ) ;
if ( c === '"' ) {
if ( ! escaped ) {
inQuotes = ! inQuotes ;
}
else {
append ( c ) ;
}
continue ;
}
if ( c === '\\' && escaped ) {
append ( c ) ;
continue ;
}
if ( c === '\\' && inQuotes ) {
escaped = true ;
continue ;
}
if ( c === ' ' && ! inQuotes ) {
if ( arg . length > 0 ) {
args . push ( arg ) ;
arg = '' ;
}
continue ;
}
append ( c ) ;
}
if ( arg . length > 0 ) {
args . push ( arg . trim ( ) ) ;
}
return args ;
}
exports . argStringToArray = argStringToArray ;
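// Behavior sketch for argStringToArray (derived from the parser above): double quotes
// group words containing spaces, and a backslash escapes only a double quote inside quotes.
//
//   argStringToArray('node "my script.js" --message "say \\"hi\\""');
//   // => ['node', 'my script.js', '--message', 'say "hi"']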
class ExecState extends events . EventEmitter {
constructor ( options , toolPath ) {
super ( ) ;
this . processClosed = false ; // tracks whether the process has exited and stdio is closed
this . processError = '' ;
this . processExitCode = 0 ;
this . processExited = false ; // tracks whether the process has exited
this . processStderr = false ; // tracks whether stderr was written to
this . delay = 10000 ; // 10 seconds
this . done = false ;
this . timeout = null ;
if ( ! toolPath ) {
throw new Error ( 'toolPath must not be empty' ) ;
}
this . options = options ;
this . toolPath = toolPath ;
if ( options . delay ) {
this . delay = options . delay ;
}
}
CheckComplete ( ) {
if ( this . done ) {
return ;
}
if ( this . processClosed ) {
this . _setResult ( ) ;
}
else if ( this . processExited ) {
this . timeout = setTimeout ( ExecState . HandleTimeout , this . delay , this ) ;
}
}
_debug ( message ) {
this . emit ( 'debug' , message ) ;
}
_setResult ( ) {
// determine whether there is an error
let error ;
if ( this . processExited ) {
if ( this . processError ) {
error = new Error ( ` There was an error when attempting to execute the process ' ${ this . toolPath } '. This may indicate the process failed to start. Error: ${ this . processError } ` ) ;
}
else if ( this . processExitCode !== 0 && ! this . options . ignoreReturnCode ) {
error = new Error ( ` The process ' ${ this . toolPath } ' failed with exit code ${ this . processExitCode } ` ) ;
}
else if ( this . processStderr && this . options . failOnStdErr ) {
error = new Error ( ` The process ' ${ this . toolPath } ' failed because one or more lines were written to the STDERR stream ` ) ;
}
}
// clear the timeout
if ( this . timeout ) {
clearTimeout ( this . timeout ) ;
this . timeout = null ;
}
this . done = true ;
this . emit ( 'done' , error , this . processExitCode ) ;
}
static HandleTimeout ( state ) {
if ( state . done ) {
return ;
}
if ( ! state . processClosed && state . processExited ) {
const message = ` The STDIO streams did not close within ${ state . delay /
1000 } seconds of the exit event from process '${state.toolPath}' . This may indicate a child process inherited the STDIO streams and has not yet exited . ` ;
state . _debug ( message ) ;
}
state . _setResult ( ) ;
}
}
//# sourceMappingURL=toolrunner.js.map
/***/ } ) ,

/***/ 4087 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {

"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . Context = void 0 ;
const fs _1 = _ _nccwpck _require _ _ ( 5747 ) ;
const os _1 = _ _nccwpck _require _ _ ( 2087 ) ;
class Context {
/ * *
* Hydrate the context from the environment
* /
constructor ( ) {
this . payload = { } ;
if ( process . env . GITHUB _EVENT _PATH ) {
if ( fs _1 . existsSync ( process . env . GITHUB _EVENT _PATH ) ) {
this . payload = JSON . parse ( fs _1 . readFileSync ( process . env . GITHUB _EVENT _PATH , { encoding : 'utf8' } ) ) ;
}
else {
const path = process . env . GITHUB _EVENT _PATH ;
process . stdout . write ( ` GITHUB_EVENT_PATH ${ path } does not exist ${ os _1 . EOL } ` ) ;
}
}
this . eventName = process . env . GITHUB _EVENT _NAME ;
this . sha = process . env . GITHUB _SHA ;
this . ref = process . env . GITHUB _REF ;
this . workflow = process . env . GITHUB _WORKFLOW ;
this . action = process . env . GITHUB _ACTION ;
this . actor = process . env . GITHUB _ACTOR ;
this . job = process . env . GITHUB _JOB ;
this . runNumber = parseInt ( process . env . GITHUB _RUN _NUMBER , 10 ) ;
this . runId = parseInt ( process . env . GITHUB _RUN _ID , 10 ) ;
}
get issue ( ) {
const payload = this . payload ;
return Object . assign ( Object . assign ( { } , this . repo ) , { number : ( payload . issue || payload . pull _request || payload ) . number } ) ;
}
get repo ( ) {
if ( process . env . GITHUB _REPOSITORY ) {
const [ owner , repo ] = process . env . GITHUB _REPOSITORY . split ( '/' ) ;
return { owner , repo } ;
}
if ( this . payload . repository ) {
return {
owner : this . payload . repository . owner . login ,
repo : this . payload . repository . name
} ;
}
throw new Error ( "context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'" ) ;
}
}
exports . Context = Context ;
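// Consumption sketch for the hydrated Context (values shown are hypothetical; on a real
// runner they come from GITHUB_* environment variables and the webhook payload file):
//
//   const ctx = new Context();
//   ctx.repo;   // e.g. { owner: 'octocat', repo: 'hello-world' }
//   ctx.issue;  // repo fields plus the issue/PR number taken from the payload
//   ctx.eventName, ctx.sha, ctx.ref; // straight from the environment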
//# sourceMappingURL=context.js.map
/***/ } ) ,

/***/ 5438 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {

"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getOctokit = exports . context = void 0 ;
const Context = _ _importStar ( _ _nccwpck _require _ _ ( 4087 ) ) ;
const utils _1 = _ _nccwpck _require _ _ ( 3030 ) ;
exports . context = new Context . Context ( ) ;
/ * *
* Returns a hydrated octokit ready to use for GitHub Actions
*
* @ param token the repo PAT or GITHUB _TOKEN
* @ param options other options to set
* /
function getOctokit ( token , options ) {
return new utils _1 . GitHub ( utils _1 . getOctokitOptions ( token , options ) ) ;
}
exports . getOctokit = getOctokit ;
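// Usage sketch (assumes it runs in a workflow step with a token available; the method
// surface depends on the Octokit plugins bundled above, so treat the call as illustrative):
//
//   const octokit = getOctokit(process.env.GITHUB_TOKEN);
//   const { owner, repo } = exports.context.repo;
//   const { data } = await octokit.repos.get({ owner, repo });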
//# sourceMappingURL=github.js.map
/***/ } ) ,

/***/ 7914 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {

"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getApiBaseUrl = exports . getProxyAgent = exports . getAuthString = void 0 ;
const httpClient = _ _importStar ( _ _nccwpck _require _ _ ( 893 ) ) ;
function getAuthString ( token , options ) {
if ( ! token && ! options . auth ) {
throw new Error ( 'Parameter token or opts.auth is required' ) ;
}
else if ( token && options . auth ) {
throw new Error ( 'Parameters token and opts.auth may not both be specified' ) ;
}
return typeof options . auth === 'string' ? options . auth : ` token ${ token } ` ;
}
exports . getAuthString = getAuthString ;
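// Behavior sketch for getAuthString, following the checks above:
//
//   getAuthString('abc123', {});                     // => 'token abc123'
//   getAuthString(undefined, { auth: 'basic xyz' }); // => 'basic xyz'
//   getAuthString('abc123', { auth: 'basic xyz' });  // throws: both were specified
//   getAuthString(undefined, {});                    // throws: neither was specified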
function getProxyAgent ( destinationUrl ) {
const hc = new httpClient . HttpClient ( ) ;
return hc . getAgent ( destinationUrl ) ;
}
exports . getProxyAgent = getProxyAgent ;
function getApiBaseUrl ( ) {
return process . env [ 'GITHUB_API_URL' ] || 'https://api.github.com' ;
}
exports . getApiBaseUrl = getApiBaseUrl ;
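// Example of the base-URL fallback above: GitHub Enterprise runners set GITHUB_API_URL
// (the host below is hypothetical); otherwise the public endpoint is used.
//
//   process.env['GITHUB_API_URL'] = 'https://ghe.example.com/api/v3';
//   getApiBaseUrl(); // => 'https://ghe.example.com/api/v3'
//   delete process.env['GITHUB_API_URL'];
//   getApiBaseUrl(); // => 'https://api.github.com'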
//# sourceMappingURL=utils.js.map
/***/ } ) ,

/***/ 3030 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {

"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
exports . getOctokitOptions = exports . GitHub = exports . context = void 0 ;
const Context = _ _importStar ( _ _nccwpck _require _ _ ( 4087 ) ) ;
const Utils = _ _importStar ( _ _nccwpck _require _ _ ( 7914 ) ) ;
// octokit + plugins
const core _1 = _ _nccwpck _require _ _ ( 6762 ) ;
const plugin _rest _endpoint _methods _1 = _ _nccwpck _require _ _ ( 3044 ) ;
const plugin _paginate _rest _1 = _ _nccwpck _require _ _ ( 4193 ) ;
exports . context = new Context . Context ( ) ;
const baseUrl = Utils . getApiBaseUrl ( ) ;
const defaults = {
baseUrl ,
request : {
agent : Utils . getProxyAgent ( baseUrl )
}
} ;
exports . GitHub = core _1 . Octokit . plugin ( plugin _rest _endpoint _methods _1 . restEndpointMethods , plugin _paginate _rest _1 . paginateRest ) . defaults ( defaults ) ;
/ * *
* Convenience function to correctly format Octokit Options to pass into the constructor .
*
* @ param token the repo PAT or GITHUB _TOKEN
* @ param options other options to set
* /
function getOctokitOptions ( token , options ) {
const opts = Object . assign ( { } , options || { } ) ; // Shallow clone - don't mutate the object provided by the caller
// Auth
const auth = Utils . getAuthString ( token , opts ) ;
if ( auth ) {
opts . auth = auth ;
}
return opts ;
}
exports . getOctokitOptions = getOctokitOptions ;
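// Sketch of getOctokitOptions: the caller's options object is shallow-cloned and an auth
// string is injected (token value is a placeholder):
//
//   getOctokitOptions('abc123', { userAgent: 'my-action' });
//   // => { userAgent: 'my-action', auth: 'token abc123' }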
//# sourceMappingURL=utils.js.map
/***/ } ) ,

/***/ 893 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {

"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const http = _ _nccwpck _require _ _ ( 8605 ) ;
const https = _ _nccwpck _require _ _ ( 7211 ) ;
const pm = _ _nccwpck _require _ _ ( 211 ) ;
let tunnel ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/ * *
* Returns the proxy URL , depending upon the supplied url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
function getProxyUrl ( serverUrl ) {
let proxyUrl = pm . getProxyUrl ( new URL ( serverUrl ) ) ;
return proxyUrl ? proxyUrl . href : '' ;
}
exports . getProxyUrl = getProxyUrl ;
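// Sketch of proxy resolution through the exported helper (proxy address is hypothetical):
//
//   process.env['https_proxy'] = 'http://127.0.0.1:8888';
//   getProxyUrl('https://api.github.com'); // => 'http://127.0.0.1:8888/'
//   getProxyUrl('http://internal.local');  // consults http_proxy / HTTP_PROXY instead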
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
class HttpClientError extends Error {
constructor ( message , statusCode ) {
super ( message ) ;
this . name = 'HttpClientError' ;
this . statusCode = statusCode ;
Object . setPrototypeOf ( this , HttpClientError . prototype ) ;
}
}
exports . HttpClientError = HttpClientError ;
class HttpClientResponse {
constructor ( message ) {
this . message = message ;
}
readBody ( ) {
return new Promise ( async ( resolve , reject ) => {
let output = Buffer . alloc ( 0 ) ;
this . message . on ( 'data' , ( chunk ) => {
output = Buffer . concat ( [ output , chunk ] ) ;
} ) ;
this . message . on ( 'end' , ( ) => {
resolve ( output . toString ( ) ) ;
} ) ;
} ) ;
}
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
let parsedUrl = new URL ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
options ( requestUrl , additionalHeaders ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
}
get ( requestUrl , additionalHeaders ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
}
del ( requestUrl , additionalHeaders ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
}
head ( requestUrl , additionalHeaders ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that not found returns a null . Other errors ( 4 xx , 5 xx ) reject the promise
* /
async getJson ( requestUrl , additionalHeaders = { } ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
let res = await this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async postJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async putJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async patchJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
async request ( verb , requestUrl , data , headers ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
}
let parsedUrl = new URL ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this . _allowRetries && RetryableHttpVerbs . indexOf ( verb ) != - 1
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
while ( numTries < maxTries ) {
response = await this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( let i = 0 ; i < this . handlers . length ; i ++ ) {
if ( this . handlers [ i ] . canHandleAuthentication ( response ) ) {
authenticationHandler = this . handlers [ i ] ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( HttpRedirectCodes . indexOf ( response . message . statusCode ) != - 1 &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
let parsedRedirectUrl = new URL ( redirectUrl ) ;
if ( parsedUrl . protocol == 'https:' &&
parsedUrl . protocol != parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning response
// which will leak the open socket.
await response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( let header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
}
}
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = await this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( HttpResponseRetryCodes . indexOf ( response . message . statusCode ) == - 1 ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
await response . readBody ( ) ;
await this . _performExponentialBackoff ( numTries ) ;
}
}
return response ;
}
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return new Promise ( ( resolve , reject ) => {
let callbackForResult = function ( err , res ) {
if ( err ) {
reject ( err ) ;
}
resolve ( res ) ;
} ;
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
}
/ * *
* Raw request with callback .
* @ param info
* @ param data
* @ param onResult
* /
requestRawWithCallback ( info , data , onResult ) {
let socket ;
if ( typeof data === 'string' ) {
info . options . headers [ 'Content-Length' ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
let handleResult = ( err , res ) => {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
}
} ;
let req = info . httpModule . request ( info . options , ( msg ) => {
let res = new HttpClientResponse ( msg ) ;
handleResult ( null , res ) ;
} ) ;
req . on ( 'socket' , sock => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
}
handleResult ( new Error ( 'Request timeout: ' + info . options . path ) , null ) ;
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err , null ) ;
} ) ;
if ( data && typeof data === 'string' ) {
req . write ( data , 'utf8' ) ;
}
if ( data && typeof data !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
}
else {
req . end ( ) ;
}
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
getAgent ( serverUrl ) {
let parsedUrl = new URL ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
this . handlers . forEach ( handler => {
handler . prepareRequest ( info . options ) ;
} ) ;
}
return info ;
}
_mergeHeaders ( headers ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
let proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
let useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( ! ! agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( ! ! this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
if ( useProxy ) {
// If using proxy, need tunnel
if ( ! tunnel ) {
tunnel = _ _nccwpck _require _ _ ( 4294 ) ;
}
const agentOptions = {
maxSockets : maxSockets ,
keepAlive : this . _keepAlive ,
proxy : {
proxyAuth : ` ${ proxyUrl . username } : ${ proxyUrl . password } ` ,
host : proxyUrl . hostname ,
port : proxyUrl . port
}
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
}
else {
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
}
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets : maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , {
rejectUnauthorized : false
} ) ;
}
return agent ;
}
_performExponentialBackoff ( retryNumber ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
}
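// Worked example of the backoff above (time slice 5 ms, ceiling 10):
// retry 1 waits 5 * 2^1 = 10 ms, retry 2 waits 20 ms, retry 3 waits 40 ms,
// and from retry 10 onward the delay is capped at 5 * 2^10 = 5120 ms.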
static dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
let a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
async _processResponse ( res , options ) {
return new Promise ( async ( resolve , reject ) => {
const statusCode = res . message . statusCode ;
const response = {
statusCode : statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode == HttpCodes . NotFound ) {
resolve ( response ) ;
}
let obj ;
let contents ;
// get the result from the body
try {
contents = await res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , HttpClient . dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
}
response . headers = res . message . headers ;
}
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = 'Failed request: (' + statusCode + ')' ;
}
let err = new HttpClientError ( msg , statusCode ) ;
err . result = response . result ;
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ;
}
}
exports . HttpClient = HttpClient ;
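// Usage sketch for the exported HttpClient (URL and user agent are examples only):
//
//   const client = new HttpClient('my-agent', [], { allowRetries: true, maxRetries: 2 });
//   const resp = await client.getJson('https://api.github.com/rate_limit');
//   resp.statusCode; // 200
//   resp.result;     // parsed JSON body, or null for a 404 / non-JSON response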
/***/ } ) ,

/***/ 211 :
/***/ ( ( _ _unused _webpack _module , exports ) => {

"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
function getProxyUrl ( reqUrl ) {
let usingSsl = reqUrl . protocol === 'https:' ;
let proxyUrl ;
if ( checkBypass ( reqUrl ) ) {
return proxyUrl ;
}
let proxyVar ;
if ( usingSsl ) {
proxyVar = process . env [ 'https_proxy' ] || process . env [ 'HTTPS_PROXY' ] ;
}
else {
proxyVar = process . env [ 'http_proxy' ] || process . env [ 'HTTP_PROXY' ] ;
}
if ( proxyVar ) {
proxyUrl = new URL ( proxyVar ) ;
}
return proxyUrl ;
}
exports . getProxyUrl = getProxyUrl ;
function checkBypass ( reqUrl ) {
if ( ! reqUrl . hostname ) {
return false ;
}
let noProxy = process . env [ 'no_proxy' ] || process . env [ 'NO_PROXY' ] || '' ;
if ( ! noProxy ) {
return false ;
}
// Determine the request port
let reqPort ;
if ( reqUrl . port ) {
reqPort = Number ( reqUrl . port ) ;
}
else if ( reqUrl . protocol === 'http:' ) {
reqPort = 80 ;
}
else if ( reqUrl . protocol === 'https:' ) {
reqPort = 443 ;
}
// Format the request hostname and hostname with port
let upperReqHosts = [ reqUrl . hostname . toUpperCase ( ) ] ;
if ( typeof reqPort === 'number' ) {
upperReqHosts . push ( ` ${ upperReqHosts [ 0 ] } : ${ reqPort } ` ) ;
}
// Compare request host against noproxy
for ( let upperNoProxyItem of noProxy
. split ( ',' )
. map ( x => x . trim ( ) . toUpperCase ( ) )
. filter ( x => x ) ) {
if ( upperReqHosts . some ( x => x === upperNoProxyItem ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
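// Sketch of the NO_PROXY matching above (hostnames are hypothetical): entries are compared
// case-insensitively against the request host, with and without its default or explicit port.
//
//   process.env['no_proxy'] = 'internal.example, localhost:8080';
//   checkBypass(new URL('https://INTERNAL.example/path')); // => true
//   checkBypass(new URL('http://localhost:8080'));         // => true
//   checkBypass(new URL('https://api.github.com'));        // => false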
/***/ } ) ,

/***/ 9925 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {

"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const url = _ _nccwpck _require _ _ ( 8835 ) ;
const http = _ _nccwpck _require _ _ ( 8605 ) ;
const https = _ _nccwpck _require _ _ ( 7211 ) ;
const pm = _ _nccwpck _require _ _ ( 6443 ) ;
let tunnel ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/ * *
* Returns the proxy URL , depending upon the supplied url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
function getProxyUrl ( serverUrl ) {
let proxyUrl = pm . getProxyUrl ( url . parse ( serverUrl ) ) ;
return proxyUrl ? proxyUrl . href : '' ;
}
exports . getProxyUrl = getProxyUrl ;
const HttpRedirectCodes = [ HttpCodes . MovedPermanently , HttpCodes . ResourceMoved , HttpCodes . SeeOther , HttpCodes . TemporaryRedirect , HttpCodes . PermanentRedirect ] ;
const HttpResponseRetryCodes = [ HttpCodes . BadGateway , HttpCodes . ServiceUnavailable , HttpCodes . GatewayTimeout ] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
class HttpClientResponse {
constructor ( message ) {
this . message = message ;
}
readBody ( ) {
return new Promise ( async ( resolve , reject ) => {
let output = Buffer . alloc ( 0 ) ;
this . message . on ( 'data' , ( chunk ) => {
output = Buffer . concat ( [ output , chunk ] ) ;
} ) ;
this . message . on ( 'end' , ( ) => {
resolve ( output . toString ( ) ) ;
} ) ;
} ) ;
}
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
let parsedUrl = url . parse ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
options ( requestUrl , additionalHeaders ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
}
get ( requestUrl , additionalHeaders ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
}
del ( requestUrl , additionalHeaders ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
}
head ( requestUrl , additionalHeaders ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that not found returns a null . Other errors ( 4 xx , 5 xx ) reject the promise
* /
async getJson ( requestUrl , additionalHeaders = { } ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
let res = await this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async postJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async putJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async patchJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
async request ( verb , requestUrl , data , headers ) {
if ( this . _disposed ) {
throw new Error ( "Client has already been disposed." ) ;
}
let parsedUrl = url . parse ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
let maxTries = ( this . _allowRetries && RetryableHttpVerbs . indexOf ( verb ) != - 1 ) ? this . _maxRetries + 1 : 1 ;
let numTries = 0 ;
let response ;
while ( numTries < maxTries ) {
response = await this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response && response . message && response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( let i = 0 ; i < this . handlers . length ; i ++ ) {
if ( this . handlers [ i ] . canHandleAuthentication ( response ) ) {
authenticationHandler = this . handlers [ i ] ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( HttpRedirectCodes . indexOf ( response . message . statusCode ) != - 1
&& this . _allowRedirects
&& redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ "location" ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
let parsedRedirectUrl = url . parse ( redirectUrl ) ;
if ( parsedUrl . protocol == 'https:' && parsedUrl . protocol != parsedRedirectUrl . protocol && ! this . _allowRedirectDowngrade ) {
throw new Error ( "Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true." ) ;
}
// we need to finish reading the response before reassigning response
// which will leak the open socket.
await response . readBody ( ) ;
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = await this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( HttpResponseRetryCodes . indexOf ( response . message . statusCode ) == - 1 ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
await response . readBody ( ) ;
await this . _performExponentialBackoff ( numTries ) ;
}
}
return response ;
}
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return new Promise ( ( resolve , reject ) => {
let callbackForResult = function ( err , res ) {
if ( err ) {
reject ( err ) ;
}
resolve ( res ) ;
} ;
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
}
/ * *
* Raw request with callback .
* @ param info
* @ param data
* @ param onResult
* /
requestRawWithCallback ( info , data , onResult ) {
let socket ;
if ( typeof ( data ) === 'string' ) {
info . options . headers [ "Content-Length" ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
let handleResult = ( err , res ) => {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
}
} ;
let req = info . httpModule . request ( info . options , ( msg ) => {
let res = new HttpClientResponse ( msg ) ;
handleResult ( null , res ) ;
} ) ;
req . on ( 'socket' , ( sock ) => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
}
handleResult ( new Error ( 'Request timeout: ' + info . options . path ) , null ) ;
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err , null ) ;
} ) ;
if ( data && typeof ( data ) === 'string' ) {
req . write ( data , 'utf8' ) ;
}
if ( data && typeof ( data ) !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
}
else {
req . end ( ) ;
}
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
getAgent ( serverUrl ) {
let parsedUrl = url . parse ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port ? parseInt ( info . parsedUrl . port ) : defaultPort ;
info . options . path = ( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ "user-agent" ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
this . handlers . forEach ( ( handler ) => {
handler . prepareRequest ( info . options ) ;
} ) ;
}
return info ;
}
_mergeHeaders ( headers ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( c [ k . toLowerCase ( ) ] = obj [ k ] , c ) , { } ) ;
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( c [ k . toLowerCase ( ) ] = obj [ k ] , c ) , { } ) ;
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
let proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
let useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( ! ! agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( ! ! this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
if ( useProxy ) {
// If using proxy, need tunnel
if ( ! tunnel ) {
tunnel = _ _nccwpck _require _ _ ( 4294 ) ;
}
const agentOptions = {
maxSockets : maxSockets ,
keepAlive : this . _keepAlive ,
proxy : {
proxyAuth : proxyUrl . auth ,
host : proxyUrl . hostname ,
port : proxyUrl . port
} ,
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
}
else {
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
}
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets : maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , { rejectUnauthorized : false } ) ;
}
return agent ;
}
_performExponentialBackoff ( retryNumber ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
}
static dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
let a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
async _processResponse ( res , options ) {
return new Promise ( async ( resolve , reject ) => {
const statusCode = res . message . statusCode ;
const response = {
statusCode : statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode == HttpCodes . NotFound ) {
resolve ( response ) ;
}
let obj ;
let contents ;
// get the result from the body
try {
contents = await res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , HttpClient . dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
}
response . headers = res . message . headers ;
}
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = "Failed request: (" + statusCode + ")" ;
}
let err = new Error ( msg ) ;
// attach statusCode and body obj (if available) to the error object
err [ 'statusCode' ] = statusCode ;
if ( response . result ) {
err [ 'result' ] = response . result ;
}
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ;
}
}
exports . HttpClient = HttpClient ;
/***/ } ) ,
/***/ 6443 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const url = _ _nccwpck _require _ _ ( 8835 ) ;
function getProxyUrl ( reqUrl ) {
let usingSsl = reqUrl . protocol === 'https:' ;
let proxyUrl ;
if ( checkBypass ( reqUrl ) ) {
return proxyUrl ;
}
let proxyVar ;
if ( usingSsl ) {
proxyVar = process . env [ "https_proxy" ] ||
process . env [ "HTTPS_PROXY" ] ;
}
else {
proxyVar = process . env [ "http_proxy" ] ||
process . env [ "HTTP_PROXY" ] ;
}
if ( proxyVar ) {
proxyUrl = url . parse ( proxyVar ) ;
}
return proxyUrl ;
}
exports . getProxyUrl = getProxyUrl ;
function checkBypass ( reqUrl ) {
if ( ! reqUrl . hostname ) {
return false ;
}
let noProxy = process . env [ "no_proxy" ] || process . env [ "NO_PROXY" ] || '' ;
if ( ! noProxy ) {
return false ;
}
// Determine the request port
let reqPort ;
if ( reqUrl . port ) {
reqPort = Number ( reqUrl . port ) ;
}
else if ( reqUrl . protocol === 'http:' ) {
reqPort = 80 ;
}
else if ( reqUrl . protocol === 'https:' ) {
reqPort = 443 ;
}
// Format the request hostname and hostname with port
let upperReqHosts = [ reqUrl . hostname . toUpperCase ( ) ] ;
if ( typeof reqPort === 'number' ) {
upperReqHosts . push ( ` ${ upperReqHosts [ 0 ] } : ${ reqPort } ` ) ;
}
// Compare request host against noproxy
for ( let upperNoProxyItem of noProxy . split ( ',' ) . map ( x => x . trim ( ) . toUpperCase ( ) ) . filter ( x => x ) ) {
if ( upperReqHosts . some ( x => x === upperNoProxyItem ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
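// Illustrative sketch (added example, hypothetical URLs): how getProxyUrl and checkBypass
// combine. A request is proxied through https_proxy/HTTPS_PROXY unless its host matches an
// entry in no_proxy/NO_PROXY, in which case checkBypass returns true and no proxy URL is used.
// This function is never invoked by the module; it only documents the behavior.
function exampleProxyResolution() {
    process.env['https_proxy'] = 'https://127.0.0.1:8080';
    process.env['no_proxy'] = 'internal.example.com';
    const proxied = getProxyUrl(url.parse('https://registry.npmjs.org')); // parsed https://127.0.0.1:8080
    const bypassed = checkBypass(url.parse('https://internal.example.com')); // true: host listed in no_proxy
    return { proxied, bypassed };
}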
/***/ } ) ,
/***/ 1962 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _a ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const assert _1 = _ _nccwpck _require _ _ ( 2357 ) ;
const fs = _ _nccwpck _require _ _ ( 5747 ) ;
const path = _ _nccwpck _require _ _ ( 5622 ) ;
_a = fs . promises , exports . chmod = _a . chmod , exports . copyFile = _a . copyFile , exports . lstat = _a . lstat , exports . mkdir = _a . mkdir , exports . readdir = _a . readdir , exports . readlink = _a . readlink , exports . rename = _a . rename , exports . rmdir = _a . rmdir , exports . stat = _a . stat , exports . symlink = _a . symlink , exports . unlink = _a . unlink ;
exports . IS _WINDOWS = process . platform === 'win32' ;
function exists ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield exports . stat ( fsPath ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
return false ;
}
throw err ;
}
return true ;
} ) ;
}
exports . exists = exists ;
function isDirectory ( fsPath , useStat = false ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const stats = useStat ? yield exports . stat ( fsPath ) : yield exports . lstat ( fsPath ) ;
return stats . isDirectory ( ) ;
} ) ;
}
exports . isDirectory = isDirectory ;
/ * *
* On OSX / Linux , true if path starts with '/' . On Windows , true for paths like :
* \ , \ hello , \ \ hello \ share , C : , and C : \ hello ( and corresponding alternate separator cases ) .
* /
function isRooted ( p ) {
p = normalizeSeparators ( p ) ;
if ( ! p ) {
throw new Error ( 'isRooted() parameter "p" cannot be empty' ) ;
}
if ( exports . IS _WINDOWS ) {
return ( p . startsWith ( '\\' ) || /^[A-Z]:/i . test ( p ) // e.g. \ or \hello or \\hello
) ; // e.g. C: or C:\hello
}
return p . startsWith ( '/' ) ;
}
exports . isRooted = isRooted ;
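// Illustrative sketch (added example, hypothetical paths): isRooted only inspects the prefix of
// the normalized path, so the paths below do not need to exist. Never invoked by the module.
function exampleIsRooted() {
    const absolute = isRooted('/usr/local/bin'); // true on Linux/macOS; also true on Windows after separator normalization
    const relative = isRooted('tools/node'); // false: no leading '/', '\' or drive letter
    return { absolute, relative };
}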
/ * *
* Recursively create a directory at ` fsPath ` .
*
* This implementation is optimistic , meaning it attempts to create the full
* path first , and backs up the path stack from there .
*
* @ param fsPath The path to create
* @ param maxDepth The maximum recursion depth
* @ param depth The current recursion depth
* /
function mkdirP ( fsPath , maxDepth = 1000 , depth = 1 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( fsPath , 'a path argument must be provided' ) ;
fsPath = path . resolve ( fsPath ) ;
if ( depth >= maxDepth )
return exports . mkdir ( fsPath ) ;
try {
yield exports . mkdir ( fsPath ) ;
return ;
}
catch ( err ) {
switch ( err . code ) {
case 'ENOENT' : {
yield mkdirP ( path . dirname ( fsPath ) , maxDepth , depth + 1 ) ;
yield exports . mkdir ( fsPath ) ;
return ;
}
default : {
let stats ;
try {
stats = yield exports . stat ( fsPath ) ;
}
catch ( err2 ) {
throw err ;
}
if ( ! stats . isDirectory ( ) )
throw err ;
}
}
}
} ) ;
}
exports . mkdirP = mkdirP ;
/ * *
* Best effort attempt to determine whether a file exists and is executable .
* @ param filePath file path to check
* @ param extensions additional file extensions to try
* @ return if file exists and is executable , returns the file path . otherwise empty string .
* /
function tryGetExecutablePath ( filePath , extensions ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stats = undefined ;
try {
// test file exists
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// on Windows, test for valid extension
const upperExt = path . extname ( filePath ) . toUpperCase ( ) ;
if ( extensions . some ( validExt => validExt . toUpperCase ( ) === upperExt ) ) {
return filePath ;
}
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
// try each extension
const originalFilePath = filePath ;
for ( const extension of extensions ) {
filePath = originalFilePath + extension ;
stats = undefined ;
try {
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// preserve the case of the actual file (since an extension was appended)
try {
const directory = path . dirname ( filePath ) ;
const upperName = path . basename ( filePath ) . toUpperCase ( ) ;
for ( const actualName of yield exports . readdir ( directory ) ) {
if ( upperName === actualName . toUpperCase ( ) ) {
filePath = path . join ( directory , actualName ) ;
break ;
}
}
}
catch ( err ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine the actual case of the file ' ${ filePath } ': ${ err } ` ) ;
}
return filePath ;
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
}
return '' ;
} ) ;
}
exports . tryGetExecutablePath = tryGetExecutablePath ;
function normalizeSeparators ( p ) {
p = p || '' ;
if ( exports . IS _WINDOWS ) {
// convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// remove redundant slashes
return p . replace ( /\\\\+/g , '\\' ) ;
}
// remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
function isUnixExecutable ( stats ) {
return ( ( stats . mode & 1 ) > 0 ||
( ( stats . mode & 8 ) > 0 && stats . gid === process . getgid ( ) ) ||
( ( stats . mode & 64 ) > 0 && stats . uid === process . getuid ( ) ) ) ;
}
//# sourceMappingURL=io-util.js.map
/***/ } ) ,
/***/ 7436 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const childProcess = _ _nccwpck _require _ _ ( 3129 ) ;
const path = _ _nccwpck _require _ _ ( 5622 ) ;
const util _1 = _ _nccwpck _require _ _ ( 1669 ) ;
const ioUtil = _ _nccwpck _require _ _ ( 1962 ) ;
const exec = util _1 . promisify ( childProcess . exec ) ;
/ * *
* Copies a file or folder .
* Based off of shelljs - https : //github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
*
* @ param source source path
* @ param dest destination path
* @ param options optional . See CopyOptions .
* /
function cp ( source , dest , options = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const { force , recursive } = readCopyOptions ( options ) ;
const destStat = ( yield ioUtil . exists ( dest ) ) ? yield ioUtil . stat ( dest ) : null ;
// Dest is an existing file, but not forcing
if ( destStat && destStat . isFile ( ) && ! force ) {
return ;
}
// If dest is an existing directory, should copy inside.
const newDest = destStat && destStat . isDirectory ( )
? path . join ( dest , path . basename ( source ) )
: dest ;
if ( ! ( yield ioUtil . exists ( source ) ) ) {
throw new Error ( ` no such file or directory: ${ source } ` ) ;
}
const sourceStat = yield ioUtil . stat ( source ) ;
if ( sourceStat . isDirectory ( ) ) {
if ( ! recursive ) {
throw new Error ( ` Failed to copy. ${ source } is a directory, but tried to copy without recursive flag. ` ) ;
}
else {
yield cpDirRecursive ( source , newDest , 0 , force ) ;
}
}
else {
if ( path . relative ( source , newDest ) === '' ) {
// a file cannot be copied to itself
throw new Error ( ` ' ${ newDest } ' and ' ${ source } ' are the same file ` ) ;
}
yield copyFile ( source , newDest , force ) ;
}
} ) ;
}
exports . cp = cp ;
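// Illustrative usage sketch (added example, hypothetical paths): copying a directory requires
// the recursive option, otherwise cp throws; a single file copied onto an existing directory
// lands inside that directory. Never invoked by the module.
async function exampleCpUsage() {
    await cp('/tmp/source-dir', '/tmp/dest-dir', { recursive: true, force: true });
    await cp('/tmp/notes.txt', '/tmp/dest-dir'); // copied to /tmp/dest-dir/notes.txt
}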
/ * *
* Moves a path .
*
* @ param source source path
* @ param dest destination path
* @ param options optional . See MoveOptions .
* /
function mv ( source , dest , options = { } ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( yield ioUtil . exists ( dest ) ) {
let destExists = true ;
if ( yield ioUtil . isDirectory ( dest ) ) {
// If dest is directory copy src into dest
dest = path . join ( dest , path . basename ( source ) ) ;
destExists = yield ioUtil . exists ( dest ) ;
}
if ( destExists ) {
if ( options . force == null || options . force ) {
yield rmRF ( dest ) ;
}
else {
throw new Error ( 'Destination already exists' ) ;
}
}
}
yield mkdirP ( path . dirname ( dest ) ) ;
yield ioUtil . rename ( source , dest ) ;
} ) ;
}
exports . mv = mv ;
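// Illustrative usage sketch (added example, hypothetical paths): by default mv removes an
// existing destination before renaming; with force set to false it throws instead.
// Never invoked by the module.
async function exampleMvUsage() {
    await mv('/tmp/build/output.tgz', '/tmp/artifacts'); // moves into the directory, overwriting any existing file
}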
/ * *
* Remove a path recursively with force
*
* @ param inputPath path to remove
* /
function rmRF ( inputPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ioUtil . IS _WINDOWS ) {
// Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
// program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
try {
if ( yield ioUtil . isDirectory ( inputPath , true ) ) {
yield exec ( ` rd /s /q " ${ inputPath } " ` ) ;
}
else {
yield exec ( ` del /f /a " ${ inputPath } " ` ) ;
}
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
}
// Shelling out fails to remove a symlink folder with missing source, this unlink catches that
try {
yield ioUtil . unlink ( inputPath ) ;
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
}
}
else {
let isDir = false ;
try {
isDir = yield ioUtil . isDirectory ( inputPath ) ;
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
return ;
}
if ( isDir ) {
yield exec ( ` rm -rf " ${ inputPath } " ` ) ;
}
else {
yield ioUtil . unlink ( inputPath ) ;
}
}
} ) ;
}
exports . rmRF = rmRF ;
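// Illustrative usage sketch (added example, hypothetical path): rmRF swallows ENOENT, so it is
// safe to call on a path that may not exist before recreating it. Never invoked by the module.
async function exampleRmRFUsage() {
    await rmRF('/tmp/scratch');
    await mkdirP('/tmp/scratch');
}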
/ * *
* Make a directory . Creates the full path with folders in between
* Will throw if it fails
*
* @ param fsPath path to create
* @ returns Promise < void >
* /
function mkdirP ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield ioUtil . mkdirP ( fsPath ) ;
} ) ;
}
exports . mkdirP = mkdirP ;
/ * *
 * Returns the path of a tool as if the tool had actually been invoked . Resolves via the PATH environment variable .
* If you check and the tool does not exist , it will throw .
*
* @ param tool name of the tool
* @ param check whether to check if tool exists
* @ returns Promise < string > path to tool
* /
function which ( tool , check ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! tool ) {
throw new Error ( "parameter 'tool' is required" ) ;
}
// recursive when check=true
if ( check ) {
const result = yield which ( tool , false ) ;
if ( ! result ) {
if ( ioUtil . IS _WINDOWS ) {
throw new Error ( ` Unable to locate executable file: ${ tool } . Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file. ` ) ;
}
else {
throw new Error ( ` Unable to locate executable file: ${ tool } . Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable. ` ) ;
}
}
}
try {
// build the list of extensions to try
const extensions = [ ] ;
if ( ioUtil . IS _WINDOWS && process . env . PATHEXT ) {
for ( const extension of process . env . PATHEXT . split ( path . delimiter ) ) {
if ( extension ) {
extensions . push ( extension ) ;
}
}
}
// if it's rooted, return it if exists. otherwise return empty.
if ( ioUtil . isRooted ( tool ) ) {
const filePath = yield ioUtil . tryGetExecutablePath ( tool , extensions ) ;
if ( filePath ) {
return filePath ;
}
return '' ;
}
// if any path separators, return empty
if ( tool . includes ( '/' ) || ( ioUtil . IS _WINDOWS && tool . includes ( '\\' ) ) ) {
return '' ;
}
// build the list of directories
//
// Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
// it feels like we should not do this. Checking the current directory seems like more of a use
// case of a shell, and the which() function exposed by the toolkit should strive for consistency
// across platforms.
const directories = [ ] ;
if ( process . env . PATH ) {
for ( const p of process . env . PATH . split ( path . delimiter ) ) {
if ( p ) {
directories . push ( p ) ;
}
}
}
// return the first match
for ( const directory of directories ) {
const filePath = yield ioUtil . tryGetExecutablePath ( directory + path . sep + tool , extensions ) ;
if ( filePath ) {
return filePath ;
}
}
return '' ;
}
catch ( err ) {
throw new Error ( ` which failed with message ${ err . message } ` ) ;
}
} ) ;
}
exports . which = which ;
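// Illustrative usage sketch (added example; 'git' and 'jq' are just sample tool names): with
// check=true which throws when the tool cannot be located; with check=false it resolves to an
// empty string instead. Never invoked by the module.
async function exampleWhichUsage() {
    const gitPath = await which('git', true); // e.g. '/usr/bin/git', or throws if git is not on PATH
    const maybeJq = await which('jq', false); // '' when jq is not found
    return { gitPath, maybeJq };
}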
function readCopyOptions ( options ) {
const force = options . force == null ? true : options . force ;
const recursive = Boolean ( options . recursive ) ;
return { force , recursive } ;
}
function cpDirRecursive ( sourceDir , destDir , currentDepth , force ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Ensure there is not a runaway recursive copy
if ( currentDepth >= 255 )
return ;
currentDepth ++ ;
yield mkdirP ( destDir ) ;
const files = yield ioUtil . readdir ( sourceDir ) ;
for ( const fileName of files ) {
const srcFile = ` ${ sourceDir } / ${ fileName } ` ;
const destFile = ` ${ destDir } / ${ fileName } ` ;
const srcFileStat = yield ioUtil . lstat ( srcFile ) ;
if ( srcFileStat . isDirectory ( ) ) {
// Recurse
yield cpDirRecursive ( srcFile , destFile , currentDepth , force ) ;
}
else {
yield copyFile ( srcFile , destFile , force ) ;
}
}
// Change the mode for the newly created directory
yield ioUtil . chmod ( destDir , ( yield ioUtil . stat ( sourceDir ) ) . mode ) ;
} ) ;
}
// Buffered file copy
function copyFile ( srcFile , destFile , force ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ( yield ioUtil . lstat ( srcFile ) ) . isSymbolicLink ( ) ) {
// unlink/re-link it
try {
yield ioUtil . lstat ( destFile ) ;
yield ioUtil . unlink ( destFile ) ;
}
catch ( e ) {
// Try to override file permission
if ( e . code === 'EPERM' ) {
yield ioUtil . chmod ( destFile , '0666' ) ;
yield ioUtil . unlink ( destFile ) ;
}
// other errors = it doesn't exist, no work to do
}
// Copy over symlink
const symlinkFull = yield ioUtil . readlink ( srcFile ) ;
yield ioUtil . symlink ( symlinkFull , destFile , ioUtil . IS _WINDOWS ? 'junction' : null ) ;
}
else if ( ! ( yield ioUtil . exists ( destFile ) ) || force ) {
yield ioUtil . copyFile ( srcFile , destFile ) ;
}
} ) ;
}
//# sourceMappingURL=io.js.map
/***/ } ) ,
/***/ 2473 :
/***/ ( function ( module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const semver = _ _importStar ( _ _nccwpck _require _ _ ( 5911 ) ) ;
const core _1 = _ _nccwpck _require _ _ ( 6705 ) ;
// needs to be require for core node modules to be mocked
/* eslint @typescript-eslint/no-require-imports: 0 */
const os = _ _nccwpck _require _ _ ( 2087 ) ;
const cp = _ _nccwpck _require _ _ ( 3129 ) ;
const fs = _ _nccwpck _require _ _ ( 5747 ) ;
function _findMatch ( versionSpec , stable , candidates , archFilter ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const platFilter = os . platform ( ) ;
let result ;
let match ;
let file ;
for ( const candidate of candidates ) {
const version = candidate . version ;
core _1 . debug ( ` check ${ version } satisfies ${ versionSpec } ` ) ;
if ( semver . satisfies ( version , versionSpec ) &&
( ! stable || candidate . stable === stable ) ) {
file = candidate . files . find ( item => {
core _1 . debug ( ` ${ item . arch } === ${ archFilter } && ${ item . platform } === ${ platFilter } ` ) ;
let chk = item . arch === archFilter && item . platform === platFilter ;
if ( chk && item . platform _version ) {
const osVersion = module . exports . _getOsVersion ( ) ;
if ( osVersion === item . platform _version ) {
chk = true ;
}
else {
chk = semver . satisfies ( osVersion , item . platform _version ) ;
}
}
return chk ;
} ) ;
if ( file ) {
core _1 . debug ( ` matched ${ candidate . version } ` ) ;
match = candidate ;
break ;
}
}
}
if ( match && file ) {
// clone since we're mutating the file list to be only the file that matches
result = Object . assign ( { } , match ) ;
result . files = [ file ] ;
}
return result ;
} ) ;
}
exports . _findMatch = _findMatch ;
function _getOsVersion ( ) {
// TODO: add windows and other linux, arm variants
// right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python)
const plat = os . platform ( ) ;
let version = '' ;
if ( plat === 'darwin' ) {
version = cp . execSync ( 'sw_vers -productVersion' ) . toString ( ) ;
}
else if ( plat === 'linux' ) {
// the lsb_release binary is not present in some containers, so read the file instead
// Run cat /etc/lsb-release
// DISTRIB_ID=Ubuntu
// DISTRIB_RELEASE=18.04
// DISTRIB_CODENAME=bionic
// DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS"
const lsbContents = module . exports . _readLinuxVersionFile ( ) ;
if ( lsbContents ) {
const lines = lsbContents . split ( '\n' ) ;
for ( const line of lines ) {
const parts = line . split ( '=' ) ;
if ( parts . length === 2 && parts [ 0 ] . trim ( ) === 'DISTRIB_RELEASE' ) {
version = parts [ 1 ] . trim ( ) ;
break ;
}
}
}
}
return version ;
}
exports . _getOsVersion = _getOsVersion ;
function _readLinuxVersionFile ( ) {
const lsbFile = '/etc/lsb-release' ;
let contents = '' ;
if ( fs . existsSync ( lsbFile ) ) {
contents = fs . readFileSync ( lsbFile ) . toString ( ) ;
}
return contents ;
}
exports . _readLinuxVersionFile = _readLinuxVersionFile ;
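// Illustrative sketch (added example, sample file contents): how _getOsVersion extracts the
// Ubuntu release number from the lsb-release contents returned by _readLinuxVersionFile.
// Never invoked by the module.
function exampleLsbReleaseParsing() {
    const sample = 'DISTRIB_ID=Ubuntu\nDISTRIB_RELEASE=18.04\nDISTRIB_CODENAME=bionic\n';
    const releaseLine = sample.split('\n').find(line => line.startsWith('DISTRIB_RELEASE='));
    return releaseLine ? releaseLine.split('=')[1].trim() : ''; // '18.04'
}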
//# sourceMappingURL=manifest.js.map
/***/ } ) ,
/***/ 8279 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 6705 ) ) ;
/ * *
* Internal class for retries
* /
class RetryHelper {
constructor ( maxAttempts , minSeconds , maxSeconds ) {
if ( maxAttempts < 1 ) {
throw new Error ( 'max attempts should be greater than or equal to 1' ) ;
}
this . maxAttempts = maxAttempts ;
this . minSeconds = Math . floor ( minSeconds ) ;
this . maxSeconds = Math . floor ( maxSeconds ) ;
if ( this . minSeconds > this . maxSeconds ) {
throw new Error ( 'min seconds should be less than or equal to max seconds' ) ;
}
}
execute ( action , isRetryable ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let attempt = 1 ;
while ( attempt < this . maxAttempts ) {
// Try
try {
return yield action ( ) ;
}
catch ( err ) {
if ( isRetryable && ! isRetryable ( err ) ) {
throw err ;
}
core . info ( err . message ) ;
}
// Sleep
const seconds = this . getSleepAmount ( ) ;
core . info ( ` Waiting ${ seconds } seconds before trying again ` ) ;
yield this . sleep ( seconds ) ;
attempt ++ ;
}
// Last attempt
return yield action ( ) ;
} ) ;
}
getSleepAmount ( ) {
return ( Math . floor ( Math . random ( ) * ( this . maxSeconds - this . minSeconds + 1 ) ) +
this . minSeconds ) ;
}
sleep ( seconds ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( resolve => setTimeout ( resolve , seconds * 1000 ) ) ;
} ) ;
}
}
exports . RetryHelper = RetryHelper ;
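// Illustrative usage sketch (added example): retry an action up to 3 times, sleeping a random
// 10-20 seconds between attempts (the same configuration downloadTool uses in the tool-cache
// module). The action and retry predicate below are hypothetical. Never invoked by the module.
async function exampleRetryHelperUsage() {
    const retryHelper = new RetryHelper(3, 10, 20);
    let attempts = 0;
    return await retryHelper.execute(async () => {
        attempts++;
        if (attempts < 2) {
            throw new Error('transient failure'); // retried because the predicate below returns true
        }
        return attempts;
    }, err => err.message === 'transient failure');
}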
//# sourceMappingURL=retry-helper.js.map
/***/ } ) ,
/***/ 7784 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const core = _ _importStar ( _ _nccwpck _require _ _ ( 6705 ) ) ;
const io = _ _importStar ( _ _nccwpck _require _ _ ( 7436 ) ) ;
const fs = _ _importStar ( _ _nccwpck _require _ _ ( 5747 ) ) ;
const mm = _ _importStar ( _ _nccwpck _require _ _ ( 2473 ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2087 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 5622 ) ) ;
const httpm = _ _importStar ( _ _nccwpck _require _ _ ( 7371 ) ) ;
const semver = _ _importStar ( _ _nccwpck _require _ _ ( 5911 ) ) ;
const stream = _ _importStar ( _ _nccwpck _require _ _ ( 2413 ) ) ;
const util = _ _importStar ( _ _nccwpck _require _ _ ( 1669 ) ) ;
const v4 _1 = _ _importDefault ( _ _nccwpck _require _ _ ( 824 ) ) ;
const exec _1 = _ _nccwpck _require _ _ ( 1514 ) ;
const assert _1 = _ _nccwpck _require _ _ ( 2357 ) ;
const retry _helper _1 = _ _nccwpck _require _ _ ( 8279 ) ;
class HTTPError extends Error {
constructor ( httpStatusCode ) {
super ( ` Unexpected HTTP response: ${ httpStatusCode } ` ) ;
this . httpStatusCode = httpStatusCode ;
Object . setPrototypeOf ( this , new . target . prototype ) ;
}
}
exports . HTTPError = HTTPError ;
const IS _WINDOWS = process . platform === 'win32' ;
const userAgent = 'actions/tool-cache' ;
/ * *
* Download a tool from an url and stream it into a file
*
* @ param url url of tool to download
* @ param dest path to download tool
* @ param auth authorization header
* @ returns path to downloaded tool
* /
function downloadTool ( url , dest , auth ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
dest = dest || path . join ( _getTempDirectory ( ) , v4 _1 . default ( ) ) ;
yield io . mkdirP ( path . dirname ( dest ) ) ;
core . debug ( ` Downloading ${ url } ` ) ;
core . debug ( ` Destination ${ dest } ` ) ;
const maxAttempts = 3 ;
const minSeconds = _getGlobal ( 'TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS' , 10 ) ;
const maxSeconds = _getGlobal ( 'TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS' , 20 ) ;
const retryHelper = new retry _helper _1 . RetryHelper ( maxAttempts , minSeconds , maxSeconds ) ;
return yield retryHelper . execute ( ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield downloadToolAttempt ( url , dest || '' , auth ) ;
} ) , ( err ) => {
if ( err instanceof HTTPError && err . httpStatusCode ) {
// Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
if ( err . httpStatusCode < 500 &&
err . httpStatusCode !== 408 &&
err . httpStatusCode !== 429 ) {
return false ;
}
}
// Otherwise retry
return true ;
} ) ;
} ) ;
}
exports . downloadTool = downloadTool ;
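// Illustrative usage sketch (added example, hypothetical URL): downloadTool defaults the
// destination to a random file name under RUNNER_TEMP and retries transient failures
// (HTTP 5xx, 408, 429) up to three times. Never invoked by the module.
async function exampleDownloadToolUsage() {
    const downloadPath = await downloadTool('https://example.com/sample-tool-v1.2.3-linux-x64.tar.gz');
    return downloadPath; // e.g. '<RUNNER_TEMP>/<uuid>'
}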
function downloadToolAttempt ( url , dest , auth ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( fs . existsSync ( dest ) ) {
throw new Error ( ` Destination file path ${ dest } already exists ` ) ;
}
// Get the response headers
const http = new httpm . HttpClient ( userAgent , [ ] , {
allowRetries : false
} ) ;
let headers ;
if ( auth ) {
core . debug ( 'set auth' ) ;
headers = {
authorization : auth
} ;
}
const response = yield http . get ( url , headers ) ;
if ( response . message . statusCode !== 200 ) {
const err = new HTTPError ( response . message . statusCode ) ;
core . debug ( ` Failed to download from " ${ url } ". Code( ${ response . message . statusCode } ) Message( ${ response . message . statusMessage } ) ` ) ;
throw err ;
}
// Download the response body
const pipeline = util . promisify ( stream . pipeline ) ;
const responseMessageFactory = _getGlobal ( 'TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY' , ( ) => response . message ) ;
const readStream = responseMessageFactory ( ) ;
let succeeded = false ;
try {
yield pipeline ( readStream , fs . createWriteStream ( dest ) ) ;
core . debug ( 'download complete' ) ;
succeeded = true ;
return dest ;
}
finally {
// Error, delete dest before retry
if ( ! succeeded ) {
core . debug ( 'download failed' ) ;
try {
yield io . rmRF ( dest ) ;
}
catch ( err ) {
core . debug ( ` Failed to delete ' ${ dest } '. ${ err . message } ` ) ;
}
}
}
} ) ;
}
/ * *
* Extract a . 7 z file
*
* @ param file path to the . 7 z file
* @ param dest destination directory . Optional .
* @ param _7zPath path to 7 zr . exe . Optional , for long path support . Most . 7 z archives do not have this
* problem . If your . 7 z archive contains very long paths , you can pass the path to 7 zr . exe which will
* gracefully handle long paths . By default 7 zdec . exe is used because it is a very small program and is
* bundled with the tool lib . However it does not support long paths . 7 zr . exe is the reduced command line
* interface , it is smaller than the full command line interface , and it does support long paths . At the
* time of this writing , it is freely available from the LZMA SDK that is available on the 7 zip website .
* Be sure to check the current license agreement . If 7 zr . exe is bundled with your action , then the path
 * to 7 zr . exe can be passed to this function .
* @ returns path to the destination directory
* /
function extract7z ( file , dest , _7zPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( IS _WINDOWS , 'extract7z() not supported on current OS' ) ;
assert _1 . ok ( file , 'parameter "file" is required' ) ;
dest = yield _createExtractFolder ( dest ) ;
const originalCwd = process . cwd ( ) ;
process . chdir ( dest ) ;
if ( _7zPath ) {
try {
const logLevel = core . isDebug ( ) ? '-bb1' : '-bb0' ;
const args = [
'x' ,
logLevel ,
'-bd' ,
'-sccUTF-8' ,
file
] ;
const options = {
silent : true
} ;
yield exec _1 . exec ( ` " ${ _7zPath } " ` , args , options ) ;
}
finally {
process . chdir ( originalCwd ) ;
}
}
else {
const escapedScript = path
. join ( _ _dirname , '..' , 'scripts' , 'Invoke-7zdec.ps1' )
. replace ( /'/g , "''" )
. replace ( /"|\n|\r/g , '' ) ; // double-up single quotes, remove double quotes and newlines
const escapedFile = file . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
const escapedTarget = dest . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
const command = ` & ' ${ escapedScript } ' -Source ' ${ escapedFile } ' -Target ' ${ escapedTarget } ' ` ;
const args = [
'-NoLogo' ,
'-Sta' ,
'-NoProfile' ,
'-NonInteractive' ,
'-ExecutionPolicy' ,
'Unrestricted' ,
'-Command' ,
command
] ;
const options = {
silent : true
} ;
try {
const powershellPath = yield io . which ( 'powershell' , true ) ;
yield exec _1 . exec ( ` " ${ powershellPath } " ` , args , options ) ;
}
finally {
process . chdir ( originalCwd ) ;
}
}
return dest ;
} ) ;
}
exports . extract7z = extract7z ;
/ * *
* Extract a compressed tar archive
*
* @ param file path to the tar
* @ param dest destination directory . Optional .
* @ param flags flags for the tar command to use for extraction . Defaults to 'xz' ( extracting gzipped tars ) . Optional .
* @ returns path to the destination directory
* /
function extractTar ( file , dest , flags = 'xz' ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! file ) {
throw new Error ( "parameter 'file' is required" ) ;
}
// Create dest
dest = yield _createExtractFolder ( dest ) ;
// Determine whether GNU tar
core . debug ( 'Checking tar --version' ) ;
let versionOutput = '' ;
yield exec _1 . exec ( 'tar --version' , [ ] , {
ignoreReturnCode : true ,
silent : true ,
listeners : {
stdout : ( data ) => ( versionOutput += data . toString ( ) ) ,
stderr : ( data ) => ( versionOutput += data . toString ( ) )
}
} ) ;
core . debug ( versionOutput . trim ( ) ) ;
const isGnuTar = versionOutput . toUpperCase ( ) . includes ( 'GNU TAR' ) ;
// Initialize args
let args ;
if ( flags instanceof Array ) {
args = flags ;
}
else {
args = [ flags ] ;
}
if ( core . isDebug ( ) && ! flags . includes ( 'v' ) ) {
args . push ( '-v' ) ;
}
let destArg = dest ;
let fileArg = file ;
if ( IS _WINDOWS && isGnuTar ) {
args . push ( '--force-local' ) ;
destArg = dest . replace ( /\\/g , '/' ) ;
// Technically only the dest needs to have `/` but for aesthetic consistency
// convert slashes in the file arg too.
fileArg = file . replace ( /\\/g , '/' ) ;
}
if ( isGnuTar ) {
// Suppress warnings when using GNU tar to extract archives created by BSD tar
args . push ( '--warning=no-unknown-keyword' ) ;
}
args . push ( '-C' , destArg , '-f' , fileArg ) ;
yield exec _1 . exec ( ` tar ` , args ) ;
return dest ;
} ) ;
}
exports . extractTar = extractTar ;
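// Illustrative usage sketch (added example, hypothetical paths): the default 'xz' flags extract
// gzipped tars; flags can also be passed as an array to add extra tar options such as
// --strip-components (GNU/BSD tar option, assumed available here). Never invoked by the module.
async function exampleExtractTarUsage() {
    const extracted = await extractTar('/tmp/sample-tool.tar.gz');
    const flattened = await extractTar('/tmp/sample-tool.tar.gz', '/tmp/tool', ['xz', '--strip-components=1']);
    return { extracted, flattened };
}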
/ * *
* Extract a zip
*
* @ param file path to the zip
* @ param dest destination directory . Optional .
* @ returns path to the destination directory
* /
function extractZip ( file , dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! file ) {
throw new Error ( "parameter 'file' is required" ) ;
}
dest = yield _createExtractFolder ( dest ) ;
if ( IS _WINDOWS ) {
yield extractZipWin ( file , dest ) ;
}
else {
yield extractZipNix ( file , dest ) ;
}
return dest ;
} ) ;
}
exports . extractZip = extractZip ;
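// Illustrative usage sketch (added example, hypothetical path): extraction shells out to
// PowerShell's ZipFile API on Windows and to unzip elsewhere; both paths return the destination
// directory. Never invoked by the module.
async function exampleExtractZipUsage() {
    return await extractZip('/tmp/sample-tool.zip'); // extracted into a temp dir under RUNNER_TEMP
}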
function extractZipWin ( file , dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// build the powershell command
const escapedFile = file . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ; // double-up single quotes, remove double quotes and newlines
const escapedDest = dest . replace ( /'/g , "''" ) . replace ( /"|\n|\r/g , '' ) ;
const command = ` $ ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory(' ${ escapedFile } ', ' ${ escapedDest } ') ` ;
// run powershell
const powershellPath = yield io . which ( 'powershell' , true ) ;
const args = [
'-NoLogo' ,
'-Sta' ,
'-NoProfile' ,
'-NonInteractive' ,
'-ExecutionPolicy' ,
'Unrestricted' ,
'-Command' ,
command
] ;
yield exec _1 . exec ( ` " ${ powershellPath } " ` , args ) ;
} ) ;
}
function extractZipNix ( file , dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const unzipPath = yield io . which ( 'unzip' , true ) ;
const args = [ file ] ;
if ( ! core . isDebug ( ) ) {
args . unshift ( '-q' ) ;
}
yield exec _1 . exec ( ` " ${ unzipPath } " ` , args , { cwd : dest } ) ;
} ) ;
}
/ * *
* Caches a directory and installs it into the tool cacheDir
*
* @ param sourceDir the directory to cache into tools
* @ param tool tool name
* @ param version version of the tool . semver format
* @ param arch architecture of the tool . Optional . Defaults to machine architecture
* /
function cacheDir ( sourceDir , tool , version , arch ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
version = semver . clean ( version ) || version ;
arch = arch || os . arch ( ) ;
core . debug ( ` Caching tool ${ tool } ${ version } ${ arch } ` ) ;
core . debug ( ` source dir: ${ sourceDir } ` ) ;
if ( ! fs . statSync ( sourceDir ) . isDirectory ( ) ) {
throw new Error ( 'sourceDir is not a directory' ) ;
}
// Create the tool dir
const destPath = yield _createToolPath ( tool , version , arch ) ;
// copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file.
for ( const itemName of fs . readdirSync ( sourceDir ) ) {
const s = path . join ( sourceDir , itemName ) ;
yield io . cp ( s , destPath , { recursive : true } ) ;
}
// write .complete
_completeToolPath ( tool , version , arch ) ;
return destPath ;
} ) ;
}
exports . cacheDir = cacheDir ;
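// Illustrative usage sketch (added example, hypothetical tool and version): the typical pipeline
// is download, extract, then cache the extracted directory under
// RUNNER_TOOL_CACHE/<tool>/<version>/<arch>. Never invoked by the module.
async function exampleCacheDirUsage() {
    const archivePath = await downloadTool('https://example.com/sample-tool-1.2.3-linux-x64.tar.gz');
    const extractedDir = await extractTar(archivePath);
    return await cacheDir(extractedDir, 'sample-tool', '1.2.3'); // arch defaults to os.arch()
}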
/ * *
* Caches a downloaded file ( GUID ) and installs it
* into the tool cache with a given targetName
*
* @ param sourceFile the file to cache into tools . Typically a result of downloadTool which is a guid .
* @ param targetFile the name of the file name in the tools directory
* @ param tool tool name
* @ param version version of the tool . semver format
* @ param arch architecture of the tool . Optional . Defaults to machine architecture
* /
function cacheFile ( sourceFile , targetFile , tool , version , arch ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
version = semver . clean ( version ) || version ;
arch = arch || os . arch ( ) ;
core . debug ( ` Caching tool ${ tool } ${ version } ${ arch } ` ) ;
core . debug ( ` source file: ${ sourceFile } ` ) ;
if ( ! fs . statSync ( sourceFile ) . isFile ( ) ) {
throw new Error ( 'sourceFile is not a file' ) ;
}
// create the tool dir
const destFolder = yield _createToolPath ( tool , version , arch ) ;
// copy instead of move. move can fail on Windows due to
// anti-virus software having an open handle on a file.
const destPath = path . join ( destFolder , targetFile ) ;
core . debug ( ` destination file ${ destPath } ` ) ;
yield io . cp ( sourceFile , destPath ) ;
// write .complete
_completeToolPath ( tool , version , arch ) ;
return destFolder ;
} ) ;
}
exports . cacheFile = cacheFile ;
/ * *
* Finds the path to a tool version in the local installed tool cache
*
* @ param toolName name of the tool
* @ param versionSpec version of the tool
* @ param arch optional arch . defaults to arch of computer
* /
function find ( toolName , versionSpec , arch ) {
if ( ! toolName ) {
throw new Error ( 'toolName parameter is required' ) ;
}
if ( ! versionSpec ) {
throw new Error ( 'versionSpec parameter is required' ) ;
}
arch = arch || os . arch ( ) ;
// attempt to resolve an explicit version
if ( ! _isExplicitVersion ( versionSpec ) ) {
const localVersions = findAllVersions ( toolName , arch ) ;
const match = _evaluateVersions ( localVersions , versionSpec ) ;
versionSpec = match ;
}
// check for the explicit version in the cache
let toolPath = '' ;
if ( versionSpec ) {
versionSpec = semver . clean ( versionSpec ) || '' ;
const cachePath = path . join ( _getCacheDirectory ( ) , toolName , versionSpec , arch ) ;
core . debug ( ` checking cache: ${ cachePath } ` ) ;
if ( fs . existsSync ( cachePath ) && fs . existsSync ( ` ${ cachePath } .complete ` ) ) {
core . debug ( ` Found tool in cache ${ toolName } ${ versionSpec } ${ arch } ` ) ;
toolPath = cachePath ;
}
else {
core . debug ( 'not found' ) ;
}
}
return toolPath ;
}
exports . find = find ;
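// Illustrative usage sketch (added example, hypothetical tool and version spec): find resolves a
// semver range against the versions already present in RUNNER_TOOL_CACHE and returns '' when
// nothing matches, so callers can fall back to downloading. Never invoked by the module.
function exampleFindUsage() {
    const cachedPath = find('sample-tool', '1.x'); // e.g. '<RUNNER_TOOL_CACHE>/sample-tool/1.2.3/x64', or ''
    return cachedPath !== '' ? cachedPath : null;
}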
/ * *
* Finds the paths to all versions of a tool that are installed in the local tool cache
*
* @ param toolName name of the tool
* @ param arch optional arch . defaults to arch of computer
* /
function findAllVersions ( toolName , arch ) {
const versions = [ ] ;
arch = arch || os . arch ( ) ;
const toolPath = path . join ( _getCacheDirectory ( ) , toolName ) ;
if ( fs . existsSync ( toolPath ) ) {
const children = fs . readdirSync ( toolPath ) ;
for ( const child of children ) {
if ( _isExplicitVersion ( child ) ) {
const fullPath = path . join ( toolPath , child , arch || '' ) ;
if ( fs . existsSync ( fullPath ) && fs . existsSync ( ` ${ fullPath } .complete ` ) ) {
versions . push ( child ) ;
}
}
}
}
return versions ;
}
exports . findAllVersions = findAllVersions ;
function getManifestFromRepo ( owner , repo , auth , branch = 'master' ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let releases = [ ] ;
const treeUrl = ` https://api.github.com/repos/ ${ owner } / ${ repo } /git/trees/ ${ branch } ` ;
const http = new httpm . HttpClient ( 'tool-cache' ) ;
const headers = { } ;
if ( auth ) {
core . debug ( 'set auth' ) ;
headers . authorization = auth ;
}
const response = yield http . getJson ( treeUrl , headers ) ;
if ( ! response . result ) {
return releases ;
}
let manifestUrl = '' ;
for ( const item of response . result . tree ) {
if ( item . path === 'versions-manifest.json' ) {
manifestUrl = item . url ;
break ;
}
}
headers [ 'accept' ] = 'application/vnd.github.VERSION.raw' ;
let versionsRaw = yield ( yield http . get ( manifestUrl , headers ) ) . readBody ( ) ;
if ( versionsRaw ) {
// shouldn't be needed but protects against invalid json saved with BOM
versionsRaw = versionsRaw . replace ( /^\uFEFF/ , '' ) ;
try {
releases = JSON . parse ( versionsRaw ) ;
}
catch ( _a ) {
core . debug ( 'Invalid json' ) ;
}
}
return releases ;
} ) ;
}
exports . getManifestFromRepo = getManifestFromRepo ;
function findFromManifest ( versionSpec , stable , manifest , archFilter = os . arch ( ) ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// wrap the internal impl
const match = yield mm . _findMatch ( versionSpec , stable , manifest , archFilter ) ;
return match ;
} ) ;
}
exports . findFromManifest = findFromManifest ;
function _createExtractFolder ( dest ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
if ( ! dest ) {
// create a temp dir
dest = path . join ( _getTempDirectory ( ) , v4 _1 . default ( ) ) ;
}
yield io . mkdirP ( dest ) ;
return dest ;
} ) ;
}
function _createToolPath ( tool , version , arch ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const folderPath = path . join ( _getCacheDirectory ( ) , tool , semver . clean ( version ) || version , arch || '' ) ;
core . debug ( ` destination ${ folderPath } ` ) ;
const markerPath = ` ${ folderPath } .complete ` ;
yield io . rmRF ( folderPath ) ;
yield io . rmRF ( markerPath ) ;
yield io . mkdirP ( folderPath ) ;
return folderPath ;
} ) ;
}
function _completeToolPath ( tool , version , arch ) {
const folderPath = path . join ( _getCacheDirectory ( ) , tool , semver . clean ( version ) || version , arch || '' ) ;
const markerPath = ` ${ folderPath } .complete ` ;
fs . writeFileSync ( markerPath , '' ) ;
core . debug ( 'finished caching tool' ) ;
}
function _isExplicitVersion ( versionSpec ) {
const c = semver . clean ( versionSpec ) || '' ;
core . debug ( ` isExplicit: ${ c } ` ) ;
const valid = semver . valid ( c ) != null ;
core . debug ( ` explicit? ${ valid } ` ) ;
return valid ;
}
function _evaluateVersions ( versions , versionSpec ) {
let version = '' ;
core . debug ( ` evaluating ${ versions . length } versions ` ) ;
versions = versions . sort ( ( a , b ) => {
if ( semver . gt ( a , b ) ) {
return 1 ;
}
return - 1 ;
} ) ;
for ( let i = versions . length - 1 ; i >= 0 ; i -- ) {
const potential = versions [ i ] ;
const satisfied = semver . satisfies ( potential , versionSpec ) ;
if ( satisfied ) {
version = potential ;
break ;
}
}
if ( version ) {
core . debug ( ` matched: ${ version } ` ) ;
}
else {
core . debug ( 'match not found' ) ;
}
return version ;
}
/ * *
* Gets RUNNER _TOOL _CACHE
* /
function _getCacheDirectory ( ) {
const cacheDirectory = process . env [ 'RUNNER_TOOL_CACHE' ] || '' ;
assert _1 . ok ( cacheDirectory , 'Expected RUNNER_TOOL_CACHE to be defined' ) ;
return cacheDirectory ;
}
/ * *
* Gets RUNNER _TEMP
* /
function _getTempDirectory ( ) {
const tempDirectory = process . env [ 'RUNNER_TEMP' ] || '' ;
assert _1 . ok ( tempDirectory , 'Expected RUNNER_TEMP to be defined' ) ;
return tempDirectory ;
}
/ * *
* Gets a global variable
* /
function _getGlobal ( key , defaultValue ) {
/* eslint-disable @typescript-eslint/no-explicit-any */
const value = global [ key ] ;
/* eslint-enable @typescript-eslint/no-explicit-any */
return value !== undefined ? value : defaultValue ;
}
//# sourceMappingURL=tool-cache.js.map
/***/ } ) ,
/***/ 3532 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2087 ) ) ;
/ * *
* Commands
*
* Command Format :
* : : name key = value , key = value : : message
*
* Examples :
* : : warning : : This is the message
* : : set - env name = MY _VAR : : some value
* /
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
}
this . command = command ;
this . properties = properties ;
this . message = message ;
}
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
let first = true ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
if ( first ) {
first = false ;
}
else {
cmdStr += ',' ;
}
cmdStr += ` ${ key } = ${ escapeProperty ( val ) } ` ;
}
}
}
}
cmdStr += ` ${ CMD _STRING } ${ escapeData ( this . message ) } ` ;
return cmdStr ;
}
}
/ * *
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
* /
function toCommandValue ( input ) {
if ( input === null || input === undefined ) {
return '' ;
}
else if ( typeof input === 'string' || input instanceof String ) {
return input ;
}
return JSON . stringify ( input ) ;
}
exports . toCommandValue = toCommandValue ;
function escapeData ( s ) {
return toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' ) ;
}
function escapeProperty ( s ) {
return toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /:/g , '%3A' )
. replace ( /,/g , '%2C' ) ;
}
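// Illustrative sketch (added example): what issueCommand ultimately writes to stdout. Property
// values go through escapeProperty and the message through escapeData, so newlines survive as
// %0A. Never invoked by the module.
function exampleCommandString() {
    const cmd = new Command('set-output', { name: 'result' }, 'line one\nline two');
    return cmd.toString(); // '::set-output name=result::line one%0Aline two'
}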
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 6705 :
/***/ ( function ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const command _1 = _ _nccwpck _require _ _ ( 3532 ) ;
const os = _ _importStar ( _ _nccwpck _require _ _ ( 2087 ) ) ;
const path = _ _importStar ( _ _nccwpck _require _ _ ( 5622 ) ) ;
/ * *
* The code to exit an action
* /
var ExitCode ;
( function ( ExitCode ) {
/ * *
* A code indicating that the action was successful
* /
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/ * *
* A code indicating that the action was a failure
* /
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/ * *
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
* @ param val the value of the variable . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable ( name , val ) {
const convertedVal = command _1 . toCommandValue ( val ) ;
process . env [ name ] = convertedVal ;
command _1 . issueCommand ( 'set-env' , { name } , convertedVal ) ;
}
exports . exportVariable = exportVariable ;
/ * *
* Registers a secret which will get masked from logs
* @ param secret value of the secret
* /
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
}
exports . setSecret = setSecret ;
/ * *
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
* /
function addPath ( inputPath ) {
command _1 . issueCommand ( 'add-path' , { } , inputPath ) ;
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
}
exports . addPath = addPath ;
/ * *
* Gets the value of an input . The value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
* /
function getInput ( name , options ) {
const val = process . env [ ` INPUT_ ${ name . replace ( / /g , '_' ) . toUpperCase ( ) } ` ] || '' ;
if ( options && options . required && ! val ) {
throw new Error ( ` Input required and not supplied: ${ name } ` ) ;
}
return val . trim ( ) ;
}
exports . getInput = getInput ;
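// Illustrative sketch (added example, hypothetical input name): the runner surfaces action
// inputs as INPUT_* environment variables, which is what getInput reads; spaces in the input
// name become underscores and the value is trimmed. Never invoked by the module.
function exampleGetInput() {
    process.env['INPUT_SAMPLE_INPUT'] = '  some value  ';
    return getInput('sample input'); // 'some value'
}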
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput ( name , value ) {
command _1 . issueCommand ( 'set-output' , { name } , value ) ;
}
exports . setOutput = setOutput ;
/ * *
* Enables or disables the echoing of commands into stdout for the rest of the step .
* Echoing is disabled by default if ACTIONS _STEP _DEBUG is not set .
*
* /
function setCommandEcho ( enabled ) {
command _1 . issue ( 'echo' , enabled ? 'on' : 'off' ) ;
}
exports . setCommandEcho = setCommandEcho ;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/ * *
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message add error issue message
* /
function setFailed ( message ) {
process . exitCode = ExitCode . Failure ;
error ( message ) ;
}
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Gets whether Actions Step Debug is on or not
* /
function isDebug ( ) {
return process . env [ 'RUNNER_DEBUG' ] === '1' ;
}
exports . isDebug = isDebug ;
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
 * @ param message error issue message . Errors will be converted to string via toString ( )
* /
function error ( message ) {
command _1 . issue ( 'error' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . error = error ;
/ * *
 * Adds a warning issue
 * @ param message warning issue message . Errors will be converted to string via toString ( )
* /
function warning ( message ) {
command _1 . issue ( 'warning' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . warning = warning ;
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/ * *
* Begin an output group .
*
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
* /
function startGroup ( name ) {
command _1 . issue ( 'group' , name ) ;
}
exports . startGroup = startGroup ;
/ * *
* End an output group .
* /
function endGroup ( ) {
command _1 . issue ( 'endgroup' ) ;
}
exports . endGroup = endGroup ;
/ * *
* Wrap an asynchronous function call in a group .
*
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
* /
function group ( name , fn ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
startGroup ( name ) ;
let result ;
try {
result = yield fn ( ) ;
}
finally {
endGroup ( ) ;
}
return result ;
} ) ;
}
exports . group = group ;
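// Usage sketch (illustrative only; not executed by the bundle): wrapping an async task so
// its log output is foldable in the job log; the label and task are hypothetical.
//
//   const core = require('@actions/core');
//   const result = await core.group('Install dependencies', async () => {
//     // ... run the work whose output should appear inside the collapsible group ...
//     return 'done';
//   });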
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/ * *
 * Saves state for the current action . The state can only be retrieved by this action ' s post job execution .
 *
 * @ param name name of the state to store
 * @ param value value to store . Non - string values will be converted to a string via JSON . stringify
 * /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState ( name , value ) {
command _1 . issueCommand ( 'save-state' , { name } , value ) ;
}
exports . saveState = saveState ;
/ * *
 * Gets the value of a state set by this action ' s main execution .
*
* @ param name name of the state to get
* @ returns string
* /
function getState ( name ) {
return process . env [ ` STATE_ ${ name } ` ] || '' ;
}
exports . getState = getState ;
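// Usage sketch (illustrative only; not executed by the bundle): the main entry point of an
// action saves state, and the same action's post-job script reads it back (via the
// STATE_<name> environment variable, as above); the state name is a hypothetical example.
//
//   // main.js
//   core.saveState('cachePrimaryKey', key);
//   // post.js (runs after the job)
//   const key = core.getState('cachePrimaryKey');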
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 7371 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const url = _ _nccwpck _require _ _ ( 8835 ) ;
const http = _ _nccwpck _require _ _ ( 8605 ) ;
const https = _ _nccwpck _require _ _ ( 7211 ) ;
const pm = _ _nccwpck _require _ _ ( 3118 ) ;
let tunnel ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/ * *
 * Returns the proxy URL , depending upon the supplied url and proxy environment variables .
 * @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
 * /
function getProxyUrl ( serverUrl ) {
let proxyUrl = pm . getProxyUrl ( url . parse ( serverUrl ) ) ;
return proxyUrl ? proxyUrl . href : '' ;
}
exports . getProxyUrl = getProxyUrl ;
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
class HttpClientResponse {
constructor ( message ) {
this . message = message ;
}
readBody ( ) {
return new Promise ( async ( resolve , reject ) => {
let output = Buffer . alloc ( 0 ) ;
this . message . on ( 'data' , ( chunk ) => {
output = Buffer . concat ( [ output , chunk ] ) ;
} ) ;
this . message . on ( 'end' , ( ) => {
resolve ( output . toString ( ) ) ;
} ) ;
} ) ;
}
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
let parsedUrl = url . parse ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
options ( requestUrl , additionalHeaders ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
}
get ( requestUrl , additionalHeaders ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
}
del ( requestUrl , additionalHeaders ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
}
head ( requestUrl , additionalHeaders ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that not found returns a null . Other errors ( 4 xx , 5 xx ) reject the promise
* /
async getJson ( requestUrl , additionalHeaders = { } ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
let res = await this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async postJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async putJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async patchJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
async request ( verb , requestUrl , data , headers ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
}
let parsedUrl = url . parse ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this . _allowRetries && RetryableHttpVerbs . indexOf ( verb ) != - 1
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
while ( numTries < maxTries ) {
response = await this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( let i = 0 ; i < this . handlers . length ; i ++ ) {
if ( this . handlers [ i ] . canHandleAuthentication ( response ) ) {
authenticationHandler = this . handlers [ i ] ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( HttpRedirectCodes . indexOf ( response . message . statusCode ) != - 1 &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
let parsedRedirectUrl = url . parse ( redirectUrl ) ;
if ( parsedUrl . protocol == 'https:' &&
parsedUrl . protocol != parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning it,
// otherwise the open socket will leak.
await response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( let header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
}
}
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = await this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( HttpResponseRetryCodes . indexOf ( response . message . statusCode ) == - 1 ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
await response . readBody ( ) ;
await this . _performExponentialBackoff ( numTries ) ;
}
}
return response ;
}
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return new Promise ( ( resolve , reject ) => {
let callbackForResult = function ( err , res ) {
if ( err ) {
reject ( err ) ;
}
resolve ( res ) ;
} ;
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
}
/ * *
* Raw request with callback .
* @ param info
* @ param data
* @ param onResult
* /
requestRawWithCallback ( info , data , onResult ) {
let socket ;
if ( typeof data === 'string' ) {
info . options . headers [ 'Content-Length' ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
let handleResult = ( err , res ) => {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
}
} ;
let req = info . httpModule . request ( info . options , ( msg ) => {
let res = new HttpClientResponse ( msg ) ;
handleResult ( null , res ) ;
} ) ;
req . on ( 'socket' , sock => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
}
handleResult ( new Error ( 'Request timeout: ' + info . options . path ) , null ) ;
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err , null ) ;
} ) ;
if ( data && typeof data === 'string' ) {
req . write ( data , 'utf8' ) ;
}
if ( data && typeof data !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
}
else {
req . end ( ) ;
}
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
getAgent ( serverUrl ) {
let parsedUrl = url . parse ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
this . handlers . forEach ( handler => {
handler . prepareRequest ( info . options ) ;
} ) ;
}
return info ;
}
_mergeHeaders ( headers ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
let proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
let useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( ! ! agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( ! ! this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
if ( useProxy ) {
// If using proxy, need tunnel
if ( ! tunnel ) {
tunnel = _ _nccwpck _require _ _ ( 4294 ) ;
}
const agentOptions = {
maxSockets : maxSockets ,
keepAlive : this . _keepAlive ,
proxy : {
proxyAuth : proxyUrl . auth ,
host : proxyUrl . hostname ,
port : proxyUrl . port
}
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
}
else {
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
}
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets : maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , {
rejectUnauthorized : false
} ) ;
}
return agent ;
}
_performExponentialBackoff ( retryNumber ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
}
static dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
let a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
async _processResponse ( res , options ) {
return new Promise ( async ( resolve , reject ) => {
const statusCode = res . message . statusCode ;
const response = {
statusCode : statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode == HttpCodes . NotFound ) {
resolve ( response ) ;
}
let obj ;
let contents ;
// get the result from the body
try {
contents = await res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , HttpClient . dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
}
response . headers = res . message . headers ;
}
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = 'Failed request: (' + statusCode + ')' ;
}
let err = new Error ( msg ) ;
// attach statusCode and body obj (if available) to the error object
err [ 'statusCode' ] = statusCode ;
if ( response . result ) {
err [ 'result' ] = response . result ;
}
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ;
}
}
exports . HttpClient = HttpClient ;
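// Usage sketch (illustrative only; not executed by the bundle): the class above is published
// as @actions/http-client; the user agent, URL and retry settings are hypothetical examples.
//
//   const { HttpClient } = require('@actions/http-client');
//   const client = new HttpClient('my-user-agent', [], { allowRetries: true, maxRetries: 2 });
//   const res = await client.getJson('https://api.github.com/repos/actions/setup-node');
//   console.log(res.statusCode, res.result && res.result.full_name);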
/***/ } ) ,
/***/ 3118 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const url = _ _nccwpck _require _ _ ( 8835 ) ;
function getProxyUrl ( reqUrl ) {
let usingSsl = reqUrl . protocol === 'https:' ;
let proxyUrl ;
if ( checkBypass ( reqUrl ) ) {
return proxyUrl ;
}
let proxyVar ;
if ( usingSsl ) {
proxyVar = process . env [ 'https_proxy' ] || process . env [ 'HTTPS_PROXY' ] ;
}
else {
proxyVar = process . env [ 'http_proxy' ] || process . env [ 'HTTP_PROXY' ] ;
}
if ( proxyVar ) {
proxyUrl = url . parse ( proxyVar ) ;
}
return proxyUrl ;
}
exports . getProxyUrl = getProxyUrl ;
function checkBypass ( reqUrl ) {
if ( ! reqUrl . hostname ) {
return false ;
}
let noProxy = process . env [ 'no_proxy' ] || process . env [ 'NO_PROXY' ] || '' ;
if ( ! noProxy ) {
return false ;
}
// Determine the request port
let reqPort ;
if ( reqUrl . port ) {
reqPort = Number ( reqUrl . port ) ;
}
else if ( reqUrl . protocol === 'http:' ) {
reqPort = 80 ;
}
else if ( reqUrl . protocol === 'https:' ) {
reqPort = 443 ;
}
// Format the request hostname and hostname with port
let upperReqHosts = [ reqUrl . hostname . toUpperCase ( ) ] ;
if ( typeof reqPort === 'number' ) {
upperReqHosts . push ( ` ${ upperReqHosts [ 0 ] } : ${ reqPort } ` ) ;
}
// Compare request host against noproxy
for ( let upperNoProxyItem of noProxy
. split ( ',' )
. map ( x => x . trim ( ) . toUpperCase ( ) )
. filter ( x => x ) ) {
if ( upperReqHosts . some ( x => x === upperNoProxyItem ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
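// Usage sketch (illustrative only; not executed by the bundle): with HTTPS_PROXY / NO_PROXY
// set in the environment, getProxyUrl returns the proxy to use (or undefined when the host
// is bypassed); the hostnames are hypothetical examples.
//
//   const url = require('url');
//   const proxy = getProxyUrl(url.parse('https://registry.npmjs.org'));   // proxy URL or undefined
//   const skip = checkBypass(url.parse('https://internal.example.com'));  // true if NO_PROXY matches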
/***/ } ) ,
/***/ 334 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
async function auth ( token ) {
const tokenType = token . split ( /\./ ) . length === 3 ? "app" : /^v\d+\./ . test ( token ) ? "installation" : "oauth" ;
return {
type : "token" ,
token : token ,
tokenType
} ;
}

/ * *
 * Prefix token for usage in the Authorization header
 *
 * @ param token OAuth token or JSON Web Token
 * /
function withAuthorizationPrefix ( token ) {
if ( token . split ( /\./ ) . length === 3 ) {
return ` bearer ${ token } ` ;
}
return ` token ${ token } ` ;
}
async function hook ( token , request , route , parameters ) {
const endpoint = request . endpoint . merge ( route , parameters ) ;
endpoint . headers . authorization = withAuthorizationPrefix ( token ) ;
return request ( endpoint ) ;
}
const createTokenAuth = function createTokenAuth ( token ) {
if ( ! token ) {
throw new Error ( "[@octokit/auth-token] No token passed to createTokenAuth" ) ;
}
if ( typeof token !== "string" ) {
throw new Error ( "[@octokit/auth-token] Token passed to createTokenAuth is not a string" ) ;
}
token = token . replace ( /^(token|bearer) +/i , "" ) ;
return Object . assign ( auth . bind ( null , token ) , {
hook : hook . bind ( null , token )
} ) ;
} ;
exports . createTokenAuth = createTokenAuth ;
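// Usage sketch (illustrative only; not executed by the bundle): @octokit/auth-token used on
// its own; the token value is a placeholder.
//
//   const auth = createTokenAuth('ghp_xxxxxxxxxxxxxxxxxxxx');
//   const { type, token, tokenType } = await auth(); // { type: 'token', tokenType: 'oauth', ... }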
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 6762 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
var universalUserAgent = _ _nccwpck _require _ _ ( 5030 ) ;
var beforeAfterHook = _ _nccwpck _require _ _ ( 3682 ) ;
var request = _ _nccwpck _require _ _ ( 6234 ) ;
var graphql = _ _nccwpck _require _ _ ( 8467 ) ;
var authToken = _ _nccwpck _require _ _ ( 334 ) ;
function _objectWithoutPropertiesLoose ( source , excluded ) {
if ( source == null ) return { } ;
var target = { } ;
var sourceKeys = Object . keys ( source ) ;
var key , i ;
for ( i = 0 ; i < sourceKeys . length ; i ++ ) {
key = sourceKeys [ i ] ;
if ( excluded . indexOf ( key ) >= 0 ) continue ;
target [ key ] = source [ key ] ;
}
return target ;
}
function _objectWithoutProperties ( source , excluded ) {
if ( source == null ) return { } ;
var target = _objectWithoutPropertiesLoose ( source , excluded ) ;
var key , i ;
if ( Object . getOwnPropertySymbols ) {
var sourceSymbolKeys = Object . getOwnPropertySymbols ( source ) ;
for ( i = 0 ; i < sourceSymbolKeys . length ; i ++ ) {
key = sourceSymbolKeys [ i ] ;
if ( excluded . indexOf ( key ) >= 0 ) continue ;
if ( ! Object . prototype . propertyIsEnumerable . call ( source , key ) ) continue ;
target [ key ] = source [ key ] ;
}
}
return target ;
}
const VERSION = "3.2.5" ;
class Octokit {
constructor ( options = { } ) {
const hook = new beforeAfterHook . Collection ( ) ;
const requestDefaults = {
baseUrl : request . request . endpoint . DEFAULTS . baseUrl ,
headers : { } ,
request : Object . assign ( { } , options . request , {
hook : hook . bind ( null , "request" )
} ) ,
mediaType : {
previews : [ ] ,
format : ""
}
} ; // prepend default user agent with `options.userAgent` if set
requestDefaults . headers [ "user-agent" ] = [ options . userAgent , ` octokit-core.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } ` ] . filter ( Boolean ) . join ( " " ) ;
if ( options . baseUrl ) {
requestDefaults . baseUrl = options . baseUrl ;
}
if ( options . previews ) {
requestDefaults . mediaType . previews = options . previews ;
}
if ( options . timeZone ) {
requestDefaults . headers [ "time-zone" ] = options . timeZone ;
}
this . request = request . request . defaults ( requestDefaults ) ;
this . graphql = graphql . withCustomRequest ( this . request ) . defaults ( requestDefaults ) ;
this . log = Object . assign ( {
debug : ( ) => { } ,
info : ( ) => { } ,
warn : console . warn . bind ( console ) ,
error : console . error . bind ( console )
} , options . log ) ;
this . hook = hook ; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
// is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
// (2) If only `options.auth` is set, use the default token authentication strategy.
// (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
// TODO: type `options.auth` based on `options.authStrategy`.
if ( ! options . authStrategy ) {
if ( ! options . auth ) {
// (1)
this . auth = async ( ) => ( {
type : "unauthenticated"
} ) ;
} else {
// (2)
const auth = authToken . createTokenAuth ( options . auth ) ; // @ts-ignore ¯\_(ツ)_/¯
hook . wrap ( "request" , auth . hook ) ;
this . auth = auth ;
}
} else {
const {
authStrategy
} = options ,
otherOptions = _objectWithoutProperties ( options , [ "authStrategy" ] ) ;
const auth = authStrategy ( Object . assign ( {
request : this . request ,
log : this . log ,
// we pass the current octokit instance as well as its constructor options
// to allow for authentication strategies that return a new octokit instance
// that shares the same internal state as the current one. The original
// requirement for this was the "event-octokit" authentication strategy
// of https://github.com/probot/octokit-auth-probot.
octokit : this ,
octokitOptions : otherOptions
} , options . auth ) ) ; // @ts-ignore ¯\_(ツ)_/¯
hook . wrap ( "request" , auth . hook ) ;
this . auth = auth ;
} // apply plugins
// https://stackoverflow.com/a/16345172
const classConstructor = this . constructor ;
classConstructor . plugins . forEach ( plugin => {
Object . assign ( this , plugin ( this , options ) ) ;
} ) ;
}
static defaults ( defaults ) {
const OctokitWithDefaults = class extends this {
constructor ( ... args ) {
const options = args [ 0 ] || { } ;
if ( typeof defaults === "function" ) {
super ( defaults ( options ) ) ;
return ;
}
super ( Object . assign ( { } , defaults , options , options . userAgent && defaults . userAgent ? {
userAgent : ` ${ options . userAgent } ${ defaults . userAgent } `
} : null ) ) ;
}
} ;
return OctokitWithDefaults ;
}
/ * *
* Attach a plugin ( or many ) to your Octokit instance .
*
* @ example
* const API = Octokit . plugin ( plugin1 , plugin2 , plugin3 , ... )
* /
static plugin ( ... newPlugins ) {
var _a ;
const currentPlugins = this . plugins ;
const NewOctokit = ( _a = class extends this { } , _a . plugins = currentPlugins . concat ( newPlugins . filter ( plugin => ! currentPlugins . includes ( plugin ) ) ) , _a ) ;
return NewOctokit ;
}
}
Octokit . VERSION = VERSION ;
Octokit . plugins = [ ] ;
exports . Octokit = Octokit ;
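// Usage sketch (illustrative only; not executed by the bundle): @octokit/core used directly;
// the token and route parameters are placeholders.
//
//   const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
//   const { data } = await octokit.request('GET /repos/{owner}/{repo}', {
//     owner: 'actions',
//     repo: 'setup-node'
//   });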
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 9440 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
var isPlainObject = _ _nccwpck _require _ _ ( 558 ) ;
var universalUserAgent = _ _nccwpck _require _ _ ( 5030 ) ;
function lowercaseKeys ( object ) {
if ( ! object ) {
return { } ;
}
return Object . keys ( object ) . reduce ( ( newObj , key ) => {
newObj [ key . toLowerCase ( ) ] = object [ key ] ;
return newObj ;
} , { } ) ;
}
function mergeDeep ( defaults , options ) {
const result = Object . assign ( { } , defaults ) ;
Object . keys ( options ) . forEach ( key => {
if ( isPlainObject . isPlainObject ( options [ key ] ) ) {
if ( ! ( key in defaults ) ) Object . assign ( result , {
[ key ] : options [ key ]
} ) ; else result [ key ] = mergeDeep ( defaults [ key ] , options [ key ] ) ;
} else {
Object . assign ( result , {
[ key ] : options [ key ]
} ) ;
}
} ) ;
return result ;
}
function removeUndefinedProperties ( obj ) {
for ( const key in obj ) {
if ( obj [ key ] === undefined ) {
delete obj [ key ] ;
}
}
return obj ;
}
function merge ( defaults , route , options ) {
if ( typeof route === "string" ) {
let [ method , url ] = route . split ( " " ) ;
options = Object . assign ( url ? {
method ,
url
} : {
url : method
} , options ) ;
} else {
options = Object . assign ( { } , route ) ;
} // lowercase header names before merging with defaults to avoid duplicates
options . headers = lowercaseKeys ( options . headers ) ; // remove properties with undefined values before merging
removeUndefinedProperties ( options ) ;
removeUndefinedProperties ( options . headers ) ;
const mergedOptions = mergeDeep ( defaults || { } , options ) ; // mediaType.previews arrays are merged, instead of overwritten
if ( defaults && defaults . mediaType . previews . length ) {
mergedOptions . mediaType . previews = defaults . mediaType . previews . filter ( preview => ! mergedOptions . mediaType . previews . includes ( preview ) ) . concat ( mergedOptions . mediaType . previews ) ;
}
mergedOptions . mediaType . previews = mergedOptions . mediaType . previews . map ( preview => preview . replace ( /-preview/ , "" ) ) ;
return mergedOptions ;
}
function addQueryParameters ( url , parameters ) {
const separator = /\?/ . test ( url ) ? "&" : "?" ;
const names = Object . keys ( parameters ) ;
if ( names . length === 0 ) {
return url ;
}
return url + separator + names . map ( name => {
if ( name === "q" ) {
return "q=" + parameters . q . split ( "+" ) . map ( encodeURIComponent ) . join ( "+" ) ;
}
return ` ${ name } = ${ encodeURIComponent ( parameters [ name ] ) } ` ;
} ) . join ( "&" ) ;
}
const urlVariableRegex = /\{[^}]+\}/g ;
function removeNonChars ( variableName ) {
return variableName . replace ( /^\W+|\W+$/g , "" ) . split ( /,/ ) ;
}
function extractUrlVariableNames ( url ) {
const matches = url . match ( urlVariableRegex ) ;
if ( ! matches ) {
return [ ] ;
}
return matches . map ( removeNonChars ) . reduce ( ( a , b ) => a . concat ( b ) , [ ] ) ;
}
function omit ( object , keysToOmit ) {
return Object . keys ( object ) . filter ( option => ! keysToOmit . includes ( option ) ) . reduce ( ( obj , key ) => {
obj [ key ] = object [ key ] ;
return obj ;
} , { } ) ;
}
// Based on https://github.com/bramstein/url-template, licensed under BSD
// TODO: create separate package.
//
// Copyright (c) 2012-2014, Bram Stein
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. The name of the author may not be used to endorse or promote products
// derived from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/* istanbul ignore file */
function encodeReserved ( str ) {
return str . split ( /(%[0-9A-Fa-f]{2})/g ) . map ( function ( part ) {
if ( ! /%[0-9A-Fa-f]/ . test ( part ) ) {
part = encodeURI ( part ) . replace ( /%5B/g , "[" ) . replace ( /%5D/g , "]" ) ;
}
return part ;
} ) . join ( "" ) ;
}
function encodeUnreserved ( str ) {
return encodeURIComponent ( str ) . replace ( /[!'()*]/g , function ( c ) {
return "%" + c . charCodeAt ( 0 ) . toString ( 16 ) . toUpperCase ( ) ;
} ) ;
}
function encodeValue ( operator , value , key ) {
value = operator === "+" || operator === "#" ? encodeReserved ( value ) : encodeUnreserved ( value ) ;
if ( key ) {
return encodeUnreserved ( key ) + "=" + value ;
} else {
return value ;
}
}
function isDefined ( value ) {
return value !== undefined && value !== null ;
}
function isKeyOperator ( operator ) {
return operator === ";" || operator === "&" || operator === "?" ;
}
function getValues ( context , operator , key , modifier ) {
var value = context [ key ] ,
result = [ ] ;
if ( isDefined ( value ) && value !== "" ) {
if ( typeof value === "string" || typeof value === "number" || typeof value === "boolean" ) {
value = value . toString ( ) ;
if ( modifier && modifier !== "*" ) {
value = value . substring ( 0 , parseInt ( modifier , 10 ) ) ;
}
result . push ( encodeValue ( operator , value , isKeyOperator ( operator ) ? key : "" ) ) ;
} else {
if ( modifier === "*" ) {
if ( Array . isArray ( value ) ) {
value . filter ( isDefined ) . forEach ( function ( value ) {
result . push ( encodeValue ( operator , value , isKeyOperator ( operator ) ? key : "" ) ) ;
} ) ;
} else {
Object . keys ( value ) . forEach ( function ( k ) {
if ( isDefined ( value [ k ] ) ) {
result . push ( encodeValue ( operator , value [ k ] , k ) ) ;
}
} ) ;
}
} else {
const tmp = [ ] ;
if ( Array . isArray ( value ) ) {
value . filter ( isDefined ) . forEach ( function ( value ) {
tmp . push ( encodeValue ( operator , value ) ) ;
} ) ;
} else {
Object . keys ( value ) . forEach ( function ( k ) {
if ( isDefined ( value [ k ] ) ) {
tmp . push ( encodeUnreserved ( k ) ) ;
tmp . push ( encodeValue ( operator , value [ k ] . toString ( ) ) ) ;
}
} ) ;
}
if ( isKeyOperator ( operator ) ) {
result . push ( encodeUnreserved ( key ) + "=" + tmp . join ( "," ) ) ;
} else if ( tmp . length !== 0 ) {
result . push ( tmp . join ( "," ) ) ;
}
}
}
} else {
if ( operator === ";" ) {
if ( isDefined ( value ) ) {
result . push ( encodeUnreserved ( key ) ) ;
}
} else if ( value === "" && ( operator === "&" || operator === "?" ) ) {
result . push ( encodeUnreserved ( key ) + "=" ) ;
} else if ( value === "" ) {
result . push ( "" ) ;
}
}
return result ;
}
function parseUrl ( template ) {
return {
expand : expand . bind ( null , template )
} ;
}
function expand ( template , context ) {
var operators = [ "+" , "#" , "." , "/" , ";" , "?" , "&" ] ;
return template . replace ( /\{([^\{\}]+)\}|([^\{\}]+)/g , function ( _ , expression , literal ) {
if ( expression ) {
let operator = "" ;
const values = [ ] ;
if ( operators . indexOf ( expression . charAt ( 0 ) ) !== - 1 ) {
operator = expression . charAt ( 0 ) ;
expression = expression . substr ( 1 ) ;
}
expression . split ( /,/g ) . forEach ( function ( variable ) {
var tmp = /([^:\*]*)(?::(\d+)|(\*))?/ . exec ( variable ) ;
values . push ( getValues ( context , operator , tmp [ 1 ] , tmp [ 2 ] || tmp [ 3 ] ) ) ;
} ) ;
if ( operator && operator !== "+" ) {
var separator = "," ;
if ( operator === "?" ) {
separator = "&" ;
} else if ( operator !== "#" ) {
separator = operator ;
}
return ( values . length !== 0 ? operator : "" ) + values . join ( separator ) ;
} else {
return values . join ( "," ) ;
}
} else {
return encodeReserved ( literal ) ;
}
} ) ;
}
function parse ( options ) {
// https://fetch.spec.whatwg.org/#methods
let method = options . method . toUpperCase ( ) ; // replace :varname with {varname} to make it RFC 6570 compatible
let url = ( options . url || "/" ) . replace ( /:([a-z]\w+)/g , "{$1}" ) ;
let headers = Object . assign ( { } , options . headers ) ;
let body ;
let parameters = omit ( options , [ "method" , "baseUrl" , "url" , "headers" , "request" , "mediaType" ] ) ; // extract variable names from URL to calculate remaining variables later
const urlVariableNames = extractUrlVariableNames ( url ) ;
url = parseUrl ( url ) . expand ( parameters ) ;
if ( ! /^http/ . test ( url ) ) {
url = options . baseUrl + url ;
}
const omittedParameters = Object . keys ( options ) . filter ( option => urlVariableNames . includes ( option ) ) . concat ( "baseUrl" ) ;
const remainingParameters = omit ( parameters , omittedParameters ) ;
const isBinaryRequest = /application\/octet-stream/i . test ( headers . accept ) ;
if ( ! isBinaryRequest ) {
if ( options . mediaType . format ) {
// e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
headers . accept = headers . accept . split ( /,/ ) . map ( preview => preview . replace ( /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/ , ` application/vnd $ 1 $ 2. ${ options . mediaType . format } ` ) ) . join ( "," ) ;
}
if ( options . mediaType . previews . length ) {
const previewsFromAcceptHeader = headers . accept . match ( /[\w-]+(?=-preview)/g ) || [ ] ;
headers . accept = previewsFromAcceptHeader . concat ( options . mediaType . previews ) . map ( preview => {
const format = options . mediaType . format ? ` . ${ options . mediaType . format } ` : "+json" ;
return ` application/vnd.github. ${ preview } -preview ${ format } ` ;
} ) . join ( "," ) ;
}
} // for GET/HEAD requests, set URL query parameters from remaining parameters
// for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
if ( [ "GET" , "HEAD" ] . includes ( method ) ) {
url = addQueryParameters ( url , remainingParameters ) ;
} else {
if ( "data" in remainingParameters ) {
body = remainingParameters . data ;
} else {
if ( Object . keys ( remainingParameters ) . length ) {
body = remainingParameters ;
} else {
headers [ "content-length" ] = 0 ;
}
}
} // default content-type for JSON if body is set
if ( ! headers [ "content-type" ] && typeof body !== "undefined" ) {
headers [ "content-type" ] = "application/json; charset=utf-8" ;
} // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
// fetch does not allow to set `content-length` header, but we can set body to an empty string
if ( [ "PATCH" , "PUT" ] . includes ( method ) && typeof body === "undefined" ) {
body = "" ;
} // Only return body/request keys if present
return Object . assign ( {
method ,
url ,
headers
} , typeof body !== "undefined" ? {
body
} : null , options . request ? {
request : options . request
} : null ) ;
}
function endpointWithDefaults ( defaults , route , options ) {
return parse ( merge ( defaults , route , options ) ) ;
}
function withDefaults ( oldDefaults , newDefaults ) {
const DEFAULTS = merge ( oldDefaults , newDefaults ) ;
const endpoint = endpointWithDefaults . bind ( null , DEFAULTS ) ;
return Object . assign ( endpoint , {
DEFAULTS ,
defaults : withDefaults . bind ( null , DEFAULTS ) ,
merge : merge . bind ( null , DEFAULTS ) ,
parse
} ) ;
}
const VERSION = "6.0.11" ;
const userAgent = ` octokit-endpoint.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } ` ; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.
const DEFAULTS = {
method : "GET" ,
baseUrl : "https://api.github.com" ,
headers : {
accept : "application/vnd.github.v3+json" ,
"user-agent" : userAgent
} ,
mediaType : {
format : "" ,
previews : [ ]
}
} ;
const endpoint = withDefaults ( null , DEFAULTS ) ;
exports . endpoint = endpoint ;
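// Usage sketch (illustrative only; not executed by the bundle): @octokit/endpoint turns a
// route plus parameters into { method, url, headers[, body] }; values are placeholders.
//
//   const options = endpoint('GET /repos/{owner}/{repo}/issues', {
//     owner: 'actions',
//     repo: 'setup-node',
//     state: 'open'
//   });
//   // options.url => 'https://api.github.com/repos/actions/setup-node/issues?state=open'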
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 558 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
/ * !
* is - plain - object < https : //github.com/jonschlinkert/is-plain-object>
*
* Copyright ( c ) 2014 - 2017 , Jon Schlinkert .
* Released under the MIT License .
* /
function isObject ( o ) {
return Object . prototype . toString . call ( o ) === '[object Object]' ;
}
function isPlainObject ( o ) {
var ctor , prot ;
if ( isObject ( o ) === false ) return false ;
// If has modified constructor
ctor = o . constructor ;
if ( ctor === undefined ) return true ;
// If has modified prototype
prot = ctor . prototype ;
if ( isObject ( prot ) === false ) return false ;
// If constructor does not have an Object-specific method
if ( prot . hasOwnProperty ( 'isPrototypeOf' ) === false ) {
return false ;
}
// Most likely a plain Object
return true ;
}
exports . isPlainObject = isPlainObject ;
/***/ } ) ,
/***/ 8467 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
var request = _ _nccwpck _require _ _ ( 6234 ) ;
var universalUserAgent = _ _nccwpck _require _ _ ( 5030 ) ;
const VERSION = "4.6.0" ;
class GraphqlError extends Error {
constructor ( request , response ) {
const message = response . data . errors [ 0 ] . message ;
super ( message ) ;
Object . assign ( this , response . data ) ;
Object . assign ( this , {
headers : response . headers
} ) ;
this . name = "GraphqlError" ;
this . request = request ; // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
if ( Error . captureStackTrace ) {
Error . captureStackTrace ( this , this . constructor ) ;
}
}
}
const NON _VARIABLE _OPTIONS = [ "method" , "baseUrl" , "url" , "headers" , "request" , "query" , "mediaType" ] ;
const GHES _V3 _SUFFIX _REGEX = /\/api\/v3\/?$/ ;
function graphql ( request , query , options ) {
if ( typeof query === "string" && options && "query" in options ) {
return Promise . reject ( new Error ( ` [@octokit/graphql] "query" cannot be used as variable name ` ) ) ;
}
const parsedOptions = typeof query === "string" ? Object . assign ( {
query
} , options ) : query ;
const requestOptions = Object . keys ( parsedOptions ) . reduce ( ( result , key ) => {
if ( NON _VARIABLE _OPTIONS . includes ( key ) ) {
result [ key ] = parsedOptions [ key ] ;
return result ;
}
if ( ! result . variables ) {
result . variables = { } ;
}
result . variables [ key ] = parsedOptions [ key ] ;
return result ;
} , { } ) ; // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
// https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
const baseUrl = parsedOptions . baseUrl || request . endpoint . DEFAULTS . baseUrl ;
if ( GHES _V3 _SUFFIX _REGEX . test ( baseUrl ) ) {
requestOptions . url = baseUrl . replace ( GHES _V3 _SUFFIX _REGEX , "/api/graphql" ) ;
}
return request ( requestOptions ) . then ( response => {
if ( response . data . errors ) {
const headers = { } ;
for ( const key of Object . keys ( response . headers ) ) {
headers [ key ] = response . headers [ key ] ;
}
throw new GraphqlError ( requestOptions , {
headers ,
data : response . data
} ) ;
}
return response . data . data ;
} ) ;
}
function withDefaults ( request$1 , newDefaults ) {
const newRequest = request$1 . defaults ( newDefaults ) ;
const newApi = ( query , options ) => {
return graphql ( newRequest , query , options ) ;
} ;
return Object . assign ( newApi , {
defaults : withDefaults . bind ( null , newRequest ) ,
endpoint : request . request . endpoint
} ) ;
}
const graphql$1 = withDefaults ( request . request , {
headers : {
"user-agent" : ` octokit-graphql.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } `
} ,
method : "POST" ,
url : "/graphql"
} ) ;
function withCustomRequest ( customRequest ) {
return withDefaults ( customRequest , {
method : "POST" ,
url : "/graphql"
} ) ;
}
exports . graphql = graphql$1 ;
exports . withCustomRequest = withCustomRequest ;
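// Usage sketch (illustrative only; not executed by the bundle): @octokit/graphql with an
// authorization header; the token, query and variables are placeholders.
//
//   const { repository } = await graphql(
//     `query ($owner: String!, $name: String!) {
//        repository(owner: $owner, name: $name) { stargazerCount }
//      }`,
//     { owner: 'actions', name: 'setup-node', headers: { authorization: `token ${token}` } }
//   );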
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 4193 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
const VERSION = "2.10.0" ;
/ * *
 * Some “ list ” responses that can be paginated have a different response structure
 *
 * They have a ` total_count ` key in the response ( search also has ` incomplete_results ` ,
 * /installation/repositories also has ` repository_selection ` ) , as well as a key with
 * the list of the items which name varies from endpoint to endpoint .
 *
 * Octokit normalizes these responses so that paginated results are always returned following
 * the same structure . One challenge is that if the list response has only one page , no Link
 * header is provided , so this header alone is not sufficient to check whether a response is
 * paginated or not .
 *
 * We check if a "total_count" key is present in the response data , but also make sure that
 * a "url" property is not , as the "Get the combined status for a specific ref" endpoint would
 * otherwise match : https : //developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
* /
function normalizePaginatedListResponse ( response ) {
const responseNeedsNormalization = "total_count" in response . data && ! ( "url" in response . data ) ;
if ( ! responseNeedsNormalization ) return response ; // keep the additional properties intact as there is currently no other way
// to retrieve the same information.
const incompleteResults = response . data . incomplete _results ;
const repositorySelection = response . data . repository _selection ;
const totalCount = response . data . total _count ;
delete response . data . incomplete _results ;
delete response . data . repository _selection ;
delete response . data . total _count ;
const namespaceKey = Object . keys ( response . data ) [ 0 ] ;
const data = response . data [ namespaceKey ] ;
response . data = data ;
if ( typeof incompleteResults !== "undefined" ) {
response . data . incomplete _results = incompleteResults ;
}
if ( typeof repositorySelection !== "undefined" ) {
response . data . repository _selection = repositorySelection ;
}
response . data . total _count = totalCount ;
return response ;
}
function iterator ( octokit , route , parameters ) {
const options = typeof route === "function" ? route . endpoint ( parameters ) : octokit . request . endpoint ( route , parameters ) ;
const requestMethod = typeof route === "function" ? route : octokit . request ;
const method = options . method ;
const headers = options . headers ;
let url = options . url ;
return {
[ Symbol . asyncIterator ] : ( ) => ( {
async next ( ) {
if ( ! url ) return {
done : true
} ;
const response = await requestMethod ( {
method ,
url ,
headers
} ) ;
const normalizedResponse = normalizePaginatedListResponse ( response ) ; // `response.headers.link` format:
// '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
// sets `url` to undefined if "next" URL is not present or `link` header is not set
url = ( ( normalizedResponse . headers . link || "" ) . match ( /<([^>]+)>;\s*rel="next"/ ) || [ ] ) [ 1 ] ;
return {
value : normalizedResponse
} ;
}
} )
} ;
}
function paginate ( octokit , route , parameters , mapFn ) {
if ( typeof parameters === "function" ) {
mapFn = parameters ;
parameters = undefined ;
}
return gather ( octokit , [ ] , iterator ( octokit , route , parameters ) [ Symbol . asyncIterator ] ( ) , mapFn ) ;
}
function gather ( octokit , results , iterator , mapFn ) {
return iterator . next ( ) . then ( result => {
if ( result . done ) {
return results ;
}
let earlyExit = false ;
function done ( ) {
earlyExit = true ;
}
results = results . concat ( mapFn ? mapFn ( result . value , done ) : result . value . data ) ;
if ( earlyExit ) {
return results ;
}
return gather ( octokit , results , iterator , mapFn ) ;
} ) ;
}
const composePaginateRest = Object . assign ( paginate , {
iterator
} ) ;
/**
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */

function paginateRest(octokit) {
  return {
    paginate: Object.assign(paginate.bind(null, octokit), {
      iterator: iterator.bind(null, octokit)
    })
  };
}
paginateRest.VERSION = VERSION;

exports.composePaginateRest = composePaginateRest;
exports.paginateRest = paginateRest;
//# sourceMappingURL=index.js.map
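// A usage sketch of the exported plugin when consumed outside this bundle,
// assuming @octokit/core is available (module name and token variable are
// assumptions, not taken from this file):
//
//   const { Octokit } = require("@octokit/core");
//   const MyOctokit = Octokit.plugin(paginateRest);
//   const octokit = new MyOctokit({ auth: process.env.GITHUB_TOKEN });
//   const repos = await octokit.paginate("GET /orgs/{org}/repos", { org: "octokit" });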
/***/ }),

/***/ 3044:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
const Endpoints = {
actions : {
addSelectedRepoToOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ] ,
cancelWorkflowRun : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" ] ,
createOrUpdateOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}" ] ,
createOrUpdateRepoSecret : [ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
createRegistrationTokenForOrg : [ "POST /orgs/{org}/actions/runners/registration-token" ] ,
createRegistrationTokenForRepo : [ "POST /repos/{owner}/{repo}/actions/runners/registration-token" ] ,
createRemoveTokenForOrg : [ "POST /orgs/{org}/actions/runners/remove-token" ] ,
createRemoveTokenForRepo : [ "POST /repos/{owner}/{repo}/actions/runners/remove-token" ] ,
createWorkflowDispatch : [ "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" ] ,
deleteArtifact : [ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
deleteOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}" ] ,
deleteRepoSecret : [ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
deleteSelfHostedRunnerFromOrg : [ "DELETE /orgs/{org}/actions/runners/{runner_id}" ] ,
deleteSelfHostedRunnerFromRepo : [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" ] ,
deleteWorkflowRun : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
deleteWorkflowRunLogs : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ] ,
disableSelectedRepositoryGithubActionsOrganization : [ "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" ] ,
disableWorkflow : [ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" ] ,
downloadArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" ] ,
downloadJobLogsForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" ] ,
downloadWorkflowRunLogs : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ] ,
enableSelectedRepositoryGithubActionsOrganization : [ "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" ] ,
enableWorkflow : [ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" ] ,
getAllowedActionsOrganization : [ "GET /orgs/{org}/actions/permissions/selected-actions" ] ,
getAllowedActionsRepository : [ "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" ] ,
getArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
getGithubActionsPermissionsOrganization : [ "GET /orgs/{org}/actions/permissions" ] ,
getGithubActionsPermissionsRepository : [ "GET /repos/{owner}/{repo}/actions/permissions" ] ,
getJobForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/actions/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}" ] ,
getRepoPermissions : [ "GET /repos/{owner}/{repo}/actions/permissions" , { } , {
renamed : [ "actions" , "getGithubActionsPermissionsRepository" ]
} ] ,
getRepoPublicKey : [ "GET /repos/{owner}/{repo}/actions/secrets/public-key" ] ,
getRepoSecret : [ "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
getSelfHostedRunnerForOrg : [ "GET /orgs/{org}/actions/runners/{runner_id}" ] ,
getSelfHostedRunnerForRepo : [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" ] ,
getWorkflow : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}" ] ,
getWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
getWorkflowRunUsage : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" ] ,
getWorkflowUsage : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" ] ,
listArtifactsForRepo : [ "GET /repos/{owner}/{repo}/actions/artifacts" ] ,
listJobsForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" ] ,
listOrgSecrets : [ "GET /orgs/{org}/actions/secrets" ] ,
listRepoSecrets : [ "GET /repos/{owner}/{repo}/actions/secrets" ] ,
listRepoWorkflows : [ "GET /repos/{owner}/{repo}/actions/workflows" ] ,
listRunnerApplicationsForOrg : [ "GET /orgs/{org}/actions/runners/downloads" ] ,
listRunnerApplicationsForRepo : [ "GET /repos/{owner}/{repo}/actions/runners/downloads" ] ,
listSelectedReposForOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" ] ,
listSelectedRepositoriesEnabledGithubActionsOrganization : [ "GET /orgs/{org}/actions/permissions/repositories" ] ,
listSelfHostedRunnersForOrg : [ "GET /orgs/{org}/actions/runners" ] ,
listSelfHostedRunnersForRepo : [ "GET /repos/{owner}/{repo}/actions/runners" ] ,
listWorkflowRunArtifacts : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" ] ,
listWorkflowRuns : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" ] ,
listWorkflowRunsForRepo : [ "GET /repos/{owner}/{repo}/actions/runs" ] ,
reRunWorkflow : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun" ] ,
removeSelectedRepoFromOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ] ,
setAllowedActionsOrganization : [ "PUT /orgs/{org}/actions/permissions/selected-actions" ] ,
setAllowedActionsRepository : [ "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" ] ,
setGithubActionsPermissionsOrganization : [ "PUT /orgs/{org}/actions/permissions" ] ,
setGithubActionsPermissionsRepository : [ "PUT /repos/{owner}/{repo}/actions/permissions" ] ,
setSelectedReposForOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" ] ,
setSelectedRepositoriesEnabledGithubActionsOrganization : [ "PUT /orgs/{org}/actions/permissions/repositories" ]
} ,
activity : {
checkRepoIsStarredByAuthenticatedUser : [ "GET /user/starred/{owner}/{repo}" ] ,
deleteRepoSubscription : [ "DELETE /repos/{owner}/{repo}/subscription" ] ,
deleteThreadSubscription : [ "DELETE /notifications/threads/{thread_id}/subscription" ] ,
getFeeds : [ "GET /feeds" ] ,
getRepoSubscription : [ "GET /repos/{owner}/{repo}/subscription" ] ,
getThread : [ "GET /notifications/threads/{thread_id}" ] ,
getThreadSubscriptionForAuthenticatedUser : [ "GET /notifications/threads/{thread_id}/subscription" ] ,
listEventsForAuthenticatedUser : [ "GET /users/{username}/events" ] ,
listNotificationsForAuthenticatedUser : [ "GET /notifications" ] ,
listOrgEventsForAuthenticatedUser : [ "GET /users/{username}/events/orgs/{org}" ] ,
listPublicEvents : [ "GET /events" ] ,
listPublicEventsForRepoNetwork : [ "GET /networks/{owner}/{repo}/events" ] ,
listPublicEventsForUser : [ "GET /users/{username}/events/public" ] ,
listPublicOrgEvents : [ "GET /orgs/{org}/events" ] ,
listReceivedEventsForUser : [ "GET /users/{username}/received_events" ] ,
listReceivedPublicEventsForUser : [ "GET /users/{username}/received_events/public" ] ,
listRepoEvents : [ "GET /repos/{owner}/{repo}/events" ] ,
listRepoNotificationsForAuthenticatedUser : [ "GET /repos/{owner}/{repo}/notifications" ] ,
listReposStarredByAuthenticatedUser : [ "GET /user/starred" ] ,
listReposStarredByUser : [ "GET /users/{username}/starred" ] ,
listReposWatchedByUser : [ "GET /users/{username}/subscriptions" ] ,
listStargazersForRepo : [ "GET /repos/{owner}/{repo}/stargazers" ] ,
listWatchedReposForAuthenticatedUser : [ "GET /user/subscriptions" ] ,
listWatchersForRepo : [ "GET /repos/{owner}/{repo}/subscribers" ] ,
markNotificationsAsRead : [ "PUT /notifications" ] ,
markRepoNotificationsAsRead : [ "PUT /repos/{owner}/{repo}/notifications" ] ,
markThreadAsRead : [ "PATCH /notifications/threads/{thread_id}" ] ,
setRepoSubscription : [ "PUT /repos/{owner}/{repo}/subscription" ] ,
setThreadSubscription : [ "PUT /notifications/threads/{thread_id}/subscription" ] ,
starRepoForAuthenticatedUser : [ "PUT /user/starred/{owner}/{repo}" ] ,
unstarRepoForAuthenticatedUser : [ "DELETE /user/starred/{owner}/{repo}" ]
} ,
apps : {
addRepoToInstallation : [ "PUT /user/installations/{installation_id}/repositories/{repository_id}" ] ,
checkToken : [ "POST /applications/{client_id}/token" ] ,
createContentAttachment : [ "POST /content_references/{content_reference_id}/attachments" , {
mediaType : {
previews : [ "corsair" ]
}
} ] ,
createFromManifest : [ "POST /app-manifests/{code}/conversions" ] ,
createInstallationAccessToken : [ "POST /app/installations/{installation_id}/access_tokens" ] ,
deleteAuthorization : [ "DELETE /applications/{client_id}/grant" ] ,
deleteInstallation : [ "DELETE /app/installations/{installation_id}" ] ,
deleteToken : [ "DELETE /applications/{client_id}/token" ] ,
getAuthenticated : [ "GET /app" ] ,
getBySlug : [ "GET /apps/{app_slug}" ] ,
getInstallation : [ "GET /app/installations/{installation_id}" ] ,
getOrgInstallation : [ "GET /orgs/{org}/installation" ] ,
getRepoInstallation : [ "GET /repos/{owner}/{repo}/installation" ] ,
getSubscriptionPlanForAccount : [ "GET /marketplace_listing/accounts/{account_id}" ] ,
getSubscriptionPlanForAccountStubbed : [ "GET /marketplace_listing/stubbed/accounts/{account_id}" ] ,
getUserInstallation : [ "GET /users/{username}/installation" ] ,
getWebhookConfigForApp : [ "GET /app/hook/config" ] ,
listAccountsForPlan : [ "GET /marketplace_listing/plans/{plan_id}/accounts" ] ,
listAccountsForPlanStubbed : [ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" ] ,
listInstallationReposForAuthenticatedUser : [ "GET /user/installations/{installation_id}/repositories" ] ,
listInstallations : [ "GET /app/installations" ] ,
listInstallationsForAuthenticatedUser : [ "GET /user/installations" ] ,
listPlans : [ "GET /marketplace_listing/plans" ] ,
listPlansStubbed : [ "GET /marketplace_listing/stubbed/plans" ] ,
listReposAccessibleToInstallation : [ "GET /installation/repositories" ] ,
listSubscriptionsForAuthenticatedUser : [ "GET /user/marketplace_purchases" ] ,
listSubscriptionsForAuthenticatedUserStubbed : [ "GET /user/marketplace_purchases/stubbed" ] ,
removeRepoFromInstallation : [ "DELETE /user/installations/{installation_id}/repositories/{repository_id}" ] ,
resetToken : [ "PATCH /applications/{client_id}/token" ] ,
revokeInstallationAccessToken : [ "DELETE /installation/token" ] ,
scopeToken : [ "POST /applications/{client_id}/token/scoped" ] ,
suspendInstallation : [ "PUT /app/installations/{installation_id}/suspended" ] ,
unsuspendInstallation : [ "DELETE /app/installations/{installation_id}/suspended" ] ,
updateWebhookConfigForApp : [ "PATCH /app/hook/config" ]
} ,
billing : {
getGithubActionsBillingOrg : [ "GET /orgs/{org}/settings/billing/actions" ] ,
getGithubActionsBillingUser : [ "GET /users/{username}/settings/billing/actions" ] ,
getGithubPackagesBillingOrg : [ "GET /orgs/{org}/settings/billing/packages" ] ,
getGithubPackagesBillingUser : [ "GET /users/{username}/settings/billing/packages" ] ,
getSharedStorageBillingOrg : [ "GET /orgs/{org}/settings/billing/shared-storage" ] ,
getSharedStorageBillingUser : [ "GET /users/{username}/settings/billing/shared-storage" ]
} ,
checks : {
create : [ "POST /repos/{owner}/{repo}/check-runs" ] ,
createSuite : [ "POST /repos/{owner}/{repo}/check-suites" ] ,
get : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}" ] ,
getSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}" ] ,
listAnnotations : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" ] ,
listForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs" ] ,
listForSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" ] ,
listSuitesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites" ] ,
rerequestSuite : [ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" ] ,
setSuitesPreferences : [ "PATCH /repos/{owner}/{repo}/check-suites/preferences" ] ,
update : [ "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}" ]
} ,
codeScanning : {
deleteAnalysis : [ "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" ] ,
getAlert : [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" , { } , {
renamedParameters : {
        alert_id: "alert_number"
}
} ] ,
getAnalysis : [ "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" ] ,
getSarif : [ "GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/code-scanning/alerts" ] ,
listAlertsInstances : [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" ] ,
listRecentAnalyses : [ "GET /repos/{owner}/{repo}/code-scanning/analyses" ] ,
updateAlert : [ "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" ] ,
uploadSarif : [ "POST /repos/{owner}/{repo}/code-scanning/sarifs" ]
} ,
codesOfConduct : {
getAllCodesOfConduct : [ "GET /codes_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ] ,
getConductCode : [ "GET /codes_of_conduct/{key}" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/community/code_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ]
} ,
emojis : {
get : [ "GET /emojis" ]
} ,
enterpriseAdmin : {
disableSelectedOrganizationGithubActionsEnterprise : [ "DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}" ] ,
enableSelectedOrganizationGithubActionsEnterprise : [ "PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}" ] ,
getAllowedActionsEnterprise : [ "GET /enterprises/{enterprise}/actions/permissions/selected-actions" ] ,
getGithubActionsPermissionsEnterprise : [ "GET /enterprises/{enterprise}/actions/permissions" ] ,
listSelectedOrganizationsEnabledGithubActionsEnterprise : [ "GET /enterprises/{enterprise}/actions/permissions/organizations" ] ,
setAllowedActionsEnterprise : [ "PUT /enterprises/{enterprise}/actions/permissions/selected-actions" ] ,
setGithubActionsPermissionsEnterprise : [ "PUT /enterprises/{enterprise}/actions/permissions" ] ,
setSelectedOrganizationsEnabledGithubActionsEnterprise : [ "PUT /enterprises/{enterprise}/actions/permissions/organizations" ]
} ,
gists : {
checkIsStarred : [ "GET /gists/{gist_id}/star" ] ,
create : [ "POST /gists" ] ,
createComment : [ "POST /gists/{gist_id}/comments" ] ,
delete : [ "DELETE /gists/{gist_id}" ] ,
deleteComment : [ "DELETE /gists/{gist_id}/comments/{comment_id}" ] ,
fork : [ "POST /gists/{gist_id}/forks" ] ,
get : [ "GET /gists/{gist_id}" ] ,
getComment : [ "GET /gists/{gist_id}/comments/{comment_id}" ] ,
getRevision : [ "GET /gists/{gist_id}/{sha}" ] ,
list : [ "GET /gists" ] ,
listComments : [ "GET /gists/{gist_id}/comments" ] ,
listCommits : [ "GET /gists/{gist_id}/commits" ] ,
listForUser : [ "GET /users/{username}/gists" ] ,
listForks : [ "GET /gists/{gist_id}/forks" ] ,
listPublic : [ "GET /gists/public" ] ,
listStarred : [ "GET /gists/starred" ] ,
star : [ "PUT /gists/{gist_id}/star" ] ,
unstar : [ "DELETE /gists/{gist_id}/star" ] ,
update : [ "PATCH /gists/{gist_id}" ] ,
updateComment : [ "PATCH /gists/{gist_id}/comments/{comment_id}" ]
} ,
git : {
createBlob : [ "POST /repos/{owner}/{repo}/git/blobs" ] ,
createCommit : [ "POST /repos/{owner}/{repo}/git/commits" ] ,
createRef : [ "POST /repos/{owner}/{repo}/git/refs" ] ,
createTag : [ "POST /repos/{owner}/{repo}/git/tags" ] ,
createTree : [ "POST /repos/{owner}/{repo}/git/trees" ] ,
deleteRef : [ "DELETE /repos/{owner}/{repo}/git/refs/{ref}" ] ,
getBlob : [ "GET /repos/{owner}/{repo}/git/blobs/{file_sha}" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/git/commits/{commit_sha}" ] ,
getRef : [ "GET /repos/{owner}/{repo}/git/ref/{ref}" ] ,
getTag : [ "GET /repos/{owner}/{repo}/git/tags/{tag_sha}" ] ,
getTree : [ "GET /repos/{owner}/{repo}/git/trees/{tree_sha}" ] ,
listMatchingRefs : [ "GET /repos/{owner}/{repo}/git/matching-refs/{ref}" ] ,
updateRef : [ "PATCH /repos/{owner}/{repo}/git/refs/{ref}" ]
} ,
gitignore : {
getAllTemplates : [ "GET /gitignore/templates" ] ,
getTemplate : [ "GET /gitignore/templates/{name}" ]
} ,
interactions : {
getRestrictionsForAuthenticatedUser : [ "GET /user/interaction-limits" ] ,
getRestrictionsForOrg : [ "GET /orgs/{org}/interaction-limits" ] ,
getRestrictionsForRepo : [ "GET /repos/{owner}/{repo}/interaction-limits" ] ,
getRestrictionsForYourPublicRepos : [ "GET /user/interaction-limits" , { } , {
renamed : [ "interactions" , "getRestrictionsForAuthenticatedUser" ]
} ] ,
removeRestrictionsForAuthenticatedUser : [ "DELETE /user/interaction-limits" ] ,
removeRestrictionsForOrg : [ "DELETE /orgs/{org}/interaction-limits" ] ,
removeRestrictionsForRepo : [ "DELETE /repos/{owner}/{repo}/interaction-limits" ] ,
removeRestrictionsForYourPublicRepos : [ "DELETE /user/interaction-limits" , { } , {
renamed : [ "interactions" , "removeRestrictionsForAuthenticatedUser" ]
} ] ,
setRestrictionsForAuthenticatedUser : [ "PUT /user/interaction-limits" ] ,
setRestrictionsForOrg : [ "PUT /orgs/{org}/interaction-limits" ] ,
setRestrictionsForRepo : [ "PUT /repos/{owner}/{repo}/interaction-limits" ] ,
setRestrictionsForYourPublicRepos : [ "PUT /user/interaction-limits" , { } , {
renamed : [ "interactions" , "setRestrictionsForAuthenticatedUser" ]
} ]
} ,
issues : {
addAssignees : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" ] ,
addLabels : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
checkUserCanBeAssigned : [ "GET /repos/{owner}/{repo}/assignees/{assignee}" ] ,
create : [ "POST /repos/{owner}/{repo}/issues" ] ,
createComment : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
createLabel : [ "POST /repos/{owner}/{repo}/labels" ] ,
createMilestone : [ "POST /repos/{owner}/{repo}/milestones" ] ,
deleteComment : [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
deleteLabel : [ "DELETE /repos/{owner}/{repo}/labels/{name}" ] ,
deleteMilestone : [ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
get : [ "GET /repos/{owner}/{repo}/issues/{issue_number}" ] ,
getComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
getEvent : [ "GET /repos/{owner}/{repo}/issues/events/{event_id}" ] ,
getLabel : [ "GET /repos/{owner}/{repo}/labels/{name}" ] ,
getMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
list : [ "GET /issues" ] ,
listAssignees : [ "GET /repos/{owner}/{repo}/assignees" ] ,
listComments : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
listCommentsForRepo : [ "GET /repos/{owner}/{repo}/issues/comments" ] ,
listEvents : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/events" ] ,
listEventsForRepo : [ "GET /repos/{owner}/{repo}/issues/events" ] ,
listEventsForTimeline : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" , {
mediaType : {
previews : [ "mockingbird" ]
}
} ] ,
listForAuthenticatedUser : [ "GET /user/issues" ] ,
listForOrg : [ "GET /orgs/{org}/issues" ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/issues" ] ,
listLabelsForMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" ] ,
listLabelsForRepo : [ "GET /repos/{owner}/{repo}/labels" ] ,
listLabelsOnIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
listMilestones : [ "GET /repos/{owner}/{repo}/milestones" ] ,
lock : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
removeAllLabels : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
removeAssignees : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" ] ,
removeLabel : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" ] ,
setLabels : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
unlock : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
update : [ "PATCH /repos/{owner}/{repo}/issues/{issue_number}" ] ,
updateComment : [ "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
updateLabel : [ "PATCH /repos/{owner}/{repo}/labels/{name}" ] ,
updateMilestone : [ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" ]
} ,
licenses : {
get : [ "GET /licenses/{license}" ] ,
getAllCommonlyUsed : [ "GET /licenses" ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/license" ]
} ,
markdown : {
render : [ "POST /markdown" ] ,
renderRaw : [ "POST /markdown/raw" , {
headers : {
"content-type" : "text/plain; charset=utf-8"
}
} ]
} ,
meta : {
get : [ "GET /meta" ] ,
getOctocat : [ "GET /octocat" ] ,
getZen : [ "GET /zen" ] ,
root : [ "GET /" ]
} ,
migrations : {
cancelImport : [ "DELETE /repos/{owner}/{repo}/import" ] ,
deleteArchiveForAuthenticatedUser : [ "DELETE /user/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
deleteArchiveForOrg : [ "DELETE /orgs/{org}/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
downloadArchiveForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getArchiveForAuthenticatedUser : [ "GET /user/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getCommitAuthors : [ "GET /repos/{owner}/{repo}/import/authors" ] ,
getImportStatus : [ "GET /repos/{owner}/{repo}/import" ] ,
getLargeFiles : [ "GET /repos/{owner}/{repo}/import/large_files" ] ,
getStatusForAuthenticatedUser : [ "GET /user/migrations/{migration_id}" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getStatusForOrg : [ "GET /orgs/{org}/migrations/{migration_id}" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listForAuthenticatedUser : [ "GET /user/migrations" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listForOrg : [ "GET /orgs/{org}/migrations" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listReposForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/repositories" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listReposForUser : [ "GET /user/migrations/{migration_id}/repositories" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
mapCommitAuthor : [ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}" ] ,
setLfsPreference : [ "PATCH /repos/{owner}/{repo}/import/lfs" ] ,
startForAuthenticatedUser : [ "POST /user/migrations" ] ,
startForOrg : [ "POST /orgs/{org}/migrations" ] ,
startImport : [ "PUT /repos/{owner}/{repo}/import" ] ,
unlockRepoForAuthenticatedUser : [ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
unlockRepoForOrg : [ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
updateImport : [ "PATCH /repos/{owner}/{repo}/import" ]
} ,
orgs : {
blockUser : [ "PUT /orgs/{org}/blocks/{username}" ] ,
cancelInvitation : [ "DELETE /orgs/{org}/invitations/{invitation_id}" ] ,
checkBlockedUser : [ "GET /orgs/{org}/blocks/{username}" ] ,
checkMembershipForUser : [ "GET /orgs/{org}/members/{username}" ] ,
checkPublicMembershipForUser : [ "GET /orgs/{org}/public_members/{username}" ] ,
convertMemberToOutsideCollaborator : [ "PUT /orgs/{org}/outside_collaborators/{username}" ] ,
createInvitation : [ "POST /orgs/{org}/invitations" ] ,
createWebhook : [ "POST /orgs/{org}/hooks" ] ,
deleteWebhook : [ "DELETE /orgs/{org}/hooks/{hook_id}" ] ,
get : [ "GET /orgs/{org}" ] ,
getMembershipForAuthenticatedUser : [ "GET /user/memberships/orgs/{org}" ] ,
getMembershipForUser : [ "GET /orgs/{org}/memberships/{username}" ] ,
getWebhook : [ "GET /orgs/{org}/hooks/{hook_id}" ] ,
getWebhookConfigForOrg : [ "GET /orgs/{org}/hooks/{hook_id}/config" ] ,
list : [ "GET /organizations" ] ,
listAppInstallations : [ "GET /orgs/{org}/installations" ] ,
listBlockedUsers : [ "GET /orgs/{org}/blocks" ] ,
listFailedInvitations : [ "GET /orgs/{org}/failed_invitations" ] ,
listForAuthenticatedUser : [ "GET /user/orgs" ] ,
listForUser : [ "GET /users/{username}/orgs" ] ,
listInvitationTeams : [ "GET /orgs/{org}/invitations/{invitation_id}/teams" ] ,
listMembers : [ "GET /orgs/{org}/members" ] ,
listMembershipsForAuthenticatedUser : [ "GET /user/memberships/orgs" ] ,
listOutsideCollaborators : [ "GET /orgs/{org}/outside_collaborators" ] ,
listPendingInvitations : [ "GET /orgs/{org}/invitations" ] ,
listPublicMembers : [ "GET /orgs/{org}/public_members" ] ,
listWebhooks : [ "GET /orgs/{org}/hooks" ] ,
pingWebhook : [ "POST /orgs/{org}/hooks/{hook_id}/pings" ] ,
removeMember : [ "DELETE /orgs/{org}/members/{username}" ] ,
removeMembershipForUser : [ "DELETE /orgs/{org}/memberships/{username}" ] ,
removeOutsideCollaborator : [ "DELETE /orgs/{org}/outside_collaborators/{username}" ] ,
removePublicMembershipForAuthenticatedUser : [ "DELETE /orgs/{org}/public_members/{username}" ] ,
setMembershipForUser : [ "PUT /orgs/{org}/memberships/{username}" ] ,
setPublicMembershipForAuthenticatedUser : [ "PUT /orgs/{org}/public_members/{username}" ] ,
unblockUser : [ "DELETE /orgs/{org}/blocks/{username}" ] ,
update : [ "PATCH /orgs/{org}" ] ,
updateMembershipForAuthenticatedUser : [ "PATCH /user/memberships/orgs/{org}" ] ,
updateWebhook : [ "PATCH /orgs/{org}/hooks/{hook_id}" ] ,
updateWebhookConfigForOrg : [ "PATCH /orgs/{org}/hooks/{hook_id}/config" ]
} ,
packages : {
deletePackageForAuthenticatedUser : [ "DELETE /user/packages/{package_type}/{package_name}" ] ,
deletePackageForOrg : [ "DELETE /orgs/{org}/packages/{package_type}/{package_name}" ] ,
deletePackageVersionForAuthenticatedUser : [ "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" ] ,
deletePackageVersionForOrg : [ "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" ] ,
getAllPackageVersionsForAPackageOwnedByAnOrg : [ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" ] ,
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser : [ "GET /user/packages/{package_type}/{package_name}/versions" ] ,
getAllPackageVersionsForPackageOwnedByUser : [ "GET /users/{username}/packages/{package_type}/{package_name}/versions" ] ,
getPackageForAuthenticatedUser : [ "GET /user/packages/{package_type}/{package_name}" ] ,
getPackageForOrganization : [ "GET /orgs/{org}/packages/{package_type}/{package_name}" ] ,
getPackageForUser : [ "GET /users/{username}/packages/{package_type}/{package_name}" ] ,
getPackageVersionForAuthenticatedUser : [ "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" ] ,
getPackageVersionForOrganization : [ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" ] ,
getPackageVersionForUser : [ "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" ] ,
restorePackageForAuthenticatedUser : [ "POST /user/packages/{package_type}/{package_name}/restore" ] ,
restorePackageForOrg : [ "POST /orgs/{org}/packages/{package_type}/{package_name}/restore" ] ,
restorePackageVersionForAuthenticatedUser : [ "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ] ,
restorePackageVersionForOrg : [ "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ]
} ,
projects : {
addCollaborator : [ "PUT /projects/{project_id}/collaborators/{username}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createCard : [ "POST /projects/columns/{column_id}/cards" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createColumn : [ "POST /projects/{project_id}/columns" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForAuthenticatedUser : [ "POST /user/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForOrg : [ "POST /orgs/{org}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForRepo : [ "POST /repos/{owner}/{repo}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
delete : [ "DELETE /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
deleteCard : [ "DELETE /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
deleteColumn : [ "DELETE /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
get : [ "GET /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getCard : [ "GET /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getColumn : [ "GET /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getPermissionForUser : [ "GET /projects/{project_id}/collaborators/{username}/permission" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listCards : [ "GET /projects/columns/{column_id}/cards" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listCollaborators : [ "GET /projects/{project_id}/collaborators" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listColumns : [ "GET /projects/{project_id}/columns" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForOrg : [ "GET /orgs/{org}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForUser : [ "GET /users/{username}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
moveCard : [ "POST /projects/columns/cards/{card_id}/moves" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
moveColumn : [ "POST /projects/columns/{column_id}/moves" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
removeCollaborator : [ "DELETE /projects/{project_id}/collaborators/{username}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
update : [ "PATCH /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
updateCard : [ "PATCH /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
updateColumn : [ "PATCH /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ]
} ,
pulls : {
checkIfMerged : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
create : [ "POST /repos/{owner}/{repo}/pulls" ] ,
createReplyForReviewComment : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" ] ,
createReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
createReviewComment : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" ] ,
deletePendingReview : [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
deleteReviewComment : [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
dismissReview : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" ] ,
get : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
getReview : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
getReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
list : [ "GET /repos/{owner}/{repo}/pulls" ] ,
listCommentsForReview : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ] ,
listCommits : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits" ] ,
listFiles : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files" ] ,
listRequestedReviewers : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
listReviewComments : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ] ,
listReviewCommentsForRepo : [ "GET /repos/{owner}/{repo}/pulls/comments" ] ,
listReviews : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
merge : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
removeRequestedReviewers : [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
requestReviewers : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
submitReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" ] ,
update : [ "PATCH /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
updateBranch : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" , {
mediaType : {
previews : [ "lydian" ]
}
} ] ,
updateReview : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
updateReviewComment : [ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" ]
} ,
rateLimit : {
get : [ "GET /rate_limit" ]
} ,
reactions : {
createForCommitComment : [ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForIssue : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForIssueComment : [ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForPullRequestReviewComment : [ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForTeamDiscussionCommentInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForTeamDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForIssue : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForIssueComment : [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForPullRequestComment : [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForTeamDiscussion : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForTeamDiscussionComment : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteLegacy : [ "DELETE /reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} , {
deprecated : "octokit.reactions.deleteLegacy() is deprecated, see https://docs.github.com/v3/reactions/#delete-a-reaction-legacy"
} ] ,
listForCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForIssueComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForPullRequestReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForTeamDiscussionCommentInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForTeamDiscussionInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ]
} ,
repos : {
acceptInvitation : [ "PATCH /user/repository_invitations/{invitation_id}" ] ,
addAppAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
addCollaborator : [ "PUT /repos/{owner}/{repo}/collaborators/{username}" ] ,
addStatusCheckContexts : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
addTeamAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
addUserAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
checkCollaborator : [ "GET /repos/{owner}/{repo}/collaborators/{username}" ] ,
checkVulnerabilityAlerts : [ "GET /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
compareCommits : [ "GET /repos/{owner}/{repo}/compare/{base}...{head}" ] ,
createCommitComment : [ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" ] ,
createCommitSignatureProtection : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
createCommitStatus : [ "POST /repos/{owner}/{repo}/statuses/{sha}" ] ,
createDeployKey : [ "POST /repos/{owner}/{repo}/keys" ] ,
createDeployment : [ "POST /repos/{owner}/{repo}/deployments" ] ,
createDeploymentStatus : [ "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ] ,
createDispatchEvent : [ "POST /repos/{owner}/{repo}/dispatches" ] ,
createForAuthenticatedUser : [ "POST /user/repos" ] ,
createFork : [ "POST /repos/{owner}/{repo}/forks" ] ,
createInOrg : [ "POST /orgs/{org}/repos" ] ,
createOrUpdateFileContents : [ "PUT /repos/{owner}/{repo}/contents/{path}" ] ,
createPagesSite : [ "POST /repos/{owner}/{repo}/pages" , {
mediaType : {
previews : [ "switcheroo" ]
}
} ] ,
createRelease : [ "POST /repos/{owner}/{repo}/releases" ] ,
createUsingTemplate : [ "POST /repos/{template_owner}/{template_repo}/generate" , {
mediaType : {
previews : [ "baptiste" ]
}
} ] ,
createWebhook : [ "POST /repos/{owner}/{repo}/hooks" ] ,
declineInvitation : [ "DELETE /user/repository_invitations/{invitation_id}" ] ,
delete : [ "DELETE /repos/{owner}/{repo}" ] ,
deleteAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ] ,
deleteAdminBranchProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
deleteBranchProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
deleteCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}" ] ,
deleteCommitSignatureProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
deleteDeployKey : [ "DELETE /repos/{owner}/{repo}/keys/{key_id}" ] ,
deleteDeployment : [ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
deleteFile : [ "DELETE /repos/{owner}/{repo}/contents/{path}" ] ,
deleteInvitation : [ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" ] ,
deletePagesSite : [ "DELETE /repos/{owner}/{repo}/pages" , {
mediaType : {
previews : [ "switcheroo" ]
}
} ] ,
deletePullRequestReviewProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
deleteRelease : [ "DELETE /repos/{owner}/{repo}/releases/{release_id}" ] ,
deleteReleaseAsset : [ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
deleteWebhook : [ "DELETE /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
disableAutomatedSecurityFixes : [ "DELETE /repos/{owner}/{repo}/automated-security-fixes" , {
mediaType : {
previews : [ "london" ]
}
} ] ,
disableVulnerabilityAlerts : [ "DELETE /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
downloadArchive : [ "GET /repos/{owner}/{repo}/zipball/{ref}" , { } , {
renamed : [ "repos" , "downloadZipballArchive" ]
} ] ,
downloadTarballArchive : [ "GET /repos/{owner}/{repo}/tarball/{ref}" ] ,
downloadZipballArchive : [ "GET /repos/{owner}/{repo}/zipball/{ref}" ] ,
enableAutomatedSecurityFixes : [ "PUT /repos/{owner}/{repo}/automated-security-fixes" , {
mediaType : {
previews : [ "london" ]
}
} ] ,
enableVulnerabilityAlerts : [ "PUT /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
get : [ "GET /repos/{owner}/{repo}" ] ,
getAccessRestrictions : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ] ,
getAdminBranchProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
getAllStatusCheckContexts : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ] ,
getAllTopics : [ "GET /repos/{owner}/{repo}/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
getAppsWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ] ,
getBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}" ] ,
getBranchProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
getClones : [ "GET /repos/{owner}/{repo}/traffic/clones" ] ,
getCodeFrequencyStats : [ "GET /repos/{owner}/{repo}/stats/code_frequency" ] ,
getCollaboratorPermissionLevel : [ "GET /repos/{owner}/{repo}/collaborators/{username}/permission" ] ,
getCombinedStatusForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/status" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/commits/{ref}" ] ,
getCommitActivityStats : [ "GET /repos/{owner}/{repo}/stats/commit_activity" ] ,
getCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}" ] ,
getCommitSignatureProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
getCommunityProfileMetrics : [ "GET /repos/{owner}/{repo}/community/profile" ] ,
getContent : [ "GET /repos/{owner}/{repo}/contents/{path}" ] ,
getContributorsStats : [ "GET /repos/{owner}/{repo}/stats/contributors" ] ,
getDeployKey : [ "GET /repos/{owner}/{repo}/keys/{key_id}" ] ,
getDeployment : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
getDeploymentStatus : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" ] ,
getLatestPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/latest" ] ,
getLatestRelease : [ "GET /repos/{owner}/{repo}/releases/latest" ] ,
getPages : [ "GET /repos/{owner}/{repo}/pages" ] ,
getPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/{build_id}" ] ,
getParticipationStats : [ "GET /repos/{owner}/{repo}/stats/participation" ] ,
getPullRequestReviewProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
getPunchCardStats : [ "GET /repos/{owner}/{repo}/stats/punch_card" ] ,
getReadme : [ "GET /repos/{owner}/{repo}/readme" ] ,
getRelease : [ "GET /repos/{owner}/{repo}/releases/{release_id}" ] ,
getReleaseAsset : [ "GET /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
getReleaseByTag : [ "GET /repos/{owner}/{repo}/releases/tags/{tag}" ] ,
getStatusChecksProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
getTeamsWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ] ,
getTopPaths : [ "GET /repos/{owner}/{repo}/traffic/popular/paths" ] ,
getTopReferrers : [ "GET /repos/{owner}/{repo}/traffic/popular/referrers" ] ,
getUsersWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ] ,
getViews : [ "GET /repos/{owner}/{repo}/traffic/views" ] ,
getWebhook : [ "GET /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
getWebhookConfigForRepo : [ "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" ] ,
listBranches : [ "GET /repos/{owner}/{repo}/branches" ] ,
listBranchesForHeadCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" , {
mediaType : {
previews : [ "groot" ]
}
} ] ,
listCollaborators : [ "GET /repos/{owner}/{repo}/collaborators" ] ,
listCommentsForCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" ] ,
listCommitCommentsForRepo : [ "GET /repos/{owner}/{repo}/comments" ] ,
listCommitStatusesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/statuses" ] ,
listCommits : [ "GET /repos/{owner}/{repo}/commits" ] ,
listContributors : [ "GET /repos/{owner}/{repo}/contributors" ] ,
listDeployKeys : [ "GET /repos/{owner}/{repo}/keys" ] ,
listDeploymentStatuses : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ] ,
listDeployments : [ "GET /repos/{owner}/{repo}/deployments" ] ,
listForAuthenticatedUser : [ "GET /user/repos" ] ,
listForOrg : [ "GET /orgs/{org}/repos" ] ,
listForUser : [ "GET /users/{username}/repos" ] ,
listForks : [ "GET /repos/{owner}/{repo}/forks" ] ,
listInvitations : [ "GET /repos/{owner}/{repo}/invitations" ] ,
listInvitationsForAuthenticatedUser : [ "GET /user/repository_invitations" ] ,
listLanguages : [ "GET /repos/{owner}/{repo}/languages" ] ,
listPagesBuilds : [ "GET /repos/{owner}/{repo}/pages/builds" ] ,
listPublic : [ "GET /repositories" ] ,
listPullRequestsAssociatedWithCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" , {
mediaType : {
previews : [ "groot" ]
}
} ] ,
listReleaseAssets : [ "GET /repos/{owner}/{repo}/releases/{release_id}/assets" ] ,
listReleases : [ "GET /repos/{owner}/{repo}/releases" ] ,
listTags : [ "GET /repos/{owner}/{repo}/tags" ] ,
listTeams : [ "GET /repos/{owner}/{repo}/teams" ] ,
listWebhooks : [ "GET /repos/{owner}/{repo}/hooks" ] ,
merge : [ "POST /repos/{owner}/{repo}/merges" ] ,
pingWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings" ] ,
removeAppAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
removeCollaborator : [ "DELETE /repos/{owner}/{repo}/collaborators/{username}" ] ,
removeStatusCheckContexts : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
removeStatusCheckProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
removeTeamAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
removeUserAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
renameBranch : [ "POST /repos/{owner}/{repo}/branches/{branch}/rename" ] ,
replaceAllTopics : [ "PUT /repos/{owner}/{repo}/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
requestPagesBuild : [ "POST /repos/{owner}/{repo}/pages/builds" ] ,
setAdminBranchProtection : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
setAppAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
setStatusCheckContexts : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
setTeamAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
setUserAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
testPushWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests" ] ,
transfer : [ "POST /repos/{owner}/{repo}/transfer" ] ,
update : [ "PATCH /repos/{owner}/{repo}" ] ,
updateBranchProtection : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
updateCommitComment : [ "PATCH /repos/{owner}/{repo}/comments/{comment_id}" ] ,
updateInformationAboutPagesSite : [ "PUT /repos/{owner}/{repo}/pages" ] ,
updateInvitation : [ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" ] ,
updatePullRequestReviewProtection : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
updateRelease : [ "PATCH /repos/{owner}/{repo}/releases/{release_id}" ] ,
updateReleaseAsset : [ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
updateStatusCheckPotection : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" , { } , {
renamed : [ "repos" , "updateStatusCheckProtection" ]
} ] ,
updateStatusCheckProtection : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
updateWebhook : [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
updateWebhookConfigForRepo : [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" ] ,
uploadReleaseAsset : [ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}" , {
baseUrl : "https://uploads.github.com"
} ]
} ,
search : {
code : [ "GET /search/code" ] ,
commits : [ "GET /search/commits" , {
mediaType : {
previews : [ "cloak" ]
}
} ] ,
issuesAndPullRequests : [ "GET /search/issues" ] ,
labels : [ "GET /search/labels" ] ,
repos : [ "GET /search/repositories" ] ,
topics : [ "GET /search/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
users : [ "GET /search/users" ]
} ,
secretScanning : {
getAlert : [ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/secret-scanning/alerts" ] ,
updateAlert : [ "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" ]
} ,
teams : {
addOrUpdateMembershipForUserInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
addOrUpdateProjectPermissionsInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
addOrUpdateRepoPermissionsInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
checkPermissionsForProjectInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
checkPermissionsForRepoInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
create : [ "POST /orgs/{org}/teams" ] ,
createDiscussionCommentInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ] ,
createDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions" ] ,
deleteDiscussionCommentInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
deleteDiscussionInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
deleteInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}" ] ,
getByName : [ "GET /orgs/{org}/teams/{team_slug}" ] ,
getDiscussionCommentInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
getDiscussionInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
getMembershipForUserInOrg : [ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
list : [ "GET /orgs/{org}/teams" ] ,
listChildInOrg : [ "GET /orgs/{org}/teams/{team_slug}/teams" ] ,
listDiscussionCommentsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ] ,
listDiscussionsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions" ] ,
listForAuthenticatedUser : [ "GET /user/teams" ] ,
listMembersInOrg : [ "GET /orgs/{org}/teams/{team_slug}/members" ] ,
listPendingInvitationsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/invitations" ] ,
listProjectsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listReposInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos" ] ,
removeMembershipForUserInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
removeProjectInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" ] ,
removeRepoInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
updateDiscussionCommentInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
updateDiscussionInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
updateInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}" ]
} ,
users : {
addEmailForAuthenticated : [ "POST /user/emails" ] ,
block : [ "PUT /user/blocks/{username}" ] ,
checkBlocked : [ "GET /user/blocks/{username}" ] ,
checkFollowingForUser : [ "GET /users/{username}/following/{target_user}" ] ,
checkPersonIsFollowedByAuthenticated : [ "GET /user/following/{username}" ] ,
createGpgKeyForAuthenticated : [ "POST /user/gpg_keys" ] ,
createPublicSshKeyForAuthenticated : [ "POST /user/keys" ] ,
deleteEmailForAuthenticated : [ "DELETE /user/emails" ] ,
deleteGpgKeyForAuthenticated : [ "DELETE /user/gpg_keys/{gpg_key_id}" ] ,
deletePublicSshKeyForAuthenticated : [ "DELETE /user/keys/{key_id}" ] ,
follow : [ "PUT /user/following/{username}" ] ,
getAuthenticated : [ "GET /user" ] ,
getByUsername : [ "GET /users/{username}" ] ,
getContextForUser : [ "GET /users/{username}/hovercard" ] ,
getGpgKeyForAuthenticated : [ "GET /user/gpg_keys/{gpg_key_id}" ] ,
getPublicSshKeyForAuthenticated : [ "GET /user/keys/{key_id}" ] ,
list : [ "GET /users" ] ,
listBlockedByAuthenticated : [ "GET /user/blocks" ] ,
listEmailsForAuthenticated : [ "GET /user/emails" ] ,
listFollowedByAuthenticated : [ "GET /user/following" ] ,
listFollowersForAuthenticatedUser : [ "GET /user/followers" ] ,
listFollowersForUser : [ "GET /users/{username}/followers" ] ,
listFollowingForUser : [ "GET /users/{username}/following" ] ,
listGpgKeysForAuthenticated : [ "GET /user/gpg_keys" ] ,
listGpgKeysForUser : [ "GET /users/{username}/gpg_keys" ] ,
listPublicEmailsForAuthenticated : [ "GET /user/public_emails" ] ,
listPublicKeysForUser : [ "GET /users/{username}/keys" ] ,
listPublicSshKeysForAuthenticated : [ "GET /user/keys" ] ,
setPrimaryEmailVisibilityForAuthenticated : [ "PATCH /user/email/visibility" ] ,
unblock : [ "DELETE /user/blocks/{username}" ] ,
unfollow : [ "DELETE /user/following/{username}" ] ,
updateAuthenticated : [ "PATCH /user" ]
}
} ;
const VERSION = "4.12.0";

function endpointsToMethods(octokit, endpointsMap) {
  const newMethods = {};

  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      const [route, defaults, decorations] = endpoint;
      const [method, url] = route.split(/ /);
      const endpointDefaults = Object.assign({
        method,
        url
      }, defaults);

      if (!newMethods[scope]) {
        newMethods[scope] = {};
      }

      const scopeMethods = newMethods[scope];

      if (decorations) {
        scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
        continue;
      }

      scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
    }
  }

  return newMethods;
}
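// A minimal sketch of what the mapping above produces (owner/repo/run_id values
// are illustrative assumptions): the route registered under
// `actions.cancelWorkflowRun` becomes a callable method that forwards to
// `octokit.request` with the route string as its defaults.
//
//   const api = endpointsToMethods(octokit, Endpoints);
//   await api.actions.cancelWorkflowRun({ owner: "octocat", repo: "hello-world", run_id: 42 });
//   // -> octokit.request("POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel", { owner, repo, run_id })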
function decorate ( octokit , scope , methodName , defaults , decorations ) {
const requestWithDefaults = octokit . request . defaults ( defaults ) ;
/* istanbul ignore next */
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
function withDecorations ( ... args ) {
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
let options = requestWithDefaults . endpoint . merge ( ... args ) ; // There are currently no other decorations than `.mapToData`
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( decorations . mapToData ) {
options = Object . assign ( { } , options , {
data : options [ decorations . mapToData ] ,
[ decorations . mapToData ] : undefined
} ) ;
return requestWithDefaults ( options ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( decorations . renamed ) {
const [ newScope , newMethodName ] = decorations . renamed ;
octokit . log . warn ( ` octokit. ${ scope } . ${ methodName } () has been renamed to octokit. ${ newScope } . ${ newMethodName } () ` ) ;
}
if ( decorations . deprecated ) {
octokit . log . warn ( decorations . deprecated ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( decorations . renamedParameters ) {
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
const options = requestWithDefaults . endpoint . merge ( ... args ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
for ( const [ name , alias ] of Object . entries ( decorations . renamedParameters ) ) {
if ( name in options ) {
octokit . log . warn ( ` " ${ name } " parameter is deprecated for "octokit. ${ scope } . ${ methodName } ()". Use " ${ alias } " instead ` ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( ! ( alias in options ) ) {
options [ alias ] = options [ name ] ;
}
delete options [ name ] ;
}
}
return requestWithDefaults ( options ) ;
} // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
return requestWithDefaults ( ... args ) ;
}
return Object . assign ( withDecorations , requestWithDefaults ) ;
}
function restEndpointMethods ( octokit ) {
return endpointsToMethods ( octokit , Endpoints ) ;
}
restEndpointMethods . VERSION = VERSION ;
exports . restEndpointMethods = restEndpointMethods ;
//# sourceMappingURL=index.js.map
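// Illustrative sketch, not part of the generated bundle: the plugin above maps each
// entry of the Endpoints table (e.g. "GET /users/{username}/followers") to a namespaced
// method on an Octokit instance. The instance name and parameter values below are
// assumptions for the example.
//
//   const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
//   // roughly equivalent to octokit.request("GET /users/{username}/followers", { username: "octocat" })
//   await octokit.users.listFollowersForUser({ username: "octocat" });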
/***/ } ) ,
/***/ 537 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var deprecation = _ _nccwpck _require _ _ ( 8932 ) ;
var once = _interopDefault ( _ _nccwpck _require _ _ ( 1223 ) ) ;
const logOnce = once ( deprecation => console . warn ( deprecation ) ) ;
/ * *
* Error with extra properties to help with debugging
* /
class RequestError extends Error {
constructor ( message , statusCode , options ) {
super ( message ) ; // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
if ( Error . captureStackTrace ) {
Error . captureStackTrace ( this , this . constructor ) ;
}
this . name = "HttpError" ;
this . status = statusCode ;
Object . defineProperty ( this , "code" , {
get ( ) {
logOnce ( new deprecation . Deprecation ( "[@octokit/request-error] `error.code` is deprecated, use `error.status`." ) ) ;
return statusCode ;
}
} ) ;
this . headers = options . headers || { } ; // redact request credentials without mutating original request options
const requestCopy = Object . assign ( { } , options . request ) ;
if ( options . request . headers . authorization ) {
requestCopy . headers = Object . assign ( { } , options . request . headers , {
authorization : options . request . headers . authorization . replace ( / .*$/ , " [REDACTED]" )
} ) ;
}
requestCopy . url = requestCopy . url // client_id & client_secret can be passed as URL query parameters to increase rate limit
// see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
. replace ( /\bclient_secret=\w+/g , "client_secret=[REDACTED]" ) // OAuth tokens can be passed as URL query parameters, although it is not recommended
// see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
. replace ( /\baccess_token=\w+/g , "access_token=[REDACTED]" ) ;
this . request = requestCopy ;
}
}
exports . RequestError = RequestError ;
//# sourceMappingURL=index.js.map
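// Illustrative sketch, not part of the generated bundle: RequestError keeps the HTTP
// status and a redacted copy of the failing request. All option values below are
// assumptions for the example.
//
//   const error = new RequestError("Not Found", 404, {
//     headers: {},
//     request: { method: "GET", url: "https://api.github.com/unknown", headers: { authorization: "token secret123" } }
//   });
//   error.status;                          // 404
//   error.request.headers.authorization;   // "token [REDACTED]"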
/***/ } ) ,
/***/ 6234 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var endpoint = _ _nccwpck _require _ _ ( 9440 ) ;
var universalUserAgent = _ _nccwpck _require _ _ ( 5030 ) ;
var isPlainObject = _ _nccwpck _require _ _ ( 9062 ) ;
var nodeFetch = _interopDefault ( _ _nccwpck _require _ _ ( 467 ) ) ;
var requestError = _ _nccwpck _require _ _ ( 537 ) ;
const VERSION = "5.4.14" ;
function getBufferResponse ( response ) {
return response . arrayBuffer ( ) ;
}
function fetchWrapper ( requestOptions ) {
if ( isPlainObject . isPlainObject ( requestOptions . body ) || Array . isArray ( requestOptions . body ) ) {
requestOptions . body = JSON . stringify ( requestOptions . body ) ;
}
let headers = { } ;
let status ;
let url ;
const fetch = requestOptions . request && requestOptions . request . fetch || nodeFetch ;
return fetch ( requestOptions . url , Object . assign ( {
method : requestOptions . method ,
body : requestOptions . body ,
headers : requestOptions . headers ,
redirect : requestOptions . redirect
} , requestOptions . request ) ) . then ( response => {
url = response . url ;
status = response . status ;
for ( const keyAndValue of response . headers ) {
headers [ keyAndValue [ 0 ] ] = keyAndValue [ 1 ] ;
}
if ( status === 204 || status === 205 ) {
return ;
} // GitHub API returns 200 for HEAD requests
if ( requestOptions . method === "HEAD" ) {
if ( status < 400 ) {
return ;
}
throw new requestError . RequestError ( response . statusText , status , {
headers ,
request : requestOptions
} ) ;
}
if ( status === 304 ) {
throw new requestError . RequestError ( "Not modified" , status , {
headers ,
request : requestOptions
} ) ;
}
if ( status >= 400 ) {
return response . text ( ) . then ( message => {
const error = new requestError . RequestError ( message , status , {
headers ,
request : requestOptions
} ) ;
try {
let responseBody = JSON . parse ( error . message ) ;
Object . assign ( error , responseBody ) ;
let errors = responseBody . errors ; // Assumption: `errors` is always an array
error . message = error . message + ": " + errors . map ( JSON . stringify ) . join ( ", " ) ;
} catch ( e ) { // ignore, see octokit/rest.js#684
}
throw error ;
} ) ;
}
const contentType = response . headers . get ( "content-type" ) ;
if ( /application\/json/ . test ( contentType ) ) {
return response . json ( ) ;
}
if ( ! contentType || /^text\/|charset=utf-8$/ . test ( contentType ) ) {
return response . text ( ) ;
}
return getBufferResponse ( response ) ;
} ) . then ( data => {
return {
status ,
url ,
headers ,
data
} ;
} ) . catch ( error => {
if ( error instanceof requestError . RequestError ) {
throw error ;
}
throw new requestError . RequestError ( error . message , 500 , {
headers ,
request : requestOptions
} ) ;
} ) ;
}
function withDefaults ( oldEndpoint , newDefaults ) {
const endpoint = oldEndpoint . defaults ( newDefaults ) ;
const newApi = function ( route , parameters ) {
const endpointOptions = endpoint . merge ( route , parameters ) ;
if ( ! endpointOptions . request || ! endpointOptions . request . hook ) {
return fetchWrapper ( endpoint . parse ( endpointOptions ) ) ;
}
const request = ( route , parameters ) => {
return fetchWrapper ( endpoint . parse ( endpoint . merge ( route , parameters ) ) ) ;
} ;
Object . assign ( request , {
endpoint ,
defaults : withDefaults . bind ( null , endpoint )
} ) ;
return endpointOptions . request . hook ( request , endpointOptions ) ;
} ;
return Object . assign ( newApi , {
endpoint ,
defaults : withDefaults . bind ( null , endpoint )
} ) ;
}
const request = withDefaults ( endpoint . endpoint , {
headers : {
"user-agent" : ` octokit-request.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } `
}
} ) ;
exports . request = request ;
//# sourceMappingURL=index.js.map
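// Illustrative sketch, not part of the generated bundle: `request` merges a route
// string with the defaults above and resolves to { status, url, headers, data }.
// The route and parameter values are assumptions for the example.
//
//   const { data } = await request("GET /repos/{owner}/{repo}", {
//     owner: "octocat",
//     repo: "hello-world"
//   });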
/***/ } ) ,
/***/ 9062 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
/ * !
* is - plain - object < https : //github.com/jonschlinkert/is-plain-object>
*
* Copyright ( c ) 2014 - 2017 , Jon Schlinkert .
* Released under the MIT License .
* /
function isObject ( o ) {
return Object . prototype . toString . call ( o ) === '[object Object]' ;
}
function isPlainObject ( o ) {
var ctor , prot ;
if ( isObject ( o ) === false ) return false ;
// If has modified constructor
ctor = o . constructor ;
if ( ctor === undefined ) return true ;
// If has modified prototype
prot = ctor . prototype ;
if ( isObject ( prot ) === false ) return false ;
// If constructor does not have an Object-specific method
if ( prot . hasOwnProperty ( 'isPrototypeOf' ) === false ) {
return false ;
}
// Most likely a plain Object
return true ;
}
exports . isPlainObject = isPlainObject ;
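// Illustrative sketch, not part of the generated bundle: only values created by the
// Object constructor (or with no constructor at all) count as plain objects.
//
//   isPlainObject({ a: 1 });             // true
//   isPlainObject(Object.create(null));  // true  (no constructor)
//   isPlainObject([1, 2, 3]);            // false
//   isPlainObject(new Date());           // false
//   isPlainObject(null);                 // false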
/***/ } ) ,
/***/ 3682 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var register = _ _nccwpck _require _ _ ( 4670 )
var addHook = _ _nccwpck _require _ _ ( 5549 )
var removeHook = _ _nccwpck _require _ _ ( 6819 )
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function . bind
var bindable = bind . bind ( bind )
function bindApi ( hook , state , name ) {
var removeHookRef = bindable ( removeHook , null ) . apply ( null , name ? [ state , name ] : [ state ] )
hook . api = { remove : removeHookRef }
hook . remove = removeHookRef
; [ 'before' , 'error' , 'after' , 'wrap' ] . forEach ( function ( kind ) {
var args = name ? [ state , kind , name ] : [ state , kind ]
hook [ kind ] = hook . api [ kind ] = bindable ( addHook , null ) . apply ( null , args )
} )
}
function HookSingular ( ) {
var singularHookName = 'h'
var singularHookState = {
registry : { }
}
var singularHook = register . bind ( null , singularHookState , singularHookName )
bindApi ( singularHook , singularHookState , singularHookName )
return singularHook
}
function HookCollection ( ) {
var state = {
registry : { }
}
var hook = register . bind ( null , state )
bindApi ( hook , state )
return hook
}
var collectionHookDeprecationMessageDisplayed = false
function Hook ( ) {
if ( ! collectionHookDeprecationMessageDisplayed ) {
console . warn ( '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' )
collectionHookDeprecationMessageDisplayed = true
}
return HookCollection ( )
}
Hook . Singular = HookSingular . bind ( )
Hook . Collection = HookCollection . bind ( )
module . exports = Hook
// expose constructors as a named property for TypeScript
module . exports . Hook = Hook
module . exports . Singular = Hook . Singular
module . exports . Collection = Hook . Collection
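// Illustrative sketch, not part of the generated bundle: a hook collection wraps a
// method with the handlers registered for a name. The hook name "save" and the
// persist() helper are assumptions for the example.
//
//   const hook = new Hook.Collection();
//   hook.before("save", (options) => { options.startedAt = Date.now(); });
//   hook.after("save", (result, options) => { console.log("saved", options.id); });
//   await hook("save", (options) => persist(options), { id: 1 });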
/***/ } ) ,
/***/ 5549 :
/***/ ( ( module ) => {
module . exports = addHook ;
function addHook ( state , kind , name , hook ) {
var orig = hook ;
if ( ! state . registry [ name ] ) {
state . registry [ name ] = [ ] ;
}
if ( kind === "before" ) {
hook = function ( method , options ) {
return Promise . resolve ( )
. then ( orig . bind ( null , options ) )
. then ( method . bind ( null , options ) ) ;
} ;
}
if ( kind === "after" ) {
hook = function ( method , options ) {
var result ;
return Promise . resolve ( )
. then ( method . bind ( null , options ) )
. then ( function ( result _ ) {
result = result _ ;
return orig ( result , options ) ;
} )
. then ( function ( ) {
return result ;
} ) ;
} ;
}
if ( kind === "error" ) {
hook = function ( method , options ) {
return Promise . resolve ( )
. then ( method . bind ( null , options ) )
. catch ( function ( error ) {
return orig ( error , options ) ;
} ) ;
} ;
}
state . registry [ name ] . push ( {
hook : hook ,
orig : orig ,
} ) ;
}
/***/ } ) ,
/***/ 4670 :
/***/ ( ( module ) => {
module . exports = register ;
function register ( state , name , method , options ) {
if ( typeof method !== "function" ) {
throw new Error ( "method for before hook must be a function" ) ;
}
if ( ! options ) {
options = { } ;
}
if ( Array . isArray ( name ) ) {
return name . reverse ( ) . reduce ( function ( callback , name ) {
return register . bind ( null , state , name , callback , options ) ;
} , method ) ( ) ;
}
return Promise . resolve ( ) . then ( function ( ) {
if ( ! state . registry [ name ] ) {
return method ( options ) ;
}
return state . registry [ name ] . reduce ( function ( method , registered ) {
return registered . hook . bind ( null , method , options ) ;
} , method ) ( ) ;
} ) ;
}
/***/ } ) ,
/***/ 6819 :
/***/ ( ( module ) => {
module . exports = removeHook ;
function removeHook ( state , name , method ) {
if ( ! state . registry [ name ] ) {
return ;
}
var index = state . registry [ name ]
. map ( function ( registered ) {
return registered . orig ;
} )
. indexOf ( method ) ;
if ( index === - 1 ) {
return ;
}
state . registry [ name ] . splice ( index , 1 ) ;
}
/***/ } ) ,
/***/ 8932 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
class Deprecation extends Error {
constructor ( message ) {
super ( message ) ; // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
if ( Error . captureStackTrace ) {
Error . captureStackTrace ( this , this . constructor ) ;
}
this . name = 'Deprecation' ;
}
}
exports . Deprecation = Deprecation ;
/***/ } ) ,
/***/ 467 :
/***/ ( ( module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var Stream = _interopDefault ( _ _nccwpck _require _ _ ( 2413 ) ) ;
var http = _interopDefault ( _ _nccwpck _require _ _ ( 8605 ) ) ;
var Url = _interopDefault ( _ _nccwpck _require _ _ ( 8835 ) ) ;
var https = _interopDefault ( _ _nccwpck _require _ _ ( 7211 ) ) ;
var zlib = _interopDefault ( _ _nccwpck _require _ _ ( 8761 ) ) ;
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
// fix for the issue where "Readable" isn't a named export
const Readable = Stream . Readable ;
const BUFFER = Symbol ( 'buffer' ) ;
const TYPE = Symbol ( 'type' ) ;
class Blob {
constructor ( ) {
this [ TYPE ] = '' ;
const blobParts = arguments [ 0 ] ;
const options = arguments [ 1 ] ;
const buffers = [ ] ;
let size = 0 ;
if ( blobParts ) {
const a = blobParts ;
const length = Number ( a . length ) ;
for ( let i = 0 ; i < length ; i ++ ) {
const element = a [ i ] ;
let buffer ;
if ( element instanceof Buffer ) {
buffer = element ;
} else if ( ArrayBuffer . isView ( element ) ) {
buffer = Buffer . from ( element . buffer , element . byteOffset , element . byteLength ) ;
} else if ( element instanceof ArrayBuffer ) {
buffer = Buffer . from ( element ) ;
} else if ( element instanceof Blob ) {
buffer = element [ BUFFER ] ;
} else {
buffer = Buffer . from ( typeof element === 'string' ? element : String ( element ) ) ;
}
size += buffer . length ;
buffers . push ( buffer ) ;
}
}
this [ BUFFER ] = Buffer . concat ( buffers ) ;
let type = options && options . type !== undefined && String ( options . type ) . toLowerCase ( ) ;
if ( type && ! /[^\u0020-\u007E]/ . test ( type ) ) {
this [ TYPE ] = type ;
}
}
get size ( ) {
return this [ BUFFER ] . length ;
}
get type ( ) {
return this [ TYPE ] ;
}
text ( ) {
return Promise . resolve ( this [ BUFFER ] . toString ( ) ) ;
}
arrayBuffer ( ) {
const buf = this [ BUFFER ] ;
const ab = buf . buffer . slice ( buf . byteOffset , buf . byteOffset + buf . byteLength ) ;
return Promise . resolve ( ab ) ;
}
stream ( ) {
const readable = new Readable ( ) ;
readable . _read = function ( ) { } ;
readable . push ( this [ BUFFER ] ) ;
readable . push ( null ) ;
return readable ;
}
toString ( ) {
return '[object Blob]' ;
}
slice ( ) {
const size = this . size ;
const start = arguments [ 0 ] ;
const end = arguments [ 1 ] ;
let relativeStart , relativeEnd ;
if ( start === undefined ) {
relativeStart = 0 ;
} else if ( start < 0 ) {
relativeStart = Math . max ( size + start , 0 ) ;
} else {
relativeStart = Math . min ( start , size ) ;
}
if ( end === undefined ) {
relativeEnd = size ;
} else if ( end < 0 ) {
relativeEnd = Math . max ( size + end , 0 ) ;
} else {
relativeEnd = Math . min ( end , size ) ;
}
const span = Math . max ( relativeEnd - relativeStart , 0 ) ;
const buffer = this [ BUFFER ] ;
const slicedBuffer = buffer . slice ( relativeStart , relativeStart + span ) ;
const blob = new Blob ( [ ] , { type : arguments [ 2 ] } ) ;
blob [ BUFFER ] = slicedBuffer ;
return blob ;
}
}
Object . defineProperties ( Blob . prototype , {
size : { enumerable : true } ,
type : { enumerable : true } ,
slice : { enumerable : true }
} ) ;
Object . defineProperty ( Blob . prototype , Symbol . toStringTag , {
value : 'Blob' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
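// Illustrative sketch, not part of the generated bundle: this minimal Blob concatenates
// its parts into a single Buffer and exposes size, type and slice(). Values below are
// assumptions for the example.
//
//   const blob = new Blob(["hello ", Buffer.from("world")], { type: "text/plain" });
//   blob.size;              // 11
//   blob.type;              // "text/plain"
//   await blob.text();      // "hello world"
//   blob.slice(0, 5).size;  // 5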
/ * *
* fetch - error . js
*
* FetchError interface for operational errors
* /
/ * *
* Create FetchError instance
*
* @ param String message Error message for human
* @ param String type Error type for machine
* @ param String systemError For Node . js system error
* @ return FetchError
* /
function FetchError ( message , type , systemError ) {
Error . call ( this , message ) ;
this . message = message ;
this . type = type ;
// when err.type is `system`, err.code contains system error code
if ( systemError ) {
this . code = this . errno = systemError . code ;
}
// hide custom error implementation details from end-users
Error . captureStackTrace ( this , this . constructor ) ;
}
FetchError . prototype = Object . create ( Error . prototype ) ;
FetchError . prototype . constructor = FetchError ;
FetchError . prototype . name = 'FetchError' ;
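// Illustrative sketch, not part of the generated bundle: FetchError tags operational
// failures with a machine-readable type. The message and type below are assumptions
// for the example.
//
//   const err = new FetchError("network timeout at https://example.com", "request-timeout");
//   err.name;  // "FetchError"
//   err.type;  // "request-timeout"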
let convert ;
try {
convert = _ _nccwpck _require _ _ ( 2877 ) . convert ;
} catch ( e ) { }
const INTERNALS = Symbol ( 'Body internals' ) ;
// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream . PassThrough ;
/ * *
* Body mixin
*
* Ref : https : //fetch.spec.whatwg.org/#body
*
* @ param Stream body Readable stream
* @ param Object opts Response options
* @ return Void
* /
function Body ( body ) {
var _this = this ;
var _ref = arguments . length > 1 && arguments [ 1 ] !== undefined ? arguments [ 1 ] : { } ,
_ref$size = _ref . size ;
let size = _ref$size === undefined ? 0 : _ref$size ;
var _ref$timeout = _ref . timeout ;
let timeout = _ref$timeout === undefined ? 0 : _ref$timeout ;
if ( body == null ) {
// body is undefined or null
body = null ;
} else if ( isURLSearchParams ( body ) ) {
// body is a URLSearchParams
body = Buffer . from ( body . toString ( ) ) ;
} else if ( isBlob ( body ) ) ; else if ( Buffer . isBuffer ( body ) ) ; else if ( Object . prototype . toString . call ( body ) === '[object ArrayBuffer]' ) {
// body is ArrayBuffer
body = Buffer . from ( body ) ;
} else if ( ArrayBuffer . isView ( body ) ) {
// body is ArrayBufferView
body = Buffer . from ( body . buffer , body . byteOffset , body . byteLength ) ;
} else if ( body instanceof Stream ) ; else {
// none of the above
// coerce to string then buffer
body = Buffer . from ( String ( body ) ) ;
}
this [ INTERNALS ] = {
body ,
disturbed : false ,
error : null
} ;
this . size = size ;
this . timeout = timeout ;
if ( body instanceof Stream ) {
body . on ( 'error' , function ( err ) {
const error = err . name === 'AbortError' ? err : new FetchError ( ` Invalid response body while trying to fetch ${ _this . url } : ${ err . message } ` , 'system' , err ) ;
_this [ INTERNALS ] . error = error ;
} ) ;
}
}
Body . prototype = {
get body ( ) {
return this [ INTERNALS ] . body ;
} ,
get bodyUsed ( ) {
return this [ INTERNALS ] . disturbed ;
} ,
/ * *
* Decode response as ArrayBuffer
*
* @ return Promise
* /
arrayBuffer ( ) {
return consumeBody . call ( this ) . then ( function ( buf ) {
return buf . buffer . slice ( buf . byteOffset , buf . byteOffset + buf . byteLength ) ;
} ) ;
} ,
/ * *
* Return raw response as Blob
*
* @ return Promise
* /
blob ( ) {
let ct = this . headers && this . headers . get ( 'content-type' ) || '' ;
return consumeBody . call ( this ) . then ( function ( buf ) {
return Object . assign (
// Prevent copying
new Blob ( [ ] , {
type : ct . toLowerCase ( )
} ) , {
[ BUFFER ] : buf
} ) ;
} ) ;
} ,
/ * *
* Decode response as json
*
* @ return Promise
* /
json ( ) {
var _this2 = this ;
return consumeBody . call ( this ) . then ( function ( buffer ) {
try {
return JSON . parse ( buffer . toString ( ) ) ;
} catch ( err ) {
return Body . Promise . reject ( new FetchError ( ` invalid json response body at ${ _this2 . url } reason: ${ err . message } ` , 'invalid-json' ) ) ;
}
} ) ;
} ,
/ * *
* Decode response as text
*
* @ return Promise
* /
text ( ) {
return consumeBody . call ( this ) . then ( function ( buffer ) {
return buffer . toString ( ) ;
} ) ;
} ,
/ * *
* Decode response as buffer ( non - spec api )
*
* @ return Promise
* /
buffer ( ) {
return consumeBody . call ( this ) ;
} ,
/ * *
* Decode response as text , while automatically detecting the encoding and
* trying to decode to UTF - 8 ( non - spec api )
*
* @ return Promise
* /
textConverted ( ) {
var _this3 = this ;
return consumeBody . call ( this ) . then ( function ( buffer ) {
return convertBody ( buffer , _this3 . headers ) ;
} ) ;
}
} ;
// In browsers, all properties are enumerable.
Object . defineProperties ( Body . prototype , {
body : { enumerable : true } ,
bodyUsed : { enumerable : true } ,
arrayBuffer : { enumerable : true } ,
blob : { enumerable : true } ,
json : { enumerable : true } ,
text : { enumerable : true }
} ) ;
Body . mixIn = function ( proto ) {
for ( const name of Object . getOwnPropertyNames ( Body . prototype ) ) {
// istanbul ignore else: future proof
if ( ! ( name in proto ) ) {
const desc = Object . getOwnPropertyDescriptor ( Body . prototype , name ) ;
Object . defineProperty ( proto , name , desc ) ;
}
}
} ;
/ * *
* Consume and convert an entire Body to a Buffer .
*
* Ref : https : //fetch.spec.whatwg.org/#concept-body-consume-body
*
* @ return Promise
* /
function consumeBody ( ) {
var _this4 = this ;
if ( this [ INTERNALS ] . disturbed ) {
return Body . Promise . reject ( new TypeError ( ` body used already for: ${ this . url } ` ) ) ;
}
this [ INTERNALS ] . disturbed = true ;
if ( this [ INTERNALS ] . error ) {
return Body . Promise . reject ( this [ INTERNALS ] . error ) ;
}
let body = this . body ;
// body is null
if ( body === null ) {
return Body . Promise . resolve ( Buffer . alloc ( 0 ) ) ;
}
// body is blob
if ( isBlob ( body ) ) {
body = body . stream ( ) ;
}
// body is buffer
if ( Buffer . isBuffer ( body ) ) {
return Body . Promise . resolve ( body ) ;
}
// istanbul ignore if: should never happen
if ( ! ( body instanceof Stream ) ) {
return Body . Promise . resolve ( Buffer . alloc ( 0 ) ) ;
}
// body is stream
// get ready to actually consume the body
let accum = [ ] ;
let accumBytes = 0 ;
let abort = false ;
return new Body . Promise ( function ( resolve , reject ) {
let resTimeout ;
// allow timeout on slow response body
if ( _this4 . timeout ) {
resTimeout = setTimeout ( function ( ) {
abort = true ;
reject ( new FetchError ( ` Response timeout while trying to fetch ${ _this4 . url } (over ${ _this4 . timeout } ms) ` , 'body-timeout' ) ) ;
} , _this4 . timeout ) ;
}
// handle stream errors
body . on ( 'error' , function ( err ) {
if ( err . name === 'AbortError' ) {
// if the request was aborted, reject with this Error
abort = true ;
reject ( err ) ;
} else {
// other errors, such as incorrect content-encoding
reject ( new FetchError ( ` Invalid response body while trying to fetch ${ _this4 . url } : ${ err . message } ` , 'system' , err ) ) ;
}
} ) ;
body . on ( 'data' , function ( chunk ) {
if ( abort || chunk === null ) {
return ;
}
if ( _this4 . size && accumBytes + chunk . length > _this4 . size ) {
abort = true ;
reject ( new FetchError ( ` content size at ${ _this4 . url } over limit: ${ _this4 . size } ` , 'max-size' ) ) ;
return ;
}
accumBytes += chunk . length ;
accum . push ( chunk ) ;
} ) ;
body . on ( 'end' , function ( ) {
if ( abort ) {
return ;
}
clearTimeout ( resTimeout ) ;
try {
resolve ( Buffer . concat ( accum , accumBytes ) ) ;
} catch ( err ) {
// handle streams that have accumulated too much data (issue #414)
reject ( new FetchError ( ` Could not create Buffer from response body for ${ _this4 . url } : ${ err . message } ` , 'system' , err ) ) ;
}
} ) ;
} ) ;
}
/ * *
* Detect buffer encoding and convert to target encoding
* ref : http : //www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
*
* @ param Buffer buffer Incoming buffer
* @ param String encoding Target encoding
* @ return String
* /
function convertBody ( buffer , headers ) {
if ( typeof convert !== 'function' ) {
throw new Error ( 'The package `encoding` must be installed to use the textConverted() function' ) ;
}
const ct = headers . get ( 'content-type' ) ;
let charset = 'utf-8' ;
let res , str ;
// header
if ( ct ) {
res = /charset=([^;]*)/i . exec ( ct ) ;
}
// no charset in content type, peek at response body for at most 1024 bytes
str = buffer . slice ( 0 , 1024 ) . toString ( ) ;
// html5
if ( ! res && str ) {
res = /<meta.+?charset=(['"])(.+?)\1/i . exec ( str ) ;
}
// html4
if ( ! res && str ) {
res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i . exec ( str ) ;
if ( ! res ) {
res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i . exec ( str ) ;
if ( res ) {
res . pop ( ) ; // drop last quote
}
}
if ( res ) {
res = /charset=(.*)/i . exec ( res . pop ( ) ) ;
}
}
// xml
if ( ! res && str ) {
res = /<\?xml.+?encoding=(['"])(.+?)\1/i . exec ( str ) ;
}
// found charset
if ( res ) {
charset = res . pop ( ) ;
// prevent decode issues when sites use incorrect encoding
// ref: https://hsivonen.fi/encoding-menu/
if ( charset === 'gb2312' || charset === 'gbk' ) {
charset = 'gb18030' ;
}
}
// turn raw buffers into a single utf-8 buffer
return convert ( buffer , 'UTF-8' , charset ) . toString ( ) ;
}
/ * *
* Detect a URLSearchParams object
* ref : https : //github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
*
* @ param Object obj Object to detect by type or brand
* @ return String
* /
function isURLSearchParams ( obj ) {
// Duck-typing as a necessary condition.
if ( typeof obj !== 'object' || typeof obj . append !== 'function' || typeof obj . delete !== 'function' || typeof obj . get !== 'function' || typeof obj . getAll !== 'function' || typeof obj . has !== 'function' || typeof obj . set !== 'function' ) {
return false ;
}
// Brand-checking and more duck-typing as optional condition.
return obj . constructor . name === 'URLSearchParams' || Object . prototype . toString . call ( obj ) === '[object URLSearchParams]' || typeof obj . sort === 'function' ;
}
/ * *
* Check if ` obj ` is a W3C ` Blob ` object ( which ` File ` inherits from )
* @ param { * } obj
* @ return { boolean }
* /
function isBlob ( obj ) {
return typeof obj === 'object' && typeof obj . arrayBuffer === 'function' && typeof obj . type === 'string' && typeof obj . stream === 'function' && typeof obj . constructor === 'function' && typeof obj . constructor . name === 'string' && /^(Blob|File)$/ . test ( obj . constructor . name ) && /^(Blob|File)$/ . test ( obj [ Symbol . toStringTag ] ) ;
}
/ * *
* Clone body given Res / Req instance
*
* @ param Mixed instance Response or Request instance
* @ return Mixed
* /
function clone ( instance ) {
let p1 , p2 ;
let body = instance . body ;
// don't allow cloning a used body
if ( instance . bodyUsed ) {
throw new Error ( 'cannot clone body after it is used' ) ;
}
// check that body is a stream and not form-data object
// note: we can't clone the form-data object without having it as a dependency
if ( body instanceof Stream && typeof body . getBoundary !== 'function' ) {
// tee instance body
p1 = new PassThrough ( ) ;
p2 = new PassThrough ( ) ;
body . pipe ( p1 ) ;
body . pipe ( p2 ) ;
// set instance body to teed body and return the other teed body
instance [ INTERNALS ] . body = p1 ;
body = p2 ;
}
return body ;
}
/ * *
* Performs the operation "extract a `Content-Type` value from |object|" as
* specified in the specification :
* https : //fetch.spec.whatwg.org/#concept-bodyinit-extract
*
* This function assumes that instance . body is present .
*
* @ param Mixed instance Any options . body input
* /
function extractContentType ( body ) {
if ( body === null ) {
// body is null
return null ;
} else if ( typeof body === 'string' ) {
// body is string
return 'text/plain;charset=UTF-8' ;
} else if ( isURLSearchParams ( body ) ) {
// body is a URLSearchParams
return 'application/x-www-form-urlencoded;charset=UTF-8' ;
} else if ( isBlob ( body ) ) {
// body is blob
return body . type || null ;
} else if ( Buffer . isBuffer ( body ) ) {
// body is buffer
return null ;
} else if ( Object . prototype . toString . call ( body ) === '[object ArrayBuffer]' ) {
// body is ArrayBuffer
return null ;
} else if ( ArrayBuffer . isView ( body ) ) {
// body is ArrayBufferView
return null ;
} else if ( typeof body . getBoundary === 'function' ) {
// detect form data input from form-data module
return ` multipart/form-data;boundary= ${ body . getBoundary ( ) } ` ;
} else if ( body instanceof Stream ) {
// body is stream
// can't really do much about this
return null ;
} else {
// Body constructor defaults other things to string
return 'text/plain;charset=UTF-8' ;
}
}
/ * *
* The Fetch Standard treats this as if "total bytes" is a property on the body .
* For us , we have to explicitly get it with a function .
*
* ref : https : //fetch.spec.whatwg.org/#concept-body-total-bytes
*
* @ param Body instance Instance of Body
* @ return Number ? Number of bytes , or null if not possible
* /
function getTotalBytes ( instance ) {
const body = instance . body ;
if ( body === null ) {
// body is null
return 0 ;
} else if ( isBlob ( body ) ) {
return body . size ;
} else if ( Buffer . isBuffer ( body ) ) {
// body is buffer
return body . length ;
} else if ( body && typeof body . getLengthSync === 'function' ) {
// detect form data input from form-data module
if ( body . _lengthRetrievers && body . _lengthRetrievers . length == 0 || // 1.x
body . hasKnownLength && body . hasKnownLength ( ) ) {
// 2.x
return body . getLengthSync ( ) ;
}
return null ;
} else {
// body is stream
return null ;
}
}
/ * *
* Write a Body to a Node . js WritableStream ( e . g . http . Request ) object .
*
* @ param Body instance Instance of Body
* @ return Void
* /
function writeToStream ( dest , instance ) {
const body = instance . body ;
if ( body === null ) {
// body is null
dest . end ( ) ;
} else if ( isBlob ( body ) ) {
body . stream ( ) . pipe ( dest ) ;
} else if ( Buffer . isBuffer ( body ) ) {
// body is buffer
dest . write ( body ) ;
dest . end ( ) ;
} else {
// body is stream
body . pipe ( dest ) ;
}
}
// expose Promise
Body . Promise = global . Promise ;
/ * *
* headers . js
*
* Headers class offers convenient helpers
* /
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/ ;
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ ;
function validateName ( name ) {
name = ` ${ name } ` ;
if ( invalidTokenRegex . test ( name ) || name === '' ) {
throw new TypeError ( ` ${ name } is not a legal HTTP header name ` ) ;
}
}
function validateValue ( value ) {
value = ` ${ value } ` ;
if ( invalidHeaderCharRegex . test ( value ) ) {
throw new TypeError ( ` ${ value } is not a legal HTTP header value ` ) ;
}
}
/ * *
* Find the key in the map object given a header name .
*
* Returns undefined if not found .
*
* @ param String name Header name
* @ return String | Undefined
* /
function find ( map , name ) {
name = name . toLowerCase ( ) ;
for ( const key in map ) {
if ( key . toLowerCase ( ) === name ) {
return key ;
}
}
return undefined ;
}
const MAP = Symbol ( 'map' ) ;
class Headers {
/ * *
* Headers class
*
* @ param Object headers Response headers
* @ return Void
* /
constructor ( ) {
let init = arguments . length > 0 && arguments [ 0 ] !== undefined ? arguments [ 0 ] : undefined ;
this [ MAP ] = Object . create ( null ) ;
if ( init instanceof Headers ) {
const rawHeaders = init . raw ( ) ;
const headerNames = Object . keys ( rawHeaders ) ;
for ( const headerName of headerNames ) {
for ( const value of rawHeaders [ headerName ] ) {
this . append ( headerName , value ) ;
}
}
return ;
}
// We don't worry about converting prop to ByteString here as append()
// will handle it.
if ( init == null ) ; else if ( typeof init === 'object' ) {
const method = init [ Symbol . iterator ] ;
if ( method != null ) {
if ( typeof method !== 'function' ) {
throw new TypeError ( 'Header pairs must be iterable' ) ;
}
// sequence<sequence<ByteString>>
// Note: per spec we have to first exhaust the lists then process them
const pairs = [ ] ;
for ( const pair of init ) {
if ( typeof pair !== 'object' || typeof pair [ Symbol . iterator ] !== 'function' ) {
throw new TypeError ( 'Each header pair must be iterable' ) ;
}
pairs . push ( Array . from ( pair ) ) ;
}
for ( const pair of pairs ) {
if ( pair . length !== 2 ) {
throw new TypeError ( 'Each header pair must be a name/value tuple' ) ;
}
this . append ( pair [ 0 ] , pair [ 1 ] ) ;
}
} else {
// record<ByteString, ByteString>
for ( const key of Object . keys ( init ) ) {
const value = init [ key ] ;
this . append ( key , value ) ;
}
}
} else {
throw new TypeError ( 'Provided initializer must be an object' ) ;
}
}
/ * *
* Return combined header value given name
*
* @ param String name Header name
* @ return Mixed
* /
get ( name ) {
name = ` ${ name } ` ;
validateName ( name ) ;
const key = find ( this [ MAP ] , name ) ;
if ( key === undefined ) {
return null ;
}
return this [ MAP ] [ key ] . join ( ', ' ) ;
}
/ * *
* Iterate over all headers
*
* @ param Function callback Executed for each item with parameters ( value , name , thisArg )
* @ param Boolean thisArg ` this ` context for callback function
* @ return Void
* /
forEach ( callback ) {
let thisArg = arguments . length > 1 && arguments [ 1 ] !== undefined ? arguments [ 1 ] : undefined ;
let pairs = getHeaders ( this ) ;
let i = 0 ;
while ( i < pairs . length ) {
var _pairs$i = pairs [ i ] ;
const name = _pairs$i [ 0 ] ,
value = _pairs$i [ 1 ] ;
callback . call ( thisArg , value , name , this ) ;
pairs = getHeaders ( this ) ;
i ++ ;
}
}
/ * *
* Overwrite header values given name
*
* @ param String name Header name
* @ param String value Header value
* @ return Void
* /
set ( name , value ) {
name = ` ${ name } ` ;
value = ` ${ value } ` ;
validateName ( name ) ;
validateValue ( value ) ;
const key = find ( this [ MAP ] , name ) ;
this [ MAP ] [ key !== undefined ? key : name ] = [ value ] ;
}
/ * *
* Append a value onto existing header
*
* @ param String name Header name
* @ param String value Header value
* @ return Void
* /
append ( name , value ) {
name = ` ${ name } ` ;
value = ` ${ value } ` ;
validateName ( name ) ;
validateValue ( value ) ;
const key = find ( this [ MAP ] , name ) ;
if ( key !== undefined ) {
this [ MAP ] [ key ] . push ( value ) ;
} else {
this [ MAP ] [ name ] = [ value ] ;
}
}
/ * *
* Check for header name existence
*
* @ param String name Header name
* @ return Boolean
* /
has ( name ) {
name = ` ${ name } ` ;
validateName ( name ) ;
return find ( this [ MAP ] , name ) !== undefined ;
}
/ * *
* Delete all header values given name
*
* @ param String name Header name
* @ return Void
* /
delete ( name ) {
name = ` ${ name } ` ;
validateName ( name ) ;
const key = find ( this [ MAP ] , name ) ;
if ( key !== undefined ) {
delete this [ MAP ] [ key ] ;
}
}
/ * *
* Return raw headers ( non - spec api )
*
* @ return Object
* /
raw ( ) {
return this [ MAP ] ;
}
/ * *
* Get an iterator on keys .
*
* @ return Iterator
* /
keys ( ) {
return createHeadersIterator ( this , 'key' ) ;
}
/ * *
* Get an iterator on values .
*
* @ return Iterator
* /
values ( ) {
return createHeadersIterator ( this , 'value' ) ;
}
/ * *
* Get an iterator on entries .
*
* This is the default iterator of the Headers object .
*
* @ return Iterator
* /
[ Symbol . iterator ] ( ) {
return createHeadersIterator ( this , 'key+value' ) ;
}
}
Headers . prototype . entries = Headers . prototype [ Symbol . iterator ] ;
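// Illustrative sketch, not part of the generated bundle: header names are matched
// case-insensitively and get() joins repeated values with ", ". Values below are
// assumptions for the example.
//
//   const headers = new Headers({ Accept: "application/json" });
//   headers.append("X-Custom", "a");
//   headers.append("x-custom", "b");
//   headers.get("X-CUSTOM");  // "a, b"
//   headers.has("accept");    // true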
Object . defineProperty ( Headers . prototype , Symbol . toStringTag , {
value : 'Headers' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
Object . defineProperties ( Headers . prototype , {
get : { enumerable : true } ,
forEach : { enumerable : true } ,
set : { enumerable : true } ,
append : { enumerable : true } ,
has : { enumerable : true } ,
delete : { enumerable : true } ,
keys : { enumerable : true } ,
values : { enumerable : true } ,
entries : { enumerable : true }
} ) ;
function getHeaders ( headers ) {
let kind = arguments . length > 1 && arguments [ 1 ] !== undefined ? arguments [ 1 ] : 'key+value' ;
const keys = Object . keys ( headers [ MAP ] ) . sort ( ) ;
return keys . map ( kind === 'key' ? function ( k ) {
return k . toLowerCase ( ) ;
} : kind === 'value' ? function ( k ) {
return headers [ MAP ] [ k ] . join ( ', ' ) ;
} : function ( k ) {
return [ k . toLowerCase ( ) , headers [ MAP ] [ k ] . join ( ', ' ) ] ;
} ) ;
}
const INTERNAL = Symbol ( 'internal' ) ;
function createHeadersIterator ( target , kind ) {
const iterator = Object . create ( HeadersIteratorPrototype ) ;
iterator [ INTERNAL ] = {
target ,
kind ,
index : 0
} ;
return iterator ;
}
const HeadersIteratorPrototype = Object . setPrototypeOf ( {
next ( ) {
// istanbul ignore if
if ( ! this || Object . getPrototypeOf ( this ) !== HeadersIteratorPrototype ) {
throw new TypeError ( 'Value of `this` is not a HeadersIterator' ) ;
}
var _INTERNAL = this [ INTERNAL ] ;
const target = _INTERNAL . target ,
kind = _INTERNAL . kind ,
index = _INTERNAL . index ;
const values = getHeaders ( target , kind ) ;
const len = values . length ;
if ( index >= len ) {
return {
value : undefined ,
done : true
} ;
}
this [ INTERNAL ] . index = index + 1 ;
return {
value : values [ index ] ,
done : false
} ;
}
} , Object . getPrototypeOf ( Object . getPrototypeOf ( [ ] [ Symbol . iterator ] ( ) ) ) ) ;
Object . defineProperty ( HeadersIteratorPrototype , Symbol . toStringTag , {
value : 'HeadersIterator' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
/ * *
* Export the Headers object in a form that Node . js can consume .
*
* @ param Headers headers
* @ return Object
* /
function exportNodeCompatibleHeaders ( headers ) {
const obj = Object . assign ( { _ _proto _ _ : null } , headers [ MAP ] ) ;
// http.request() only supports string as Host header. This hack makes
// specifying custom Host header possible.
const hostHeaderKey = find ( headers [ MAP ] , 'Host' ) ;
if ( hostHeaderKey !== undefined ) {
obj [ hostHeaderKey ] = obj [ hostHeaderKey ] [ 0 ] ;
}
return obj ;
}
/ * *
* Create a Headers object from an object of headers , ignoring those that do
* not conform to HTTP grammar productions .
*
* @ param Object obj Object of headers
* @ return Headers
* /
function createHeadersLenient ( obj ) {
const headers = new Headers ( ) ;
for ( const name of Object . keys ( obj ) ) {
if ( invalidTokenRegex . test ( name ) ) {
continue ;
}
if ( Array . isArray ( obj [ name ] ) ) {
for ( const val of obj [ name ] ) {
if ( invalidHeaderCharRegex . test ( val ) ) {
continue ;
}
if ( headers [ MAP ] [ name ] === undefined ) {
headers [ MAP ] [ name ] = [ val ] ;
} else {
headers [ MAP ] [ name ] . push ( val ) ;
}
}
} else if ( ! invalidHeaderCharRegex . test ( obj [ name ] ) ) {
headers [ MAP ] [ name ] = [ obj [ name ] ] ;
}
}
return headers ;
}
const INTERNALS$1 = Symbol ( 'Response internals' ) ;
// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS _CODES = http . STATUS _CODES ;
/ * *
* Response class
*
* @ param Stream body Readable stream
* @ param Object opts Response options
* @ return Void
* /
class Response {
constructor ( ) {
let body = arguments . length > 0 && arguments [ 0 ] !== undefined ? arguments [ 0 ] : null ;
let opts = arguments . length > 1 && arguments [ 1 ] !== undefined ? arguments [ 1 ] : { } ;
Body . call ( this , body , opts ) ;
const status = opts . status || 200 ;
const headers = new Headers ( opts . headers ) ;
if ( body != null && ! headers . has ( 'Content-Type' ) ) {
const contentType = extractContentType ( body ) ;
if ( contentType ) {
headers . append ( 'Content-Type' , contentType ) ;
}
}
this [ INTERNALS$1 ] = {
url : opts . url ,
status ,
statusText : opts . statusText || STATUS _CODES [ status ] ,
headers ,
counter : opts . counter
} ;
}
get url ( ) {
return this [ INTERNALS$1 ] . url || '' ;
}
get status ( ) {
return this [ INTERNALS$1 ] . status ;
}
/ * *
* Convenience property representing if the request ended normally
* /
get ok ( ) {
return this [ INTERNALS$1 ] . status >= 200 && this [ INTERNALS$1 ] . status < 300 ;
}
get redirected ( ) {
return this [ INTERNALS$1 ] . counter > 0 ;
}
get statusText ( ) {
return this [ INTERNALS$1 ] . statusText ;
}
get headers ( ) {
return this [ INTERNALS$1 ] . headers ;
}
/ * *
* Clone this response
*
* @ return Response
* /
clone ( ) {
return new Response ( clone ( this ) , {
url : this . url ,
status : this . status ,
statusText : this . statusText ,
headers : this . headers ,
ok : this . ok ,
redirected : this . redirected
} ) ;
}
}
Body . mixIn ( Response . prototype ) ;
Object . defineProperties ( Response . prototype , {
url : { enumerable : true } ,
status : { enumerable : true } ,
ok : { enumerable : true } ,
redirected : { enumerable : true } ,
statusText : { enumerable : true } ,
headers : { enumerable : true } ,
clone : { enumerable : true }
} ) ;
Object . defineProperty ( Response . prototype , Symbol . toStringTag , {
value : 'Response' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
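// Illustrative sketch, not part of the generated bundle: Response combines the Body
// mixin with status metadata. The body and options below are assumptions for the example.
//
//   const res = new Response(JSON.stringify({ ok: true }), {
//     status: 200,
//     headers: { "Content-Type": "application/json" }
//   });
//   res.ok;            // true
//   await res.json();  // { ok: true }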
const INTERNALS$2 = Symbol ( 'Request internals' ) ;
// fix an issue where "format", "parse" aren't a named export for node <10
const parse _url = Url . parse ;
const format _url = Url . format ;
const streamDestructionSupported = 'destroy' in Stream . Readable . prototype ;
/ * *
* Check if a value is an instance of Request .
*
* @ param Mixed input
* @ return Boolean
* /
function isRequest ( input ) {
return typeof input === 'object' && typeof input [ INTERNALS$2 ] === 'object' ;
}
function isAbortSignal ( signal ) {
const proto = signal && typeof signal === 'object' && Object . getPrototypeOf ( signal ) ;
return ! ! ( proto && proto . constructor . name === 'AbortSignal' ) ;
}
/ * *
* Request class
*
* @ param Mixed input Url or Request instance
* @ param Object init Custom options
* @ return Void
* /
class Request {
constructor ( input ) {
let init = arguments . length > 1 && arguments [ 1 ] !== undefined ? arguments [ 1 ] : { } ;
let parsedURL ;
// normalize input
if ( ! isRequest ( input ) ) {
if ( input && input . href ) {
// in order to support Node.js' Url objects; though WHATWG's URL objects
// will fall into this branch also (since their `toString()` will return
// `href` property anyway)
parsedURL = parse _url ( input . href ) ;
} else {
// coerce input to a string before attempting to parse
parsedURL = parse _url ( ` ${ input } ` ) ;
}
input = { } ;
} else {
parsedURL = parse _url ( input . url ) ;
}
let method = init . method || input . method || 'GET' ;
method = method . toUpperCase ( ) ;
if ( ( init . body != null || isRequest ( input ) && input . body !== null ) && ( method === 'GET' || method === 'HEAD' ) ) {
throw new TypeError ( 'Request with GET/HEAD method cannot have body' ) ;
}
let inputBody = init . body != null ? init . body : isRequest ( input ) && input . body !== null ? clone ( input ) : null ;
Body . call ( this , inputBody , {
timeout : init . timeout || input . timeout || 0 ,
size : init . size || input . size || 0
} ) ;
const headers = new Headers ( init . headers || input . headers || { } ) ;
if ( inputBody != null && ! headers . has ( 'Content-Type' ) ) {
const contentType = extractContentType ( inputBody ) ;
if ( contentType ) {
headers . append ( 'Content-Type' , contentType ) ;
}
}
let signal = isRequest ( input ) ? input . signal : null ;
if ( 'signal' in init ) signal = init . signal ;
if ( signal != null && ! isAbortSignal ( signal ) ) {
throw new TypeError ( 'Expected signal to be an instanceof AbortSignal' ) ;
}
this [ INTERNALS$2 ] = {
method ,
redirect : init . redirect || input . redirect || 'follow' ,
headers ,
parsedURL ,
signal
} ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// node-fetch-only options
this . follow = init . follow !== undefined ? init . follow : input . follow !== undefined ? input . follow : 20 ;
this . compress = init . compress !== undefined ? init . compress : input . compress !== undefined ? input . compress : true ;
this . counter = init . counter || input . counter || 0 ;
this . agent = init . agent || input . agent ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
get method ( ) {
return this [ INTERNALS$2 ] . method ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
get url ( ) {
return format _url ( this [ INTERNALS$2 ] . parsedURL ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
get headers ( ) {
return this [ INTERNALS$2 ] . headers ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
get redirect ( ) {
return this [ INTERNALS$2 ] . redirect ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
get signal ( ) {
return this [ INTERNALS$2 ] . signal ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
  /**
   * Clone this request
   *
   * @return  Request
   */
  clone() {
    return new Request(this);
  }
}

Body.mixIn(Request.prototype);
Object . defineProperty ( Request . prototype , Symbol . toStringTag , {
value : 'Request' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
Object . defineProperties ( Request . prototype , {
method : { enumerable : true } ,
url : { enumerable : true } ,
headers : { enumerable : true } ,
redirect : { enumerable : true } ,
clone : { enumerable : true } ,
signal : { enumerable : true }
} ) ;
2020-01-27 16:37:12 +01:00
/**
 * Convert a Request to Node.js http request options.
 *
 * @param   Request  A Request instance
 * @return  Object   The options object to be passed to http.request
 */
function getNodeRequestOptions(request) {
  const parsedURL = request[INTERNALS$2].parsedURL;
  const headers = new Headers(request[INTERNALS$2].headers);
// fetch step 1.3
if ( ! headers . has ( 'Accept' ) ) {
headers . set ( 'Accept' , '*/*' ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Basic fetch
if ( ! parsedURL . protocol || ! parsedURL . hostname ) {
throw new TypeError ( 'Only absolute URLs are supported' ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( ! /^https?:$/ . test ( parsedURL . protocol ) ) {
throw new TypeError ( 'Only HTTP(S) protocols are supported' ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( request . signal && request . body instanceof Stream . Readable && ! streamDestructionSupported ) {
throw new Error ( 'Cancellation of streamed requests with AbortSignal is not supported in node < 8' ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// HTTP-network-or-cache fetch steps 2.4-2.7
let contentLengthValue = null ;
if ( request . body == null && /^(POST|PUT)$/i . test ( request . method ) ) {
contentLengthValue = '0' ;
}
if ( request . body != null ) {
const totalBytes = getTotalBytes ( request ) ;
if ( typeof totalBytes === 'number' ) {
contentLengthValue = String ( totalBytes ) ;
}
}
if ( contentLengthValue ) {
headers . set ( 'Content-Length' , contentLengthValue ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// HTTP-network-or-cache fetch step 2.11
if ( ! headers . has ( 'User-Agent' ) ) {
headers . set ( 'User-Agent' , 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)' ) ;
}
// HTTP-network-or-cache fetch step 2.15
if ( request . compress && ! headers . has ( 'Accept-Encoding' ) ) {
headers . set ( 'Accept-Encoding' , 'gzip,deflate' ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
let agent = request . agent ;
if ( typeof agent === 'function' ) {
agent = agent ( parsedURL ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( ! headers . has ( 'Connection' ) && ! agent ) {
headers . set ( 'Connection' , 'close' ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// HTTP-network fetch step 4.2
// chunked encoding is handled by Node.js
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
return Object . assign ( { } , parsedURL , {
method : request . method ,
headers : exportNodeCompatibleHeaders ( headers ) ,
agent
} ) ;
}
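// Hedged sketch (kept as a comment so the bundled runtime is unchanged): for a request
// built as `new Request('https://example.com/path?q=1')`, the object returned above is
// roughly the parsed URL merged with the request metadata, e.g.
//
//   {
//     protocol: 'https:',
//     hostname: 'example.com',
//     path: '/path?q=1',
//     method: 'GET',
//     headers: { Accept: ['*/*'], Connection: ['close'], /* ... */ },
//     agent: undefined
//   }
//
// The exact fields come from Url.parse, so treat this shape as an approximation;
// 'https://example.com' is a placeholder, not a URL used elsewhere in this file.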
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
/**
 * abort-error.js
 *
 * AbortError interface for cancelled requests
 */

/**
 * Create AbortError instance
 *
 * @param   String  message  Error message for human
 * @return  AbortError
 */
function AbortError(message) {
  Error.call(this, message);

  this.type = 'aborted';
  this.message = message;

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';

// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
/**
 * Fetch function
 *
 * @param   Mixed   url   Absolute url or Request instance
 * @param   Object  opts  Fetch options
 * @return  Promise
 */
function fetch(url, opts) {
// allow custom promise
if ( ! fetch . Promise ) {
throw new Error ( 'native promise missing, set fetch.Promise to your favorite alternative' ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
Body . Promise = fetch . Promise ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// wrap http.request into fetch
return new fetch . Promise ( function ( resolve , reject ) {
// build request object
const request = new Request ( url , opts ) ;
const options = getNodeRequestOptions ( request ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
const send = ( options . protocol === 'https:' ? https : http ) . request ;
const signal = request . signal ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
let response = null ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
const abort = function abort ( ) {
let error = new AbortError ( 'The user aborted a request.' ) ;
reject ( error ) ;
if ( request . body && request . body instanceof Stream . Readable ) {
request . body . destroy ( error ) ;
}
if ( ! response || ! response . body ) return ;
response . body . emit ( 'error' , error ) ;
} ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( signal && signal . aborted ) {
abort ( ) ;
return ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
const abortAndFinalize = function abortAndFinalize ( ) {
abort ( ) ;
finalize ( ) ;
} ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// send request
const req = send ( options ) ;
let reqTimeout ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( signal ) {
signal . addEventListener ( 'abort' , abortAndFinalize ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
function finalize ( ) {
req . abort ( ) ;
if ( signal ) signal . removeEventListener ( 'abort' , abortAndFinalize ) ;
clearTimeout ( reqTimeout ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( request . timeout ) {
req . once ( 'socket' , function ( socket ) {
reqTimeout = setTimeout ( function ( ) {
reject ( new FetchError ( ` network timeout at: ${ request . url } ` , 'request-timeout' ) ) ;
finalize ( ) ;
} , request . timeout ) ;
} ) ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
req . on ( 'error' , function ( err ) {
reject ( new FetchError ( ` request to ${ request . url } failed, reason: ${ err . message } ` , 'system' , err ) ) ;
finalize ( ) ;
} ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
req . on ( 'response' , function ( res ) {
clearTimeout ( reqTimeout ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
const headers = createHeadersLenient ( res . headers ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// HTTP fetch step 5
if ( fetch . isRedirect ( res . statusCode ) ) {
// HTTP fetch step 5.2
const location = headers . get ( 'Location' ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// HTTP fetch step 5.3
const locationURL = location === null ? null : resolve _url ( request . url , location ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// HTTP fetch step 5.5
switch ( request . redirect ) {
case 'error' :
reject ( new FetchError ( ` uri requested responds with a redirect, redirect mode is set to error: ${ request . url } ` , 'no-redirect' ) ) ;
finalize ( ) ;
return ;
case 'manual' :
// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
if ( locationURL !== null ) {
// handle corrupted header
try {
headers . set ( 'Location' , locationURL ) ;
} catch ( err ) {
// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
reject ( err ) ;
}
}
break ;
case 'follow' :
// HTTP-redirect fetch step 2
if ( locationURL === null ) {
break ;
}
// HTTP-redirect fetch step 5
if ( request . counter >= request . follow ) {
reject ( new FetchError ( ` maximum redirect reached at: ${ request . url } ` , 'max-redirect' ) ) ;
finalize ( ) ;
return ;
}
// HTTP-redirect fetch step 6 (counter increment)
// Create a new Request object.
const requestOpts = {
headers : new Headers ( request . headers ) ,
follow : request . follow ,
counter : request . counter + 1 ,
agent : request . agent ,
compress : request . compress ,
method : request . method ,
body : request . body ,
signal : request . signal ,
timeout : request . timeout ,
size : request . size
} ;
// HTTP-redirect fetch step 9
if ( res . statusCode !== 303 && request . body && getTotalBytes ( request ) === null ) {
reject ( new FetchError ( 'Cannot follow redirect with body being a readable stream' , 'unsupported-redirect' ) ) ;
finalize ( ) ;
return ;
}
// HTTP-redirect fetch step 11
if ( res . statusCode === 303 || ( res . statusCode === 301 || res . statusCode === 302 ) && request . method === 'POST' ) {
requestOpts . method = 'GET' ;
requestOpts . body = undefined ;
requestOpts . headers . delete ( 'content-length' ) ;
}
// HTTP-redirect fetch step 15
resolve ( fetch ( new Request ( locationURL , requestOpts ) ) ) ;
finalize ( ) ;
return ;
}
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// prepare response
res . once ( 'end' , function ( ) {
if ( signal ) signal . removeEventListener ( 'abort' , abortAndFinalize ) ;
} ) ;
let body = res . pipe ( new PassThrough$1 ( ) ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
const response _options = {
url : request . url ,
status : res . statusCode ,
statusText : res . statusMessage ,
headers : headers ,
size : request . size ,
timeout : request . timeout ,
counter : request . counter
} ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// HTTP-network fetch step 12.1.1.3
const codings = headers . get ( 'Content-Encoding' ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// HTTP-network fetch step 12.1.1.4: handle content codings
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// in following scenarios we ignore compression support
// 1. compression support is disabled
// 2. HEAD request
// 3. no Content-Encoding header
// 4. no content response (204)
// 5. content not modified response (304)
if ( ! request . compress || request . method === 'HEAD' || codings === null || res . statusCode === 204 || res . statusCode === 304 ) {
response = new Response ( body , response _options ) ;
resolve ( response ) ;
return ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// For Node v6+
// Be less strict when decoding compressed responses, since sometimes
// servers send slightly invalid responses that are still accepted
// by common browsers.
// Always using Z_SYNC_FLUSH is what cURL does.
const zlibOptions = {
flush : zlib . Z _SYNC _FLUSH ,
finishFlush : zlib . Z _SYNC _FLUSH
} ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// for gzip
if ( codings == 'gzip' || codings == 'x-gzip' ) {
body = body . pipe ( zlib . createGunzip ( zlibOptions ) ) ;
response = new Response ( body , response _options ) ;
resolve ( response ) ;
return ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// for deflate
if ( codings == 'deflate' || codings == 'x-deflate' ) {
// handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
const raw = res . pipe ( new PassThrough$1 ( ) ) ;
raw . once ( 'data' , function ( chunk ) {
// see http://stackoverflow.com/questions/37519828
if ( ( chunk [ 0 ] & 0x0F ) === 0x08 ) {
body = body . pipe ( zlib . createInflate ( ) ) ;
} else {
body = body . pipe ( zlib . createInflateRaw ( ) ) ;
}
response = new Response ( body , response _options ) ;
resolve ( response ) ;
} ) ;
return ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// for br
if ( codings == 'br' && typeof zlib . createBrotliDecompress === 'function' ) {
body = body . pipe ( zlib . createBrotliDecompress ( ) ) ;
response = new Response ( body , response _options ) ;
resolve ( response ) ;
return ;
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// otherwise, use response as-is
response = new Response ( body , response _options ) ;
resolve ( response ) ;
} ) ;
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
writeToStream ( req , request ) ;
} ) ;
}

/**
 * Redirect code matching
 *
 * @param   Number   code  Status code
 * @return  Boolean
 */
fetch.isRedirect = function (code) {
  return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
};

// expose Promise
fetch.Promise = global.Promise;

module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.default = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
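// Hedged usage sketch (comment only; not executed by the bundle). The exported function
// follows the node-fetch 2.x API:
//
//   fetch('https://example.com/api', { method: 'POST', body: 'hi', timeout: 5000 })
//     .then(res => res.text())
//     .then(console.log)
//     .catch(err => console.error(err.name, err.message));
//
// 'https://example.com/api' is a placeholder URL chosen for illustration.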
/***/ } ) ,
/***/ 1223:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

var wrappy = __nccwpck_require__(2940)
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)
once . proto = once ( function ( ) {
Object . defineProperty ( Function . prototype , 'once' , {
value : function ( ) {
return once ( this )
} ,
configurable : true
} )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
Object . defineProperty ( Function . prototype , 'onceStrict' , {
value : function ( ) {
return onceStrict ( this )
} ,
configurable : true
} )
} )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
function once ( fn ) {
var f = function ( ) {
if ( f . called ) return f . value
f . called = true
return f . value = fn . apply ( this , arguments )
}
f . called = false
return f
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
function onceStrict ( fn ) {
var f = function ( ) {
if ( f . called )
throw new Error ( f . onceError )
f . called = true
return f . value = fn . apply ( this , arguments )
}
var name = fn . name || 'Function wrapped with `once`'
f . onceError = name + " shouldn't be called more than once"
f . called = false
return f
}
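// Hedged usage sketch (comment only): `once(fn)` returns a wrapper that runs `fn` a single
// time and afterwards keeps returning the first result, while `onceStrict(fn)` throws on a
// second call instead:
//
//   var init = once(function () { return Math.random() })
//   init() === init()            // true: the second call returns the cached value
//
//   var strictInit = onceStrict(function () {})
//   strictInit(); strictInit()   // the second call throws "... shouldn't be called more than once"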
/***/ } ) ,
/***/ 5911:
/***/ ((module, exports) => {

exports = module.exports = SemVer

var debug
/* istanbul ignore next */
if (typeof process === 'object' &&
    process.env &&
    process.env.NODE_DEBUG &&
    /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
  debug = function () {
    var args = Array.prototype.slice.call(arguments, 0)
    args.unshift('SEMVER')
    console.log.apply(console, args)
  }
} else {
  debug = function () {}
}

// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports.SEMVER_SPEC_VERSION = '2.0.0'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var MAX _LENGTH = 256
var MAX _SAFE _INTEGER = Number . MAX _SAFE _INTEGER ||
/* istanbul ignore next */ 9007199254740991
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Max safe segment length for coercion.
var MAX _SAFE _COMPONENT _LENGTH = 16
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// The actual regexps go on exports.re
var re = exports . re = [ ]
var src = exports . src = [ ]
var R = 0
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var NUMERICIDENTIFIER = R ++
src [ NUMERICIDENTIFIER ] = '0|[1-9]\\d*'
var NUMERICIDENTIFIERLOOSE = R ++
src [ NUMERICIDENTIFIERLOOSE ] = '[0-9]+'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var NONNUMERICIDENTIFIER = R ++
src [ NONNUMERICIDENTIFIER ] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// ## Main Version
// Three dot-separated numeric identifiers.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var MAINVERSION = R ++
src [ MAINVERSION ] = '(' + src [ NUMERICIDENTIFIER ] + ')\\.' +
'(' + src [ NUMERICIDENTIFIER ] + ')\\.' +
'(' + src [ NUMERICIDENTIFIER ] + ')'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var MAINVERSIONLOOSE = R ++
src [ MAINVERSIONLOOSE ] = '(' + src [ NUMERICIDENTIFIERLOOSE ] + ')\\.' +
'(' + src [ NUMERICIDENTIFIERLOOSE ] + ')\\.' +
'(' + src [ NUMERICIDENTIFIERLOOSE ] + ')'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var PRERELEASEIDENTIFIER = R ++
src [ PRERELEASEIDENTIFIER ] = '(?:' + src [ NUMERICIDENTIFIER ] +
'|' + src [ NONNUMERICIDENTIFIER ] + ')'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var PRERELEASEIDENTIFIERLOOSE = R ++
src [ PRERELEASEIDENTIFIERLOOSE ] = '(?:' + src [ NUMERICIDENTIFIERLOOSE ] +
'|' + src [ NONNUMERICIDENTIFIER ] + ')'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var PRERELEASE = R ++
src [ PRERELEASE ] = '(?:-(' + src [ PRERELEASEIDENTIFIER ] +
'(?:\\.' + src [ PRERELEASEIDENTIFIER ] + ')*))'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var PRERELEASELOOSE = R ++
src [ PRERELEASELOOSE ] = '(?:-?(' + src [ PRERELEASEIDENTIFIERLOOSE ] +
'(?:\\.' + src [ PRERELEASEIDENTIFIERLOOSE ] + ')*))'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var BUILDIDENTIFIER = R ++
src [ BUILDIDENTIFIER ] = '[0-9A-Za-z-]+'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var BUILD = R ++
src [ BUILD ] = '(?:\\+(' + src [ BUILDIDENTIFIER ] +
'(?:\\.' + src [ BUILDIDENTIFIER ] + ')*))'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var FULL = R ++
var FULLPLAIN = 'v?' + src [ MAINVERSION ] +
src [ PRERELEASE ] + '?' +
src [ BUILD ] + '?'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
src [ FULL ] = '^' + FULLPLAIN + '$'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
var LOOSEPLAIN = '[v=\\s]*' + src [ MAINVERSIONLOOSE ] +
src [ PRERELEASELOOSE ] + '?' +
src [ BUILD ] + '?'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var LOOSE = R ++
src [ LOOSE ] = '^' + LOOSEPLAIN + '$'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var GTLT = R ++
src [ GTLT ] = '((?:<|>)?=?)'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
var XRANGEIDENTIFIERLOOSE = R ++
src [ XRANGEIDENTIFIERLOOSE ] = src [ NUMERICIDENTIFIERLOOSE ] + '|x|X|\\*'
var XRANGEIDENTIFIER = R ++
src [ XRANGEIDENTIFIER ] = src [ NUMERICIDENTIFIER ] + '|x|X|\\*'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var XRANGEPLAIN = R ++
src [ XRANGEPLAIN ] = '[v=\\s]*(' + src [ XRANGEIDENTIFIER ] + ')' +
'(?:\\.(' + src [ XRANGEIDENTIFIER ] + ')' +
'(?:\\.(' + src [ XRANGEIDENTIFIER ] + ')' +
'(?:' + src [ PRERELEASE ] + ')?' +
src [ BUILD ] + '?' +
')?)?'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var XRANGEPLAINLOOSE = R ++
src [ XRANGEPLAINLOOSE ] = '[v=\\s]*(' + src [ XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:\\.(' + src [ XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:\\.(' + src [ XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:' + src [ PRERELEASELOOSE ] + ')?' +
src [ BUILD ] + '?' +
')?)?'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var XRANGE = R ++
src [ XRANGE ] = '^' + src [ GTLT ] + '\\s*' + src [ XRANGEPLAIN ] + '$'
var XRANGELOOSE = R ++
src [ XRANGELOOSE ] = '^' + src [ GTLT ] + '\\s*' + src [ XRANGEPLAINLOOSE ] + '$'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
var COERCE = R ++
src [ COERCE ] = '(?:^|[^\\d])' +
'(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '})' +
'(?:\\.(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '}))?' +
'(?:\\.(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '}))?' +
'(?:$|[^\\d])'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Tilde ranges.
// Meaning is "reasonably at or greater than"
var LONETILDE = R ++
src [ LONETILDE ] = '(?:~>?)'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var TILDETRIM = R ++
src [ TILDETRIM ] = '(\\s*)' + src [ LONETILDE ] + '\\s+'
re [ TILDETRIM ] = new RegExp ( src [ TILDETRIM ] , 'g' )
var tildeTrimReplace = '$1~'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var TILDE = R ++
src [ TILDE ] = '^' + src [ LONETILDE ] + src [ XRANGEPLAIN ] + '$'
var TILDELOOSE = R ++
src [ TILDELOOSE ] = '^' + src [ LONETILDE ] + src [ XRANGEPLAINLOOSE ] + '$'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Caret ranges.
// Meaning is "at least and backwards compatible with"
var LONECARET = R ++
src [ LONECARET ] = '(?:\\^)'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var CARETTRIM = R ++
src [ CARETTRIM ] = '(\\s*)' + src [ LONECARET ] + '\\s+'
re [ CARETTRIM ] = new RegExp ( src [ CARETTRIM ] , 'g' )
var caretTrimReplace = '$1^'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var CARET = R ++
src [ CARET ] = '^' + src [ LONECARET ] + src [ XRANGEPLAIN ] + '$'
var CARETLOOSE = R ++
src [ CARETLOOSE ] = '^' + src [ LONECARET ] + src [ XRANGEPLAINLOOSE ] + '$'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// A simple gt/lt/eq thing, or just "" to indicate "any version"
var COMPARATORLOOSE = R ++
src [ COMPARATORLOOSE ] = '^' + src [ GTLT ] + '\\s*(' + LOOSEPLAIN + ')$|^$'
var COMPARATOR = R ++
src [ COMPARATOR ] = '^' + src [ GTLT ] + '\\s*(' + FULLPLAIN + ')$|^$'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
var COMPARATORTRIM = R ++
src [ COMPARATORTRIM ] = '(\\s*)' + src [ GTLT ] +
'\\s*(' + LOOSEPLAIN + '|' + src [ XRANGEPLAIN ] + ')'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// this one has to use the /g flag
re [ COMPARATORTRIM ] = new RegExp ( src [ COMPARATORTRIM ] , 'g' )
var comparatorTrimReplace = '$1$2$3'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
var HYPHENRANGE = R ++
src [ HYPHENRANGE ] = '^\\s*(' + src [ XRANGEPLAIN ] + ')' +
'\\s+-\\s+' +
'(' + src [ XRANGEPLAIN ] + ')' +
'\\s*$'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var HYPHENRANGELOOSE = R ++
src [ HYPHENRANGELOOSE ] = '^\\s*(' + src [ XRANGEPLAINLOOSE ] + ')' +
'\\s+-\\s+' +
'(' + src [ XRANGEPLAINLOOSE ] + ')' +
'\\s*$'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Star ranges basically just allow anything at all.
var STAR = R ++
src [ STAR ] = '(<|>)?=?\\s*\\*'
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for ( var i = 0 ; i < R ; i ++ ) {
debug ( i , src [ i ] )
if ( ! re [ i ] ) {
re [ i ] = new RegExp ( src [ i ] )
}
}
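// Hedged sketch (comment only): after this loop, re[FULL] is the strict matcher, e.g.
//
//   re[FULL].test('1.2.3-beta.1+build.5')   // => true
//   re[FULL].test('1.2')                    // => false (all three numeric parts are required)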
2021-02-22 00:27:22 +01:00
exports . parse = parse
function parse ( version , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
2021-02-22 00:27:22 +01:00
if ( version instanceof SemVer ) {
return version
}
2021-02-22 00:27:22 +01:00
if ( typeof version !== 'string' ) {
return null
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( version . length > MAX _LENGTH ) {
return null
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var r = options . loose ? re [ LOOSE ] : re [ FULL ]
if ( ! r . test ( version ) ) {
return null
}
2021-02-22 00:27:22 +01:00
try {
return new SemVer ( version , options )
} catch ( er ) {
return null
}
}
2021-02-22 00:27:22 +01:00
exports . valid = valid
function valid ( version , options ) {
var v = parse ( version , options )
return v ? v . version : null
}
2021-02-22 00:27:22 +01:00
exports . clean = clean
function clean ( version , options ) {
var s = parse ( version . trim ( ) . replace ( /^[=v]+/ , '' ) , options )
return s ? s . version : null
}
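// Hedged usage sketch (comment only) for the three helpers above:
//
//   parse('1.2.3-beta.1')     // => a SemVer instance (or null for unparseable input)
//   valid('1.2.3')            // => '1.2.3'
//   valid('not a version')    // => null
//   clean('  =v1.2.3  ')      // => '1.2.3' (trims and strips a leading '=' / 'v')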
2021-02-22 00:27:22 +01:00
exports . SemVer = SemVer
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
function SemVer ( version , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
2021-02-22 00:27:22 +01:00
if ( version instanceof SemVer ) {
if ( version . loose === options . loose ) {
return version
} else {
version = version . version
}
} else if ( typeof version !== 'string' ) {
throw new TypeError ( 'Invalid Version: ' + version )
}
2021-02-22 00:27:22 +01:00
if ( version . length > MAX _LENGTH ) {
throw new TypeError ( 'version is longer than ' + MAX _LENGTH + ' characters' )
}
2021-02-22 00:27:22 +01:00
if ( ! ( this instanceof SemVer ) ) {
return new SemVer ( version , options )
}
2021-02-22 00:27:22 +01:00
debug ( 'SemVer' , version , options )
this . options = options
this . loose = ! ! options . loose
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var m = version . trim ( ) . match ( options . loose ? re [ LOOSE ] : re [ FULL ] )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( ! m ) {
throw new TypeError ( 'Invalid Version: ' + version )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
this . raw = version
// these are actually numbers
this . major = + m [ 1 ]
this . minor = + m [ 2 ]
this . patch = + m [ 3 ]
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( this . major > MAX _SAFE _INTEGER || this . major < 0 ) {
throw new TypeError ( 'Invalid major version' )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( this . minor > MAX _SAFE _INTEGER || this . minor < 0 ) {
throw new TypeError ( 'Invalid minor version' )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( this . patch > MAX _SAFE _INTEGER || this . patch < 0 ) {
throw new TypeError ( 'Invalid patch version' )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// numberify any prerelease numeric ids
if ( ! m [ 4 ] ) {
this . prerelease = [ ]
} else {
this . prerelease = m [ 4 ] . split ( '.' ) . map ( function ( id ) {
if ( /^[0-9]+$/ . test ( id ) ) {
var num = + id
if ( num >= 0 && num < MAX _SAFE _INTEGER ) {
return num
}
}
return id
} )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
this . build = m [ 5 ] ? m [ 5 ] . split ( '.' ) : [ ]
this . format ( )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
SemVer . prototype . format = function ( ) {
this . version = this . major + '.' + this . minor + '.' + this . patch
if ( this . prerelease . length ) {
this . version += '-' + this . prerelease . join ( '.' )
}
return this . version
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
SemVer . prototype . toString = function ( ) {
return this . version
}
2021-02-22 00:27:22 +01:00
SemVer . prototype . compare = function ( other ) {
debug ( 'SemVer.compare' , this . version , this . options , other )
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
return this . compareMain ( other ) || this . comparePre ( other )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
SemVer . prototype . compareMain = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
return compareIdentifiers ( this . major , other . major ) ||
compareIdentifiers ( this . minor , other . minor ) ||
compareIdentifiers ( this . patch , other . patch )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
SemVer . prototype . comparePre = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// NOT having a prerelease is > having one
if ( this . prerelease . length && ! other . prerelease . length ) {
return - 1
} else if ( ! this . prerelease . length && other . prerelease . length ) {
return 1
} else if ( ! this . prerelease . length && ! other . prerelease . length ) {
return 0
}
2021-02-22 00:27:22 +01:00
var i = 0
do {
var a = this . prerelease [ i ]
var b = other . prerelease [ i ]
debug ( 'prerelease compare' , i , a , b )
if ( a === undefined && b === undefined ) {
return 0
} else if ( b === undefined ) {
return 1
} else if ( a === undefined ) {
return - 1
} else if ( a === b ) {
continue
} else {
return compareIdentifiers ( a , b )
}
} while ( ++ i )
}
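// Hedged sketch (comment only): prerelease identifiers compare numerically when both are
// numeric, otherwise lexically, and a shorter identifier list sorts first, e.g.
//
//   new SemVer('1.0.0-alpha').comparePre(new SemVer('1.0.0-alpha.1'))   // => -1
//   new SemVer('1.0.0-alpha.1').comparePre(new SemVer('1.0.0-beta'))    // => -1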
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
SemVer . prototype . compareBuild = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var i = 0
do {
var a = this . build [ i ]
var b = other . build [ i ]
debug ( 'prerelease compare' , i , a , b )
if ( a === undefined && b === undefined ) {
return 0
} else if ( b === undefined ) {
return 1
} else if ( a === undefined ) {
return - 1
} else if ( a === b ) {
continue
} else {
return compareIdentifiers ( a , b )
}
} while ( ++ i )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
SemVer . prototype . inc = function ( release , identifier ) {
switch ( release ) {
case 'premajor' :
this . prerelease . length = 0
this . patch = 0
this . minor = 0
this . major ++
this . inc ( 'pre' , identifier )
break
case 'preminor' :
this . prerelease . length = 0
this . patch = 0
this . minor ++
this . inc ( 'pre' , identifier )
break
case 'prepatch' :
// If this is already a prerelease, it will bump to the next version
// drop any prereleases that might already exist, since they are not
// relevant at this point.
this . prerelease . length = 0
this . inc ( 'patch' , identifier )
this . inc ( 'pre' , identifier )
break
// If the input is a non-prerelease version, this acts the same as
// prepatch.
case 'prerelease' :
if ( this . prerelease . length === 0 ) {
this . inc ( 'patch' , identifier )
}
this . inc ( 'pre' , identifier )
break
case 'major' :
// If this is a pre-major version, bump up to the same major version.
// Otherwise increment major.
// 1.0.0-5 bumps to 1.0.0
// 1.1.0 bumps to 2.0.0
if ( this . minor !== 0 ||
this . patch !== 0 ||
this . prerelease . length === 0 ) {
this . major ++
}
this . minor = 0
this . patch = 0
this . prerelease = [ ]
break
case 'minor' :
// If this is a pre-minor version, bump up to the same minor version.
// Otherwise increment minor.
// 1.2.0-5 bumps to 1.2.0
// 1.2.1 bumps to 1.3.0
if ( this . patch !== 0 || this . prerelease . length === 0 ) {
this . minor ++
}
this . patch = 0
this . prerelease = [ ]
break
case 'patch' :
// If this is not a pre-release version, it will increment the patch.
// If it is a pre-release it will bump up to the same patch version.
// 1.2.0-5 patches to 1.2.0
// 1.2.0 patches to 1.2.1
if ( this . prerelease . length === 0 ) {
this . patch ++
}
this . prerelease = [ ]
break
// This probably shouldn't be used publicly.
// 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
case 'pre' :
if ( this . prerelease . length === 0 ) {
this . prerelease = [ 0 ]
} else {
var i = this . prerelease . length
while ( -- i >= 0 ) {
if ( typeof this . prerelease [ i ] === 'number' ) {
this . prerelease [ i ] ++
i = - 2
}
}
if ( i === - 1 ) {
// didn't increment anything
this . prerelease . push ( 0 )
}
}
if ( identifier ) {
// 1.2.0-beta.1 bumps to 1.2.0-beta.2,
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
if ( this . prerelease [ 0 ] === identifier ) {
if ( isNaN ( this . prerelease [ 1 ] ) ) {
this . prerelease = [ identifier , 0 ]
}
} else {
this . prerelease = [ identifier , 0 ]
}
}
break
default :
throw new Error ( 'invalid increment argument: ' + release )
}
this . format ( )
this . raw = this . version
return this
}
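// Hedged behaviour sketch (comment only) for the increment logic above:
//
//   new SemVer('1.2.3').inc('patch').version               // => '1.2.4'
//   new SemVer('1.2.3').inc('preminor', 'beta').version    // => '1.3.0-beta.0'
//   new SemVer('1.3.0-beta.0').inc('prerelease').version   // => '1.3.0-beta.1'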
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . inc = inc
function inc ( version , release , loose , identifier ) {
if ( typeof ( loose ) === 'string' ) {
identifier = loose
loose = undefined
}
2021-02-22 00:27:22 +01:00
try {
return new SemVer ( version , loose ) . inc ( release , identifier ) . version
} catch ( er ) {
return null
}
2021-02-22 00:27:22 +01:00
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . diff = diff
function diff ( version1 , version2 ) {
if ( eq ( version1 , version2 ) ) {
return null
} else {
var v1 = parse ( version1 )
var v2 = parse ( version2 )
var prefix = ''
if ( v1 . prerelease . length || v2 . prerelease . length ) {
prefix = 'pre'
var defaultResult = 'prerelease'
}
2021-02-22 00:27:22 +01:00
for ( var key in v1 ) {
if ( key === 'major' || key === 'minor' || key === 'patch' ) {
if ( v1 [ key ] !== v2 [ key ] ) {
return prefix + key
}
}
}
return defaultResult // may be undefined
}
}
2021-02-22 00:27:22 +01:00
exports . compareIdentifiers = compareIdentifiers
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var numeric = /^[0-9]+$/
function compareIdentifiers ( a , b ) {
var anum = numeric . test ( a )
var bnum = numeric . test ( b )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( anum && bnum ) {
a = + a
b = + b
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
return a === b ? 0
: ( anum && ! bnum ) ? - 1
: ( bnum && ! anum ) ? 1
: a < b ? - 1
: 1
}
2021-02-22 00:27:22 +01:00
exports . rcompareIdentifiers = rcompareIdentifiers
function rcompareIdentifiers ( a , b ) {
return compareIdentifiers ( b , a )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . major = major
function major ( a , loose ) {
return new SemVer ( a , loose ) . major
}
2021-02-22 00:27:22 +01:00
exports . minor = minor
function minor ( a , loose ) {
return new SemVer ( a , loose ) . minor
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . patch = patch
function patch ( a , loose ) {
return new SemVer ( a , loose ) . patch
}
2021-02-22 00:27:22 +01:00
exports . compare = compare
function compare ( a , b , loose ) {
return new SemVer ( a , loose ) . compare ( new SemVer ( b , loose ) )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . compareLoose = compareLoose
function compareLoose ( a , b ) {
return compare ( a , b , true )
}
2021-02-22 00:27:22 +01:00
exports . compareBuild = compareBuild
function compareBuild ( a , b , loose ) {
var versionA = new SemVer ( a , loose )
var versionB = new SemVer ( b , loose )
return versionA . compare ( versionB ) || versionA . compareBuild ( versionB )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . rcompare = rcompare
function rcompare ( a , b , loose ) {
return compare ( b , a , loose )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . sort = sort
function sort ( list , loose ) {
return list . sort ( function ( a , b ) {
return exports . compareBuild ( a , b , loose )
} )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . rsort = rsort
function rsort ( list , loose ) {
return list . sort ( function ( a , b ) {
return exports . compareBuild ( b , a , loose )
} )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . gt = gt
function gt ( a , b , loose ) {
return compare ( a , b , loose ) > 0
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . lt = lt
function lt ( a , b , loose ) {
return compare ( a , b , loose ) < 0
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . eq = eq
function eq ( a , b , loose ) {
return compare ( a , b , loose ) === 0
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . neq = neq
function neq ( a , b , loose ) {
return compare ( a , b , loose ) !== 0
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . gte = gte
function gte ( a , b , loose ) {
return compare ( a , b , loose ) >= 0
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports . lte = lte
function lte ( a , b , loose ) {
return compare ( a , b , loose ) <= 0
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
exports.cmp = cmp
function cmp (a, op, b, loose) {
  switch (op) {
    case '===':
      if (typeof a === 'object')
        a = a.version
      if (typeof b === 'object')
        b = b.version
      return a === b

    case '!==':
      if (typeof a === 'object')
        a = a.version
      if (typeof b === 'object')
        b = b.version
      return a !== b

    case '':
    case '=':
    case '==':
      return eq(a, b, loose)

    case '!=':
      return neq(a, b, loose)

    case '>':
      return gt(a, b, loose)

    case '>=':
      return gte(a, b, loose)

    case '<':
      return lt(a, b, loose)

    case '<=':
      return lte(a, b, loose)

    default:
      throw new TypeError('Invalid operator: ' + op)
  }
}
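// Hedged usage sketch (comment only):
//
//   cmp('1.2.3', '>', '1.2.0')            // => true
//   cmp('1.2.3', '==', 'v1.2.3', true)    // => true (loose parsing tolerates the 'v' prefix)
//   cmp('1.2.3', '>>', '1.0.0')           // throws TypeError('Invalid operator: >>')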
2021-02-22 00:27:22 +01:00
exports . Comparator = Comparator
function Comparator ( comp , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( comp instanceof Comparator ) {
if ( comp . loose === ! ! options . loose ) {
return comp
} else {
comp = comp . value
}
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( ! ( this instanceof Comparator ) ) {
return new Comparator ( comp , options )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
debug ( 'comparator' , comp , options )
this . options = options
this . loose = ! ! options . loose
this . parse ( comp )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( this . semver === ANY ) {
this . value = ''
} else {
this . value = this . operator + this . semver . version
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
debug ( 'comp' , this )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var ANY = { }
Comparator . prototype . parse = function ( comp ) {
var r = this . options . loose ? re [ COMPARATORLOOSE ] : re [ COMPARATOR ]
var m = comp . match ( r )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( ! m ) {
throw new TypeError ( 'Invalid comparator: ' + comp )
}
2021-02-22 00:27:22 +01:00
this . operator = m [ 1 ] !== undefined ? m [ 1 ] : ''
if ( this . operator === '=' ) {
this . operator = ''
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// if it literally is just '>' or '' then allow anything.
if ( ! m [ 2 ] ) {
this . semver = ANY
} else {
this . semver = new SemVer ( m [ 2 ] , this . options . loose )
}
}
2021-02-22 00:27:22 +01:00
Comparator . prototype . toString = function ( ) {
return this . value
}
2021-02-22 00:27:22 +01:00
Comparator . prototype . test = function ( version ) {
debug ( 'Comparator.test' , version , this . options . loose )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( this . semver === ANY || version === ANY ) {
return true
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( typeof version === 'string' ) {
try {
version = new SemVer ( version , this . options )
} catch ( er ) {
return false
}
}
2021-02-22 00:27:22 +01:00
return cmp ( version , this . operator , this . semver , this . options )
}
2021-02-22 00:27:22 +01:00
Comparator . prototype . intersects = function ( comp , options ) {
if ( ! ( comp instanceof Comparator ) ) {
throw new TypeError ( 'a Comparator is required' )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
2020-01-27 16:37:12 +01:00
}
2021-02-22 00:27:22 +01:00
var rangeTmp
if ( this . operator === '' ) {
if ( this . value === '' ) {
return true
}
rangeTmp = new Range ( comp . value , options )
return satisfies ( this . value , rangeTmp , options )
} else if ( comp . operator === '' ) {
if ( comp . value === '' ) {
return true
}
rangeTmp = new Range ( this . value , options )
return satisfies ( comp . semver , rangeTmp , options )
}
2021-02-22 00:27:22 +01:00
var sameDirectionIncreasing =
( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '>=' || comp . operator === '>' )
var sameDirectionDecreasing =
( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '<=' || comp . operator === '<' )
var sameSemVer = this . semver . version === comp . semver . version
var differentDirectionsInclusive =
( this . operator === '>=' || this . operator === '<=' ) &&
( comp . operator === '>=' || comp . operator === '<=' )
var oppositeDirectionsLessThan =
cmp ( this . semver , '<' , comp . semver , options ) &&
( ( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '<=' || comp . operator === '<' ) )
var oppositeDirectionsGreaterThan =
cmp ( this . semver , '>' , comp . semver , options ) &&
( ( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '>=' || comp . operator === '>' ) )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
return sameDirectionIncreasing || sameDirectionDecreasing ||
( sameSemVer && differentDirectionsInclusive ) ||
oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
2021-02-22 00:27:22 +01:00
exports . Range = Range
function Range ( range , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( range instanceof Range ) {
if ( range . loose === ! ! options . loose &&
range . includePrerelease === ! ! options . includePrerelease ) {
return range
} else {
return new Range ( range . raw , options )
}
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( range instanceof Comparator ) {
return new Range ( range . value , options )
}
2021-02-22 00:27:22 +01:00
if ( ! ( this instanceof Range ) ) {
return new Range ( range , options )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
this . options = options
this . loose = ! ! options . loose
this . includePrerelease = ! ! options . includePrerelease
// First, split based on boolean or ||
this . raw = range
this . set = range . split ( /\s*\|\|\s*/ ) . map ( function ( range ) {
return this . parseRange ( range . trim ( ) )
} , this ) . filter ( function ( c ) {
// throw out any that are not relevant for whatever reason
return c . length
} )
if ( ! this . set . length ) {
throw new TypeError ( 'Invalid SemVer Range: ' + range )
}
2021-02-22 00:27:22 +01:00
this . format ( )
}
2021-02-22 00:27:22 +01:00
Range . prototype . format = function ( ) {
this . range = this . set . map ( function ( comps ) {
return comps . join ( ' ' ) . trim ( )
} ) . join ( '||' ) . trim ( )
return this . range
}
2021-02-22 00:27:22 +01:00
Range . prototype . toString = function ( ) {
return this . range
}
2021-02-22 00:27:22 +01:00
Range . prototype . parseRange = function ( range ) {
var loose = this . options . loose
range = range . trim ( )
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re [ HYPHENRANGELOOSE ] : re [ HYPHENRANGE ]
range = range . replace ( hr , hyphenReplace )
debug ( 'hyphen replace' , range )
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range . replace ( re [ COMPARATORTRIM ] , comparatorTrimReplace )
debug ( 'comparator trim' , range , re [ COMPARATORTRIM ] )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// `~ 1.2.3` => `~1.2.3`
range = range . replace ( re [ TILDETRIM ] , tildeTrimReplace )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// `^ 1.2.3` => `^1.2.3`
range = range . replace ( re [ CARETTRIM ] , caretTrimReplace )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// normalize spaces
range = range . split ( /\s+/ ) . join ( ' ' )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
// At this point, the range is completely trimmed and
// ready to be split into comparators.
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
var compRe = loose ? re [ COMPARATORLOOSE ] : re [ COMPARATOR ]
var set = range . split ( ' ' ) . map ( function ( comp ) {
return parseComparator ( comp , this . options )
} , this ) . join ( ' ' ) . split ( /\s+/ )
if ( this . options . loose ) {
// in loose mode, throw out any that are not valid comparators
set = set . filter ( function ( comp ) {
return ! ! comp . match ( compRe )
} )
}
2021-02-22 00:27:22 +01:00
set = set . map ( function ( comp ) {
return new Comparator ( comp , this . options )
} , this )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
return set
}
2021-02-22 00:27:22 +01:00
Range . prototype . intersects = function ( range , options ) {
if ( ! ( range instanceof Range ) ) {
throw new TypeError ( 'a Range is required' )
}
2021-02-22 00:27:22 +01:00
return this . set . some ( function ( thisComparators ) {
return (
isSatisfiable ( thisComparators , options ) &&
range . set . some ( function ( rangeComparators ) {
return (
isSatisfiable ( rangeComparators , options ) &&
thisComparators . every ( function ( thisComparator ) {
return rangeComparators . every ( function ( rangeComparator ) {
return thisComparator . intersects ( rangeComparator , options )
} )
} )
)
} )
)
} )
}
2021-02-22 00:27:22 +01:00
// take a set of comparators and determine whether there
// exists a version which can satisfy it
function isSatisfiable ( comparators , options ) {
var result = true
var remainingComparators = comparators . slice ( )
var testComparator = remainingComparators . pop ( )
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
while ( result && remainingComparators . length ) {
result = remainingComparators . every ( function ( otherComparator ) {
return testComparator . intersects ( otherComparator , options )
} )
testComparator = remainingComparators . pop ( )
}
return result
}
2021-02-22 00:27:22 +01:00
// Mostly just for testing and legacy API reasons
exports . toComparators = toComparators
function toComparators ( range , options ) {
return new Range ( range , options ) . set . map ( function ( comp ) {
return comp . map ( function ( c ) {
return c . value
} ) . join ( ' ' ) . trim ( ) . split ( ' ' )
} )
}
2021-02-22 00:27:22 +01:00
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator ( comp , options ) {
debug ( 'comp' , comp , options )
comp = replaceCarets ( comp , options )
debug ( 'caret' , comp )
comp = replaceTildes ( comp , options )
debug ( 'tildes' , comp )
comp = replaceXRanges ( comp , options )
debug ( 'xrange' , comp )
comp = replaceStars ( comp , options )
debug ( 'stars' , comp )
return comp
}
2021-02-22 00:27:22 +01:00
function isX ( id ) {
return ! id || id . toLowerCase ( ) === 'x' || id === '*'
}
2021-02-22 00:27:22 +01:00
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
function replaceTildes ( comp , options ) {
return comp . trim ( ) . split ( /\s+/ ) . map ( function ( comp ) {
return replaceTilde ( comp , options )
} ) . join ( ' ' )
}
2021-02-22 00:27:22 +01:00
function replaceTilde ( comp , options ) {
var r = options . loose ? re [ TILDELOOSE ] : re [ TILDE ]
return comp . replace ( r , function ( _ , M , m , p , pr ) {
debug ( 'tilde' , comp , _ , M , m , p , pr )
var ret
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( isX ( M ) ) {
ret = ''
} else if ( isX ( m ) ) {
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( isX ( p ) ) {
// ~1.2 == >=1.2.0 <1.3.0
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
} else if ( pr ) {
debug ( 'replaceTilde pr' , pr )
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0'
} else {
// ~1.2.3 == >=1.2.3 <1.3.0
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
debug ( 'tilde return' , ret )
return ret
} )
}
2021-02-22 00:27:22 +01:00
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets ( comp , options ) {
return comp . trim ( ) . split ( /\s+/ ) . map ( function ( comp ) {
return replaceCaret ( comp , options )
} ) . join ( ' ' )
}
2021-02-22 00:27:22 +01:00
function replaceCaret ( comp , options ) {
debug ( 'caret' , comp , options )
var r = options . loose ? re [ CARETLOOSE ] : re [ CARET ]
return comp . replace ( r , function ( _ , M , m , p , pr ) {
debug ( 'caret' , comp , _ , M , m , p , pr )
var ret
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( isX ( M ) ) {
ret = ''
} else if ( isX ( m ) ) {
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( isX ( p ) ) {
if ( M === '0' ) {
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
} else {
ret = '>=' + M + '.' + m + '.0 <' + ( + M + 1 ) + '.0.0'
}
} else if ( pr ) {
debug ( 'replaceCaret pr' , pr )
if ( M === '0' ) {
if ( m === '0' ) {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + m + '.' + ( + p + 1 )
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + ( + M + 1 ) + '.0.0'
}
} else {
debug ( 'no pr' )
if ( M === '0' ) {
if ( m === '0' ) {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + m + '.' + ( + p + 1 )
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + ( + M + 1 ) + '.0.0'
}
}
2021-02-22 00:27:22 +01:00
debug ( 'caret return' , ret )
return ret
} )
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
function replaceXRanges ( comp , options ) {
debug ( 'replaceXRanges' , comp , options )
return comp . split ( /\s+/ ) . map ( function ( comp ) {
return replaceXRange ( comp , options )
} ) . join ( ' ' )
}
2021-02-22 00:27:22 +01:00
function replaceXRange ( comp , options ) {
comp = comp . trim ( )
var r = options . loose ? re [ XRANGELOOSE ] : re [ XRANGE ]
return comp . replace ( r , function ( ret , gtlt , M , m , p , pr ) {
debug ( 'xRange' , comp , ret , gtlt , M , m , p , pr )
var xM = isX ( M )
var xm = xM || isX ( m )
var xp = xm || isX ( p )
var anyX = xp
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( gtlt === '=' && anyX ) {
gtlt = ''
}
2020-01-27 16:37:12 +01:00
2021-02-22 00:27:22 +01:00
if ( xM ) {
if ( gtlt === '>' || gtlt === '<' ) {
// nothing is allowed
ret = '<0.0.0'
} else {
// nothing is forbidden
ret = '*'
}
} else if ( gtlt && anyX ) {
// we know patch is an x, because we have any x at all.
// replace X with 0
if ( xm ) {
m = 0
}
p = 0
if ( gtlt === '>' ) {
// >1 => >=2.0.0
// >1.2 => >=1.3.0
// >1.2.3 => >= 1.2.4
gtlt = '>='
if ( xm ) {
M = + M + 1
m = 0
p = 0
} else {
m = + m + 1
p = 0
}
} else if ( gtlt === '<=' ) {
// <=0.7.x is actually <0.8.0, since any 0.7.x should
// pass. Similarly, <=7.x is actually <8.0.0, etc.
gtlt = '<'
if ( xm ) {
M = + M + 1
} else {
m = + m + 1
}
}
ret = gtlt + M + '.' + m + '.' + p
} else if ( xm ) {
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( xp ) {
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
}
debug ( 'xRange return' , ret )

return ret
} )
}
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars ( comp , options ) {
debug ( 'replaceStars' , comp , options )
// Looseness is ignored here. star is always as loose as it gets!
return comp . trim ( ) . replace ( re [ STAR ] , '' )
}
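// Illustrative sketch only: X-ranges and bare stars normalize through the same
// pipeline, e.g.
//   validRange('>1.2')   // => '>=1.3.0'
//   validRange('1.x')    // => '>=1.0.0 <2.0.0'
//   validRange('*')      // => '*'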
// This function is passed to string.replace(re[HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ( $0 ,
from , fM , fm , fp , fpr , fb ,
to , tM , tm , tp , tpr , tb ) {
if ( isX ( fM ) ) {
from = ''
} else if ( isX ( fm ) ) {
from = '>=' + fM + '.0.0'
} else if ( isX ( fp ) ) {
from = '>=' + fM + '.' + fm + '.0'
} else {
from = '>=' + from
}
if ( isX ( tM ) ) {
to = ''
} else if ( isX ( tm ) ) {
to = '<' + ( + tM + 1 ) + '.0.0'
} else if ( isX ( tp ) ) {
to = '<' + tM + '.' + ( + tm + 1 ) + '.0'
} else if ( tpr ) {
to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
} else {
to = '<=' + to
}
return ( from + ' ' + to ) . trim ( )
}
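// Illustrative sketch only:
//   validRange('1.2 - 3.4.5')   // => '>=1.2.0 <=3.4.5'
//   validRange('1.2.3 - 3.4')   // => '>=1.2.3 <3.5.0'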
// if ANY of the sets match ALL of its comparators, then pass
Range . prototype . test = function ( version ) {
if ( ! version ) {
return false
}
if ( typeof version === 'string' ) {
try {
version = new SemVer ( version , this . options )
} catch ( er ) {
return false
}
}
for ( var i = 0 ; i < this . set . length ; i ++ ) {
if ( testSet ( this . set [ i ] , version , this . options ) ) {
return true
}
}
return false
}
function testSet ( set , version , options ) {
for ( var i = 0 ; i < set . length ; i ++ ) {
if ( ! set [ i ] . test ( version ) ) {
return false
}
}
if ( version . prerelease . length && ! options . includePrerelease ) {
// Find the set of versions that are allowed to have prereleases
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
// That should allow `1.2.3-pr.2` to pass.
// However, `1.2.4-alpha.notready` should NOT be allowed,
// even though it's within the range set by the comparators.
for ( i = 0 ; i < set . length ; i ++ ) {
debug ( set [ i ] . semver )
if ( set [ i ] . semver === ANY ) {
continue
}
if ( set [ i ] . semver . prerelease . length > 0 ) {
var allowed = set [ i ] . semver
if ( allowed . major === version . major &&
allowed . minor === version . minor &&
allowed . patch === version . patch ) {
return true
}
}
}
// Version has a -pre, but it's not one of the ones we like.
return false
}
return true
}
exports . satisfies = satisfies
function satisfies ( version , range , options ) {
try {
range = new Range ( range , options )
} catch ( er ) {
return false
}
return range . test ( version )
}
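// Illustrative sketch only: prerelease versions satisfy a range only when one of the
// range's own comparators has a prerelease on the same [major, minor, patch] tuple
// (unless options.includePrerelease is set), e.g.
//   satisfies('1.2.4', '^1.2.3')              // => true
//   satisfies('1.2.3-pr.2', '^1.2.3-pr.1')    // => true
//   satisfies('1.2.4-alpha', '^1.2.3-pr.1')   // => false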
exports . maxSatisfying = maxSatisfying
function maxSatisfying ( versions , range , options ) {
var max = null
var maxSV = null
try {
var rangeObj = new Range ( range , options )
} catch ( er ) {
return null
}
versions . forEach ( function ( v ) {
if ( rangeObj . test ( v ) ) {
// satisfies(v, range, options)
if ( ! max || maxSV . compare ( v ) === - 1 ) {
// compare(max, v, true)
max = v
maxSV = new SemVer ( max , options )
}
}
} )
return max
}
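// Illustrative sketch only:
//   maxSatisfying(['1.2.3', '1.2.4', '2.0.0'], '~1.2.0')   // => '1.2.4'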
exports . minSatisfying = minSatisfying
function minSatisfying ( versions , range , options ) {
var min = null
var minSV = null
try {
var rangeObj = new Range ( range , options )
} catch ( er ) {
return null
}
versions . forEach ( function ( v ) {
if ( rangeObj . test ( v ) ) {
// satisfies(v, range, options)
if ( ! min || minSV . compare ( v ) === 1 ) {
// compare(min, v, true)
min = v
minSV = new SemVer ( min , options )
}
}
} )
return min
}
exports . minVersion = minVersion
function minVersion ( range , loose ) {
range = new Range ( range , loose )
var minver = new SemVer ( '0.0.0' )
if ( range . test ( minver ) ) {
return minver
}
minver = new SemVer ( '0.0.0-0' )
if ( range . test ( minver ) ) {
return minver
}
minver = null
for ( var i = 0 ; i < range . set . length ; ++ i ) {
var comparators = range . set [ i ]
comparators . forEach ( function ( comparator ) {
// Clone to avoid manipulating the comparator's semver object.
var compver = new SemVer ( comparator . semver . version )
switch ( comparator . operator ) {
case '>' :
if ( compver . prerelease . length === 0 ) {
compver . patch ++
} else {
compver . prerelease . push ( 0 )
}
compver . raw = compver . format ( )
/* fallthrough */
case '' :
case '>=' :
if ( ! minver || gt ( minver , compver ) ) {
minver = compver
}
break
case '<' :
case '<=' :
/* Ignore maximum versions */
break
/* istanbul ignore next */
default :
throw new Error ( 'Unexpected operation: ' + comparator . operator )
}
} )
}
if ( minver && range . test ( minver ) ) {
return minver
}
return null
}
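// Illustrative sketch only:
//   minVersion('>=1.2.3 <2.0.0').version   // => '1.2.3'
//   minVersion('>1.2.3').version           // => '1.2.4'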
exports . validRange = validRange
function validRange ( range , options ) {
try {
// Return '*' instead of '' so that truthiness works.
// This will throw if it's invalid anyway
return new Range ( range , options ) . range || '*'
} catch ( er ) {
return null
}
}
// Determine if version is less than all the versions possible in the range
exports . ltr = ltr
function ltr ( version , range , options ) {
return outside ( version , range , '<' , options )
}
// Determine if version is greater than all the versions possible in the range.
exports . gtr = gtr
function gtr ( version , range , options ) {
return outside ( version , range , '>' , options )
}
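// Illustrative sketch only:
//   gtr('2.0.1', '^1.2.3')   // => true  (above every version in the range)
//   ltr('1.0.0', '^1.2.3')   // => true  (below every version in the range)
//   gtr('1.5.0', '^1.2.3')   // => false (inside the range)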
exports . outside = outside
function outside ( version , range , hilo , options ) {
version = new SemVer ( version , options )
range = new Range ( range , options )
var gtfn , ltefn , ltfn , comp , ecomp
switch ( hilo ) {
case '>' :
gtfn = gt
ltefn = lte
ltfn = lt
comp = '>'
ecomp = '>='
break
case '<' :
gtfn = lt
ltefn = gte
ltfn = gt
comp = '<'
ecomp = '<='
break
default :
throw new TypeError ( 'Must provide a hilo val of "<" or ">"' )
}
// If it satisfies the range it is not outside
if ( satisfies ( version , range , options ) ) {
return false
}
// From now on, variable terms are as if we're in "gtr" mode.
// but note that everything is flipped for the "ltr" function.
for ( var i = 0 ; i < range . set . length ; ++ i ) {
var comparators = range . set [ i ]
var high = null
var low = null
comparators . forEach ( function ( comparator ) {
if ( comparator . semver === ANY ) {
comparator = new Comparator ( '>=0.0.0' )
}
high = high || comparator
low = low || comparator
if ( gtfn ( comparator . semver , high . semver , options ) ) {
high = comparator
} else if ( ltfn ( comparator . semver , low . semver , options ) ) {
low = comparator
}
} )
// If the edge version comparator has an operator then our version
// isn't outside it
if ( high . operator === comp || high . operator === ecomp ) {
return false
}
// If the lowest version comparator has an operator and our version
// is less than it then it isn't higher than the range
if ( ( ! low . operator || low . operator === comp ) &&
ltefn ( version , low . semver ) ) {
return false
} else if ( low . operator === ecomp && ltfn ( version , low . semver ) ) {
return false
}
}
return true
}
exports . prerelease = prerelease
function prerelease ( version , options ) {
var parsed = parse ( version , options )
return ( parsed && parsed . prerelease . length ) ? parsed . prerelease : null
}
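// Illustrative sketch only (numeric identifiers are returned as numbers):
//   prerelease('1.2.3-alpha.1')   // => ['alpha', 1]
//   prerelease('1.2.3')           // => null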
exports . intersects = intersects
function intersects ( r1 , r2 , options ) {
r1 = new Range ( r1 , options )
r2 = new Range ( r2 , options )
return r1 . intersects ( r2 )
}
exports . coerce = coerce
function coerce ( version , options ) {
if ( version instanceof SemVer ) {
return version
}
if ( typeof version !== 'string' ) {
return null
}
var match = version . match ( re [ COERCE ] )
if ( match == null ) {
return null
}
return parse ( match [ 1 ] +
'.' + ( match [ 2 ] || '0' ) +
'.' + ( match [ 3 ] || '0' ) , options )
}
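// Illustrative sketch only:
//   coerce('v2').version                 // => '2.0.0'
//   coerce('42.6.7.9.3-alpha').version   // => '42.6.7'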
/***/ } ) ,
/***/ 4294 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
module . exports = _ _nccwpck _require _ _ ( 4219 ) ;

/***/ } ) ,
/***/ 4219 :
/***/ ( ( _ _unused _webpack _module , exports , _ _nccwpck _require _ _ ) => {
"use strict" ;
var net = _ _nccwpck _require _ _ ( 1631 ) ;
var tls = _ _nccwpck _require _ _ ( 4016 ) ;
var http = _ _nccwpck _require _ _ ( 8605 ) ;
var https = _ _nccwpck _require _ _ ( 7211 ) ;
var events = _ _nccwpck _require _ _ ( 8614 ) ;
var assert = _ _nccwpck _require _ _ ( 2357 ) ;
var util = _ _nccwpck _require _ _ ( 1669 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
function httpOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
return agent ;
}
function httpsOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function httpOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
return agent ;
}
function httpsOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
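// Illustrative sketch only (proxy.example.com:3128 is a placeholder proxy, not a real
// endpoint): a tunneling agent is passed through the standard http/https request options.
//   var tunnelingAgent = httpsOverHttp({ proxy: { host: 'proxy.example.com', port: 3128 } });
//   https.request({ host: 'example.com', port: 443, agent: tunnelingAgent });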
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// A pending request wants to connect to the same origin server,
// so reuse the freed connection for it.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
TunnelingAgent . prototype . createSocket = function createSocket ( options , cb ) {
var self = this ;
var placeholder = { } ;
self . sockets . push ( placeholder ) ;
var connectOptions = mergeOptions ( { } , self . proxyOptions , {
method : 'CONNECT' ,
path : options . host + ':' + options . port ,
agent : false ,
headers : {
host : options . host + ':' + options . port
}
} ) ;
if ( options . localAddress ) {
connectOptions . localAddress = options . localAddress ;
}
if ( connectOptions . proxyAuth ) {
connectOptions . headers = connectOptions . headers || { } ;
connectOptions . headers [ 'Proxy-Authorization' ] = 'Basic ' +
new Buffer ( connectOptions . proxyAuth ) . toString ( 'base64' ) ;
}
debug ( 'making CONNECT request' ) ;
var connectReq = self . request ( connectOptions ) ;
connectReq . useChunkedEncodingByDefault = false ; // for v0.6
connectReq . once ( 'response' , onResponse ) ; // for v0.6
connectReq . once ( 'upgrade' , onUpgrade ) ; // for v0.6
connectReq . once ( 'connect' , onConnect ) ; // for v0.7 or later
connectReq . once ( 'error' , onError ) ;
connectReq . end ( ) ;
function onResponse ( res ) {
// Very hacky. This is necessary to avoid http-parser leaks.
res . upgrade = true ;
}
function onUpgrade ( res , socket , head ) {
// Hacky.
process . nextTick ( function ( ) {
onConnect ( res , socket , head ) ;
} ) ;
}
function onConnect ( res , socket , head ) {
connectReq . removeAllListeners ( ) ;
socket . removeAllListeners ( ) ;
if ( res . statusCode !== 200 ) {
debug ( 'tunneling socket could not be established, statusCode=%d' ,
res . statusCode ) ;
socket . destroy ( ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'statusCode=' + res . statusCode ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
if ( head . length > 0 ) {
debug ( 'got illegal response body from proxy' ) ;
socket . destroy ( ) ;
var error = new Error ( 'got illegal response body from proxy' ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
debug ( 'tunneling connection has been established' ) ;
self . sockets [ self . sockets . indexOf ( placeholder ) ] = socket ;
return cb ( socket ) ;
}
function onError ( cause ) {
connectReq . removeAllListeners ( ) ;
debug ( 'tunneling socket could not be established, cause=%s\n' ,
cause . message , cause . stack ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'cause=' + cause . message ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
}
} ;
TunnelingAgent . prototype . removeSocket = function removeSocket ( socket ) {
var pos = this . sockets . indexOf ( socket )
if ( pos === - 1 ) {
return ;
}
this . sockets . splice ( pos , 1 ) ;
var pending = this . requests . shift ( ) ;
if ( pending ) {
// If we have pending requests and a socket gets closed, a new one
// needs to be created to take over in the pool for the one that closed.
this . createSocket ( pending , function ( socket ) {
pending . request . onSocket ( socket ) ;
} ) ;
}
} ;
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
function toOptions ( host , port , localAddress ) {
if ( typeof host === 'string' ) { // since v0.10
return {
host : host ,
port : port ,
localAddress : localAddress
} ;
}
return host ; // for v0.11 or later
}
function mergeOptions ( target ) {
for ( var i = 1 , len = arguments . length ; i < len ; ++ i ) {
var overrides = arguments [ i ] ;
if ( typeof overrides === 'object' ) {
var keys = Object . keys ( overrides ) ;
for ( var j = 0 , keyLen = keys . length ; j < keyLen ; ++ j ) {
var k = keys [ j ] ;
if ( overrides [ k ] !== undefined ) {
target [ k ] = overrides [ k ] ;
}
}
}
}
return target ;
}
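// Illustrative sketch only: later arguments win, and undefined values are skipped, e.g.
//   mergeOptions({}, { a: 1 }, { a: undefined, b: 2 })   // => { a: 1, b: 2 }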
var debug ;
if ( process . env . NODE _DEBUG && /\btunnel\b/ . test ( process . env . NODE _DEBUG ) ) {
debug = function ( ) {
var args = Array . prototype . slice . call ( arguments ) ;
if ( typeof args [ 0 ] === 'string' ) {
args [ 0 ] = 'TUNNEL: ' + args [ 0 ] ;
} else {
args . unshift ( 'TUNNEL:' ) ;
}
console . error . apply ( console , args ) ;
}
} else {
debug = function ( ) { } ;
}
exports . debug = debug ; // for test
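// Illustrative sketch only: the logging above is switched on through Node's standard
// NODE_DEBUG variable, e.g.
//   NODE_DEBUG=tunnel node app.js   // app.js is a placeholder entry point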
/***/ } ) ,
/***/ 5030 :
/***/ ( ( _ _unused _webpack _module , exports ) => {
"use strict" ;

Object . defineProperty ( exports , "__esModule" , ( { value : true } ) ) ;
function getUserAgent ( ) {
if ( typeof navigator === "object" && "userAgent" in navigator ) {
return navigator . userAgent ;
}
if ( typeof process === "object" && "version" in process ) {
return ` Node.js/ ${ process . version . substr ( 1 ) } ( ${ process . platform } ; ${ process . arch } ) ` ;
}
return "<environment undetectable>" ;
}
exports . getUserAgent = getUserAgent ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 2707 :
/***/ ( ( module ) => {
/**
 * Convert array of 16 byte values to UUID string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 */
var byteToHex = [ ] ;
for ( var i = 0 ; i < 256 ; ++ i ) {
byteToHex [ i ] = ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ;
}
function bytesToUuid ( buf , offset ) {
var i = offset || 0 ;
var bth = byteToHex ;
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
return ( [ bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ] ) . join ( '' ) ;
}
module . exports = bytesToUuid ;
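// Illustrative sketch only:
//   bytesToUuid([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])
//   // => '00010203-0405-0607-0809-0a0b0c0d0e0f'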
/***/ } ) ,
/***/ 5859 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.

var crypto = _ _nccwpck _require _ _ ( 6417 ) ;
module . exports = function nodeRNG ( ) {
return crypto . randomBytes ( 16 ) ;
} ;
/***/ } ) ,
/***/ 824 :
/***/ ( ( module , _ _unused _webpack _exports , _ _nccwpck _require _ _ ) => {
var rng = _ _nccwpck _require _ _ ( 5859 ) ;
var bytesToUuid = _ _nccwpck _require _ _ ( 2707 ) ;
function v4 ( options , buf , offset ) {
var i = buf && offset || 0 ;
if ( typeof ( options ) == 'string' ) {
buf = options === 'binary' ? new Array ( 16 ) : null ;
options = null ;
}
options = options || { } ;
var rnds = options . random || ( options . rng || rng ) ( ) ;

// Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = ( rnds [ 6 ] & 0x0f ) | 0x40 ;
rnds [ 8 ] = ( rnds [ 8 ] & 0x3f ) | 0x80 ;
// Copy bytes to buffer, if provided
if ( buf ) {
for ( var ii = 0 ; ii < 16 ; ++ ii ) {
buf [ i + ii ] = rnds [ ii ] ;
}
}
return buf || bytesToUuid ( rnds ) ;
}
module . exports = v4 ;
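// Illustrative sketch only:
//   v4()                         // => a random UUID string such as '109156be-c4fb-41ea-b1b4-efe1671c5836'
//   v4(null, Buffer.alloc(16))   // => fills and returns the supplied 16-byte buffer instead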
/***/ } ) ,
/***/ 2940 :
/***/ ( ( module ) => {
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module . exports = wrappy
function wrappy ( fn , cb ) {
if ( fn && cb ) return wrappy ( fn ) ( cb )
if ( typeof fn !== 'function' )
throw new TypeError ( 'need wrapper function' )

Object . keys ( fn ) . forEach ( function ( k ) {
wrapper [ k ] = fn [ k ]
} )

return wrapper
function wrapper ( ) {
var args = new Array ( arguments . length )
for ( var i = 0 ; i < args . length ; i ++ ) {
args [ i ] = arguments [ i ]
}
var ret = fn . apply ( this , args )
var cb = args [ args . length - 1 ]
if ( typeof ret === 'function' && ret !== cb ) {
Object . keys ( cb ) . forEach ( function ( k ) {
ret [ k ] = cb [ k ]
} )
}
return ret
}
}
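// Illustrative sketch only: properties attached to the callback survive wrapping, e.g.
//   var wrapped = wrappy(function (cb) { return function () { return cb() } })
//   var cb = function () { return 'done' }
//   cb.note = 'kept'
//   wrapped(cb).note   // => 'kept'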
/***/ } ) ,
/***/ 2877 :
/***/ ( ( module ) => {
module . exports = eval ( "require" ) ( "encoding" ) ;
/***/ } ) ,
/***/ 2357 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "assert" ) ; ;

/***/ } ) ,

/***/ 3129 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "child_process" ) ; ;

/***/ } ) ,

/***/ 6417 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "crypto" ) ; ;

/***/ } ) ,

/***/ 8614 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "events" ) ; ;

/***/ } ) ,

/***/ 5747 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "fs" ) ; ;

/***/ } ) ,

/***/ 8605 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "http" ) ; ;

/***/ } ) ,

/***/ 7211 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "https" ) ; ;

/***/ } ) ,
/***/ 1631 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "net" ) ; ;

/***/ } ) ,

/***/ 2087 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "os" ) ; ;

/***/ } ) ,

/***/ 5622 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "path" ) ; ;

/***/ } ) ,

/***/ 2413 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "stream" ) ; ;

/***/ } ) ,

/***/ 4016 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "tls" ) ; ;

/***/ } ) ,

/***/ 8835 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "url" ) ; ;

/***/ } ) ,

/***/ 1669 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "util" ) ; ;

/***/ } ) ,

/***/ 8761 :
/***/ ( ( module ) => {

"use strict" ;
module . exports = require ( "zlib" ) ; ;

/***/ } )
/******/ } ) ;
/************************************************************************/
/******/ // The module cache
/******/ var _ _webpack _module _cache _ _ = { } ;
/******/
/******/ // The require function
/******/ function _ _nccwpck _require _ _ ( moduleId ) {
/******/ // Check if module is in cache
/******/ if ( _ _webpack _module _cache _ _ [ moduleId ] ) {
/******/ return _ _webpack _module _cache _ _ [ moduleId ] . exports ;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = _ _webpack _module _cache _ _ [ moduleId ] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports : { }
/******/ } ;
/******/
/******/ // Execute the module function
/******/ var threw = true ;
/******/ try {
/******/ _ _webpack _modules _ _ [ moduleId ] . call ( module . exports , module , module . exports , _ _nccwpck _require _ _ ) ;
/******/ threw = false ;
/******/ } finally {
/******/ if ( threw ) delete _ _webpack _module _cache _ _ [ moduleId ] ;
/******/ }
/******/
/******/ // Return the exports of the module
/******/ return module . exports ;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
/******/
/******/ _ _nccwpck _require _ _ . ab = _ _dirname + "/" ; /************************************************************************/
/******/ // module exports must be returned from runtime so entry inlining is disabled
/******/ // startup
/******/ // Load entry module and return exports
/******/ return _ _nccwpck _require _ _ ( 9367 ) ;
/******/ } ) ( )
;