// stray VCS timestamp artifact: 2020-04-28 15:45:21 +02:00
module . exports =
/******/ ( function ( modules , runtime ) { // webpackBootstrap
/******/ "use strict" ;
/******/ // The module cache
/******/ var installedModules = { } ;
/******/
/******/ // The require function
/******/ function _ _webpack _require _ _ ( moduleId ) {
/******/
/******/ // Check if module is in cache
/******/ if ( installedModules [ moduleId ] ) {
/******/ return installedModules [ moduleId ] . exports ;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules [ moduleId ] = {
/******/ i : moduleId ,
/******/ l : false ,
/******/ exports : { }
/******/ } ;
/******/
/******/ // Execute the module function
/******/ modules [ moduleId ] . call ( module . exports , module , module . exports , _ _webpack _require _ _ ) ;
/******/
/******/ // Flag the module as loaded
/******/ module . l = true ;
/******/
/******/ // Return the exports of the module
/******/ return module . exports ;
/******/ }
/******/
/******/
/******/ _ _webpack _require _ _ . ab = _ _dirname + "/" ;
/******/
/******/ // the startup function
/******/ function startup ( ) {
/******/ // Load entry module and return exports
/******/ return _ _webpack _require _ _ ( 799 ) ;
/******/ } ;
/******/
/******/ // run startup
/******/ return startup ( ) ;
/******/ } )
/************************************************************************/
/******/ ( {
/***/ 11 :
/***/ ( function ( module ) {
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module . exports = wrappy
// Wrap `fn` so that own properties of the wrapper function — and of any
// callback it replaces — are retained (decorations are not lost).
function wrappy (fn, cb) {
  // Two-argument form: wrap fn, then immediately apply the result to cb.
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  // Carry fn's own (decoration) properties over onto the wrapper.
  for (var k of Object.keys(fn)) {
    wrapper[k] = fn[k]
  }

  return wrapper

  function wrapper () {
    var args = Array.prototype.slice.call(arguments)
    var ret = fn.apply(this, args)
    var cb = args[args.length - 1]
    // If fn produced a replacement callback, keep the original
    // callback's own properties on it as well.
    if (typeof ret === 'function' && ret !== cb) {
      for (var key of Object.keys(cb)) {
        ret[key] = cb[key]
      }
    }
    return ret
  }
}
/***/ } ) ,
2021-12-07 17:44:54 +01:00
/***/ 13 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
const assert = _ _webpack _require _ _ ( 357 )
const path = _ _webpack _require _ _ ( 622 )
const fs = _ _webpack _require _ _ ( 747 )
let glob = undefined
try {
glob = _ _webpack _require _ _ ( 402 )
} catch ( _err ) {
// treat glob as optional.
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
const defaultGlobOpts = {
nosort : true ,
silent : true
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// for EMFILE handling
let timeout = 0
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
const isWindows = ( process . platform === "win32" )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
const defaults = options => {
const methods = [
'unlink' ,
'chmod' ,
'stat' ,
'lstat' ,
'rmdir' ,
'readdir'
]
methods . forEach ( m => {
options [ m ] = options [ m ] || fs [ m ]
m = m + 'Sync'
options [ m ] = options [ m ] || fs [ m ]
2020-04-28 15:45:21 +02:00
} )
2021-12-07 17:44:54 +01:00
options . maxBusyTries = options . maxBusyTries || 3
options . emfileWait = options . emfileWait || 1000
if ( options . glob === false ) {
options . disableGlob = true
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
if ( options . disableGlob !== true && glob === undefined ) {
throw Error ( 'glob dependency not found, set `options.disableGlob = true` if intentional' )
}
options . disableGlob = options . disableGlob || false
options . glob = options . glob || defaultGlobOpts
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
const rimraf = ( p , options , cb ) => {
if ( typeof options === 'function' ) {
cb = options
options = { }
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
assert ( p , 'rimraf: missing path' )
assert . equal ( typeof p , 'string' , 'rimraf: path should be a string' )
assert . equal ( typeof cb , 'function' , 'rimraf: callback function required' )
assert ( options , 'rimraf: invalid options argument provided' )
assert . equal ( typeof options , 'object' , 'rimraf: options should be object' )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
defaults ( options )
2020-11-13 20:53:33 +01:00
2021-12-07 17:44:54 +01:00
let busyTries = 0
let errState = null
let n = 0
2020-11-13 20:53:33 +01:00
2021-12-07 17:44:54 +01:00
const next = ( er ) => {
errState = errState || er
if ( -- n === 0 )
cb ( errState )
}
2020-11-13 20:53:33 +01:00
2021-12-07 17:44:54 +01:00
const afterGlob = ( er , results ) => {
if ( er )
return cb ( er )
2020-11-13 20:53:33 +01:00
2021-12-07 17:44:54 +01:00
n = results . length
if ( n === 0 )
return cb ( )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
results . forEach ( p => {
const CB = ( er ) => {
if ( er ) {
if ( ( er . code === "EBUSY" || er . code === "ENOTEMPTY" || er . code === "EPERM" ) &&
busyTries < options . maxBusyTries ) {
busyTries ++
// try again, with the same exact callback as this one.
return setTimeout ( ( ) => rimraf _ ( p , options , CB ) , busyTries * 100 )
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// this one won't happen if graceful-fs is used.
if ( er . code === "EMFILE" && timeout < options . emfileWait ) {
return setTimeout ( ( ) => rimraf _ ( p , options , CB ) , timeout ++ )
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// already gone
if ( er . code === "ENOENT" ) er = null
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
timeout = 0
next ( er )
}
rimraf _ ( p , options , CB )
} )
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
if ( options . disableGlob || ! glob . hasMagic ( p ) )
return afterGlob ( null , [ p ] )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
options . lstat ( p , ( er , stat ) => {
if ( ! er )
return afterGlob ( null , [ p ] )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
glob ( p , options . glob , afterGlob )
} )
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
// Two possible strategies.
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong. However, there
// are likely far more normal files in the world than directories. This
// is based on the assumption that a the average number of files per
// directory is >= 1.
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow. But until then, YAGNI.
const rimraf _ = ( p , options , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// sunos lets the root user unlink directories, which is... weird.
// so we have to lstat here and make sure it's not a dir.
options . lstat ( p , ( er , st ) => {
if ( er && er . code === "ENOENT" )
return cb ( null )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// Windows can EPERM on stat. Life is suffering.
if ( er && er . code === "EPERM" && isWindows )
fixWinEPERM ( p , options , er , cb )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
if ( st && st . isDirectory ( ) )
return rmdir ( p , options , er , cb )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
options . unlink ( p , er => {
if ( er ) {
if ( er . code === "ENOENT" )
return cb ( null )
if ( er . code === "EPERM" )
return ( isWindows )
? fixWinEPERM ( p , options , er , cb )
: rmdir ( p , options , er , cb )
if ( er . code === "EISDIR" )
return rmdir ( p , options , er , cb )
}
return cb ( er )
} )
} )
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
const fixWinEPERM = ( p , options , er , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
options . chmod ( p , 0o666 , er2 => {
if ( er2 )
cb ( er2 . code === "ENOENT" ? null : er )
else
options . stat ( p , ( er3 , stats ) => {
if ( er3 )
cb ( er3 . code === "ENOENT" ? null : er )
else if ( stats . isDirectory ( ) )
rmdir ( p , options , er , cb )
else
options . unlink ( p , cb )
} )
2020-04-28 15:45:21 +02:00
} )
}
2021-12-07 17:44:54 +01:00
const fixWinEPERMSync = ( p , options , er ) => {
assert ( p )
assert ( options )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
try {
options . chmodSync ( p , 0o666 )
} catch ( er2 ) {
if ( er2 . code === "ENOENT" )
return
else
throw er
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
let stats
try {
stats = options . statSync ( p )
} catch ( er3 ) {
if ( er3 . code === "ENOENT" )
return
else
throw er
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
if ( stats . isDirectory ( ) )
rmdirSync ( p , options , er )
else
options . unlinkSync ( p )
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
const rmdir = ( p , options , originalEr , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
// if we guessed wrong, and it's not a directory, then
// raise the original error.
options . rmdir ( p , er => {
if ( er && ( er . code === "ENOTEMPTY" || er . code === "EEXIST" || er . code === "EPERM" ) )
rmkids ( p , options , cb )
else if ( er && er . code === "ENOTDIR" )
cb ( originalEr )
else
cb ( er )
} )
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
const rmkids = ( p , options , cb ) => {
assert ( p )
assert ( options )
assert ( typeof cb === 'function' )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
options . readdir ( p , ( er , files ) => {
if ( er )
return cb ( er )
let n = files . length
if ( n === 0 )
return options . rmdir ( p , cb )
let errState
files . forEach ( f => {
rimraf ( path . join ( p , f ) , options , er => {
if ( errState )
return
if ( er )
return cb ( errState = er )
if ( -- n === 0 )
options . rmdir ( p , cb )
} )
} )
} )
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
const rimrafSync = ( p , options ) => {
options = options || { }
defaults ( options )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
assert ( p , 'rimraf: missing path' )
assert . equal ( typeof p , 'string' , 'rimraf: path should be a string' )
assert ( options , 'rimraf: missing options' )
assert . equal ( typeof options , 'object' , 'rimraf: options should be object' )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
let results
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
if ( options . disableGlob || ! glob . hasMagic ( p ) ) {
results = [ p ]
} else {
try {
options . lstatSync ( p )
results = [ p ]
} catch ( er ) {
results = glob . sync ( p , options . glob )
}
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
if ( ! results . length )
return
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
for ( let i = 0 ; i < results . length ; i ++ ) {
const p = results [ i ]
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
let st
try {
st = options . lstatSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// Windows can EPERM on stat. Life is suffering.
if ( er . code === "EPERM" && isWindows )
fixWinEPERMSync ( p , options , er )
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
try {
// sunos lets the root user unlink directories, which is... weird.
if ( st && st . isDirectory ( ) )
rmdirSync ( p , options , null )
else
options . unlinkSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
if ( er . code === "EPERM" )
return isWindows ? fixWinEPERMSync ( p , options , er ) : rmdirSync ( p , options , er )
if ( er . code !== "EISDIR" )
throw er
rmdirSync ( p , options , er )
}
}
2020-04-28 15:45:21 +02:00
}
2021-12-07 17:44:54 +01:00
const rmdirSync = ( p , options , originalEr ) => {
assert ( p )
assert ( options )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
try {
options . rmdirSync ( p )
} catch ( er ) {
if ( er . code === "ENOENT" )
return
if ( er . code === "ENOTDIR" )
throw originalEr
if ( er . code === "ENOTEMPTY" || er . code === "EEXIST" || er . code === "EPERM" )
rmkidsSync ( p , options )
}
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
const rmkidsSync = ( p , options ) => {
assert ( p )
assert ( options )
options . readdirSync ( p ) . forEach ( f => rimrafSync ( path . join ( p , f ) , options ) )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// We only end up here once we got ENOTEMPTY at least once, and
// at this point, we are guaranteed to have removed all the kids.
// So, we know that it won't be ENOENT or ENOTDIR or anything else.
// try really hard to delete stuff on windows, because it has a
// PROFOUNDLY annoying habit of not closing handles promptly when
// files are deleted, resulting in spurious ENOTEMPTY errors.
const retries = isWindows ? 100 : 1
let i = 0
do {
let threw = true
try {
const ret = options . rmdirSync ( p , options )
threw = false
return ret
} finally {
if ( ++ i < retries && threw )
continue
}
} while ( true )
}
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
module . exports = rimraf
rimraf . sync = rimrafSync
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
/***/ } ) ,
// stray VCS timestamp artifacts: 2020-04-28 15:45:21 +02:00 / 2021-12-07 17:44:54 +01:00
/***/ 16 :
/***/ ( function ( module ) {
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
module . exports = require ( "tls" ) ;
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
/***/ } ) ,
// stray VCS timestamp artifacts: 2020-04-28 15:45:21 +02:00 / 2023-03-08 22:06:44 +01:00
/***/ 22 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _validate = _interopRequireDefault ( _ _webpack _require _ _ ( 78 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function parse ( uuid ) {
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Invalid UUID' ) ;
}
let v ;
const arr = new Uint8Array ( 16 ) ; // Parse ########-....-....-....-............
arr [ 0 ] = ( v = parseInt ( uuid . slice ( 0 , 8 ) , 16 ) ) >>> 24 ;
arr [ 1 ] = v >>> 16 & 0xff ;
arr [ 2 ] = v >>> 8 & 0xff ;
arr [ 3 ] = v & 0xff ; // Parse ........-####-....-....-............
arr [ 4 ] = ( v = parseInt ( uuid . slice ( 9 , 13 ) , 16 ) ) >>> 8 ;
arr [ 5 ] = v & 0xff ; // Parse ........-....-####-....-............
arr [ 6 ] = ( v = parseInt ( uuid . slice ( 14 , 18 ) , 16 ) ) >>> 8 ;
arr [ 7 ] = v & 0xff ; // Parse ........-....-....-####-............
arr [ 8 ] = ( v = parseInt ( uuid . slice ( 19 , 23 ) , 16 ) ) >>> 8 ;
arr [ 9 ] = v & 0xff ; // Parse ........-....-....-....-############
// (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
arr [ 10 ] = ( v = parseInt ( uuid . slice ( 24 , 36 ) , 16 ) ) / 0x10000000000 & 0xff ;
arr [ 11 ] = v / 0x100000000 & 0xff ;
arr [ 12 ] = v >>> 24 & 0xff ;
arr [ 13 ] = v >>> 16 & 0xff ;
arr [ 14 ] = v >>> 8 & 0xff ;
arr [ 15 ] = v & 0xff ;
return arr ;
}
var _default = parse ;
exports . default = _default ;
/***/ } ) ,
2021-12-07 17:44:54 +01:00
/***/ 49 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
var wrappy = _ _webpack _require _ _ ( 11 )
module . exports = wrappy ( once )
module . exports . strict = wrappy ( onceStrict )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
once . proto = once ( function ( ) {
Object . defineProperty ( Function . prototype , 'once' , {
value : function ( ) {
return once ( this )
} ,
configurable : true
2020-04-28 15:45:21 +02:00
} )
2021-12-07 17:44:54 +01:00
Object . defineProperty ( Function . prototype , 'onceStrict' , {
value : function ( ) {
return onceStrict ( this )
} ,
configurable : true
} )
} )
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
// Return a wrapper that calls `fn` at most once; later calls return the
// cached first result. State is exposed on the wrapper as `.called` /
// `.value`.
function once (fn) {
  var f = function () {
    if (f.called) return f.value
    f.called = true
    return f.value = fn.apply(this, arguments)
  }
  f.called = false
  return f
}
// stray VCS timestamp artifact: 2021-12-07 17:44:54 +01:00
// Like `once`, but a second invocation throws instead of returning the
// cached value. The error message incorporates fn's name when it has one.
function onceStrict (fn) {
  var wrapped = function () {
    if (wrapped.called)
      throw new Error(wrapped.onceError)
    wrapped.called = true
    return wrapped.value = fn.apply(this, arguments)
  }

  var label = fn.name || 'Function wrapped with `once`'
  wrapped.onceError = label + " shouldn't be called more than once"
  wrapped.called = false
  return wrapped
}
// stray VCS timestamp artifacts: 2020-04-28 15:45:21 +02:00 / 2023-03-08 22:06:44 +01:00
/***/ } ) ,
/***/ 62 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
Object . defineProperty ( exports , "v1" , {
enumerable : true ,
get : function ( ) {
return _v . default ;
}
} ) ;
Object . defineProperty ( exports , "v3" , {
enumerable : true ,
get : function ( ) {
return _v2 . default ;
}
} ) ;
Object . defineProperty ( exports , "v4" , {
enumerable : true ,
get : function ( ) {
return _v3 . default ;
}
} ) ;
Object . defineProperty ( exports , "v5" , {
enumerable : true ,
get : function ( ) {
return _v4 . default ;
}
} ) ;
Object . defineProperty ( exports , "NIL" , {
enumerable : true ,
get : function ( ) {
return _nil . default ;
}
} ) ;
Object . defineProperty ( exports , "version" , {
enumerable : true ,
get : function ( ) {
return _version . default ;
}
} ) ;
Object . defineProperty ( exports , "validate" , {
enumerable : true ,
get : function ( ) {
return _validate . default ;
}
} ) ;
Object . defineProperty ( exports , "stringify" , {
enumerable : true ,
get : function ( ) {
return _stringify . default ;
}
} ) ;
Object . defineProperty ( exports , "parse" , {
enumerable : true ,
get : function ( ) {
return _parse . default ;
}
} ) ;
var _v = _interopRequireDefault ( _ _webpack _require _ _ ( 893 ) ) ;
var _v2 = _interopRequireDefault ( _ _webpack _require _ _ ( 209 ) ) ;
var _v3 = _interopRequireDefault ( _ _webpack _require _ _ ( 733 ) ) ;
var _v4 = _interopRequireDefault ( _ _webpack _require _ _ ( 384 ) ) ;
var _nil = _interopRequireDefault ( _ _webpack _require _ _ ( 327 ) ) ;
var _version = _interopRequireDefault ( _ _webpack _require _ _ ( 695 ) ) ;
var _validate = _interopRequireDefault ( _ _webpack _require _ _ ( 78 ) ) ;
var _stringify = _interopRequireDefault ( _ _webpack _require _ _ ( 411 ) ) ;
var _parse = _interopRequireDefault ( _ _webpack _require _ _ ( 22 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/***/ } ) ,
/***/ 78 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _regex = _interopRequireDefault ( _ _webpack _require _ _ ( 456 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function validate ( uuid ) {
return typeof uuid === 'string' && _regex . default . test ( uuid ) ;
}
var _default = validate ;
exports . default = _default ;
2021-12-07 17:44:54 +01:00
/***/ } ) ,
/***/ 82 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2023-03-08 22:06:44 +01:00
exports . toCommandProperties = exports . toCommandValue = void 0 ;
2021-12-07 17:44:54 +01:00
/ * *
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
* /
function toCommandValue(input) {
    // Primitive strings and String objects pass through unchanged.
    if (typeof input === 'string' || input instanceof String) {
        return input;
    }
    // null and undefined both serialize to the empty string.
    if (input === null || input === undefined) {
        return '';
    }
    // Everything else is JSON-encoded.
    return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;

/**
 *
 * @param annotationProperties
 * @returns The command properties to send with the actual annotation command
 * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
 */
function toCommandProperties(annotationProperties) {
    // No annotation metadata at all -> empty properties object.
    if (!Object.keys(annotationProperties).length) {
        return {};
    }
    // Map the annotation fields onto the runner's command property names.
    const { title, file, startLine, endLine, startColumn, endColumn } = annotationProperties;
    return {
        title,
        file,
        line: startLine,
        endLine,
        col: startColumn,
        endColumn
    };
}
exports . toCommandProperties = toCommandProperties ;
2021-12-07 17:44:54 +01:00
//# sourceMappingURL=utils.js.map
/***/ } ) ,
/***/ 87 :
/***/ ( function ( module ) {
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 93 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module . exports = minimatch
minimatch . Minimatch = Minimatch
var path = { sep : '/' }
try {
path = _ _webpack _require _ _ ( 622 )
} catch ( er ) { }
var GLOBSTAR = minimatch . GLOBSTAR = Minimatch . GLOBSTAR = { }
var expand = _ _webpack _require _ _ ( 306 )
var plTypes = {
'!' : { open : '(?:(?!(?:' , close : '))[^/]*?)' } ,
'?' : { open : '(?:' , close : ')?' } ,
'+' : { open : '(?:' , close : ')+' } ,
'*' : { open : '(?:' , close : ')*' } ,
'@' : { open : '(?:' , close : ')' }
}
// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'
// * => any number of characters
var star = qmark + '*?'
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
// characters that need to be escaped in RegExp.
var reSpecials = charSet ( '().*{}+?[]^$\\!' )
// "abc" -> { a:true, b:true, c:true }
// "abc" -> { a:true, b:true, c:true }
function charSet (s) {
  var set = {}
  for (var i = 0; i < s.length; i++) {
    set[s.charAt(i)] = true
  }
  return set
}
// normalizes slashes.
var slashSplit = /\/+/
minimatch . filter = filter
// Build a predicate (suitable for Array#filter) that matches `pattern`.
function filter (pattern, options) {
  options = options || {}
  return function (p, i, list) {
    return minimatch(p, pattern, options)
  }
}
// Shallow-merge `b` then `a` into a fresh object; keys from `a` win.
function ext (a, b) {
  var t = {}
  var sources = [b || {}, a || {}]
  sources.forEach(function (src) {
    Object.keys(src).forEach(function (k) {
      t[k] = src[k]
    })
  })
  return t
}
// Produce a minimatch variant whose options default to `def`.
minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return minimatch

  var base = minimatch

  var defaulted = function minimatch (p, pattern, options) {
    return base.minimatch(p, pattern, ext(def, options))
  }

  defaulted.Minimatch = function Minimatch (pattern, options) {
    return new base.Minimatch(pattern, ext(def, options))
  }

  return defaulted
}

Minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return Minimatch
  return minimatch.defaults(def).Minimatch
}
// Test whether path `p` matches glob `pattern`.
function minimatch (p, pattern, options) {
  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  options = options || {}

  // shortcut: comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    return false
  }

  // "" only matches ""
  if (pattern.trim() === '') return p === ''

  return new Minimatch(pattern, options).match(p)
}
// Compiled glob pattern. May be called with or without `new`.
function Minimatch (pattern, options) {
  // allow plain-function invocation
  if (!(this instanceof Minimatch)) {
    return new Minimatch(pattern, options)
  }

  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  options = options || {}

  pattern = pattern.trim()

  // windows support: need to use /, not \
  if (path.sep !== '/') {
    pattern = pattern.split(path.sep).join('/')
  }

  this.options = options
  this.set = []
  this.pattern = pattern
  this.regexp = null
  this.negate = false
  this.comment = false
  this.empty = false

  // make the set of regexps etc.
  this.make()
}
Minimatch . prototype . debug = function ( ) { }
Minimatch . prototype . make = make
// Compile this.pattern into this.set: expand braces, split on '/',
// and translate each path portion into a regexp (or GLOBSTAR).
function make () {
  // idempotent: only build once.
  if (this._made) return

  var pattern = this.pattern
  var options = this.options

  // empty patterns and comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    this.comment = true
    return
  }
  if (!pattern) {
    this.empty = true
    return
  }

  // step 1: figure out negation, etc.
  this.parseNegate()

  // step 2: expand braces
  var expanded = this.globSet = this.braceExpand()

  if (options.debug) this.debug = console.error
  this.debug(this.pattern, expanded)

  // step 3: split each expansion into its path portions.  These will
  // become regexps, except "**", which is set to the GLOBSTAR object
  // for globstar behavior, and will not contain any / characters.
  var parts = this.globParts = expanded.map(function (s) {
    return s.split(slashSplit)
  })
  this.debug(this.pattern, parts)

  // glob --> regexps
  var self = this
  var compiled = parts.map(function (portions) {
    return portions.map(self.parse, self)
  })
  this.debug(this.pattern, compiled)

  // filter out everything that didn't compile properly.
  compiled = compiled.filter(function (s) {
    return s.indexOf(false) === -1
  })
  this.debug(this.pattern, compiled)

  this.set = compiled
}
Minimatch . prototype . parseNegate = parseNegate
// Count leading '!' characters (each toggles negation), strip them from
// this.pattern, and record the net result in this.negate.
function parseNegate () {
  var pattern = this.pattern
  var negate = false
  var options = this.options
  var negateOffset = 0

  if (options.nonegate) return

  for (var i = 0, l = pattern.length
    ; i < l && pattern.charAt(i) === '!'
    ; i++) {
    negate = !negate
    negateOffset++
  }

  if (negateOffset) this.pattern = pattern.substr(negateOffset)
  this.negate = negate
}
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch . braceExpand = function ( pattern , options ) {
return braceExpand ( pattern , options )
}
Minimatch . prototype . braceExpand = braceExpand
// Expand brace patterns ("a{b,c}" -> ["ab", "ac"]); callable either as a
// Minimatch method or as a standalone function.
function braceExpand (pattern, options) {
  if (!options) {
    options = (this instanceof Minimatch) ? this.options : {}
  }

  pattern = typeof pattern === 'undefined' ? this.pattern : pattern

  if (typeof pattern === 'undefined') {
    throw new TypeError('undefined pattern')
  }

  // shortcut. no need to expand.
  if (options.nobrace || !pattern.match(/\{.*\}/)) {
    return [pattern]
  }

  return expand(pattern)
}
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch . prototype . parse = parse
var SUBPARSE = { }
function parse ( pattern , isSub ) {
if ( pattern . length > 1024 * 64 ) {
throw new TypeError ( 'pattern is too long' )
}
var options = this . options
// shortcuts
if ( ! options . noglobstar && pattern === '**' ) return GLOBSTAR
if ( pattern === '' ) return ''
var re = ''
var hasMagic = ! ! options . nocase
var escaping = false
// ? => one single character
var patternListStack = [ ]
var negativeLists = [ ]
var stateChar
var inClass = false
var reClassStart = - 1
var classStart = - 1
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
var patternStart = pattern . charAt ( 0 ) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options . dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)'
var self = this
function clearStateChar ( ) {
if ( stateChar ) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch ( stateChar ) {
case '*' :
re += star
hasMagic = true
break
case '?' :
re += qmark
hasMagic = true
break
default :
re += '\\' + stateChar
break
}
self . debug ( 'clearStateChar %j %j' , stateChar , re )
stateChar = false
}
}
for ( var i = 0 , len = pattern . length , c
; ( i < len ) && ( c = pattern . charAt ( i ) )
; i ++ ) {
this . debug ( '%s\t%s %s %j' , pattern , i , re , c )
// skip over any that are escaped.
if ( escaping && reSpecials [ c ] ) {
re += '\\' + c
escaping = false
continue
}
switch ( c ) {
case '/' :
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
case '\\' :
clearStateChar ( )
escaping = true
continue
// the various stateChar values
// for the "extglob" stuff.
case '?' :
case '*' :
case '+' :
case '@' :
case '!' :
this . debug ( '%s\t%s %s %j <-- stateChar' , pattern , i , re , c )
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if ( inClass ) {
this . debug ( ' in class' )
if ( c === '!' && i === classStart + 1 ) c = '^'
re += c
continue
}
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
self . debug ( 'call clearStateChar %j' , stateChar )
clearStateChar ( )
stateChar = c
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if ( options . noext ) clearStateChar ( )
continue
case '(' :
if ( inClass ) {
re += '('
continue
}
if ( ! stateChar ) {
re += '\\('
continue
}
patternListStack . push ( {
type : stateChar ,
start : i - 1 ,
reStart : re . length ,
open : plTypes [ stateChar ] . open ,
close : plTypes [ stateChar ] . close
} )
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this . debug ( 'plType %j %j' , stateChar , re )
stateChar = false
continue
case ')' :
if ( inClass || ! patternListStack . length ) {
re += '\\)'
continue
}
clearStateChar ( )
hasMagic = true
var pl = patternListStack . pop ( )
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
re += pl . close
if ( pl . type === '!' ) {
negativeLists . push ( pl )
}
pl . reEnd = re . length
continue
case '|' :
if ( inClass || ! patternListStack . length || escaping ) {
re += '\\|'
escaping = false
continue
}
clearStateChar ( )
re += '|'
continue
// these are mostly the same in regexp and glob
case '[' :
// swallow any state-tracking char before the [
clearStateChar ( )
if ( inClass ) {
re += '\\' + c
continue
}
inClass = true
classStart = i
reClassStart = re . length
re += c
continue
case ']' :
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if ( i === classStart + 1 || ! inClass ) {
re += '\\' + c
escaping = false
continue
}
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
if ( inClass ) {
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern . substring ( classStart + 1 , i )
try {
RegExp ( '[' + cs + ']' )
} catch ( er ) {
// not a valid class!
var sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ] + '\\]'
hasMagic = hasMagic || sp [ 1 ]
inClass = false
continue
}
}
// finish up the class.
hasMagic = true
inClass = false
re += c
continue
default :
// swallow any state char that wasn't consumed
clearStateChar ( )
if ( escaping ) {
// no need
escaping = false
} else if ( reSpecials [ c ]
&& ! ( c === '^' && inClass ) ) {
re += '\\'
}
re += c
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if ( inClass ) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern . substr ( classStart + 1 )
sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ]
hasMagic = hasMagic || sp [ 1 ]
}
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for ( pl = patternListStack . pop ( ) ; pl ; pl = patternListStack . pop ( ) ) {
var tail = re . slice ( pl . reStart + pl . open . length )
this . debug ( 'setting tail' , re , pl )
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail . replace ( /((?:\\{2}){0,64})(\\?)\|/g , function ( _ , $1 , $2 ) {
if ( ! $2 ) {
// the | isn't already escaped, so escape it.
$2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
} )
this . debug ( 'tail=%j\n %s' , tail , tail , pl , re )
var t = pl . type === '*' ? star
: pl . type === '?' ? qmark
: '\\' + pl . type
hasMagic = true
re = re . slice ( 0 , pl . reStart ) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar ( )
if ( escaping ) {
// trailing \\
re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch ( re . charAt ( 0 ) ) {
case '.' :
case '[' :
case '(' : addPatternStart = true
}
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for ( var n = negativeLists . length - 1 ; n > - 1 ; n -- ) {
var nl = negativeLists [ n ]
var nlBefore = re . slice ( 0 , nl . reStart )
var nlFirst = re . slice ( nl . reStart , nl . reEnd - 8 )
var nlLast = re . slice ( nl . reEnd - 8 , nl . reEnd )
var nlAfter = re . slice ( nl . reEnd )
nlLast += nlAfter
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
var openParensBefore = nlBefore . split ( '(' ) . length - 1
var cleanAfter = nlAfter
for ( i = 0 ; i < openParensBefore ; i ++ ) {
cleanAfter = cleanAfter . replace ( /\)[+*?]?/ , '' )
}
nlAfter = cleanAfter
var dollar = ''
if ( nlAfter === '' && isSub !== SUBPARSE ) {
dollar = '$'
}
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
re = newRe
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if ( re !== '' && hasMagic ) {
re = '(?=.)' + re
}
if ( addPatternStart ) {
re = patternStart + re
}
// parsing just a piece of a larger pattern.
if ( isSub === SUBPARSE ) {
return [ re , hasMagic ]
}
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if ( ! hasMagic ) {
return globUnescape ( pattern )
}
var flags = options . nocase ? 'i' : ''
try {
var regExp = new RegExp ( '^' + re + '$' , flags )
} catch ( er ) {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
// mode, but it's not a /m regex.
return new RegExp ( '$.' )
}
regExp . _glob = pattern
regExp . _src = re
return regExp
}
minimatch.makeRe = function (pattern, options) {
  // Convenience wrapper: build a Minimatch instance and return its RegExp.
  var mm = new Minimatch(pattern, options || {})
  return mm.makeRe()
}
Minimatch . prototype . makeRe = makeRe
function makeRe ( ) {
if ( this . regexp || this . regexp === false ) return this . regexp
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
var set = this . set
if ( ! set . length ) {
this . regexp = false
return this . regexp
}
var options = this . options
var twoStar = options . noglobstar ? star
: options . dot ? twoStarDot
: twoStarNoDot
var flags = options . nocase ? 'i' : ''
var re = set . map ( function ( pattern ) {
return pattern . map ( function ( p ) {
return ( p === GLOBSTAR ) ? twoStar
: ( typeof p === 'string' ) ? regExpEscape ( p )
: p . _src
} ) . join ( '\\\/' )
} ) . join ( '|' )
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^(?:' + re + ')$'
// can match anything, as long as it's not this.
if ( this . negate ) re = '^(?!' + re + ').*$'
try {
this . regexp = new RegExp ( re , flags )
} catch ( ex ) {
this . regexp = false
}
return this . regexp
}
// Filter `list` down to the entries matching `pattern`.
minimatch.match = function (list, pattern, options) {
  options = options || {}
  var mm = new Minimatch(pattern, options)
  var matched = list.filter(function (f) {
    return mm.match(f)
  })
  // nonull: a pattern that matched nothing yields the pattern itself.
  if (!matched.length && mm.options.nonull) {
    matched.push(pattern)
  }
  return matched
}
Minimatch.prototype.match = match
// Test a path `f` against this pattern.  `partial` allows the path to run
// out before the pattern does (used during fs traversal).
function match (f, partial) {
  this.debug('match', f, this.pattern)
  // Degenerate patterns: comments never match; empty matches only ''.
  if (this.comment) return false
  if (this.empty) return f === ''

  if (f === '/' && partial) return true

  var options = this.options

  // Normalize Windows path separators to '/'.
  if (path.sep !== '/') {
    f = f.split(path.sep).join('/')
  }

  // Work on the path as an array of segments.
  f = f.split(slashSplit)
  this.debug(this.pattern, 'split', f)

  // One matching row in this.set suffices; negation flips the outcome.
  var set = this.set
  this.debug(this.pattern, 'set', set)

  // Basename = last non-empty segment (used for matchBase).
  var filename
  for (var k = f.length - 1; k >= 0; k--) {
    filename = f[k]
    if (filename) break
  }

  for (var j = 0; j < set.length; j++) {
    var pattern = set[j]
    var file = f
    if (options.matchBase && pattern.length === 1) {
      file = [filename]
    }
    if (this.matchOne(file, pattern, partial)) {
      return options.flipNegate ? true : !this.negate
    }
  }

  // No row matched: success only for negated patterns.
  return options.flipNegate ? false : this.negate
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
Minimatch . prototype . matchOne = function ( file , pattern , partial ) {
var options = this . options
this . debug ( 'matchOne' ,
{ 'this' : this , file : file , pattern : pattern } )
this . debug ( 'matchOne' , file . length , pattern . length )
for ( var fi = 0 ,
pi = 0 ,
fl = file . length ,
pl = pattern . length
; ( fi < fl ) && ( pi < pl )
; fi ++ , pi ++ ) {
this . debug ( 'matchOne loop' )
var p = pattern [ pi ]
var f = file [ fi ]
this . debug ( pattern , p , f )
// should be impossible.
// some invalid regexp stuff in the set.
if ( p === false ) return false
if ( p === GLOBSTAR ) {
this . debug ( 'GLOBSTAR' , [ pattern , p , f ] )
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi
var pr = pi + 1
if ( pr === pl ) {
this . debug ( '** at the end' )
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for ( ; fi < fl ; fi ++ ) {
if ( file [ fi ] === '.' || file [ fi ] === '..' ||
( ! options . dot && file [ fi ] . charAt ( 0 ) === '.' ) ) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
while ( fr < fl ) {
var swallowee = file [ fr ]
this . debug ( '\nglobstar while' , file , fr , pattern , pr , swallowee )
// XXX remove this slice. Just pass the start index.
if ( this . matchOne ( file . slice ( fr ) , pattern . slice ( pr ) , partial ) ) {
this . debug ( 'globstar found match!' , fr , fl , swallowee )
// found a match.
return true
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if ( swallowee === '.' || swallowee === '..' ||
( ! options . dot && swallowee . charAt ( 0 ) === '.' ) ) {
this . debug ( 'dot detected!' , file , fr , pattern , pr )
break
}
// ** swallows a segment, and continue.
this . debug ( 'globstar swallow a segment, and continue' )
fr ++
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
if ( partial ) {
// ran out of file
this . debug ( '\n>>> no match, partial?' , file , fr , pattern , pr )
if ( fr === fl ) return true
}
return false
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit
if ( typeof p === 'string' ) {
if ( options . nocase ) {
hit = f . toLowerCase ( ) === p . toLowerCase ( )
} else {
hit = f === p
}
this . debug ( 'string match' , p , f , hit )
} else {
hit = f . match ( p )
this . debug ( 'pattern match' , p , f , hit )
}
if ( ! hit ) return false
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match a
// a pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if ( fi === fl && pi === pl ) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true
} else if ( fi === fl ) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial
} else if ( pi === pl ) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
var emptyFileEnd = ( fi === fl - 1 ) && ( file [ fi ] === '' )
return emptyFileEnd
}
// should be unreachable.
throw new Error ( 'wtf?' )
}
// replace stuff like \* with *
// Strip one level of backslash escaping: "\*" -> "*", "\\" -> "\".
function globUnescape (s) {
  var unescaped = s.replace(/\\(.)/g, '$1')
  return unescaped
}
// Backslash-escape every character that is special inside a RegExp source.
function regExpEscape (s) {
  var special = /[-[\]{}()*+?.,\\^$|#\s]/g
  return s.replace(special, '\\$&')
}
/***/ } ) ,
/***/ 95 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . checkBypass = exports . getProxyUrl = void 0 ;
// Resolve the proxy URL to use for reqUrl from the conventional environment
// variables, or undefined when the host is excluded via no_proxy/loopback.
function getProxyUrl(reqUrl) {
    const usingSsl = reqUrl.protocol === 'https:';
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    // Pick the proxy variable matching the request scheme (lowercase wins).
    const proxyVar = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    return proxyVar ? new URL(proxyVar) : undefined;
}
exports . getProxyUrl = getProxyUrl ;
// Decide whether reqUrl should bypass the proxy: loopback hosts always do;
// otherwise the no_proxy/NO_PROXY list is consulted.
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    // Loopback hosts never go through the proxy.
    if (isLoopbackAddress(reqUrl.hostname)) {
        return true;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Resolve the effective port: explicit, else the scheme default.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate forms to compare against the list: HOST and HOST:PORT.
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // An entry may match exactly, as a parent domain (".${entry}" suffix),
    // or as a leading-dot suffix; '*' bypasses everything.
    const entries = noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x);
    for (const entry of entries) {
        const bypass = entry === '*' ||
            upperReqHosts.some(x => x === entry ||
                x.endsWith(`.${entry}`) ||
                (entry.startsWith('.') && x.endsWith(`${entry}`)));
        if (bypass) {
            return true;
        }
    }
    return false;
}
exports . checkBypass = checkBypass ;
// True when host names the local machine: "localhost", IPv4 127.x.x.x,
// or the bracketed IPv6 loopback notations.
function isLoopbackAddress(host) {
    const h = host.toLowerCase();
    if (h === 'localhost') {
        return true;
    }
    return ['127.', '[::1]', '[0:0:0:0:0:0:0:1]'].some(prefix => h.startsWith(prefix));
}
//# sourceMappingURL=proxy.js.map
/***/ } ) ,
/***/ 102 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// For internal use, subject to change.
// TypeScript-emitted interop helper: expose property k of module m on o
// under the name k2 (an aliasing getter when Object.create is available,
// a plain copy otherwise).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript-emitted interop helper: attach module v as the `default`
// export of the namespace object o.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TypeScript-emitted interop helper: wrap a CommonJS module so it can be
// consumed with `import * as ns` semantics -- own properties are copied
// across and the module itself is exposed as ns.default.
// Fix: removed stray VCS timestamp lines embedded in the body, which were
// syntax errors introduced by a bad merge/extraction.
var __importStar = (this && this.__importStar) || function (mod) {
    // Real ES modules pass through untouched.
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . prepareKeyValueMessage = exports . issueFileCommand = void 0 ;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const uuid _1 = _ _webpack _require _ _ ( 62 ) ;
const utils _1 = _ _webpack _require _ _ ( 82 ) ;
// Append a message to the file named by the GITHUB_<command> environment
// variable (the file-based command protocol of GitHub Actions runners).
// Throws when the variable is unset or the file does not exist.
// Fix: removed a stray VCS timestamp line embedded in the body, which was
// a syntax error introduced by a bad merge/extraction.
function issueFileCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    // toCommandValue serializes non-string payloads; one command per line.
    fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
        encoding: 'utf8'
    });
}
exports . issueFileCommand = issueFileCommand ;
// Build a heredoc-style `KEY<<DELIM ... DELIM` message for a file command,
// using a random UUID-based delimiter so payloads cannot terminate the block.
function prepareKeyValueMessage(key, value) {
    const delimiter = `ghadelimiter_${uuid_1.v4()}`;
    const convertedValue = utils_1.toCommandValue(value);
    // These should realistically never happen, but just in case someone finds
    // a way to exploit uuid generation, reject keys/values containing the
    // delimiter (they could break out of the heredoc).
    const ensureNoDelimiter = (text, what) => {
        if (text.includes(delimiter)) {
            throw new Error(`Unexpected input: ${what} should not contain the delimiter "${delimiter}"`);
        }
    };
    ensureNoDelimiter(key, 'name');
    ensureNoDelimiter(convertedValue, 'value');
    return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
//# sourceMappingURL=file-command.js.map
/***/ } ) ,
/***/ 117 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var pathModule = _ _webpack _require _ _ ( 622 ) ;
var isWindows = process . platform === 'win32' ;
var fs = _ _webpack _require _ _ ( 747 ) ;
// JavaScript implementation of realpath, ported from node pre-v6
var DEBUG = process . env . NODE _DEBUG && /fs/ . test ( process . env . NODE _DEBUG ) ;
function rethrow ( ) {
// Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
// is fairly slow to generate.
var callback ;
if ( DEBUG ) {
var backtrace = new Error ;
callback = debugCallback ;
} else
callback = missingCallback ;
return callback ;
function debugCallback ( err ) {
if ( err ) {
backtrace . message = err . message ;
err = backtrace ;
missingCallback ( err ) ;
}
}
function missingCallback ( err ) {
if ( err ) {
if ( process . throwDeprecation )
throw err ; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
else if ( ! process . noDeprecation ) {
var msg = 'fs: missing callback ' + ( err . stack || err . message ) ;
if ( process . traceDeprecation )
console . trace ( msg ) ;
else
console . error ( msg ) ;
}
}
}
}
// Hand back the user's callback when one was given; otherwise fall back to
// rethrow(), which produces a callback that only surfaces errors.
function maybeCallback(cb) {
  if (typeof cb === 'function') {
    return cb;
  }
  return rethrow();
}
var normalize = pathModule . normalize ;
// Regexp that finds the next partion of a (partial) path
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
if ( isWindows ) {
var nextPartRe = /(.*?)(?:[\/\\]+|$)/g ;
} else {
var nextPartRe = /(.*?)(?:[\/]+|$)/g ;
}
// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
if ( isWindows ) {
var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/ ;
} else {
var splitRootRe = /^[\/]*/ ;
}
exports . realpathSync = function realpathSync ( p , cache ) {
// make p is absolute
p = pathModule . resolve ( p ) ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , p ) ) {
return cache [ p ] ;
}
var original = p ,
seenLinks = { } ,
knownHard = { } ;
// current character position in p
var pos ;
// the partial path so far, including a trailing slash if any
var current ;
// the partial path without a trailing slash (except when pointing at a root)
var base ;
// the partial path scanned in the previous round, with slash
var previous ;
start ( ) ;
function start ( ) {
// Skip over roots
var m = splitRootRe . exec ( p ) ;
pos = m [ 0 ] . length ;
current = m [ 0 ] ;
base = m [ 0 ] ;
previous = '' ;
// On windows, check that the root exists. On unix there is no need.
if ( isWindows && ! knownHard [ base ] ) {
fs . lstatSync ( base ) ;
knownHard [ base ] = true ;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
// NB: p.length changes.
while ( pos < p . length ) {
// find the next part
nextPartRe . lastIndex = pos ;
var result = nextPartRe . exec ( p ) ;
previous = current ;
current += result [ 0 ] ;
base = previous + result [ 1 ] ;
pos = nextPartRe . lastIndex ;
// continue if not a symlink
if ( knownHard [ base ] || ( cache && cache [ base ] === base ) ) {
continue ;
}
var resolvedLink ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , base ) ) {
// some known symbolic link. no need to stat again.
resolvedLink = cache [ base ] ;
} else {
var stat = fs . lstatSync ( base ) ;
if ( ! stat . isSymbolicLink ( ) ) {
knownHard [ base ] = true ;
if ( cache ) cache [ base ] = base ;
continue ;
}
// read the link if it wasn't read before
// dev/ino always return 0 on windows, so skip the check.
var linkTarget = null ;
if ( ! isWindows ) {
var id = stat . dev . toString ( 32 ) + ':' + stat . ino . toString ( 32 ) ;
if ( seenLinks . hasOwnProperty ( id ) ) {
linkTarget = seenLinks [ id ] ;
}
}
if ( linkTarget === null ) {
fs . statSync ( base ) ;
linkTarget = fs . readlinkSync ( base ) ;
}
resolvedLink = pathModule . resolve ( previous , linkTarget ) ;
// track this, if given a cache.
if ( cache ) cache [ base ] = resolvedLink ;
if ( ! isWindows ) seenLinks [ id ] = linkTarget ;
}
// resolve the link, then start over
p = pathModule . resolve ( resolvedLink , p . slice ( pos ) ) ;
start ( ) ;
}
if ( cache ) cache [ original ] = p ;
return p ;
} ;
exports . realpath = function realpath ( p , cache , cb ) {
if ( typeof cb !== 'function' ) {
cb = maybeCallback ( cache ) ;
cache = null ;
}
// make p is absolute
p = pathModule . resolve ( p ) ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , p ) ) {
return process . nextTick ( cb . bind ( null , null , cache [ p ] ) ) ;
}
var original = p ,
seenLinks = { } ,
knownHard = { } ;
// current character position in p
var pos ;
// the partial path so far, including a trailing slash if any
var current ;
// the partial path without a trailing slash (except when pointing at a root)
var base ;
// the partial path scanned in the previous round, with slash
var previous ;
start ( ) ;
function start ( ) {
// Skip over roots
var m = splitRootRe . exec ( p ) ;
pos = m [ 0 ] . length ;
current = m [ 0 ] ;
base = m [ 0 ] ;
previous = '' ;
// On windows, check that the root exists. On unix there is no need.
if ( isWindows && ! knownHard [ base ] ) {
fs . lstat ( base , function ( err ) {
if ( err ) return cb ( err ) ;
knownHard [ base ] = true ;
LOOP ( ) ;
} ) ;
} else {
process . nextTick ( LOOP ) ;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
function LOOP ( ) {
// stop if scanned past end of path
if ( pos >= p . length ) {
if ( cache ) cache [ original ] = p ;
return cb ( null , p ) ;
}
// find the next part
nextPartRe . lastIndex = pos ;
var result = nextPartRe . exec ( p ) ;
previous = current ;
current += result [ 0 ] ;
base = previous + result [ 1 ] ;
pos = nextPartRe . lastIndex ;
// continue if not a symlink
if ( knownHard [ base ] || ( cache && cache [ base ] === base ) ) {
return process . nextTick ( LOOP ) ;
}
if ( cache && Object . prototype . hasOwnProperty . call ( cache , base ) ) {
// known symbolic link. no need to stat again.
return gotResolvedLink ( cache [ base ] ) ;
}
return fs . lstat ( base , gotStat ) ;
}
function gotStat ( err , stat ) {
if ( err ) return cb ( err ) ;
// if not a symlink, skip to the next path part
if ( ! stat . isSymbolicLink ( ) ) {
knownHard [ base ] = true ;
if ( cache ) cache [ base ] = base ;
return process . nextTick ( LOOP ) ;
}
// stat & read the link if not read before
// call gotTarget as soon as the link target is known
// dev/ino always return 0 on windows, so skip the check.
if ( ! isWindows ) {
var id = stat . dev . toString ( 32 ) + ':' + stat . ino . toString ( 32 ) ;
if ( seenLinks . hasOwnProperty ( id ) ) {
return gotTarget ( null , seenLinks [ id ] , base ) ;
}
}
fs . stat ( base , function ( err ) {
if ( err ) return cb ( err ) ;
fs . readlink ( base , function ( err , target ) {
if ( ! isWindows ) seenLinks [ id ] = target ;
gotTarget ( err , target ) ;
} ) ;
} ) ;
}
function gotTarget ( err , target , base ) {
if ( err ) return cb ( err ) ;
var resolvedLink = pathModule . resolve ( previous , target ) ;
if ( cache ) cache [ base ] = resolvedLink ;
gotResolvedLink ( resolvedLink ) ;
}
function gotResolvedLink ( resolvedLink ) {
// resolve the link, then start over
p = pathModule . resolve ( resolvedLink , p . slice ( pos ) ) ;
start ( ) ;
}
} ;
/***/ } ) ,
/***/ 141 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
var net = _ _webpack _require _ _ ( 631 ) ;
var tls = _ _webpack _require _ _ ( 16 ) ;
var http = _ _webpack _require _ _ ( 605 ) ;
var https = _ _webpack _require _ _ ( 211 ) ;
var events = _ _webpack _require _ _ ( 614 ) ;
var assert = _ _webpack _require _ _ ( 357 ) ;
var util = _ _webpack _require _ _ ( 669 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
// Agent for plain HTTP requests tunneled through an HTTP proxy.
// Fix: removed a stray VCS timestamp line embedded in the body, which was
// a syntax error introduced by a bad merge/extraction.
function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  return agent;
}
// Agent for HTTPS requests tunneled through an HTTP proxy:
// CONNECT over plain HTTP, then upgrade the tunneled socket to TLS.
function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  Object.assign(agent, {
    request: http.request,
    createSocket: createSecureSocket,
    defaultPort: 443
  });
  return agent;
}
// Agent for plain HTTP requests tunneled through an HTTPS proxy
// (the connection to the proxy itself is TLS).
function httpOverHttps(options) {
  var agent = Object.assign(new TunnelingAgent(options), {
    request: https.request
  });
  return agent;
}
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
// Agent for HTTPS requests tunneled through an HTTPS proxy.
// Fix: removed a stray VCS timestamp line embedded in the body, which was
// a syntax error introduced by a bad merge/extraction.
function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}
// Base tunneling agent: manages a pool of CONNECT-tunneled sockets and a
// queue of requests waiting for a free socket.
// Fix: removed stray VCS timestamp lines embedded in the body, which were
// syntax errors introduced by a bad merge/extraction.
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = [];  // requests waiting for a free socket
  self.sockets = [];   // sockets (or placeholders) currently in use

  // When a socket is freed, hand it to a queued request for the same
  // origin; otherwise close it.
  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
// Dispatch or queue a request: park it when the socket pool is full,
// otherwise open a new tunneled socket and attach lifecycle handlers.
// Fix: removed stray VCS timestamp lines embedded in the body, which were
// syntax errors introduced by a bad merge/extraction.
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    // Detach all handlers once the socket leaves the pool for any reason.
    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
// Open a CONNECT tunnel through the proxy and hand the raw socket to cb.
// Fixes: removed stray VCS timestamp lines embedded in the body (syntax
// errors from a bad merge/extraction), and replaced the deprecated
// `new Buffer(string)` constructor with the equivalent `Buffer.from(string)`.
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  // Reserve a pool slot immediately so concurrent calls respect maxSockets.
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse);        // for v0.6
  connectReq.once('upgrade', onUpgrade);          // for v0.6
  connectReq.once('connect', onConnect);          // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
          'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the placeholder for the live socket and hand it to the caller.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
        'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/**
 * Drops a socket (or a pending placeholder) from the pool. When a request is
 * queued waiting for pool capacity, immediately opens a replacement socket
 * for it so the pool stays saturated.
 *
 * @param {Object} socket the socket or placeholder to remove
 */
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var pos = this.sockets.indexOf(socket);
  if (pos === -1) {
    return; // not one of ours — nothing to do
  }
  this.sockets.splice(pos, 1);

  var pending = this.requests.shift();
  if (!pending) {
    return;
  }
  // A slot just opened up: create a socket for the oldest queued request.
  this.createSocket(pending, function(newSocket) {
    pending.request.onSocket(newSocket);
  });
};
/**
 * https-over-proxy variant of createSocket: establishes the CONNECT tunnel
 * first, then upgrades the raw tunnel socket to TLS before handing it to cb.
 *
 * @param {Object} options request options
 * @param {Function} cb called with the TLS-wrapped socket
 */
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    // Use the request's Host header (minus any :port suffix) as the TLS
    // servername so SNI matches what the caller asked for.
    var hostHeader = options.request.getHeader('host');
    var servername = hostHeader ? hostHeader.replace(/:.*$/, '') : options.host;
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: servername
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
/**
 * Normalizes the legacy positional (host, port, localAddress) call style
 * into an options object. An object passed as the first argument
 * (Node v0.11+ API) is returned unchanged.
 *
 * @param {(string|Object)} host host name, or an options object
 * @param {?number} port
 * @param {?string} localAddress
 * @returns {Object} the options object
 */
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // for v0.11 or later: already an options object
  }
  // positional form, since v0.10
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
/**
 * Shallow-merges the own enumerable properties of every override object into
 * target, skipping undefined values. Later arguments win.
 *
 * @param {Object} target the object properties are copied into (mutated)
 * @param {...Object} [overrides] source objects; non-object, null and
 *     undefined arguments are ignored
 * @returns {Object} target
 */
function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    // FIX: typeof null === 'object', so the original threw a TypeError from
    // Object.keys(null) when a null argument was passed; guard against it.
    if (overrides && typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}
/**
 * Debug logging: active only when NODE_DEBUG contains the word "tunnel",
 * otherwise a no-op function.
 */
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    // prefix every message with "TUNNEL:" so the output is easy to grep
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  };
} else {
  debug = function() {};
}
exports.debug = debug; // for test
/***/ } ) ,
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/***/ 150 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/*!
 * Tmp
 *
 * Copyright (c) 2011-2017 KARASZI Istvan <github@spam.raszi.hu>
 *
 * MIT Licensed
 */

/*
 * Module dependencies.
 */
const fs = _ _webpack _require _ _ ( 747 ) ;
const os = _ _webpack _require _ _ ( 87 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const crypto = _ _webpack _require _ _ ( 417 ) ;
2021-12-07 17:44:54 +01:00
const _c = { fs : fs . constants , os : os . constants } ;
const rimraf = _ _webpack _require _ _ ( 13 ) ;
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/*
 * The working inner variables.
 */

// the random characters to choose from
const RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';

const TEMPLATE_PATTERN = /XXXXXX/;

const DEFAULT_TRIES = 3;

// O_CREAT | O_EXCL | O_RDWR — create exclusively, never clobber existing paths
const CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR);

// constants are off on the windows platform and will not match the actual errno codes
const IS_WIN32 = os.platform() === 'win32';
const EBADF = _c.EBADF || _c.os.errno.EBADF;
const ENOENT = _c.ENOENT || _c.os.errno.ENOENT;

const DIR_MODE = 0o700 /* 448 */;
const FILE_MODE = 0o600 /* 384 */;

const EXIT = 'exit';

// this will hold the objects need to be removed on exit
const _removeObjects = [];

// API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback
const FN_RMDIR_SYNC = fs.rmdirSync.bind(fs);
const FN_RIMRAF_SYNC = rimraf.sync;

let _gracefulCleanup = false;
/**
 * Gets a temporary file name.
 *
 * @param {(Options|tmpNameCallback)} options options or callback
 * @param {?tmpNameCallback} callback the callback function
 */
function tmpName(options, callback) {
  const args = _parseArguments(options, callback);
  const opts = args[0];
  const cb = args[1];

  try {
    _assertAndSanitizeOptions(opts);
  } catch (err) {
    return cb(err);
  }

  let tries = opts.tries;
  // generate candidate names until one does not exist on disk,
  // or the retry budget is exhausted
  (function _getUniqueName() {
    try {
      const name = _generateTmpName(opts);

      fs.stat(name, function(err) {
        if (!err) {
          // the name already exists — retry while we still may
          if (tries-- > 0) return _getUniqueName();

          return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
        }

        // stat failed: the path is free
        cb(null, name);
      });
    } catch (err) {
      cb(err);
    }
  }());
}
/**
 * Synchronous version of tmpName.
 *
 * @param {Object} options
 * @returns {string} the generated random name
 * @throws {Error} if the options are invalid or could not generate a filename
 */
function tmpNameSync(options) {
  const args = _parseArguments(options);
  const opts = args[0];

  _assertAndSanitizeOptions(opts);

  let tries = opts.tries;
  do {
    const name = _generateTmpName(opts);
    try {
      fs.statSync(name); // exists — keep trying
    } catch (e) {
      return name; // stat failed, so the path is free
    }
  } while (tries-- > 0);

  throw new Error('Could not get a unique tmp filename, max tries reached');
}
/**
 * Creates and opens a temporary file.
 *
 * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined
 * @param {?fileCallback} callback
 */
function file(options, callback) {
  const args = _parseArguments(options, callback);
  const opts = args[0];
  const cb = args[1];

  // pick a free name first, then create the file exclusively under it
  tmpName(opts, function _tmpNameCreated(nameErr, name) {
    if (nameErr) return cb(nameErr);

    fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(openErr, fd) {
      if (openErr) return cb(openErr);

      if (opts.discardDescriptor) {
        // the caller only wants the name — close the descriptor right away
        return fs.close(fd, function _discardCallback(possibleErr) {
          // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only
          return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false));
        });
      }

      // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care
      // about the descriptor
      const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
      cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false));
    });
  });
}
/**
 * Synchronous version of file.
 *
 * @param {Options} options
 * @returns {FileSyncObject} object consists of name, fd and removeCallback
 * @throws {Error} if cannot create a file
 */
function fileSync(options) {
  const args = _parseArguments(options);
  const opts = args[0];

  const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
  const name = tmpNameSync(opts);
  let fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
  if (opts.discardDescriptor) {
    // the caller only wants the name — close the descriptor right away
    fs.closeSync(fd);
    fd = undefined;
  }

  return {
    name: name,
    fd: fd,
    removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true)
  };
}
/**
 * Creates a temporary directory.
 *
 * @param {(Options|dirCallback)} options the options or the callback function
 * @param {?dirCallback} callback
 */
function dir(options, callback) {
  const [opts, cb] = _parseArguments(options, callback);

  // pick a free name first, then create the directory under it
  tmpName(opts, function _tmpNameCreated(nameErr, dirPath) {
    if (nameErr) return cb(nameErr);

    fs.mkdir(dirPath, opts.mode || DIR_MODE, function _dirCreated(mkdirErr) {
      if (mkdirErr) return cb(mkdirErr);

      cb(null, dirPath, _prepareTmpDirRemoveCallback(dirPath, opts, false));
    });
  });
}
/**
 * Synchronous version of dir.
 *
 * @param {Options} options
 * @returns {DirSyncObject} object consists of name and removeCallback
 * @throws {Error} if it cannot create a directory
 */
function dirSync(options) {
  const [opts] = _parseArguments(options);

  const dirPath = tmpNameSync(opts);
  fs.mkdirSync(dirPath, opts.mode || DIR_MODE);

  return {
    name: dirPath,
    removeCallback: _prepareTmpDirRemoveCallback(dirPath, opts, true)
  };
}
/**
 * Removes a file asynchronously, closing its descriptor first when one is
 * still attached.
 *
 * @param {Array} fdPath [fd, path] pair; fd < 0 means "no open descriptor"
 * @param {Function} next completion callback; receives an error only for
 *     unexpected failures (a missing file is fine)
 * @private
 */
function _removeFileAsync(fdPath, next) {
  const _handler = function (err) {
    // a missing file is expected; anything else is reported to the caller
    if (err && !_isENOENT(err)) {
      return next(err);
    }
    next();
  };

  if (fdPath[0] >= 0) {
    // close first (ignoring close errors), then unlink
    fs.close(fdPath[0], function () {
      fs.unlink(fdPath[1], _handler);
    });
  } else {
    fs.unlink(fdPath[1], _handler);
  }
}
/**
 * Removes a file synchronously, closing its descriptor first when one is
 * still attached. Expected "already gone" errors (EBADF on close, ENOENT on
 * unlink) are swallowed; anything else is rethrown.
 *
 * @param {Array} fdPath [fd, path] pair; fd < 0 means "no open descriptor"
 * @private
 */
function _removeFileSync(fdPath) {
  let deferredError = null;
  try {
    if (fdPath[0] >= 0) fs.closeSync(fdPath[0]);
  } catch (e) {
    // EBADF/ENOENT mean the descriptor is already gone — that is fine
    if (!_isEBADF(e) && !_isENOENT(e)) throw e;
  } finally {
    try {
      fs.unlinkSync(fdPath[1]);
    } catch (e) {
      // remember unexpected unlink failures and rethrow after the finally
      if (!_isENOENT(e)) deferredError = e;
    }
  }
  if (deferredError !== null) {
    throw deferredError;
  }
}
/**
 * Prepares the callback for removal of the temporary file.
 *
 * Returns either a sync callback or an async callback depending on whether
 * fileSync or file was called, which is expressed by the sync parameter.
 *
 * @param {string} name the path of the file
 * @param {number} fd file descriptor, -1 when discarded/detached
 * @param {Object} opts
 * @param {boolean} sync
 * @returns {fileCallback | fileCallbackSync}
 * @private
 */
function _prepareTmpFileRemoveCallback(name, fd, opts, sync) {
  const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync);
  const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync);

  // only the sync variant is registered for exit-time garbage collection
  if (!opts.keep) {
    _removeObjects.unshift(removeCallbackSync);
  }

  return sync ? removeCallbackSync : removeCallback;
}
/**
 * Prepares the callback for removal of the temporary directory.
 *
 * Returns either a sync callback or an async callback depending on whether
 * dirSync or dir was called, which is expressed by the sync parameter.
 *
 * @param {string} name
 * @param {Object} opts
 * @param {boolean} sync
 * @returns {Function} the callback
 * @private
 */
function _prepareTmpDirRemoveCallback(name, opts, sync) {
  let removeFunction;
  let removeFunctionSync;
  if (opts.unsafeCleanup) {
    // recursively delete the whole tree
    removeFunction = rimraf;
    removeFunctionSync = FN_RIMRAF_SYNC;
  } else {
    // plain rmdir: only succeeds on empty directories
    removeFunction = fs.rmdir.bind(fs);
    removeFunctionSync = FN_RMDIR_SYNC;
  }
  const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync);
  const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync);

  // only the sync variant is registered for exit-time garbage collection
  if (!opts.keep) {
    _removeObjects.unshift(removeCallbackSync);
  }

  return sync ? removeCallbackSync : removeCallback;
}
/**
 * Creates a guarded function wrapping the removeFunction call.
 *
 * The cleanup callback is safe to be called multiple times; subsequent
 * invocations will be ignored. On its first invocation it also deregisters
 * itself (or the associated sync callback) from the exit-time cleanup list.
 *
 * @param {Function} removeFunction
 * @param {string} fileOrDirName
 * @param {boolean} sync
 * @param {cleanupCallbackSync?} cleanupCallbackSync
 * @returns {cleanupCallback | cleanupCallbackSync}
 * @private
 */
function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) {
  let called = false;

  // if sync is true, the next parameter will be ignored
  return function _cleanupCallback(next) {
    if (called) {
      return;
    }
    called = true;

    // deregister from the exit-time cleanup list
    const registered = cleanupCallbackSync || _cleanupCallback;
    const idx = _removeObjects.indexOf(registered);
    if (idx >= 0) {
      _removeObjects.splice(idx, 1);
    }

    if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) {
      return removeFunction(fileOrDirName);
    }
    return removeFunction(fileOrDirName, next || function () {});
  };
}
/**
 * The garbage collector: runs every registered cleanup callback on process
 * exit. Active only once setGracefulCleanup() has been called.
 *
 * @private
 */
function _garbageCollector() {
  if (!_gracefulCleanup) {
    return;
  }
  // the function being called removes itself from _removeObjects,
  // loop until _removeObjects is empty
  while (_removeObjects.length) {
    try {
      _removeObjects[0]();
    } catch (e) {
      // already removed? best effort on exit — keep going
    }
  }
}
/**
 * Random name generator based on crypto.
 *
 * @param {number} howMany number of characters to generate
 * @returns {string} the generated random name
 * @private
 */
function _randomChars(howMany) {
  let rnd = null;
  // make sure that we do not fail because we ran out of entropy
  try {
    rnd = crypto.randomBytes(howMany);
  } catch (e) {
    rnd = crypto.pseudoRandomBytes(howMany);
  }

  const chars = [];
  for (let i = 0; i < howMany; i++) {
    chars.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
  }
  return chars.join('');
}
/**
 * Determines whether the string s is blank: null, undefined, empty or
 * whitespace only.
 *
 * @private
 * @param {string} s
 * @returns {Boolean} true if s is blank, false otherwise
 */
function _isBlank(s) {
  if (s === null || _isUndefined(s)) {
    return true;
  }
  return !s.trim();
}
/**
 * Checks whether the `obj` parameter is defined or not.
 *
 * @param {Object} obj
 * @returns {boolean} true if the object is undefined
 * @private
 */
function _isUndefined(obj) {
  return obj === undefined;
}
/**
 * Parses the function arguments, allowing the options object to be omitted
 * or replaced by the callback itself.
 *
 * @param {(Options|null|undefined|Function)} options
 * @param {?Function} callback
 * @returns {Array} [opts, callback]
 * @private
 */
function _parseArguments(options, callback) {
  if (typeof options === 'function') {
    // options omitted — the first argument is actually the callback
    return [{}, options];
  }
  if (_isUndefined(options)) {
    return [{}, callback];
  }
  // shallow-copy options so internal mutations do not leak back to the caller
  const actualOptions = Object.getOwnPropertyNames(options).reduce(
    (copy, key) => {
      copy[key] = options[key];
      return copy;
    },
    {}
  );
  return [actualOptions, callback];
}
/**
 * Generates a new temporary name.
 *
 * @param {Object} opts sanitized options (see _assertAndSanitizeOptions)
 * @returns {string} the new random name according to opts
 * @private
 */
function _generateTmpName(opts) {
  const tmpDir = opts.tmpdir;

  // a fixed name always wins
  if (!_isUndefined(opts.name)) {
    return path.join(tmpDir, opts.dir, opts.name);
  }

  // mkstemp-like template: replace the XXXXXX marker with random characters
  if (!_isUndefined(opts.template)) {
    return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6));
  }

  // otherwise assemble prefix-pid-random[-postfix]
  const generated = [
    opts.prefix ? opts.prefix : 'tmp',
    '-',
    process.pid,
    '-',
    _randomChars(12),
    opts.postfix ? '-' + opts.postfix : ''
  ].join('');
  return path.join(tmpDir, opts.dir, generated);
}
/**
 * Asserts whether the specified options are valid, also sanitizes options
 * in place and provides sane defaults for missing options: resolves tmpdir,
 * makes dir/template relative, sanitizes the fixed name and normalizes
 * tries/keep/descriptor flags/prefix/postfix.
 *
 * @param {Options} options (mutated)
 * @private
 */
function _assertAndSanitizeOptions(options) {
  options.tmpdir = _getTmpDir(options);
  const tmpDir = options.tmpdir;

  if (!_isUndefined(options.name)) {
    _assertIsRelative(options.name, 'name', tmpDir);
  }
  if (!_isUndefined(options.dir)) {
    _assertIsRelative(options.dir, 'dir', tmpDir);
  }
  if (!_isUndefined(options.template)) {
    _assertIsRelative(options.template, 'template', tmpDir);
    if (!options.template.match(TEMPLATE_PATTERN)) {
      throw new Error(`Invalid template, found "${options.template}".`);
    }
  }
  if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) {
    throw new Error(`Invalid tries, found "${options.tries}".`);
  }

  // if a name was specified we will try once
  options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1;
  options.keep = !!options.keep;
  options.detachDescriptor = !!options.detachDescriptor;
  options.discardDescriptor = !!options.discardDescriptor;
  options.unsafeCleanup = !!options.unsafeCleanup;

  // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to
  options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir));
  options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir));
  // sanitize further if template is relative to options.dir
  options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template);
  // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to
  options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name);
  options.prefix = _isUndefined(options.prefix) ? '' : options.prefix;
  options.postfix = _isUndefined(options.postfix) ? '' : options.postfix;
}
/**
 * Resolves the specified path name with respect to tmpDir.
 *
 * The specified name might include relative path components, e.g. ../,
 * so it is resolved to an absolute path in order to check later that it
 * is located inside tmpDir.
 *
 * @param name
 * @param tmpDir
 * @returns {string}
 * @private
 */
function _resolvePath(name, tmpDir) {
  const sanitizedName = _sanitizeName(name);
  return sanitizedName.startsWith(tmpDir)
    ? path.resolve(sanitizedName)
    : path.resolve(path.join(tmpDir, sanitizedName));
}
/**
 * Sanitizes the specified path name by removing all quote characters.
 * Blank values (null/undefined/whitespace-only) are returned unchanged.
 *
 * @param name
 * @returns {string}
 * @private
 */
function _sanitizeName(name) {
  return _isBlank(name) ? name : name.replace(/["']/g, '');
}
/**
 * Asserts whether the specified name is relative to the specified tmpDir.
 *
 * @param {string} name
 * @param {string} option one of 'name', 'dir' or 'template'
 * @param {string} tmpDir
 * @throws {Error} when the value escapes tmpDir, or a fixed name contains
 *     path components
 * @private
 */
function _assertIsRelative(name, option, tmpDir) {
  if (option === 'name') {
    // a fixed name must be a bare file name: no absolute path, no separators
    if (path.isAbsolute(name)) {
      throw new Error(`${option} option must not contain an absolute path, found "${name}".`);
    }
    // must not fail on valid .<name> or ..<name> or similar such constructs
    const basename = path.basename(name);
    if (basename === '..' || basename === '.' || basename !== name) {
      throw new Error(`${option} option must not contain a path, found "${name}".`);
    }
    return;
  }

  // if (option === 'dir' || option === 'template'):
  // dir and template must resolve to somewhere inside tmpDir
  if (path.isAbsolute(name) && !name.startsWith(tmpDir)) {
    throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`);
  }
  const resolvedPath = _resolvePath(name, tmpDir);
  if (!resolvedPath.startsWith(tmpDir)) {
    throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`);
  }
}
/**
 * Tests whether the given error is EBADF; compensates for the changes made
 * to Node 7.x under Windows (see _isExpectedError).
 *
 * @param {SystemError} error
 * @returns {boolean}
 * @private
 */
function _isEBADF(error) {
  return _isExpectedError(error, -EBADF, 'EBADF');
}
/**
 * Tests whether the given error is ENOENT; compensates for the changes made
 * to Node 7.x under Windows (see _isExpectedError).
 *
 * @param {SystemError} error
 * @returns {boolean}
 * @private
 */
function _isENOENT(error) {
  return _isExpectedError(error, -ENOENT, 'ENOENT');
}
/**
 * Determines whether the raised system error matches the expected
 * code / errno pair, which will differ between the supported node versions.
 *
 * - Node >= 7.0:
 *   error.code {string}
 *   error.errno {number} any numerical value will be negated
 *
 * CAVEAT
 *
 * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is
 * different and we must assume that ENOENT is no different here, so only the
 * string code is compared on that platform.
 *
 * @param {SystemError} error
 * @param {number} errno
 * @param {string} code
 * @private
 */
function _isExpectedError(error, errno, code) {
  if (error.code !== code) {
    return false;
  }
  return IS_WIN32 ? true : error.errno === errno;
}
/**
 * Sets the graceful cleanup.
 *
 * If graceful cleanup is set, tmp will remove all controlled temporary
 * objects on process exit, otherwise the temporary objects will remain in
 * place, waiting to be cleaned up on system restart or otherwise scheduled
 * temporary object removals.
 */
function setGracefulCleanup() {
  _gracefulCleanup = true;
}
/**
 * Returns the currently configured tmp dir: the tmpdir option when given,
 * otherwise os.tmpdir(), sanitized and resolved to an absolute path.
 *
 * @private
 * @param {?Options} options
 * @returns {string} the currently configured tmp dir
 */
function _getTmpDir(options) {
  const configured = (options && options.tmpdir) || os.tmpdir();
  return path.resolve(_sanitizeName(configured));
}
// Install process exit listener
process . addListener ( EXIT , _garbageCollector ) ;
2021-04-06 22:50:27 +02:00
/ * *
* Configuration options .
*
* @ typedef { Object } Options
2021-12-07 17:44:54 +01:00
* @ property { ? boolean } keep the temporary object ( file or dir ) will not be garbage collected
2021-04-06 22:50:27 +02:00
* @ property { ? number } tries the number of tries before give up the name generation
2021-12-07 17:44:54 +01:00
 * @property {?number} mode the access mode, defaults are 0o700 for directories and 0o600 for files
2021-04-06 22:50:27 +02:00
* @ property { ? string } template the "mkstemp" like filename template
2021-12-07 17:44:54 +01:00
* @ property { ? string } name fixed name relative to tmpdir or the specified dir option
* @ property { ? string } dir tmp directory relative to the root tmp directory in use
2021-04-06 22:50:27 +02:00
* @ property { ? string } prefix prefix for the generated name
* @ property { ? string } postfix postfix for the generated name
2021-12-07 17:44:54 +01:00
* @ property { ? string } tmpdir the root tmp directory which overrides the os tmpdir
2021-04-06 22:50:27 +02:00
* @ property { ? boolean } unsafeCleanup recursively removes the created temporary directory , even when it ' s not empty
2021-12-07 17:44:54 +01:00
* @ property { ? boolean } detachDescriptor detaches the file descriptor , caller is responsible for closing the file , tmp will no longer try closing the file during garbage collection
* @ property { ? boolean } discardDescriptor discards the file descriptor ( closes file , fd is - 1 ) , tmp will no longer try closing the file during garbage collection
2021-04-06 22:50:27 +02:00
* /
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/ * *
* @ typedef { Object } FileSyncObject
* @ property { string } name the name of the file
2021-12-07 17:44:54 +01:00
* @ property { string } fd the file descriptor or - 1 if the fd has been discarded
2021-04-06 22:50:27 +02:00
* @ property { fileCallback } removeCallback the callback function to remove the file
* /
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/ * *
* @ typedef { Object } DirSyncObject
* @ property { string } name the name of the directory
* @ property { fileCallback } removeCallback the callback function to remove the directory
* /
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/ * *
* @ callback tmpNameCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* /
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/ * *
* @ callback fileCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
2021-12-07 17:44:54 +01:00
* @ param { number } fd the file descriptor or - 1 if the fd had been discarded
2021-04-06 22:50:27 +02:00
* @ param { cleanupCallback } fn the cleanup callback function
* /
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
/ * *
* @ callback fileCallbackSync
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { number } fd the file descriptor or - 1 if the fd had been discarded
* @ param { cleanupCallbackSync } fn the cleanup callback function
* /
2021-04-06 22:50:27 +02:00
/ * *
* @ callback dirCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { cleanupCallback } fn the cleanup callback function
* /
2020-04-28 15:45:21 +02:00
2021-12-07 17:44:54 +01:00
/ * *
* @ callback dirCallbackSync
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { cleanupCallbackSync } fn the cleanup callback function
* /
2021-04-06 22:50:27 +02:00
/ * *
* Removes the temporary created file or directory .
*
* @ callback cleanupCallback
2021-12-07 17:44:54 +01:00
* @ param { simpleCallback } [ next ] function to call whenever the tmp object needs to be removed
* /
/ * *
* Removes the temporary created file or directory .
*
* @ callback cleanupCallbackSync
2021-04-06 22:50:27 +02:00
* /
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/ * *
* Callback function for function composition .
* @ see { @ link https : //github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
*
* @ callback simpleCallback
* /
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
// exporting all the needed methods

// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will
// allow users to reconfigure the temporary directory
Object.defineProperty(module.exports, 'tmpdir', {
  enumerable: true,
  configurable: false,
  get() {
    return _getTmpDir();
  }
});

module.exports.dir = dir;
module.exports.dirSync = dirSync;

module.exports.file = file;
module.exports.fileSync = fileSync;

module.exports.tmpName = tmpName;
module.exports.tmpNameSync = tmpNameSync;

module.exports.setGracefulCleanup = setGracefulCleanup;
/***/ } ) ,
/***/ 176 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core _1 = _ _webpack _require _ _ ( 470 ) ;
/ * *
* Status Reporter that displays information about the progress / status of an artifact that is being uploaded or downloaded
*
* Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable
* The total status of the upload / download gets displayed according to this value
* If there is a large file that is being uploaded , extra information about the individual status can also be displayed using the updateLargeFileStatus function
* /
/**
 * Status Reporter that displays information about the progress/status of an
 * artifact that is being uploaded or downloaded.
 *
 * While start() is active, the overall progress line is printed every
 * `displayFrequencyInMilliseconds`; updateLargeFileStatus() additionally
 * reports per-chunk progress for large files.
 */
class StatusReporter {
  constructor(displayFrequencyInMilliseconds) {
    this.totalNumberOfFilesToProcess = 0;
    this.processedCount = 0;
    // NOTE(review): largeFiles is never read or written in this class; it
    // looks like a leftover from an earlier throttling implementation.
    this.largeFiles = new Map();
    this.totalFileStatus = undefined;
    this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds;
  }
  setTotalNumberOfFilesToProcess(fileTotal) {
    this.totalNumberOfFilesToProcess = fileTotal;
    this.processedCount = 0;
  }
  start() {
    // displays information about the total upload/download status
    this.totalFileStatus = setInterval(() => {
      // display 1 decimal place without any rounding
      const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess);
      const truncated = percentage.slice(0, percentage.indexOf('.') + 2);
      core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${truncated}%)`);
    }, this.displayFrequencyInMilliseconds);
  }
  // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload
  updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) {
    // display 1 decimal place without any rounding
    const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize);
    const truncated = percentage.slice(0, percentage.indexOf('.') + 2);
    core_1.info(`Uploaded ${fileName} (${truncated}%) bytes ${chunkStartIndex}:${chunkEndIndex}`);
  }
  stop() {
    if (this.totalFileStatus) {
      clearInterval(this.totalFileStatus);
    }
  }
  incrementProcessedCount() {
    this.processedCount++;
  }
  formatPercentage(numerator, denominator) {
    // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed
    return ((numerator / denominator) * 100).toFixed(4).toString();
  }
}
exports . StatusReporter = StatusReporter ;
//# sourceMappingURL=status-reporter.js.map
/***/ } ) ,
2023-03-08 22:06:44 +01:00
/***/ 209:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// uuid v3: name-based UUIDs (RFC 4122 version 3), built from the generic
// v3/v5 factory in module 212 and a hash function from module 803
// (presumably MD5 given the `_md` binding — confirm against module 803).
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;

var _v = _interopRequireDefault(__webpack_require__(212));

var _md = _interopRequireDefault(__webpack_require__(803));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// 0x30 is the version nibble stamped into byte 6 of the generated UUID.
const v3 = (0, _v.default)('v3', 0x30, _md.default);
var _default = v3;
exports.default = _default;

/***/ }),
2020-04-28 15:45:21 +02:00
/***/ 211:
/***/ (function(module) {

// Node built-in "https" kept external by webpack and re-exported as-is.
module.exports = require("https");

/***/ }),
2023-03-08 22:06:44 +01:00
/***/ 212 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = _default ;
exports . URL = exports . DNS = void 0 ;
var _stringify = _interopRequireDefault ( _ _webpack _require _ _ ( 411 ) ) ;
var _parse = _interopRequireDefault ( _ _webpack _require _ _ ( 22 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Encodes a string as an array of UTF-8 byte values.
 *
 * @param {string} str the string to encode
 * @returns {number[]} the UTF-8 bytes of str
 */
function stringToBytes(str) {
  // unescape(encodeURIComponent(s)) yields a binary string whose char codes
  // are exactly the UTF-8 bytes of s (legacy but well-defined trick).
  const utf8 = unescape(encodeURIComponent(str));
  return Array.from(utf8, (ch) => ch.charCodeAt(0));
}
const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8' ;
exports . DNS = DNS ;
const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8' ;
exports . URL = URL ;
function _default ( name , version , hashfunc ) {
function generateUUID ( value , namespace , buf , offset ) {
if ( typeof value === 'string' ) {
value = stringToBytes ( value ) ;
}
if ( typeof namespace === 'string' ) {
namespace = ( 0 , _parse . default ) ( namespace ) ;
}
if ( namespace . length !== 16 ) {
throw TypeError ( 'Namespace must be array-like (16 iterable integer values, 0-255)' ) ;
} // Compute hash of namespace and value, Per 4.3
// Future: Use spread syntax when supported on all platforms, e.g. `bytes =
// hashfunc([...namespace, ... value])`
let bytes = new Uint8Array ( 16 + value . length ) ;
bytes . set ( namespace ) ;
bytes . set ( value , namespace . length ) ;
bytes = hashfunc ( bytes ) ;
bytes [ 6 ] = bytes [ 6 ] & 0x0f | version ;
bytes [ 8 ] = bytes [ 8 ] & 0x3f | 0x80 ;
if ( buf ) {
offset = offset || 0 ;
for ( let i = 0 ; i < 16 ; ++ i ) {
buf [ offset + i ] = bytes [ i ] ;
}
return buf ;
}
return ( 0 , _stringify . default ) ( bytes ) ;
} // Function#name is not settable on some platforms (#270)
try {
generateUUID . name = name ; // eslint-disable-next-line no-empty
} catch ( err ) { } // For CommonJS default export support
generateUUID . DNS = DNS ;
generateUUID . URL = URL ;
return generateUUID ;
}
/***/ } ) ,
2020-04-28 15:45:21 +02:00
/***/ 214:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const artifact_client_1 = __webpack_require__(359);
/**
 * Constructs an ArtifactClient
 *
 * Thin factory so consumers never reference DefaultArtifactClient directly.
 */
function create() {
    return artifact_client_1.DefaultArtifactClient.create();
}
exports.create = create;
//# sourceMappingURL=artifact-client.js.map

/***/ }),
/***/ 226 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
/**
 * Pre-authorizes every outgoing request with HTTP Basic credentials.
 */
class BasicCredentialHandler {
  constructor(username, password) {
    this.username = username;
    this.password = password;
  }
  prepareRequest(options) {
    // RFC 7617: base64("user:password") in the Authorization header.
    const credentials = `${this.username}:${this.password}`;
    options.headers['Authorization'] = `Basic ${Buffer.from(credentials).toString('base64')}`;
  }
  // This handler cannot handle 401
  canHandleAuthentication(response) {
    return false;
  }
  handleAuthentication(httpClient, requestInfo, objs) {
    return null;
  }
}
exports . BasicCredentialHandler = BasicCredentialHandler ;
/**
 * Pre-authorizes every outgoing request with a Bearer token.
 */
class BearerCredentialHandler {
  constructor(token) {
    this.token = token;
  }
  // currently implements pre-authorization
  // TODO: support preAuth = false where it hooks on 401
  prepareRequest(options) {
    options.headers['Authorization'] = `Bearer ${this.token}`;
  }
  // This handler cannot handle 401
  canHandleAuthentication(response) {
    return false;
  }
  handleAuthentication(httpClient, requestInfo, objs) {
    return null;
  }
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
/**
 * Pre-authorizes requests with an Azure DevOps style PAT, sent as Basic
 * credentials with the fixed user name "PAT".
 */
class PersonalAccessTokenCredentialHandler {
  constructor(token) {
    this.token = token;
  }
  // currently implements pre-authorization
  // TODO: support preAuth = false where it hooks on 401
  prepareRequest(options) {
    const encoded = Buffer.from(`PAT:${this.token}`).toString('base64');
    options.headers['Authorization'] = `Basic ${encoded}`;
  }
  // This handler cannot handle 401
  canHandleAuthentication(response) {
    return false;
  }
  handleAuthentication(httpClient, requestInfo, objs) {
    return null;
  }
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
/***/ } ) ,
/***/ 245 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module . exports = globSync
globSync . GlobSync = GlobSync
var fs = _ _webpack _require _ _ ( 747 )
var rp = _ _webpack _require _ _ ( 302 )
var minimatch = _ _webpack _require _ _ ( 93 )
var Minimatch = minimatch . Minimatch
2021-04-06 22:50:27 +02:00
var Glob = _ _webpack _require _ _ ( 402 ) . Glob
2020-04-28 15:45:21 +02:00
var util = _ _webpack _require _ _ ( 669 )
var path = _ _webpack _require _ _ ( 622 )
var assert = _ _webpack _require _ _ ( 357 )
var isAbsolute = _ _webpack _require _ _ ( 681 )
var common = _ _webpack _require _ _ ( 856 )
var alphasort = common . alphasort
var alphasorti = common . alphasorti
var setopts = common . setopts
var ownProp = common . ownProp
var childrenIgnored = common . childrenIgnored
var isIgnored = common . isIgnored
/**
 * Synchronous glob: walks the filesystem for `pattern` and returns the list
 * of matches.
 *
 * @param {string} pattern glob pattern
 * @param {Object} [options] GlobSync options
 * @returns {string[]} matched paths
 */
function globSync(pattern, options) {
  const callbackProvided = typeof options === 'function' || arguments.length === 3;
  if (callbackProvided) {
    throw new TypeError('callback provided to sync glob\nSee: https://github.com/isaacs/node-glob/issues/167');
  }
  return new GlobSync(pattern, options).found;
}
function GlobSync ( pattern , options ) {
if ( ! pattern )
throw new Error ( 'must provide pattern' )
if ( typeof options === 'function' || arguments . length === 3 )
throw new TypeError ( 'callback provided to sync glob\n' +
'See: https://github.com/isaacs/node-glob/issues/167' )
if ( ! ( this instanceof GlobSync ) )
return new GlobSync ( pattern , options )
setopts ( this , pattern , options )
if ( this . noprocess )
return this
var n = this . minimatch . set . length
this . matches = new Array ( n )
for ( var i = 0 ; i < n ; i ++ ) {
this . _process ( this . minimatch . set [ i ] , i , false )
}
this . _finish ( )
}
GlobSync . prototype . _finish = function ( ) {
assert ( this instanceof GlobSync )
if ( this . realpath ) {
var self = this
this . matches . forEach ( function ( matchset , index ) {
var set = self . matches [ index ] = Object . create ( null )
for ( var p in matchset ) {
try {
p = self . _makeAbs ( p )
var real = rp . realpathSync ( p , self . realpathCache )
set [ real ] = true
} catch ( er ) {
if ( er . syscall === 'stat' )
set [ self . _makeAbs ( p ) ] = true
else
throw er
}
}
} )
}
common . finish ( this )
}
GlobSync . prototype . _process = function ( pattern , index , inGlobStar ) {
assert ( this instanceof GlobSync )
// Get the first [n] parts of pattern that are all strings.
var n = 0
while ( typeof pattern [ n ] === 'string' ) {
n ++
}
// now n is the index of the first one that is *not* a string.
// See if there's anything else
var prefix
switch ( n ) {
// if not, then this is rather simple
case pattern . length :
this . _processSimple ( pattern . join ( '/' ) , index )
return
case 0 :
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default :
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern . slice ( 0 , n ) . join ( '/' )
break
}
var remain = pattern . slice ( n )
// get the list of entries.
var read
if ( prefix === null )
read = '.'
else if ( isAbsolute ( prefix ) || isAbsolute ( pattern . join ( '/' ) ) ) {
if ( ! prefix || ! isAbsolute ( prefix ) )
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this . _makeAbs ( read )
//if ignored, skip processing
if ( childrenIgnored ( this , read ) )
return
var isGlobStar = remain [ 0 ] === minimatch . GLOBSTAR
if ( isGlobStar )
this . _processGlobStar ( prefix , read , abs , remain , index , inGlobStar )
else
this . _processReaddir ( prefix , read , abs , remain , index , inGlobStar )
}
GlobSync . prototype . _processReaddir = function ( prefix , read , abs , remain , index , inGlobStar ) {
var entries = this . _readdir ( abs , inGlobStar )
// if the abs isn't a dir, then nothing can match!
if ( ! entries )
return
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain [ 0 ]
var negate = ! ! this . minimatch . negate
var rawGlob = pn . _glob
var dotOk = this . dot || rawGlob . charAt ( 0 ) === '.'
var matchedEntries = [ ]
for ( var i = 0 ; i < entries . length ; i ++ ) {
var e = entries [ i ]
if ( e . charAt ( 0 ) !== '.' || dotOk ) {
var m
if ( negate && ! prefix ) {
m = ! e . match ( pn )
} else {
m = e . match ( pn )
}
if ( m )
matchedEntries . push ( e )
}
}
var len = matchedEntries . length
// If there are no matched entries, then nothing matches.
if ( len === 0 )
return
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if ( remain . length === 1 && ! this . mark && ! this . stat ) {
if ( ! this . matches [ index ] )
this . matches [ index ] = Object . create ( null )
for ( var i = 0 ; i < len ; i ++ ) {
var e = matchedEntries [ i ]
if ( prefix ) {
if ( prefix . slice ( - 1 ) !== '/' )
e = prefix + '/' + e
else
e = prefix + e
}
if ( e . charAt ( 0 ) === '/' && ! this . nomount ) {
e = path . join ( this . root , e )
}
this . _emitMatch ( index , e )
}
// This was the last one, and no stats were needed
return
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain . shift ( )
for ( var i = 0 ; i < len ; i ++ ) {
var e = matchedEntries [ i ]
var newPattern
if ( prefix )
newPattern = [ prefix , e ]
else
newPattern = [ e ]
this . _process ( newPattern . concat ( remain ) , index , inGlobStar )
}
}
GlobSync . prototype . _emitMatch = function ( index , e ) {
if ( isIgnored ( this , e ) )
return
var abs = this . _makeAbs ( e )
if ( this . mark )
e = this . _mark ( e )
if ( this . absolute ) {
e = abs
}
if ( this . matches [ index ] [ e ] )
return
if ( this . nodir ) {
var c = this . cache [ abs ]
if ( c === 'DIR' || Array . isArray ( c ) )
return
}
this . matches [ index ] [ e ] = true
if ( this . stat )
this . _stat ( e )
}
GlobSync . prototype . _readdirInGlobStar = function ( abs ) {
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if ( this . follow )
return this . _readdir ( abs , false )
var entries
var lstat
var stat
try {
lstat = fs . lstatSync ( abs )
} catch ( er ) {
if ( er . code === 'ENOENT' ) {
// lstat failed, doesn't exist
return null
}
}
var isSym = lstat && lstat . isSymbolicLink ( )
this . symlinks [ abs ] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if ( ! isSym && lstat && ! lstat . isDirectory ( ) )
this . cache [ abs ] = 'FILE'
else
entries = this . _readdir ( abs , false )
return entries
}
GlobSync . prototype . _readdir = function ( abs , inGlobStar ) {
var entries
if ( inGlobStar && ! ownProp ( this . symlinks , abs ) )
return this . _readdirInGlobStar ( abs )
if ( ownProp ( this . cache , abs ) ) {
var c = this . cache [ abs ]
if ( ! c || c === 'FILE' )
return null
if ( Array . isArray ( c ) )
return c
}
try {
return this . _readdirEntries ( abs , fs . readdirSync ( abs ) )
} catch ( er ) {
this . _readdirError ( abs , er )
return null
}
}
// Record a directory's entries in the cache and mark each child as existing.
GlobSync.prototype._readdirEntries = function (abs, entries) {
  // Unless mark/stat will force a later stat anyway, assume every child
  // exists so it never needs to be stat'ed a second time.
  if (!this.mark && !this.stat) {
    for (const entry of entries) {
      const full = abs === '/' ? abs + entry : abs + '/' + entry;
      this.cache[full] = true;
    }
  }

  // mark and cache dir-ness
  this.cache[abs] = entries;
  return entries;
};
GlobSync . prototype . _readdirError = function ( f , er ) {
// handle errors, and cache the information
switch ( er . code ) {
case 'ENOTSUP' : // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR' : // totally normal. means it *does* exist.
var abs = this . _makeAbs ( f )
this . cache [ abs ] = 'FILE'
if ( abs === this . cwdAbs ) {
var error = new Error ( er . code + ' invalid cwd ' + this . cwd )
error . path = this . cwd
error . code = er . code
throw error
}
break
case 'ENOENT' : // not terribly unusual
case 'ELOOP' :
case 'ENAMETOOLONG' :
case 'UNKNOWN' :
this . cache [ this . _makeAbs ( f ) ] = false
break
default : // some unusual error. Treat as failure.
this . cache [ this . _makeAbs ( f ) ] = false
if ( this . strict )
throw er
if ( ! this . silent )
console . error ( 'glob error' , er )
break
}
}
GlobSync . prototype . _processGlobStar = function ( prefix , read , abs , remain , index , inGlobStar ) {
var entries = this . _readdir ( abs , inGlobStar )
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if ( ! entries )
return
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain . slice ( 1 )
var gspref = prefix ? [ prefix ] : [ ]
var noGlobStar = gspref . concat ( remainWithoutGlobStar )
// the noGlobStar pattern exits the inGlobStar state
this . _process ( noGlobStar , index , false )
var len = entries . length
var isSym = this . symlinks [ abs ]
// If it's a symlink, and we're in a globstar, then stop
if ( isSym && inGlobStar )
return
for ( var i = 0 ; i < len ; i ++ ) {
var e = entries [ i ]
if ( e . charAt ( 0 ) === '.' && ! this . dot )
continue
// these two cases enter the inGlobStar state
var instead = gspref . concat ( entries [ i ] , remainWithoutGlobStar )
this . _process ( instead , index , true )
var below = gspref . concat ( entries [ i ] , remain )
this . _process ( below , index , true )
}
}
GlobSync . prototype . _processSimple = function ( prefix , index ) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var exists = this . _stat ( prefix )
if ( ! this . matches [ index ] )
this . matches [ index ] = Object . create ( null )
// If it doesn't exist, then just mark the lack of results
if ( ! exists )
return
if ( prefix && isAbsolute ( prefix ) && ! this . nomount ) {
var trail = /[\/\\]$/ . test ( prefix )
if ( prefix . charAt ( 0 ) === '/' ) {
prefix = path . join ( this . root , prefix )
} else {
prefix = path . resolve ( this . root , prefix )
if ( trail )
prefix += '/'
}
}
if ( process . platform === 'win32' )
prefix = prefix . replace ( /\\/g , '/' )
// Mark this as a match
this . _emitMatch ( index , prefix )
}
// Returns either 'DIR', 'FILE', or false
GlobSync . prototype . _stat = function ( f ) {
var abs = this . _makeAbs ( f )
var needDir = f . slice ( - 1 ) === '/'
if ( f . length > this . maxLength )
return false
if ( ! this . stat && ownProp ( this . cache , abs ) ) {
var c = this . cache [ abs ]
if ( Array . isArray ( c ) )
c = 'DIR'
// It exists, but maybe not how we need it
if ( ! needDir || c === 'DIR' )
return c
if ( needDir && c === 'FILE' )
return false
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this . statCache [ abs ]
if ( ! stat ) {
var lstat
try {
lstat = fs . lstatSync ( abs )
} catch ( er ) {
if ( er && ( er . code === 'ENOENT' || er . code === 'ENOTDIR' ) ) {
this . statCache [ abs ] = false
return false
}
}
if ( lstat && lstat . isSymbolicLink ( ) ) {
try {
stat = fs . statSync ( abs )
} catch ( er ) {
stat = lstat
}
} else {
stat = lstat
}
}
this . statCache [ abs ] = stat
var c = true
if ( stat )
c = stat . isDirectory ( ) ? 'DIR' : 'FILE'
this . cache [ abs ] = this . cache [ abs ] || c
if ( needDir && c === 'FILE' )
return false
return c
}
GlobSync . prototype . _mark = function ( p ) {
return common . mark ( this , p )
}
GlobSync . prototype . _makeAbs = function ( f ) {
return common . makeAbs ( this , f )
}
/***/ } ) ,
/***/ 302 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module . exports = realpath
realpath . realpath = realpath
realpath . sync = realpathSync
realpath . realpathSync = realpathSync
realpath . monkeypatch = monkeypatch
realpath . unmonkeypatch = unmonkeypatch
var fs = _ _webpack _require _ _ ( 747 )
var origRealpath = fs . realpath
var origRealpathSync = fs . realpathSync
var version = process . version
var ok = /^v[0-5]\./ . test ( version )
var old = _ _webpack _require _ _ ( 117 )
// True when the error is a realpath failure that the pure-JS fallback can
// handle: cycles (ELOOP), out-of-memory, or overly long paths.
function newError(er) {
  if (!er) return er;
  const fallbackCodes = ['ELOOP', 'ENOMEM', 'ENAMETOOLONG'];
  return er.syscall === 'realpath' && fallbackCodes.includes(er.code);
}
function realpath ( p , cache , cb ) {
if ( ok ) {
return origRealpath ( p , cache , cb )
}
if ( typeof cache === 'function' ) {
cb = cache
cache = null
}
origRealpath ( p , cache , function ( er , result ) {
if ( newError ( er ) ) {
old . realpath ( p , cache , cb )
} else {
cb ( er , result )
}
} )
}
function realpathSync ( p , cache ) {
if ( ok ) {
return origRealpathSync ( p , cache )
}
try {
return origRealpathSync ( p , cache )
} catch ( er ) {
if ( newError ( er ) ) {
return old . realpathSync ( p , cache )
} else {
throw er
}
}
}
function monkeypatch ( ) {
fs . realpath = realpath
fs . realpathSync = realpathSync
}
function unmonkeypatch ( ) {
fs . realpath = origRealpath
fs . realpathSync = origRealpathSync
}
/***/ } ) ,
/***/ 306 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var concatMap = _ _webpack _require _ _ ( 896 ) ;
var balanced = _ _webpack _require _ _ ( 621 ) ;
module . exports = expandTop ;
var escSlash = '\0SLASH' + Math . random ( ) + '\0' ;
var escOpen = '\0OPEN' + Math . random ( ) + '\0' ;
var escClose = '\0CLOSE' + Math . random ( ) + '\0' ;
var escComma = '\0COMMA' + Math . random ( ) + '\0' ;
var escPeriod = '\0PERIOD' + Math . random ( ) + '\0' ;
// Convert a sequence endpoint to its numeric value: a decimal string becomes
// the integer it spells, anything else becomes its first character code.
function numeric(str) {
  const parsed = parseInt(str, 10);
  // Loose == deliberately matches padded strings like '010' to their value.
  return parsed == str ? parsed : str.charCodeAt(0);
}
// Replace backslash-escaped specials with unique placeholder tokens so the
// expansion logic never mistakes them for syntax.
function escapeBraces(str) {
  return str
    .replaceAll('\\\\', escSlash)
    .replaceAll('\\{', escOpen)
    .replaceAll('\\}', escClose)
    .replaceAll('\\,', escComma)
    .replaceAll('\\.', escPeriod);
}
// Inverse of escapeBraces: restore the literal characters the placeholder
// tokens stand for.
function unescapeBraces(str) {
  return str
    .replaceAll(escSlash, '\\')
    .replaceAll(escOpen, '{')
    .replaceAll(escClose, '}')
    .replaceAll(escComma, ',')
    .replaceAll(escPeriod, '.');
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts ( str ) {
if ( ! str )
return [ '' ] ;
var parts = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m )
return str . split ( ',' ) ;
var pre = m . pre ;
var body = m . body ;
var post = m . post ;
var p = pre . split ( ',' ) ;
p [ p . length - 1 ] += '{' + body + '}' ;
var postParts = parseCommaParts ( post ) ;
if ( post . length ) {
p [ p . length - 1 ] += postParts . shift ( ) ;
p . push . apply ( p , postParts ) ;
}
parts . push . apply ( parts , p ) ;
return parts ;
}
function expandTop ( str ) {
if ( ! str )
return [ ] ;
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if ( str . substr ( 0 , 2 ) === '{}' ) {
str = '\\{\\}' + str . substr ( 2 ) ;
}
return expand ( escapeBraces ( str ) , true ) . map ( unescapeBraces ) ;
}
// Identity transform, used where a map callback is required but no
// transformation is wanted.
function identity(e) {
  return e;
}

// Re-wrap a string in braces: 'a,b' -> '{a,b}'.
function embrace(str) {
  return `{${str}}`;
}

// A numeric endpoint is zero-padded when it starts with (-)0 plus a digit.
function isPadded(el) {
  return /^-?0\d/.test(el);
}

// Comparators for ascending / descending sequence iteration.
function lte(i, y) {
  return i <= y;
}

function gte(i, y) {
  return i >= y;
}
function expand ( str , isTop ) {
var expansions = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m || /\$$/ . test ( m . pre ) ) return [ str ] ;
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isSequence = isNumericSequence || isAlphaSequence ;
var isOptions = m . body . indexOf ( ',' ) >= 0 ;
if ( ! isSequence && ! isOptions ) {
// {a},b}
if ( m . post . match ( /,.*\}/ ) ) {
str = m . pre + '{' + m . body + escClose + m . post ;
return expand ( str ) ;
}
return [ str ] ;
}
var n ;
if ( isSequence ) {
n = m . body . split ( /\.\./ ) ;
} else {
n = parseCommaParts ( m . body ) ;
if ( n . length === 1 ) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand ( n [ 0 ] , false ) . map ( embrace ) ;
if ( n . length === 1 ) {
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
return post . map ( function ( p ) {
return m . pre + n [ 0 ] + p ;
} ) ;
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m . pre ;
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
var N ;
if ( isSequence ) {
var x = numeric ( n [ 0 ] ) ;
var y = numeric ( n [ 1 ] ) ;
var width = Math . max ( n [ 0 ] . length , n [ 1 ] . length )
var incr = n . length == 3
? Math . abs ( numeric ( n [ 2 ] ) )
: 1 ;
var test = lte ;
var reverse = y < x ;
if ( reverse ) {
incr *= - 1 ;
test = gte ;
}
var pad = n . some ( isPadded ) ;
N = [ ] ;
for ( var i = x ; test ( i , y ) ; i += incr ) {
var c ;
if ( isAlphaSequence ) {
c = String . fromCharCode ( i ) ;
if ( c === '\\' )
c = '' ;
} else {
c = String ( i ) ;
if ( pad ) {
var need = width - c . length ;
if ( need > 0 ) {
var z = new Array ( need + 1 ) . join ( '0' ) ;
if ( i < 0 )
c = '-' + z + c . slice ( 1 ) ;
else
c = z + c ;
}
}
}
N . push ( c ) ;
}
} else {
N = concatMap ( n , function ( el ) { return expand ( el , false ) } ) ;
}
for ( var j = 0 ; j < N . length ; j ++ ) {
for ( var k = 0 ; k < post . length ; k ++ ) {
var expansion = pre + N [ j ] + post [ k ] ;
if ( ! isTop || isSequence || expansion )
expansions . push ( expansion ) ;
}
}
return expansions ;
}
/***/ } ) ,
/***/ 315 :
/***/ ( function ( module ) {
// `inherits` polyfill: wire ctor's prototype chain to superCtor's and expose
// the parent as ctor.super_ (Node util.inherits compatible).
if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    if (superCtor) {
      ctor.super_ = superCtor
      ctor.prototype = Object.create(superCtor.prototype, {
        constructor: {
          value: ctor,
          enumerable: false,
          writable: true,
          configurable: true
        }
      })
    }
  };
} else {
  // old school shim for old browsers
  module.exports = function inherits(ctor, superCtor) {
    if (superCtor) {
      ctor.super_ = superCtor
      // Intermediate constructor avoids invoking superCtor itself.
      var TempCtor = function () {}
      TempCtor.prototype = superCtor.prototype
      ctor.prototype = new TempCtor()
      ctor.prototype.constructor = ctor
    }
  }
}
2023-03-08 22:06:44 +01:00
/***/ } ) ,
/***/ 327 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _default = '00000000-0000-0000-0000-000000000000' ;
exports . default = _default ;
2020-04-28 15:45:21 +02:00
/***/ } ) ,
/***/ 357 :
/***/ ( function ( module ) {
// Module 357: webpack external — re-export Node's built-in `assert` module.
module.exports = require("assert");
/***/ } ) ,
/***/ 359 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript's __awaiter helper: drives a generator function as if it were
// an async function — each yielded value is adopted into a promise of type P
// (defaulting to the global Promise) and fed back into the generator.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function toPromise(value) {
        return value instanceof P ? value : new P(function (resolve) { resolve(value); });
    }
    return new (P || (P = Promise))(function (resolve, reject) {
        var iterator = generator.apply(thisArg, _arguments || []);
        function advance(result) {
            if (result.done) {
                resolve(result.value);
            } else {
                toPromise(result.value).then(onFulfilled, onRejected);
            }
        }
        function onFulfilled(value) {
            try { advance(iterator.next(value)); } catch (e) { reject(e); }
        }
        function onRejected(err) {
            try { advance(iterator["throw"](err)); } catch (e) { reject(e); }
        }
        onFulfilled(void 0);
    });
};
// TypeScript's __importStar helper: wraps a CommonJS export so it can be
// consumed like an ES-module namespace. Real ES modules pass through; for
// anything else, own enumerable properties are copied and the original
// object is exposed as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var namespace = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                namespace[key] = mod[key];
            }
        }
    }
    namespace["default"] = mod;
    return namespace;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const upload _specification _1 = _ _webpack _require _ _ ( 590 ) ;
const upload _http _client _1 = _ _webpack _require _ _ ( 608 ) ;
const utils _1 = _ _webpack _require _ _ ( 870 ) ;
2021-12-07 17:44:54 +01:00
const path _and _artifact _name _validation _1 = _ _webpack _require _ _ ( 553 ) ;
2020-04-28 15:45:21 +02:00
const download _http _client _1 = _ _webpack _require _ _ ( 855 ) ;
const download _specification _1 = _ _webpack _require _ _ ( 532 ) ;
const config _variables _1 = _ _webpack _require _ _ ( 401 ) ;
const path _1 = _ _webpack _require _ _ ( 622 ) ;
// NOTE(review): this dump was whitespace-mangled (broken identifiers and
// template interpolations, interleaved timestamp lines); the body below is a
// clean reconstruction — confirm log strings against upstream @actions/artifact.
class DefaultArtifactClient {
    /**
     * Constructs a DefaultArtifactClient
     */
    static create() {
        return new DefaultArtifactClient();
    }
    /**
     * Uploads an artifact
     */
    uploadArtifact(name, files, rootDirectory, options) {
        return __awaiter(this, void 0, void 0, function* () {
            core.info(`Starting artifact upload
For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`);
            path_and_artifact_name_validation_1.checkArtifactName(name);
            // Get specification for the files being uploaded
            const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files);
            const uploadResponse = {
                artifactName: name,
                artifactItems: [],
                size: 0,
                failedItems: []
            };
            const uploadHttpClient = new upload_http_client_1.UploadHttpClient();
            if (uploadSpecification.length === 0) {
                core.warning(`No files found that can be uploaded`);
            }
            else {
                // Create an entry for the artifact in the file container
                const response = yield uploadHttpClient.createArtifactInFileContainer(name, options);
                if (!response.fileContainerResourceUrl) {
                    core.debug(response.toString());
                    throw new Error('No URL provided by the Artifact Service to upload an artifact to');
                }
                core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`);
                core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`);
                // Upload each of the files that were found concurrently
                const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options);
                // Update the size of the artifact to indicate we are done uploading
                // The uncompressed size is used for display when downloading a zip of the artifact from the UI
                core.info(`File upload process has finished. Finalizing the artifact upload`);
                yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name);
                if (uploadResult.failedItems.length > 0) {
                    core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`);
                }
                else {
                    core.info(`Artifact has been finalized. All files have been successfully uploaded!`);
                }
                core.info(`
The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes
The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage
Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`);
                uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath);
                uploadResponse.size = uploadResult.uploadSize;
                uploadResponse.failedItems = uploadResult.failedItems;
            }
            return uploadResponse;
        });
    }
    downloadArtifact(name, path, options) {
        return __awaiter(this, void 0, void 0, function* () {
            const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
            const artifacts = yield downloadHttpClient.listArtifacts();
            if (artifacts.count === 0) {
                throw new Error(`Unable to find any artifacts for the associated workflow`);
            }
            const artifactToDownload = artifacts.value.find(artifact => {
                return artifact.name === name;
            });
            if (!artifactToDownload) {
                throw new Error(`Unable to find an artifact with the name: ${name}`);
            }
            const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl);
            if (!path) {
                path = config_variables_1.getWorkSpaceDirectory();
            }
            path = path_1.normalize(path);
            path = path_1.resolve(path);
            // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
            const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false);
            if (downloadSpecification.filesToDownload.length === 0) {
                core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`);
            }
            else {
                // Create all necessary directories recursively before starting any download
                yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure);
                core.info('Directory structure has been setup for the artifact');
                yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate);
                yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
            }
            return {
                artifactName: name,
                downloadPath: downloadSpecification.rootDownloadLocation
            };
        });
    }
    downloadAllArtifacts(path) {
        return __awaiter(this, void 0, void 0, function* () {
            const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
            const response = [];
            const artifacts = yield downloadHttpClient.listArtifacts();
            if (artifacts.count === 0) {
                core.info('Unable to find any artifacts for the associated workflow');
                return response;
            }
            if (!path) {
                path = config_variables_1.getWorkSpaceDirectory();
            }
            path = path_1.normalize(path);
            path = path_1.resolve(path);
            let downloadedArtifacts = 0;
            while (downloadedArtifacts < artifacts.count) {
                const currentArtifactToDownload = artifacts.value[downloadedArtifacts];
                downloadedArtifacts += 1;
                core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`);
                // Get container entries for the specific artifact
                const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl);
                const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true);
                if (downloadSpecification.filesToDownload.length === 0) {
                    core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`);
                }
                else {
                    yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure);
                    yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate);
                    yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
                }
                response.push({
                    artifactName: currentArtifactToDownload.name,
                    downloadPath: downloadSpecification.rootDownloadLocation
                });
            }
            return response;
        });
    }
}
exports.DefaultArtifactClient = DefaultArtifactClient;
//# sourceMappingURL=artifact-client.js.map
/***/ } ) ,
2023-03-08 22:06:44 +01:00
/***/ 363 :
2020-04-28 15:45:21 +02:00
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
2023-03-08 22:06:44 +01:00
// TypeScript's __awaiter helper (second copy, for the auth module): runs a
// generator as an async function, adopting every yielded value into a promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function toPromise(value) {
        return value instanceof P ? value : new P(function (resolve) { resolve(value); });
    }
    return new (P || (P = Promise))(function (resolve, reject) {
        var iterator = generator.apply(thisArg, _arguments || []);
        function advance(result) {
            if (result.done) {
                resolve(result.value);
            } else {
                toPromise(result.value).then(onFulfilled, onRejected);
            }
        }
        function onFulfilled(value) {
            try { advance(iterator.next(value)); } catch (e) { reject(e); }
        }
        function onRejected(err) {
            try { advance(iterator["throw"](err)); } catch (e) { reject(e); }
        }
        onFulfilled(void 0);
    });
};
2020-04-28 15:45:21 +02:00
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2023-03-08 22:06:44 +01:00
exports . PersonalAccessTokenCredentialHandler = exports . BearerCredentialHandler = exports . BasicCredentialHandler = void 0 ;
/**
 * Request handler that applies HTTP Basic authentication:
 * base64("username:password") in the Authorization header.
 */
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        const encoded = Buffer.from(`${this.username}:${this.password}`).toString('base64');
        options.headers['Authorization'] = `Basic ${encoded}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
2023-03-08 22:06:44 +01:00
exports . BasicCredentialHandler = BasicCredentialHandler ;
/**
 * Request handler that attaches a pre-authorized `Bearer <token>`
 * Authorization header to every outgoing request.
 */
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        const header = `Bearer ${this.token}`;
        options.headers['Authorization'] = header;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
/**
 * Request handler for Azure-DevOps-style personal access tokens:
 * sends `Basic base64("PAT:<token>")` in the Authorization header.
 */
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        const encoded = Buffer.from(`PAT:${this.token}`).toString('base64');
        options.headers['Authorization'] = `Basic ${encoded}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
//# sourceMappingURL=auth.js.map
/***/ } ) ,
/***/ 384 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _v = _interopRequireDefault ( _ _webpack _require _ _ ( 212 ) ) ;
var _sha = _interopRequireDefault ( _ _webpack _require _ _ ( 498 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
const v5 = ( 0 , _v . default ) ( 'v5' , 0x50 , _sha . default ) ;
var _default = v5 ;
exports . default = _default ;
/***/ } ) ,
/***/ 401 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
// The number of concurrent uploads that happens at the same time
/**
 * The number of concurrent uploads that happens at the same time.
 * @returns {number} fixed upload concurrency
 */
function getUploadFileConcurrency() {
    return 2;
}
exports . getUploadFileConcurrency = getUploadFileConcurrency ;
// When uploading large files that can't be uploaded with a single http call, this controls
// the chunk size that is used during upload
/**
 * Chunk size used when a file is too large for a single HTTP call.
 * @returns {number} chunk size in bytes (8 MB)
 */
function getUploadChunkSize() {
    return 8 * 1024 * 1024; // 8 MB Chunks
}
exports . getUploadChunkSize = getUploadChunkSize ;
// The maximum number of retries that can be attempted before an upload or download fails
/**
 * Maximum number of retries before an upload or download fails.
 * @returns {number} retry limit
 */
function getRetryLimit() {
    return 5;
}
exports . getRetryLimit = getRetryLimit ;
// With exponential backoff, the larger the retry count, the larger the wait time before another attempt
// The retry multiplier controls by how much the backOff time increases depending on the number of retries
/**
 * Exponential-backoff multiplier: how much the wait grows per retry.
 * @returns {number} backoff multiplier
 */
function getRetryMultiplier() {
    return 1.5;
}
exports . getRetryMultiplier = getRetryMultiplier ;
// The initial wait time if an upload or download fails and a retry is being attempted for the first time
/**
 * Initial wait before the first retry of a failed upload/download.
 * @returns {number} milliseconds
 */
function getInitialRetryIntervalInMilliseconds() {
    return 3000;
}
exports . getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds ;
// The number of concurrent downloads that happens at the same time
/**
 * The number of concurrent downloads that happens at the same time.
 * @returns {number} fixed download concurrency
 */
function getDownloadFileConcurrency() {
    return 2;
}
exports . getDownloadFileConcurrency = getDownloadFileConcurrency ;
/**
 * Reads the Actions runtime token from the environment.
 * @returns {string} the token
 * @throws {Error} when ACTIONS_RUNTIME_TOKEN is unset or empty
 */
function getRuntimeToken() {
    const value = process.env['ACTIONS_RUNTIME_TOKEN'];
    if (!value) {
        throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable');
    }
    return value;
}
exports . getRuntimeToken = getRuntimeToken ;
/**
 * Reads the Actions runtime service URL from the environment.
 * @returns {string} the runtime URL
 * @throws {Error} when ACTIONS_RUNTIME_URL is unset or empty
 */
function getRuntimeUrl() {
    const value = process.env['ACTIONS_RUNTIME_URL'];
    if (!value) {
        throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable');
    }
    return value;
}
exports . getRuntimeUrl = getRuntimeUrl ;
/**
 * Reads the current workflow run id from the environment.
 * @returns {string} the run id (as a string)
 * @throws {Error} when GITHUB_RUN_ID is unset or empty
 */
function getWorkFlowRunId() {
    const runId = process.env['GITHUB_RUN_ID'];
    if (!runId) {
        throw new Error('Unable to get GITHUB_RUN_ID env variable');
    }
    return runId;
}
exports . getWorkFlowRunId = getWorkFlowRunId ;
/**
 * Reads the workspace directory for the current job from the environment.
 * @returns {string} the workspace path
 * @throws {Error} when GITHUB_WORKSPACE is unset or empty
 */
function getWorkSpaceDirectory() {
    const dir = process.env['GITHUB_WORKSPACE'];
    if (!dir) {
        throw new Error('Unable to get GITHUB_WORKSPACE env variable');
    }
    return dir;
}
exports . getWorkSpaceDirectory = getWorkSpaceDirectory ;
2020-12-15 16:55:26 +01:00
/**
 * Retention period configured for the repository, if any.
 * @returns {string|undefined} GITHUB_RETENTION_DAYS, or undefined when unset
 */
function getRetentionDays() {
    return process.env['GITHUB_RETENTION_DAYS'];
}
exports . getRetentionDays = getRetentionDays ;
2020-04-28 15:45:21 +02:00
//# sourceMappingURL=config-variables.js.map
/***/ } ) ,
/***/ 402 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2021-04-06 22:50:27 +02:00
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
module . exports = glob
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
var fs = _ _webpack _require _ _ ( 747 )
var rp = _ _webpack _require _ _ ( 302 )
var minimatch = _ _webpack _require _ _ ( 93 )
var Minimatch = minimatch . Minimatch
var inherits = _ _webpack _require _ _ ( 689 )
var EE = _ _webpack _require _ _ ( 614 ) . EventEmitter
var path = _ _webpack _require _ _ ( 622 )
var assert = _ _webpack _require _ _ ( 357 )
var isAbsolute = _ _webpack _require _ _ ( 681 )
var globSync = _ _webpack _require _ _ ( 245 )
var common = _ _webpack _require _ _ ( 856 )
var alphasort = common . alphasort
var alphasorti = common . alphasorti
var setopts = common . setopts
var ownProp = common . ownProp
var inflight = _ _webpack _require _ _ ( 674 )
var util = _ _webpack _require _ _ ( 669 )
var childrenIgnored = common . childrenIgnored
var isIgnored = common . isIgnored
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
var once = _ _webpack _require _ _ ( 49 )
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/**
 * Entry point of the glob module.
 * glob(pattern, [options], [cb]) — async by default; `options.sync`
 * delegates to globSync (in which case a callback is an error).
 */
function glob(pattern, options, cb) {
  // Allow glob(pattern, cb) — options are optional.
  if (typeof options === 'function') {
    cb = options;
    options = {};
  }
  if (!options) {
    options = {};
  }

  if (options.sync) {
    if (cb) {
      throw new TypeError('callback provided to sync glob');
    }
    return globSync(pattern, options);
  }

  return new Glob(pattern, options, cb);
}

glob.sync = globSync;
var GlobSync = glob.GlobSync = globSync.GlobSync;

// old api surface
glob.glob = glob;
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
/**
 * Shallow-copies own enumerable keys of `add` onto `origin`.
 * Non-object (or null) `add` is ignored; `origin` is returned either way.
 */
function extend(origin, add) {
  if (add === null || typeof add !== 'object') {
    return origin;
  }

  var keys = Object.keys(add);
  // Walk backwards, matching the original while(i--) iteration order.
  for (var idx = keys.length - 1; idx >= 0; idx--) {
    origin[keys[idx]] = add[keys[idx]];
  }

  return origin;
}
2021-04-06 22:50:27 +02:00
/**
 * Returns true when `pattern` contains glob magic (braces, wildcards, …).
 * Implemented by parsing the pattern with a no-op Glob and inspecting the
 * resulting minimatch set: more than one set entry, or any non-string part,
 * means magic is present.
 */
glob.hasMagic = function (pattern, options_) {
  var options = extend({}, options_);
  options.noprocess = true;

  var g = new Glob(pattern, options);
  var set = g.minimatch.set;

  if (!pattern) return false;
  if (set.length > 1) return true;

  return set[0].some(function (part) {
    return typeof part !== 'string';
  });
};
2021-04-06 22:50:27 +02:00
glob.Glob = Glob;
inherits(Glob, EE);

/**
 * Async Glob walker (an EventEmitter). Emits 'match' per result and 'end'
 * with all matches; an optional callback receives (err, matches).
 * `options.sync` redirects to GlobSync; `options.noprocess` parses only.
 */
function Glob(pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options;
    options = null;
  }

  if (options && options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob');
    return new GlobSync(pattern, options);
  }

  if (!(this instanceof Glob))
    return new Glob(pattern, options, cb);

  setopts(this, pattern, options);
  // Fix: the original set `this._didRealPath` (capital P) here, but every
  // reader/writer (_finish/_realpath) uses `_didRealpath`, so the flag set in
  // the constructor was dead. Both spellings are falsy before _realpath runs,
  // so behavior is unchanged; the property name is now consistent.
  this._didRealpath = false;

  // process each pattern in the minimatch set
  var n = this.minimatch.set.length;

  // The matches are stored as {<filename>: true,...} so that
  // duplicates are automagically pruned.
  // Later, we do an Object.keys() on these.
  // Keep them as a list so we can fill in when nonull is set.
  this.matches = new Array(n);

  if (typeof cb === 'function') {
    cb = once(cb);
    this.on('error', cb);
    this.on('end', function (matches) {
      cb(null, matches);
    });
  }

  var self = this;
  this._processing = 0;

  this._emitQueue = [];
  this._processQueue = [];
  this.paused = false;

  if (this.noprocess)
    return this;

  if (n === 0)
    return done();

  // `sync` guards against _finish firing before the constructor returns:
  // completions that land synchronously are deferred to the next tick.
  var sync = true;
  for (var i = 0; i < n; i++) {
    this._process(this.minimatch.set[i], i, false, done);
  }
  sync = false;

  function done() {
    --self._processing;
    if (self._processing <= 0) {
      if (sync) {
        process.nextTick(function () {
          self._finish();
        });
      } else {
        self._finish();
      }
    }
  }
}
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
// Finalize the walk: optionally resolve matches through realpath first,
// then let common.finish() flatten/sort/dedupe and emit 'end'.
Glob.prototype._finish = function () {
  assert(this instanceof Glob);
  if (this.aborted)
    return;

  // realpath mode defers finishing until every match has been resolved.
  if (this.realpath && !this._didRealpath)
    return this._realpath();

  common.finish(this);
  this.emit('end', this.found);
};
2021-04-06 22:50:27 +02:00
// Resolve every match set through fs.realpath, then re-enter _finish.
// Guarded by _didRealpath so it runs at most once.
Glob.prototype._realpath = function () {
  if (this._didRealpath)
    return;

  this._didRealpath = true;

  var n = this.matches.length;
  if (n === 0)
    return this._finish();

  var self = this;
  for (var i = 0; i < this.matches.length; i++) {
    this._realpathSet(i, next);
  }

  function next() {
    if (--n === 0)
      self._finish();
  }
};
2021-04-06 22:50:27 +02:00
// Resolve one match set (this.matches[index]) through rp.realpath,
// rebuilding the set keyed by resolved paths. Stat failures keep the
// absolute path; any other error is re-emitted.
Glob.prototype._realpathSet = function (index, cb) {
  var matchset = this.matches[index];
  if (!matchset)
    return cb();

  var found = Object.keys(matchset);
  var self = this;
  var n = found.length;

  if (n === 0)
    return cb();

  var set = this.matches[index] = Object.create(null);
  found.forEach(function (p, i) {
    // If there's a problem with the stat, then it means that
    // one or more of the links in the realpath couldn't be
    // resolved.  just return the abs value in that case.
    p = self._makeAbs(p);
    rp.realpath(p, self.realpathCache, function (er, real) {
      if (!er)
        set[real] = true;
      else if (er.syscall === 'stat')
        set[p] = true;
      else
        self.emit('error', er); // srsly wtf right here

      if (--n === 0) {
        self.matches[index] = set;
        cb();
      }
    });
  });
};
2021-04-06 22:50:27 +02:00
// Delegate trailing-slash marking of directory matches to common.mark.
Glob.prototype._mark = function (p) {
  return common.mark(this, p);
};

// Delegate cwd/root-relative absolute-path construction to common.makeAbs.
Glob.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f);
};

// Stop the walk permanently; in-flight callbacks become no-ops.
Glob.prototype.abort = function () {
  this.aborted = true;
  this.emit('abort');
};

// Pause the walk; matches and process requests queue up until resume().
Glob.prototype.pause = function () {
  if (!this.paused) {
    this.paused = true;
    this.emit('pause');
  }
};

// Resume a paused walk: flush queued match emissions first, then queued
// _process calls (undoing the _processing increment made when queued).
Glob.prototype.resume = function () {
  if (!this.paused)
    return;

  this.emit('resume');
  this.paused = false;

  if (this._emitQueue.length) {
    var eq = this._emitQueue.slice(0);
    this._emitQueue.length = 0;
    for (var i = 0; i < eq.length; i++) {
      var e = eq[i];
      this._emitMatch(e[0], e[1]);
    }
  }

  if (this._processQueue.length) {
    var pq = this._processQueue.slice(0);
    this._processQueue.length = 0;
    for (var j = 0; j < pq.length; j++) {
      var p = pq[j];
      this._processing--;
      this._process(p[0], p[1], p[2], p[3]);
    }
  }
};
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
// Core pattern-walker: split the pattern into its literal string prefix and
// the remainder, decide which directory to read, then dispatch to the
// globstar or plain readdir handler.
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
  assert(this instanceof Glob);
  assert(typeof cb === 'function');

  if (this.aborted)
    return;

  this._processing++;
  if (this.paused) {
    this._processQueue.push([pattern, index, inGlobStar, cb]);
    return;
  }

  // Get the first [n] parts of pattern that are all strings.
  var n = 0;
  while (typeof pattern[n] === 'string') {
    n++;
  }
  // now n is the index of the first one that is *not* a string.

  // see if there's anything else
  var prefix;
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index, cb);
      return;

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null;
      break;

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/');
      break;
  }

  var remain = pattern.slice(n);

  // get the list of entries.
  var read;
  if (prefix === null) {
    read = '.';
  } else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix;
    read = prefix;
  } else {
    read = prefix;
  }

  var abs = this._makeAbs(read);

  // if ignored, skip _processing
  if (childrenIgnored(this, read))
    return cb();

  var isGlobStar = remain[0] === minimatch.GLOBSTAR;
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb);
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb);
};
2021-04-06 22:50:27 +02:00
// Read the directory, then hand its entries to _processReaddir2.
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this;
  this._readdir(abs, inGlobStar, function (er, entries) {
    return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb);
  });
};
2021-04-06 22:50:27 +02:00
// Match directory entries against the next pattern part. Either emits
// matches directly (last part, no stat needed) or recurses into _process
// with each matched entry substituted for the part.
// Fix: removed the declared-but-never-used local `newPattern` from the
// recursion loop (dead code in the original).
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return cb();

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0];
  var negate = !!this.minimatch.negate;
  var rawGlob = pn._glob;
  var dotOk = this.dot || rawGlob.charAt(0) === '.';

  var matchedEntries = [];
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i];
    if (e.charAt(0) !== '.' || dotOk) {
      var m;
      if (negate && !prefix) {
        m = !e.match(pn);
      } else {
        m = e.match(pn);
      }
      if (m)
        matchedEntries.push(e);
    }
  }

  var len = matchedEntries.length;
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return cb();

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.
  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null);

    for (var i = 0; i < len; i++) {
      var e = matchedEntries[i];
      if (prefix) {
        if (prefix !== '/')
          e = prefix + '/' + e;
        else
          e = prefix + e;
      }
      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e);
      }
      this._emitMatch(index, e);
    }
    // This was the last one, and no stats were needed
    return cb();
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift();
  for (var i = 0; i < len; i++) {
    var e = matchedEntries[i];
    if (prefix) {
      if (prefix !== '/')
        e = prefix + '/' + e;
      else
        e = prefix + e;
    }
    this._process([e].concat(remain), index, inGlobStar, cb);
  }
  cb();
};
2021-04-06 22:50:27 +02:00
// Record a match in this.matches[index] and emit 'match' (plus 'stat' when
// a cached stat exists). Honors abort/ignore/pause, mark/absolute options,
// dedupe, and the nodir filter.
Glob.prototype._emitMatch = function (index, e) {
  if (this.aborted)
    return;

  if (isIgnored(this, e))
    return;

  if (this.paused) {
    this._emitQueue.push([index, e]);
    return;
  }

  var abs = isAbsolute(e) ? e : this._makeAbs(e);

  if (this.mark)
    e = this._mark(e);

  if (this.absolute)
    e = abs;

  // Already emitted — duplicates are pruned here.
  if (this.matches[index][e])
    return;

  if (this.nodir) {
    var c = this.cache[abs];
    if (c === 'DIR' || Array.isArray(c))
      return;
  }

  this.matches[index][e] = true;

  var st = this.statCache[abs];
  if (st)
    this.emit('stat', e, st);

  this.emit('match', e);
};
2021-04-06 22:50:27 +02:00
Glob . prototype . _readdirInGlobStar = function ( abs , cb ) {
if ( this . aborted )
return
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if ( this . follow )
return this . _readdir ( abs , false , cb )
var lstatkey = 'lstat\0' + abs
var self = this
var lstatcb = inflight ( lstatkey , lstatcb _ )
if ( lstatcb )
fs . lstat ( abs , lstatcb )
function lstatcb _ ( er , lstat ) {
if ( er && er . code === 'ENOENT' )
return cb ( )
var isSym = lstat && lstat . isSymbolicLink ( )
self . symlinks [ abs ] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if ( ! isSym && lstat && ! lstat . isDirectory ( ) ) {
self . cache [ abs ] = 'FILE'
cb ( )
} else
self . _readdir ( abs , false , cb )
}
2020-04-28 15:45:21 +02:00
}
2021-04-06 22:50:27 +02:00
Glob . prototype . _readdir = function ( abs , inGlobStar , cb ) {
if ( this . aborted )
return
cb = inflight ( 'readdir\0' + abs + '\0' + inGlobStar , cb )
if ( ! cb )
return
//console.error('RD %j %j', +inGlobStar, abs)
if ( inGlobStar && ! ownProp ( this . symlinks , abs ) )
return this . _readdirInGlobStar ( abs , cb )
if ( ownProp ( this . cache , abs ) ) {
var c = this . cache [ abs ]
if ( ! c || c === 'FILE' )
return cb ( )
if ( Array . isArray ( c ) )
return cb ( null , c )
}
var self = this
fs . readdir ( abs , readdirCb ( this , abs , cb ) )
}
function readdirCb ( self , abs , cb ) {
return function ( er , entries ) {
if ( er )
self . _readdirError ( abs , er , cb )
else
self . _readdirEntries ( abs , entries , cb )
}
}
Glob . prototype . _readdirEntries = function ( abs , entries , cb ) {
if ( this . aborted )
return
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if ( ! this . mark && ! this . stat ) {
for ( var i = 0 ; i < entries . length ; i ++ ) {
var e = entries [ i ]
if ( abs === '/' )
e = abs + e
else
e = abs + '/' + e
this . cache [ e ] = true
}
}
this . cache [ abs ] = entries
return cb ( null , entries )
}
Glob . prototype . _readdirError = function ( f , er , cb ) {
if ( this . aborted )
return
// handle errors, and cache the information
switch ( er . code ) {
case 'ENOTSUP' : // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR' : // totally normal. means it *does* exist.
var abs = this . _makeAbs ( f )
this . cache [ abs ] = 'FILE'
if ( abs === this . cwdAbs ) {
var error = new Error ( er . code + ' invalid cwd ' + this . cwd )
error . path = this . cwd
error . code = er . code
this . emit ( 'error' , error )
this . abort ( )
}
break
case 'ENOENT' : // not terribly unusual
case 'ELOOP' :
case 'ENAMETOOLONG' :
case 'UNKNOWN' :
this . cache [ this . _makeAbs ( f ) ] = false
break
default : // some unusual error. Treat as failure.
this . cache [ this . _makeAbs ( f ) ] = false
if ( this . strict ) {
this . emit ( 'error' , er )
// If the error is handled, then we abort
// if not, we threw out of here
this . abort ( )
}
if ( ! this . silent )
console . error ( 'glob error' , er )
break
}
return cb ( )
}
Glob . prototype . _processGlobStar = function ( prefix , read , abs , remain , index , inGlobStar , cb ) {
var self = this
this . _readdir ( abs , inGlobStar , function ( er , entries ) {
self . _processGlobStar2 ( prefix , read , abs , remain , index , inGlobStar , entries , cb )
} )
}
Glob . prototype . _processGlobStar2 = function ( prefix , read , abs , remain , index , inGlobStar , entries , cb ) {
//console.error('pgs2', prefix, remain[0], entries)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if ( ! entries )
return cb ( )
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain . slice ( 1 )
var gspref = prefix ? [ prefix ] : [ ]
var noGlobStar = gspref . concat ( remainWithoutGlobStar )
// the noGlobStar pattern exits the inGlobStar state
this . _process ( noGlobStar , index , false , cb )
var isSym = this . symlinks [ abs ]
var len = entries . length
// If it's a symlink, and we're in a globstar, then stop
if ( isSym && inGlobStar )
return cb ( )
for ( var i = 0 ; i < len ; i ++ ) {
var e = entries [ i ]
if ( e . charAt ( 0 ) === '.' && ! this . dot )
continue
// these two cases enter the inGlobStar state
var instead = gspref . concat ( entries [ i ] , remainWithoutGlobStar )
this . _process ( instead , index , true , cb )
var below = gspref . concat ( entries [ i ] , remain )
this . _process ( below , index , true , cb )
}
cb ( )
2020-04-28 15:45:21 +02:00
}
2021-04-06 22:50:27 +02:00
Glob . prototype . _processSimple = function ( prefix , index , cb ) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this
this . _stat ( prefix , function ( er , exists ) {
self . _processSimple2 ( prefix , index , er , exists , cb )
} )
2020-04-28 15:45:21 +02:00
}
2021-04-06 22:50:27 +02:00
Glob . prototype . _processSimple2 = function ( prefix , index , er , exists , cb ) {
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
//console.error('ps2', prefix, exists)
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
if ( ! this . matches [ index ] )
this . matches [ index ] = Object . create ( null )
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
// If it doesn't exist, then just mark the lack of results
if ( ! exists )
return cb ( )
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
if ( prefix && isAbsolute ( prefix ) && ! this . nomount ) {
var trail = /[\/\\]$/ . test ( prefix )
if ( prefix . charAt ( 0 ) === '/' ) {
prefix = path . join ( this . root , prefix )
} else {
prefix = path . resolve ( this . root , prefix )
if ( trail )
prefix += '/'
2020-04-28 15:45:21 +02:00
}
}
2021-04-06 22:50:27 +02:00
if ( process . platform === 'win32' )
prefix = prefix . replace ( /\\/g , '/' )
// Mark this as a match
this . _emitMatch ( index , prefix )
cb ( )
2020-04-28 15:45:21 +02:00
}
2021-04-06 22:50:27 +02:00
// Returns either 'DIR', 'FILE', or false
Glob . prototype . _stat = function ( f , cb ) {
var abs = this . _makeAbs ( f )
var needDir = f . slice ( - 1 ) === '/'
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
if ( f . length > this . maxLength )
return cb ( )
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
if ( ! this . stat && ownProp ( this . cache , abs ) ) {
var c = this . cache [ abs ]
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
if ( Array . isArray ( c ) )
c = 'DIR'
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
// It exists, but maybe not how we need it
if ( ! needDir || c === 'DIR' )
return cb ( null , c )
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
if ( needDir && c === 'FILE' )
return cb ( )
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
var exists
var stat = this . statCache [ abs ]
if ( stat !== undefined ) {
if ( stat === false )
return cb ( null , stat )
else {
var type = stat . isDirectory ( ) ? 'DIR' : 'FILE'
if ( needDir && type === 'FILE' )
return cb ( )
else
return cb ( null , type , stat )
}
}
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
var self = this
var statcb = inflight ( 'stat\0' + abs , lstatcb _ )
if ( statcb )
fs . lstat ( abs , statcb )
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
function lstatcb _ ( er , lstat ) {
if ( lstat && lstat . isSymbolicLink ( ) ) {
// If it's a symlink, then treat it as the target, unless
// the target does not exist, then treat it as a file.
return fs . stat ( abs , function ( er , stat ) {
if ( er )
self . _stat2 ( f , abs , null , lstat , cb )
else
self . _stat2 ( f , abs , er , stat , cb )
} )
} else {
self . _stat2 ( f , abs , er , lstat , cb )
}
}
}
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
Glob . prototype . _stat2 = function ( f , abs , er , stat , cb ) {
if ( er && ( er . code === 'ENOENT' || er . code === 'ENOTDIR' ) ) {
this . statCache [ abs ] = false
return cb ( )
2020-04-28 15:45:21 +02:00
}
2021-04-06 22:50:27 +02:00
var needDir = f . slice ( - 1 ) === '/'
this . statCache [ abs ] = stat
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
if ( abs . slice ( - 1 ) === '/' && stat && ! stat . isDirectory ( ) )
return cb ( null , false , stat )
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
var c = true
if ( stat )
c = stat . isDirectory ( ) ? 'DIR' : 'FILE'
this . cache [ abs ] = this . cache [ abs ] || c
2020-04-28 15:45:21 +02:00
2021-04-06 22:50:27 +02:00
if ( needDir && c === 'FILE' )
return cb ( )
return cb ( null , c , stat )
}
2020-04-28 15:45:21 +02:00
2023-03-08 22:06:44 +01:00
/***/ } ) ,
/***/ 411 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _validate = _interopRequireDefault ( _ _webpack _require _ _ ( 78 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/ * *
* Convert array of 16 byte values to UUID string format of the form :
* XXXXXXXX - XXXX - XXXX - XXXX - XXXXXXXXXXXX
* /
const byteToHex = [ ] ;
for ( let i = 0 ; i < 256 ; ++ i ) {
byteToHex . push ( ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ) ;
}
function stringify ( arr , offset = 0 ) {
// Note: Be careful editing this code! It's been tuned for performance
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
const uuid = ( byteToHex [ arr [ offset + 0 ] ] + byteToHex [ arr [ offset + 1 ] ] + byteToHex [ arr [ offset + 2 ] ] + byteToHex [ arr [ offset + 3 ] ] + '-' + byteToHex [ arr [ offset + 4 ] ] + byteToHex [ arr [ offset + 5 ] ] + '-' + byteToHex [ arr [ offset + 6 ] ] + byteToHex [ arr [ offset + 7 ] ] + '-' + byteToHex [ arr [ offset + 8 ] ] + byteToHex [ arr [ offset + 9 ] ] + '-' + byteToHex [ arr [ offset + 10 ] ] + byteToHex [ arr [ offset + 11 ] ] + byteToHex [ arr [ offset + 12 ] ] + byteToHex [ arr [ offset + 13 ] ] + byteToHex [ arr [ offset + 14 ] ] + byteToHex [ arr [ offset + 15 ] ] ) . toLowerCase ( ) ; // Consistency check for valid UUID. If this throws, it's likely due to one
// of the following:
// - One or more input array values don't map to a hex octet (leading to
// "undefined" in the uuid)
// - Invalid input values for the RFC `version` or `variant` fields
if ( ! ( 0 , _validate . default ) ( uuid ) ) {
throw TypeError ( 'Stringified UUID is invalid' ) ;
}
return uuid ;
}
var _default = stringify ;
exports . default = _default ;
2020-04-28 15:45:21 +02:00
/***/ } ) ,
/***/ 413 :
2023-03-08 22:06:44 +01:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module . exports = _ _webpack _require _ _ ( 141 ) ;
2020-04-28 15:45:21 +02:00
/***/ } ) ,
/***/ 417 :
/***/ ( function ( module ) {
module . exports = require ( "crypto" ) ;
/***/ } ) ,
/***/ 431 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
2023-03-08 22:06:44 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
2020-04-28 15:45:21 +02:00
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2023-03-08 22:06:44 +01:00
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
2020-04-28 15:45:21 +02:00
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2023-03-08 22:06:44 +01:00
exports . issue = exports . issueCommand = void 0 ;
2020-04-28 15:45:21 +02:00
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
2020-11-13 20:53:33 +01:00
const utils _1 = _ _webpack _require _ _ ( 82 ) ;
2020-04-28 15:45:21 +02:00
/ * *
* Commands
*
* Command Format :
* : : name key = value , key = value : : message
*
* Examples :
* : : warning : : This is the message
* : : set - env name = MY _VAR : : some value
* /
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
}
this . command = command ;
this . properties = properties ;
this . message = message ;
}
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
let first = true ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
if ( first ) {
first = false ;
}
else {
cmdStr += ',' ;
}
cmdStr += ` ${ key } = ${ escapeProperty ( val ) } ` ;
}
}
}
}
cmdStr += ` ${ CMD _STRING } ${ escapeData ( this . message ) } ` ;
return cmdStr ;
}
}
function escapeData ( s ) {
2020-11-13 20:53:33 +01:00
return utils _1 . toCommandValue ( s )
2020-04-28 15:45:21 +02:00
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' ) ;
}
function escapeProperty ( s ) {
2020-11-13 20:53:33 +01:00
return utils _1 . toCommandValue ( s )
2020-04-28 15:45:21 +02:00
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /:/g , '%3A' )
. replace ( /,/g , '%2C' ) ;
}
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 452 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const utils _1 = _ _webpack _require _ _ ( 870 ) ;
/ * *
* Used for managing http clients during either upload or download
* /
class HttpManager {
2020-07-31 17:16:59 +02:00
constructor ( clientCount , userAgent ) {
2020-04-28 15:45:21 +02:00
if ( clientCount < 1 ) {
throw new Error ( 'There must be at least one client' ) ;
}
2020-07-31 17:16:59 +02:00
this . userAgent = userAgent ;
this . clients = new Array ( clientCount ) . fill ( utils _1 . createHttpClient ( userAgent ) ) ;
2020-04-28 15:45:21 +02:00
}
getClient ( index ) {
return this . clients [ index ] ;
}
// client disposal is necessary if a keep-alive connection is used to properly close the connection
// for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292
disposeAndReplaceClient ( index ) {
this . clients [ index ] . dispose ( ) ;
2020-07-31 17:16:59 +02:00
this . clients [ index ] = utils _1 . createHttpClient ( this . userAgent ) ;
2020-04-28 15:45:21 +02:00
}
disposeAndReplaceAllClients ( ) {
for ( const [ index ] of this . clients . entries ( ) ) {
this . disposeAndReplaceClient ( index ) ;
}
}
}
exports . HttpManager = HttpManager ;
//# sourceMappingURL=http-manager.js.map
/***/ } ) ,
2023-03-08 22:06:44 +01:00
/***/ 456 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i ;
exports . default = _default ;
/***/ } ) ,
2020-04-28 15:45:21 +02:00
/***/ 470 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
2023-03-08 22:06:44 +01:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-04-28 15:45:21 +02:00
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2023-03-08 22:06:44 +01:00
exports . getIDToken = exports . getState = exports . saveState = exports . group = exports . endGroup = exports . startGroup = exports . info = exports . notice = exports . warning = exports . error = exports . debug = exports . isDebug = exports . setFailed = exports . setCommandEcho = exports . setOutput = exports . getBooleanInput = exports . getMultilineInput = exports . getInput = exports . addPath = exports . setSecret = exports . exportVariable = exports . ExitCode = void 0 ;
2020-04-28 15:45:21 +02:00
const command _1 = _ _webpack _require _ _ ( 431 ) ;
2020-11-13 20:53:33 +01:00
const file _command _1 = _ _webpack _require _ _ ( 102 ) ;
const utils _1 = _ _webpack _require _ _ ( 82 ) ;
2020-04-28 15:45:21 +02:00
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
2023-03-08 22:06:44 +01:00
const oidc _utils _1 = _ _webpack _require _ _ ( 742 ) ;
2020-04-28 15:45:21 +02:00
/ * *
* The code to exit an action
* /
var ExitCode ;
( function ( ExitCode ) {
/ * *
* A code indicating that the action was successful
* /
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/ * *
* A code indicating that the action was a failure
* /
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/ * *
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
2020-04-30 22:50:01 +02:00
* @ param val the value of the variable . Non - string values will be converted to a string via JSON . stringify
2020-04-28 15:45:21 +02:00
* /
2020-04-30 22:50:01 +02:00
// eslint-disable-next-line @typescript-eslint/no-explicit-any
2020-04-28 15:45:21 +02:00
function exportVariable ( name , val ) {
2020-11-13 20:53:33 +01:00
const convertedVal = utils _1 . toCommandValue ( val ) ;
2020-04-30 22:50:01 +02:00
process . env [ name ] = convertedVal ;
2020-11-13 20:53:33 +01:00
const filePath = process . env [ 'GITHUB_ENV' ] || '' ;
if ( filePath ) {
2023-03-08 22:06:44 +01:00
return file _command _1 . issueFileCommand ( 'ENV' , file _command _1 . prepareKeyValueMessage ( name , val ) ) ;
2020-11-13 20:53:33 +01:00
}
2023-03-08 22:06:44 +01:00
command _1 . issueCommand ( 'set-env' , { name } , convertedVal ) ;
2020-04-28 15:45:21 +02:00
}
exports . exportVariable = exportVariable ;
/ * *
* Registers a secret which will get masked from logs
* @ param secret value of the secret
* /
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
}
exports . setSecret = setSecret ;
/ * *
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
* /
function addPath ( inputPath ) {
2020-11-13 20:53:33 +01:00
const filePath = process . env [ 'GITHUB_PATH' ] || '' ;
if ( filePath ) {
2023-03-08 22:06:44 +01:00
file _command _1 . issueFileCommand ( 'PATH' , inputPath ) ;
2020-11-13 20:53:33 +01:00
}
else {
command _1 . issueCommand ( 'add-path' , { } , inputPath ) ;
}
2020-04-28 15:45:21 +02:00
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
}
exports . addPath = addPath ;
/ * *
2023-03-08 22:06:44 +01:00
* Gets the value of an input .
* Unless trimWhitespace is set to false in InputOptions , the value is also trimmed .
* Returns an empty string if the value is not defined .
2020-04-28 15:45:21 +02:00
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
* /
function getInput ( name , options ) {
const val = process . env [ ` INPUT_ ${ name . replace ( / /g , '_' ) . toUpperCase ( ) } ` ] || '' ;
if ( options && options . required && ! val ) {
throw new Error ( ` Input required and not supplied: ${ name } ` ) ;
}
2023-03-08 22:06:44 +01:00
if ( options && options . trimWhitespace === false ) {
return val ;
}
2020-04-28 15:45:21 +02:00
return val . trim ( ) ;
}
exports . getInput = getInput ;
2023-03-08 22:06:44 +01:00
/ * *
* Gets the values of an multiline input . Each value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string [ ]
*
* /
function getMultilineInput ( name , options ) {
const inputs = getInput ( name , options )
. split ( '\n' )
. filter ( x => x !== '' ) ;
if ( options && options . trimWhitespace === false ) {
return inputs ;
}
return inputs . map ( input => input . trim ( ) ) ;
}
exports . getMultilineInput = getMultilineInput ;
/ * *
* Gets the input value of the boolean type in the YAML 1.2 "core schema" specification .
* Support boolean input list : ` true | True | TRUE | false | False | FALSE ` .
* The return value is also in boolean type .
* ref : https : //yaml.org/spec/1.2/spec.html#id2804923
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns boolean
* /
function getBooleanInput ( name , options ) {
const trueValue = [ 'true' , 'True' , 'TRUE' ] ;
const falseValue = [ 'false' , 'False' , 'FALSE' ] ;
const val = getInput ( name , options ) ;
if ( trueValue . includes ( val ) )
return true ;
if ( falseValue . includes ( val ) )
return false ;
throw new TypeError ( ` Input does not meet YAML 1.2 "Core Schema" specification: ${ name } \n ` +
` Support boolean input list: \` true | True | TRUE | false | False | FALSE \` ` ) ;
}
exports . getBooleanInput = getBooleanInput ;
2020-04-28 15:45:21 +02:00
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
2020-04-30 22:50:01 +02:00
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
2020-04-28 15:45:21 +02:00
* /
2020-04-30 22:50:01 +02:00
// eslint-disable-next-line @typescript-eslint/no-explicit-any
2020-04-28 15:45:21 +02:00
function setOutput ( name , value ) {
2023-03-08 22:06:44 +01:00
const filePath = process . env [ 'GITHUB_OUTPUT' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'OUTPUT' , file _command _1 . prepareKeyValueMessage ( name , value ) ) ;
}
process . stdout . write ( os . EOL ) ;
command _1 . issueCommand ( 'set-output' , { name } , utils _1 . toCommandValue ( value ) ) ;
2020-04-28 15:45:21 +02:00
}
exports . setOutput = setOutput ;
2020-04-30 22:50:01 +02:00
/ * *
* Enables or disables the echoing of commands into stdout for the rest of the step .
* Echoing is disabled by default if ACTIONS _STEP _DEBUG is not set .
*
* /
function setCommandEcho ( enabled ) {
command _1 . issue ( 'echo' , enabled ? 'on' : 'off' ) ;
}
exports . setCommandEcho = setCommandEcho ;
2020-04-28 15:45:21 +02:00
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/ * *
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message add error issue message
* /
function setFailed ( message ) {
process . exitCode = ExitCode . Failure ;
error ( message ) ;
}
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Gets whether Actions Step Debug is on or not
* /
function isDebug ( ) {
return process . env [ 'RUNNER_DEBUG' ] === '1' ;
}
exports . isDebug = isDebug ;
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
2020-04-30 22:50:01 +02:00
* @ param message error issue message . Errors will be converted to string via toString ( )
2023-03-08 22:06:44 +01:00
* @ param properties optional properties to add to the annotation .
2020-04-28 15:45:21 +02:00
* /
2023-03-08 22:06:44 +01:00
function error ( message , properties = { } ) {
command _1 . issueCommand ( 'error' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
2020-04-28 15:45:21 +02:00
}
exports . error = error ;
/ * *
2023-03-08 22:06:44 +01:00
* Adds a warning issue
2020-04-30 22:50:01 +02:00
* @ param message warning issue message . Errors will be converted to string via toString ( )
2023-03-08 22:06:44 +01:00
* @ param properties optional properties to add to the annotation .
2020-04-28 15:45:21 +02:00
* /
2023-03-08 22:06:44 +01:00
function warning ( message , properties = { } ) {
command _1 . issueCommand ( 'warning' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
2020-04-28 15:45:21 +02:00
}
exports . warning = warning ;
2023-03-08 22:06:44 +01:00
/ * *
* Adds a notice issue
* @ param message notice issue message . Errors will be converted to string via toString ( )
* @ param properties optional properties to add to the annotation .
* /
function notice ( message , properties = { } ) {
command _1 . issueCommand ( 'notice' , utils _1 . toCommandProperties ( properties ) , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . notice = notice ;
2020-04-28 15:45:21 +02:00
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/ * *
* Begin an output group .
*
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
* /
function startGroup ( name ) {
command _1 . issue ( 'group' , name ) ;
}
exports . startGroup = startGroup ;
/ * *
* End an output group .
* /
function endGroup ( ) {
command _1 . issue ( 'endgroup' ) ;
}
exports . endGroup = endGroup ;
/ * *
* Wrap an asynchronous function call in a group .
*
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
* /
function group ( name , fn ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
startGroup ( name ) ;
let result ;
try {
result = yield fn ( ) ;
}
finally {
endGroup ( ) ;
}
return result ;
} ) ;
}
exports . group = group ;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/ * *
* Saves state for current action , the state can only be retrieved by this action ' s post job execution .
*
* @ param name name of the state to store
2020-04-30 22:50:01 +02:00
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
2020-04-28 15:45:21 +02:00
* /
2020-04-30 22:50:01 +02:00
// eslint-disable-next-line @typescript-eslint/no-explicit-any
2020-04-28 15:45:21 +02:00
function saveState ( name , value ) {
2023-03-08 22:06:44 +01:00
const filePath = process . env [ 'GITHUB_STATE' ] || '' ;
if ( filePath ) {
return file _command _1 . issueFileCommand ( 'STATE' , file _command _1 . prepareKeyValueMessage ( name , value ) ) ;
}
command _1 . issueCommand ( 'save-state' , { name } , utils _1 . toCommandValue ( value ) ) ;
2020-04-28 15:45:21 +02:00
}
exports . saveState = saveState ;
/ * *
* Gets the value of an state set by this action ' s main execution .
*
* @ param name name of the state to get
* @ returns string
* /
function getState ( name ) {
return process . env [ ` STATE_ ${ name } ` ] || '' ;
}
exports . getState = getState ;
2023-03-08 22:06:44 +01:00
function getIDToken ( aud ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield oidc _utils _1 . OidcClient . getIDToken ( aud ) ;
} ) ;
}
exports . getIDToken = getIDToken ;
/ * *
* Summary exports
* /
var summary _1 = _ _webpack _require _ _ ( 665 ) ;
Object . defineProperty ( exports , "summary" , { enumerable : true , get : function ( ) { return summary _1 . summary ; } } ) ;
/ * *
* @ deprecated use core . summary
* /
var summary _2 = _ _webpack _require _ _ ( 665 ) ;
Object . defineProperty ( exports , "markdownSummary" , { enumerable : true , get : function ( ) { return summary _2 . markdownSummary ; } } ) ;
/ * *
* Path exports
* /
var path _utils _1 = _ _webpack _require _ _ ( 573 ) ;
Object . defineProperty ( exports , "toPosixPath" , { enumerable : true , get : function ( ) { return path _utils _1 . toPosixPath ; } } ) ;
Object . defineProperty ( exports , "toWin32Path" , { enumerable : true , get : function ( ) { return path _utils _1 . toWin32Path ; } } ) ;
Object . defineProperty ( exports , "toPlatformPath" , { enumerable : true , get : function ( ) { return path _utils _1 . toPlatformPath ; } } ) ;
2020-04-28 15:45:21 +02:00
//# sourceMappingURL=core.js.map
/***/ } ) ,
2021-01-04 15:47:26 +01:00
/***/ 489 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript __awaiter emit helper: drives a generator so that `yield`
// behaves like `await`, resolving the returned promise with the
// generator's return value and rejecting on any thrown error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript __importStar emit helper: copies own enumerable properties
// of a CommonJS module onto a fresh namespace object and exposes the
// module itself as `default`; ES modules pass through unchanged.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const utils _1 = _ _webpack _require _ _ ( 870 ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const config _variables _1 = _ _webpack _require _ _ ( 401 ) ;
/**
 * Repeatedly invokes an async HTTP operation until it yields a success
 * status code, a non-retryable status is seen, or maxAttempts is exhausted.
 * @param name label used in log/error messages
 * @param operation async function returning an HttpClientResponse-like object
 * @param customErrorMessages map of status code -> extra error text
 * @param maxAttempts maximum number of attempts before failing
 * @returns the successful response
 * @throws Error when attempts are exhausted or a non-retryable status is hit
 */
function retry(name, operation, customErrorMessages, maxAttempts) {
    return __awaiter(this, void 0, void 0, function* () {
        let response = undefined;
        let statusCode = undefined;
        let isRetryable = false;
        let errorMessage = '';
        let customErrorInformation = undefined;
        let attempt = 1;
        while (attempt <= maxAttempts) {
            try {
                response = yield operation();
                statusCode = response.message.statusCode;
                if (utils_1.isSuccessStatusCode(statusCode)) {
                    return response;
                }
                // Extra error information that we want to display if a particular response code is hit
                if (statusCode) {
                    customErrorInformation = customErrorMessages.get(statusCode);
                }
                isRetryable = utils_1.isRetryableStatusCode(statusCode);
                errorMessage = `Artifact service responded with ${statusCode}`;
            }
            catch (error) {
                // Network-level failures are always considered retryable
                isRetryable = true;
                errorMessage = error.message;
            }
            if (!isRetryable) {
                core.info(`${name} - Error is not retryable`);
                if (response) {
                    utils_1.displayHttpDiagnostics(response);
                }
                break;
            }
            core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
            yield utils_1.sleep(utils_1.getExponentialRetryTimeInMilliseconds(attempt));
            attempt++;
        }
        if (response) {
            utils_1.displayHttpDiagnostics(response);
        }
        if (customErrorInformation) {
            throw Error(`${name} failed: ${customErrorInformation}`);
        }
        throw Error(`${name} failed: ${errorMessage}`);
    });
}
exports.retry = retry;
/**
 * Convenience wrapper around retry() that defaults the custom error map
 * and pulls the attempt limit from the configured retry limit.
 * @param name label used in log/error messages
 * @param method async function performing the HTTP call
 * @param customErrorMessages optional map of status code -> extra error text
 * @param maxAttempts defaults to config_variables_1.getRetryLimit()
 */
function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = config_variables_1.getRetryLimit()) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield retry(name, method, customErrorMessages, maxAttempts);
    });
}
exports.retryHttpClientRequest = retryHttpClientRequest;
//# sourceMappingURL=requestUtils.js.map
/***/ } ) ,
// VCS timestamp artifact (was breaking syntax): 2023-03-08 22:06:44 +01:00
/***/ 498 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _crypto = _interopRequireDefault ( _ _webpack _require _ _ ( 417 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
function sha1 ( bytes ) {
if ( Array . isArray ( bytes ) ) {
bytes = Buffer . from ( bytes ) ;
} else if ( typeof bytes === 'string' ) {
bytes = Buffer . from ( bytes , 'utf8' ) ;
}
return _crypto . default . createHash ( 'sha1' ) . update ( bytes ) . digest ( ) ;
}
var _default = sha1 ;
exports . default = _default ;
/***/ } ) ,
// VCS timestamp artifact (was breaking syntax): 2020-04-28 15:45:21 +02:00
/***/ 532 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript __importStar emit helper: copies own enumerable properties
// of a CommonJS module onto a fresh namespace object and exposes the
// module itself as `default`; ES modules pass through unchanged.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
/**
 * Creates a specification for a set of files that will be downloaded
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe the files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 */
/**
 * Creates a specification for a set of files that will be downloaded.
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe the files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 * @returns object with rootDownloadLocation, directoryStructure, emptyFilesToCreate and filesToDownload
 */
function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) {
    // use a set for the directory paths so that there are no duplicates
    const directories = new Set();
    const specifications = {
        rootDownloadLocation: includeRootDirectory
            ? path.join(downloadPath, artifactName)
            : downloadPath,
        directoryStructure: [],
        emptyFilesToCreate: [],
        filesToDownload: []
    };
    for (const entry of artifactEntries) {
        // Ignore artifacts in the container that don't begin with the same name.
        // Repaired: the template literals here were corrupted with embedded spaces,
        // which made both startsWith checks never match.
        if (entry.path.startsWith(`${artifactName}/`) ||
            entry.path.startsWith(`${artifactName}\\`)) {
            // normalize all separators to the local OS
            const normalizedPathEntry = path.normalize(entry.path);
            // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path
            const filePath = path.join(downloadPath, includeRootDirectory
                ? normalizedPathEntry
                : normalizedPathEntry.replace(artifactName, ''));
            // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder'
            // itemType cannot be relied upon. The file must be used to determine the directory structure
            if (entry.itemType === 'file') {
                // Get the directories that we need to create from the filePath for each individual file
                directories.add(path.dirname(filePath));
                if (entry.fileLength === 0) {
                    // An empty file was uploaded, create the empty files locally so that no extra http calls are made
                    specifications.emptyFilesToCreate.push(filePath);
                }
                else {
                    specifications.filesToDownload.push({
                        sourceLocation: entry.contentLocation,
                        targetPath: filePath
                    });
                }
            }
        }
    }
    specifications.directoryStructure = Array.from(directories);
    return specifications;
}
exports.getDownloadSpecification = getDownloadSpecification;
//# sourceMappingURL=download-specification.js.map
/***/ } ) ,
/***/ 539 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const http = _ _webpack _require _ _ ( 605 ) ;
const https = _ _webpack _require _ _ ( 211 ) ;
const pm = _ _webpack _require _ _ ( 950 ) ;
let tunnel ;
var HttpCodes;
(function (HttpCodes) {
    // Name -> numeric status code; the loop also installs the reverse
    // (number -> name) lookup, mirroring a TypeScript numeric enum.
    const byName = {
        OK: 200,
        MultipleChoices: 300,
        MovedPermanently: 301,
        ResourceMoved: 302,
        SeeOther: 303,
        NotModified: 304,
        UseProxy: 305,
        SwitchProxy: 306,
        TemporaryRedirect: 307,
        PermanentRedirect: 308,
        BadRequest: 400,
        Unauthorized: 401,
        PaymentRequired: 402,
        Forbidden: 403,
        NotFound: 404,
        MethodNotAllowed: 405,
        NotAcceptable: 406,
        ProxyAuthenticationRequired: 407,
        RequestTimeout: 408,
        Conflict: 409,
        Gone: 410,
        TooManyRequests: 429,
        InternalServerError: 500,
        NotImplemented: 501,
        BadGateway: 502,
        ServiceUnavailable: 503,
        GatewayTimeout: 504
    };
    for (const [name, code] of Object.entries(byName)) {
        HttpCodes[HttpCodes[name] = code] = name;
    }
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
// String enums for common header names and media types.
var Headers;
(function (Headers) {
    Headers.Accept = "accept";
    Headers.ContentType = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    MediaTypes.ApplicationJson = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 * @returns the proxy URL href, or '' when no proxy applies
 */
function getProxyUrl(serverUrl) {
    let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
// VCS timestamp artifact (was breaking syntax): 2020-04-30 22:50:01 +02:00
// Status codes that trigger automatic redirect following.
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// Transient server errors that are worth retrying.
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only idempotent verbs are retried; writes may not be safe to repeat.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
// VCS timestamp artifact (was breaking syntax): 2020-12-15 16:55:26 +01:00
class HttpClientError extends Error {
constructor ( message , statusCode ) {
super ( message ) ;
this . name = 'HttpClientError' ;
this . statusCode = statusCode ;
Object . setPrototypeOf ( this , HttpClientError . prototype ) ;
}
}
exports . HttpClientError = HttpClientError ;
// VCS timestamp artifact (was breaking syntax): 2020-04-28 15:45:21 +02:00
/**
 * Thin wrapper around an incoming message (http.IncomingMessage-like).
 */
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    /**
     * Buffers the entire response body and resolves it as a string.
     */
    readBody() {
        return new Promise((resolve) => {
            const chunks = [];
            this.message.on('data', (chunk) => {
                chunks.push(chunk);
            });
            this.message.on('end', () => {
                resolve(Buffer.concat(chunks).toString());
            });
        });
    }
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps ( requestUrl ) {
2020-12-15 16:55:26 +01:00
let parsedUrl = new URL ( requestUrl ) ;
2020-04-28 15:45:21 +02:00
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
/**
 * HTTP client over node's http/https modules supporting auth handlers,
 * redirect following, automatic retries of idempotent verbs, proxy
 * tunneling and typed JSON convenience helpers.
 * When keepAlive is enabled, call dispose() when finished with the client.
 */
class HttpClient {
    /**
     * @param userAgent optional value sent as the user-agent header
     * @param handlers optional auth handlers that may decorate or retry requests
     * @param requestOptions optional client-wide settings (ssl, redirects, retries, sockets, headers)
     */
    constructor(userAgent, handlers, requestOptions) {
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    options(requestUrl, additionalHeaders) {
        return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
    }
    get(requestUrl, additionalHeaders) {
        return this.request('GET', requestUrl, null, additionalHeaders || {});
    }
    del(requestUrl, additionalHeaders) {
        return this.request('DELETE', requestUrl, null, additionalHeaders || {});
    }
    post(requestUrl, data, additionalHeaders) {
        return this.request('POST', requestUrl, data, additionalHeaders || {});
    }
    patch(requestUrl, data, additionalHeaders) {
        return this.request('PATCH', requestUrl, data, additionalHeaders || {});
    }
    put(requestUrl, data, additionalHeaders) {
        return this.request('PUT', requestUrl, data, additionalHeaders || {});
    }
    head(requestUrl, additionalHeaders) {
        return this.request('HEAD', requestUrl, null, additionalHeaders || {});
    }
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return this.request(verb, requestUrl, stream, additionalHeaders);
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        let res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async postJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async putJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
        }
        let parsedUrl = new URL(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
                        authenticationHandler = this.handlers[i];
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = new URL(redirectUrl);
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        }
        return response;
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info, data) {
        return new Promise((resolve, reject) => {
            let callbackForResult = function (err, res) {
                if (err) {
                    reject(err);
                }
                resolve(res);
            };
            this.requestRawWithCallback(info, data, callbackForResult);
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        let parsedUrl = new URL(serverUrl);
        return this._getAgent(parsedUrl);
    }
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach(handler => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            if (!tunnel) {
                tunnel = __webpack_require__(413);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    ...((proxyUrl.username || proxyUrl.password) && {
                        proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
                    }),
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    _performExponentialBackoff(retryNumber) {
        retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
        const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
        return new Promise(resolve => setTimeout(() => resolve(), ms));
    }
    static dateTimeDeserializer(key, value) {
        if (typeof value === 'string') {
            let a = new Date(value);
            if (!isNaN(a.valueOf())) {
                return a;
            }
        }
        return value;
    }
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = 'Failed request: (' + statusCode + ')';
                }
                let err = new HttpClientError(msg, statusCode);
                err.result = response.result;
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
}
exports.HttpClient = HttpClient;
// VCS timestamp artifacts (were breaking syntax): 2020-04-28 15:45:21 +02:00 / 2021-12-07 17:44:54 +01:00
/***/ } ) ,
// VCS timestamp artifacts (were breaking syntax): 2020-04-28 15:45:21 +02:00 / 2021-12-07 17:44:54 +01:00
/***/ 553 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
// VCS timestamp artifacts (were breaking syntax): 2020-04-28 15:45:21 +02:00 / 2021-12-07 17:44:54 +01:00
"use strict" ;
// VCS timestamp artifacts (were breaking syntax): 2020-04-28 15:45:21 +02:00 / 2021-12-07 17:44:54 +01:00
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core _1 = _ _webpack _require _ _ ( 470 ) ;
/**
 * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
 * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
 * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an
 * individual filesystem/platform will not be supported on all fileSystems/platforms
 *
 * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
 */
// Characters rejected in uploaded file paths; each maps to the human-readable
// fragment used when composing the validation error message.
const invalidArtifactFilePathCharacters = new Map([
    ['"', ' Double quote "'],
    [':', ' Colon :'],
    ['<', ' Less than <'],
    ['>', ' Greater than >'],
    ['|', ' Vertical bar |'],
    ['*', ' Asterisk *'],
    ['?', ' Question mark ?'],
    ['\r', ' Carriage return \\r'],
    ['\n', ' Line feed \\n']
]);
// Artifact names additionally forbid path separators, which are legal
// inside file paths but not in the bare artifact name.
const invalidArtifactNameCharacters = new Map([
    ...invalidArtifactFilePathCharacters,
    ['\\', ' Backslash \\'],
    ['/', ' Forward slash /']
]);
/**
 * Scans the name of the artifact to make sure there are no illegal characters
 */
/**
 * Scans the name of the artifact to make sure there are no illegal characters.
 * @param name artifact name supplied by the caller
 * @throws Error when the name is empty/undefined or contains a forbidden character
 */
function checkArtifactName(name) {
    if (!name) {
        throw new Error(`Artifact name: ${name}, is incorrectly provided`);
    }
    for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) {
        if (name.includes(invalidCharacterKey)) {
            throw new Error(`Artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter}
Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()}
These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`);
        }
    }
    core_1.info(`Artifact name is valid!`);
}
exports.checkArtifactName = checkArtifactName;
/**
 * Scans the name of the filePath used to make sure there are no illegal characters
 */
/**
 * Scans the supplied file path to make sure there are no illegal characters.
 * Unlike artifact names, / and \ are permitted here.
 * @param path file path recorded for an uploaded file
 * @throws Error when the path is empty/undefined or contains a forbidden character
 */
function checkArtifactFilePath(path) {
    if (!path) {
        throw new Error(`Artifact path: ${path}, is incorrectly provided`);
    }
    for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) {
        if (path.includes(invalidCharacterKey)) {
            throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter}
Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()}
The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.
`);
        }
    }
}
exports.checkArtifactFilePath = checkArtifactFilePath;
//# sourceMappingURL=path-and-artifact-name-validation.js.map
// VCS timestamp artifact (was breaking syntax): 2020-04-28 15:45:21 +02:00
/***/ } ) ,
// VCS timestamp artifact (was breaking syntax): 2023-03-08 22:06:44 +01:00
/***/ 573 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript __createBinding emit helper: re-exports property k of module m
// on object o (optionally renamed to k2). Uses a live getter when
// Object.create is available, otherwise a plain copy.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . toPlatformPath = exports . toWin32Path = exports . toPosixPath = void 0 ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
/**
 * toPosixPath converts the given path to the posix form. On Windows, \\ will be
 * replaced with /.
 *
 * @param pth string Path to transform.
 * @return string Posix path.
 */
function toPosixPath(pth) {
    // replace every backslash with a forward slash; forward slashes pass through
    return pth.replace(/[\\]/g, '/');
}
exports.toPosixPath = toPosixPath;
/**
 * toWin32Path converts the given path to the win32 form. On Linux, / will be
 * replaced with \\.
 *
 * @param pth string Path to transform.
 * @return string Win32 path.
 */
function toWin32Path(pth) {
    // replace every forward slash with a backslash; backslashes pass through
    return pth.replace(/[/]/g, '\\');
}
exports.toWin32Path = toWin32Path;
/**
 * toPlatformPath converts the given path to a platform-specific path. It does
 * this by replacing instances of / and \ with the platform-specific path
 * separator.
 *
 * @param pth string The path to platformize.
 * @return string The platform-specific path.
 */
function toPlatformPath(pth) {
    // both separators are normalized to path.sep for the current OS
    return pth.replace(/[/\\]/g, path.sep);
}
exports . toPlatformPath = toPlatformPath ;
//# sourceMappingURL=path-utils.js.map
/***/ } ) ,
2020-04-28 15:45:21 +02:00
/***/ 590 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const core _1 = _ _webpack _require _ _ ( 470 ) ;
const path _1 = _ _webpack _require _ _ ( 622 ) ;
2021-12-07 17:44:54 +01:00
const path _and _artifact _name _validation _1 = _ _webpack _require _ _ ( 553 ) ;
2020-04-28 15:45:21 +02:00
/ * *
* Creates a specification that describes how each file that is part of the artifact will be uploaded
* @ param artifactName the name of the artifact being uploaded . Used during upload to denote where the artifact is stored on the server
* @ param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file
* @ param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
* /
function getUploadSpecification ( artifactName , rootDirectory , artifactFiles ) {
2021-12-07 17:44:54 +01:00
// artifact name was checked earlier on, no need to check again
2020-04-28 15:45:21 +02:00
const specifications = [ ] ;
if ( ! fs . existsSync ( rootDirectory ) ) {
throw new Error ( ` Provided rootDirectory ${ rootDirectory } does not exist ` ) ;
}
if ( ! fs . lstatSync ( rootDirectory ) . isDirectory ( ) ) {
throw new Error ( ` Provided rootDirectory ${ rootDirectory } is not a valid directory ` ) ;
}
// Normalize and resolve, this allows for either absolute or relative paths to be used
rootDirectory = path _1 . normalize ( rootDirectory ) ;
rootDirectory = path _1 . resolve ( rootDirectory ) ;
/ *
Example to demonstrate behavior
Input :
artifactName : my - artifact
rootDirectory : '/home/user/files/plz-upload'
artifactFiles : [
'/home/user/files/plz-upload/file1.txt' ,
'/home/user/files/plz-upload/file2.txt' ,
'/home/user/files/plz-upload/dir/file3.txt'
]
Output :
specifications : [
[ '/home/user/files/plz-upload/file1.txt' , 'my-artifact/file1.txt' ] ,
[ '/home/user/files/plz-upload/file1.txt' , 'my-artifact/file2.txt' ] ,
[ '/home/user/files/plz-upload/file1.txt' , 'my-artifact/dir/file3.txt' ]
]
* /
for ( let file of artifactFiles ) {
if ( ! fs . existsSync ( file ) ) {
throw new Error ( ` File ${ file } does not exist ` ) ;
}
if ( ! fs . lstatSync ( file ) . isDirectory ( ) ) {
// Normalize and resolve, this allows for either absolute or relative paths to be used
file = path _1 . normalize ( file ) ;
file = path _1 . resolve ( file ) ;
if ( ! file . startsWith ( rootDirectory ) ) {
throw new Error ( ` The rootDirectory: ${ rootDirectory } is not a parent directory of the file: ${ file } ` ) ;
}
// Check for forbidden characters in file paths that will be rejected during upload
const uploadPath = file . replace ( rootDirectory , '' ) ;
2021-12-07 17:44:54 +01:00
path _and _artifact _name _validation _1 . checkArtifactFilePath ( uploadPath ) ;
2020-04-28 15:45:21 +02:00
/ *
uploadFilePath denotes where the file will be uploaded in the file container on the server . During a run , if multiple artifacts are uploaded , they will all
be saved in the same container . The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts
path . join handles all the following cases and would return ' artifact - name / file - to - upload . txt
join ( 'artifact-name/' , 'file-to-upload.txt' )
join ( 'artifact-name/' , '/file-to-upload.txt' )
join ( 'artifact-name' , 'file-to-upload.txt' )
join ( 'artifact-name' , '/file-to-upload.txt' )
* /
specifications . push ( {
absoluteFilePath : file ,
uploadFilePath : path _1 . join ( artifactName , uploadPath )
} ) ;
}
else {
// Directories are rejected by the server during upload
core _1 . debug ( ` Removing ${ file } from rawSearchResults because it is a directory ` ) ;
}
}
return specifications ;
}
exports . getUploadSpecification = getUploadSpecification ;
//# sourceMappingURL=upload-specification.js.map
/***/ } ) ,
/***/ 605 :
/***/ ( function ( module ) {
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 608 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const tmp = _ _importStar ( _ _webpack _require _ _ ( 875 ) ) ;
2023-03-08 22:06:44 +01:00
const stream = _ _importStar ( _ _webpack _require _ _ ( 794 ) ) ;
2020-04-28 15:45:21 +02:00
const utils _1 = _ _webpack _require _ _ ( 870 ) ;
const config _variables _1 = _ _webpack _require _ _ ( 401 ) ;
const util _1 = _ _webpack _require _ _ ( 669 ) ;
const url _1 = _ _webpack _require _ _ ( 835 ) ;
const perf _hooks _1 = _ _webpack _require _ _ ( 630 ) ;
const status _reporter _1 = _ _webpack _require _ _ ( 176 ) ;
2021-01-04 15:47:26 +01:00
const http _client _1 = _ _webpack _require _ _ ( 539 ) ;
2020-04-28 15:45:21 +02:00
const http _manager _1 = _ _webpack _require _ _ ( 452 ) ;
const upload _gzip _1 = _ _webpack _require _ _ ( 647 ) ;
2021-01-04 15:47:26 +01:00
const requestUtils _1 = _ _webpack _require _ _ ( 489 ) ;
2020-04-28 15:45:21 +02:00
const stat = util _1 . promisify ( fs . stat ) ;
class UploadHttpClient {
constructor ( ) {
2020-08-04 17:55:46 +02:00
this . uploadHttpManager = new http _manager _1 . HttpManager ( config _variables _1 . getUploadFileConcurrency ( ) , '@actions/artifact-upload' ) ;
2020-04-28 15:45:21 +02:00
this . statusReporter = new status _reporter _1 . StatusReporter ( 10000 ) ;
}
/ * *
* Creates a file container for the new artifact in the remote blob storage / file service
* @ param { string } artifactName Name of the artifact being created
* @ returns The response from the Artifact Service if the file container was successfully created
* /
2020-12-15 16:55:26 +01:00
createArtifactInFileContainer ( artifactName , options ) {
2020-04-28 15:45:21 +02:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const parameters = {
Type : 'actions_storage' ,
Name : artifactName
} ;
2020-12-15 16:55:26 +01:00
// calculate retention period
if ( options && options . retentionDays ) {
const maxRetentionStr = config _variables _1 . getRetentionDays ( ) ;
parameters . RetentionDays = utils _1 . getProperRetention ( options . retentionDays , maxRetentionStr ) ;
}
2020-04-28 15:45:21 +02:00
const data = JSON . stringify ( parameters , null , 2 ) ;
const artifactUrl = utils _1 . getArtifactUrl ( ) ;
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
const client = this . uploadHttpManager . getClient ( 0 ) ;
2020-07-31 17:16:59 +02:00
const headers = utils _1 . getUploadHeaders ( 'application/json' , false ) ;
2021-01-04 15:47:26 +01:00
// Extra information to display when a particular HTTP code is returned
// If a 403 is returned when trying to create a file container, the customer has exceeded
// their storage quota so no new artifact containers can be created
const customErrorMessages = new Map ( [
[
http _client _1 . HttpCodes . Forbidden ,
'Artifact storage quota has been hit. Unable to upload any new artifacts'
] ,
[
http _client _1 . HttpCodes . BadRequest ,
` The artifact name ${ artifactName } is not valid. Request URL ${ artifactUrl } `
]
] ) ;
const response = yield requestUtils _1 . retryHttpClientRequest ( 'Create Artifact Container' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return client . post ( artifactUrl , data , headers ) ; } ) , customErrorMessages ) ;
const body = yield response . readBody ( ) ;
return JSON . parse ( body ) ;
2020-04-28 15:45:21 +02:00
} ) ;
}
/ * *
* Concurrently upload all of the files in chunks
* @ param { string } uploadUrl Base Url for the artifact that was created
* @ param { SearchResult [ ] } filesToUpload A list of information about the files being uploaded
* @ returns The size of all the files uploaded in bytes
* /
uploadArtifactToFileContainer ( uploadUrl , filesToUpload , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const FILE _CONCURRENCY = config _variables _1 . getUploadFileConcurrency ( ) ;
const MAX _CHUNK _SIZE = config _variables _1 . getUploadChunkSize ( ) ;
core . debug ( ` File Concurrency: ${ FILE _CONCURRENCY } , and Chunk Size: ${ MAX _CHUNK _SIZE } ` ) ;
const parameters = [ ] ;
// by default, file uploads will continue if there is an error unless specified differently in the options
let continueOnError = true ;
if ( options ) {
if ( options . continueOnError === false ) {
continueOnError = false ;
}
}
// prepare the necessary parameters to upload all the files
for ( const file of filesToUpload ) {
const resourceUrl = new url _1 . URL ( uploadUrl ) ;
resourceUrl . searchParams . append ( 'itemPath' , file . uploadFilePath ) ;
parameters . push ( {
file : file . absoluteFilePath ,
resourceUrl : resourceUrl . toString ( ) ,
maxChunkSize : MAX _CHUNK _SIZE ,
continueOnError
} ) ;
}
const parallelUploads = [ ... new Array ( FILE _CONCURRENCY ) . keys ( ) ] ;
const failedItemsToReport = [ ] ;
let currentFile = 0 ;
let completedFiles = 0 ;
let uploadFileSize = 0 ;
let totalFileSize = 0 ;
let abortPendingFileUploads = false ;
this . statusReporter . setTotalNumberOfFilesToProcess ( filesToUpload . length ) ;
this . statusReporter . start ( ) ;
// only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
yield Promise . all ( parallelUploads . map ( ( index ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
while ( currentFile < filesToUpload . length ) {
const currentFileParameters = parameters [ currentFile ] ;
currentFile += 1 ;
if ( abortPendingFileUploads ) {
failedItemsToReport . push ( currentFileParameters . file ) ;
continue ;
}
const startTime = perf _hooks _1 . performance . now ( ) ;
const uploadFileResult = yield this . uploadFileAsync ( index , currentFileParameters ) ;
if ( core . isDebug ( ) ) {
core . debug ( ` File: ${ ++ completedFiles } / ${ filesToUpload . length } . ${ currentFileParameters . file } took ${ ( perf _hooks _1 . performance . now ( ) - startTime ) . toFixed ( 3 ) } milliseconds to finish upload ` ) ;
}
uploadFileSize += uploadFileResult . successfulUploadSize ;
totalFileSize += uploadFileResult . totalSize ;
if ( uploadFileResult . isSuccess === false ) {
failedItemsToReport . push ( currentFileParameters . file ) ;
if ( ! continueOnError ) {
// fail fast
core . error ( ` aborting artifact upload ` ) ;
abortPendingFileUploads = true ;
}
}
this . statusReporter . incrementProcessedCount ( ) ;
}
} ) ) ) ;
this . statusReporter . stop ( ) ;
// done uploading, safety dispose all connections
this . uploadHttpManager . disposeAndReplaceAllClients ( ) ;
core . info ( ` Total size of all the files uploaded is ${ uploadFileSize } bytes ` ) ;
return {
uploadSize : uploadFileSize ,
totalSize : totalFileSize ,
failedItems : failedItemsToReport
} ;
} ) ;
}
/ * *
* Asynchronously uploads a file . The file is compressed and uploaded using GZip if it is determined to save space .
* If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
* @ param { number } httpClientIndex The index of the httpClient that is being used to make all of the calls
* @ param { UploadFileParameters } parameters Information about the file that needs to be uploaded
* @ returns The size of the file that was uploaded in bytes along with any failed uploads
* /
uploadFileAsync ( httpClientIndex , parameters ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2021-12-07 17:44:54 +01:00
const fileStat = yield stat ( parameters . file ) ;
const totalFileSize = fileStat . size ;
// on Windows with mkfifo from MSYS2 stats.isFIFO returns false, so we check if running on Windows node and
// if the file has size of 0 to compensate
const isFIFO = fileStat . isFIFO ( ) || ( process . platform === 'win32' && totalFileSize === 0 ) ;
2020-04-28 15:45:21 +02:00
let offset = 0 ;
let isUploadSuccessful = true ;
let failedChunkSizes = 0 ;
let uploadFileSize = 0 ;
let isGzip = true ;
2021-12-07 17:44:54 +01:00
// the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O
2020-04-28 15:45:21 +02:00
// for creating a new GZip file, an in-memory buffer is used for compression
2021-12-07 17:44:54 +01:00
// with named pipes the file size is reported as zero in that case don't read the file in memory
if ( ! isFIFO && totalFileSize < 65536 ) {
core . debug ( ` ${ parameters . file } is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size ` ) ;
2020-04-28 15:45:21 +02:00
const buffer = yield upload _gzip _1 . createGZipFileInBuffer ( parameters . file ) ;
2021-12-07 17:44:54 +01:00
// An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
2020-07-31 17:16:59 +02:00
// it will not properly get reset to the start of the stream if a chunk upload needs to be retried
let openUploadStream ;
2020-04-28 15:45:21 +02:00
if ( totalFileSize < buffer . byteLength ) {
// compression did not help with reducing the size, use a readable stream from the original file for upload
2021-12-07 17:44:54 +01:00
core . debug ( ` The gzip file created for ${ parameters . file } did not help with reducing the size of the file. The original file will be uploaded as-is ` ) ;
2020-07-31 17:16:59 +02:00
openUploadStream = ( ) => fs . createReadStream ( parameters . file ) ;
2020-04-28 15:45:21 +02:00
isGzip = false ;
uploadFileSize = totalFileSize ;
}
else {
// create a readable stream using a PassThrough stream that is both readable and writable
2021-12-07 17:44:54 +01:00
core . debug ( ` A gzip file created for ${ parameters . file } helped with reducing the size of the original file. The file will be uploaded using gzip. ` ) ;
2020-07-31 17:16:59 +02:00
openUploadStream = ( ) => {
const passThrough = new stream . PassThrough ( ) ;
passThrough . end ( buffer ) ;
return passThrough ;
} ;
2020-04-28 15:45:21 +02:00
uploadFileSize = buffer . byteLength ;
}
2020-07-31 17:16:59 +02:00
const result = yield this . uploadChunk ( httpClientIndex , parameters . resourceUrl , openUploadStream , 0 , uploadFileSize - 1 , uploadFileSize , isGzip , totalFileSize ) ;
2020-04-28 15:45:21 +02:00
if ( ! result ) {
// chunk failed to upload
isUploadSuccessful = false ;
failedChunkSizes += uploadFileSize ;
core . warning ( ` Aborting upload for ${ parameters . file } due to failure ` ) ;
}
return {
isSuccess : isUploadSuccessful ,
successfulUploadSize : uploadFileSize - failedChunkSizes ,
totalSize : totalFileSize
} ;
}
else {
// the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
// npm tmp-promise package and this file gets used to create a GZipped file
const tempFile = yield tmp . file ( ) ;
2021-12-07 17:44:54 +01:00
core . debug ( ` ${ parameters . file } is greater than 64k in size. Creating a gzip file on-disk ${ tempFile . path } to potentially reduce the upload size ` ) ;
2020-04-28 15:45:21 +02:00
// create a GZip file of the original file being uploaded, the original file should not be modified in any way
uploadFileSize = yield upload _gzip _1 . createGZipFileOnDisk ( parameters . file , tempFile . path ) ;
let uploadFilePath = tempFile . path ;
// compression did not help with size reduction, use the original file for upload and delete the temp GZip file
2021-12-07 17:44:54 +01:00
// for named pipes totalFileSize is zero, this assumes compression did help
if ( ! isFIFO && totalFileSize < uploadFileSize ) {
core . debug ( ` The gzip file created for ${ parameters . file } did not help with reducing the size of the file. The original file will be uploaded as-is ` ) ;
2020-04-28 15:45:21 +02:00
uploadFileSize = totalFileSize ;
uploadFilePath = parameters . file ;
isGzip = false ;
}
2021-12-07 17:44:54 +01:00
else {
core . debug ( ` The gzip file created for ${ parameters . file } is smaller than the original file. The file will be uploaded using gzip. ` ) ;
}
2020-04-28 15:45:21 +02:00
let abortFileUpload = false ;
// upload only a single chunk at a time
while ( offset < uploadFileSize ) {
const chunkSize = Math . min ( uploadFileSize - offset , parameters . maxChunkSize ) ;
2021-12-07 17:44:54 +01:00
const startChunkIndex = offset ;
const endChunkIndex = offset + chunkSize - 1 ;
2020-04-28 15:45:21 +02:00
offset += parameters . maxChunkSize ;
if ( abortFileUpload ) {
// if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
failedChunkSizes += chunkSize ;
continue ;
}
2020-07-31 17:16:59 +02:00
const result = yield this . uploadChunk ( httpClientIndex , parameters . resourceUrl , ( ) => fs . createReadStream ( uploadFilePath , {
2021-12-07 17:44:54 +01:00
start : startChunkIndex ,
end : endChunkIndex ,
2020-04-28 15:45:21 +02:00
autoClose : false
2021-12-07 17:44:54 +01:00
} ) , startChunkIndex , endChunkIndex , uploadFileSize , isGzip , totalFileSize ) ;
2020-04-28 15:45:21 +02:00
if ( ! result ) {
// Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
// successfully uploaded so the server may report a different size for what was uploaded
isUploadSuccessful = false ;
failedChunkSizes += chunkSize ;
core . warning ( ` Aborting upload for ${ parameters . file } due to failure ` ) ;
abortFileUpload = true ;
}
2021-12-07 17:44:54 +01:00
else {
// if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status
if ( uploadFileSize > 8388608 ) {
this . statusReporter . updateLargeFileStatus ( parameters . file , startChunkIndex , endChunkIndex , uploadFileSize ) ;
}
}
2020-04-28 15:45:21 +02:00
}
// Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
// calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
2021-12-07 17:44:54 +01:00
core . debug ( ` deleting temporary gzip file ${ tempFile . path } ` ) ;
2020-04-28 15:45:21 +02:00
yield tempFile . cleanup ( ) ;
return {
isSuccess : isUploadSuccessful ,
successfulUploadSize : uploadFileSize - failedChunkSizes ,
totalSize : totalFileSize
} ;
}
} ) ;
}
/ * *
* Uploads a chunk of an individual file to the specified resourceUrl . If the upload fails and the status code
* indicates a retryable status , we try to upload the chunk as well
* @ param { number } httpClientIndex The index of the httpClient being used to make all the necessary calls
* @ param { string } resourceUrl Url of the resource that the chunk will be uploaded to
2020-07-31 17:16:59 +02:00
* @ param { NodeJS . ReadableStream } openStream Stream of the file that will be uploaded
2020-04-28 15:45:21 +02:00
* @ param { number } start Starting byte index of file that the chunk belongs to
* @ param { number } end Ending byte index of file that the chunk belongs to
* @ param { number } uploadFileSize Total size of the file in bytes that is being uploaded
* @ param { boolean } isGzip Denotes if we are uploading a Gzip compressed stream
* @ param { number } totalFileSize Original total size of the file that is being uploaded
* @ returns if the chunk was successfully uploaded
* /
2020-07-31 17:16:59 +02:00
uploadChunk ( httpClientIndex , resourceUrl , openStream , start , end , uploadFileSize , isGzip , totalFileSize ) {
2020-04-28 15:45:21 +02:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// prepare all the necessary headers before making any http call
2020-07-31 17:16:59 +02:00
const headers = utils _1 . getUploadHeaders ( 'application/octet-stream' , true , isGzip , totalFileSize , end - start + 1 , utils _1 . getContentRange ( start , end , uploadFileSize ) ) ;
2020-04-28 15:45:21 +02:00
const uploadChunkRequest = ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const client = this . uploadHttpManager . getClient ( httpClientIndex ) ;
2020-07-31 17:16:59 +02:00
return yield client . sendStream ( 'PUT' , resourceUrl , openStream ( ) , headers ) ;
2020-04-28 15:45:21 +02:00
} ) ;
let retryCount = 0 ;
const retryLimit = config _variables _1 . getRetryLimit ( ) ;
// Increments the current retry count and then checks if the retry limit has been reached
// If there have been too many retries, fail so the download stops
const incrementAndCheckRetryLimit = ( response ) => {
retryCount ++ ;
if ( retryCount > retryLimit ) {
if ( response ) {
utils _1 . displayHttpDiagnostics ( response ) ;
}
core . info ( ` Retry limit has been reached for chunk at offset ${ start } to ${ resourceUrl } ` ) ;
return true ;
}
return false ;
} ;
const backOff = ( retryAfterValue ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
this . uploadHttpManager . disposeAndReplaceClient ( httpClientIndex ) ;
if ( retryAfterValue ) {
core . info ( ` Backoff due to too many requests, retry # ${ retryCount } . Waiting for ${ retryAfterValue } milliseconds before continuing the upload ` ) ;
2021-01-04 15:47:26 +01:00
yield utils _1 . sleep ( retryAfterValue ) ;
2020-04-28 15:45:21 +02:00
}
else {
const backoffTime = utils _1 . getExponentialRetryTimeInMilliseconds ( retryCount ) ;
core . info ( ` Exponential backoff for retry # ${ retryCount } . Waiting for ${ backoffTime } milliseconds before continuing the upload at offset ${ start } ` ) ;
2021-01-04 15:47:26 +01:00
yield utils _1 . sleep ( backoffTime ) ;
2020-04-28 15:45:21 +02:00
}
core . info ( ` Finished backoff for retry # ${ retryCount } , continuing with upload ` ) ;
return ;
} ) ;
// allow for failed chunks to be retried multiple times
while ( retryCount <= retryLimit ) {
let response ;
try {
response = yield uploadChunkRequest ( ) ;
}
catch ( error ) {
// if an error is caught, it is usually indicative of a timeout so retry the upload
core . info ( ` An error has been caught http-client index ${ httpClientIndex } , retrying the upload ` ) ;
// eslint-disable-next-line no-console
console . log ( error ) ;
if ( incrementAndCheckRetryLimit ( ) ) {
return false ;
}
yield backOff ( ) ;
continue ;
}
// Always read the body of the response. There is potential for a resource leak if the body is not read which will
// result in the connection remaining open along with unintended consequences when trying to dispose of the client
yield response . readBody ( ) ;
if ( utils _1 . isSuccessStatusCode ( response . message . statusCode ) ) {
return true ;
}
else if ( utils _1 . isRetryableStatusCode ( response . message . statusCode ) ) {
core . info ( ` A ${ response . message . statusCode } status code has been received, will attempt to retry the upload ` ) ;
if ( incrementAndCheckRetryLimit ( response ) ) {
return false ;
}
utils _1 . isThrottledStatusCode ( response . message . statusCode )
? yield backOff ( utils _1 . tryGetRetryAfterValueTimeInMilliseconds ( response . message . headers ) )
: yield backOff ( ) ;
}
else {
core . error ( ` Unexpected response. Unable to upload chunk to ${ resourceUrl } ` ) ;
utils _1 . displayHttpDiagnostics ( response ) ;
return false ;
}
}
return false ;
} ) ;
}
/ * *
* Updates the size of the artifact from - 1 which was initially set when the container was first created for the artifact .
* Updating the size indicates that we are done uploading all the contents of the artifact
* /
patchArtifactSize ( size , artifactName ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const resourceUrl = new url _1 . URL ( utils _1 . getArtifactUrl ( ) ) ;
resourceUrl . searchParams . append ( 'artifactName' , artifactName ) ;
const parameters = { Size : size } ;
const data = JSON . stringify ( parameters , null , 2 ) ;
core . debug ( ` URL is ${ resourceUrl . toString ( ) } ` ) ;
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
const client = this . uploadHttpManager . getClient ( 0 ) ;
2021-01-04 15:47:26 +01:00
const headers = utils _1 . getUploadHeaders ( 'application/json' , false ) ;
// Extra information to display when a particular HTTP code is returned
const customErrorMessages = new Map ( [
[
http _client _1 . HttpCodes . NotFound ,
` An Artifact with the name ${ artifactName } was not found `
]
] ) ;
// TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this
const response = yield requestUtils _1 . retryHttpClientRequest ( 'Finalize artifact upload' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return client . patch ( resourceUrl . toString ( ) , data , headers ) ; } ) , customErrorMessages ) ;
yield response . readBody ( ) ;
core . debug ( ` Artifact ${ artifactName } has been successfully uploaded, total size in bytes: ${ size } ` ) ;
2020-04-28 15:45:21 +02:00
} ) ;
}
}
exports . UploadHttpClient = UploadHttpClient ;
//# sourceMappingURL=upload-http-client.js.map
/***/ } ) ,
/***/ 614 :
/***/ ( function ( module ) {
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 621 :
/***/ ( function ( module ) {
"use strict" ;
module . exports = balanced ;
/**
 * Finds the first balanced occurrence of delimiters `a`...`b` in `str`.
 * @param {string|RegExp} a opening delimiter (a RegExp is resolved to its first match)
 * @param {string|RegExp} b closing delimiter
 * @param {string} str the string to scan
 * @returns {{start:number,end:number,pre:string,body:string,post:string}|undefined}
 *          undefined when no balanced pair exists
 */
function balanced(a, b, str) {
  if (a instanceof RegExp) a = maybeMatch(a, str);
  if (b instanceof RegExp) b = maybeMatch(b, str);

  var r = range(a, b, str);

  return r && {
    start: r[0],
    end: r[1],
    pre: str.slice(0, r[0]),
    body: str.slice(r[0] + a.length, r[1]),
    post: str.slice(r[1] + b.length)
  };
}

/**
 * Returns the first match of `reg` in `str`, or null when there is none.
 */
function maybeMatch(reg, str) {
  var m = str.match(reg);
  return m ? m[0] : null;
}

balanced.range = range;

/**
 * Returns the [start, end] indices of the first balanced `a`...`b` pair,
 * or undefined when the delimiters never balance.
 */
function range(a, b, str) {
  var begs, beg, left, right, result;
  var ai = str.indexOf(a);
  var bi = str.indexOf(b, ai + 1);
  var i = ai;

  if (ai >= 0 && bi > 0) {
    begs = [];
    left = str.length;

    while (i >= 0 && !result) {
      if (i == ai) {
        // another opener: remember its position and look for the next one
        begs.push(i);
        ai = str.indexOf(a, i + 1);
      } else if (begs.length == 1) {
        // single pending opener closed by this closer: balanced pair found
        result = [begs.pop(), bi];
      } else {
        // nested closer: track the innermost-leftmost candidate pair
        beg = begs.pop();
        if (beg < left) {
          left = beg;
          right = bi;
        }

        bi = str.indexOf(b, i + 1);
      }

      // advance to whichever delimiter comes next in the string
      i = ai < bi && ai >= 0 ? ai : bi;
    }

    if (begs.length) {
      // unclosed openers remain: fall back to the best nested pair seen
      result = [left, right];
    }
  }

  return result;
}
/***/ } ) ,
/***/ 622 :
/***/ ( function ( module ) {
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 630 :
/***/ ( function ( module ) {
module . exports = require ( "perf_hooks" ) ;
/***/ } ) ,
/***/ 631 :
/***/ ( function ( module ) {
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 647 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _asyncValues = ( this && this . _ _asyncValues ) || function ( o ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var m = o [ Symbol . asyncIterator ] , i ;
return m ? m . call ( o ) : ( o = typeof _ _values === "function" ? _ _values ( o ) : o [ Symbol . iterator ] ( ) , i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ) ;
function verb ( n ) { i [ n ] = o [ n ] && function ( v ) { return new Promise ( function ( resolve , reject ) { v = o [ n ] ( v ) , settle ( resolve , reject , v . done , v . value ) ; } ) ; } ; }
function settle ( resolve , reject , d , v ) { Promise . resolve ( v ) . then ( function ( v ) { resolve ( { value : v , done : d } ) ; } , reject ) ; }
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const zlib = _ _importStar ( _ _webpack _require _ _ ( 761 ) ) ;
const util _1 = _ _webpack _require _ _ ( 669 ) ;
const stat = util _1 . promisify ( fs . stat ) ;
/**
 * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip
 * files then will just waste a lot of time before ultimately being discarded (especially for very large files).
 * If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is
 */
const gzipExemptFileExtensions = [
    '.gzip',
    '.zip',
    '.tar.lz',
    '.tar.gz',
    '.tar.bz2',
    '.7z'
];
/**
 * Creates a Gzip compressed file of an original file at the provided temporary filepath location
 * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
 * @param {string} tempFilePath the location of where the Gzip file will be created
 * @returns {Promise<number>} the size of the gzip file that gets created, or Number.MAX_SAFE_INTEGER for
 * gzip-exempt file types so that the caller uploads the original file instead
 */
async function createGZipFileOnDisk(originalFilePath, tempFilePath) {
    // skip on-disk gzip creation entirely for file types that are already compressed
    for (const gzipExemptExtension of gzipExemptFileExtensions) {
        if (originalFilePath.endsWith(gzipExemptExtension)) {
            // return a really large number so that the original file gets uploaded
            return Number.MAX_SAFE_INTEGER;
        }
    }
    return new Promise((resolve, reject) => {
        const inputStream = fs.createReadStream(originalFilePath);
        const gzip = zlib.createGzip();
        const outputStream = fs.createWriteStream(tempFilePath);
        inputStream.pipe(gzip).pipe(outputStream);
        outputStream.on('finish', async () => {
            // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload
            try {
                const size = (await fs.promises.stat(tempFilePath)).size;
                resolve(size);
            }
            catch (error) {
                // surface stat failures instead of leaving the promise pending
                reject(error);
            }
        });
        outputStream.on('error', error => {
            // eslint-disable-next-line no-console
            console.log(error);
            // BUG FIX: the original evaluated `reject` as a bare expression (a no-op),
            // so write failures were logged but left this promise pending forever
            reject(error);
        });
        // propagate read/compression errors as well so the promise cannot hang
        inputStream.on('error', reject);
        gzip.on('error', reject);
    });
}
exports . createGZipFileOnDisk = createGZipFileOnDisk ;
/**
 * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O
 * @param originalFilePath the path to the original file that is being GZipped
 * @returns a buffer with the GZip file
 */
async function createGZipFileInBuffer(originalFilePath) {
    const sourceStream = fs.createReadStream(originalFilePath);
    const gzip = zlib.createGzip();
    sourceStream.pipe(gzip);
    // Collect the compressed output in memory chunk by chunk. Gzip streams are
    // async-iterable, so `for await` replaces the downleveled __asyncValues helper
    // (see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043).
    const collected = [];
    for await (const chunk of gzip) {
        collected.push(chunk);
    }
    return Buffer.concat(collected);
}
exports . createGZipFileInBuffer = createGZipFileInBuffer ;
//# sourceMappingURL=upload-gzip.js.map
/***/ } ) ,
/***/ 665 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . summary = exports . markdownSummary = exports . SUMMARY _DOCS _URL = exports . SUMMARY _ENV _VAR = void 0 ;
const os _1 = _ _webpack _require _ _ ( 87 ) ;
const fs _1 = _ _webpack _require _ _ ( 747 ) ;
const { access , appendFile , writeFile } = fs _1 . promises ;
exports . SUMMARY _ENV _VAR = 'GITHUB_STEP_SUMMARY' ;
exports . SUMMARY _DOCS _URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary' ;
class Summary {
    constructor() {
        this._buffer = '';
    }
    /**
     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
     * Also checks r/w permissions.
     *
     * @returns step summary file path
     */
    async filePath() {
        if (this._filePath) {
            return this._filePath;
        }
        const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
        if (!pathFromEnv) {
            throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
        }
        try {
            await access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
        }
        catch (_a) {
            throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
        }
        this._filePath = pathFromEnv;
        return this._filePath;
    }
    /**
     * Wraps content in an HTML tag, adding any HTML attributes
     *
     * @param {string} tag HTML tag to wrap
     * @param {string | null} content content within the tag
     * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
     *
     * @returns {string} content wrapped in HTML element
     */
    wrap(tag, content, attrs = {}) {
        let attrString = '';
        for (const [key, value] of Object.entries(attrs)) {
            attrString += ` ${key}="${value}"`;
        }
        // void-element style when there is no content (e.g. <br>, <hr>, <img ...>)
        if (!content) {
            return `<${tag}${attrString}>`;
        }
        return `<${tag}${attrString}>${content}</${tag}>`;
    }
    /**
     * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
     *
     * @param {SummaryWriteOptions} [options] (optional) options for write operation
     *
     * @returns {Promise<Summary>} summary instance
     */
    async write(options) {
        const overwrite = Boolean(options && options.overwrite);
        const filePath = await this.filePath();
        const writeFunc = overwrite ? writeFile : appendFile;
        await writeFunc(filePath, this._buffer, { encoding: 'utf8' });
        return this.emptyBuffer();
    }
    /**
     * Clears the summary buffer and wipes the summary file
     *
     * @returns {Summary} summary instance
     */
    async clear() {
        return this.emptyBuffer().write({ overwrite: true });
    }
    /**
     * Returns the current summary buffer as a string
     *
     * @returns {string} string of summary buffer
     */
    stringify() {
        return this._buffer;
    }
    /**
     * If the summary buffer is empty
     *
     * @returns {boolean} true if the buffer is empty
     */
    isEmptyBuffer() {
        return this._buffer.length === 0;
    }
    /**
     * Resets the summary buffer without writing to summary file
     *
     * @returns {Summary} summary instance
     */
    emptyBuffer() {
        this._buffer = '';
        return this;
    }
    /**
     * Adds raw text to the summary buffer
     *
     * @param {string} text content to add
     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
     *
     * @returns {Summary} summary instance
     */
    addRaw(text, addEOL = false) {
        this._buffer += text;
        return addEOL ? this.addEOL() : this;
    }
    /**
     * Adds the operating system-specific end-of-line marker to the buffer
     *
     * @returns {Summary} summary instance
     */
    addEOL() {
        return this.addRaw(os_1.EOL);
    }
    /**
     * Adds an HTML codeblock to the summary buffer
     *
     * @param {string} code content to render within fenced code block
     * @param {string} lang (optional) language to syntax highlight code
     *
     * @returns {Summary} summary instance
     */
    addCodeBlock(code, lang) {
        const attrs = lang ? { lang } : {};
        const element = this.wrap('pre', this.wrap('code', code), attrs);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML list to the summary buffer
     *
     * @param {string[]} items list of items to render
     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
     *
     * @returns {Summary} summary instance
     */
    addList(items, ordered = false) {
        const tag = ordered ? 'ol' : 'ul';
        let listItems = '';
        for (const item of items) {
            listItems += this.wrap('li', item);
        }
        return this.addRaw(this.wrap(tag, listItems)).addEOL();
    }
    /**
     * Adds an HTML table to the summary buffer
     *
     * @param {SummaryTableCell[]} rows table rows
     *
     * @returns {Summary} summary instance
     */
    addTable(rows) {
        let tableBody = '';
        for (const row of rows) {
            let cells = '';
            for (const cell of row) {
                // plain strings render as simple data cells
                if (typeof cell === 'string') {
                    cells += this.wrap('td', cell);
                    continue;
                }
                const { header, data, colspan, rowspan } = cell;
                const tag = header ? 'th' : 'td';
                const attrs = { ...(colspan && { colspan }), ...(rowspan && { rowspan }) };
                cells += this.wrap(tag, data, attrs);
            }
            tableBody += this.wrap('tr', cells);
        }
        return this.addRaw(this.wrap('table', tableBody)).addEOL();
    }
    /**
     * Adds a collapsable HTML details element to the summary buffer
     *
     * @param {string} label text for the closed state
     * @param {string} content collapsable content
     *
     * @returns {Summary} summary instance
     */
    addDetails(label, content) {
        const element = this.wrap('details', this.wrap('summary', label) + content);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML image tag to the summary buffer
     *
     * @param {string} src path to the image you to embed
     * @param {string} alt text description of the image
     * @param {SummaryImageOptions} options (optional) addition image attributes
     *
     * @returns {Summary} summary instance
     */
    addImage(src, alt, options) {
        const { width, height } = options || {};
        const attrs = { ...(width && { width }), ...(height && { height }) };
        const element = this.wrap('img', null, { src, alt, ...attrs });
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML section heading element
     *
     * @param {string} text heading text
     * @param {number | string} [level=1] (optional) the heading level, default: 1
     *
     * @returns {Summary} summary instance
     */
    addHeading(text, level) {
        const tag = `h${level}`;
        // clamp anything outside h1-h6 down to h1
        const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) ? tag : 'h1';
        return this.addRaw(this.wrap(allowedTag, text)).addEOL();
    }
    /**
     * Adds an HTML thematic break (<hr>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addSeparator() {
        return this.addRaw(this.wrap('hr', null)).addEOL();
    }
    /**
     * Adds an HTML line break (<br>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addBreak() {
        return this.addRaw(this.wrap('br', null)).addEOL();
    }
    /**
     * Adds an HTML blockquote to the summary buffer
     *
     * @param {string} text quote text
     * @param {string} cite (optional) citation url
     *
     * @returns {Summary} summary instance
     */
    addQuote(text, cite) {
        const attrs = cite ? { cite } : {};
        return this.addRaw(this.wrap('blockquote', text, attrs)).addEOL();
    }
    /**
     * Adds an HTML anchor tag to the summary buffer
     *
     * @param {string} text link text/content
     * @param {string} href hyperlink
     *
     * @returns {Summary} summary instance
     */
    addLink(text, href) {
        return this.addRaw(this.wrap('a', text, { href })).addEOL();
    }
}
const _summary = new Summary ( ) ;
/ * *
* @ deprecated use ` core.summary `
* /
exports . markdownSummary = _summary ;
exports . summary = _summary ;
//# sourceMappingURL=summary.js.map
/***/ } ) ,
/***/ 669 :
/***/ ( function ( module ) {
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 674 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var wrappy = __webpack_require__(11)
var reqs = Object.create(null)
var once = __webpack_require__(49)

module.exports = wrappy(inflight)

// Deduplicates concurrent async operations keyed by `key`: the first caller
// gets a resolver back, later callers just queue their callbacks and get null.
function inflight (key, cb) {
  if (reqs[key]) {
    reqs[key].push(cb)
    return null
  }
  reqs[key] = [cb]
  return makeres(key)
}

// Builds the once-wrapped resolver that fans results out to every queued callback.
function makeres (key) {
  return once(function RES () {
    var cbs = reqs[key]
    var len = cbs.length
    var args = slice(arguments)

    // XXX It's somewhat ambiguous whether a new callback added in this
    // pass should be queued for later execution if something in the
    // list of callbacks throws, or if it should just be discarded.
    // However, it's such an edge case that it hardly matters, and either
    // choice is likely as surprising as the other.
    // As it happens, we do go ahead and schedule it for later execution.
    try {
      for (var i = 0; i < len; i++) {
        cbs[i].apply(null, args)
      }
    } finally {
      if (cbs.length > len) {
        // added more in the interim.
        // de-zalgo, just in case, but don't call again.
        cbs.splice(0, len)
        process.nextTick(function () {
          RES.apply(null, args)
        })
      } else {
        delete reqs[key]
      }
    }
  })
}

// Copies an `arguments` object into a real array.
function slice (args) {
  return Array.prototype.slice.call(args)
}
/***/ } ) ,
/***/ 681 :
/***/ ( function ( module ) {
"use strict" ;
// True when `path` is an absolute POSIX path (begins with '/').
function posix(path) {
    return path.charAt(0) === '/';
}
// True when `path` is an absolute Windows path: a drive-rooted path
// (C:\foo), a bare separator (\foo), or a UNC path (\\server\share).
function win32(path) {
    // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
    var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
    const match = splitDeviceRe.exec(path);
    const device = (match[1]) || '';
    const isUnc = Boolean(device && device.charAt(1) !== ':');
    // UNC paths are always absolute
    return Boolean(match[2] || isUnc);
}
module . exports = process . platform === 'win32' ? win32 : posix ;
module . exports . posix = posix ;
module . exports . win32 = win32 ;
/***/ } ) ,
/***/ 689 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Export Node's util.inherits when it is available; otherwise fall back to
// the bundled standalone implementation (webpack module 315).
try {
  var util = __webpack_require__(669);
  /* istanbul ignore next */
  // some stripped-down environments expose `util` without `inherits`
  if (typeof util.inherits !== 'function') throw '';
  module.exports = util.inherits;
} catch (e) {
  /* istanbul ignore next */
  module.exports = __webpack_require__(315);
}
/***/ } ) ,
/***/ 694 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
// String enum of the action's inputs (TypeScript enum emit; the IIFE merges
// into any pre-existing exports.Inputs object).
var Inputs;
(function (Inputs) {
    Inputs["Name"] = "name";
    Inputs["Path"] = "path";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
// String enum of the action's outputs.
var Outputs;
(function (Outputs) {
    Outputs["DownloadPath"] = "download-path";
})(Outputs = exports.Outputs || (exports.Outputs = {}));
/***/ } ) ,
/***/ 695 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _validate = _interopRequireDefault ( _ _webpack _require _ _ ( 78 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Extracts the RFC 4122 version field from a UUID string.
 * @param {string} uuid a UUID string
 * @returns {number} the version digit (e.g. 4 for a random UUID)
 * @throws {TypeError} if `uuid` fails validation
 */
function version(uuid) {
    if (!(0, _validate.default)(uuid)) {
        throw TypeError('Invalid UUID');
    }
    // The version is the first hex digit of the third group (string offset 14);
    // `slice` replaces the deprecated String.prototype.substr.
    return parseInt(uuid.slice(14, 15), 16);
}
var _default = version ;
exports . default = _default ;
/***/ } ) ,
/***/ 733 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _rng = _interopRequireDefault ( _ _webpack _require _ _ ( 844 ) ) ;
var _stringify = _interopRequireDefault ( _ _webpack _require _ _ ( 411 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Generates an RFC 4122 version-4 (random) UUID.
 * @param {object} [options] may provide `random` (16 pre-generated bytes) or an `rng` function
 * @param {Uint8Array|Array} [buf] optional output buffer to receive the 16 raw bytes
 * @param {number} [offset] write position within `buf` (defaults to 0)
 * @returns {string|Uint8Array} the UUID string, or `buf` when one was supplied
 */
function v4(options, buf, offset) {
    const opts = options || {};
    const rnds = opts.random || (opts.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
    rnds[6] = rnds[6] & 0x0f | 0x40;
    rnds[8] = rnds[8] & 0x3f | 0x80;
    // Copy bytes to buffer, if provided
    if (buf) {
        const start = offset || 0;
        for (let i = 0; i < 16; ++i) {
            buf[start + i] = rnds[i];
        }
        return buf;
    }
    return (0, _stringify.default)(rnds);
}
var _default = v4 ;
exports . default = _default ;
/***/ } ) ,
/***/ 742 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . OidcClient = void 0 ;
const http _client _1 = _ _webpack _require _ _ ( 993 ) ;
const auth _1 = _ _webpack _require _ _ ( 363 ) ;
const core _1 = _ _webpack _require _ _ ( 470 ) ;
class OidcClient {
    /**
     * Builds an HttpClient pre-configured with the OIDC bearer token and
     * (by default) automatic retries.
     */
    static createHttpClient(allowRetry = true, maxRetry = 10) {
        const requestOptions = {
            allowRetries: allowRetry,
            maxRetries: maxRetry
        };
        return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
    }
    /** Reads the runner-provided ID token request token; throws if absent. */
    static getRequestToken() {
        const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
        if (!token) {
            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
        }
        return token;
    }
    /** Reads the runner-provided ID token request URL; throws if absent. */
    static getIDTokenUrl() {
        const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
        if (!runtimeUrl) {
            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
        }
        return runtimeUrl;
    }
    /** Fetches the ID token value from the token service URL. */
    static async getCall(id_token_url) {
        const httpclient = OidcClient.createHttpClient();
        const res = await httpclient
            .getJson(id_token_url)
            .catch(error => {
            throw new Error(`Failed to get ID Token. \n
        Error Code : ${error.statusCode}\n
        Error Message: ${error.result.message}`);
        });
        const id_token = res.result === null || res.result === void 0 ? void 0 : res.result.value;
        if (!id_token) {
            throw new Error('Response json body do not have ID Token field');
        }
        return id_token;
    }
    /**
     * Requests a fresh OIDC ID token, optionally scoped to `audience`,
     * and registers it as a secret so it is masked in logs.
     */
    static async getIDToken(audience) {
        try {
            // New ID Token is requested from action service
            let id_token_url = OidcClient.getIDTokenUrl();
            if (audience) {
                const encodedAudience = encodeURIComponent(audience);
                id_token_url = `${id_token_url}&audience=${encodedAudience}`;
            }
            core_1.debug(`ID token url is ${id_token_url}`);
            const id_token = await OidcClient.getCall(id_token_url);
            core_1.setSecret(id_token);
            return id_token;
        }
        catch (error) {
            throw new Error(`Error message: ${error.message}`);
        }
    }
}
exports . OidcClient = OidcClient ;
//# sourceMappingURL=oidc-utils.js.map
/***/ } ) ,
/***/ 747 :
/***/ ( function ( module ) {
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 761 :
/***/ ( function ( module ) {
module . exports = require ( "zlib" ) ;
/***/ } ) ,
/***/ 794 :
/***/ ( function ( module ) {
module . exports = require ( "stream" ) ;
/***/ } ) ,
/***/ 799 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = __importStar(__webpack_require__(470));
const artifact = __importStar(__webpack_require__(214));
const os = __importStar(__webpack_require__(87));
const path_1 = __webpack_require__(622);
const constants_1 = __webpack_require__(694);
/**
 * Entry point for the download-artifact action: downloads one named artifact,
 * or every artifact of the run when no name input is given. Any failure is
 * reported through core.setFailed rather than thrown.
 */
async function run() {
    try {
        const name = core.getInput(constants_1.Inputs.Name, { required: false });
        const path = core.getInput(constants_1.Inputs.Path, { required: false });
        // resolve tilde expansions, path.replace only replaces the first occurrence of a pattern
        const resolvedPath = path.startsWith(`~`)
            ? path_1.resolve(path.replace('~', os.homedir()))
            : path_1.resolve(path);
        core.debug(`Resolved path is ${resolvedPath}`);
        const artifactClient = artifact.create();
        if (!name) {
            // no name input: download everything, one sub-directory per artifact
            core.info('No artifact name specified, downloading all artifacts');
            core.info('Creating an extra directory for each artifact that is being downloaded');
            const downloadResponse = await artifactClient.downloadAllArtifacts(resolvedPath);
            core.info(`There were ${downloadResponse.length} artifacts downloaded`);
            for (const item of downloadResponse) {
                core.info(`Artifact ${item.artifactName} was downloaded to ${item.downloadPath}`);
            }
        }
        else {
            // download a single artifact
            core.info(`Starting download for ${name}`);
            const downloadOptions = {
                createArtifactFolder: false
            };
            const downloadResponse = await artifactClient.downloadArtifact(name, resolvedPath, downloadOptions);
            core.info(`Artifact ${downloadResponse.artifactName} was downloaded to ${downloadResponse.downloadPath}`);
        }
        // output the directory that the artifact(s) was/were downloaded to
        // if no path is provided, an empty string resolves to the current working directory
        core.setOutput(constants_1.Outputs.DownloadPath, resolvedPath);
        core.info('Artifact download has finished successfully');
    }
    catch (err) {
        core.setFailed(err.message);
    }
}
run();
/***/ } ) ,
/***/ 803 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = void 0 ;
var _crypto = _interopRequireDefault ( _ _webpack _require _ _ ( 417 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
/**
 * Computes the MD5 digest of the given input.
 * @param {Array|Buffer|string} bytes byte array, Buffer, or UTF-8 string
 * @returns {Buffer} the 16-byte MD5 digest
 */
function md5(bytes) {
    // normalize into a Buffer without mutating the caller's argument
    let input = bytes;
    if (Array.isArray(input)) {
        input = Buffer.from(input);
    } else if (typeof input === 'string') {
        input = Buffer.from(input, 'utf8');
    }
    return _crypto.default.createHash('md5').update(input).digest();
}
var _default = md5 ;
exports . default = _default ;
/***/ } ) ,
/***/ 835 :
/***/ ( function ( module ) {
module . exports = require ( "url" ) ;
/***/ } ) ,
/***/ 844 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , {
value : true
} ) ;
exports . default = rng ;
var _crypto = _interopRequireDefault ( _ _webpack _require _ _ ( 417 ) ) ;
function _interopRequireDefault ( obj ) { return obj && obj . _ _esModule ? obj : { default : obj } ; }
// Pre-allocated pool of random bytes, refilled in 256-byte batches to
// amortize the cost of crypto.randomFillSync across many rng() calls.
const pool = new Uint8Array(256);
let poolIndex = pool.length; // start exhausted so the first call fills the pool
/**
 * Returns 16 cryptographically-strong random bytes.
 * @returns {Uint8Array} a fresh 16-byte copy sliced from the pool
 */
function rng() {
    // refill when fewer than 16 unread bytes remain
    if (poolIndex > pool.length - 16) {
        _crypto.default.randomFillSync(pool);
        poolIndex = 0;
    }
    const start = poolIndex;
    poolIndex += 16;
    return pool.slice(start, poolIndex);
}
/***/ } ) ,
/***/ 855 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
2020-04-28 15:45:21 +02:00
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const zlib = _ _importStar ( _ _webpack _require _ _ ( 761 ) ) ;
const utils _1 = _ _webpack _require _ _ ( 870 ) ;
const url _1 = _ _webpack _require _ _ ( 835 ) ;
const status _reporter _1 = _ _webpack _require _ _ ( 176 ) ;
const perf _hooks _1 = _ _webpack _require _ _ ( 630 ) ;
const http _manager _1 = _ _webpack _require _ _ ( 452 ) ;
const config _variables _1 = _ _webpack _require _ _ ( 401 ) ;
2021-01-04 15:47:26 +01:00
const requestUtils _1 = _ _webpack _require _ _ ( 489 ) ;
2020-04-28 15:45:21 +02:00
class DownloadHttpClient {
constructor ( ) {
2020-08-04 17:55:46 +02:00
this . downloadHttpManager = new http _manager _1 . HttpManager ( config _variables _1 . getDownloadFileConcurrency ( ) , '@actions/artifact-download' ) ;
2020-04-28 15:45:21 +02:00
// downloads are usually significantly faster than uploads so display status information every second
this . statusReporter = new status _reporter _1 . StatusReporter ( 1000 ) ;
}
/ * *
* Gets a list of all artifacts that are in a specific container
* /
listArtifacts ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const artifactUrl = utils _1 . getArtifactUrl ( ) ;
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
const client = this . downloadHttpManager . getClient ( 0 ) ;
2020-07-31 17:16:59 +02:00
const headers = utils _1 . getDownloadHeaders ( 'application/json' ) ;
2021-01-04 15:47:26 +01:00
const response = yield requestUtils _1 . retryHttpClientRequest ( 'List Artifacts' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return client . get ( artifactUrl , headers ) ; } ) ) ;
2020-04-28 15:45:21 +02:00
const body = yield response . readBody ( ) ;
2021-01-04 15:47:26 +01:00
return JSON . parse ( body ) ;
2020-04-28 15:45:21 +02:00
} ) ;
}
/ * *
* Fetches a set of container items that describe the contents of an artifact
* @ param artifactName the name of the artifact
* @ param containerUrl the artifact container URL for the run
* /
getContainerItems ( artifactName , containerUrl ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// the itemPath search parameter controls which containers will be returned
const resourceUrl = new url _1 . URL ( containerUrl ) ;
resourceUrl . searchParams . append ( 'itemPath' , artifactName ) ;
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
const client = this . downloadHttpManager . getClient ( 0 ) ;
2020-07-31 17:16:59 +02:00
const headers = utils _1 . getDownloadHeaders ( 'application/json' ) ;
2021-01-04 15:47:26 +01:00
const response = yield requestUtils _1 . retryHttpClientRequest ( 'Get Container Items' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return client . get ( resourceUrl . toString ( ) , headers ) ; } ) ) ;
2020-04-28 15:45:21 +02:00
const body = yield response . readBody ( ) ;
2021-01-04 15:47:26 +01:00
return JSON . parse ( body ) ;
2020-04-28 15:45:21 +02:00
} ) ;
}
/ * *
* Concurrently downloads all the files that are part of an artifact
* @ param downloadItems information about what items to download and where to save them
* /
downloadSingleArtifact ( downloadItems ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const DOWNLOAD _CONCURRENCY = config _variables _1 . getDownloadFileConcurrency ( ) ;
// limit the number of files downloaded at a single time
core . debug ( ` Download file concurrency is set to ${ DOWNLOAD _CONCURRENCY } ` ) ;
const parallelDownloads = [ ... new Array ( DOWNLOAD _CONCURRENCY ) . keys ( ) ] ;
let currentFile = 0 ;
let downloadedFiles = 0 ;
core . info ( ` Total number of files that will be downloaded: ${ downloadItems . length } ` ) ;
this . statusReporter . setTotalNumberOfFilesToProcess ( downloadItems . length ) ;
this . statusReporter . start ( ) ;
yield Promise . all ( parallelDownloads . map ( ( index ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
while ( currentFile < downloadItems . length ) {
const currentFileToDownload = downloadItems [ currentFile ] ;
currentFile += 1 ;
const startTime = perf _hooks _1 . performance . now ( ) ;
yield this . downloadIndividualFile ( index , currentFileToDownload . sourceLocation , currentFileToDownload . targetPath ) ;
if ( core . isDebug ( ) ) {
core . debug ( ` File: ${ ++ downloadedFiles } / ${ downloadItems . length } . ${ currentFileToDownload . targetPath } took ${ ( perf _hooks _1 . performance . now ( ) - startTime ) . toFixed ( 3 ) } milliseconds to finish downloading ` ) ;
}
this . statusReporter . incrementProcessedCount ( ) ;
}
} ) ) )
. catch ( error => {
throw new Error ( ` Unable to download the artifact: ${ error } ` ) ;
} )
. finally ( ( ) => {
this . statusReporter . stop ( ) ;
// safety dispose all connections
this . downloadHttpManager . disposeAndReplaceAllClients ( ) ;
} ) ;
} ) ;
}
/ * *
* Downloads an individual file
* @ param httpClientIndex the index of the http client that is used to make all of the calls
* @ param artifactLocation origin location where a file will be downloaded from
* @ param downloadPath destination location for the file being downloaded
* /
downloadIndividualFile ( httpClientIndex , artifactLocation , downloadPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let retryCount = 0 ;
const retryLimit = config _variables _1 . getRetryLimit ( ) ;
2020-12-15 16:55:26 +01:00
let destinationStream = fs . createWriteStream ( downloadPath ) ;
2020-07-31 17:16:59 +02:00
const headers = utils _1 . getDownloadHeaders ( 'application/json' , true , true ) ;
2020-04-28 15:45:21 +02:00
// a single GET request is used to download a file
const makeDownloadRequest = ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const client = this . downloadHttpManager . getClient ( httpClientIndex ) ;
2020-07-31 17:16:59 +02:00
return yield client . get ( artifactLocation , headers ) ;
2020-04-28 15:45:21 +02:00
} ) ;
// check the response headers to determine if the file was compressed using gzip
2020-07-31 17:16:59 +02:00
const isGzip = ( incomingHeaders ) => {
return ( 'content-encoding' in incomingHeaders &&
incomingHeaders [ 'content-encoding' ] === 'gzip' ) ;
2020-04-28 15:45:21 +02:00
} ;
// Increments the current retry count and then checks if the retry limit has been reached
// If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided,
// it will be used
const backOff = ( retryAfterValue ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
retryCount ++ ;
if ( retryCount > retryLimit ) {
return Promise . reject ( new Error ( ` Retry limit has been reached. Unable to download ${ artifactLocation } ` ) ) ;
}
else {
this . downloadHttpManager . disposeAndReplaceClient ( httpClientIndex ) ;
if ( retryAfterValue ) {
// Back off by waiting the specified time denoted by the retry-after header
core . info ( ` Backoff due to too many requests, retry # ${ retryCount } . Waiting for ${ retryAfterValue } milliseconds before continuing the download ` ) ;
2021-01-04 15:47:26 +01:00
yield utils _1 . sleep ( retryAfterValue ) ;
2020-04-28 15:45:21 +02:00
}
else {
// Back off using an exponential value that depends on the retry count
const backoffTime = utils _1 . getExponentialRetryTimeInMilliseconds ( retryCount ) ;
core . info ( ` Exponential backoff for retry # ${ retryCount } . Waiting for ${ backoffTime } milliseconds before continuing the download ` ) ;
2021-01-04 15:47:26 +01:00
yield utils _1 . sleep ( backoffTime ) ;
2020-04-28 15:45:21 +02:00
}
core . info ( ` Finished backoff for retry # ${ retryCount } , continuing with download ` ) ;
}
} ) ;
2020-12-15 16:55:26 +01:00
const isAllBytesReceived = ( expected , received ) => {
// be lenient, if any input is missing, assume success, i.e. not truncated
if ( ! expected ||
! received ||
process . env [ 'ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION' ] ) {
core . info ( 'Skipping download validation.' ) ;
return true ;
}
return parseInt ( expected ) === received ;
} ;
const resetDestinationStream = ( fileDownloadPath ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
destinationStream . close ( ) ;
yield utils _1 . rmFile ( fileDownloadPath ) ;
destinationStream = fs . createWriteStream ( fileDownloadPath ) ;
} ) ;
2020-04-28 15:45:21 +02:00
// keep trying to download a file until a retry limit has been reached
while ( retryCount <= retryLimit ) {
let response ;
try {
response = yield makeDownloadRequest ( ) ;
}
catch ( error ) {
// if an error is caught, it is usually indicative of a timeout so retry the download
core . info ( 'An error occurred while attempting to download a file' ) ;
// eslint-disable-next-line no-console
console . log ( error ) ;
// increment the retryCount and use exponential backoff to wait before making the next request
yield backOff ( ) ;
continue ;
}
2020-12-15 16:55:26 +01:00
let forceRetry = false ;
2020-04-28 15:45:21 +02:00
if ( utils _1 . isSuccessStatusCode ( response . message . statusCode ) ) {
// The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string
// which can cause some gzip encoded data to be lost
// Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents
2020-12-15 16:55:26 +01:00
try {
const isGzipped = isGzip ( response . message . headers ) ;
yield this . pipeResponseToFile ( response , destinationStream , isGzipped ) ;
if ( isGzipped ||
isAllBytesReceived ( response . message . headers [ 'content-length' ] , yield utils _1 . getFileSize ( downloadPath ) ) ) {
return ;
}
else {
forceRetry = true ;
}
}
catch ( error ) {
// retry on error, most likely streams were corrupted
forceRetry = true ;
}
2020-04-28 15:45:21 +02:00
}
2020-12-15 16:55:26 +01:00
if ( forceRetry || utils _1 . isRetryableStatusCode ( response . message . statusCode ) ) {
2020-04-28 15:45:21 +02:00
core . info ( ` A ${ response . message . statusCode } response code has been received while attempting to download an artifact ` ) ;
2020-12-15 16:55:26 +01:00
resetDestinationStream ( downloadPath ) ;
2020-04-28 15:45:21 +02:00
// if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff
utils _1 . isThrottledStatusCode ( response . message . statusCode )
? yield backOff ( utils _1 . tryGetRetryAfterValueTimeInMilliseconds ( response . message . headers ) )
: yield backOff ( ) ;
}
else {
// Some unexpected response code, fail immediately and stop the download
utils _1 . displayHttpDiagnostics ( response ) ;
return Promise . reject ( new Error ( ` Unexpected http ${ response . message . statusCode } during download for ${ artifactLocation } ` ) ) ;
}
}
} ) ;
}
/ * *
* Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary
* @ param response the http response received when downloading a file
* @ param destinationStream the stream where the file should be written to
* @ param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it
* /
pipeResponseToFile ( response , destinationStream , isGzip ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield new Promise ( ( resolve , reject ) => {
if ( isGzip ) {
const gunzip = zlib . createGunzip ( ) ;
response . message
2020-12-15 16:55:26 +01:00
. on ( 'error' , error => {
core . error ( ` An error occurred while attempting to read the response stream ` ) ;
gunzip . close ( ) ;
destinationStream . close ( ) ;
reject ( error ) ;
} )
2020-04-28 15:45:21 +02:00
. pipe ( gunzip )
2020-12-15 16:55:26 +01:00
. on ( 'error' , error => {
core . error ( ` An error occurred while attempting to decompress the response stream ` ) ;
destinationStream . close ( ) ;
reject ( error ) ;
} )
2020-04-28 15:45:21 +02:00
. pipe ( destinationStream )
. on ( 'close' , ( ) => {
resolve ( ) ;
} )
. on ( 'error' , error => {
2020-12-15 16:55:26 +01:00
core . error ( ` An error occurred while writing a downloaded file to ${ destinationStream . path } ` ) ;
2020-04-28 15:45:21 +02:00
reject ( error ) ;
} ) ;
}
else {
response . message
2020-12-15 16:55:26 +01:00
. on ( 'error' , error => {
core . error ( ` An error occurred while attempting to read the response stream ` ) ;
destinationStream . close ( ) ;
reject ( error ) ;
} )
2020-04-28 15:45:21 +02:00
. pipe ( destinationStream )
. on ( 'close' , ( ) => {
resolve ( ) ;
} )
. on ( 'error' , error => {
2020-12-15 16:55:26 +01:00
core . error ( ` An error occurred while writing a downloaded file to ${ destinationStream . path } ` ) ;
2020-04-28 15:45:21 +02:00
reject ( error ) ;
} ) ;
}
} ) ;
return ;
} ) ;
}
}
exports . DownloadHttpClient = DownloadHttpClient ;
//# sourceMappingURL=download-http-client.js.map
/***/ } ) ,
/***/ 856 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
exports . alphasort = alphasort
exports . alphasorti = alphasorti
exports . setopts = setopts
exports . ownProp = ownProp
exports . makeAbs = makeAbs
exports . finish = finish
exports . mark = mark
exports . isIgnored = isIgnored
exports . childrenIgnored = childrenIgnored
// Safe own-property check that works even for objects created with
// Object.create(null) or that shadow hasOwnProperty.
function ownProp (obj, field) {
  var hasOwn = Object.prototype.hasOwnProperty
  return hasOwn.call(obj, field)
}
// module requires (identifiers restored from whitespace corruption)
var path = __webpack_require__(622)
var minimatch = __webpack_require__(93)
var isAbsolute = __webpack_require__(681)
var Minimatch = minimatch.Minimatch
// Case-insensitive locale-aware comparator used when sorting glob matches.
function alphasorti (a, b) {
  var la = a.toLowerCase()
  var lb = b.toLowerCase()
  return la.localeCompare(lb)
}
// Case-sensitive locale-aware comparator used when sorting glob matches.
function alphasort (a, b) {
  var order = a.localeCompare(b)
  return order
}
// Normalizes options.ignore onto self.ignore: always an array, and when
// non-empty each pattern is converted to a { matcher, gmatcher } entry.
function setupIgnores (self, options) {
  var ignores = options.ignore || []
  if (!Array.isArray(ignores))
    ignores = [ignores]
  self.ignore = ignores.length ? ignores.map(ignoreMap) : ignores
}
// ignore patterns are always in dot:true mode.
// Patterns that end with '/**' additionally get a gmatcher for the bare
// parent directory so the directory itself is ignored, not just its contents.
function ignoreMap (pattern) {
  var gmatcher = null
  if (pattern.slice(-3) === '/**') {
    var gpattern = pattern.replace(/(\/\*\*)+$/, '')
    gmatcher = new Minimatch(gpattern, { dot: true })
  }
  return {
    matcher: new Minimatch(pattern, { dot: true }),
    gmatcher: gmatcher
  }
}
// Copies/normalizes user options onto the glob instance (`self`) and builds
// the Minimatch matcher. Mutates `options` to disable negation/comments.
function setopts (self, pattern, options) {
  if (!options)
    options = {}

  // base-matching: just use globstar for that.
  if (options.matchBase && pattern.indexOf("/") === -1) {
    if (options.noglobstar) {
      throw new Error("base matching requires globstar")
    }
    pattern = "**/" + pattern
  }

  self.silent = Boolean(options.silent)
  self.pattern = pattern
  self.strict = options.strict !== false
  self.realpath = Boolean(options.realpath)
  self.realpathCache = options.realpathCache || Object.create(null)
  self.follow = Boolean(options.follow)
  self.dot = Boolean(options.dot)
  self.mark = Boolean(options.mark)
  self.nodir = Boolean(options.nodir)
  if (self.nodir)
    self.mark = true
  self.sync = Boolean(options.sync)
  self.nounique = Boolean(options.nounique)
  self.nonull = Boolean(options.nonull)
  self.nosort = Boolean(options.nosort)
  self.nocase = Boolean(options.nocase)
  self.stat = Boolean(options.stat)
  self.noprocess = Boolean(options.noprocess)
  self.absolute = Boolean(options.absolute)

  self.maxLength = options.maxLength || Infinity
  self.cache = options.cache || Object.create(null)
  self.statCache = options.statCache || Object.create(null)
  self.symlinks = options.symlinks || Object.create(null)

  setupIgnores(self, options)

  self.changedCwd = false
  var cwd = process.cwd()
  if (!ownProp(options, "cwd"))
    self.cwd = cwd
  else {
    self.cwd = path.resolve(options.cwd)
    self.changedCwd = self.cwd !== cwd
  }

  self.root = options.root || path.resolve(self.cwd, "/")
  self.root = path.resolve(self.root)
  if (process.platform === "win32")
    self.root = self.root.replace(/\\/g, "/")

  // TODO: is an absolute `cwd` supposed to be resolved against `root`?
  // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
  self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
  if (process.platform === "win32")
    self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
  self.nomount = Boolean(options.nomount)

  // disable comments and negation in Minimatch.
  // Note that they are not supported in Glob itself anyway.
  options.nonegate = true
  options.nocomment = true

  self.minimatch = new Minimatch(pattern, options)
  self.options = self.minimatch.options
}
// Collapses self.matches (one match-set per pattern in the glob set) into the
// final self.found array, applying nonull/unique/sort/mark/nodir/ignore rules.
function finish (self) {
  var unique = !self.nounique
  // when deduplicating, collect into a null-prototype object keyed by match
  var all = unique ? Object.create(null) : []

  for (var i = 0, l = self.matches.length; i < l; i++) {
    var matchSet = self.matches[i]
    if (!matchSet || Object.keys(matchSet).length === 0) {
      if (self.nonull) {
        // do like the shell, and spit out the literal glob
        var literal = self.minimatch.globSet[i]
        if (unique)
          all[literal] = true
        else
          all.push(literal)
      }
    } else {
      // had matches
      var keys = Object.keys(matchSet)
      if (unique)
        keys.forEach(function (k) {
          all[k] = true
        })
      else
        all.push.apply(all, keys)
    }
  }

  if (unique)
    all = Object.keys(all)

  if (!self.nosort)
    all = all.sort(self.nocase ? alphasorti : alphasort)

  // at *some* point we statted all of these
  if (self.mark) {
    for (var j = 0; j < all.length; j++) {
      all[j] = self._mark(all[j])
    }
    if (self.nodir) {
      all = all.filter(function (e) {
        var notDir = !(/\/$/.test(e))
        var c = self.cache[e] || self.cache[makeAbs(self, e)]
        if (notDir && c)
          notDir = c !== 'DIR' && !Array.isArray(c)
        return notDir
      })
    }
  }

  if (self.ignore.length)
    all = all.filter(function (m) {
      return !isIgnored(self, m)
    })

  self.found = all
}
// Adds or removes a trailing slash on `p` so it agrees with the cached stat
// info (directories get a slash, files lose one), mirroring the cache entries
// for the corrected name.
function mark (self, p) {
  var abs = makeAbs(self, p)
  var cached = self.cache[abs]
  var result = p

  if (cached) {
    var isDir = cached === 'DIR' || Array.isArray(cached)
    var endsWithSlash = p.slice(-1) === '/'

    if (isDir && !endsWithSlash)
      result += '/'
    else if (!isDir && endsWithSlash)
      result = result.slice(0, -1)

    if (result !== p) {
      var resultAbs = makeAbs(self, result)
      self.statCache[resultAbs] = self.statCache[abs]
      self.cache[resultAbs] = self.cache[abs]
    }
  }

  return result
}
// lotta situps...
// Resolves `f` to an absolute path relative to the glob's root/cwd,
// normalizing to forward slashes on Windows.
function makeAbs (self, f) {
  var abs
  if (f.charAt(0) === '/') {
    abs = path.join(self.root, f)
  } else if (isAbsolute(f) || f === '') {
    abs = f
  } else if (self.changedCwd) {
    abs = path.resolve(self.cwd, f)
  } else {
    abs = path.resolve(f)
  }

  if (process.platform === 'win32')
    abs = abs.replace(/\\/g, '/')

  return abs
}
// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
function isIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function (item) {
    if (item.matcher.match(path))
      return true
    return Boolean(item.gmatcher && item.gmatcher.match(path))
  })
}
// True when some ignore entry's gmatcher (the '/**' parent form) matches,
// i.e. everything under `path` is ignored and need not be walked.
function childrenIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function (item) {
    return Boolean(item.gmatcher && item.gmatcher.match(path))
  })
}
/***/ } ) ,
/***/ 870 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript-emitted __awaiter helper: drives a generator so `yield`ed values
// behave like `await`, resolving the returned promise with the generator's
// return value. (identifier restored from whitespace corruption)
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// module requires (identifiers restored from whitespace corruption)
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = __webpack_require__(470);
const fs_1 = __webpack_require__(747);
const http_client_1 = __webpack_require__(539);
const auth_1 = __webpack_require__(226);
const config_variables_1 = __webpack_require__(401);
/**
 * Returns a retry time in milliseconds that exponentially gets larger
 * depending on the amount of retries that have been attempted
 * @param {number} retryCount the number of retries already attempted (>= 0)
 * @returns {number} a randomized backoff time in milliseconds
 * @throws {Error} when retryCount is negative
 */
function getExponentialRetryTimeInMilliseconds(retryCount) {
    if (retryCount < 0) {
        throw new Error('RetryCount should not be negative');
    }
    else if (retryCount === 0) {
        return config_variables_1.getInitialRetryIntervalInMilliseconds();
    }
    const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() *
        config_variables_1.getRetryMultiplier() *
        retryCount;
    const maxTime = minTime * config_variables_1.getRetryMultiplier();
    // returns a random number between the minTime (inclusive) and the maxTime (exclusive)
    return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
}
exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds;
/**
 * Parses a env variable that is a number
 * @param {string} key name of the environment variable to read
 * @returns {number | undefined} the parsed non-negative number, or undefined
 *   when the variable is unset, non-numeric, or negative
 */
function parseEnvNumber(key) {
    const parsed = Number(process.env[key]);
    return Number.isNaN(parsed) || parsed < 0 ? undefined : parsed;
}
exports . parseEnvNumber = parseEnvNumber ;
/**
 * Various utility functions to help with the necessary API calls
 * @returns {string} the api-version query/header value used on every call
 */
function getApiVersion() {
    const API_VERSION = '6.0-preview';
    return API_VERSION;
}
exports . getApiVersion = getApiVersion ;
/**
 * Checks whether an http status code is in the 2xx success range
 * @param {number | undefined} statusCode status code from an http response
 * @returns {boolean} true only for 2xx codes; false for missing codes
 */
function isSuccessStatusCode(statusCode) {
    return statusCode ? statusCode >= 200 && statusCode < 300 : false;
}
exports . isSuccessStatusCode = isSuccessStatusCode ;
/**
 * Checks whether an http status code is 403 Forbidden
 * @param {number | undefined} statusCode status code from an http response
 * @returns {boolean}
 */
function isForbiddenStatusCode(statusCode) {
    if (!statusCode) {
        return false;
    }
    return statusCode === http_client_1.HttpCodes.Forbidden;
}
exports.isForbiddenStatusCode = isForbiddenStatusCode;
/**
 * Checks whether a failed request with this status code is worth retrying
 * @param {number | undefined} statusCode status code from an http response
 * @returns {boolean}
 */
function isRetryableStatusCode(statusCode) {
    if (!statusCode) {
        return false;
    }
    const retryableStatusCodes = [
        http_client_1.HttpCodes.BadGateway,
        http_client_1.HttpCodes.GatewayTimeout,
        http_client_1.HttpCodes.InternalServerError,
        http_client_1.HttpCodes.ServiceUnavailable,
        http_client_1.HttpCodes.TooManyRequests,
        413 // Payload Too Large
    ];
    return retryableStatusCodes.includes(statusCode);
}
exports.isRetryableStatusCode = isRetryableStatusCode;
/**
 * Checks whether an http status code is 429 Too Many Requests (throttling)
 * @param {number | undefined} statusCode status code from an http response
 * @returns {boolean}
 */
function isThrottledStatusCode(statusCode) {
    if (!statusCode) {
        return false;
    }
    return statusCode === http_client_1.HttpCodes.TooManyRequests;
}
exports.isThrottledStatusCode = isThrottledStatusCode;
/**
 * Attempts to get the retry-after value from a set of http headers. The retry time
 * is originally denoted in seconds, so if present, it is converted to milliseconds
 * @param headers all the headers received when making an http call
 * @returns {number | undefined} retry time in milliseconds, or undefined when
 *   the header is missing or non-numeric
 */
function tryGetRetryAfterValueTimeInMilliseconds(headers) {
    if (headers['retry-after']) {
        const retryTime = Number(headers['retry-after']);
        if (!isNaN(retryTime)) {
            core_1.info(`Retry-After header is present with a value of ${retryTime}`);
            return retryTime * 1000;
        }
        core_1.info(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`);
        return undefined;
    }
    core_1.info(`No retry-after header was found. Dumping all headers for diagnostic purposes`);
    // eslint-disable-next-line no-console
    console.log(headers);
    return undefined;
}
exports.tryGetRetryAfterValueTimeInMilliseconds = tryGetRetryAfterValueTimeInMilliseconds;
/**
 * Builds a Content-Range header value.
 * Format: `bytes start-end/fileSize`; start and end are inclusive.
 * For a 200 byte chunk starting at byte 0:
 * Content-Range: bytes 0-199/200
 * (format string restored from whitespace corruption; the example above
 * documents the intended output)
 */
function getContentRange(start, end, total) {
    return `bytes ${start}-${end}/${total}`;
}
exports . getContentRange = getContentRange ;
/**
 * Sets all the necessary headers when downloading an artifact
 * @param {string} contentType the content type of the request body
 * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
 * @param {boolean} acceptGzip can we accept a gzip encoded response
 * @returns appropriate headers to make a specific http call during artifact download
 */
function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) {
    const requestOptions = {};
    if (contentType) {
        requestOptions['Content-Type'] = contentType;
    }
    if (isKeepAlive) {
        requestOptions['Connection'] = 'Keep-Alive';
        // keep alive for at least 10 seconds before closing the connection
        requestOptions['Keep-Alive'] = '10';
    }
    if (acceptGzip) {
        // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header
        requestOptions['Accept-Encoding'] = 'gzip';
        requestOptions['Accept'] = `application/octet-stream;api-version=${getApiVersion()}`;
    }
    else {
        // default to application/json if we are not working with gzip content
        requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`;
    }
    return requestOptions;
}
exports.getDownloadHeaders = getDownloadHeaders;
/**
 * Sets all the necessary headers when uploading an artifact
 * @param {string} contentType the type of content being uploaded
 * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
 * @param {boolean} isGzip is the connection being used to upload GZip compressed content
 * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed
 * @param {number} contentLength the length of the content that is being uploaded
 * @param {string} contentRange the range of the content that is being uploaded
 * @returns appropriate headers to make a specific http call during artifact upload
 */
function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange) {
    const requestOptions = {};
    requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`;
    if (contentType) {
        requestOptions['Content-Type'] = contentType;
    }
    if (isKeepAlive) {
        requestOptions['Connection'] = 'Keep-Alive';
        // keep alive for at least 10 seconds before closing the connection
        requestOptions['Keep-Alive'] = '10';
    }
    if (isGzip) {
        requestOptions['Content-Encoding'] = 'gzip';
        // server needs the original (uncompressed) size to validate the upload
        requestOptions['x-tfs-filelength'] = uncompressedLength;
    }
    if (contentLength) {
        requestOptions['Content-Length'] = contentLength;
    }
    if (contentRange) {
        requestOptions['Content-Range'] = contentRange;
    }
    return requestOptions;
}
exports.getUploadHeaders = getUploadHeaders;
/**
 * Creates an http client configured with the runtime token as bearer auth
 * @param {string} userAgent the user agent string sent with all requests
 */
function createHttpClient(userAgent) {
    return new http_client_1.HttpClient(userAgent, [
        new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken())
    ]);
}
exports.createHttpClient = createHttpClient;
/**
 * Builds the artifact endpoint URL for the current workflow run
 * (template restored from whitespace corruption)
 * @returns {string} the artifacts API URL
 */
function getArtifactUrl() {
    const artifactUrl = `${config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`;
    core_1.debug(`Artifact Url: ${artifactUrl}`);
    return artifactUrl;
}
exports.getArtifactUrl = getArtifactUrl;
/**
 * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information
 * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but
 * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only
 * the information that we want.
 *
 * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided.
 * Other information such as the headers, the response code and message might be useful, so this is displayed.
 */
function displayHttpDiagnostics(response) {
    core_1.info(`##### Begin Diagnostic HTTP information #####
Status Code: ${response.message.statusCode}
Status Message: ${response.message.statusMessage}
Header Information: ${JSON.stringify(response.message.headers, undefined, 2)}
###### End Diagnostic HTTP information ######`);
}
exports.displayHttpDiagnostics = displayHttpDiagnostics;
/**
 * Recursively creates all of the supplied directories so that downloaded
 * files have a destination folder
 * @param {string[]} directories the directory paths to create
 */
async function createDirectoriesForArtifact(directories) {
    for (const directory of directories) {
        // recursive:true also tolerates directories that already exist
        await fs_1.promises.mkdir(directory, {
            recursive: true
        });
    }
}
exports . createDirectoriesForArtifact = createDirectoriesForArtifact ;
/**
 * Creates (or truncates) each of the given paths as an empty file
 * @param {string[]} emptyFilesToCreate paths of the empty files
 */
async function createEmptyFilesForArtifact(emptyFilesToCreate) {
    for (const filePath of emptyFilesToCreate) {
        // opening with 'w' creates/truncates the file; close the handle immediately
        await (await fs_1.promises.open(filePath, 'w')).close();
    }
}
exports . createEmptyFilesForArtifact = createEmptyFilesForArtifact ;
/**
 * Gets the size of a file on disk
 * @param {string} filePath path of the file to stat
 * @returns {Promise<number>} the file size in bytes
 */
async function getFileSize(filePath) {
    const stats = await fs_1.promises.stat(filePath);
    core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`);
    return stats.size;
}
exports . getFileSize = getFileSize ;
/**
 * Removes (unlinks) a file from disk
 * @param {string} filePath path of the file to delete
 */
async function rmFile(filePath) {
    await fs_1.promises.unlink(filePath);
}
exports . rmFile = rmFile ;
/**
 * Clamps the requested retention days to the repository's maximum setting
 * @param {number} retentionInput requested retention period in days
 * @param {string | undefined} retentionSetting repository max retention (numeric string)
 * @returns {number} the retention period that will actually be used
 * @throws {Error} when retentionInput is negative
 * NOTE(review): the guard allows 0 even though the message says minimum is 1 — confirm intended
 */
function getProperRetention(retentionInput, retentionSetting) {
    if (retentionInput < 0) {
        throw new Error('Invalid retention, minimum value is 1.');
    }
    let retention = retentionInput;
    if (retentionSetting) {
        const maxRetention = parseInt(retentionSetting);
        if (!isNaN(maxRetention) && maxRetention < retention) {
            core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`);
            retention = maxRetention;
        }
    }
    return retention;
}
exports . getProperRetention = getProperRetention ;
/**
 * Asynchronously waits for the given number of milliseconds
 * @param {number} milliseconds how long to wait
 */
async function sleep(milliseconds) {
    return new Promise(resolve => setTimeout(resolve, milliseconds));
}
exports . sleep = sleep ;
2020-04-28 15:45:21 +02:00
//# sourceMappingURL=utils.js.map
/***/ } ) ,
/***/ 875 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2021-12-07 17:44:54 +01:00
"use strict" ;
// module requires (identifiers restored from whitespace corruption)
const { promisify } = __webpack_require__(669);
const tmp = __webpack_require__(150);
// Promise-friendly wrappers around the callback-based `tmp` API.

// file
module.exports.fileSync = tmp.fileSync;
const fileWithOptions = promisify((options, cb) =>
  tmp.file(options, (err, path, fd, cleanup) => {
    if (err) {
      cb(err);
    } else {
      cb(undefined, { path, fd, cleanup: promisify(cleanup) });
    }
  })
);
module.exports.file = async (options) => fileWithOptions(options);
// run `fn` with a temp file, guaranteeing cleanup afterwards
module.exports.withFile = async function withFile(fn, options) {
  const { path, fd, cleanup } = await module.exports.file(options);
  try {
    return await fn({ path, fd });
  } finally {
    await cleanup();
  }
};

// directory
module.exports.dirSync = tmp.dirSync;
const dirWithOptions = promisify((options, cb) =>
  tmp.dir(options, (err, path, cleanup) => {
    if (err) {
      cb(err);
    } else {
      cb(undefined, { path, cleanup: promisify(cleanup) });
    }
  })
);
module.exports.dir = async (options) => dirWithOptions(options);
// run `fn` with a temp directory, guaranteeing cleanup afterwards
module.exports.withDir = async function withDir(fn, options) {
  const { path, cleanup } = await module.exports.dir(options);
  try {
    return await fn({ path });
  } finally {
    await cleanup();
  }
};

// name generation
module.exports.tmpNameSync = tmp.tmpNameSync;
module.exports.tmpName = promisify(tmp.tmpName);
module.exports.tmpdir = tmp.tmpdir;
module.exports.setGracefulCleanup = tmp.setGracefulCleanup;
/***/ } ) ,
/***/ 893 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// module prelude (identifiers restored from whitespace corruption)
Object.defineProperty(exports, "__esModule", {
    value: true
});
exports.default = void 0;

var _rng = _interopRequireDefault(__webpack_require__(844));
var _stringify = _interopRequireDefault(__webpack_require__(411));

// function declaration is hoisted, so use before definition above is fine
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html

// Cached node id / clock sequence, and the timestamp of the previously
// generated uuid — used to detect clock regression and to simulate a
// higher-resolution clock. See https://github.com/uuidjs/uuid for API details.
let _nodeId;
let _clockseq;
let _lastMSecs = 0;
let _lastNSecs = 0;

function v1(options, buf, offset) {
  options = options || {};
  let idx = (buf && offset) || 0;
  const bytes = buf || new Array(16);
  let node = options.node || _nodeId;
  let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq;

  // node and clockseq need to be initialized to random values if they're not
  // specified. We do this lazily to minimize issues related to insufficient
  // system entropy. See #189
  if (node == null || clockseq == null) {
    const seedBytes = options.random || (options.rng || _rng.default)();
    if (node == null) {
      // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
      node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
    }
    if (clockseq == null) {
      // Per 4.2.2, randomize (14 bit) clockseq
      clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
    }
  }

  // UUID timestamps are 100 nano-second units since the Gregorian epoch,
  // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
  // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
  // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
  let msecs = options.msecs !== undefined ? options.msecs : Date.now();

  // Per 4.2.1.2, use count of uuid's generated during the current clock
  // cycle to simulate higher resolution clock
  let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1;

  // Time since last uuid creation (in msecs)
  const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000;

  // Per 4.2.1.2, Bump clockseq on clock regression
  if (dt < 0 && options.clockseq === undefined) {
    clockseq = (clockseq + 1) & 0x3fff;
  }

  // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
  // time interval
  if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
    nsecs = 0;
  }

  // Per 4.2.1.2 Throw error if too many uuids are requested
  if (nsecs >= 10000) {
    throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
  }

  _lastMSecs = msecs;
  _lastNSecs = nsecs;
  _clockseq = clockseq;

  // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
  msecs += 12219292800000;

  // `time_low`
  const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
  bytes[idx++] = (tl >>> 24) & 0xff;
  bytes[idx++] = (tl >>> 16) & 0xff;
  bytes[idx++] = (tl >>> 8) & 0xff;
  bytes[idx++] = tl & 0xff;

  // `time_mid`
  const tmh = ((msecs / 0x100000000) * 10000) & 0xfffffff;
  bytes[idx++] = (tmh >>> 8) & 0xff;
  bytes[idx++] = tmh & 0xff;

  // `time_high_and_version`
  bytes[idx++] = ((tmh >>> 24) & 0xf) | 0x10; // include version
  bytes[idx++] = (tmh >>> 16) & 0xff;

  // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
  bytes[idx++] = (clockseq >>> 8) | 0x80;

  // `clock_seq_low`
  bytes[idx++] = clockseq & 0xff;

  // `node`
  for (let n = 0; n < 6; ++n) {
    bytes[idx + n] = node[n];
  }

  return buf || (0, _stringify.default)(bytes);
}

var _default = v1;
exports . default = _default ;
2020-04-28 15:45:21 +02:00
/***/ } ) ,
/***/ 896 :
/***/ ( function ( module ) {
module . exports = function ( xs , fn ) {
var res = [ ] ;
for ( var i = 0 ; i < xs . length ; i ++ ) {
var x = fn ( xs [ i ] , i ) ;
if ( isArray ( x ) ) res . push . apply ( res , x ) ;
else res . push ( x ) ;
}
return res ;
} ;
var isArray = Array . isArray || function ( xs ) {
return Object . prototype . toString . call ( xs ) === '[object Array]' ;
} ;
/***/ } ) ,
/***/ 950 :
2020-12-15 16:55:26 +01:00
/***/ ( function ( _ _unusedmodule , exports ) {
2020-04-28 15:45:21 +02:00
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Resolves the proxy URL to use for `reqUrl` from the environment.
 *
 * Reads https_proxy/HTTPS_PROXY for https requests and
 * http_proxy/HTTP_PROXY for http requests; the lowercase variable wins.
 *
 * @param reqUrl the URL the request will be sent to.
 * @returns a URL instance for the proxy, or undefined when no proxy is
 *   configured or the host is bypassed via no_proxy.
 */
function getProxyUrl(reqUrl) {
    let usingSsl = reqUrl.protocol === 'https:';
    let proxyUrl;
    if (checkBypass(reqUrl)) {
        // Host is on the no_proxy list: return undefined so no proxy is used.
        return proxyUrl;
    }
    let proxyVar;
    if (usingSsl) {
        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
    }
    else {
        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
    }
    if (proxyVar) {
        proxyUrl = new URL(proxyVar);
    }
    return proxyUrl;
}
exports.getProxyUrl = getProxyUrl;
/**
 * Determines whether requests to `reqUrl` should bypass the proxy, based on
 * the comma-separated no_proxy/NO_PROXY environment variable (lowercase wins).
 *
 * An entry matches either the bare hostname or `hostname:port`; comparison
 * is case-insensitive and surrounding whitespace is ignored.
 *
 * @param reqUrl the URL the request will be sent to.
 * @returns true when the request host matches a no_proxy entry.
 */
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the request port
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Format the request hostname and hostname with port
    let upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (let upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
            return true;
        }
    }
    return false;
}
exports . checkBypass = checkBypass ;
2021-12-07 17:44:54 +01:00
/***/ } ) ,
2023-03-08 22:06:44 +01:00
/***/ 993 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
2021-12-07 17:44:54 +01:00
2023-03-08 22:06:44 +01:00
"use strict" ;
2021-12-07 17:44:54 +01:00
2023-03-08 22:06:44 +01:00
/* eslint-disable @typescript-eslint/no-explicit-any */
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
// Bundled @actions/http-client index: declare exports up front, then pull in
// node's http/https, the proxy helper module (95) and the tunnel agent (413).
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(__webpack_require__(605));
const https = __importStar(__webpack_require__(211));
const pm = __importStar(__webpack_require__(95));
const tunnel = __importStar(__webpack_require__(413));
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 * @returns the proxy URL as a string, or '' when no proxy applies.
 */
function getProxyUrl(serverUrl) {
    const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
/**
 * Error raised for failing HTTP responses; carries the response status code
 * (and, when available, the parsed response body on `result`).
 */
class HttpClientError extends Error {
    constructor(message, statusCode) {
        super(message);
        this.statusCode = statusCode;
        this.name = 'HttpClientError';
        // Restore the prototype chain so `instanceof HttpClientError`
        // works even on down-leveled class output.
        Object.setPrototypeOf(this, HttpClientError.prototype);
    }
}
exports . HttpClientError = HttpClientError ;
/**
 * Thin wrapper around an incoming HTTP message that can buffer the
 * whole response body into a string.
 */
class HttpClientResponse {
    constructor(message) {
        // The raw response stream (emits 'data' chunks and 'end').
        this.message = message;
    }
    // Reads the entire body; resolves with the concatenated chunks as a string.
    readBody() {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
                let output = Buffer.alloc(0);
                this.message.on('data', (chunk) => {
                    output = Buffer.concat([output, chunk]);
                });
                this.message.on('end', () => {
                    resolve(output.toString());
                });
            }));
        });
    }
}
exports.HttpClientResponse = HttpClientResponse;
/**
 * Reports whether `requestUrl` uses the https: scheme.
 * Throws (from the URL constructor) when the string is not a valid URL.
 */
function isHttps(requestUrl) {
    return new URL(requestUrl).protocol === 'https:';
}
exports . isHttps = isHttps ;
/**
 * HTTP/HTTPS client with optional redirect following, retries for idempotent
 * verbs, keep-alive agent reuse, proxy tunneling (via the pm/tunnel modules)
 * and pluggable authentication handlers.
 */
class HttpClient {
    /**
     * @param userAgent value for the user-agent header (optional).
     * @param handlers auth handlers given a chance to prepare each request
     *   and to respond to 401 challenges (optional).
     * @param requestOptions defaults controlling ssl checking, redirects,
     *   retries, keep-alive, socket timeout and headers (optional).
     */
    constructor(userAgent, handlers, requestOptions) {
        // Defaults; selectively overridden from requestOptions below.
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    // Verb-specific wrappers around request(); each returns an HttpClientResponse.
    options(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
        });
    }
    get(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('GET', requestUrl, null, additionalHeaders || {});
        });
    }
    del(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('DELETE', requestUrl, null, additionalHeaders || {});
        });
    }
    post(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('POST', requestUrl, data, additionalHeaders || {});
        });
    }
    patch(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('PATCH', requestUrl, data, additionalHeaders || {});
        });
    }
    put(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('PUT', requestUrl, data, additionalHeaders || {});
        });
    }
    head(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('HEAD', requestUrl, null, additionalHeaders || {});
        });
    }
    // Sends a readable stream as the request body.
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(verb, requestUrl, stream, additionalHeaders);
        });
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    getJson(requestUrl, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            const res = yield this.get(requestUrl, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    // JSON-serializes `obj`, POSTs it and parses the JSON response.
    postJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.post(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    // JSON-serializes `obj`, PUTs it and parses the JSON response.
    putJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.put(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    // JSON-serializes `obj`, PATCHes it and parses the JSON response.
    patchJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.patch(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    request(verb, requestUrl, data, headers) {
        return __awaiter(this, void 0, void 0, function* () {
            if (this._disposed) {
                throw new Error('Client has already been disposed.');
            }
            const parsedUrl = new URL(requestUrl);
            let info = this._prepareRequest(verb, parsedUrl, headers);
            // Only perform retries on reads since writes may not be idempotent.
            const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
                ? this._maxRetries + 1
                : 1;
            let numTries = 0;
            let response;
            do {
                response = yield this.requestRaw(info, data);
                // Check if it's an authentication challenge
                if (response &&
                    response.message &&
                    response.message.statusCode === HttpCodes.Unauthorized) {
                    let authenticationHandler;
                    for (const handler of this.handlers) {
                        if (handler.canHandleAuthentication(response)) {
                            authenticationHandler = handler;
                            break;
                        }
                    }
                    if (authenticationHandler) {
                        return authenticationHandler.handleAuthentication(this, info, data);
                    }
                    else {
                        // We have received an unauthorized response but have no handlers to handle it.
                        // Let the response return to the caller.
                        return response;
                    }
                }
                let redirectsRemaining = this._maxRedirects;
                while (response.message.statusCode &&
                    HttpRedirectCodes.includes(response.message.statusCode) &&
                    this._allowRedirects &&
                    redirectsRemaining > 0) {
                    const redirectUrl = response.message.headers['location'];
                    if (!redirectUrl) {
                        // if there's no location to redirect to, we won't
                        break;
                    }
                    const parsedRedirectUrl = new URL(redirectUrl);
                    if (parsedUrl.protocol === 'https:' &&
                        parsedUrl.protocol !== parsedRedirectUrl.protocol &&
                        !this._allowRedirectDowngrade) {
                        throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                    }
                    // we need to finish reading the response before reassigning response
                    // which will leak the open socket.
                    yield response.readBody();
                    // strip authorization header if redirected to a different hostname
                    if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                        for (const header in headers) {
                            // header names are case insensitive
                            if (header.toLowerCase() === 'authorization') {
                                delete headers[header];
                            }
                        }
                    }
                    // let's make the request with the new redirectUrl
                    info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                    response = yield this.requestRaw(info, data);
                    redirectsRemaining--;
                }
                if (!response.message.statusCode ||
                    !HttpResponseRetryCodes.includes(response.message.statusCode)) {
                    // If not a retry code, return immediately instead of retrying
                    return response;
                }
                numTries += 1;
                if (numTries < maxTries) {
                    // Drain the failed response, back off, then retry.
                    yield response.readBody();
                    yield this._performExponentialBackoff(numTries);
                }
            } while (numTries < maxTries);
            return response;
        });
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info, data) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve, reject) => {
                function callbackForResult(err, res) {
                    if (err) {
                        reject(err);
                    }
                    else if (!res) {
                        // If `err` is not passed, then `res` must be passed.
                        reject(new Error('Unknown error'));
                    }
                    else {
                        resolve(res);
                    }
                }
                this.requestRawWithCallback(info, data, callbackForResult);
            });
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info, data, onResult) {
        if (typeof data === 'string') {
            if (!info.options.headers) {
                info.options.headers = {};
            }
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        // Guard so onResult fires exactly once (error, timeout and response
        // events can race).
        let callbackCalled = false;
        function handleResult(err, res) {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        }
        const req = info.httpModule.request(info.options, (msg) => {
            const res = new HttpClientResponse(msg);
            handleResult(undefined, res);
        });
        let socket;
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error(`Request timeout: ${info.options.path}`));
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            // Stream body: end the request when the source stream closes.
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        const parsedUrl = new URL(serverUrl);
        return this._getAgent(parsedUrl);
    }
    // Builds the { parsedUrl, httpModule, options } bundle for one request and
    // lets each handler mutate the options (e.g. add auth headers).
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            for (const handler of this.handlers) {
                handler.prepareRequest(info.options);
            }
        }
        return info;
    }
    // Merges per-request headers over the client's default headers,
    // lower-casing all keys; per-request values win.
    _mergeHeaders(headers) {
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
        }
        return lowercaseKeys(headers || {});
    }
    // Resolution order: caller-supplied header, then client default, then _default.
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    // Selects (and caches, when keep-alive) the agent for a URL: a tunnel
    // agent when a proxy applies, a private keep-alive agent, or the global agent.
    _getAgent(parsedUrl) {
        let agent;
        const proxyUrl = pm.getProxyUrl(parsedUrl);
        const useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.
        if (proxyUrl && proxyUrl.hostname) {
            const agentOptions = {
                maxSockets,
                keepAlive: this._keepAlive,
                proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
                    proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
                })), { host: proxyUrl.hostname, port: proxyUrl.port })
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    // Waits 5ms * 2^min(retryNumber, 10) before the next retry attempt.
    _performExponentialBackoff(retryNumber) {
        return __awaiter(this, void 0, void 0, function* () {
            retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
            const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
            return new Promise(resolve => setTimeout(() => resolve(), ms));
        });
    }
    // Reads the body and converts the response into { statusCode, result, headers },
    // resolving for success / 404 and rejecting with HttpClientError for >299.
    _processResponse(res, options) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
                const statusCode = res.message.statusCode || 0;
                const response = {
                    statusCode,
                    result: null,
                    headers: {}
                };
                // not found leads to null obj returned
                if (statusCode === HttpCodes.NotFound) {
                    resolve(response);
                }
                // get the result from the body
                function dateTimeDeserializer(key, value) {
                    if (typeof value === 'string') {
                        const a = new Date(value);
                        if (!isNaN(a.valueOf())) {
                            return a;
                        }
                    }
                    return value;
                }
                let obj;
                let contents;
                try {
                    contents = yield res.readBody();
                    if (contents && contents.length > 0) {
                        if (options && options.deserializeDates) {
                            obj = JSON.parse(contents, dateTimeDeserializer);
                        }
                        else {
                            obj = JSON.parse(contents);
                        }
                        response.result = obj;
                    }
                    response.headers = res.message.headers;
                }
                catch (err) {
                    // Invalid resource (contents not json); leaving result obj null
                }
                // note that 3xx redirects are handled by the http layer.
                if (statusCode > 299) {
                    let msg;
                    // if exception/error in body, attempt to get better error
                    if (obj && obj.message) {
                        msg = obj.message;
                    }
                    else if (contents && contents.length > 0) {
                        // it may be the case that the exception is in the body message as string
                        msg = contents;
                    }
                    else {
                        msg = `Failed request: (${statusCode})`;
                    }
                    const err = new HttpClientError(msg, statusCode);
                    err.result = response.result;
                    reject(err);
                }
                else {
                    resolve(response);
                }
            }));
        });
    }
}
exports . HttpClient = HttpClient ;
const lowercaseKeys = ( obj ) => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
//# sourceMappingURL=index.js.map
2021-12-07 17:44:54 +01:00
2020-04-28 15:45:21 +02:00
/***/ } )
/******/ } ) ;