mirror of
https://code.forgejo.org/actions/download-artifact.git
synced 2024-11-14 02:26:16 +01:00
3bdf740f02
* V2 Setup * Add end-to-end tests * Update tests * Update tests * Update tests * Update tests again * Misc Updates * Improve logs * Update release * Update README.md * @actions/artifact v0.2.0
2311 lines
No EOL
88 KiB
JavaScript
// Webpack bootstrap: a minimal CommonJS-style runtime. `modules` maps numeric
// ids to factory functions; __webpack_require__ instantiates and caches them.
module.exports =
/******/ (function(modules, runtime) { // webpackBootstrap
/******/ "use strict";
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false, // 'loaded' flag, set after the factory runs
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // Asset base path used by bundled code to locate sibling files on disk
/******/ __webpack_require__.ab = __dirname + "/";
/******/
/******/ // the startup function
/******/ function startup() {
/******/ // Load entry module and return exports
/******/ return __webpack_require__(799);
/******/ };
/******/
/******/ // run startup
/******/ return startup();
/******/ })
/************************************************************************/
/******/ ({
|
|
|
|
/***/ 16:
/***/ (function(module) {

// Bare re-export of Node's built-in `tls` module (used by the tunnel agent below).
module.exports = require("tls");

/***/ }),
|
|
|
|
/***/ 82:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const fs = __importStar(__webpack_require__(747));
|
|
const core_1 = __webpack_require__(470);
|
|
const path_1 = __webpack_require__(622);
|
|
const internal_utils_1 = __webpack_require__(931);
|
|
/**
|
|
* Creates a specification that describes how each file that is part of the artifact will be uploaded
|
|
* @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server
|
|
* @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file
|
|
* @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
|
|
*/
|
|
function getUploadSpecification(artifactName, rootDirectory, artifactFiles) {
|
|
internal_utils_1.checkArtifactName(artifactName);
|
|
const specifications = [];
|
|
if (!fs.existsSync(rootDirectory)) {
|
|
throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`);
|
|
}
|
|
if (!fs.lstatSync(rootDirectory).isDirectory()) {
|
|
throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`);
|
|
}
|
|
// Normalize and resolve, this allows for either absolute or relative paths to be used
|
|
rootDirectory = path_1.normalize(rootDirectory);
|
|
rootDirectory = path_1.resolve(rootDirectory);
|
|
/*
|
|
Example to demonstrate behavior
|
|
|
|
Input:
|
|
artifactName: my-artifact
|
|
rootDirectory: '/home/user/files/plz-upload'
|
|
artifactFiles: [
|
|
'/home/user/files/plz-upload/file1.txt',
|
|
'/home/user/files/plz-upload/file2.txt',
|
|
'/home/user/files/plz-upload/dir/file3.txt'
|
|
]
|
|
|
|
Output:
|
|
specifications: [
|
|
['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'],
|
|
['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'],
|
|
['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt']
|
|
]
|
|
*/
|
|
for (let file of artifactFiles) {
|
|
if (!fs.existsSync(file)) {
|
|
throw new Error(`File ${file} does not exist`);
|
|
}
|
|
if (!fs.lstatSync(file).isDirectory()) {
|
|
// Normalize and resolve, this allows for either absolute or relative paths to be used
|
|
file = path_1.normalize(file);
|
|
file = path_1.resolve(file);
|
|
if (!file.startsWith(rootDirectory)) {
|
|
throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`);
|
|
}
|
|
/*
|
|
uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
|
|
be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts
|
|
|
|
path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt
|
|
join('artifact-name/', 'file-to-upload.txt')
|
|
join('artifact-name/', '/file-to-upload.txt')
|
|
join('artifact-name', 'file-to-upload.txt')
|
|
join('artifact-name', '/file-to-upload.txt')
|
|
*/
|
|
specifications.push({
|
|
absoluteFilePath: file,
|
|
uploadFilePath: path_1.join(artifactName, file.replace(rootDirectory, ''))
|
|
});
|
|
}
|
|
else {
|
|
// Directories are rejected by the server during upload
|
|
core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`);
|
|
}
|
|
}
|
|
return specifications;
|
|
}
|
|
exports.getUploadSpecification = getUploadSpecification;
|
|
//# sourceMappingURL=internal-upload-specification.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 87:
/***/ (function(module) {

// Bare re-export of Node's built-in `os` module.
module.exports = require("os");

/***/ }),
|
|
|
|
/***/ 130:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// TypeScript async/await downlevel helper (emitted by tsc): drives a generator,
// adapting each yielded value into a Promise and settling the outer Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript `import * as ns` interop helper (emitted by tsc).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Bundled module ids; from usage, 747 appears to be node 'fs' and 835 node 'url'
// (ids are not resolvable from this chunk — verify against the module map).
const fs = __importStar(__webpack_require__(747));
const internal_utils_1 = __webpack_require__(931);
const url_1 = __webpack_require__(835);
const internal_config_variables_1 = __webpack_require__(717);
const core_1 = __webpack_require__(470);
|
|
/**
 * Gets a list of all artifacts that are in a specific container
 */
function listArtifacts() {
    return __awaiter(this, void 0, void 0, function* () {
        const artifactUrl = internal_utils_1.getArtifactUrl();
        const client = internal_utils_1.createHttpClient();
        const requestOptions = internal_utils_1.getRequestOptions('application/json');
        const rawResponse = yield client.get(artifactUrl, requestOptions);
        const body = yield rawResponse.readBody();
        if (internal_utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
            return JSON.parse(body);
        }
        // Non-success status or empty body: dump the raw response for debugging, then fail.
        // eslint-disable-next-line no-console
        console.log(rawResponse);
        throw new Error(`Unable to list artifacts for the run`);
    });
}
exports.listArtifacts = listArtifacts;
|
|
/**
 * Fetches a set of container items that describe the contents of an artifact
 * @param artifactName the name of the artifact
 * @param containerUrl the artifact container URL for the run
 */
function getContainerItems(artifactName, containerUrl) {
    return __awaiter(this, void 0, void 0, function* () {
        // The itemPath search parameter controls which containers will be returned
        const resourceUrl = new url_1.URL(containerUrl);
        resourceUrl.searchParams.append('itemPath', artifactName);
        const client = internal_utils_1.createHttpClient();
        const rawResponse = yield client.get(resourceUrl.toString());
        const body = yield rawResponse.readBody();
        if (internal_utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
            return JSON.parse(body);
        }
        // Non-success status or empty body: dump the raw response for debugging, then fail.
        // eslint-disable-next-line no-console
        console.log(rawResponse);
        throw new Error(`Unable to get ContainersItems from ${resourceUrl}`);
    });
}
exports.getContainerItems = getContainerItems;
|
|
/**
 * Concurrently downloads all the files that are part of an artifact
 * @param downloadItems information about what items to download and where to save them
 */
function downloadSingleArtifact(downloadItems) {
    return __awaiter(this, void 0, void 0, function* () {
        const DOWNLOAD_CONCURRENCY = internal_config_variables_1.getDownloadFileConcurrency();
        // Limit the number of files downloaded at a single time
        const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()];
        const client = internal_utils_1.createHttpClient();
        let downloadedFiles = 0;
        // Worker-pool pattern: N workers each repeatedly claim the next index.
        // The read-then-increment below has no `yield` between the two statements,
        // so on the single-threaded event loop no two workers claim the same file.
        yield Promise.all(parallelDownloads.map(() => __awaiter(this, void 0, void 0, function* () {
            while (downloadedFiles < downloadItems.length) {
                const currentFileToDownload = downloadItems[downloadedFiles];
                downloadedFiles += 1;
                yield downloadIndividualFile(client, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath);
            }
        })));
    });
}
exports.downloadSingleArtifact = downloadSingleArtifact;
|
|
/**
 * Downloads an individual file
 * @param client http client that will be used to make the necessary calls
 * @param artifactLocation origin location where a file will be downloaded from
 * @param downloadPath destination location for the file being downloaded
 * @throws if the download (including the single retry on retryable status codes) fails
 */
function downloadIndividualFile(client, artifactLocation, downloadPath) {
    return __awaiter(this, void 0, void 0, function* () {
        const stream = fs.createWriteStream(downloadPath);
        const response = yield client.get(artifactLocation);
        if (internal_utils_1.isSuccessStatusCode(response.message.statusCode)) {
            yield pipeResponseToStream(response, stream);
        }
        else if (internal_utils_1.isRetryableStatusCode(response.message.statusCode)) {
            core_1.warning(`Received http ${response.message.statusCode} during file download, will retry ${artifactLocation} after 10 seconds`);
            yield new Promise(resolve => setTimeout(resolve, 10000));
            const retryResponse = yield client.get(artifactLocation);
            if (internal_utils_1.isSuccessStatusCode(retryResponse.message.statusCode)) {
                // BUGFIX: pipe the retry response. The original code piped the first
                // (failed) `response`, so a successful retry still wrote the failed body.
                yield pipeResponseToStream(retryResponse, stream);
            }
            else {
                // eslint-disable-next-line no-console
                console.log(retryResponse);
                throw new Error(`Unable to download ${artifactLocation}`);
            }
        }
        else {
            // eslint-disable-next-line no-console
            console.log(response);
            throw new Error(`Unable to download ${artifactLocation}`);
        }
    });
}
exports.downloadIndividualFile = downloadIndividualFile;
|
|
// Pipes the body of an http response into a writable stream, resolving when
// the destination stream closes.
function pipeResponseToStream(response, stream) {
    return __awaiter(this, void 0, void 0, function* () {
        return new Promise(resolve => {
            // NOTE(review): only 'close' is handled; an 'error' event on either the
            // response or the write stream would leave this promise pending forever —
            // confirm whether callers rely on the surrounding status-code checks to
            // make that unreachable.
            response.message.pipe(stream).on('close', () => {
                resolve();
            });
        });
    });
}
exports.pipeResponseToStream = pipeResponseToStream;
//# sourceMappingURL=internal-download-http-client.js.map

/***/ }),
|
|
|
|
/***/ 141:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// Appears to be the vendored `tunnel` package: HTTP(S)-over-proxy agents that
// establish tunnels via the HTTP CONNECT method.
var net = __webpack_require__(631);
var tls = __webpack_require__(16);
var http = __webpack_require__(605);
var https = __webpack_require__(211);
var events = __webpack_require__(614);
var assert = __webpack_require__(357);
var util = __webpack_require__(669);

// Public factories, one per (target protocol, proxy protocol) combination.
exports.httpOverHttp = httpOverHttp;
exports.httpsOverHttp = httpsOverHttp;
exports.httpOverHttps = httpOverHttps;
exports.httpsOverHttps = httpsOverHttps;
|
// Agent for plain HTTP requests proxied through an HTTP proxy.
function httpOverHttp(options) {
  const tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;
  return tunnelAgent;
}

// Agent for HTTPS requests proxied through an HTTP proxy
// (CONNECT tunnel, then TLS upgrade via createSecureSocket).
function httpsOverHttp(options) {
  const tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;
  tunnelAgent.createSocket = createSecureSocket;
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}

// Agent for plain HTTP requests proxied through an HTTPS proxy.
function httpOverHttps(options) {
  const tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;
  return tunnelAgent;
}

// Agent for HTTPS requests proxied through an HTTPS proxy.
function httpsOverHttps(options) {
  const tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;
  tunnelAgent.createSocket = createSecureSocket;
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
|
|
|
|
|
|
// Agent that multiplexes requests over a bounded pool of proxy tunnel sockets.
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  // Requests queued while the pool is at maxSockets, and sockets currently pooled.
  self.requests = [];
  self.sockets = [];

  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    // No queued request can reuse this socket: close it and shrink the pool.
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
|
|
|
|
// Entry point called by node's http layer for every request using this agent.
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    function onCloseOrRemove(err) {
      // Tear down bookkeeping so a queued request can take over the freed slot.
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
|
|
|
|
// Opens a CONNECT tunnel through the proxy and hands the raw socket to `cb`.
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  // Reserve a pool slot immediately; it is swapped for the real socket on success.
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated (and unsafe) `new Buffer(string)` API.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade); // for v0.6
  connectReq.once('connect', onConnect); // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
      cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
      'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
|
|
|
|
// Drops a socket (or placeholder) from the pool; ignores untracked sockets.
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  const pos = this.sockets.indexOf(socket);
  if (pos === -1) {
    return;
  }
  this.sockets.splice(pos, 1);

  const pending = this.requests.shift();
  if (pending) {
    // If we have pending requests and a socket gets closed a new one
    // needs to be created to take over in the pool for the one that closed.
    this.createSocket(pending, function(replacementSocket) {
      pending.request.onSocket(replacementSocket);
    });
  }
};
|
|
|
|
// Establishes the CONNECT tunnel, then upgrades the raw socket to TLS so
// HTTPS requests can flow through the proxy end-to-end encrypted.
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      // Strip any ':port' suffix from the host header for the TLS servername (SNI).
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    // Replace the raw tunnel socket with the TLS-wrapped one in the pool.
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
|
|
|
|
|
|
// Normalizes both agent call signatures to a single options object.
function toOptions(host, port, localAddress) {
  // Modern callers (node >= 0.11) already pass an options object through unchanged.
  if (typeof host !== 'string') {
    return host;
  }
  // Legacy positional signature (node <= 0.10): build the options object.
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
|
|
|
|
// Copies own enumerable keys from each subsequent argument onto `target`,
// skipping `undefined` values so explicit settings are never clobbered.
// Later arguments win over earlier ones. Returns `target` (mutated in place).
function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    // BUGFIX: guard against null — `typeof null === 'object'`, and the
    // original code would then throw inside Object.keys(null).
    if (overrides !== null && typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}
|
|
|
|
|
|
// Verbose logging hook: enabled when NODE_DEBUG contains the word 'tunnel'.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    // Prefix every message with 'TUNNEL:' so it is attributable in mixed logs.
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  // No-op when debugging is disabled.
  debug = function() {};
}
exports.debug = debug; // for test

/***/ }),
|
|
|
|
/***/ 195:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// TypeScript `import * as ns` interop helper (emitted by tsc).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Bundled module 622 appears to be node 'path' (named accordingly elsewhere
// in this bundle) — verify against the module map.
const path = __importStar(__webpack_require__(622));
|
|
/**
 * Creates a specification for a set of files that will be downloaded
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe that files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 * @returns {rootDownloadLocation, directoryStructure, filesToDownload}
 */
function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) {
    // Every unique directory that must exist before any file download begins.
    const neededDirectories = new Set();
    const specification = {
        rootDownloadLocation: includeRootDirectory
            ? path.join(downloadPath, artifactName)
            : downloadPath,
        directoryStructure: [],
        filesToDownload: []
    };
    for (const entry of artifactEntries) {
        // The container may hold several artifacts; keep only entries whose path
        // begins with this artifact's name (either separator style).
        const belongsToArtifact =
            entry.path.startsWith(`${artifactName}/`) ||
            entry.path.startsWith(`${artifactName}\\`);
        if (!belongsToArtifact) {
            continue;
        }
        // Normalize all separators to the local OS.
        const normalizedEntryPath = path.normalize(entry.path);
        // entry.path always starts with the artifact name; strip it when the
        // root directory should not be part of the download path.
        const relativeOrFullPath = includeRootDirectory
            ? normalizedEntryPath
            : normalizedEntryPath.replace(artifactName, '');
        const targetPath = path.join(downloadPath, relativeOrFullPath);
        // Case insensitive folder structure maintained in the backend; not every
        // folder item is created, so the 'folder' itemType cannot be relied upon.
        // The file entries themselves determine the directory structure.
        if (entry.itemType === 'file') {
            neededDirectories.add(path.dirname(targetPath));
            specification.filesToDownload.push({
                sourceLocation: entry.contentLocation,
                targetPath: targetPath
            });
        }
    }
    specification.directoryStructure = [...neededDirectories];
    return specification;
}
|
|
// Public export of this internal module.
exports.getDownloadSpecification = getDownloadSpecification;
//# sourceMappingURL=internal-download-specification.js.map

/***/ }),
|
|
|
|
/***/ 211:
/***/ (function(module) {

// Bare re-export of Node's built-in `https` module.
module.exports = require("https");

/***/ }),
|
|
|
|
/***/ 214:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const internal_artifact_client_1 = __webpack_require__(369);
/**
 * Constructs an ArtifactClient
 */
function create() {
    // Thin factory so consumers never depend on the internal client module directly.
    return internal_artifact_client_1.DefaultArtifactClient.create();
}
exports.create = create;
//# sourceMappingURL=artifact-client.js.map

/***/ }),
|
|
|
|
/***/ 226:
/***/ (function(__unusedmodule, exports) {

"use strict";

// Credential handlers for the bundled http-client. Each implements the same
// interface: prepareRequest / canHandleAuthentication / handleAuthentication.
Object.defineProperty(exports, "__esModule", { value: true });
|
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    /**
     * Adds an HTTP Basic authorization header, built from the stored
     * username/password pair, to the outgoing request options.
     */
    prepareRequest(options) {
        const rawCredentials = `${this.username}:${this.password}`;
        const encoded = Buffer.from(rawCredentials).toString('base64');
        options.headers['Authorization'] = `Basic ${encoded}`;
    }
    // This handler performs pre-authorization only; it never reacts to a 401.
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
|
|
// Public export: HTTP Basic auth handler.
exports.BasicCredentialHandler = BasicCredentialHandler;
|
|
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    /**
     * Adds a Bearer token authorization header to the outgoing request options.
     * Currently implements pre-authorization only.
     * TODO: support preAuth = false where it hooks on 401
     */
    prepareRequest(options) {
        options.headers['Authorization'] = `Bearer ${this.token}`;
    }
    // This handler performs pre-authorization only; it never reacts to a 401.
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
|
|
// Public export: Bearer token auth handler.
exports.BearerCredentialHandler = BearerCredentialHandler;
|
|
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    /**
     * Adds a Basic authorization header using the fixed username 'PAT' and the
     * personal access token as the password. Pre-authorization only.
     * TODO: support preAuth = false where it hooks on 401
     */
    prepareRequest(options) {
        const encoded = Buffer.from(`PAT:${this.token}`).toString('base64');
        options.headers['Authorization'] = `Basic ${encoded}`;
    }
    // This handler performs pre-authorization only; it never reacts to a 401.
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
|
|
// Public export: personal access token auth handler.
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;

/***/ }),
|
|
|
|
/***/ 357:
/***/ (function(module) {

// Bare re-export of Node's built-in `assert` module.
module.exports = require("assert");

/***/ }),
|
|
|
|
/***/ 369:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// TypeScript async/await downlevel helper (emitted by tsc).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript `import * as ns` interop helper (emitted by tsc).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const internal_upload_specification_1 = __webpack_require__(82);
const internal_upload_http_client_1 = __webpack_require__(715);
const internal_utils_1 = __webpack_require__(931);
const internal_download_http_client_1 = __webpack_require__(130);
const internal_download_specification_1 = __webpack_require__(195);
const internal_config_variables_1 = __webpack_require__(717);
const path_1 = __webpack_require__(622);
|
|
// High-level client tying together the upload/download specification builders
// and their http clients.
class DefaultArtifactClient {
    /**
     * Constructs a DefaultArtifactClient
     */
    static create() {
        return new DefaultArtifactClient();
    }
    /**
     * Uploads an artifact
     */
    uploadArtifact(name, files, rootDirectory, options) {
        return __awaiter(this, void 0, void 0, function* () {
            internal_utils_1.checkArtifactName(name);
            // Get specification for the files being uploaded
            const uploadSpecification = internal_upload_specification_1.getUploadSpecification(name, rootDirectory, files);
            const uploadResponse = {
                artifactName: name,
                artifactItems: [],
                size: 0,
                failedItems: []
            };
            if (uploadSpecification.length === 0) {
                // Not an error: an empty upload just produces an empty response.
                core.warning(`No files found that can be uploaded`);
            }
            else {
                // Create an entry for the artifact in the file container
                const response = yield internal_upload_http_client_1.createArtifactInFileContainer(name);
                if (!response.fileContainerResourceUrl) {
                    core.debug(response.toString());
                    throw new Error('No URL provided by the Artifact Service to upload an artifact to');
                }
                core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`);
                // Upload each of the files that were found concurrently
                const uploadResult = yield internal_upload_http_client_1.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options);
                //Update the size of the artifact to indicate we are done uploading
                yield internal_upload_http_client_1.patchArtifactSize(uploadResult.size, name);
                core.info(`Finished uploading artifact ${name}. Reported size is ${uploadResult.size} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`);
                uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath);
                uploadResponse.size = uploadResult.size;
                uploadResponse.failedItems = uploadResult.failedItems;
            }
            return uploadResponse;
        });
    }
    // Downloads a single named artifact; `path` defaults to the workspace directory.
    downloadArtifact(name, path, options) {
        var _a;
        return __awaiter(this, void 0, void 0, function* () {
            const artifacts = yield internal_download_http_client_1.listArtifacts();
            if (artifacts.count === 0) {
                throw new Error(`Unable to find any artifacts for the associated workflow`);
            }
            const artifactToDownload = artifacts.value.find(artifact => {
                return artifact.name === name;
            });
            if (!artifactToDownload) {
                throw new Error(`Unable to find an artifact with the name: ${name}`);
            }
            const items = yield internal_download_http_client_1.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl);
            if (!path) {
                path = internal_config_variables_1.getWorkSpaceDirectory();
            }
            path = path_1.normalize(path);
            path = path_1.resolve(path);
            // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
            const downloadSpecification = internal_download_specification_1.getDownloadSpecification(name, items.value, path, ((_a = options) === null || _a === void 0 ? void 0 : _a.createArtifactFolder) || false);
            if (downloadSpecification.filesToDownload.length === 0) {
                core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`);
            }
            else {
                // Create all necessary directories recursively before starting any download
                yield internal_utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure);
                yield internal_download_http_client_1.downloadSingleArtifact(downloadSpecification.filesToDownload);
            }
            return {
                artifactName: name,
                downloadPath: downloadSpecification.rootDownloadLocation
            };
        });
    }
    // Downloads every artifact of the run, each into its own subdirectory of `path`.
    downloadAllArtifacts(path) {
        return __awaiter(this, void 0, void 0, function* () {
            const response = [];
            const artifacts = yield internal_download_http_client_1.listArtifacts();
            if (artifacts.count === 0) {
                core.info('Unable to find any artifacts for the associated workflow');
                return response;
            }
            if (!path) {
                path = internal_config_variables_1.getWorkSpaceDirectory();
            }
            path = path_1.normalize(path);
            path = path_1.resolve(path);
            const ARTIFACT_CONCURRENCY = internal_config_variables_1.getDownloadArtifactConcurrency();
            const parallelDownloads = [...new Array(ARTIFACT_CONCURRENCY).keys()];
            let downloadedArtifacts = 0;
            // Worker-pool: each worker claims the next artifact index; the claim and
            // increment are adjacent with no `yield` in between, so indexes are unique.
            yield Promise.all(parallelDownloads.map(() => __awaiter(this, void 0, void 0, function* () {
                while (downloadedArtifacts < artifacts.count) {
                    const currentArtifactToDownload = artifacts.value[downloadedArtifacts];
                    downloadedArtifacts += 1;
                    // Get container entries for the specific artifact
                    const items = yield internal_download_http_client_1.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl);
                    // Promise.All is not correctly inferring that 'path' is no longer possibly undefined: https://github.com/microsoft/TypeScript/issues/34925
                    const downloadSpecification = internal_download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, // eslint-disable-line @typescript-eslint/no-non-null-assertion
                    true);
                    if (downloadSpecification.filesToDownload.length === 0) {
                        core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`);
                    }
                    else {
                        yield internal_utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure);
                        yield internal_download_http_client_1.downloadSingleArtifact(downloadSpecification.filesToDownload);
                    }
                    response.push({
                        artifactName: currentArtifactToDownload.name,
                        downloadPath: downloadSpecification.rootDownloadLocation
                    });
                }
            })));
            return response;
        });
    }
}
exports.DefaultArtifactClient = DefaultArtifactClient;
//# sourceMappingURL=internal-artifact-client.js.map

/***/ }),
|
|
|
|
/***/ 413:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
// Proxy-tunneling support: re-export the bundled 'tunnel' package (webpack module 141).
module.exports = __webpack_require__(141);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 431:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const os = __importStar(__webpack_require__(87));
|
|
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    // Render the command and emit it on its own stdout line, where the
    // Actions runner scans for the `::` command syntax.
    const rendered = new Command(command, properties, message);
    process.stdout.write(rendered.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
|
|
// Convenience wrapper for commands that carry no properties (e.g. `::group::name`).
const issue = (name, message = '') => issueCommand(name, {}, message);
exports.issue = issue;
|
|
// Delimiter that opens and closes the command portion of an output line.
const CMD_STRING = '::';
/**
 * In-memory representation of a runner workflow command.
 * Serializes to the wire format `::name key=value,key=value::message`.
 */
class Command {
    constructor(command, properties, message) {
        // An empty command name would produce an unparseable line, so a
        // recognizable placeholder is substituted instead.
        this.command = command ? command : 'missing.command';
        this.properties = properties;
        this.message = message;
    }
    /** Render this command as a single `::name props::message` line. */
    toString() {
        const parts = [CMD_STRING, this.command];
        if (this.properties && Object.keys(this.properties).length > 0) {
            // A space always follows the command name whenever property keys
            // exist, even if every value ends up being skipped below.
            parts.push(' ');
            const rendered = [];
            for (const key of Object.keys(this.properties)) {
                const val = this.properties[key];
                // Falsy property values are intentionally omitted.
                if (val) {
                    rendered.push(`${key}=${escapeProperty(val)}`);
                }
            }
            parts.push(rendered.join(','));
        }
        parts.push(CMD_STRING, escapeData(this.message));
        return parts.join('');
    }
}
// Apply an ordered list of [pattern, replacement] rules to a possibly-missing string.
function applyEscapes(value, rules) {
    let out = value || '';
    for (const [pattern, replacement] of rules) {
        out = out.replace(pattern, replacement);
    }
    return out;
}
// Escape the message portion: percent-encode '%', CR and LF ('%' first so
// already-escaped sequences are not double-encoded).
function escapeData(s) {
    return applyEscapes(s, [[/%/g, '%25'], [/\r/g, '%0D'], [/\n/g, '%0A']]);
}
// Escape property values: like escapeData, plus ':' and ',' which are
// structural characters in the property list.
function escapeProperty(s) {
    return applyEscapes(s, [[/%/g, '%25'], [/\r/g, '%0D'], [/\n/g, '%0A'], [/:/g, '%3A'], [/,/g, '%2C']]);
}
|
|
//# sourceMappingURL=command.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 470:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const command_1 = __webpack_require__(431);
|
|
const os = __importStar(__webpack_require__(87));
|
|
const path = __importStar(__webpack_require__(622));
|
|
/**
 * The code to exit an action
 */
// TypeScript enum transpilation: builds a bidirectional name <-> value map.
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
|
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable
 */
function exportVariable(name, val) {
    // Mutate this process's environment immediately, then tell the runner
    // (via the set-env command) so later job steps inherit the variable too.
    process.env[name] = val;
    command_1.issueCommand('set-env', { name }, val);
}
exports.exportVariable = exportVariable;
|
|
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    // The runner masks every later occurrence of the value in log output.
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
|
|
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath directory to prepend to PATH
 */
function addPath(inputPath) {
    command_1.issueCommand('add-path', {}, inputPath);
    // Also update this process's PATH so the change takes effect immediately,
    // using the platform-appropriate separator.
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
|
|
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    // The runner exposes inputs as INPUT_<NAME> environment variables, with
    // spaces in the input name replaced by underscores.
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const val = process.env[envKey] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return val.trim();
}
exports.getInput = getInput;
|
|
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store
 */
function setOutput(name, value) {
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
|
|
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    // Mark the process for a failing exit code, then surface the message as
    // an error annotation in the log.
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
|
|
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Writes debug message to user log
 * (shown only when step debug logging is enabled on the runner)
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
|
|
/**
 * Adds an error issue
 * @param message error issue message
 */
function error(message) {
    command_1.issue('error', message);
}
exports.error = error;
|
|
/**
 * Adds a warning issue
 * @param message warning issue message
 */
function warning(message) {
    command_1.issue('warning', message);
}
exports.warning = warning;
|
|
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    // Plain text (not a `::` command), so the runner prints it verbatim.
    process.stdout.write(message + os.EOL);
}
exports.info = info;
|
|
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
|
|
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
|
|
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even when fn throws, so later log output
            // is not swallowed into a never-terminated foldable section.
            endGroup();
        }
        return result;
    });
}
exports.group = group;
|
|
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store
 */
function saveState(name, value) {
    command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
|
|
/**
 * Gets the value of an state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string (empty string when the state was never saved)
 */
function getState(name) {
    // State saved via saveState is surfaced to the post-job hook as a
    // STATE_<name> environment variable.
    const stored = process.env[`STATE_${name}`];
    return stored || '';
}
exports.getState = getState;
|
|
//# sourceMappingURL=core.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 539:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const url = __webpack_require__(835);
|
|
const http = __webpack_require__(605);
|
|
const https = __webpack_require__(211);
|
|
const pm = __webpack_require__(950);
|
|
let tunnel;
|
|
// HTTP status codes referenced by the client (TypeScript enum transpilation:
// builds a bidirectional name <-> numeric-value map).
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
|
// Well-known header names used by the JSON convenience methods.
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
|
|
// MIME types used as defaults for the JSON convenience methods.
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
|
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 * @returns the proxy's href, or '' when no proxy applies
 */
function getProxyUrl(serverUrl) {
    const parsed = url.parse(serverUrl);
    const proxyUrl = pm.getProxyUrl(parsed);
    if (!proxyUrl) {
        return '';
    }
    return proxyUrl.href;
}
exports.getProxyUrl = getProxyUrl;
|
|
// Status codes that trigger automatic redirect following.
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
// Transient server errors that are retried (only for idempotent verbs).
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
// Verbs treated as idempotent and therefore safe to retry.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Exponential backoff: delay = TimeSlice * 2^retry ms, with retry capped at Ceiling.
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
|
|
/**
 * Wraps an incoming HTTP response message and exposes its body as a
 * Promise<string>.
 */
class HttpClientResponse {
    /** @param message the response stream (http.IncomingMessage). */
    constructor(message) {
        this.message = message;
    }
    /**
     * Buffers the entire response body and resolves it as a string.
     * @returns Promise resolving to the full body text; rejects if the
     *          underlying stream emits an error.
     */
    readBody() {
        return new Promise((resolve, reject) => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
            // Fix: the original attached no 'error' handler (and declared the
            // executor `async` with an unused `reject`), so a failed stream
            // left this promise pending forever. Reject instead.
            this.message.on('error', (err) => {
                reject(err);
            });
        });
    }
}
|
|
exports.HttpClientResponse = HttpClientResponse;
/**
 * Returns true when the given request URL uses the https: scheme.
 * @param requestUrl URL string to inspect
 */
function isHttps(requestUrl) {
    let parsedUrl = url.parse(requestUrl);
    return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
|
|
/**
 * HTTP/HTTPS client with support for proxies (via the 'tunnel' package),
 * redirect following, retry with exponential backoff, pluggable auth
 * handlers, and JSON convenience verbs.
 */
class HttpClient {
    /**
     * @param userAgent value for the user-agent header (optional)
     * @param handlers auth handlers consulted to prepare requests and answer 401s
     * @param requestOptions defaults: redirects on (max 50), retries off (max 1),
     *        keepAlive off, SSL errors not ignored
     */
    constructor(userAgent, handlers, requestOptions) {
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            // `!= null` deliberately accepts explicit `false`/`0` overrides.
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    // --- Thin per-verb wrappers over request() ---
    options(requestUrl, additionalHeaders) {
        return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
    }
    get(requestUrl, additionalHeaders) {
        return this.request('GET', requestUrl, null, additionalHeaders || {});
    }
    del(requestUrl, additionalHeaders) {
        return this.request('DELETE', requestUrl, null, additionalHeaders || {});
    }
    post(requestUrl, data, additionalHeaders) {
        return this.request('POST', requestUrl, data, additionalHeaders || {});
    }
    patch(requestUrl, data, additionalHeaders) {
        return this.request('PATCH', requestUrl, data, additionalHeaders || {});
    }
    put(requestUrl, data, additionalHeaders) {
        return this.request('PUT', requestUrl, data, additionalHeaders || {});
    }
    head(requestUrl, additionalHeaders) {
        return this.request('HEAD', requestUrl, null, additionalHeaders || {});
    }
    // Send a readable stream as the request body.
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return this.request(verb, requestUrl, stream, additionalHeaders);
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        let res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // POST `obj` as JSON and parse the JSON response.
    async postJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // PUT `obj` as JSON and parse the JSON response.
    async putJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // PATCH `obj` as JSON and parse the JSON response.
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error("Client has already been disposed.");
        }
        let parsedUrl = url.parse(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                // First handler that claims the challenge wins.
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
                        authenticationHandler = this.handlers[i];
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
                && this._allowRedirects
                && redirectsRemaining > 0) {
                const redirectUrl = response.message.headers["location"];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = url.parse(redirectUrl);
                if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
                    throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                // Drain the failed response (frees the socket), then back off.
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        }
        return response;
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info request info produced by _prepareRequest
     * @param data string body or readable stream (or null)
     */
    requestRaw(info, data) {
        return new Promise((resolve, reject) => {
            let callbackForResult = function (err, res) {
                if (err) {
                    reject(err);
                }
                resolve(res);
            };
            this.requestRawWithCallback(info, data, callbackForResult);
        });
    }
    /**
     * Raw request with callback.
     * @param info request info produced by _prepareRequest
     * @param data string body or readable stream (or null)
     * @param onResult invoked once with (err, response)
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof (data) === 'string') {
            info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        // Guard so onResult fires at most once (timeout and error can both occur).
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', (sock) => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof (data) === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof (data) !== 'string') {
            // Stream body: end the request when the source stream closes.
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        let parsedUrl = url.parse(serverUrl);
        return this._getAgent(parsedUrl);
    }
    // Build the {parsedUrl, httpModule, options} bundle for a single request.
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
        info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers["user-agent"] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach((handler) => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    // Merge client-default headers with per-request headers; keys are
    // lowercased first so per-request values override regardless of casing.
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    // Resolution order: per-call header, then client default, then _default.
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    // Select (and, when keepAlive is on, cache) the agent for a URL: a tunnel
    // agent when a proxy applies, otherwise a plain http/https agent.
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            if (!tunnel) {
                tunnel = __webpack_require__(413);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    proxyAuth: proxyUrl.auth,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                },
            };
            let tunnelAgent;
            // Pick the tunnel variant by the (target scheme, proxy scheme) pair.
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
        }
        return agent;
    }
    // Sleep for TimeSlice * 2^retryNumber ms (retryNumber capped at Ceiling).
    _performExponentialBackoff(retryNumber) {
        retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
        const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
        return new Promise(resolve => setTimeout(() => resolve(), ms));
    }
    // JSON.parse reviver: converts parseable date strings into Date objects.
    static dateTimeDeserializer(key, value) {
        if (typeof value === 'string') {
            let a = new Date(value);
            if (!isNaN(a.valueOf())) {
                return a;
            }
        }
        return value;
    }
    // Read and JSON-parse the body, normalizing into {statusCode, result,
    // headers}. Codes > 299 reject with an Error carrying statusCode/result;
    // 404 resolves with a null result.
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = "Failed request: (" + statusCode + ")";
                }
                let err = new Error(msg);
                // attach statusCode and body obj (if available) to the error object
                err['statusCode'] = statusCode;
                if (response.result) {
                    err['result'] = response.result;
                }
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
}
exports.HttpClient = HttpClient;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 605:
|
|
/***/ (function(module) {
|
|
|
|
// Node core module passthrough: 'http'.
module.exports = require("http");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 614:
|
|
/***/ (function(module) {
|
|
|
|
// Node core module passthrough: 'events'.
module.exports = require("events");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 622:
|
|
/***/ (function(module) {
|
|
|
|
// Node core module passthrough: 'path'.
module.exports = require("path");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 631:
|
|
/***/ (function(module) {
|
|
|
|
// Node core module passthrough: 'net'.
module.exports = require("net");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 669:
|
|
/***/ (function(module) {
|
|
|
|
// Node core module passthrough: 'util'.
module.exports = require("util");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 694:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
var Inputs;
|
|
(function (Inputs) {
|
|
Inputs["Name"] = "name";
|
|
Inputs["Path"] = "path";
|
|
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 715:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
return new (P || (P = Promise))(function (resolve, reject) {
|
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
});
|
|
};
|
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
if (mod && mod.__esModule) return mod;
|
|
var result = {};
|
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
result["default"] = mod;
|
|
return result;
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const core_1 = __webpack_require__(470);
|
|
const fs = __importStar(__webpack_require__(747));
|
|
const url_1 = __webpack_require__(835);
|
|
const internal_utils_1 = __webpack_require__(931);
|
|
const internal_config_variables_1 = __webpack_require__(717);
|
|
/**
 * Creates a file container for the new artifact in the remote blob storage/file service
 * @param {string} artifactName Name of the artifact being created
 * @returns The response from the Artifact Service if the file container was successfully created
 * @throws when the service responds with a non-success status code or an empty body
 */
function createArtifactInFileContainer(artifactName) {
    return __awaiter(this, void 0, void 0, function* () {
        const requestBody = JSON.stringify({
            Type: 'actions_storage',
            Name: artifactName
        }, null, 2);
        const artifactUrl = internal_utils_1.getArtifactUrl();
        const httpClient = internal_utils_1.createHttpClient();
        const headers = internal_utils_1.getRequestOptions('application/json');
        const response = yield httpClient.post(artifactUrl, requestBody, headers);
        const responseBody = yield response.readBody();
        if (!internal_utils_1.isSuccessStatusCode(response.message.statusCode) || !responseBody) {
            // Dump the raw response so the failure can be diagnosed from the logs
            // eslint-disable-next-line no-console
            console.log(response);
            throw new Error(`Unable to create a container for the artifact ${artifactName}`);
        }
        return JSON.parse(responseBody);
    });
}
exports.createArtifactInFileContainer = createArtifactInFileContainer;
|
|
/**
 * Concurrently upload all of the files in chunks
 * @param {string} uploadUrl Base Url for the artifact that was created
 * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
 * @param {UploadOptions} [options] Optional; `continueOnError: false` makes pending uploads fail fast after the first failed file
 * @returns The size of all the files uploaded in bytes
 */
function uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const client = internal_utils_1.createHttpClient();
        const FILE_CONCURRENCY = internal_config_variables_1.getUploadFileConcurrency();
        const CHUNK_CONCURRENCY = internal_config_variables_1.getUploadChunkConcurrency();
        const MAX_CHUNK_SIZE = internal_config_variables_1.getUploadChunkSize();
        core_1.debug(`File Concurrency: ${FILE_CONCURRENCY}, Chunk Concurrency: ${CHUNK_CONCURRENCY} and Chunk Size: ${MAX_CHUNK_SIZE}`);
        const parameters = [];
        // by default, file uploads will continue if there is an error unless specified differently in the options
        let continueOnError = true;
        if (options) {
            if (options.continueOnError === false) {
                continueOnError = false;
            }
        }
        // Prepare the necessary parameters to upload all the files
        for (const file of filesToUpload) {
            // the destination path inside the artifact travels as the `itemPath` query parameter
            const resourceUrl = new url_1.URL(uploadUrl);
            resourceUrl.searchParams.append('itemPath', file.uploadFilePath);
            parameters.push({
                file: file.absoluteFilePath,
                resourceUrl: resourceUrl.toString(),
                restClient: client,
                concurrency: CHUNK_CONCURRENCY,
                maxChunkSize: MAX_CHUNK_SIZE,
                continueOnError
            });
        }
        const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()];
        const failedItemsToReport = [];
        // `uploadedFiles` doubles as the shared claim counter: each worker reads it
        // and increments it in the same synchronous step, so (JS being single
        // threaded) no two workers can claim the same file index.
        let uploadedFiles = 0;
        let fileSizes = 0;
        let abortPendingFileUploads = false;
        // Only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
        yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
            while (uploadedFiles < filesToUpload.length) {
                const currentFileParameters = parameters[uploadedFiles];
                uploadedFiles += 1;
                if (abortPendingFileUploads) {
                    // once aborted, remaining files are reported as failed without being uploaded
                    failedItemsToReport.push(currentFileParameters.file);
                    continue;
                }
                const uploadFileResult = yield uploadFileAsync(currentFileParameters);
                fileSizes += uploadFileResult.successfulUploadSize;
                if (uploadFileResult.isSuccess === false) {
                    failedItemsToReport.push(currentFileParameters.file);
                    if (!continueOnError) {
                        // Existing uploads will be able to finish however all pending uploads will fail fast
                        abortPendingFileUploads = true;
                    }
                }
            }
        })));
        core_1.info(`Total size of all the files uploaded is ${fileSizes} bytes`);
        return {
            size: fileSizes,
            failedItems: failedItemsToReport
        };
    });
}
exports.uploadArtifactToFileContainer = uploadArtifactToFileContainer;
|
|
/**
 * Asynchronously uploads a file. If the file is bigger than the max chunk size it will be uploaded via multiple calls
 * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
 * @returns The size of the file that was uploaded in bytes along with any failed uploads
 */
function uploadFileAsync(parameters) {
    return __awaiter(this, void 0, void 0, function* () {
        const fileSize = fs.statSync(parameters.file).size;
        const parallelUploads = [...new Array(parameters.concurrency).keys()];
        // `offset` is shared between the chunk workers; each worker claims the
        // next chunk by reading and advancing it in one synchronous step.
        let offset = 0;
        let isUploadSuccessful = true;
        let failedChunkSizes = 0;
        let abortFileUpload = false;
        yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
            while (offset < fileSize) {
                const chunkSize = Math.min(fileSize - offset, parameters.maxChunkSize);
                const start = offset;
                const end = offset + chunkSize - 1;
                // BUGFIX: advance the shared offset BEFORE the abort check. The
                // original incremented it after, so once `abortFileUpload` was set
                // the `continue` below skipped the increment and this loop spun
                // forever (while inflating `failedChunkSizes`) on any multi-chunk
                // file that failed part-way through.
                offset += parameters.maxChunkSize;
                if (abortFileUpload) {
                    // if we don't want to continue on error, any pending upload chunk will be marked as failed
                    failedChunkSizes += chunkSize;
                    continue;
                }
                // start/end are inclusive byte indexes; keep the fd open across chunks
                const chunk = fs.createReadStream(parameters.file, {
                    start,
                    end,
                    autoClose: false
                });
                const result = yield uploadChunk(parameters.restClient, parameters.resourceUrl, chunk, start, end, fileSize);
                if (!result) {
                    /**
                     * Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
                     * successfully uploaded so the server may report a different size for what was uploaded
                     **/
                    isUploadSuccessful = false;
                    failedChunkSizes += chunkSize;
                    core_1.warning(`Aborting upload for ${parameters.file} due to failure`);
                    abortFileUpload = true;
                }
            }
        })));
        return {
            isSuccess: isUploadSuccessful,
            successfulUploadSize: fileSize - failedChunkSizes
        };
    });
}
|
|
/**
 * Uploads a chunk of an individual file to the specified resourceUrl. A single retry is
 * attempted (after a 10 second pause) when the first attempt fails with a retryable status code.
 * @param {HttpClient} restClient RestClient that will be making the appropriate HTTP call
 * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
 * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded
 * @param {number} start Starting byte index of file that the chunk belongs to
 * @param {number} end Ending byte index of file that the chunk belongs to
 * @param {number} totalSize Total size of the file in bytes that is being uploaded
 * @returns if the chunk was successfully uploaded
 */
function uploadChunk(restClient, resourceUrl, data, start, end, totalSize) {
    return __awaiter(this, void 0, void 0, function* () {
        // getContentRange is a pure formatter, so compute it once and reuse it
        const contentRange = internal_utils_1.getContentRange(start, end, totalSize);
        core_1.info(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${contentRange}`);
        const requestOptions = internal_utils_1.getRequestOptions('application/octet-stream', totalSize, contentRange);
        const sendChunk = () => __awaiter(this, void 0, void 0, function* () {
            return yield restClient.sendStream('PUT', resourceUrl, data, requestOptions);
        });
        const response = yield sendChunk();
        if (internal_utils_1.isSuccessStatusCode(response.message.statusCode)) {
            core_1.debug(`Chunk for ${start}:${end} was successfully uploaded to ${resourceUrl}`);
            return true;
        }
        if (internal_utils_1.isRetryableStatusCode(response.message.statusCode)) {
            core_1.info(`Received http ${response.message.statusCode} during chunk upload, will retry at offset ${start} after 10 seconds.`);
            yield new Promise(resolve => setTimeout(resolve, 10000));
            const retryResponse = yield sendChunk();
            if (internal_utils_1.isSuccessStatusCode(retryResponse.message.statusCode)) {
                return true;
            }
            core_1.info(`Unable to upload chunk even after retrying`);
            // eslint-disable-next-line no-console
            console.log(response);
            return false;
        }
        // Upload must have failed spectacularly somehow, log full result for diagnostic purposes
        // eslint-disable-next-line no-console
        console.log(response);
        return false;
    });
}
|
|
/**
 * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact.
 * Updating the size indicates that we are done uploading all the contents of the artifact. A server side check will be run
 * to check that the artifact size is correct for billing purposes
 * @param {number} size Total uploaded size of the artifact in bytes
 * @param {string} artifactName Name of the artifact whose size is being finalized
 * @throws when the artifact cannot be found (404) or the service rejects the update
 */
function patchArtifactSize(size, artifactName) {
    return __awaiter(this, void 0, void 0, function* () {
        const client = internal_utils_1.createHttpClient();
        const requestOptions = internal_utils_1.getRequestOptions('application/json');
        const resourceUrl = new url_1.URL(internal_utils_1.getArtifactUrl());
        resourceUrl.searchParams.append('artifactName', artifactName);
        const data = JSON.stringify({ Size: size }, null, 2);
        core_1.debug(`URL is ${resourceUrl.toString()}`);
        const rawResponse = yield client.patch(resourceUrl.toString(), data, requestOptions);
        const body = yield rawResponse.readBody();
        const statusCode = rawResponse.message.statusCode;
        if (internal_utils_1.isSuccessStatusCode(statusCode)) {
            core_1.debug(`Artifact ${artifactName} has been successfully uploaded, total size ${size}`);
            core_1.debug(body);
            return;
        }
        if (statusCode === 404) {
            throw new Error(`An Artifact with the name ${artifactName} was not found`);
        }
        // eslint-disable-next-line no-console
        console.log(body);
        throw new Error(`Unable to finish uploading artifact ${artifactName}`);
    });
}
exports.patchArtifactSize = patchArtifactSize;
|
|
//# sourceMappingURL=internal-upload-http-client.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 717:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
function getUploadFileConcurrency() {
|
|
return 2;
|
|
}
|
|
exports.getUploadFileConcurrency = getUploadFileConcurrency;
|
|
function getUploadChunkConcurrency() {
|
|
return 1;
|
|
}
|
|
exports.getUploadChunkConcurrency = getUploadChunkConcurrency;
|
|
function getUploadChunkSize() {
|
|
return 4 * 1024 * 1024; // 4 MB Chunks
|
|
}
|
|
exports.getUploadChunkSize = getUploadChunkSize;
|
|
function getDownloadFileConcurrency() {
|
|
return 2;
|
|
}
|
|
exports.getDownloadFileConcurrency = getDownloadFileConcurrency;
|
|
function getDownloadArtifactConcurrency() {
|
|
// when downloading all artifact at once, this is number of concurrent artifacts being downloaded
|
|
return 1;
|
|
}
|
|
exports.getDownloadArtifactConcurrency = getDownloadArtifactConcurrency;
|
|
function getRuntimeToken() {
|
|
const token = process.env['ACTIONS_RUNTIME_TOKEN'];
|
|
if (!token) {
|
|
throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable');
|
|
}
|
|
return token;
|
|
}
|
|
exports.getRuntimeToken = getRuntimeToken;
|
|
function getRuntimeUrl() {
|
|
const runtimeUrl = process.env['ACTIONS_RUNTIME_URL'];
|
|
if (!runtimeUrl) {
|
|
throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable');
|
|
}
|
|
return runtimeUrl;
|
|
}
|
|
exports.getRuntimeUrl = getRuntimeUrl;
|
|
function getWorkFlowRunId() {
|
|
const workFlowRunId = process.env['GITHUB_RUN_ID'];
|
|
if (!workFlowRunId) {
|
|
throw new Error('Unable to get GITHUB_RUN_ID env variable');
|
|
}
|
|
return workFlowRunId;
|
|
}
|
|
exports.getWorkFlowRunId = getWorkFlowRunId;
|
|
function getWorkSpaceDirectory() {
|
|
const workspaceDirectory = process.env['GITHUB_WORKSPACE'];
|
|
if (!workspaceDirectory) {
|
|
throw new Error('Unable to get GITHUB_WORKSPACE env variable');
|
|
}
|
|
return workspaceDirectory;
|
|
}
|
|
exports.getWorkSpaceDirectory = getWorkSpaceDirectory;
|
|
//# sourceMappingURL=internal-config-variables.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 747:
/***/ (function(module) {

// Webpack "external": delegates to Node's built-in `fs` module at runtime.
module.exports = require("fs");

/***/ }),
|
|
|
|
/***/ 799:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// Standard TypeScript helper emitted when async/await is downleveled: wraps a
// generator function in a Promise and steps it, resolving with the final value
// or rejecting on the first thrown error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Standard TypeScript helper for `import * as ns`: copies the own properties of
// a CommonJS module onto a fresh namespace object and also exposes the module
// itself under `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const core = __importStar(__webpack_require__(470));
|
|
const artifact = __importStar(__webpack_require__(214));
|
|
const constants_1 = __webpack_require__(694);
|
|
/**
 * Entry point of the action: downloads one named artifact, or — when no name
 * input is supplied — every artifact of the current workflow run. Any error is
 * surfaced by failing the action with the error's message.
 */
function run() {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            const name = core.getInput(constants_1.Inputs.Name, { required: false });
            const path = core.getInput(constants_1.Inputs.Path, { required: false });
            const artifactClient = artifact.create();
            if (name) {
                // download a single, named artifact
                const downloadResponse = yield artifactClient.downloadArtifact(name, path, {
                    createArtifactFolder: false
                });
                core.info(`Artifact ${downloadResponse.artifactName} was downloaded to ${downloadResponse.downloadPath}`);
            }
            else {
                // no name supplied: download every artifact of the run
                const downloadResponse = yield artifactClient.downloadAllArtifacts(path);
                core.info(`There were ${downloadResponse.length} artifacts downloaded`);
                for (const item of downloadResponse) {
                    core.info(`Artifact ${item.artifactName} was downloaded to ${item.downloadPath}`);
                }
            }
            core.info('Artifact download has finished successfully');
        }
        catch (err) {
            core.setFailed(err.message);
        }
    });
}
run();
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 835:
/***/ (function(module) {

// Webpack "external": delegates to Node's built-in `url` module at runtime.
module.exports = require("url");

/***/ }),
|
|
|
|
/***/ 931:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// Standard TypeScript helper emitted when async/await is downleveled: wraps a
// generator function in a Promise and steps it, resolving with the final value
// or rejecting on the first thrown error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const core_1 = __webpack_require__(470);
|
|
const fs_1 = __webpack_require__(747);
|
|
const http_client_1 = __webpack_require__(539);
|
|
const auth_1 = __webpack_require__(226);
|
|
const internal_config_variables_1 = __webpack_require__(717);
|
|
/**
 * Parses a env variable that is a number
 * @param {string} key Name of the environment variable
 * @returns {number|undefined} the non-negative numeric value, or undefined when
 * the variable is unset, not a number, or negative
 */
function parseEnvNumber(key) {
    const parsed = Number(process.env[key]);
    return Number.isNaN(parsed) || parsed < 0 ? undefined : parsed;
}
|
|
exports.parseEnvNumber = parseEnvNumber;
|
|
/**
 * Various utility functions to help with the necessary API calls
 */
// API version sent with every call to the Actions pipelines/artifact service
// (as part of the Accept header and the artifact URL's query string).
function getApiVersion() {
    return '6.0-preview';
}
exports.getApiVersion = getApiVersion;
|
|
/**
 * Returns true when the given HTTP status code is in the 2xx range.
 * Falsy codes (undefined, 0) are never treated as success.
 * @param {number|undefined} statusCode HTTP status code of a response
 */
function isSuccessStatusCode(statusCode) {
    return Boolean(statusCode) && statusCode >= 200 && statusCode < 300;
}
|
|
exports.isSuccessStatusCode = isSuccessStatusCode;
|
|
/**
 * Returns true when the given HTTP status code indicates a transient failure
 * that is worth retrying (502 Bad Gateway, 503 Service Unavailable,
 * 504 Gateway Timeout).
 * @param {number|undefined} statusCode HTTP status code of a response
 */
function isRetryableStatusCode(statusCode) {
    if (!statusCode) {
        return false;
    }
    return [
        http_client_1.HttpCodes.BadGateway,
        http_client_1.HttpCodes.ServiceUnavailable,
        http_client_1.HttpCodes.GatewayTimeout
    ].includes(statusCode);
}
exports.isRetryableStatusCode = isRetryableStatusCode;
|
|
/**
 * Builds a Content-Range header value: `bytes start-end/fileSize`.
 * start and end are inclusive byte indexes — a 200 byte chunk starting at
 * byte 0 yields `bytes 0-199/200`.
 * @param {number} start First byte index of the chunk
 * @param {number} end Last byte index of the chunk (inclusive)
 * @param {number} total Total size of the file in bytes
 */
function getContentRange(start, end, total) {
    return `bytes ${start}-${end}/${total}`;
}
|
|
exports.getContentRange = getContentRange;
|
|
/**
 * Builds the common request headers for calls to the artifact service.
 * Content-Type/Length/Range headers are only added when the corresponding
 * argument is truthy.
 * @param {string} [contentType] value for the Content-Type header
 * @param {number} [contentLength] value for the Content-Length header
 * @param {string} [contentRange] value for the Content-Range header
 * @returns the header object to pass to the HTTP client
 */
function getRequestOptions(contentType, contentLength, contentRange) {
    const headers = {
        Accept: `application/json;api-version=${getApiVersion()}`
    };
    if (contentType) {
        headers['Content-Type'] = contentType;
    }
    if (contentLength) {
        headers['Content-Length'] = contentLength;
    }
    if (contentRange) {
        headers['Content-Range'] = contentRange;
    }
    return headers;
}
exports.getRequestOptions = getRequestOptions;
|
|
/**
 * Creates an HTTP client authenticated with the runner's bearer token for
 * talking to the artifact service.
 */
function createHttpClient() {
    const credentialHandlers = [
        new auth_1.BearerCredentialHandler(internal_config_variables_1.getRuntimeToken())
    ];
    return new http_client_1.HttpClient('action/artifact', credentialHandlers);
}
exports.createHttpClient = createHttpClient;
|
|
/**
 * Builds the base URL for artifact operations of the current workflow run.
 * Both env-var reads below can throw; they are kept in the original order
 * (runtime URL first) so failure messages are unchanged.
 */
function getArtifactUrl() {
    const baseUrl = internal_config_variables_1.getRuntimeUrl();
    const runId = internal_config_variables_1.getWorkFlowRunId();
    const artifactUrl = `${baseUrl}_apis/pipelines/workflows/${runId}/artifacts?api-version=${getApiVersion()}`;
    core_1.debug(`Artifact Url: ${artifactUrl}`);
    return artifactUrl;
}
exports.getArtifactUrl = getArtifactUrl;
|
|
/**
 * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
 * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
 * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an
 * individual filesystem/platform will not be supported on all fileSystems/platforms
 */
const invalidCharacters = ['\\', '/', '"', ':', '<', '>', '|', '*', '?', ' '];
/**
 * Scans the name of the item being uploaded to make sure there are no illegal characters
 * @param {string} name Proposed artifact (or file) name
 * @throws when the name is empty or contains any character from `invalidCharacters`
 */
function checkArtifactName(name) {
    if (!name) {
        throw new Error(`Artifact name: ${name}, is incorrectly provided`);
    }
    // report the first offending character, scanning in the order of the list above
    const invalidChar = invalidCharacters.find(ch => name.includes(ch));
    if (invalidChar !== undefined) {
        throw new Error(`Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid characters include: ${invalidCharacters.toString()}.`);
    }
}
|
|
exports.checkArtifactName = checkArtifactName;
|
|
/**
 * Creates every directory in the given list, sequentially.
 * `recursive: true` makes mkdir a no-op for directories that already exist,
 * and creates any missing parents.
 * @param {string[]} directories Absolute paths of directories to create
 */
function createDirectoriesForArtifact(directories) {
    return __awaiter(this, void 0, void 0, function* () {
        for (const dir of directories) {
            yield fs_1.promises.mkdir(dir, { recursive: true });
        }
    });
}
exports.createDirectoriesForArtifact = createDirectoriesForArtifact;
|
|
//# sourceMappingURL=internal-utils.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 950:
|
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const url = __webpack_require__(835);
|
|
/**
 * Resolves the proxy URL to use for a request, honoring the conventional
 * https_proxy / http_proxy environment variables (lower- and upper-case).
 * @param {URL} reqUrl URL of the outgoing request
 * @returns the parsed proxy URL, or undefined when no proxy applies
 */
function getProxyUrl(reqUrl) {
    // hosts matched by no_proxy bypass the proxy entirely
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    const usingSsl = reqUrl.protocol === 'https:';
    const proxyVar = usingSsl
        ? process.env["https_proxy"] || process.env["HTTPS_PROXY"]
        : process.env["http_proxy"] || process.env["HTTP_PROXY"];
    return proxyVar ? url.parse(proxyVar) : undefined;
}
exports.getProxyUrl = getProxyUrl;
|
|
/**
 * Decides whether a request should bypass the proxy based on the
 * no_proxy / NO_PROXY environment variable (a comma-separated host list).
 * Matching is case-insensitive and exact, against both `hostname` and
 * `hostname:port`.
 * @param {URL} reqUrl URL of the outgoing request
 * @returns true when the request host appears in no_proxy
 */
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the request port: explicit port wins, otherwise scheme default
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate spellings of the request host, upper-cased for comparison
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Bypass when any trimmed, non-empty no_proxy entry matches a candidate exactly
    return noProxy
        .split(',')
        .map(entry => entry.trim().toUpperCase())
        .filter(entry => entry)
        .some(entry => upperReqHosts.includes(entry));
}
|
|
exports.checkBypass = checkBypass;
|
|
|
|
|
|
/***/ })
|
|
|
|
/******/ }); |