mirror of https://github.com/hashicorp/vault-action.git, synced 2025-11-07 07:06:56 +00:00
4744 lines, No EOL, 126 KiB, JavaScript
module.exports =
|
|
/******/ (function(modules, runtime) { // webpackBootstrap
|
|
/******/ "use strict";
|
|
/******/ // The module cache
|
|
/******/ var installedModules = {};
|
|
/******/
|
|
/******/ // The require function
|
|
/******/ function __webpack_require__(moduleId) {
|
|
/******/
|
|
/******/ // Check if module is in cache
|
|
/******/ if(installedModules[moduleId]) {
|
|
/******/ return installedModules[moduleId].exports;
|
|
/******/ }
|
|
/******/ // Create a new module (and put it into the cache)
|
|
/******/ var module = installedModules[moduleId] = {
|
|
/******/ i: moduleId,
|
|
/******/ l: false,
|
|
/******/ exports: {}
|
|
/******/ };
|
|
/******/
|
|
/******/ // Execute the module function
|
|
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
|
|
/******/
|
|
/******/ // Flag the module as loaded
|
|
/******/ module.l = true;
|
|
/******/
|
|
/******/ // Return the exports of the module
|
|
/******/ return module.exports;
|
|
/******/ }
|
|
/******/
|
|
/******/
|
|
/******/ __webpack_require__.ab = __dirname + "/";
|
|
/******/
|
|
/******/ // the startup function
|
|
/******/ function startup() {
|
|
/******/ // Load entry module and return exports
|
|
/******/ return __webpack_require__(104);
|
|
/******/ };
|
|
/******/
|
|
/******/ // run startup
|
|
/******/ return startup();
|
|
/******/ })
|
|
/************************************************************************/
|
|
/******/ ({
|
|
|
|
/***/ 9:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
var once = __webpack_require__(49);
|
|
|
|
var noop = function() {};
|
|
|
|
var isRequest = function(stream) {
|
|
return stream.setHeader && typeof stream.abort === 'function';
|
|
};
|
|
|
|
var isChildProcess = function(stream) {
|
|
return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
|
|
};
|
|
|
|
var eos = function(stream, opts, callback) {
|
|
if (typeof opts === 'function') return eos(stream, null, opts);
|
|
if (!opts) opts = {};
|
|
|
|
callback = once(callback || noop);
|
|
|
|
var ws = stream._writableState;
|
|
var rs = stream._readableState;
|
|
var readable = opts.readable || (opts.readable !== false && stream.readable);
|
|
var writable = opts.writable || (opts.writable !== false && stream.writable);
|
|
|
|
var onlegacyfinish = function() {
|
|
if (!stream.writable) onfinish();
|
|
};
|
|
|
|
var onfinish = function() {
|
|
writable = false;
|
|
if (!readable) callback.call(stream);
|
|
};
|
|
|
|
var onend = function() {
|
|
readable = false;
|
|
if (!writable) callback.call(stream);
|
|
};
|
|
|
|
var onexit = function(exitCode) {
|
|
callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
|
|
};
|
|
|
|
var onerror = function(err) {
|
|
callback.call(stream, err);
|
|
};
|
|
|
|
var onclose = function() {
|
|
if (readable && !(rs && rs.ended)) return callback.call(stream, new Error('premature close'));
|
|
if (writable && !(ws && ws.ended)) return callback.call(stream, new Error('premature close'));
|
|
};
|
|
|
|
var onrequest = function() {
|
|
stream.req.on('finish', onfinish);
|
|
};
|
|
|
|
if (isRequest(stream)) {
|
|
stream.on('complete', onfinish);
|
|
stream.on('abort', onclose);
|
|
if (stream.req) onrequest();
|
|
else stream.on('request', onrequest);
|
|
} else if (writable && !ws) { // legacy streams
|
|
stream.on('end', onlegacyfinish);
|
|
stream.on('close', onlegacyfinish);
|
|
}
|
|
|
|
if (isChildProcess(stream)) stream.on('exit', onexit);
|
|
|
|
stream.on('end', onend);
|
|
stream.on('finish', onfinish);
|
|
if (opts.error !== false) stream.on('error', onerror);
|
|
stream.on('close', onclose);
|
|
|
|
return function() {
|
|
stream.removeListener('complete', onfinish);
|
|
stream.removeListener('abort', onclose);
|
|
stream.removeListener('request', onrequest);
|
|
if (stream.req) stream.req.removeListener('finish', onfinish);
|
|
stream.removeListener('end', onlegacyfinish);
|
|
stream.removeListener('close', onlegacyfinish);
|
|
stream.removeListener('finish', onfinish);
|
|
stream.removeListener('exit', onexit);
|
|
stream.removeListener('end', onend);
|
|
stream.removeListener('error', onerror);
|
|
stream.removeListener('close', onclose);
|
|
};
|
|
};
|
|
|
|
module.exports = eos;
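// Illustrative usage of the helper above (comment-only sketch; `source` is a
// hypothetical stream, nothing here is executed by the bundle):
//
//   const detach = eos(source, {readable: true, writable: false}, err => {
//     if (err) console.error('stream closed prematurely:', err);
//     else console.log('stream ended cleanly');
//   });
//   detach(); // the returned function removes every listener eos attached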
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 11:
|
|
/***/ (function(module) {
|
|
|
|
// Returns a wrapper function that returns a wrapped callback
|
|
// The wrapper function should do some stuff, and return a
|
|
// presumably different callback function.
|
|
// This makes sure that own properties are retained, so that
|
|
// decorations and such are not lost along the way.
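// Concretely, module 49 below exports `wrappy(once)`: the wrapper carries the
// own properties `once` had at wrap time, and when invoked it copies any own
// properties of the supplied callback onto the function that `once` returns.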
|
|
module.exports = wrappy
|
|
function wrappy (fn, cb) {
|
|
if (fn && cb) return wrappy(fn)(cb)
|
|
|
|
if (typeof fn !== 'function')
|
|
throw new TypeError('need wrapper function')
|
|
|
|
Object.keys(fn).forEach(function (k) {
|
|
wrapper[k] = fn[k]
|
|
})
|
|
|
|
return wrapper
|
|
|
|
function wrapper() {
|
|
var args = new Array(arguments.length)
|
|
for (var i = 0; i < args.length; i++) {
|
|
args[i] = arguments[i]
|
|
}
|
|
var ret = fn.apply(this, args)
|
|
var cb = args[args.length-1]
|
|
if (typeof ret === 'function' && ret !== cb) {
|
|
Object.keys(cb).forEach(function (k) {
|
|
ret[k] = cb[k]
|
|
})
|
|
}
|
|
return ret
|
|
}
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 16:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const pump = __webpack_require__(453);
|
|
const bufferStream = __webpack_require__(375);
|
|
|
|
class MaxBufferError extends Error {
|
|
constructor() {
|
|
super('maxBuffer exceeded');
|
|
this.name = 'MaxBufferError';
|
|
}
|
|
}
|
|
|
|
async function getStream(inputStream, options) {
|
|
if (!inputStream) {
|
|
return Promise.reject(new Error('Expected a stream'));
|
|
}
|
|
|
|
options = {
|
|
maxBuffer: Infinity,
|
|
...options
|
|
};
|
|
|
|
const {maxBuffer} = options;
|
|
|
|
let stream;
|
|
await new Promise((resolve, reject) => {
|
|
const rejectPromise = error => {
|
|
if (error) { // A null check
|
|
error.bufferedData = stream.getBufferedValue();
|
|
}
|
|
|
|
reject(error);
|
|
};
|
|
|
|
stream = pump(inputStream, bufferStream(options), error => {
|
|
if (error) {
|
|
rejectPromise(error);
|
|
return;
|
|
}
|
|
|
|
resolve();
|
|
});
|
|
|
|
stream.on('data', () => {
|
|
if (stream.getBufferedLength() > maxBuffer) {
|
|
rejectPromise(new MaxBufferError());
|
|
}
|
|
});
|
|
});
|
|
|
|
return stream.getBufferedValue();
|
|
}
|
|
|
|
module.exports = getStream;
|
|
// TODO: Remove this for the next major release
|
|
module.exports.default = getStream;
|
|
module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'});
|
|
module.exports.array = (stream, options) => getStream(stream, {...options, array: true});
|
|
module.exports.MaxBufferError = MaxBufferError;
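// Illustrative usage (comment-only sketch; `someStream` is hypothetical):
//
//   const body = await getStream(someStream, {maxBuffer: 1024 * 1024});
//   // Once more than `maxBuffer` bytes have been buffered the promise rejects
//   // with MaxBufferError, and `error.bufferedData` holds what was read so far.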
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 18:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const net = __webpack_require__(631);
|
|
|
|
class TimeoutError extends Error {
|
|
constructor(threshold, event) {
|
|
super(`Timeout awaiting '${event}' for ${threshold}ms`);
|
|
this.name = 'TimeoutError';
|
|
this.code = 'ETIMEDOUT';
|
|
this.event = event;
|
|
}
|
|
}
|
|
|
|
const reentry = Symbol('reentry');
|
|
|
|
const noop = () => {};
|
|
|
|
module.exports = (request, delays, options) => {
|
|
/* istanbul ignore next: this makes sure timed-out isn't called twice */
|
|
if (request[reentry]) {
|
|
return;
|
|
}
|
|
|
|
request[reentry] = true;
|
|
|
|
let stopNewTimeouts = false;
|
|
|
|
const addTimeout = (delay, callback, ...args) => {
|
|
// An error had been thrown before. Going further would result in uncaught errors.
|
|
// See https://github.com/sindresorhus/got/issues/631#issuecomment-435675051
|
|
if (stopNewTimeouts) {
|
|
return noop;
|
|
}
|
|
|
|
// Event loop order is timers, poll, immediates.
|
|
// The timed event may emit during the current tick poll phase, so
|
|
// defer calling the handler until the poll phase completes.
|
|
let immediate;
|
|
const timeout = setTimeout(() => {
|
|
immediate = setImmediate(callback, delay, ...args);
|
|
/* istanbul ignore next: added in node v9.7.0 */
|
|
if (immediate.unref) {
|
|
immediate.unref();
|
|
}
|
|
}, delay);
|
|
|
|
/* istanbul ignore next: in order to support electron renderer */
|
|
if (timeout.unref) {
|
|
timeout.unref();
|
|
}
|
|
|
|
const cancel = () => {
|
|
clearTimeout(timeout);
|
|
clearImmediate(immediate);
|
|
};
|
|
|
|
cancelers.push(cancel);
|
|
|
|
return cancel;
|
|
};
|
|
|
|
const {host, hostname} = options;
|
|
const timeoutHandler = (delay, event) => {
|
|
request.emit('error', new TimeoutError(delay, event));
|
|
request.once('error', () => {}); // Ignore the `socket hung up` error made by request.abort()
|
|
|
|
request.abort();
|
|
};
|
|
|
|
const cancelers = [];
|
|
const cancelTimeouts = () => {
|
|
stopNewTimeouts = true;
|
|
cancelers.forEach(cancelTimeout => cancelTimeout());
|
|
};
|
|
|
|
request.once('error', cancelTimeouts);
|
|
request.once('response', response => {
|
|
response.once('end', cancelTimeouts);
|
|
});
|
|
|
|
if (delays.request !== undefined) {
|
|
addTimeout(delays.request, timeoutHandler, 'request');
|
|
}
|
|
|
|
if (delays.socket !== undefined) {
|
|
const socketTimeoutHandler = () => {
|
|
timeoutHandler(delays.socket, 'socket');
|
|
};
|
|
|
|
request.setTimeout(delays.socket, socketTimeoutHandler);
|
|
|
|
// `request.setTimeout(0)` causes a memory leak.
|
|
// We can just remove the listener and forget about the timer - it's unreffed.
|
|
// See https://github.com/sindresorhus/got/issues/690
|
|
cancelers.push(() => request.removeListener('timeout', socketTimeoutHandler));
|
|
}
|
|
|
|
if (delays.lookup !== undefined && !request.socketPath && !net.isIP(hostname || host)) {
|
|
request.once('socket', socket => {
|
|
/* istanbul ignore next: hard to test */
|
|
if (socket.connecting) {
|
|
const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup');
|
|
socket.once('lookup', cancelTimeout);
|
|
}
|
|
});
|
|
}
|
|
|
|
if (delays.connect !== undefined) {
|
|
request.once('socket', socket => {
|
|
/* istanbul ignore next: hard to test */
|
|
if (socket.connecting) {
|
|
const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect');
|
|
|
|
if (request.socketPath || net.isIP(hostname || host)) {
|
|
socket.once('connect', timeConnect());
|
|
} else {
|
|
socket.once('lookup', error => {
|
|
if (error === null) {
|
|
socket.once('connect', timeConnect());
|
|
}
|
|
});
|
|
}
|
|
}
|
|
});
|
|
}
|
|
|
|
if (delays.secureConnect !== undefined && options.protocol === 'https:') {
|
|
request.once('socket', socket => {
|
|
/* istanbul ignore next: hard to test */
|
|
if (socket.connecting) {
|
|
socket.once('connect', () => {
|
|
const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect');
|
|
socket.once('secureConnect', cancelTimeout);
|
|
});
|
|
}
|
|
});
|
|
}
|
|
|
|
if (delays.send !== undefined) {
|
|
request.once('socket', socket => {
|
|
const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send');
|
|
/* istanbul ignore next: hard to test */
|
|
if (socket.connecting) {
|
|
socket.once('connect', () => {
|
|
request.once('upload-complete', timeRequest());
|
|
});
|
|
} else {
|
|
request.once('upload-complete', timeRequest());
|
|
}
|
|
});
|
|
}
|
|
|
|
if (delays.response !== undefined) {
|
|
request.once('upload-complete', () => {
|
|
const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response');
|
|
request.once('response', cancelTimeout);
|
|
});
|
|
}
|
|
};
|
|
|
|
module.exports.TimeoutError = TimeoutError;
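// Shape of the `delays` argument handled above (comment-only sketch; every key
// is optional and given in milliseconds, `req` is a hypothetical request and
// `timedOut` a hypothetical local name for this module's export):
//
//   timedOut(req, {
//     lookup: 100, connect: 50, secureConnect: 50, socket: 1000,
//     send: 10000, response: 1000, request: 30000
//   }, {hostname: 'example.com', protocol: 'https:'});
//
// A phase that exceeds its delay emits a TimeoutError on the request and aborts it.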
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 49:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
var wrappy = __webpack_require__(11)
|
|
module.exports = wrappy(once)
|
|
module.exports.strict = wrappy(onceStrict)
|
|
|
|
once.proto = once(function () {
|
|
Object.defineProperty(Function.prototype, 'once', {
|
|
value: function () {
|
|
return once(this)
|
|
},
|
|
configurable: true
|
|
})
|
|
|
|
Object.defineProperty(Function.prototype, 'onceStrict', {
|
|
value: function () {
|
|
return onceStrict(this)
|
|
},
|
|
configurable: true
|
|
})
|
|
})
|
|
|
|
function once (fn) {
|
|
var f = function () {
|
|
if (f.called) return f.value
|
|
f.called = true
|
|
return f.value = fn.apply(this, arguments)
|
|
}
|
|
f.called = false
|
|
return f
|
|
}
|
|
|
|
function onceStrict (fn) {
|
|
var f = function () {
|
|
if (f.called)
|
|
throw new Error(f.onceError)
|
|
f.called = true
|
|
return f.value = fn.apply(this, arguments)
|
|
}
|
|
var name = fn.name || 'Function wrapped with `once`'
|
|
f.onceError = name + " shouldn't be called more than once"
|
|
f.called = false
|
|
return f
|
|
}
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 53:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
// TODO: Use the `URL` global when targeting Node.js 10
|
|
const URLParser = typeof URL === 'undefined' ? __webpack_require__(835).URL : URL;
|
|
|
|
const testParameter = (name, filters) => {
|
|
return filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);
|
|
};
|
|
|
|
const normalizeDataURL = (urlString, {stripHash}) => {
|
|
const parts = urlString.match(/^data:(.*?),(.*?)(?:#(.*))?$/);
|
|
|
|
if (!parts) {
|
|
throw new Error(`Invalid URL: ${urlString}`);
|
|
}
|
|
|
|
const mediaType = parts[1].split(';');
|
|
const body = parts[2];
|
|
const hash = stripHash ? '' : parts[3];
|
|
|
|
let base64 = false;
|
|
|
|
if (mediaType[mediaType.length - 1] === 'base64') {
|
|
mediaType.pop();
|
|
base64 = true;
|
|
}
|
|
|
|
// Lowercase MIME type
|
|
const mimeType = (mediaType.shift() || '').toLowerCase();
|
|
const attributes = mediaType
|
|
.filter(Boolean)
|
|
.map(attribute => {
|
|
let [key, value = ''] = attribute.split('=').map(string => string.trim());
|
|
|
|
// Lowercase `charset`
|
|
if (key === 'charset') {
|
|
value = value.toLowerCase();
|
|
}
|
|
|
|
return `${key}${value ? `=${value}` : ''}`;
|
|
});
|
|
|
|
const normalizedMediaType = [
|
|
...attributes
|
|
];
|
|
|
|
if (base64) {
|
|
normalizedMediaType.push('base64');
|
|
}
|
|
|
|
if (normalizedMediaType.length !== 0 || mimeType) {
|
|
normalizedMediaType.unshift(mimeType);
|
|
}
|
|
|
|
return `data:${normalizedMediaType.join(';')},${base64 ? body.trim() : body}${hash ? `#${hash}` : ''}`;
|
|
};
|
|
|
|
const normalizeUrl = (urlString, options) => {
|
|
options = {
|
|
defaultProtocol: 'http:',
|
|
normalizeProtocol: true,
|
|
forceHttp: false,
|
|
forceHttps: false,
|
|
stripAuthentication: true,
|
|
stripHash: false,
|
|
stripWWW: true,
|
|
removeQueryParameters: [/^utm_\w+/i],
|
|
removeTrailingSlash: true,
|
|
removeDirectoryIndex: false,
|
|
sortQueryParameters: true,
|
|
...options
|
|
};
|
|
|
|
// TODO: Remove this at some point in the future
|
|
if (Reflect.has(options, 'normalizeHttps')) {
|
|
throw new Error('options.normalizeHttps is renamed to options.forceHttp');
|
|
}
|
|
|
|
if (Reflect.has(options, 'normalizeHttp')) {
|
|
throw new Error('options.normalizeHttp is renamed to options.forceHttps');
|
|
}
|
|
|
|
if (Reflect.has(options, 'stripFragment')) {
|
|
throw new Error('options.stripFragment is renamed to options.stripHash');
|
|
}
|
|
|
|
urlString = urlString.trim();
|
|
|
|
// Data URL
|
|
if (/^data:/i.test(urlString)) {
|
|
return normalizeDataURL(urlString, options);
|
|
}
|
|
|
|
const hasRelativeProtocol = urlString.startsWith('//');
|
|
const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString);
|
|
|
|
// Prepend protocol
|
|
if (!isRelativeUrl) {
|
|
urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol);
|
|
}
|
|
|
|
const urlObj = new URLParser(urlString);
|
|
|
|
if (options.forceHttp && options.forceHttps) {
|
|
throw new Error('The `forceHttp` and `forceHttps` options cannot be used together');
|
|
}
|
|
|
|
if (options.forceHttp && urlObj.protocol === 'https:') {
|
|
urlObj.protocol = 'http:';
|
|
}
|
|
|
|
if (options.forceHttps && urlObj.protocol === 'http:') {
|
|
urlObj.protocol = 'https:';
|
|
}
|
|
|
|
// Remove auth
|
|
if (options.stripAuthentication) {
|
|
urlObj.username = '';
|
|
urlObj.password = '';
|
|
}
|
|
|
|
// Remove hash
|
|
if (options.stripHash) {
|
|
urlObj.hash = '';
|
|
}
|
|
|
|
// Remove duplicate slashes if not preceded by a protocol
|
|
if (urlObj.pathname) {
|
|
// TODO: Use the following instead when targeting Node.js 10
|
|
// `urlObj.pathname = urlObj.pathname.replace(/(?<!https?:)\/{2,}/g, '/');`
|
|
urlObj.pathname = urlObj.pathname.replace(/((?!:).|^)\/{2,}/g, (_, p1) => {
|
|
if (/^(?!\/)/g.test(p1)) {
|
|
return `${p1}/`;
|
|
}
|
|
|
|
return '/';
|
|
});
|
|
}
|
|
|
|
// Decode URI octets
|
|
if (urlObj.pathname) {
|
|
urlObj.pathname = decodeURI(urlObj.pathname);
|
|
}
|
|
|
|
// Remove directory index
|
|
if (options.removeDirectoryIndex === true) {
|
|
options.removeDirectoryIndex = [/^index\.[a-z]+$/];
|
|
}
|
|
|
|
if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {
|
|
let pathComponents = urlObj.pathname.split('/');
|
|
const lastComponent = pathComponents[pathComponents.length - 1];
|
|
|
|
if (testParameter(lastComponent, options.removeDirectoryIndex)) {
|
|
pathComponents = pathComponents.slice(0, pathComponents.length - 1);
|
|
urlObj.pathname = pathComponents.slice(1).join('/') + '/';
|
|
}
|
|
}
|
|
|
|
if (urlObj.hostname) {
|
|
// Remove trailing dot
|
|
urlObj.hostname = urlObj.hostname.replace(/\.$/, '');
|
|
|
|
// Remove `www.`
|
|
if (options.stripWWW && /^www\.([a-z\-\d]{2,63})\.([a-z.]{2,5})$/.test(urlObj.hostname)) {
|
|
// Each label should be at most 63 characters long (min: 2).
|
|
// The extension should be at most 5 characters long (min: 2).
|
|
// Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
|
|
urlObj.hostname = urlObj.hostname.replace(/^www\./, '');
|
|
}
|
|
}
|
|
|
|
// Remove query unwanted parameters
|
|
if (Array.isArray(options.removeQueryParameters)) {
|
|
for (const key of [...urlObj.searchParams.keys()]) {
|
|
if (testParameter(key, options.removeQueryParameters)) {
|
|
urlObj.searchParams.delete(key);
|
|
}
|
|
}
|
|
}
|
|
|
|
// Sort query parameters
|
|
if (options.sortQueryParameters) {
|
|
urlObj.searchParams.sort();
|
|
}
|
|
|
|
if (options.removeTrailingSlash) {
|
|
urlObj.pathname = urlObj.pathname.replace(/\/$/, '');
|
|
}
|
|
|
|
// Take advantage of many of the Node `url` normalizations
|
|
urlString = urlObj.toString();
|
|
|
|
// Remove ending `/`
|
|
if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '') {
|
|
urlString = urlString.replace(/\/$/, '');
|
|
}
|
|
|
|
// Restore relative protocol, if applicable
|
|
if (hasRelativeProtocol && !options.normalizeProtocol) {
|
|
urlString = urlString.replace(/^http:\/\//, '//');
|
|
}
|
|
|
|
// Remove http/https
|
|
if (options.stripProtocol) {
|
|
urlString = urlString.replace(/^(?:https?:)?\/\//, '');
|
|
}
|
|
|
|
return urlString;
|
|
};
|
|
|
|
module.exports = normalizeUrl;
|
|
// TODO: Remove this for the next major release
|
|
module.exports.default = normalizeUrl;
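// Illustrative behaviour with the defaults above (comment-only sketch):
//
//   normalizeUrl('HTTP://www.Example.com:80/foo/bar/../baz')
//   //=> 'http://example.com/foo/baz'   (lower-cased, default port dropped,
//   //                                   `www.` stripped, path normalized)
//   normalizeUrl('example.com/?b=2&a=1&utm_source=x')
//   //=> 'http://example.com/?a=1&b=2'  (protocol prepended, utm_* removed,
//   //                                   query sorted)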
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 57:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const fs = __webpack_require__(747);
|
|
const util = __webpack_require__(669);
|
|
const is = __webpack_require__(534);
|
|
const isFormData = __webpack_require__(504);
|
|
|
|
module.exports = async options => {
|
|
const {body} = options;
|
|
|
|
if (options.headers['content-length']) {
|
|
return Number(options.headers['content-length']);
|
|
}
|
|
|
|
if (!body && !options.stream) {
|
|
return 0;
|
|
}
|
|
|
|
if (is.string(body)) {
|
|
return Buffer.byteLength(body);
|
|
}
|
|
|
|
if (isFormData(body)) {
|
|
return util.promisify(body.getLength.bind(body))();
|
|
}
|
|
|
|
if (body instanceof fs.ReadStream) {
|
|
const {size} = await util.promisify(fs.stat)(body.path);
|
|
return size;
|
|
}
|
|
|
|
return null;
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 86:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const {URL, URLSearchParams} = __webpack_require__(835); // TODO: Use the `URL` global when targeting Node.js 10
|
|
const urlLib = __webpack_require__(835);
|
|
const is = __webpack_require__(534);
|
|
const urlParseLax = __webpack_require__(173);
|
|
const lowercaseKeys = __webpack_require__(474);
|
|
const urlToOptions = __webpack_require__(811);
|
|
const isFormData = __webpack_require__(504);
|
|
const merge = __webpack_require__(821);
|
|
const knownHookEvents = __webpack_require__(433);
|
|
|
|
const retryAfterStatusCodes = new Set([413, 429, 503]);
|
|
|
|
// `preNormalize` handles static options (e.g. headers).
|
|
// For example, when you create a custom instance and make a request
|
|
// with no static changes, they won't be normalized again.
|
|
//
|
|
// `normalize` operates on dynamic options - they cannot be saved.
|
|
// For example, `body` is different for every request.
|
|
// When it's done normalizing the new options, it performs merge()
|
|
// on the prenormalized options and the normalized ones.
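// For instance, `preNormalize` below turns a numeric `timeout: 5000` into
// `gotTimeout: {request: 5000}`, keeps a `timeout` object as-is under
// `gotTimeout`, and always deletes the original `timeout` key.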
|
|
|
|
const preNormalize = (options, defaults) => {
|
|
if (is.nullOrUndefined(options.headers)) {
|
|
options.headers = {};
|
|
} else {
|
|
options.headers = lowercaseKeys(options.headers);
|
|
}
|
|
|
|
if (options.baseUrl && !options.baseUrl.toString().endsWith('/')) {
|
|
options.baseUrl += '/';
|
|
}
|
|
|
|
if (options.stream) {
|
|
options.json = false;
|
|
}
|
|
|
|
if (is.nullOrUndefined(options.hooks)) {
|
|
options.hooks = {};
|
|
} else if (!is.object(options.hooks)) {
|
|
throw new TypeError(`Parameter \`hooks\` must be an object, not ${is(options.hooks)}`);
|
|
}
|
|
|
|
for (const event of knownHookEvents) {
|
|
if (is.nullOrUndefined(options.hooks[event])) {
|
|
if (defaults) {
|
|
options.hooks[event] = [...defaults.hooks[event]];
|
|
} else {
|
|
options.hooks[event] = [];
|
|
}
|
|
}
|
|
}
|
|
|
|
if (is.number(options.timeout)) {
|
|
options.gotTimeout = {request: options.timeout};
|
|
} else if (is.object(options.timeout)) {
|
|
options.gotTimeout = options.timeout;
|
|
}
|
|
|
|
delete options.timeout;
|
|
|
|
const {retry} = options;
|
|
options.retry = {
|
|
retries: 0,
|
|
methods: [],
|
|
statusCodes: [],
|
|
errorCodes: []
|
|
};
|
|
|
|
if (is.nonEmptyObject(defaults) && retry !== false) {
|
|
options.retry = {...defaults.retry};
|
|
}
|
|
|
|
if (retry !== false) {
|
|
if (is.number(retry)) {
|
|
options.retry.retries = retry;
|
|
} else {
|
|
options.retry = {...options.retry, ...retry};
|
|
}
|
|
}
|
|
|
|
if (options.gotTimeout) {
|
|
options.retry.maxRetryAfter = Math.min(...[options.gotTimeout.request, options.gotTimeout.connection].filter(n => !is.nullOrUndefined(n)));
|
|
}
|
|
|
|
if (is.array(options.retry.methods)) {
|
|
options.retry.methods = new Set(options.retry.methods.map(method => method.toUpperCase()));
|
|
}
|
|
|
|
if (is.array(options.retry.statusCodes)) {
|
|
options.retry.statusCodes = new Set(options.retry.statusCodes);
|
|
}
|
|
|
|
if (is.array(options.retry.errorCodes)) {
|
|
options.retry.errorCodes = new Set(options.retry.errorCodes);
|
|
}
|
|
|
|
return options;
|
|
};
|
|
|
|
const normalize = (url, options, defaults) => {
|
|
if (is.plainObject(url)) {
|
|
options = {...url, ...options};
|
|
url = options.url || {};
|
|
delete options.url;
|
|
}
|
|
|
|
if (defaults) {
|
|
options = merge({}, defaults.options, options ? preNormalize(options, defaults.options) : {});
|
|
} else {
|
|
options = merge({}, preNormalize(options));
|
|
}
|
|
|
|
if (!is.string(url) && !is.object(url)) {
|
|
throw new TypeError(`Parameter \`url\` must be a string or object, not ${is(url)}`);
|
|
}
|
|
|
|
if (is.string(url)) {
|
|
if (options.baseUrl) {
|
|
if (url.toString().startsWith('/')) {
|
|
url = url.toString().slice(1);
|
|
}
|
|
|
|
url = urlToOptions(new URL(url, options.baseUrl));
|
|
} else {
|
|
url = url.replace(/^unix:/, 'http://$&');
|
|
url = urlParseLax(url);
|
|
}
|
|
} else if (is(url) === 'URL') {
|
|
url = urlToOptions(url);
|
|
}
|
|
|
|
// Override both null/undefined with default protocol
|
|
options = merge({path: ''}, url, {protocol: url.protocol || 'https:'}, options);
|
|
|
|
for (const hook of options.hooks.init) {
|
|
const called = hook(options);
|
|
|
|
if (is.promise(called)) {
|
|
throw new TypeError('The `init` hook must be a synchronous function');
|
|
}
|
|
}
|
|
|
|
const {baseUrl} = options;
|
|
Object.defineProperty(options, 'baseUrl', {
|
|
set: () => {
|
|
throw new Error('Failed to set baseUrl. Options are normalized already.');
|
|
},
|
|
get: () => baseUrl
|
|
});
|
|
|
|
const {query} = options;
|
|
if (is.nonEmptyString(query) || is.nonEmptyObject(query) || query instanceof URLSearchParams) {
|
|
if (!is.string(query)) {
|
|
options.query = (new URLSearchParams(query)).toString();
|
|
}
|
|
|
|
options.path = `${options.path.split('?')[0]}?${options.query}`;
|
|
delete options.query;
|
|
}
|
|
|
|
if (options.hostname === 'unix') {
|
|
const matches = /(.+?):(.+)/.exec(options.path);
|
|
|
|
if (matches) {
|
|
const [, socketPath, path] = matches;
|
|
options = {
|
|
...options,
|
|
socketPath,
|
|
path,
|
|
host: null
|
|
};
|
|
}
|
|
}
|
|
|
|
const {headers} = options;
|
|
for (const [key, value] of Object.entries(headers)) {
|
|
if (is.nullOrUndefined(value)) {
|
|
delete headers[key];
|
|
}
|
|
}
|
|
|
|
if (options.json && is.undefined(headers.accept)) {
|
|
headers.accept = 'application/json';
|
|
}
|
|
|
|
if (options.decompress && is.undefined(headers['accept-encoding'])) {
|
|
headers['accept-encoding'] = 'gzip, deflate';
|
|
}
|
|
|
|
const {body} = options;
|
|
if (is.nullOrUndefined(body)) {
|
|
options.method = options.method ? options.method.toUpperCase() : 'GET';
|
|
} else {
|
|
const isObject = is.object(body) && !is.buffer(body) && !is.nodeStream(body);
|
|
if (!is.nodeStream(body) && !is.string(body) && !is.buffer(body) && !(options.form || options.json)) {
|
|
throw new TypeError('The `body` option must be a stream.Readable, string or Buffer');
|
|
}
|
|
|
|
if (options.json && !(isObject || is.array(body))) {
|
|
throw new TypeError('The `body` option must be an Object or Array when the `json` option is used');
|
|
}
|
|
|
|
if (options.form && !isObject) {
|
|
throw new TypeError('The `body` option must be an Object when the `form` option is used');
|
|
}
|
|
|
|
if (isFormData(body)) {
|
|
// Special case for https://github.com/form-data/form-data
|
|
headers['content-type'] = headers['content-type'] || `multipart/form-data; boundary=${body.getBoundary()}`;
|
|
} else if (options.form) {
|
|
headers['content-type'] = headers['content-type'] || 'application/x-www-form-urlencoded';
|
|
options.body = (new URLSearchParams(body)).toString();
|
|
} else if (options.json) {
|
|
headers['content-type'] = headers['content-type'] || 'application/json';
|
|
options.body = JSON.stringify(body);
|
|
}
|
|
|
|
options.method = options.method ? options.method.toUpperCase() : 'POST';
|
|
}
|
|
|
|
if (!is.function(options.retry.retries)) {
|
|
const {retries} = options.retry;
|
|
|
|
options.retry.retries = (iteration, error) => {
|
|
if (iteration > retries) {
|
|
return 0;
|
|
}
|
|
|
|
if ((!error || !options.retry.errorCodes.has(error.code)) && (!options.retry.methods.has(error.method) || !options.retry.statusCodes.has(error.statusCode))) {
|
|
return 0;
|
|
}
|
|
|
|
if (Reflect.has(error, 'headers') && Reflect.has(error.headers, 'retry-after') && retryAfterStatusCodes.has(error.statusCode)) {
|
|
let after = Number(error.headers['retry-after']);
|
|
if (is.nan(after)) {
|
|
after = Date.parse(error.headers['retry-after']) - Date.now();
|
|
} else {
|
|
after *= 1000;
|
|
}
|
|
|
|
if (after > options.retry.maxRetryAfter) {
|
|
return 0;
|
|
}
|
|
|
|
return after;
|
|
}
|
|
|
|
if (error.statusCode === 413) {
|
|
return 0;
|
|
}
|
|
|
|
const noise = Math.random() * 100;
|
|
return ((2 ** (iteration - 1)) * 1000) + noise;
|
|
};
|
|
}
|
|
|
|
return options;
|
|
};
|
|
|
|
const reNormalize = options => normalize(urlLib.format(options), options);
|
|
|
|
module.exports = normalize;
|
|
module.exports.preNormalize = preNormalize;
|
|
module.exports.reNormalize = reNormalize;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 87:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("os");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 89:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
// We define these manually to ensure they're always copied
|
|
// even if they would move up the prototype chain
|
|
// https://nodejs.org/api/http.html#http_class_http_incomingmessage
|
|
const knownProps = [
|
|
'destroy',
|
|
'setTimeout',
|
|
'socket',
|
|
'headers',
|
|
'trailers',
|
|
'rawHeaders',
|
|
'statusCode',
|
|
'httpVersion',
|
|
'httpVersionMinor',
|
|
'httpVersionMajor',
|
|
'rawTrailers',
|
|
'statusMessage'
|
|
];
|
|
|
|
module.exports = (fromStream, toStream) => {
|
|
const fromProps = new Set(Object.keys(fromStream).concat(knownProps));
|
|
|
|
for (const prop of fromProps) {
|
|
// Don't overwrite existing properties
|
|
if (prop in toStream) {
|
|
continue;
|
|
}
|
|
|
|
toStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop];
|
|
}
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 93:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
const Readable = __webpack_require__(413).Readable;
|
|
const lowercaseKeys = __webpack_require__(474);
|
|
|
|
class Response extends Readable {
|
|
constructor(statusCode, headers, body, url) {
|
|
if (typeof statusCode !== 'number') {
|
|
throw new TypeError('Argument `statusCode` should be a number');
|
|
}
|
|
if (typeof headers !== 'object') {
|
|
throw new TypeError('Argument `headers` should be an object');
|
|
}
|
|
if (!(body instanceof Buffer)) {
|
|
throw new TypeError('Argument `body` should be a buffer');
|
|
}
|
|
if (typeof url !== 'string') {
|
|
throw new TypeError('Argument `url` should be a string');
|
|
}
|
|
|
|
super();
|
|
this.statusCode = statusCode;
|
|
this.headers = lowercaseKeys(headers);
|
|
this.body = body;
|
|
this.url = url;
|
|
}
|
|
|
|
_read() {
|
|
this.push(this.body);
|
|
this.push(null);
|
|
}
|
|
}
|
|
|
|
module.exports = Response;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 97:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
module.exports = object => {
|
|
const result = {};
|
|
|
|
for (const [key, value] of Object.entries(object)) {
|
|
result[key.toLowerCase()] = value;
|
|
}
|
|
|
|
return result;
|
|
};
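// e.g. the function above maps {'Content-Type': 'text/plain'} to
// {'content-type': 'text/plain'} (comment-only illustration).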
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 104:
|
|
/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) {
|
|
|
|
const core = __webpack_require__(470);
const { exportSecrets } = __webpack_require__(751);

(async () => {
    try {
        await core.group('Get Vault Secrets', exportSecrets);
    } catch (error) {
        core.setFailed(error.message);
    }
})();
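// Entry module of the bundle: `core` (module 470) provides the logging and
// failure helpers used here, and `exportSecrets` (module 751) does the actual
// work; the async IIFE above runs it inside a collapsible log group and marks
// the step as failed if it throws.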
|
|
|
|
/***/ }),
|
|
|
|
/***/ 128:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
module.exports = (url, opts) => {
|
|
if (typeof url !== 'string') {
|
|
throw new TypeError(`Expected \`url\` to be of type \`string\`, got \`${typeof url}\``);
|
|
}
|
|
|
|
url = url.trim();
|
|
opts = Object.assign({https: false}, opts);
|
|
|
|
if (/^\.*\/|^(?!localhost)\w+:/.test(url)) {
|
|
return url;
|
|
}
|
|
|
|
return url.replace(/^(?!(?:\w+:)?\/\/)/, opts.https ? 'https://' : 'http://');
|
|
};
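// Illustrative behaviour (comment-only sketch; `prependHttp` is a hypothetical
// local name for this module's export):
//
//   prependHttp('example.com')                 //=> 'http://example.com'
//   prependHttp('example.com', {https: true})  //=> 'https://example.com'
//   prependHttp('./relative') and prependHttp('mailto:a@b') return the input unchanged.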
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 145:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const pump = __webpack_require__(453);
|
|
const bufferStream = __webpack_require__(966);
|
|
|
|
class MaxBufferError extends Error {
|
|
constructor() {
|
|
super('maxBuffer exceeded');
|
|
this.name = 'MaxBufferError';
|
|
}
|
|
}
|
|
|
|
function getStream(inputStream, options) {
|
|
if (!inputStream) {
|
|
return Promise.reject(new Error('Expected a stream'));
|
|
}
|
|
|
|
options = Object.assign({maxBuffer: Infinity}, options);
|
|
|
|
const {maxBuffer} = options;
|
|
|
|
let stream;
|
|
return new Promise((resolve, reject) => {
|
|
const rejectPromise = error => {
|
|
if (error) { // A null check
|
|
error.bufferedData = stream.getBufferedValue();
|
|
}
|
|
reject(error);
|
|
};
|
|
|
|
stream = pump(inputStream, bufferStream(options), error => {
|
|
if (error) {
|
|
rejectPromise(error);
|
|
return;
|
|
}
|
|
|
|
resolve();
|
|
});
|
|
|
|
stream.on('data', () => {
|
|
if (stream.getBufferedLength() > maxBuffer) {
|
|
rejectPromise(new MaxBufferError());
|
|
}
|
|
});
|
|
}).then(() => stream.getBufferedValue());
|
|
}
|
|
|
|
module.exports = getStream;
|
|
module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
|
|
module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
|
|
module.exports.MaxBufferError = MaxBufferError;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 154:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
// rfc7231 6.1
|
|
const statusCodeCacheableByDefault = [
|
|
200,
|
|
203,
|
|
204,
|
|
206,
|
|
300,
|
|
301,
|
|
404,
|
|
405,
|
|
410,
|
|
414,
|
|
501,
|
|
];
|
|
|
|
// This implementation does not understand partial responses (206)
|
|
const understoodStatuses = [
|
|
200,
|
|
203,
|
|
204,
|
|
300,
|
|
301,
|
|
302,
|
|
303,
|
|
307,
|
|
308,
|
|
404,
|
|
405,
|
|
410,
|
|
414,
|
|
501,
|
|
];
|
|
|
|
const hopByHopHeaders = {
|
|
date: true, // included, because we add Age and update Date
|
|
connection: true,
|
|
'keep-alive': true,
|
|
'proxy-authenticate': true,
|
|
'proxy-authorization': true,
|
|
te: true,
|
|
trailer: true,
|
|
'transfer-encoding': true,
|
|
upgrade: true,
|
|
};
|
|
const excludedFromRevalidationUpdate = {
|
|
// Since the old body is reused, it doesn't make sense to change properties of the body
|
|
'content-length': true,
|
|
'content-encoding': true,
|
|
'transfer-encoding': true,
|
|
'content-range': true,
|
|
};
|
|
|
|
function parseCacheControl(header) {
|
|
const cc = {};
|
|
if (!header) return cc;
|
|
|
|
// TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives),
|
|
// the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale
|
|
const parts = header.trim().split(/\s*,\s*/); // TODO: lame parsing
|
|
for (const part of parts) {
|
|
const [k, v] = part.split(/\s*=\s*/, 2);
|
|
cc[k] = v === undefined ? true : v.replace(/^"|"$/g, ''); // TODO: lame unquoting
|
|
}
|
|
|
|
return cc;
|
|
}
|
|
|
|
function formatCacheControl(cc) {
|
|
let parts = [];
|
|
for (const k in cc) {
|
|
const v = cc[k];
|
|
parts.push(v === true ? k : k + '=' + v);
|
|
}
|
|
if (!parts.length) {
|
|
return undefined;
|
|
}
|
|
return parts.join(', ');
|
|
}
|
|
|
|
module.exports = class CachePolicy {
|
|
constructor(
|
|
req,
|
|
res,
|
|
{
|
|
shared,
|
|
cacheHeuristic,
|
|
immutableMinTimeToLive,
|
|
ignoreCargoCult,
|
|
trustServerDate,
|
|
_fromObject,
|
|
} = {}
|
|
) {
|
|
if (_fromObject) {
|
|
this._fromObject(_fromObject);
|
|
return;
|
|
}
|
|
|
|
if (!res || !res.headers) {
|
|
throw Error('Response headers missing');
|
|
}
|
|
this._assertRequestHasHeaders(req);
|
|
|
|
this._responseTime = this.now();
|
|
this._isShared = shared !== false;
|
|
this._trustServerDate =
|
|
undefined !== trustServerDate ? trustServerDate : true;
|
|
this._cacheHeuristic =
|
|
undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE
|
|
this._immutableMinTtl =
|
|
undefined !== immutableMinTimeToLive
|
|
? immutableMinTimeToLive
|
|
: 24 * 3600 * 1000;
|
|
|
|
this._status = 'status' in res ? res.status : 200;
|
|
this._resHeaders = res.headers;
|
|
this._rescc = parseCacheControl(res.headers['cache-control']);
|
|
this._method = 'method' in req ? req.method : 'GET';
|
|
this._url = req.url;
|
|
this._host = req.headers.host;
|
|
this._noAuthorization = !req.headers.authorization;
|
|
this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used
|
|
this._reqcc = parseCacheControl(req.headers['cache-control']);
|
|
|
|
// Assume that if someone uses legacy, non-standard, unnecessary options they don't understand caching,
|
|
// so there's no point strictly adhering to the blindly copy&pasted directives.
|
|
if (
|
|
ignoreCargoCult &&
|
|
'pre-check' in this._rescc &&
|
|
'post-check' in this._rescc
|
|
) {
|
|
delete this._rescc['pre-check'];
|
|
delete this._rescc['post-check'];
|
|
delete this._rescc['no-cache'];
|
|
delete this._rescc['no-store'];
|
|
delete this._rescc['must-revalidate'];
|
|
this._resHeaders = Object.assign({}, this._resHeaders, {
|
|
'cache-control': formatCacheControl(this._rescc),
|
|
});
|
|
delete this._resHeaders.expires;
|
|
delete this._resHeaders.pragma;
|
|
}
|
|
|
|
// When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive
|
|
// as having the same effect as if "Cache-Control: no-cache" were present (see Section 5.2.1).
|
|
if (
|
|
res.headers['cache-control'] == null &&
|
|
/no-cache/.test(res.headers.pragma)
|
|
) {
|
|
this._rescc['no-cache'] = true;
|
|
}
|
|
}
|
|
|
|
now() {
|
|
return Date.now();
|
|
}
|
|
|
|
storable() {
|
|
// The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.
|
|
return !!(
|
|
!this._reqcc['no-store'] &&
|
|
// A cache MUST NOT store a response to any request, unless:
|
|
// The request method is understood by the cache and defined as being cacheable, and
|
|
('GET' === this._method ||
|
|
'HEAD' === this._method ||
|
|
('POST' === this._method && this._hasExplicitExpiration())) &&
|
|
// the response status code is understood by the cache, and
|
|
understoodStatuses.indexOf(this._status) !== -1 &&
|
|
// the "no-store" cache directive does not appear in request or response header fields, and
|
|
!this._rescc['no-store'] &&
|
|
// the "private" response directive does not appear in the response, if the cache is shared, and
|
|
(!this._isShared || !this._rescc.private) &&
|
|
// the Authorization header field does not appear in the request, if the cache is shared,
|
|
(!this._isShared ||
|
|
this._noAuthorization ||
|
|
this._allowsStoringAuthenticated()) &&
|
|
// the response either:
|
|
// contains an Expires header field, or
|
|
(this._resHeaders.expires ||
|
|
// contains a max-age response directive, or
|
|
// contains a s-maxage response directive and the cache is shared, or
|
|
// contains a public response directive.
|
|
this._rescc.public ||
|
|
this._rescc['max-age'] ||
|
|
this._rescc['s-maxage'] ||
|
|
// has a status code that is defined as cacheable by default
|
|
statusCodeCacheableByDefault.indexOf(this._status) !== -1)
|
|
);
|
|
}
|
|
|
|
_hasExplicitExpiration() {
|
|
// 4.2.1 Calculating Freshness Lifetime
|
|
return (
|
|
(this._isShared && this._rescc['s-maxage']) ||
|
|
this._rescc['max-age'] ||
|
|
this._resHeaders.expires
|
|
);
|
|
}
|
|
|
|
_assertRequestHasHeaders(req) {
|
|
if (!req || !req.headers) {
|
|
throw Error('Request headers missing');
|
|
}
|
|
}
|
|
|
|
satisfiesWithoutRevalidation(req) {
|
|
this._assertRequestHasHeaders(req);
|
|
|
|
// When presented with a request, a cache MUST NOT reuse a stored response, unless:
|
|
// the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive,
|
|
// unless the stored response is successfully validated (Section 4.3), and
|
|
const requestCC = parseCacheControl(req.headers['cache-control']);
|
|
if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) {
|
|
return false;
|
|
}
|
|
|
|
if (requestCC['max-age'] && this.age() > requestCC['max-age']) {
|
|
return false;
|
|
}
|
|
|
|
if (
|
|
requestCC['min-fresh'] &&
|
|
this.timeToLive() < 1000 * requestCC['min-fresh']
|
|
) {
|
|
return false;
|
|
}
|
|
|
|
// the stored response is either:
|
|
// fresh, or allowed to be served stale
|
|
if (this.stale()) {
|
|
const allowsStale =
|
|
requestCC['max-stale'] &&
|
|
!this._rescc['must-revalidate'] &&
|
|
(true === requestCC['max-stale'] ||
|
|
requestCC['max-stale'] > this.age() - this.maxAge());
|
|
if (!allowsStale) {
|
|
return false;
|
|
}
|
|
}
|
|
|
|
return this._requestMatches(req, false);
|
|
}
|
|
|
|
_requestMatches(req, allowHeadMethod) {
|
|
// The presented effective request URI and that of the stored response match, and
|
|
return (
|
|
(!this._url || this._url === req.url) &&
|
|
this._host === req.headers.host &&
|
|
// the request method associated with the stored response allows it to be used for the presented request, and
|
|
(!req.method ||
|
|
this._method === req.method ||
|
|
(allowHeadMethod && 'HEAD' === req.method)) &&
|
|
// selecting header fields nominated by the stored response (if any) match those presented, and
|
|
this._varyMatches(req)
|
|
);
|
|
}
|
|
|
|
_allowsStoringAuthenticated() {
|
|
// following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.
|
|
return (
|
|
this._rescc['must-revalidate'] ||
|
|
this._rescc.public ||
|
|
this._rescc['s-maxage']
|
|
);
|
|
}
|
|
|
|
_varyMatches(req) {
|
|
if (!this._resHeaders.vary) {
|
|
return true;
|
|
}
|
|
|
|
// A Vary header field-value of "*" always fails to match
|
|
if (this._resHeaders.vary === '*') {
|
|
return false;
|
|
}
|
|
|
|
const fields = this._resHeaders.vary
|
|
.trim()
|
|
.toLowerCase()
|
|
.split(/\s*,\s*/);
|
|
for (const name of fields) {
|
|
if (req.headers[name] !== this._reqHeaders[name]) return false;
|
|
}
|
|
return true;
|
|
}
|
|
|
|
_copyWithoutHopByHopHeaders(inHeaders) {
|
|
const headers = {};
|
|
for (const name in inHeaders) {
|
|
if (hopByHopHeaders[name]) continue;
|
|
headers[name] = inHeaders[name];
|
|
}
|
|
// 9.1. Connection
|
|
if (inHeaders.connection) {
|
|
const tokens = inHeaders.connection.trim().split(/\s*,\s*/);
|
|
for (const name of tokens) {
|
|
delete headers[name];
|
|
}
|
|
}
|
|
if (headers.warning) {
|
|
const warnings = headers.warning.split(/,/).filter(warning => {
|
|
return !/^\s*1[0-9][0-9]/.test(warning);
|
|
});
|
|
if (!warnings.length) {
|
|
delete headers.warning;
|
|
} else {
|
|
headers.warning = warnings.join(',').trim();
|
|
}
|
|
}
|
|
return headers;
|
|
}
|
|
|
|
responseHeaders() {
|
|
const headers = this._copyWithoutHopByHopHeaders(this._resHeaders);
|
|
const age = this.age();
|
|
|
|
// A cache SHOULD generate a 113 warning if it heuristically chose a freshness
|
|
// lifetime greater than 24 hours and the response's age is greater than 24 hours.
|
|
if (
|
|
age > 3600 * 24 &&
|
|
!this._hasExplicitExpiration() &&
|
|
this.maxAge() > 3600 * 24
|
|
) {
|
|
headers.warning =
|
|
(headers.warning ? `${headers.warning}, ` : '') +
|
|
'113 - "rfc7234 5.5.4"';
|
|
}
|
|
headers.age = `${Math.round(age)}`;
|
|
headers.date = new Date(this.now()).toUTCString();
|
|
return headers;
|
|
}
|
|
|
|
/**
|
|
* Value of the Date response header or current time if Date was deemed invalid
|
|
* @return timestamp
|
|
*/
|
|
date() {
|
|
if (this._trustServerDate) {
|
|
return this._serverDate();
|
|
}
|
|
return this._responseTime;
|
|
}
|
|
|
|
_serverDate() {
|
|
const dateValue = Date.parse(this._resHeaders.date);
|
|
if (isFinite(dateValue)) {
|
|
const maxClockDrift = 8 * 3600 * 1000;
|
|
const clockDrift = Math.abs(this._responseTime - dateValue);
|
|
if (clockDrift < maxClockDrift) {
|
|
return dateValue;
|
|
}
|
|
}
|
|
return this._responseTime;
|
|
}
|
|
|
|
/**
|
|
* Value of the Age header, in seconds, updated for the current time.
|
|
* May be fractional.
|
|
*
|
|
* @return Number
|
|
*/
|
|
age() {
|
|
let age = Math.max(0, (this._responseTime - this.date()) / 1000);
|
|
if (this._resHeaders.age) {
|
|
let ageValue = this._ageValue();
|
|
if (ageValue > age) age = ageValue;
|
|
}
|
|
|
|
const residentTime = (this.now() - this._responseTime) / 1000;
|
|
return age + residentTime;
|
|
}
|
|
|
|
_ageValue() {
|
|
const ageValue = parseInt(this._resHeaders.age);
|
|
return isFinite(ageValue) ? ageValue : 0;
|
|
}
|
|
|
|
/**
|
|
* Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.
|
|
*
|
|
* For an up-to-date value, see `timeToLive()`.
|
|
*
|
|
* @return Number
|
|
*/
|
|
maxAge() {
|
|
if (!this.storable() || this._rescc['no-cache']) {
|
|
return 0;
|
|
}
|
|
|
|
// Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default
|
|
// so this implementation requires explicit opt-in via public header
|
|
if (
|
|
this._isShared &&
|
|
(this._resHeaders['set-cookie'] &&
|
|
!this._rescc.public &&
|
|
!this._rescc.immutable)
|
|
) {
|
|
return 0;
|
|
}
|
|
|
|
if (this._resHeaders.vary === '*') {
|
|
return 0;
|
|
}
|
|
|
|
if (this._isShared) {
|
|
if (this._rescc['proxy-revalidate']) {
|
|
return 0;
|
|
}
|
|
// if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field.
|
|
if (this._rescc['s-maxage']) {
|
|
return parseInt(this._rescc['s-maxage'], 10);
|
|
}
|
|
}
|
|
|
|
// If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field.
|
|
if (this._rescc['max-age']) {
|
|
return parseInt(this._rescc['max-age'], 10);
|
|
}
|
|
|
|
const defaultMinTtl = this._rescc.immutable ? this._immutableMinTtl : 0;
|
|
|
|
const dateValue = this._serverDate();
|
|
if (this._resHeaders.expires) {
|
|
const expires = Date.parse(this._resHeaders.expires);
|
|
// A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired").
|
|
if (Number.isNaN(expires) || expires < dateValue) {
|
|
return 0;
|
|
}
|
|
return Math.max(defaultMinTtl, (expires - dateValue) / 1000);
|
|
}
|
|
|
|
if (this._resHeaders['last-modified']) {
|
|
const lastModified = Date.parse(this._resHeaders['last-modified']);
|
|
if (isFinite(lastModified) && dateValue > lastModified) {
|
|
return Math.max(
|
|
defaultMinTtl,
|
|
((dateValue - lastModified) / 1000) * this._cacheHeuristic
|
|
);
|
|
}
|
|
}
|
|
|
|
return defaultMinTtl;
|
|
}
|
|
|
|
timeToLive() {
|
|
return Math.max(0, this.maxAge() - this.age()) * 1000;
|
|
}
|
|
|
|
stale() {
|
|
return this.maxAge() <= this.age();
|
|
}
|
|
|
|
static fromObject(obj) {
|
|
return new this(undefined, undefined, { _fromObject: obj });
|
|
}
|
|
|
|
_fromObject(obj) {
|
|
if (this._responseTime) throw Error('Reinitialized');
|
|
if (!obj || obj.v !== 1) throw Error('Invalid serialization');
|
|
|
|
this._responseTime = obj.t;
|
|
this._isShared = obj.sh;
|
|
this._cacheHeuristic = obj.ch;
|
|
this._immutableMinTtl =
|
|
obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000;
|
|
this._status = obj.st;
|
|
this._resHeaders = obj.resh;
|
|
this._rescc = obj.rescc;
|
|
this._method = obj.m;
|
|
this._url = obj.u;
|
|
this._host = obj.h;
|
|
this._noAuthorization = obj.a;
|
|
this._reqHeaders = obj.reqh;
|
|
this._reqcc = obj.reqcc;
|
|
}
|
|
|
|
toObject() {
|
|
return {
|
|
v: 1,
|
|
t: this._responseTime,
|
|
sh: this._isShared,
|
|
ch: this._cacheHeuristic,
|
|
imm: this._immutableMinTtl,
|
|
st: this._status,
|
|
resh: this._resHeaders,
|
|
rescc: this._rescc,
|
|
m: this._method,
|
|
u: this._url,
|
|
h: this._host,
|
|
a: this._noAuthorization,
|
|
reqh: this._reqHeaders,
|
|
reqcc: this._reqcc,
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Headers for sending to the origin server to revalidate stale response.
|
|
* Allows server to return 304 to allow reuse of the previous response.
|
|
*
|
|
* Hop by hop headers are always stripped.
|
|
* Revalidation headers may be added or removed, depending on request.
|
|
*/
|
|
revalidationHeaders(incomingReq) {
|
|
this._assertRequestHasHeaders(incomingReq);
|
|
const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers);
|
|
|
|
// This implementation does not understand range requests
|
|
delete headers['if-range'];
|
|
|
|
if (!this._requestMatches(incomingReq, true) || !this.storable()) {
|
|
// revalidation allowed via HEAD
|
|
// not for the same resource, or wasn't allowed to be cached anyway
|
|
delete headers['if-none-match'];
|
|
delete headers['if-modified-since'];
|
|
return headers;
|
|
}
|
|
|
|
/* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */
|
|
if (this._resHeaders.etag) {
|
|
headers['if-none-match'] = headers['if-none-match']
|
|
? `${headers['if-none-match']}, ${this._resHeaders.etag}`
|
|
: this._resHeaders.etag;
|
|
}
|
|
|
|
// Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.
|
|
const forbidsWeakValidators =
|
|
headers['accept-ranges'] ||
|
|
headers['if-match'] ||
|
|
headers['if-unmodified-since'] ||
|
|
(this._method && this._method != 'GET');
|
|
|
|
/* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server.
|
|
Note: This implementation does not understand partial responses (206) */
|
|
if (forbidsWeakValidators) {
|
|
delete headers['if-modified-since'];
|
|
|
|
if (headers['if-none-match']) {
|
|
const etags = headers['if-none-match']
|
|
.split(/,/)
|
|
.filter(etag => {
|
|
return !/^\s*W\//.test(etag);
|
|
});
|
|
if (!etags.length) {
|
|
delete headers['if-none-match'];
|
|
} else {
|
|
headers['if-none-match'] = etags.join(',').trim();
|
|
}
|
|
}
|
|
} else if (
|
|
this._resHeaders['last-modified'] &&
|
|
!headers['if-modified-since']
|
|
) {
|
|
headers['if-modified-since'] = this._resHeaders['last-modified'];
|
|
}
|
|
|
|
return headers;
|
|
}
|
|
|
|
/**
|
|
* Creates a new CachePolicy with information combined from the previous response,
|
|
* and the new revalidation response.
|
|
*
|
|
* Returns {policy, modified} where modified is a boolean indicating
|
|
* whether the response body has been modified, and old cached body can't be used.
|
|
*
|
|
* @return {Object} {policy: CachePolicy, modified: Boolean}
|
|
*/
|
|
revalidatedPolicy(request, response) {
|
|
this._assertRequestHasHeaders(request);
|
|
if (!response || !response.headers) {
|
|
throw Error('Response headers missing');
|
|
}
|
|
|
|
// These aren't going to be supported exactly, since one CachePolicy object
|
|
// doesn't know about all the other cached objects.
|
|
let matches = false;
|
|
if (response.status !== undefined && response.status != 304) {
|
|
matches = false;
|
|
} else if (
|
|
response.headers.etag &&
|
|
!/^\s*W\//.test(response.headers.etag)
|
|
) {
|
|
// "All of the stored responses with the same strong validator are selected.
|
|
// If none of the stored responses contain the same strong validator,
|
|
// then the cache MUST NOT use the new response to update any stored responses."
|
|
matches =
|
|
this._resHeaders.etag &&
|
|
this._resHeaders.etag.replace(/^\s*W\//, '') ===
|
|
response.headers.etag;
|
|
} else if (this._resHeaders.etag && response.headers.etag) {
|
|
// "If the new response contains a weak validator and that validator corresponds
|
|
// to one of the cache's stored responses,
|
|
// then the most recent of those matching stored responses is selected for update."
|
|
matches =
|
|
this._resHeaders.etag.replace(/^\s*W\//, '') ===
|
|
response.headers.etag.replace(/^\s*W\//, '');
|
|
} else if (this._resHeaders['last-modified']) {
|
|
matches =
|
|
this._resHeaders['last-modified'] ===
|
|
response.headers['last-modified'];
|
|
} else {
|
|
// If the new response does not include any form of validator (such as in the case where
|
|
// a client generates an If-Modified-Since request from a source other than the Last-Modified
|
|
// response header field), and there is only one stored response, and that stored response also
|
|
// lacks a validator, then that stored response is selected for update.
|
|
if (
|
|
!this._resHeaders.etag &&
|
|
!this._resHeaders['last-modified'] &&
|
|
!response.headers.etag &&
|
|
!response.headers['last-modified']
|
|
) {
|
|
matches = true;
|
|
}
|
|
}
|
|
|
|
if (!matches) {
|
|
return {
|
|
policy: new this.constructor(request, response),
|
|
// Client receiving 304 without body, even if it's invalid/mismatched, has no option
|
|
// but to reuse a cached body. We don't have a good way to tell clients to do
|
|
// error recovery in such case.
|
|
modified: response.status != 304,
|
|
matches: false,
|
|
};
|
|
}
|
|
|
|
// use other header fields provided in the 304 (Not Modified) response to replace all instances
|
|
// of the corresponding header fields in the stored response.
|
|
const headers = {};
|
|
for (const k in this._resHeaders) {
|
|
headers[k] =
|
|
k in response.headers && !excludedFromRevalidationUpdate[k]
|
|
? response.headers[k]
|
|
: this._resHeaders[k];
|
|
}
|
|
|
|
const newResponse = Object.assign({}, response, {
|
|
status: this._status,
|
|
method: this._method,
|
|
headers,
|
|
});
|
|
return {
|
|
policy: new this.constructor(request, newResponse, {
|
|
shared: this._isShared,
|
|
cacheHeuristic: this._cacheHeuristic,
|
|
immutableMinTimeToLive: this._immutableMinTtl,
|
|
trustServerDate: this._trustServerDate,
|
|
}),
|
|
modified: false,
|
|
matches: true,
|
|
};
|
|
}
|
|
};
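// Illustrative usage of the class above (comment-only sketch; assumes the
// export is required as `CachePolicy`, and that `cache`, `key`, `body`, `req`,
// `res` and `newReq` are hypothetical objects with lower-cased headers):
//
//   const policy = new CachePolicy(req, res, {shared: true});
//   if (policy.storable()) {
//     cache.set(key, {policy: policy.toObject(), body}, policy.timeToLive());
//   }
//   // later, for a new request:
//   const entry = cache.get(key);
//   const stored = CachePolicy.fromObject(entry.policy);
//   if (stored.satisfiesWithoutRevalidation(newReq)) {
//     // respond with entry.body and stored.responseHeaders()
//   } else {
//     // refetch using stored.revalidationHeaders(newReq), then call
//     // stored.revalidatedPolicy(newReq, revalidationResponse)
//   }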
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 173:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const url = __webpack_require__(835);
|
|
const prependHttp = __webpack_require__(128);
|
|
|
|
module.exports = (input, options) => {
|
|
if (typeof input !== 'string') {
|
|
throw new TypeError(`Expected \`url\` to be of type \`string\`, got \`${typeof input}\` instead.`);
|
|
}
|
|
|
|
const finalUrl = prependHttp(input, Object.assign({https: true}, options));
|
|
return url.parse(finalUrl);
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 205:
|
|
/***/ (function(__unusedmodule, exports) {
|
|
|
|
//TODO: handle reviver/dehydrate function like normal
|
|
//and handle indentation, like normal.
|
|
//if anyone needs this... please send pull request.
|
|
|
|
exports.stringify = function stringify (o) {
|
|
if('undefined' == typeof o) return o
|
|
|
|
if(o && Buffer.isBuffer(o))
|
|
return JSON.stringify(':base64:' + o.toString('base64'))
|
|
|
|
if(o && o.toJSON)
|
|
o = o.toJSON()
|
|
|
|
if(o && 'object' === typeof o) {
|
|
var s = ''
|
|
var array = Array.isArray(o)
|
|
s = array ? '[' : '{'
|
|
var first = true
|
|
|
|
for(var k in o) {
|
|
var ignore = 'function' == typeof o[k] || (!array && 'undefined' === typeof o[k])
|
|
if(Object.hasOwnProperty.call(o, k) && !ignore) {
|
|
if(!first)
|
|
s += ','
|
|
first = false
|
|
if (array) {
|
|
if(o[k] == undefined)
|
|
s += 'null'
|
|
else
|
|
s += stringify(o[k])
|
|
} else if (o[k] !== void(0)) {
|
|
s += stringify(k) + ':' + stringify(o[k])
|
|
}
|
|
}
|
|
}
|
|
|
|
s += array ? ']' : '}'
|
|
|
|
return s
|
|
} else if ('string' === typeof o) {
|
|
return JSON.stringify(/^:/.test(o) ? ':' + o : o)
|
|
} else if ('undefined' === typeof o) {
|
|
return 'null';
|
|
} else
|
|
return JSON.stringify(o)
|
|
}
|
|
|
|
exports.parse = function (s) {
|
|
return JSON.parse(s, function (key, value) {
|
|
if('string' === typeof value) {
|
|
if(/^:base64:/.test(value))
|
|
return new Buffer(value.substring(8), 'base64')
|
|
else
|
|
return /^:/.test(value) ? value.substring(1) : value
|
|
}
|
|
return value
|
|
})
|
|
}
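// Illustrative round-trip (comment-only sketch): Buffers are serialized with a
// ':base64:' prefix, and plain strings that start with ':' are escaped so they
// cannot collide with it:
//
//   exports.parse(exports.stringify({b: Buffer.from('hi')}))
//   //=> {b: <Buffer 68 69>}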
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 211:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("https");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 262:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const is = __webpack_require__(534);
|
|
|
|
module.exports = function deepFreeze(object) {
|
|
for (const [key, value] of Object.entries(object)) {
|
|
if (is.plainObject(value) || is.array(value)) {
|
|
deepFreeze(object[key]);
|
|
}
|
|
}
|
|
|
|
return Object.freeze(object);
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 303:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
const EventEmitter = __webpack_require__(614);
|
|
const JSONB = __webpack_require__(205);
|
|
|
|
const loadStore = opts => {
|
|
const adapters = {
|
|
redis: '@keyv/redis',
|
|
mongodb: '@keyv/mongo',
|
|
mongo: '@keyv/mongo',
|
|
sqlite: '@keyv/sqlite',
|
|
postgresql: '@keyv/postgres',
|
|
postgres: '@keyv/postgres',
|
|
mysql: '@keyv/mysql'
|
|
};
|
|
if (opts.adapter || opts.uri) {
|
|
const adapter = opts.adapter || /^[^:]*/.exec(opts.uri)[0];
|
|
return new (require(adapters[adapter]))(opts);
|
|
}
|
|
return new Map();
|
|
};
|
|
|
|
class Keyv extends EventEmitter {
|
|
constructor(uri, opts) {
|
|
super();
|
|
this.opts = Object.assign(
|
|
{
|
|
namespace: 'keyv',
|
|
serialize: JSONB.stringify,
|
|
deserialize: JSONB.parse
|
|
},
|
|
(typeof uri === 'string') ? { uri } : uri,
|
|
opts
|
|
);
|
|
|
|
if (!this.opts.store) {
|
|
const adapterOpts = Object.assign({}, this.opts);
|
|
this.opts.store = loadStore(adapterOpts);
|
|
}
|
|
|
|
if (typeof this.opts.store.on === 'function') {
|
|
this.opts.store.on('error', err => this.emit('error', err));
|
|
}
|
|
|
|
this.opts.store.namespace = this.opts.namespace;
|
|
}
|
|
|
|
_getKeyPrefix(key) {
|
|
return `${this.opts.namespace}:${key}`;
|
|
}
|
|
|
|
get(key) {
|
|
key = this._getKeyPrefix(key);
|
|
const store = this.opts.store;
|
|
return Promise.resolve()
|
|
.then(() => store.get(key))
|
|
.then(data => {
|
|
data = (typeof data === 'string') ? this.opts.deserialize(data) : data;
|
|
if (data === undefined) {
|
|
return undefined;
|
|
}
|
|
if (typeof data.expires === 'number' && Date.now() > data.expires) {
|
|
this.delete(key);
|
|
return undefined;
|
|
}
|
|
return data.value;
|
|
});
|
|
}
|
|
|
|
set(key, value, ttl) {
|
|
key = this._getKeyPrefix(key);
|
|
if (typeof ttl === 'undefined') {
|
|
ttl = this.opts.ttl;
|
|
}
|
|
if (ttl === 0) {
|
|
ttl = undefined;
|
|
}
|
|
const store = this.opts.store;
|
|
|
|
return Promise.resolve()
|
|
.then(() => {
|
|
const expires = (typeof ttl === 'number') ? (Date.now() + ttl) : null;
|
|
value = { value, expires };
|
|
return store.set(key, this.opts.serialize(value), ttl);
|
|
})
|
|
.then(() => true);
|
|
}
|
|
|
|
delete(key) {
|
|
key = this._getKeyPrefix(key);
|
|
const store = this.opts.store;
|
|
return Promise.resolve()
|
|
.then(() => store.delete(key));
|
|
}
|
|
|
|
clear() {
|
|
const store = this.opts.store;
|
|
return Promise.resolve()
|
|
.then(() => store.clear());
|
|
}
|
|
}
|
|
|
|
module.exports = Keyv;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 308:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = (socket, callback) => {
|
|
if (socket.writable && !socket.connecting) {
|
|
callback();
|
|
} else {
|
|
socket.once('connect', callback);
|
|
}
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 325:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
const PassThrough = __webpack_require__(413).PassThrough;
|
|
const mimicResponse = __webpack_require__(89);
|
|
|
|
const cloneResponse = response => {
|
|
if (!(response && response.pipe)) {
|
|
throw new TypeError('Parameter `response` must be a response stream.');
|
|
}
|
|
|
|
const clone = new PassThrough();
|
|
mimicResponse(response, clone);
|
|
|
|
return response.pipe(clone);
|
|
};
|
|
|
|
module.exports = cloneResponse;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 338:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const errors = __webpack_require__(774);
|
|
const asStream = __webpack_require__(794);
|
|
const asPromise = __webpack_require__(916);
|
|
const normalizeArguments = __webpack_require__(86);
|
|
const merge = __webpack_require__(821);
|
|
const deepFreeze = __webpack_require__(262);
|
|
|
|
const getPromiseOrStream = options => options.stream ? asStream(options) : asPromise(options);
|
|
|
|
const aliases = [
|
|
'get',
|
|
'post',
|
|
'put',
|
|
'patch',
|
|
'head',
|
|
'delete'
|
|
];
|
|
|
|
const create = defaults => {
|
|
defaults = merge({}, defaults);
|
|
normalizeArguments.preNormalize(defaults.options);
|
|
|
|
if (!defaults.handler) {
|
|
// This can't be getPromiseOrStream, because when merging
|
|
// the chain would stop at this point and no further handlers would be called.
|
|
defaults.handler = (options, next) => next(options);
|
|
}
|
|
|
|
function got(url, options) {
|
|
try {
|
|
return defaults.handler(normalizeArguments(url, options, defaults), getPromiseOrStream);
|
|
} catch (error) {
|
|
if (options && options.stream) {
|
|
throw error;
|
|
} else {
|
|
return Promise.reject(error);
|
|
}
|
|
}
|
|
}
|
|
|
|
got.create = create;
|
|
got.extend = options => {
|
|
let mutableDefaults;
|
|
if (options && Reflect.has(options, 'mutableDefaults')) {
|
|
mutableDefaults = options.mutableDefaults;
|
|
delete options.mutableDefaults;
|
|
} else {
|
|
mutableDefaults = defaults.mutableDefaults;
|
|
}
|
|
|
|
return create({
|
|
options: merge.options(defaults.options, options),
|
|
handler: defaults.handler,
|
|
mutableDefaults
|
|
});
|
|
};
|
|
|
|
got.mergeInstances = (...args) => create(merge.instances(args));
|
|
|
|
got.stream = (url, options) => got(url, {...options, stream: true});
|
|
|
|
for (const method of aliases) {
|
|
got[method] = (url, options) => got(url, {...options, method});
|
|
got.stream[method] = (url, options) => got.stream(url, {...options, method});
|
|
}
|
|
|
|
Object.assign(got, {...errors, mergeOptions: merge.options});
|
|
Object.defineProperty(got, 'defaults', {
|
|
value: defaults.mutableDefaults ? defaults : deepFreeze(defaults),
|
|
writable: defaults.mutableDefaults,
|
|
configurable: defaults.mutableDefaults,
|
|
enumerable: true
|
|
});
|
|
|
|
return got;
|
|
};
|
|
|
|
module.exports = create;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 365:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const {Transform} = __webpack_require__(413);
|
|
|
|
module.exports = {
|
|
download(response, emitter, downloadBodySize) {
|
|
let downloaded = 0;
|
|
|
|
return new Transform({
|
|
transform(chunk, encoding, callback) {
|
|
downloaded += chunk.length;
|
|
|
|
const percent = downloadBodySize ? downloaded / downloadBodySize : 0;
|
|
|
|
// Let `flush()` be responsible for emitting the last event
|
|
if (percent < 1) {
|
|
emitter.emit('downloadProgress', {
|
|
percent,
|
|
transferred: downloaded,
|
|
total: downloadBodySize
|
|
});
|
|
}
|
|
|
|
callback(null, chunk);
|
|
},
|
|
|
|
flush(callback) {
|
|
emitter.emit('downloadProgress', {
|
|
percent: 1,
|
|
transferred: downloaded,
|
|
total: downloadBodySize
|
|
});
|
|
|
|
callback();
|
|
}
|
|
});
|
|
},
|
|
|
|
upload(request, emitter, uploadBodySize) {
|
|
const uploadEventFrequency = 150;
|
|
let uploaded = 0;
|
|
let progressInterval;
|
|
|
|
emitter.emit('uploadProgress', {
|
|
percent: 0,
|
|
transferred: 0,
|
|
total: uploadBodySize
|
|
});
|
|
|
|
request.once('error', () => {
|
|
clearInterval(progressInterval);
|
|
});
|
|
|
|
request.once('response', () => {
|
|
clearInterval(progressInterval);
|
|
|
|
emitter.emit('uploadProgress', {
|
|
percent: 1,
|
|
transferred: uploaded,
|
|
total: uploadBodySize
|
|
});
|
|
});
|
|
|
|
request.once('socket', socket => {
|
|
const onSocketConnect = () => {
|
|
progressInterval = setInterval(() => {
|
|
const lastUploaded = uploaded;
|
|
/* istanbul ignore next: see #490 (occurs randomly!) */
|
|
const headersSize = request._header ? Buffer.byteLength(request._header) : 0;
|
|
uploaded = socket.bytesWritten - headersSize;
|
|
|
|
// Don't emit events with unchanged progress and
|
|
// prevent last event from being emitted, because
|
|
// it's emitted when `response` is emitted
|
|
if (uploaded === lastUploaded || uploaded === uploadBodySize) {
|
|
return;
|
|
}
|
|
|
|
emitter.emit('uploadProgress', {
|
|
percent: uploadBodySize ? uploaded / uploadBodySize : 0,
|
|
transferred: uploaded,
|
|
total: uploadBodySize
|
|
});
|
|
}, uploadEventFrequency);
|
|
};
|
|
|
|
/* istanbul ignore next: hard to test */
|
|
if (socket.connecting) {
|
|
socket.once('connect', onSocketConnect);
|
|
} else if (socket.writable) {
|
|
// The socket is being reused from pool,
|
|
// so the connect event will not be emitted
|
|
onSocketConnect();
|
|
}
|
|
});
|
|
}
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 375:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const {PassThrough: PassThroughStream} = __webpack_require__(413);
|
|
|
|
module.exports = options => {
|
|
options = {...options};
|
|
|
|
const {array} = options;
|
|
let {encoding} = options;
|
|
const isBuffer = encoding === 'buffer';
|
|
let objectMode = false;
|
|
|
|
if (array) {
|
|
objectMode = !(encoding || isBuffer);
|
|
} else {
|
|
encoding = encoding || 'utf8';
|
|
}
|
|
|
|
if (isBuffer) {
|
|
encoding = null;
|
|
}
|
|
|
|
const stream = new PassThroughStream({objectMode});
|
|
|
|
if (encoding) {
|
|
stream.setEncoding(encoding);
|
|
}
|
|
|
|
let length = 0;
|
|
const chunks = [];
|
|
|
|
stream.on('data', chunk => {
|
|
chunks.push(chunk);
|
|
|
|
if (objectMode) {
|
|
length = chunks.length;
|
|
} else {
|
|
length += chunk.length;
|
|
}
|
|
});
|
|
|
|
stream.getBufferedValue = () => {
|
|
if (array) {
|
|
return chunks;
|
|
}
|
|
|
|
return isBuffer ? Buffer.concat(chunks, length) : chunks.join('');
|
|
};
|
|
|
|
stream.getBufferedLength = () => length;
|
|
|
|
return stream;
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 413:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("stream");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 431:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const os = __webpack_require__(87);
/**
 * Commands
 *
 * Command Format:
 *   ##[name key=value;key=value]message
 *
 * Examples:
 *   ##[warning]This is the user warning message
 *   ##[set-secret name=mypassword]definitelyNotAPassword!
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
class Command {
    constructor(command, properties, message) {
        if (!command) {
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    if (val) {
                        // safely append the val - avoid blowing up when attempting to
                        // call .replace() if message is not a string for some reason
                        cmdStr += `${key}=${escape(`${val || ''}`)},`;
                    }
                }
            }
        }
        cmdStr += CMD_STRING;
        // safely append the message - avoid blowing up when attempting to
        // call .replace() if message is not a string for some reason
        const message = `${this.message || ''}`;
        cmdStr += escapeData(message);
        return cmdStr;
    }
}
function escapeData(s) {
    return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
}
function escape(s) {
    return s
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/]/g, '%5D')
        .replace(/;/g, '%3B');
}
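// Illustrative example (not part of the original source), traced from the code above:
//   issueCommand('set-output', { name: 'myOutput' }, 'hello')
// writes `::set-output name=myOutput,::hello` to stdout; note that this version leaves a
// trailing comma after the last escaped property value.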
//# sourceMappingURL=command.js.map

/***/ }),
|
|
|
|
/***/ 433:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
module.exports = [
|
|
'beforeError',
|
|
'init',
|
|
'beforeRequest',
|
|
'beforeRedirect',
|
|
'beforeRetry',
|
|
'afterResponse'
|
|
];
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 453:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
var once = __webpack_require__(49)
|
|
var eos = __webpack_require__(9)
|
|
var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes
|
|
|
|
var noop = function () {}
|
|
var ancient = /^v?\.0/.test(process.version)
|
|
|
|
var isFn = function (fn) {
|
|
return typeof fn === 'function'
|
|
}
|
|
|
|
var isFS = function (stream) {
|
|
if (!ancient) return false // newer node version do not need to care about fs is a special way
|
|
if (!fs) return false // browser
|
|
return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
|
|
}
|
|
|
|
var isRequest = function (stream) {
|
|
return stream.setHeader && isFn(stream.abort)
|
|
}
|
|
|
|
var destroyer = function (stream, reading, writing, callback) {
|
|
callback = once(callback)
|
|
|
|
var closed = false
|
|
stream.on('close', function () {
|
|
closed = true
|
|
})
|
|
|
|
eos(stream, {readable: reading, writable: writing}, function (err) {
|
|
if (err) return callback(err)
|
|
closed = true
|
|
callback()
|
|
})
|
|
|
|
var destroyed = false
|
|
return function (err) {
|
|
if (closed) return
|
|
if (destroyed) return
|
|
destroyed = true
|
|
|
|
if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
|
|
if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want
|
|
|
|
if (isFn(stream.destroy)) return stream.destroy()
|
|
|
|
callback(err || new Error('stream was destroyed'))
|
|
}
|
|
}
|
|
|
|
var call = function (fn) {
|
|
fn()
|
|
}
|
|
|
|
var pipe = function (from, to) {
|
|
return from.pipe(to)
|
|
}
|
|
|
|
var pump = function () {
|
|
var streams = Array.prototype.slice.call(arguments)
|
|
var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
|
|
|
|
if (Array.isArray(streams[0])) streams = streams[0]
|
|
if (streams.length < 2) throw new Error('pump requires two streams per minimum')
|
|
|
|
var error
|
|
var destroys = streams.map(function (stream, i) {
|
|
var reading = i < streams.length - 1
|
|
var writing = i > 0
|
|
return destroyer(stream, reading, writing, function (err) {
|
|
if (!error) error = err
|
|
if (err) destroys.forEach(call)
|
|
if (reading) return
|
|
destroys.forEach(call)
|
|
callback(error)
|
|
})
|
|
})
|
|
|
|
return streams.reduce(pipe)
|
|
}
|
|
|
|
module.exports = pump
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 456:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const deferToConnect = __webpack_require__(308);
|
|
|
|
module.exports = request => {
|
|
const timings = {
|
|
start: Date.now(),
|
|
socket: null,
|
|
lookup: null,
|
|
connect: null,
|
|
upload: null,
|
|
response: null,
|
|
end: null,
|
|
error: null,
|
|
phases: {
|
|
wait: null,
|
|
dns: null,
|
|
tcp: null,
|
|
request: null,
|
|
firstByte: null,
|
|
download: null,
|
|
total: null
|
|
}
|
|
};
|
|
|
|
const handleError = origin => {
|
|
const emit = origin.emit.bind(origin);
|
|
origin.emit = (event, ...args) => {
|
|
// Catches the `error` event
|
|
if (event === 'error') {
|
|
timings.error = Date.now();
|
|
timings.phases.total = timings.error - timings.start;
|
|
|
|
origin.emit = emit;
|
|
}
|
|
|
|
// Saves the original behavior
|
|
return emit(event, ...args);
|
|
};
|
|
};
|
|
|
|
let uploadFinished = false;
|
|
const onUpload = () => {
|
|
timings.upload = Date.now();
|
|
timings.phases.request = timings.upload - timings.connect;
|
|
};
|
|
|
|
handleError(request);
|
|
|
|
request.once('socket', socket => {
|
|
timings.socket = Date.now();
|
|
timings.phases.wait = timings.socket - timings.start;
|
|
|
|
const lookupListener = () => {
|
|
timings.lookup = Date.now();
|
|
timings.phases.dns = timings.lookup - timings.socket;
|
|
};
|
|
|
|
socket.once('lookup', lookupListener);
|
|
|
|
deferToConnect(socket, () => {
|
|
timings.connect = Date.now();
|
|
|
|
if (timings.lookup === null) {
|
|
socket.removeListener('lookup', lookupListener);
|
|
timings.lookup = timings.connect;
|
|
timings.phases.dns = timings.lookup - timings.socket;
|
|
}
|
|
|
|
timings.phases.tcp = timings.connect - timings.lookup;
|
|
|
|
if (uploadFinished && !timings.upload) {
|
|
onUpload();
|
|
}
|
|
});
|
|
});
|
|
|
|
request.once('finish', () => {
|
|
uploadFinished = true;
|
|
|
|
if (timings.connect) {
|
|
onUpload();
|
|
}
|
|
});
|
|
|
|
request.once('response', response => {
|
|
timings.response = Date.now();
|
|
timings.phases.firstByte = timings.response - timings.upload;
|
|
|
|
handleError(response);
|
|
|
|
response.once('end', () => {
|
|
timings.end = Date.now();
|
|
timings.phases.download = timings.end - timings.response;
|
|
timings.phases.total = timings.end - timings.start;
|
|
});
|
|
});
|
|
|
|
return timings;
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 470:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const command_1 = __webpack_require__(431);
const os = __webpack_require__(87);
const path = __webpack_require__(622);
/**
 * The code to exit an action
 */
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable
 */
function exportVariable(name, val) {
    process.env[name] = val;
    command_1.issueCommand('set-env', { name }, val);
}
exports.exportVariable = exportVariable;
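// Illustrative note (not part of the original source): exportVariable('FOO', 'bar') sets
// process.env.FOO = 'bar' for the current step and emits `::set-env name=FOO,::bar`
// so that later steps in the same job also see the variable.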
/**
 * exports the variable and registers a secret which will get masked from logs
 * @param name the name of the variable to set
 * @param val value of the secret
 */
function exportSecret(name, val) {
    exportVariable(name, val);
    // the runner will error with not implemented
    // leaving the function but raising the error earlier
    command_1.issueCommand('set-secret', {}, val);
    throw new Error('Not implemented.');
}
exports.exportSecret = exportSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath
 */
function addPath(inputPath) {
    command_1.issueCommand('add-path', {}, inputPath);
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return val.trim();
}
exports.getInput = getInput;
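// Illustrative note (not part of the original source): the runner exposes action inputs as
// INPUT_* environment variables, so core.getInput('secrets') reads process.env.INPUT_SECRETS
// and returns its trimmed value ('' if unset, or a thrown error when marked required).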
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store
 */
function setOutput(name, value) {
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue
 * @param message error issue message
 */
function error(message) {
    command_1.issue('error', message);
}
exports.error = error;
/**
 * Adds a warning issue
 * @param message warning issue message
 */
function warning(message) {
    command_1.issue('warning', message);
}
exports.warning = warning;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            endGroup();
        }
        return result;
    });
}
exports.group = group;
//# sourceMappingURL=core.js.map

/***/ }),
|
|
|
|
/***/ 474:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
module.exports = function (obj) {
|
|
var ret = {};
|
|
var keys = Object.keys(Object(obj));
|
|
|
|
for (var i = 0; i < keys.length; i++) {
|
|
ret[keys[i].toLowerCase()] = obj[keys[i]];
|
|
}
|
|
|
|
return ret;
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 482:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = {"_from":"got","_id":"got@9.6.0","_inBundle":false,"_integrity":"sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==","_location":"/got","_phantomChildren":{},"_requested":{"type":"tag","registry":true,"raw":"got","name":"got","escapedName":"got","rawSpec":"","saveSpec":null,"fetchSpec":"latest"},"_requiredBy":["#USER","/"],"_resolved":"https://registry.npmjs.org/got/-/got-9.6.0.tgz","_shasum":"edf45e7d67f99545705de1f7bbeeeb121765ed85","_spec":"got","_where":"C:\\src\\richicoder1\\vault-action","ava":{"concurrency":4},"browser":{"decompress-response":false,"electron":false},"bugs":{"url":"https://github.com/sindresorhus/got/issues"},"bundleDependencies":false,"dependencies":{"@sindresorhus/is":"^0.14.0","@szmarczak/http-timer":"^1.1.2","cacheable-request":"^6.0.0","decompress-response":"^3.3.0","duplexer3":"^0.1.4","get-stream":"^4.1.0","lowercase-keys":"^1.0.1","mimic-response":"^1.0.1","p-cancelable":"^1.0.0","to-readable-stream":"^1.0.0","url-parse-lax":"^3.0.0"},"deprecated":false,"description":"Simplified HTTP requests","devDependencies":{"ava":"^1.1.0","coveralls":"^3.0.0","delay":"^4.1.0","form-data":"^2.3.3","get-port":"^4.0.0","np":"^3.1.0","nyc":"^13.1.0","p-event":"^2.1.0","pem":"^1.13.2","proxyquire":"^2.0.1","sinon":"^7.2.2","slow-stream":"0.0.4","tempfile":"^2.0.0","tempy":"^0.2.1","tough-cookie":"^3.0.0","xo":"^0.24.0"},"engines":{"node":">=8.6"},"files":["source"],"homepage":"https://github.com/sindresorhus/got#readme","keywords":["http","https","get","got","url","uri","request","util","utility","simple","curl","wget","fetch","net","network","electron"],"license":"MIT","main":"source","name":"got","repository":{"type":"git","url":"git+https://github.com/sindresorhus/got.git"},"scripts":{"release":"np","test":"xo && nyc ava"},"version":"9.6.0"};
|
|
|
|
/***/ }),
|
|
|
|
/***/ 504:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const is = __webpack_require__(534);
|
|
|
|
module.exports = body => is.nodeStream(body) && is.function(body.getBoundary);
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 534:
|
|
/***/ (function(module, exports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
/// <reference lib="es2016"/>
|
|
/// <reference lib="es2017.sharedmemory"/>
|
|
/// <reference lib="esnext.asynciterable"/>
|
|
/// <reference lib="dom"/>
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
// TODO: Use the `URL` global when targeting Node.js 10
|
|
// tslint:disable-next-line
|
|
const URLGlobal = typeof URL === 'undefined' ? __webpack_require__(835).URL : URL;
|
|
const toString = Object.prototype.toString;
|
|
const isOfType = (type) => (value) => typeof value === type;
|
|
const isBuffer = (input) => !is.nullOrUndefined(input) && !is.nullOrUndefined(input.constructor) && is.function_(input.constructor.isBuffer) && input.constructor.isBuffer(input);
|
|
const getObjectType = (value) => {
|
|
const objectName = toString.call(value).slice(8, -1);
|
|
if (objectName) {
|
|
return objectName;
|
|
}
|
|
return null;
|
|
};
|
|
const isObjectOfType = (type) => (value) => getObjectType(value) === type;
|
|
function is(value) {
|
|
switch (value) {
|
|
case null:
|
|
return "null" /* null */;
|
|
case true:
|
|
case false:
|
|
return "boolean" /* boolean */;
|
|
default:
|
|
}
|
|
switch (typeof value) {
|
|
case 'undefined':
|
|
return "undefined" /* undefined */;
|
|
case 'string':
|
|
return "string" /* string */;
|
|
case 'number':
|
|
return "number" /* number */;
|
|
case 'symbol':
|
|
return "symbol" /* symbol */;
|
|
default:
|
|
}
|
|
if (is.function_(value)) {
|
|
return "Function" /* Function */;
|
|
}
|
|
if (is.observable(value)) {
|
|
return "Observable" /* Observable */;
|
|
}
|
|
if (Array.isArray(value)) {
|
|
return "Array" /* Array */;
|
|
}
|
|
if (isBuffer(value)) {
|
|
return "Buffer" /* Buffer */;
|
|
}
|
|
const tagType = getObjectType(value);
|
|
if (tagType) {
|
|
return tagType;
|
|
}
|
|
if (value instanceof String || value instanceof Boolean || value instanceof Number) {
|
|
throw new TypeError('Please don\'t use object wrappers for primitive types');
|
|
}
|
|
return "Object" /* Object */;
|
|
}
|
|
(function (is) {
|
|
// tslint:disable-next-line:strict-type-predicates
|
|
const isObject = (value) => typeof value === 'object';
|
|
// tslint:disable:variable-name
|
|
is.undefined = isOfType('undefined');
|
|
is.string = isOfType('string');
|
|
is.number = isOfType('number');
|
|
is.function_ = isOfType('function');
|
|
// tslint:disable-next-line:strict-type-predicates
|
|
is.null_ = (value) => value === null;
|
|
is.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');
|
|
is.boolean = (value) => value === true || value === false;
|
|
is.symbol = isOfType('symbol');
|
|
// tslint:enable:variable-name
|
|
is.numericString = (value) => is.string(value) && value.length > 0 && !Number.isNaN(Number(value));
|
|
is.array = Array.isArray;
|
|
is.buffer = isBuffer;
|
|
is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value);
|
|
is.object = (value) => !is.nullOrUndefined(value) && (is.function_(value) || isObject(value));
|
|
is.iterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.iterator]);
|
|
is.asyncIterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.asyncIterator]);
|
|
is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw);
|
|
is.nativePromise = (value) => isObjectOfType("Promise" /* Promise */)(value);
|
|
const hasPromiseAPI = (value) => !is.null_(value) &&
|
|
isObject(value) &&
|
|
is.function_(value.then) &&
|
|
is.function_(value.catch);
|
|
is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value);
|
|
is.generatorFunction = isObjectOfType("GeneratorFunction" /* GeneratorFunction */);
|
|
is.asyncFunction = isObjectOfType("AsyncFunction" /* AsyncFunction */);
|
|
is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');
|
|
is.regExp = isObjectOfType("RegExp" /* RegExp */);
|
|
is.date = isObjectOfType("Date" /* Date */);
|
|
is.error = isObjectOfType("Error" /* Error */);
|
|
is.map = (value) => isObjectOfType("Map" /* Map */)(value);
|
|
is.set = (value) => isObjectOfType("Set" /* Set */)(value);
|
|
is.weakMap = (value) => isObjectOfType("WeakMap" /* WeakMap */)(value);
|
|
is.weakSet = (value) => isObjectOfType("WeakSet" /* WeakSet */)(value);
|
|
is.int8Array = isObjectOfType("Int8Array" /* Int8Array */);
|
|
is.uint8Array = isObjectOfType("Uint8Array" /* Uint8Array */);
|
|
is.uint8ClampedArray = isObjectOfType("Uint8ClampedArray" /* Uint8ClampedArray */);
|
|
is.int16Array = isObjectOfType("Int16Array" /* Int16Array */);
|
|
is.uint16Array = isObjectOfType("Uint16Array" /* Uint16Array */);
|
|
is.int32Array = isObjectOfType("Int32Array" /* Int32Array */);
|
|
is.uint32Array = isObjectOfType("Uint32Array" /* Uint32Array */);
|
|
is.float32Array = isObjectOfType("Float32Array" /* Float32Array */);
|
|
is.float64Array = isObjectOfType("Float64Array" /* Float64Array */);
|
|
is.arrayBuffer = isObjectOfType("ArrayBuffer" /* ArrayBuffer */);
|
|
is.sharedArrayBuffer = isObjectOfType("SharedArrayBuffer" /* SharedArrayBuffer */);
|
|
is.dataView = isObjectOfType("DataView" /* DataView */);
|
|
is.directInstanceOf = (instance, klass) => Object.getPrototypeOf(instance) === klass.prototype;
|
|
is.urlInstance = (value) => isObjectOfType("URL" /* URL */)(value);
|
|
is.urlString = (value) => {
|
|
if (!is.string(value)) {
|
|
return false;
|
|
}
|
|
try {
|
|
new URLGlobal(value); // tslint:disable-line no-unused-expression
|
|
return true;
|
|
}
|
|
catch (_a) {
|
|
return false;
|
|
}
|
|
};
|
|
is.truthy = (value) => Boolean(value);
|
|
is.falsy = (value) => !value;
|
|
is.nan = (value) => Number.isNaN(value);
|
|
const primitiveTypes = new Set([
|
|
'undefined',
|
|
'string',
|
|
'number',
|
|
'boolean',
|
|
'symbol'
|
|
]);
|
|
is.primitive = (value) => is.null_(value) || primitiveTypes.has(typeof value);
|
|
is.integer = (value) => Number.isInteger(value);
|
|
is.safeInteger = (value) => Number.isSafeInteger(value);
|
|
is.plainObject = (value) => {
|
|
// From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js
|
|
let prototype;
|
|
return getObjectType(value) === "Object" /* Object */ &&
|
|
(prototype = Object.getPrototypeOf(value), prototype === null || // tslint:disable-line:ban-comma-operator
|
|
prototype === Object.getPrototypeOf({}));
|
|
};
|
|
const typedArrayTypes = new Set([
|
|
"Int8Array" /* Int8Array */,
|
|
"Uint8Array" /* Uint8Array */,
|
|
"Uint8ClampedArray" /* Uint8ClampedArray */,
|
|
"Int16Array" /* Int16Array */,
|
|
"Uint16Array" /* Uint16Array */,
|
|
"Int32Array" /* Int32Array */,
|
|
"Uint32Array" /* Uint32Array */,
|
|
"Float32Array" /* Float32Array */,
|
|
"Float64Array" /* Float64Array */
|
|
]);
|
|
is.typedArray = (value) => {
|
|
const objectType = getObjectType(value);
|
|
if (objectType === null) {
|
|
return false;
|
|
}
|
|
return typedArrayTypes.has(objectType);
|
|
};
|
|
const isValidLength = (value) => is.safeInteger(value) && value > -1;
|
|
is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);
|
|
is.inRange = (value, range) => {
|
|
if (is.number(range)) {
|
|
return value >= Math.min(0, range) && value <= Math.max(range, 0);
|
|
}
|
|
if (is.array(range) && range.length === 2) {
|
|
return value >= Math.min(...range) && value <= Math.max(...range);
|
|
}
|
|
throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);
|
|
};
|
|
const NODE_TYPE_ELEMENT = 1;
|
|
const DOM_PROPERTIES_TO_CHECK = [
|
|
'innerHTML',
|
|
'ownerDocument',
|
|
'style',
|
|
'attributes',
|
|
'nodeValue'
|
|
];
|
|
is.domElement = (value) => is.object(value) && value.nodeType === NODE_TYPE_ELEMENT && is.string(value.nodeName) &&
|
|
!is.plainObject(value) && DOM_PROPERTIES_TO_CHECK.every(property => property in value);
|
|
is.observable = (value) => {
|
|
if (!value) {
|
|
return false;
|
|
}
|
|
if (value[Symbol.observable] && value === value[Symbol.observable]()) {
|
|
return true;
|
|
}
|
|
if (value['@@observable'] && value === value['@@observable']()) {
|
|
return true;
|
|
}
|
|
return false;
|
|
};
|
|
is.nodeStream = (value) => !is.nullOrUndefined(value) && isObject(value) && is.function_(value.pipe) && !is.observable(value);
|
|
is.infinite = (value) => value === Infinity || value === -Infinity;
|
|
const isAbsoluteMod2 = (rem) => (value) => is.integer(value) && Math.abs(value % 2) === rem;
|
|
is.even = isAbsoluteMod2(0);
|
|
is.odd = isAbsoluteMod2(1);
|
|
const isWhiteSpaceString = (value) => is.string(value) && /\S/.test(value) === false;
|
|
is.emptyArray = (value) => is.array(value) && value.length === 0;
|
|
is.nonEmptyArray = (value) => is.array(value) && value.length > 0;
|
|
is.emptyString = (value) => is.string(value) && value.length === 0;
|
|
is.nonEmptyString = (value) => is.string(value) && value.length > 0;
|
|
is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);
|
|
is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;
|
|
is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;
|
|
is.emptySet = (value) => is.set(value) && value.size === 0;
|
|
is.nonEmptySet = (value) => is.set(value) && value.size > 0;
|
|
is.emptyMap = (value) => is.map(value) && value.size === 0;
|
|
is.nonEmptyMap = (value) => is.map(value) && value.size > 0;
|
|
const predicateOnArray = (method, predicate, values) => {
|
|
if (is.function_(predicate) === false) {
|
|
throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);
|
|
}
|
|
if (values.length === 0) {
|
|
throw new TypeError('Invalid number of values');
|
|
}
|
|
return method.call(values, predicate);
|
|
};
|
|
// tslint:disable variable-name
|
|
is.any = (predicate, ...values) => predicateOnArray(Array.prototype.some, predicate, values);
|
|
is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);
|
|
// tslint:enable variable-name
|
|
})(is || (is = {}));
|
|
// Some few keywords are reserved, but we'll populate them for Node.js users
|
|
// See https://github.com/Microsoft/TypeScript/issues/2536
|
|
Object.defineProperties(is, {
|
|
class: {
|
|
value: is.class_
|
|
},
|
|
function: {
|
|
value: is.function_
|
|
},
|
|
null: {
|
|
value: is.null_
|
|
}
|
|
});
|
|
exports.default = is;
|
|
// For CommonJS default export support
|
|
module.exports = is;
|
|
module.exports.default = is;
|
|
//# sourceMappingURL=index.js.map
|
|
|
|
/***/ }),
|
|
|
|
/***/ 557:
|
|
/***/ (function(module) {
|
|
|
|
"use strict";
|
|
|
|
|
|
class CancelError extends Error {
|
|
constructor(reason) {
|
|
super(reason || 'Promise was canceled');
|
|
this.name = 'CancelError';
|
|
}
|
|
|
|
get isCanceled() {
|
|
return true;
|
|
}
|
|
}
|
|
|
|
class PCancelable {
|
|
static fn(userFn) {
|
|
return (...args) => {
|
|
return new PCancelable((resolve, reject, onCancel) => {
|
|
args.push(onCancel);
|
|
userFn(...args).then(resolve, reject);
|
|
});
|
|
};
|
|
}
|
|
|
|
constructor(executor) {
|
|
this._cancelHandlers = [];
|
|
this._isPending = true;
|
|
this._isCanceled = false;
|
|
this._rejectOnCancel = true;
|
|
|
|
this._promise = new Promise((resolve, reject) => {
|
|
this._reject = reject;
|
|
|
|
const onResolve = value => {
|
|
this._isPending = false;
|
|
resolve(value);
|
|
};
|
|
|
|
const onReject = error => {
|
|
this._isPending = false;
|
|
reject(error);
|
|
};
|
|
|
|
const onCancel = handler => {
|
|
this._cancelHandlers.push(handler);
|
|
};
|
|
|
|
Object.defineProperties(onCancel, {
|
|
shouldReject: {
|
|
get: () => this._rejectOnCancel,
|
|
set: bool => {
|
|
this._rejectOnCancel = bool;
|
|
}
|
|
}
|
|
});
|
|
|
|
return executor(onResolve, onReject, onCancel);
|
|
});
|
|
}
|
|
|
|
then(onFulfilled, onRejected) {
|
|
return this._promise.then(onFulfilled, onRejected);
|
|
}
|
|
|
|
catch(onRejected) {
|
|
return this._promise.catch(onRejected);
|
|
}
|
|
|
|
finally(onFinally) {
|
|
return this._promise.finally(onFinally);
|
|
}
|
|
|
|
cancel(reason) {
|
|
if (!this._isPending || this._isCanceled) {
|
|
return;
|
|
}
|
|
|
|
if (this._cancelHandlers.length > 0) {
|
|
try {
|
|
for (const handler of this._cancelHandlers) {
|
|
handler();
|
|
}
|
|
} catch (error) {
|
|
this._reject(error);
|
|
}
|
|
}
|
|
|
|
this._isCanceled = true;
|
|
if (this._rejectOnCancel) {
|
|
this._reject(new CancelError(reason));
|
|
}
|
|
}
|
|
|
|
get isCanceled() {
|
|
return this._isCanceled;
|
|
}
|
|
}
|
|
|
|
Object.setPrototypeOf(PCancelable.prototype, Promise.prototype);
|
|
|
|
module.exports = PCancelable;
|
|
module.exports.default = PCancelable;
|
|
|
|
module.exports.CancelError = CancelError;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 584:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const {URL} = __webpack_require__(835); // TODO: Use the `URL` global when targeting Node.js 10
|
|
const util = __webpack_require__(669);
|
|
const EventEmitter = __webpack_require__(614);
|
|
const http = __webpack_require__(605);
|
|
const https = __webpack_require__(211);
|
|
const urlLib = __webpack_require__(835);
|
|
const CacheableRequest = __webpack_require__(946);
|
|
const toReadableStream = __webpack_require__(952);
|
|
const is = __webpack_require__(534);
|
|
const timer = __webpack_require__(456);
|
|
const timedOut = __webpack_require__(18);
|
|
const getBodySize = __webpack_require__(57);
|
|
const getResponse = __webpack_require__(633);
|
|
const progress = __webpack_require__(365);
|
|
const {CacheError, UnsupportedProtocolError, MaxRedirectsError, RequestError, TimeoutError} = __webpack_require__(774);
|
|
const urlToOptions = __webpack_require__(811);
|
|
|
|
const getMethodRedirectCodes = new Set([300, 301, 302, 303, 304, 305, 307, 308]);
|
|
const allMethodRedirectCodes = new Set([300, 303, 307, 308]);
|
|
|
|
module.exports = (options, input) => {
|
|
const emitter = new EventEmitter();
|
|
const redirects = [];
|
|
let currentRequest;
|
|
let requestUrl;
|
|
let redirectString;
|
|
let uploadBodySize;
|
|
let retryCount = 0;
|
|
let shouldAbort = false;
|
|
|
|
const setCookie = options.cookieJar ? util.promisify(options.cookieJar.setCookie.bind(options.cookieJar)) : null;
|
|
const getCookieString = options.cookieJar ? util.promisify(options.cookieJar.getCookieString.bind(options.cookieJar)) : null;
|
|
const agents = is.object(options.agent) ? options.agent : null;
|
|
|
|
const emitError = async error => {
|
|
try {
|
|
for (const hook of options.hooks.beforeError) {
|
|
// eslint-disable-next-line no-await-in-loop
|
|
error = await hook(error);
|
|
}
|
|
|
|
emitter.emit('error', error);
|
|
} catch (error2) {
|
|
emitter.emit('error', error2);
|
|
}
|
|
};
|
|
|
|
const get = async options => {
|
|
const currentUrl = redirectString || requestUrl;
|
|
|
|
if (options.protocol !== 'http:' && options.protocol !== 'https:') {
|
|
throw new UnsupportedProtocolError(options);
|
|
}
|
|
|
|
decodeURI(currentUrl);
|
|
|
|
let fn;
|
|
if (is.function(options.request)) {
|
|
fn = {request: options.request};
|
|
} else {
|
|
fn = options.protocol === 'https:' ? https : http;
|
|
}
|
|
|
|
if (agents) {
|
|
const protocolName = options.protocol === 'https:' ? 'https' : 'http';
|
|
options.agent = agents[protocolName] || options.agent;
|
|
}
|
|
|
|
/* istanbul ignore next: electron.net is broken */
|
|
if (options.useElectronNet && process.versions.electron) {
|
|
const r = ({x: require})['yx'.slice(1)]; // Trick webpack
|
|
const electron = r('electron');
|
|
fn = electron.net || electron.remote.net;
|
|
}
|
|
|
|
if (options.cookieJar) {
|
|
const cookieString = await getCookieString(currentUrl, {});
|
|
|
|
if (is.nonEmptyString(cookieString)) {
|
|
options.headers.cookie = cookieString;
|
|
}
|
|
}
|
|
|
|
let timings;
|
|
const handleResponse = async response => {
|
|
try {
|
|
/* istanbul ignore next: fixes https://github.com/electron/electron/blob/cbb460d47628a7a146adf4419ed48550a98b2923/lib/browser/api/net.js#L59-L65 */
|
|
if (options.useElectronNet) {
|
|
response = new Proxy(response, {
|
|
get: (target, name) => {
|
|
if (name === 'trailers' || name === 'rawTrailers') {
|
|
return [];
|
|
}
|
|
|
|
const value = target[name];
|
|
return is.function(value) ? value.bind(target) : value;
|
|
}
|
|
});
|
|
}
|
|
|
|
const {statusCode} = response;
|
|
response.url = currentUrl;
|
|
response.requestUrl = requestUrl;
|
|
response.retryCount = retryCount;
|
|
response.timings = timings;
|
|
response.redirectUrls = redirects;
|
|
response.request = {
|
|
gotOptions: options
|
|
};
|
|
|
|
const rawCookies = response.headers['set-cookie'];
|
|
if (options.cookieJar && rawCookies) {
|
|
await Promise.all(rawCookies.map(rawCookie => setCookie(rawCookie, response.url)));
|
|
}
|
|
|
|
if (options.followRedirect && 'location' in response.headers) {
|
|
if (allMethodRedirectCodes.has(statusCode) || (getMethodRedirectCodes.has(statusCode) && (options.method === 'GET' || options.method === 'HEAD'))) {
|
|
response.resume(); // We're being redirected, we don't care about the response.
|
|
|
|
if (statusCode === 303) {
|
|
// Server responded with "see other", indicating that the resource exists at another location,
|
|
// and the client should request it from that location via GET or HEAD.
|
|
options.method = 'GET';
|
|
}
|
|
|
|
if (redirects.length >= 10) {
|
|
throw new MaxRedirectsError(statusCode, redirects, options);
|
|
}
|
|
|
|
// Handles invalid URLs. See https://github.com/sindresorhus/got/issues/604
|
|
const redirectBuffer = Buffer.from(response.headers.location, 'binary').toString();
|
|
const redirectURL = new URL(redirectBuffer, currentUrl);
|
|
redirectString = redirectURL.toString();
|
|
|
|
redirects.push(redirectString);
|
|
|
|
const redirectOptions = {
|
|
...options,
|
|
...urlToOptions(redirectURL)
|
|
};
|
|
|
|
for (const hook of options.hooks.beforeRedirect) {
|
|
// eslint-disable-next-line no-await-in-loop
|
|
await hook(redirectOptions);
|
|
}
|
|
|
|
emitter.emit('redirect', response, redirectOptions);
|
|
|
|
await get(redirectOptions);
|
|
return;
|
|
}
|
|
}
|
|
|
|
getResponse(response, options, emitter);
|
|
} catch (error) {
|
|
emitError(error);
|
|
}
|
|
};
|
|
|
|
const handleRequest = request => {
|
|
if (shouldAbort) {
|
|
request.once('error', () => {});
|
|
request.abort();
|
|
return;
|
|
}
|
|
|
|
currentRequest = request;
|
|
|
|
request.once('error', error => {
|
|
if (request.aborted) {
|
|
return;
|
|
}
|
|
|
|
if (error instanceof timedOut.TimeoutError) {
|
|
error = new TimeoutError(error, options);
|
|
} else {
|
|
error = new RequestError(error, options);
|
|
}
|
|
|
|
if (emitter.retry(error) === false) {
|
|
emitError(error);
|
|
}
|
|
});
|
|
|
|
timings = timer(request);
|
|
|
|
progress.upload(request, emitter, uploadBodySize);
|
|
|
|
if (options.gotTimeout) {
|
|
timedOut(request, options.gotTimeout, options);
|
|
}
|
|
|
|
emitter.emit('request', request);
|
|
|
|
const uploadComplete = () => {
|
|
request.emit('upload-complete');
|
|
};
|
|
|
|
try {
|
|
if (is.nodeStream(options.body)) {
|
|
options.body.once('end', uploadComplete);
|
|
options.body.pipe(request);
|
|
options.body = undefined;
|
|
} else if (options.body) {
|
|
request.end(options.body, uploadComplete);
|
|
} else if (input && (options.method === 'POST' || options.method === 'PUT' || options.method === 'PATCH')) {
|
|
input.once('end', uploadComplete);
|
|
input.pipe(request);
|
|
} else {
|
|
request.end(uploadComplete);
|
|
}
|
|
} catch (error) {
|
|
emitError(new RequestError(error, options));
|
|
}
|
|
};
|
|
|
|
if (options.cache) {
|
|
const cacheableRequest = new CacheableRequest(fn.request, options.cache);
|
|
const cacheRequest = cacheableRequest(options, handleResponse);
|
|
|
|
cacheRequest.once('error', error => {
|
|
if (error instanceof CacheableRequest.RequestError) {
|
|
emitError(new RequestError(error, options));
|
|
} else {
|
|
emitError(new CacheError(error, options));
|
|
}
|
|
});
|
|
|
|
cacheRequest.once('request', handleRequest);
|
|
} else {
|
|
// Catches errors thrown by calling fn.request(...)
|
|
try {
|
|
handleRequest(fn.request(options, handleResponse));
|
|
} catch (error) {
|
|
emitError(new RequestError(error, options));
|
|
}
|
|
}
|
|
};
|
|
|
|
emitter.retry = error => {
|
|
let backoff;
|
|
|
|
try {
|
|
backoff = options.retry.retries(++retryCount, error);
|
|
} catch (error2) {
|
|
emitError(error2);
|
|
return;
|
|
}
|
|
|
|
if (backoff) {
|
|
const retry = async options => {
|
|
try {
|
|
for (const hook of options.hooks.beforeRetry) {
|
|
// eslint-disable-next-line no-await-in-loop
|
|
await hook(options, error, retryCount);
|
|
}
|
|
|
|
await get(options);
|
|
} catch (error) {
|
|
emitError(error);
|
|
}
|
|
};
|
|
|
|
setTimeout(retry, backoff, {...options, forceRefresh: true});
|
|
return true;
|
|
}
|
|
|
|
return false;
|
|
};
|
|
|
|
emitter.abort = () => {
|
|
if (currentRequest) {
|
|
currentRequest.once('error', () => {});
|
|
currentRequest.abort();
|
|
} else {
|
|
shouldAbort = true;
|
|
}
|
|
};
|
|
|
|
setImmediate(async () => {
|
|
try {
|
|
// Convert buffer to stream to receive upload progress events (#322)
|
|
const {body} = options;
|
|
if (is.buffer(body)) {
|
|
options.body = toReadableStream(body);
|
|
uploadBodySize = body.length;
|
|
} else {
|
|
uploadBodySize = await getBodySize(options);
|
|
}
|
|
|
|
if (is.undefined(options.headers['content-length']) && is.undefined(options.headers['transfer-encoding'])) {
|
|
if ((uploadBodySize > 0 || options.method === 'PUT') && !is.null(uploadBodySize)) {
|
|
options.headers['content-length'] = uploadBodySize;
|
|
}
|
|
}
|
|
|
|
for (const hook of options.hooks.beforeRequest) {
|
|
// eslint-disable-next-line no-await-in-loop
|
|
await hook(options);
|
|
}
|
|
|
|
requestUrl = options.href || (new URL(options.path, urlLib.format(options))).toString();
|
|
|
|
await get(options);
|
|
} catch (error) {
|
|
emitError(error);
|
|
}
|
|
});
|
|
|
|
return emitter;
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 605:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("http");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 614:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("events");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 622:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("path");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 631:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("net");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 633:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
const decompressResponse = __webpack_require__(861);
|
|
const is = __webpack_require__(534);
|
|
const mimicResponse = __webpack_require__(89);
|
|
const progress = __webpack_require__(365);
|
|
|
|
module.exports = (response, options, emitter) => {
|
|
const downloadBodySize = Number(response.headers['content-length']) || null;
|
|
|
|
const progressStream = progress.download(response, emitter, downloadBodySize);
|
|
|
|
mimicResponse(response, progressStream);
|
|
|
|
const newResponse = options.decompress === true &&
|
|
is.function(decompressResponse) &&
|
|
options.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;
|
|
|
|
if (!options.decompress && ['gzip', 'deflate'].includes(response.headers['content-encoding'])) {
|
|
options.encoding = null;
|
|
}
|
|
|
|
emitter.emit('response', newResponse);
|
|
|
|
emitter.emit('downloadProgress', {
|
|
percent: 0,
|
|
transferred: 0,
|
|
total: downloadBodySize
|
|
});
|
|
|
|
response.pipe(progressStream);
|
|
};
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 669:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("util");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 718:
|
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
|
|
|
"use strict";
|
|
|
|
|
|
var stream = __webpack_require__(413);
|
|
|
|
function DuplexWrapper(options, writable, readable) {
|
|
if (typeof readable === "undefined") {
|
|
readable = writable;
|
|
writable = options;
|
|
options = null;
|
|
}
|
|
|
|
stream.Duplex.call(this, options);
|
|
|
|
if (typeof readable.read !== "function") {
|
|
readable = (new stream.Readable(options)).wrap(readable);
|
|
}
|
|
|
|
this._writable = writable;
|
|
this._readable = readable;
|
|
this._waiting = false;
|
|
|
|
var self = this;
|
|
|
|
writable.once("finish", function() {
|
|
self.end();
|
|
});
|
|
|
|
this.once("finish", function() {
|
|
writable.end();
|
|
});
|
|
|
|
readable.on("readable", function() {
|
|
if (self._waiting) {
|
|
self._waiting = false;
|
|
self._read();
|
|
}
|
|
});
|
|
|
|
readable.once("end", function() {
|
|
self.push(null);
|
|
});
|
|
|
|
if (!options || typeof options.bubbleErrors === "undefined" || options.bubbleErrors) {
|
|
writable.on("error", function(err) {
|
|
self.emit("error", err);
|
|
});
|
|
|
|
readable.on("error", function(err) {
|
|
self.emit("error", err);
|
|
});
|
|
}
|
|
}
|
|
|
|
DuplexWrapper.prototype = Object.create(stream.Duplex.prototype, {constructor: {value: DuplexWrapper}});
|
|
|
|
DuplexWrapper.prototype._write = function _write(input, encoding, done) {
|
|
this._writable.write(input, encoding, done);
|
|
};
|
|
|
|
DuplexWrapper.prototype._read = function _read() {
|
|
var buf;
|
|
var reads = 0;
|
|
while ((buf = this._readable.read()) !== null) {
|
|
this.push(buf);
|
|
reads++;
|
|
}
|
|
if (reads === 0) {
|
|
this._waiting = true;
|
|
}
|
|
};
|
|
|
|
module.exports = function duplex2(options, writable, readable) {
|
|
return new DuplexWrapper(options, writable, readable);
|
|
};
|
|
|
|
module.exports.DuplexWrapper = DuplexWrapper;
|
|
|
|
|
|
/***/ }),
|
|
|
|
/***/ 747:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("fs");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 751:
/***/ (function(module, __unusedexports, __webpack_require__) {

const core = __webpack_require__(470);
const command = __webpack_require__(431);
const got = __webpack_require__(798);

async function exportSecrets() {
    const vaultUrl = core.getInput('url', { required: true });
    const vaultToken = core.getInput('token', { required: true });

    const secretsInput = core.getInput('secrets', { required: true });
    const secrets = parseSecretsInput(secretsInput);

    for (const secret of secrets) {
        const { secretPath, outputName, secretKey } = secret;
        const result = await got(`${vaultUrl}/v1/secret/data/${secretPath}`, {
            headers: {
                'X-Vault-Token': vaultToken
            }
        });

        const parsedResponse = JSON.parse(result.body);
        const vaultKeyData = parsedResponse.data;
        const versionData = vaultKeyData.data;
        const value = versionData[secretKey];
        command.issue('add-mask', value);
        core.exportVariable(outputName, `${value}`);
        core.debug(`✔ ${secretPath} => ${outputName}`);
    }
};
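
// Illustrative note (not part of the original source): the unwrapping above assumes a
// Vault KV v2 style read response, e.g.
//   { "data": { "data": { "npmToken": "s3cr3t" }, "metadata": { "version": 1 } } }
// so parsedResponse.data.data[secretKey] is the secret value that gets masked and exported.
// The key name and value here are hypothetical.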

/**
 * Parses a secrets input string into key paths and their resulting environment variable name.
 * @param {string} secretsInput
 */
function parseSecretsInput(secretsInput) {
    const secrets = secretsInput
        .split(';')
        .filter(key => !!key)
        .map(key => key.trim())
        .filter(key => key.length !== 0);

    /** @type {{ secretPath: string; outputName: string; dataKey: string; }[]} */
    const output = [];
    for (const secret of secrets) {
        let path = secret;
        let outputName = null;

        const renameSigilIndex = secret.lastIndexOf('|');
        if (renameSigilIndex > -1) {
            path = secret.substring(0, renameSigilIndex).trim();
            outputName = secret.substring(renameSigilIndex + 1).trim();

            if (outputName.length < 1) {
                throw Error(`You must provide a value when mapping a secret to a name. Input: "${secret}"`);
            }
        }

        const pathParts = path
            .split(/\s+/)
            .map(part => part.trim())
            .filter(part => part.length !== 0);

        if (pathParts.length !== 2) {
            throw Error(`You must provide a valid path and key. Input: "${secret}"`);
        }

        const [secretPath, secretKey] = pathParts;

        // If we're not using a mapped name, normalize the key path into a variable name.
        if (!outputName) {
            outputName = normalizeOutputKey(secretKey);
        }

        output.push({
            secretPath,
            outputName,
            secretKey
        });
    }
    return output;
}
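
// Illustrative example (not part of the original source; values are hypothetical), derived
// from the parsing rules above:
//   parseSecretsInput('ci/npm npmToken | NPM_TOKEN ; ci/aws accessKey')
//   => [ { secretPath: 'ci/npm', secretKey: 'npmToken', outputName: 'NPM_TOKEN' },
//        { secretPath: 'ci/aws', secretKey: 'accessKey', outputName: 'ACCESSKEY' } ]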

/**
 * Replaces any forward-slash characters with "__" and upper-cases the key so it can be
 * used as the default output environment variable name.
 * @param {string} dataKey
 */
function normalizeOutputKey(dataKey) {
    return dataKey.replace('/', '__').replace(/[^\w-]/, '').toUpperCase();
}

module.exports = {
    exportSecrets,
    parseSecretsInput,
    normalizeOutputKey
};

/***/ }),
|
|
|
|
/***/ 761:
|
|
/***/ (function(module) {
|
|
|
|
module.exports = require("zlib");
|
|
|
|
/***/ }),
|
|
|
|
/***/ 774:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const urlLib = __webpack_require__(835);
const http = __webpack_require__(605);
const PCancelable = __webpack_require__(557);
const is = __webpack_require__(534);

class GotError extends Error {
    constructor(message, error, options) {
        super(message);
        Error.captureStackTrace(this, this.constructor);
        this.name = 'GotError';

        if (!is.undefined(error.code)) {
            this.code = error.code;
        }

        Object.assign(this, {
            host: options.host,
            hostname: options.hostname,
            method: options.method,
            path: options.path,
            socketPath: options.socketPath,
            protocol: options.protocol,
            url: options.href,
            gotOptions: options
        });
    }
}

module.exports.GotError = GotError;

module.exports.CacheError = class extends GotError {
    constructor(error, options) {
        super(error.message, error, options);
        this.name = 'CacheError';
    }
};

module.exports.RequestError = class extends GotError {
    constructor(error, options) {
        super(error.message, error, options);
        this.name = 'RequestError';
    }
};

module.exports.ReadError = class extends GotError {
    constructor(error, options) {
        super(error.message, error, options);
        this.name = 'ReadError';
    }
};

module.exports.ParseError = class extends GotError {
    constructor(error, statusCode, options, data) {
        super(`${error.message} in "${urlLib.format(options)}": \n${data.slice(0, 77)}...`, error, options);
        this.name = 'ParseError';
        this.statusCode = statusCode;
        this.statusMessage = http.STATUS_CODES[this.statusCode];
    }
};

module.exports.HTTPError = class extends GotError {
    constructor(response, options) {
        const {statusCode} = response;
        let {statusMessage} = response;

        if (statusMessage) {
            statusMessage = statusMessage.replace(/\r?\n/g, ' ').trim();
        } else {
            statusMessage = http.STATUS_CODES[statusCode];
        }

        super(`Response code ${statusCode} (${statusMessage})`, {}, options);
        this.name = 'HTTPError';
        this.statusCode = statusCode;
        this.statusMessage = statusMessage;
        this.headers = response.headers;
        this.body = response.body;
    }
};

module.exports.MaxRedirectsError = class extends GotError {
    constructor(statusCode, redirectUrls, options) {
        super('Redirected 10 times. Aborting.', {}, options);
        this.name = 'MaxRedirectsError';
        this.statusCode = statusCode;
        this.statusMessage = http.STATUS_CODES[this.statusCode];
        this.redirectUrls = redirectUrls;
    }
};

module.exports.UnsupportedProtocolError = class extends GotError {
    constructor(options) {
        super(`Unsupported protocol "${options.protocol}"`, {}, options);
        this.name = 'UnsupportedProtocolError';
    }
};

module.exports.TimeoutError = class extends GotError {
    constructor(error, options) {
        super(error.message, {code: 'ETIMEDOUT'}, options);
        this.name = 'TimeoutError';
        this.event = error.event;
    }
};

module.exports.CancelError = PCancelable.CancelError;


/***/ }),

/***/ 794:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const {PassThrough} = __webpack_require__(413);
const duplexer3 = __webpack_require__(718);
const requestAsEventEmitter = __webpack_require__(584);
const {HTTPError, ReadError} = __webpack_require__(774);

module.exports = options => {
    const input = new PassThrough();
    const output = new PassThrough();
    const proxy = duplexer3(input, output);
    const piped = new Set();
    let isFinished = false;

    options.retry.retries = () => 0;

    if (options.body) {
        proxy.write = () => {
            throw new Error('Got\'s stream is not writable when the `body` option is used');
        };
    }

    const emitter = requestAsEventEmitter(options, input);

    // Cancels the request
    proxy._destroy = emitter.abort;

    emitter.on('response', response => {
        const {statusCode} = response;

        response.on('error', error => {
            proxy.emit('error', new ReadError(error, options));
        });

        if (options.throwHttpErrors && statusCode !== 304 && (statusCode < 200 || statusCode > 299)) {
            proxy.emit('error', new HTTPError(response, options), null, response);
            return;
        }

        isFinished = true;

        response.pipe(output);

        for (const destination of piped) {
            if (destination.headersSent) {
                continue;
            }

            for (const [key, value] of Object.entries(response.headers)) {
                // Got gives *decompressed* data. Overriding `content-encoding` header would result in an error.
                // It's not possible to decompress already decompressed data, is it?
                const allowed = options.decompress ? key !== 'content-encoding' : true;
                if (allowed) {
                    destination.setHeader(key, value);
                }
            }

            destination.statusCode = response.statusCode;
        }

        proxy.emit('response', response);
    });

    [
        'error',
        'request',
        'redirect',
        'uploadProgress',
        'downloadProgress'
    ].forEach(event => emitter.on(event, (...args) => proxy.emit(event, ...args)));

    const pipe = proxy.pipe.bind(proxy);
    const unpipe = proxy.unpipe.bind(proxy);
    proxy.pipe = (destination, options) => {
        if (isFinished) {
            throw new Error('Failed to pipe. The response has been emitted already.');
        }

        const result = pipe(destination, options);

        if (Reflect.has(destination, 'setHeader')) {
            piped.add(destination);
        }

        return result;
    };

    proxy.unpipe = stream => {
        piped.delete(stream);
        return unpipe(stream);
    };

    return proxy;
};


/***/ }),

/***/ 798:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const pkg = __webpack_require__(482);
const create = __webpack_require__(338);

const defaults = {
    options: {
        retry: {
            retries: 2,
            methods: [
                'GET',
                'PUT',
                'HEAD',
                'DELETE',
                'OPTIONS',
                'TRACE'
            ],
            statusCodes: [
                408,
                413,
                429,
                500,
                502,
                503,
                504
            ],
            errorCodes: [
                'ETIMEDOUT',
                'ECONNRESET',
                'EADDRINUSE',
                'ECONNREFUSED',
                'EPIPE',
                'ENOTFOUND',
                'ENETUNREACH',
                'EAI_AGAIN'
            ]
        },
        headers: {
            'user-agent': `${pkg.name}/${pkg.version} (https://github.com/sindresorhus/got)`
        },
        hooks: {
            beforeRequest: [],
            beforeRedirect: [],
            beforeRetry: [],
            afterResponse: []
        },
        decompress: true,
        throwHttpErrors: true,
        followRedirect: true,
        stream: false,
        form: false,
        json: false,
        cache: false,
        useElectronNet: false
    },
    mutableDefaults: false
};

const got = create(defaults);

module.exports = got;


/***/ }),

/***/ 811:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const is = __webpack_require__(534);

module.exports = url => {
    const options = {
        protocol: url.protocol,
        hostname: url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,
        hash: url.hash,
        search: url.search,
        pathname: url.pathname,
        href: url.href
    };

    if (is.string(url.port) && url.port.length > 0) {
        options.port = Number(url.port);
    }

    if (url.username || url.password) {
        options.auth = `${url.username}:${url.password}`;
    }

    options.path = is.null(url.search) ? url.pathname : `${url.pathname}${url.search}`;

    return options;
};
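// Illustrative example for the URL-to-request-options helper above (the URL is
// hypothetical and chosen only for demonstration):
//   module.exports(new URL('https://user:pass@vault.example.com:8200/v1/secret?version=2'))
//   // => { protocol: 'https:', hostname: 'vault.example.com', port: 8200,
//   //      auth: 'user:pass', path: '/v1/secret?version=2', ... }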


/***/ }),

/***/ 821:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const {URL} = __webpack_require__(835);
const is = __webpack_require__(534);
const knownHookEvents = __webpack_require__(433);

const merge = (target, ...sources) => {
    for (const source of sources) {
        for (const [key, sourceValue] of Object.entries(source)) {
            if (is.undefined(sourceValue)) {
                continue;
            }

            const targetValue = target[key];
            if (is.urlInstance(targetValue) && (is.urlInstance(sourceValue) || is.string(sourceValue))) {
                target[key] = new URL(sourceValue, targetValue);
            } else if (is.plainObject(sourceValue)) {
                if (is.plainObject(targetValue)) {
                    target[key] = merge({}, targetValue, sourceValue);
                } else {
                    target[key] = merge({}, sourceValue);
                }
            } else if (is.array(sourceValue)) {
                target[key] = merge([], sourceValue);
            } else {
                target[key] = sourceValue;
            }
        }
    }

    return target;
};

const mergeOptions = (...sources) => {
    sources = sources.map(source => source || {});
    const merged = merge({}, ...sources);

    const hooks = {};
    for (const hook of knownHookEvents) {
        hooks[hook] = [];
    }

    for (const source of sources) {
        if (source.hooks) {
            for (const hook of knownHookEvents) {
                hooks[hook] = hooks[hook].concat(source.hooks[hook]);
            }
        }
    }

    merged.hooks = hooks;

    return merged;
};

const mergeInstances = (instances, methods) => {
    const handlers = instances.map(instance => instance.defaults.handler);
    const size = instances.length - 1;

    return {
        methods,
        options: mergeOptions(...instances.map(instance => instance.defaults.options)),
        handler: (options, next) => {
            let iteration = -1;
            const iterate = options => handlers[++iteration](options, iteration === size ? next : iterate);

            return iterate(options);
        }
    };
};

module.exports = merge;
module.exports.options = mergeOptions;
module.exports.instances = mergeInstances;


/***/ }),

/***/ 835:
/***/ (function(module) {

module.exports = require("url");

/***/ }),

/***/ 861:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const PassThrough = __webpack_require__(413).PassThrough;
const zlib = __webpack_require__(761);
const mimicResponse = __webpack_require__(89);

module.exports = response => {
    // TODO: Use Array#includes when targeting Node.js 6
    if (['gzip', 'deflate'].indexOf(response.headers['content-encoding']) === -1) {
        return response;
    }

    const unzip = zlib.createUnzip();
    const stream = new PassThrough();

    mimicResponse(response, stream);

    unzip.on('error', err => {
        if (err.code === 'Z_BUF_ERROR') {
            stream.end();
            return;
        }

        stream.emit('error', err);
    });

    response.pipe(unzip).pipe(stream);

    return stream;
};
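// Note on the Z_BUF_ERROR branch above: zlib reports Z_BUF_ERROR when the compressed
// stream ends before it can produce any output (for example an empty gzip body), so
// that case is treated as a normal end-of-stream instead of being surfaced as an error.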


/***/ }),

/***/ 916:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const EventEmitter = __webpack_require__(614);
const getStream = __webpack_require__(145);
const is = __webpack_require__(534);
const PCancelable = __webpack_require__(557);
const requestAsEventEmitter = __webpack_require__(584);
const {HTTPError, ParseError, ReadError} = __webpack_require__(774);
const {options: mergeOptions} = __webpack_require__(821);
const {reNormalize} = __webpack_require__(86);

const asPromise = options => {
    const proxy = new EventEmitter();

    const promise = new PCancelable((resolve, reject, onCancel) => {
        const emitter = requestAsEventEmitter(options);

        onCancel(emitter.abort);

        emitter.on('response', async response => {
            proxy.emit('response', response);

            const stream = is.null(options.encoding) ? getStream.buffer(response) : getStream(response, options);

            let data;
            try {
                data = await stream;
            } catch (error) {
                reject(new ReadError(error, options));
                return;
            }

            const limitStatusCode = options.followRedirect ? 299 : 399;

            response.body = data;

            try {
                for (const [index, hook] of Object.entries(options.hooks.afterResponse)) {
                    // eslint-disable-next-line no-await-in-loop
                    response = await hook(response, updatedOptions => {
                        updatedOptions = reNormalize(mergeOptions(options, {
                            ...updatedOptions,
                            retry: 0,
                            throwHttpErrors: false
                        }));

                        // Remove any further hooks for that request, because we'll call them anyway.
                        // The loop continues. We don't want duplicates (asPromise recursion).
                        updatedOptions.hooks.afterResponse = options.hooks.afterResponse.slice(0, index);

                        return asPromise(updatedOptions);
                    });
                }
            } catch (error) {
                reject(error);
                return;
            }

            const {statusCode} = response;

            if (options.json && response.body) {
                try {
                    response.body = JSON.parse(response.body);
                } catch (error) {
                    if (statusCode >= 200 && statusCode < 300) {
                        const parseError = new ParseError(error, statusCode, options, data);
                        Object.defineProperty(parseError, 'response', {value: response});
                        reject(parseError);
                        return;
                    }
                }
            }

            if (statusCode !== 304 && (statusCode < 200 || statusCode > limitStatusCode)) {
                const error = new HTTPError(response, options);
                Object.defineProperty(error, 'response', {value: response});
                if (emitter.retry(error) === false) {
                    if (options.throwHttpErrors) {
                        reject(error);
                        return;
                    }

                    resolve(response);
                }

                return;
            }

            resolve(response);
        });

        emitter.once('error', reject);
        [
            'request',
            'redirect',
            'uploadProgress',
            'downloadProgress'
        ].forEach(event => emitter.on(event, (...args) => proxy.emit(event, ...args)));
    });

    promise.on = (name, fn) => {
        proxy.on(name, fn);
        return promise;
    };

    return promise;
};

module.exports = asPromise;


/***/ }),

/***/ 946:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";


const EventEmitter = __webpack_require__(614);
const urlLib = __webpack_require__(835);
const normalizeUrl = __webpack_require__(53);
const getStream = __webpack_require__(16);
const CachePolicy = __webpack_require__(154);
const Response = __webpack_require__(93);
const lowercaseKeys = __webpack_require__(97);
const cloneResponse = __webpack_require__(325);
const Keyv = __webpack_require__(303);

class CacheableRequest {
    constructor(request, cacheAdapter) {
        if (typeof request !== 'function') {
            throw new TypeError('Parameter `request` must be a function');
        }

        this.cache = new Keyv({
            uri: typeof cacheAdapter === 'string' && cacheAdapter,
            store: typeof cacheAdapter !== 'string' && cacheAdapter,
            namespace: 'cacheable-request'
        });

        return this.createCacheableRequest(request);
    }

    createCacheableRequest(request) {
        return (opts, cb) => {
            let url;
            if (typeof opts === 'string') {
                url = normalizeUrlObject(urlLib.parse(opts));
                opts = {};
            } else if (opts instanceof urlLib.URL) {
                url = normalizeUrlObject(urlLib.parse(opts.toString()));
                opts = {};
            } else {
                const [pathname, ...searchParts] = (opts.path || '').split('?');
                const search = searchParts.length > 0 ?
                    `?${searchParts.join('?')}` :
                    '';
                url = normalizeUrlObject({ ...opts, pathname, search });
            }

            opts = {
                headers: {},
                method: 'GET',
                cache: true,
                strictTtl: false,
                automaticFailover: false,
                ...opts,
                ...urlObjectToRequestOptions(url)
            };
            opts.headers = lowercaseKeys(opts.headers);

            const ee = new EventEmitter();
            const normalizedUrlString = normalizeUrl(
                urlLib.format(url),
                {
                    stripWWW: false,
                    removeTrailingSlash: false,
                    stripAuthentication: false
                }
            );
            const key = `${opts.method}:${normalizedUrlString}`;
            let revalidate = false;
            let madeRequest = false;

            const makeRequest = opts => {
                madeRequest = true;
                let requestErrored = false;
                let requestErrorCallback;

                const requestErrorPromise = new Promise(resolve => {
                    requestErrorCallback = () => {
                        if (!requestErrored) {
                            requestErrored = true;
                            resolve();
                        }
                    };
                });

                const handler = response => {
                    if (revalidate && !opts.forceRefresh) {
                        response.status = response.statusCode;
                        const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response);
                        if (!revalidatedPolicy.modified) {
                            const headers = revalidatedPolicy.policy.responseHeaders();
                            response = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url);
                            response.cachePolicy = revalidatedPolicy.policy;
                            response.fromCache = true;
                        }
                    }

                    if (!response.fromCache) {
                        response.cachePolicy = new CachePolicy(opts, response, opts);
                        response.fromCache = false;
                    }

                    let clonedResponse;
                    if (opts.cache && response.cachePolicy.storable()) {
                        clonedResponse = cloneResponse(response);

                        (async () => {
                            try {
                                const bodyPromise = getStream.buffer(response);

                                await Promise.race([
                                    requestErrorPromise,
                                    new Promise(resolve => response.once('end', resolve))
                                ]);

                                if (requestErrored) {
                                    return;
                                }

                                const body = await bodyPromise;

                                const value = {
                                    cachePolicy: response.cachePolicy.toObject(),
                                    url: response.url,
                                    statusCode: response.fromCache ? revalidate.statusCode : response.statusCode,
                                    body
                                };

                                let ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined;
                                if (opts.maxTtl) {
                                    ttl = ttl ? Math.min(ttl, opts.maxTtl) : opts.maxTtl;
                                }

                                await this.cache.set(key, value, ttl);
                            } catch (error) {
                                ee.emit('error', new CacheableRequest.CacheError(error));
                            }
                        })();
                    } else if (opts.cache && revalidate) {
                        (async () => {
                            try {
                                await this.cache.delete(key);
                            } catch (error) {
                                ee.emit('error', new CacheableRequest.CacheError(error));
                            }
                        })();
                    }

                    ee.emit('response', clonedResponse || response);
                    if (typeof cb === 'function') {
                        cb(clonedResponse || response);
                    }
                };

                try {
                    const req = request(opts, handler);
                    req.once('error', requestErrorCallback);
                    req.once('abort', requestErrorCallback);
                    ee.emit('request', req);
                } catch (error) {
                    ee.emit('error', new CacheableRequest.RequestError(error));
                }
            };

            (async () => {
                const get = async opts => {
                    await Promise.resolve();

                    const cacheEntry = opts.cache ? await this.cache.get(key) : undefined;
                    if (typeof cacheEntry === 'undefined') {
                        return makeRequest(opts);
                    }

                    const policy = CachePolicy.fromObject(cacheEntry.cachePolicy);
                    if (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) {
                        const headers = policy.responseHeaders();
                        const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url);
                        response.cachePolicy = policy;
                        response.fromCache = true;

                        ee.emit('response', response);
                        if (typeof cb === 'function') {
                            cb(response);
                        }
                    } else {
                        revalidate = cacheEntry;
                        opts.headers = policy.revalidationHeaders(opts);
                        makeRequest(opts);
                    }
                };

                const errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error));
                this.cache.once('error', errorHandler);
                ee.on('response', () => this.cache.removeListener('error', errorHandler));

                try {
                    await get(opts);
                } catch (error) {
                    if (opts.automaticFailover && !madeRequest) {
                        makeRequest(opts);
                    }

                    ee.emit('error', new CacheableRequest.CacheError(error));
                }
            })();

            return ee;
        };
    }
}

function urlObjectToRequestOptions(url) {
    const options = { ...url };
    options.path = `${url.pathname || '/'}${url.search || ''}`;
    delete options.pathname;
    delete options.search;
    return options;
}

function normalizeUrlObject(url) {
    // If url was parsed by url.parse or new URL:
    // - hostname will be set
    // - host will be hostname[:port]
    // - port will be set if it was explicit in the parsed string
    // Otherwise, url was from request options:
    // - hostname or host may be set
    // - host shall not have port encoded
    return {
        protocol: url.protocol,
        auth: url.auth,
        hostname: url.hostname || url.host || 'localhost',
        port: url.port,
        pathname: url.pathname,
        search: url.search
    };
}

CacheableRequest.RequestError = class extends Error {
    constructor(error) {
        super(error.message);
        this.name = 'RequestError';
        Object.assign(this, error);
    }
};

CacheableRequest.CacheError = class extends Error {
    constructor(error) {
        super(error.message);
        this.name = 'CacheError';
        Object.assign(this, error);
    }
};

module.exports = CacheableRequest;


/***/ }),

/***/ 952:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const {Readable} = __webpack_require__(413);

module.exports = input => (
    new Readable({
        read() {
            this.push(input);
            this.push(null);
        }
    })
);


/***/ }),

/***/ 966:
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";

const {PassThrough} = __webpack_require__(413);

module.exports = options => {
    options = Object.assign({}, options);

    const {array} = options;
    let {encoding} = options;
    const buffer = encoding === 'buffer';
    let objectMode = false;

    if (array) {
        objectMode = !(encoding || buffer);
    } else {
        encoding = encoding || 'utf8';
    }

    if (buffer) {
        encoding = null;
    }

    let len = 0;
    const ret = [];
    const stream = new PassThrough({objectMode});

    if (encoding) {
        stream.setEncoding(encoding);
    }

    stream.on('data', chunk => {
        ret.push(chunk);

        if (objectMode) {
            len = ret.length;
        } else {
            len += chunk.length;
        }
    });

    stream.getBufferedValue = () => {
        if (array) {
            return ret;
        }

        return buffer ? Buffer.concat(ret, len) : ret.join('');
    };

    stream.getBufferedLength = () => len;

    return stream;
};


/***/ })

/******/ });