Mirror of https://github.com/actions/checkout.git (synced 2024-11-21 19:09:32 +01:00)
Add backports to v2 branch (#1040)
* Update licensed version
* Backport for submodule command wrapping
* Update NPM packages
* Update dist/index.js
* Rebuild using Node 12
* Rebuild after a more aggressive cleanup of local files
* Backport change to replace datadog/squid with ubuntu/squid
parent e2f20e631a
commit dc323e67f1
16 changed files with 722 additions and 3805 deletions
.github/workflows/test.yml (vendored): 2 changed lines
@@ -142,7 +142,7 @@ jobs:
       options: --dns 127.0.0.1
     services:
       squid-proxy:
-        image: datadog/squid:latest
+        image: ubuntu/squid:latest
         ports:
           - 3128:3128
     env:
BIN .licenses/npm/@actions/core.dep.yml (generated): binary file not shown
BIN .licenses/npm/@actions/http-client-2.0.1.dep.yml (generated, normal file): binary file not shown
BIN .licenses/npm/@actions/io.dep.yml (generated): binary file not shown
BIN .licenses/npm/node-fetch.dep.yml (generated): binary file not shown
BIN .licenses/npm/qs.dep.yml (generated): binary file not shown
Binary file not shown.
BIN .licenses/npm/uuid-8.3.2.dep.yml (generated, normal file): binary file not shown
dist/index.js (vendored): 338 changed lines
@@ -98,6 +98,25 @@ module.exports = Octokit;
 
 "use strict";
 
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
@@ -108,11 +127,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const childProcess = __webpack_require__(129);
-const path = __webpack_require__(622);
+exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0;
+const assert_1 = __webpack_require__(357);
+const childProcess = __importStar(__webpack_require__(129));
+const path = __importStar(__webpack_require__(622));
 const util_1 = __webpack_require__(669);
-const ioUtil = __webpack_require__(672);
+const ioUtil = __importStar(__webpack_require__(672));
 const exec = util_1.promisify(childProcess.exec);
+const execFile = util_1.promisify(childProcess.execFile);
 /**
  * Copies a file or folder.
  * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
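The updated bundle promisifies child_process.execFile alongside exec, which later hunks use for shell-free deletion. A minimal standalone sketch of that pattern in plain Node.js; the command and arguments are examples, not taken from the action:

// Sketch of the util.promisify(childProcess.execFile) pattern used above.
const util = require('util');
const childProcess = require('child_process');

const exec = util.promisify(childProcess.exec);
const execFile = util.promisify(childProcess.execFile);

async function main() {
    // exec runs a shell and parses the whole string; execFile passes the
    // argument array straight to the program, so nothing is shell-expanded.
    const viaShell = await exec('node --version');
    const direct = await execFile('node', ['--version']);
    console.log(viaShell.stdout.trim(), direct.stdout.trim());
}

main().catch(err => {
    console.error(err);
    process.exit(1);
});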
@@ -123,14 +145,14 @@ const exec = util_1.promisify(childProcess.exec);
  */
 function cp(source, dest, options = {}) {
     return __awaiter(this, void 0, void 0, function* () {
-        const { force, recursive } = readCopyOptions(options);
+        const { force, recursive, copySourceDirectory } = readCopyOptions(options);
         const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
         // Dest is an existing file, but not forcing
         if (destStat && destStat.isFile() && !force) {
             return;
         }
         // If dest is an existing directory, should copy inside.
-        const newDest = destStat && destStat.isDirectory()
+        const newDest = destStat && destStat.isDirectory() && copySourceDirectory
             ? path.join(dest, path.basename(source))
             : dest;
         if (!(yield ioUtil.exists(source))) {
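The new copySourceDirectory option decides whether cp places the source directory itself inside dest or only its contents. A short usage sketch against the @actions/io API shown here; the paths are placeholders:

// Usage sketch for the copySourceDirectory option. Paths are placeholders.
const io = require('@actions/io');

async function demo() {
    // Default (true): copies ./src itself, producing ./backup/src/...
    await io.cp('./src', './backup', { recursive: true });

    // false: copies only the contents of ./src, producing ./backup/<files>
    await io.cp('./src', './backup', { recursive: true, copySourceDirectory: false });
}

demo().catch(console.error);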
@@ -195,12 +217,22 @@ function rmRF(inputPath) {
         if (ioUtil.IS_WINDOWS) {
             // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
             // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
+            // Check for invalid characters
+            // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
+            if (/[*"<>|]/.test(inputPath)) {
+                throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows');
+            }
             try {
+                const cmdPath = ioUtil.getCmdPath();
                 if (yield ioUtil.isDirectory(inputPath, true)) {
-                    yield exec(`rd /s /q "${inputPath}"`);
+                    yield exec(`${cmdPath} /s /c "rd /s /q "%inputPath%""`, {
+                        env: { inputPath }
+                    });
                 }
                 else {
-                    yield exec(`del /f /a "${inputPath}"`);
+                    yield exec(`${cmdPath} /s /c "del /f /a "%inputPath%""`, {
+                        env: { inputPath }
+                    });
                 }
             }
             catch (err) {
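On Windows, rmRF no longer splices the path into the rd/del command string; the path travels in the child environment and cmd.exe expands %inputPath% itself, so quotes or other metacharacters in a file name are not interpreted as command syntax. A standalone sketch of that technique (Windows-only illustration, not the action's own code):

// Sketch: hand an untrusted path to cmd.exe through an environment variable
// rather than string interpolation.
const util = require('util');
const childProcess = require('child_process');

const exec = util.promisify(childProcess.exec);

async function removeDirectory(inputPath) {
    const cmdPath = process.env['COMSPEC'] || 'cmd.exe';
    // cmd.exe reads %inputPath% from the environment of the child process,
    // so this process never concatenates the value into the command line.
    await exec(`${cmdPath} /s /c "rd /s /q "%inputPath%""`, {
        env: { inputPath }
    });
}

// removeDirectory('C:\\temp\\build output').catch(console.error);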
@@ -233,7 +265,7 @@ function rmRF(inputPath) {
             return;
         }
         if (isDir) {
-            yield exec(`rm -rf "${inputPath}"`);
+            yield execFile(`rm`, [`-rf`, `${inputPath}`]);
         }
         else {
             yield ioUtil.unlink(inputPath);
@@ -251,7 +283,8 @@ exports.rmRF = rmRF;
  */
 function mkdirP(fsPath) {
     return __awaiter(this, void 0, void 0, function* () {
-        yield ioUtil.mkdirP(fsPath);
+        assert_1.ok(fsPath, 'a path argument must be provided');
+        yield ioUtil.mkdir(fsPath, { recursive: true });
     });
 }
 exports.mkdirP = mkdirP;
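mkdirP now delegates to Node's native recursive mkdir instead of the hand-rolled recursion that a later hunk deletes from io-util.js. A small sketch of the same call in plain Node.js; the directory name is a placeholder:

// Sketch: fs.promises.mkdir with { recursive: true } creates any missing
// parents and succeeds silently if the directory already exists.
const fs = require('fs');
const assert = require('assert');

async function mkdirP(fsPath) {
    assert.ok(fsPath, 'a path argument must be provided');
    await fs.promises.mkdir(fsPath, { recursive: true });
}

mkdirP('./_temp/nested/dirs').catch(console.error);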
@@ -279,62 +312,80 @@ function which(tool, check) {
                     throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
                 }
             }
             return result;
         }
-        try {
-            // build the list of extensions to try
-            const extensions = [];
-            if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
-                for (const extension of process.env.PATHEXT.split(path.delimiter)) {
-                    if (extension) {
-                        extensions.push(extension);
-                    }
-                }
-            }
-            // if it's rooted, return it if exists. otherwise return empty.
-            if (ioUtil.isRooted(tool)) {
-                const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
-                if (filePath) {
-                    return filePath;
-                }
-                return '';
-            }
-            // if any path separators, return empty
-            if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
-                return '';
-            }
-            // build the list of directories
-            //
-            // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
-            // it feels like we should not do this. Checking the current directory seems like more of a use
-            // case of a shell, and the which() function exposed by the toolkit should strive for consistency
-            // across platforms.
-            const directories = [];
-            if (process.env.PATH) {
-                for (const p of process.env.PATH.split(path.delimiter)) {
-                    if (p) {
-                        directories.push(p);
-                    }
-                }
-            }
-            // return the first match
-            for (const directory of directories) {
-                const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);
-                if (filePath) {
-                    return filePath;
-                }
-            }
-            return '';
-        }
-        catch (err) {
-            throw new Error(`which failed with message ${err.message}`);
-        }
+        const matches = yield findInPath(tool);
+        if (matches && matches.length > 0) {
+            return matches[0];
+        }
+        return '';
     });
 }
 exports.which = which;
+/**
+ * Returns a list of all occurrences of the given tool on the system path.
+ *
+ * @returns Promise<string[]> the paths of the tool
+ */
+function findInPath(tool) {
+    return __awaiter(this, void 0, void 0, function* () {
+        if (!tool) {
+            throw new Error("parameter 'tool' is required");
+        }
+        // build the list of extensions to try
+        const extensions = [];
+        if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) {
+            for (const extension of process.env['PATHEXT'].split(path.delimiter)) {
+                if (extension) {
+                    extensions.push(extension);
+                }
+            }
+        }
+        // if it's rooted, return it if exists. otherwise return empty.
+        if (ioUtil.isRooted(tool)) {
+            const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
+            if (filePath) {
+                return [filePath];
+            }
+            return [];
+        }
+        // if any path separators, return empty
+        if (tool.includes(path.sep)) {
+            return [];
+        }
+        // build the list of directories
+        //
+        // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
+        // it feels like we should not do this. Checking the current directory seems like more of a use
+        // case of a shell, and the which() function exposed by the toolkit should strive for consistency
+        // across platforms.
+        const directories = [];
+        if (process.env.PATH) {
+            for (const p of process.env.PATH.split(path.delimiter)) {
+                if (p) {
+                    directories.push(p);
+                }
+            }
+        }
+        // find all matches
+        const matches = [];
+        for (const directory of directories) {
+            const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions);
+            if (filePath) {
+                matches.push(filePath);
+            }
+        }
+        return matches;
+    });
+}
+exports.findInPath = findInPath;
 function readCopyOptions(options) {
     const force = options.force == null ? true : options.force;
     const recursive = Boolean(options.recursive);
-    return { force, recursive };
+    const copySourceDirectory = options.copySourceDirectory == null
+        ? true
+        : Boolean(options.copySourceDirectory);
+    return { force, recursive, copySourceDirectory };
 }
 function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
     return __awaiter(this, void 0, void 0, function* () {
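which() now simply takes the first result of the new findInPath() helper, which returns every match on PATH instead of stopping at the first. A usage sketch against the exported API; the tool name is just an example:

// Usage sketch for the which/findInPath pair exported above.
const io = require('@actions/io');

async function locateGit() {
    // First match on PATH; returns '' when check is false and nothing matches.
    const first = await io.which('git', false);

    // Every match on PATH, in PATH order.
    const all = await io.findInPath('git');

    console.log({ first, all });
}

locateGit().catch(console.error);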
@@ -7064,7 +7115,9 @@ class GitAuthHelper {
             // Configure a placeholder value. This approach avoids the credential being captured
             // by process creation audit events, which are commonly logged. For more information,
             // refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
-            const output = yield this.git.submoduleForeach(`git config --local '${this.tokenConfigKey}' '${this.tokenPlaceholderConfigValue}' && git config --local --show-origin --name-only --get-regexp remote.origin.url`, this.settings.nestedSubmodules);
+            const output = yield this.git.submoduleForeach(
+            // wrap the pipeline in quotes to make sure it's handled properly by submoduleForeach, rather than just the first part of the pipeline
+            `sh -c "git config --local '${this.tokenConfigKey}' '${this.tokenPlaceholderConfigValue}' && git config --local --show-origin --name-only --get-regexp remote.origin.url"`, this.settings.nestedSubmodules);
             // Replace the placeholder
             const configPaths = output.match(/(?<=(^|\n)file:)[^\t]+(?=\tremote\.origin\.url)/g) || [];
             for (const configPath of configPaths) {
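Per the comment in the diff, without the wrapper only the first part of the && pipeline was being handled by submoduleForeach; quoting the whole pipeline behind sh -c keeps it as a single command for every submodule. A sketch of the two command strings; the config key and value below are placeholders, not the action's real token values:

// Sketch of the command strings with and without the sh -c wrapper.
// The key and value are placeholders.
const key = 'http.https://github.com/.extraheader';
const value = 'AUTHORIZATION: basic ***';

// Unwrapped: the text after && may not be treated as part of the per-submodule
// command, so only the first git config is guaranteed to run in each submodule.
const unwrapped = `git config --local '${key}' '${value}' && git config --local --show-origin --name-only --get-regexp remote.origin.url`;

// Wrapped: the whole pipeline is one argument to sh -c, so both commands run
// together inside every submodule that foreach visits.
const wrapped = `sh -c "git config --local '${key}' '${value}' && git config --local --show-origin --name-only --get-regexp remote.origin.url"`;

console.log(`git submodule foreach ${unwrapped}`);
console.log(`git submodule foreach ${wrapped}`);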
@@ -7231,7 +7284,9 @@ class GitAuthHelper {
             }
         }
         const pattern = regexpHelper.escape(configKey);
-        yield this.git.submoduleForeach(`git config --local --name-only --get-regexp '${pattern}' && git config --local --unset-all '${configKey}' || :`, true);
+        yield this.git.submoduleForeach(
+        // wrap the pipeline in quotes to make sure it's handled properly by submoduleForeach, rather than just the first part of the pipeline
+        `sh -c "git config --local --name-only --get-regexp '${pattern}' && git config --local --unset-all '${configKey}' || :"`, true);
         });
     }
 }
@@ -10567,7 +10622,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 
 const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
 
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
@@ -10830,9 +10885,17 @@ AbortError.prototype = Object.create(Error.prototype);
 AbortError.prototype.constructor = AbortError;
 AbortError.prototype.name = 'AbortError';
 
+const URL$1 = Url.URL || whatwgUrl.URL;
+
 // fix an issue where "PassThrough", "resolve" aren't a named export for node <10
 const PassThrough$1 = Stream.PassThrough;
-const resolve_url = Url.resolve;
+
+const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
+	const orig = new URL$1(original).hostname;
+	const dest = new URL$1(destination).hostname;
+
+	return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
+};
 
 /**
  * Fetch function
@@ -10920,7 +10983,19 @@ function fetch(url, opts) {
 				const location = headers.get('Location');
 
 				// HTTP fetch step 5.3
-				const locationURL = location === null ? null : resolve_url(request.url, location);
+				let locationURL = null;
+				try {
+					locationURL = location === null ? null : new URL$1(location, request.url).toString();
+				} catch (err) {
+					// error here can only be invalid URL in Location: header
+					// do not throw when options.redirect == manual
+					// let the user extract the errorneous redirect URL
+					if (request.redirect !== 'manual') {
+						reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
+						finalize();
+						return;
+					}
+				}
 
 				// HTTP fetch step 5.5
 				switch (request.redirect) {
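The bundled node-fetch now resolves the Location header with the WHATWG URL constructor, which handles relative redirects and throws on malformed values so the client can report an invalid-redirect error instead of following it. A standalone sketch of that resolution step; the URLs are examples:

// Sketch: resolve a Location header against the original request URL.
function resolveRedirect(requestUrl, location) {
    if (location === null) {
        return null;
    }
    try {
        // Works for absolute and relative Location values alike.
        return new URL(location, requestUrl).toString();
    } catch (err) {
        // An unparsable Location header is reported rather than followed.
        throw new Error(`invalid redirect URL: ${location}`);
    }
}

console.log(resolveRedirect('https://example.com/a/b', '/login'));
// https://example.com/login
console.log(resolveRedirect('https://example.com/a/b', 'https://other.example/x'));
// https://other.example/x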
@@ -10968,6 +11043,12 @@ function fetch(url, opts) {
 					size: request.size
 				};
 
+				if (!isDomainOrSubdomain(request.url, locationURL)) {
+					for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
+						requestOpts.headers.delete(name);
+					}
+				}
+
 				// HTTP-redirect fetch step 9
 				if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
 					reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
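When a redirect leaves the original host for one that is neither the same host nor a subdomain of it, node-fetch now drops credential-bearing headers before following, so an Authorization header or cookie cannot leak to an unrelated server. A small sketch of the same check; hostnames are examples and the helper names are mine, not node-fetch's:

// Sketch: drop sensitive headers when a redirect crosses to an unrelated host.
function isSameOrSubdomain(baseUrl, targetUrl) {
    const base = new URL(baseUrl).hostname;
    const target = new URL(targetUrl).hostname;
    return target === base ||
        (target.endsWith(base) && target[target.length - base.length - 1] === '.');
}

function sanitizeRedirectHeaders(originalUrl, redirectUrl, headers) {
    if (!isSameOrSubdomain(originalUrl, redirectUrl)) {
        for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
            delete headers[name];
        }
    }
    return headers;
}

const headers = { authorization: 'Bearer abc123', accept: '*/*' };
console.log(sanitizeRedirectHeaders('https://api.example.com/', 'https://evil.example.net/', headers));
// { accept: '*/*' }  (authorization removed)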
@@ -13809,6 +13890,7 @@ var encode = function encode(str, defaultEncoder, charset, kind, format) {
 
         i += 1;
         c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF));
+        /* eslint operator-linebreak: [2, "before"] */
         out += hexTable[0xF0 | (c >> 18)]
             + hexTable[0x80 | ((c >> 12) & 0x3F)]
             + hexTable[0x80 | ((c >> 6) & 0x3F)]
@@ -16310,6 +16392,25 @@ module.exports = require("util");
 
 "use strict";
 
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
@@ -16321,9 +16422,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 var _a;
 Object.defineProperty(exports, "__esModule", { value: true });
-const assert_1 = __webpack_require__(357);
-const fs = __webpack_require__(747);
-const path = __webpack_require__(622);
+exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rename = exports.readlink = exports.readdir = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;
+const fs = __importStar(__webpack_require__(747));
+const path = __importStar(__webpack_require__(622));
 _a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
 exports.IS_WINDOWS = process.platform === 'win32';
 function exists(fsPath) {
@@ -16364,49 +16465,6 @@ function isRooted(p) {
     return p.startsWith('/');
 }
 exports.isRooted = isRooted;
-/**
- * Recursively create a directory at `fsPath`.
- *
- * This implementation is optimistic, meaning it attempts to create the full
- * path first, and backs up the path stack from there.
- *
- * @param fsPath The path to create
- * @param maxDepth The maximum recursion depth
- * @param depth The current recursion depth
- */
-function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
-    return __awaiter(this, void 0, void 0, function* () {
-        assert_1.ok(fsPath, 'a path argument must be provided');
-        fsPath = path.resolve(fsPath);
-        if (depth >= maxDepth)
-            return exports.mkdir(fsPath);
-        try {
-            yield exports.mkdir(fsPath);
-            return;
-        }
-        catch (err) {
-            switch (err.code) {
-                case 'ENOENT': {
-                    yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
-                    yield exports.mkdir(fsPath);
-                    return;
-                }
-                default: {
-                    let stats;
-                    try {
-                        stats = yield exports.stat(fsPath);
-                    }
-                    catch (err2) {
-                        throw err;
-                    }
-                    if (!stats.isDirectory())
-                        throw err;
-                }
-            }
-        }
-    });
-}
-exports.mkdirP = mkdirP;
 /**
  * Best effort attempt to determine whether a file exists and is executable.
  * @param filePath file path to check
@@ -16503,6 +16561,12 @@ function isUnixExecutable(stats) {
         ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
         ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
 }
+// Get the path of cmd.exe in windows
+function getCmdPath() {
+    var _a;
+    return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`;
+}
+exports.getCmdPath = getCmdPath;
 //# sourceMappingURL=io-util.js.map
 
 /***/ }),
@@ -17452,7 +17516,7 @@ var parseObject = function (chain, val, options, valuesParsed) {
                 ) {
                     obj = [];
                     obj[index] = leaf;
-                } else {
+                } else if (cleanRoot !== '__proto__') {
                     obj[cleanRoot] = leaf;
                 }
             }
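The extra cleanRoot check keeps a bracketed __proto__ key from ever being assigned during parsing, the usual prototype-pollution hardening. A quick check against the qs API; the query string is an example:

// Quick check: a __proto__ segment in the query string is ignored by qs.parse.
const qs = require('qs');

const parsed = qs.parse('a[__proto__][polluted]=1&b=2');
console.log(JSON.stringify(parsed));   // "b" survives, the __proto__ branch does not
console.log(({}).polluted);            // undefined: Object.prototype untouched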
@@ -34581,6 +34645,7 @@ var arrayPrefixGenerators = {
 };
 
 var isArray = Array.isArray;
+var split = String.prototype.split;
 var push = Array.prototype.push;
 var pushToArray = function (arr, valueOrArray) {
     push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]);
@@ -34617,10 +34682,13 @@ var isNonNullishPrimitive = function isNonNullishPrimitive(v) {
         || typeof v === 'bigint';
 };
 
+var sentinel = {};
+
 var stringify = function stringify(
     object,
     prefix,
     generateArrayPrefix,
+    commaRoundTrip,
     strictNullHandling,
     skipNulls,
     encoder,
@@ -34636,8 +34704,23 @@ var stringify = function stringify(
 ) {
     var obj = object;
 
-    if (sideChannel.has(object)) {
-        throw new RangeError('Cyclic object value');
-    }
+    var tmpSc = sideChannel;
+    var step = 0;
+    var findFlag = false;
+    while ((tmpSc = tmpSc.get(sentinel)) !== void undefined && !findFlag) {
+        // Where object last appeared in the ref tree
+        var pos = tmpSc.get(object);
+        step += 1;
+        if (typeof pos !== 'undefined') {
+            if (pos === step) {
+                throw new RangeError('Cyclic object value');
+            } else {
+                findFlag = true; // Break while
+            }
+        }
+        if (typeof tmpSc.get(sentinel) === 'undefined') {
+            step = 0;
+        }
+    }
 
     if (typeof filter === 'function') {
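The flat sideChannel.has() test becomes a walk up a chain of side-channel maps keyed by the sentinel, so an object that merely appears twice in different places no longer trips the detector, while a genuine self-reference still raises RangeError. A quick check against the qs API with example data:

// Quick check of cycle handling in qs.stringify. Example data only.
const qs = require('qs');

// The same object referenced twice (no cycle) stringifies fine.
const shared = { x: 1 };
console.log(qs.stringify({ a: shared, b: shared }, { encode: false }));
// a[x]=1&b[x]=1

// A true self-reference is still rejected.
const cyclic = {};
cyclic.self = cyclic;
try {
    qs.stringify(cyclic);
} catch (err) {
    console.log(err instanceof RangeError, err.message);
    // true 'Cyclic object value'
}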
@@ -34664,6 +34747,14 @@ var stringify = function stringify(
     if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) {
         if (encoder) {
             var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key', format);
+            if (generateArrayPrefix === 'comma' && encodeValuesOnly) {
+                var valuesArray = split.call(String(obj), ',');
+                var valuesJoined = '';
+                for (var i = 0; i < valuesArray.length; ++i) {
+                    valuesJoined += (i === 0 ? '' : ',') + formatter(encoder(valuesArray[i], defaults.encoder, charset, 'value', format));
+                }
+                return [formatter(keyValue) + (commaRoundTrip && isArray(obj) && valuesArray.length === 1 ? '[]' : '') + '=' + valuesJoined];
+            }
             return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value', format))];
         }
         return [formatter(prefix) + '=' + formatter(String(obj))];
@@ -34678,7 +34769,7 @@ var stringify = function stringify(
     var objKeys;
     if (generateArrayPrefix === 'comma' && isArray(obj)) {
         // we need to join elements in
-        objKeys = [{ value: obj.length > 0 ? obj.join(',') || null : undefined }];
+        objKeys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }];
     } else if (isArray(filter)) {
         objKeys = filter;
     } else {
@@ -34686,24 +34777,28 @@ var stringify = function stringify(
         objKeys = sort ? keys.sort(sort) : keys;
     }
 
-    for (var i = 0; i < objKeys.length; ++i) {
-        var key = objKeys[i];
-        var value = typeof key === 'object' && key.value !== undefined ? key.value : obj[key];
+    var adjustedPrefix = commaRoundTrip && isArray(obj) && obj.length === 1 ? prefix + '[]' : prefix;
+
+    for (var j = 0; j < objKeys.length; ++j) {
+        var key = objKeys[j];
+        var value = typeof key === 'object' && typeof key.value !== 'undefined' ? key.value : obj[key];
 
         if (skipNulls && value === null) {
             continue;
         }
 
         var keyPrefix = isArray(obj)
-            ? typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix
-            : prefix + (allowDots ? '.' + key : '[' + key + ']');
+            ? typeof generateArrayPrefix === 'function' ? generateArrayPrefix(adjustedPrefix, key) : adjustedPrefix
+            : adjustedPrefix + (allowDots ? '.' + key : '[' + key + ']');
 
-        sideChannel.set(object, true);
+        sideChannel.set(object, step);
         var valueSideChannel = getSideChannel();
+        valueSideChannel.set(sentinel, sideChannel);
         pushToArray(values, stringify(
             value,
             keyPrefix,
             generateArrayPrefix,
+            commaRoundTrip,
             strictNullHandling,
             skipNulls,
             encoder,
@@ -34727,7 +34822,7 @@ var normalizeStringifyOptions = function normalizeStringifyOptions(opts) {
         return defaults;
     }
 
-    if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') {
+    if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') {
         throw new TypeError('Encoder has to be a function.');
     }
 
@@ -34800,6 +34895,10 @@ module.exports = function (object, opts) {
     }
 
     var generateArrayPrefix = arrayPrefixGenerators[arrayFormat];
+    if (opts && 'commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') {
+        throw new TypeError('`commaRoundTrip` must be a boolean, or absent');
+    }
+    var commaRoundTrip = generateArrayPrefix === 'comma' && opts && opts.commaRoundTrip;
 
     if (!objKeys) {
         objKeys = Object.keys(obj);
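commaRoundTrip only matters for arrayFormat: 'comma' with single-element arrays: it appends [] to the key so that a later qs.parse recovers an array rather than a scalar. A usage sketch with example data, encoding disabled for readability:

// Usage sketch for the commaRoundTrip option validated above.
const qs = require('qs');

console.log(qs.stringify({ tags: ['a'] }, { arrayFormat: 'comma', encode: false }));
// tags=a    -> qs.parse gives a plain string back

console.log(qs.stringify({ tags: ['a'] }, { arrayFormat: 'comma', commaRoundTrip: true, encode: false }));
// tags[]=a  -> qs.parse gives ['a'] back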
@@ -34820,6 +34919,7 @@ module.exports = function (object, opts) {
             obj[key],
             key,
             generateArrayPrefix,
+            commaRoundTrip,
             options.strictNullHandling,
             options.skipNulls,
             options.encode ? options.encoder : null,
package-lock.json (generated): 4154 changed lines (diff suppressed because it is too large)
package.json
@@ -1,6 +1,6 @@
 {
   "name": "checkout",
-  "version": "2.0.2",
+  "version": "2.6.0",
   "description": "checkout action",
   "main": "lib/main.js",
   "scripts": {
@@ -31,7 +31,7 @@
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.0.1",
     "@actions/github": "^2.2.0",
-    "@actions/io": "^1.0.1",
+    "@actions/io": "^1.1.2",
     "@actions/tool-cache": "^1.1.2",
     "uuid": "^3.3.3"
   },
@@ -39,11 +39,12 @@
     "@types/jest": "^27.0.2",
     "@types/node": "^12.7.12",
     "@types/uuid": "^3.4.6",
-    "@typescript-eslint/parser": "^5.1.0",
+    "@typescript-eslint/eslint-plugin": "^5.45.0",
+    "@typescript-eslint/parser": "^5.45.0",
     "@zeit/ncc": "^0.20.5",
     "eslint": "^7.32.0",
     "eslint-plugin-github": "^4.3.2",
-    "eslint-plugin-jest": "^25.2.2",
+    "eslint-plugin-jest": "^25.7.0",
     "jest": "^27.3.0",
     "jest-circus": "^27.3.0",
     "js-yaml": "^3.13.1",
src/git-auth-helper.ts
@@ -157,7 +157,8 @@ class GitAuthHelper {
       // by process creation audit events, which are commonly logged. For more information,
       // refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
       const output = await this.git.submoduleForeach(
-        `git config --local '${this.tokenConfigKey}' '${this.tokenPlaceholderConfigValue}' && git config --local --show-origin --name-only --get-regexp remote.origin.url`,
+        // wrap the pipeline in quotes to make sure it's handled properly by submoduleForeach, rather than just the first part of the pipeline
+        `sh -c "git config --local '${this.tokenConfigKey}' '${this.tokenPlaceholderConfigValue}' && git config --local --show-origin --name-only --get-regexp remote.origin.url"`,
         this.settings.nestedSubmodules
       )
 
@@ -365,7 +366,8 @@ class GitAuthHelper {
 
     const pattern = regexpHelper.escape(configKey)
     await this.git.submoduleForeach(
-      `git config --local --name-only --get-regexp '${pattern}' && git config --local --unset-all '${configKey}' || :`,
+      // wrap the pipeline in quotes to make sure it's handled properly by submoduleForeach, rather than just the first part of the pipeline
+      `sh -c "git config --local --name-only --get-regexp '${pattern}' && git config --local --unset-all '${configKey}' || :"`,
       true
     )
   }
@@ -5,4 +5,4 @@ set -e
 src/misc/licensed-download.sh
 
 echo 'Running: licensed cached'
-_temp/licensed-3.3.1/licensed status
+_temp/licensed-3.6.0/licensed status
@@ -2,23 +2,23 @@
 
 set -e
 
-if [ ! -f _temp/licensed-3.3.1.done ]; then
+if [ ! -f _temp/licensed-3.6.0.done ]; then
   echo 'Clearing temp'
-  rm -rf _temp/licensed-3.3.1 || true
+  rm -rf _temp/licensed-3.6.0 || true
 
   echo 'Downloading licensed'
-  mkdir -p _temp/licensed-3.3.1
-  pushd _temp/licensed-3.3.1
+  mkdir -p _temp/licensed-3.6.0
+  pushd _temp/licensed-3.6.0
   if [[ "$OSTYPE" == "darwin"* ]]; then
-    curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.3.1/licensed-3.3.1-darwin-x64.tar.gz
+    curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.6.0/licensed-3.6.0-darwin-x64.tar.gz
   else
-    curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.3.1/licensed-3.3.1-linux-x64.tar.gz
+    curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.6.0/licensed-3.6.0-linux-x64.tar.gz
   fi
 
   echo 'Extracting licenesed'
   tar -xzf licensed.tar.gz
   popd
-  touch _temp/licensed-3.3.1.done
+  touch _temp/licensed-3.6.0.done
 else
   echo 'Licensed already downloaded'
 fi
@@ -5,4 +5,4 @@ set -e
 src/misc/licensed-download.sh
 
 echo 'Running: licensed cached'
-_temp/licensed-3.3.1/licensed cache
+_temp/licensed-3.6.0/licensed cache