Merge pull request #66 from gradle/dependency-updates

Dependency updates
Daz DeBoer 2024-03-11 21:44:18 -06:00 committed by GitHub
commit 3fe876afb8
14 changed files with 597 additions and 630 deletions

View file

@@ -1,6 +1,6 @@
plugins {
id "com.gradle.enterprise" version "3.16.2"
id "com.gradle.common-custom-user-data-gradle-plugin" version "1.12.1"
id "com.gradle.common-custom-user-data-gradle-plugin" version "1.13"
}
gradleEnterprise {

View file

@@ -1,6 +1,6 @@
plugins {
id("com.gradle.enterprise") version "3.16.2"
id("com.gradle.common-custom-user-data-gradle-plugin") version "1.12.1"
id("com.gradle.common-custom-user-data-gradle-plugin") version "1.13"
}
gradleEnterprise {

View file

@@ -25,7 +25,7 @@ jobs:
DEVELOCITY_INJECTION_ENABLED: true
DEVELOCITY_URL: https://ge.solutions-team.gradle.com
DEVELOCITY_PLUGIN_VERSION: 3.16.2
DEVELOCITY_CCUD_PLUGIN_VERSION: 1.12.1
DEVELOCITY_CCUD_PLUGIN_VERSION: 1.13
GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} # This env var has not (yet) been renamed/aliased in GE plugin 3.16.2
strategy:
matrix:

View file

@@ -2328,6 +2328,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
const promises_1 = __importDefault(__nccwpck_require__(73292));
const stream = __importStar(__nccwpck_require__(12781));
const fs_1 = __nccwpck_require__(57147);
const path = __importStar(__nccwpck_require__(71017));
const github = __importStar(__nccwpck_require__(21260));
const core = __importStar(__nccwpck_require__(42186));
const httpClient = __importStar(__nccwpck_require__(96255));
@@ -2368,6 +2371,9 @@ function streamExtract(url, directory) {
return;
}
catch (error) {
if (error.message.includes('Malformed extraction path')) {
throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
}
retryCount++;
core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
// wait 5 seconds before retrying
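
The hunk above makes path-traversal failures non-retryable: transient download errors still go through the 5-second retry loop, but a 'Malformed extraction path' rejection aborts immediately. A minimal sketch of that retry shape, with the single-attempt download passed in as a callback (the helper name and default retry limit are illustrative, not the bundle's actual internals):

// Sketch only: retry transient failures, abort on errors the caller marks unretryable.
async function downloadWithRetry(attempt: () => Promise<void>, maxRetries = 5): Promise<void> {
    let retryCount = 0
    while (retryCount < maxRetries) {
        try {
            await attempt()
            return
        } catch (error) {
            const message = (error as Error).message
            if (message.includes('Malformed extraction path')) {
                // Retrying cannot make a traversal attempt safe, so fail fast.
                throw new Error(`Artifact download failed with unretryable error: ${message}`)
            }
            retryCount++
            await new Promise(resolve => setTimeout(resolve, 5000)) // wait 5 seconds before retrying
        }
    }
    throw new Error(`Artifact download failed after ${maxRetries} retries`)
}
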
@@ -2390,6 +2396,8 @@ function streamExtractExternal(url, directory) {
response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
};
const timer = setTimeout(timerFn, timeout);
const createdDirectories = new Set();
createdDirectories.add(directory);
response.message
.on('data', () => {
timer.refresh();
@@ -2399,11 +2407,47 @@ function streamExtractExternal(url, directory) {
clearTimeout(timer);
reject(error);
})
.pipe(unzip_stream_1.default.Extract({ path: directory }))
.on('close', () => {
.pipe(unzip_stream_1.default.Parse())
.pipe(new stream.Transform({
objectMode: true,
transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
const fullPath = path.normalize(path.join(directory, entry.path));
if (!directory.endsWith(path.sep)) {
directory += path.sep;
}
if (!fullPath.startsWith(directory)) {
reject(new Error(`Malformed extraction path: ${fullPath}`));
}
if (entry.type === 'Directory') {
if (!createdDirectories.has(fullPath)) {
createdDirectories.add(fullPath);
yield resolveOrCreateDirectory(fullPath).then(() => {
entry.autodrain();
callback();
});
}
else {
entry.autodrain();
callback();
}
}
else {
core.info(`Extracting artifact entry: ${fullPath}`);
if (!createdDirectories.has(path.dirname(fullPath))) {
createdDirectories.add(path.dirname(fullPath));
yield resolveOrCreateDirectory(path.dirname(fullPath));
}
const writeStream = (0, fs_1.createWriteStream)(fullPath);
writeStream.on('finish', callback);
writeStream.on('error', reject);
entry.pipe(writeStream);
}
})
}))
.on('finish', () => __awaiter(this, void 0, void 0, function* () {
clearTimeout(timer);
resolve();
})
}))
.on('error', (error) => {
reject(error);
});
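
The rewritten streamExtractExternal above replaces unzip-stream's Extract sink with Parse plus a Transform stage so each entry's destination can be validated before anything is written to disk: the joined path is normalized and must stay inside the target directory, otherwise the 'Malformed extraction path' error from the previous hunk is raised. The guard condenses to this check (POSIX-style paths assumed in the comments):

import * as path from 'path'

// Zip-slip guard: an archive entry may only extract inside the target directory.
function isSafeExtractionPath(directory: string, entryPath: string): boolean {
    const root = directory.endsWith(path.sep) ? directory : directory + path.sep
    const fullPath = path.normalize(path.join(root, entryPath))
    return fullPath.startsWith(root)
}

// isSafeExtractionPath('/tmp/artifact', 'lib/app.js')        -> true
// isSafeExtractionPath('/tmp/artifact', '../../etc/passwd')  -> false
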
@@ -3024,7 +3068,10 @@ function getResultsServiceUrl() {
exports.getResultsServiceUrl = getResultsServiceUrl;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
return !isGitHubHost && !isGheHost;
}
exports.isGhes = isGhes;
function getGitHubWorkspaceDir() {
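
The updated isGhes() no longer treats every non-github.com hostname as GitHub Enterprise Server: *.ghe.com and *.ghe.localhost hosts are now recognised as GitHub-operated and excluded. Restated on its own for clarity:

// Standalone restatement of the updated check, for illustration only.
function isGhesHost(serverUrl: string): boolean {
    const hostname = new URL(serverUrl).hostname.trimEnd().toUpperCase()
    const isGitHubHost = hostname === 'GITHUB.COM'
    const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')
    return !isGitHubHost && !isGheHost
}

// isGhesHost('https://github.com')          -> false
// isGhesHost('https://myorg.ghe.com')       -> false
// isGhesHost('https://github.example.org')  -> true (self-hosted GHES)
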
@@ -7127,7 +7174,10 @@ function assertDefined(name, value) {
exports.assertDefined = assertDefined;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
return !isGitHubHost && !isGheHost;
}
exports.isGhes = isGhes;
//# sourceMappingURL=cacheUtils.js.map
@@ -15220,7 +15270,7 @@ class HttpClient {
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
if (!useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
@@ -15252,16 +15302,12 @@ class HttpClient {
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if (this._keepAlive && !agent) {
// if tunneling agent isn't assigned create a new agent
if (!agent) {
const options = { keepAlive: this._keepAlive, maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
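
The @actions/http-client 2.2.1 change above drops the global-agent fallback: when no proxy is in play the client now always builds and caches its own agent, passing keepAlive through as an option rather than using it to decide whether to cache at all. A rough sketch of the non-proxy branch after the change (AgentCache is an illustrative wrapper, not part of the library):

import * as http from 'http'
import * as https from 'https'

// Illustrative wrapper: one lazily created, reused agent per client, never the global agent.
class AgentCache {
    private agent?: http.Agent | https.Agent

    constructor(private readonly keepAlive: boolean, private readonly maxSockets: number) {}

    get(usingSsl: boolean): http.Agent | https.Agent {
        if (!this.agent) {
            const options = { keepAlive: this.keepAlive, maxSockets: this.maxSockets }
            this.agent = usingSsl ? new https.Agent(options) : new http.Agent(options)
        }
        return this.agent
    }
}
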
@@ -102798,35 +102844,43 @@ const coerce = (version, options) => {
let match = null
if (!options.rtl) {
match = version.match(re[t.COERCE])
match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
// With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL]
let next
while ((next = re[t.COERCERTL].exec(version)) &&
while ((next = coerceRtlRegex.exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
re[t.COERCERTL].lastIndex = -1
coerceRtlRegex.lastIndex = -1
}
if (match === null) {
return null
}
return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options)
const major = match[2]
const minor = match[3] || '0'
const patch = match[4] || '0'
const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''
const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''
return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options)
}
module.exports = coerce
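
This is the semver 7.6.0 behaviour change pulled in by the package.json bump further down: coerce() gains an includePrerelease option, backed by the new COERCEFULL / COERCERTLFULL tokens in the next hunk, so prerelease and build identifiers can survive coercion instead of being stripped. Expected usage, assuming the 7.6.0 API:

import * as semver from 'semver'

// Default behaviour: only the numeric core survives coercion.
semver.coerce('v2.1.4-rc.1')?.version                                         // '2.1.4'
// With includePrerelease, the prerelease identifiers are kept.
semver.coerce('v2.1.4-rc.1', { includePrerelease: true })?.version            // '2.1.4-rc.1'
// rtl still picks the right-most coercible run, now together with its prerelease.
semver.coerce('1.2.3.4-rc', { rtl: true, includePrerelease: true })?.version  // '2.3.4-rc'
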
@@ -103518,12 +103572,17 @@ createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
createToken('COERCE', `${'(^|[^\\d])' +
createToken('COERCEPLAIN', `${'(^|[^\\d])' +
'(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`)
createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`)
createToken('COERCEFULL', src[t.COERCEPLAIN] +
`(?:${src[t.PRERELEASE]})?` +
`(?:${src[t.BUILD]})?` +
`(?:$|[^\\d])`)
createToken('COERCERTL', src[t.COERCE], true)
createToken('COERCERTLFULL', src[t.COERCEFULL], true)
// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -138940,7 +138999,7 @@ function setup() {
if ((0, input_params_1.getBuildScanPublishEnabled)() && verifyTermsOfServiceAgreement()) {
maybeExportVariable('DEVELOCITY_INJECTION_ENABLED', 'true');
maybeExportVariable('DEVELOCITY_PLUGIN_VERSION', '3.16.2');
maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.12.1');
maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.13');
maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_URL', (0, input_params_1.getBuildScanTermsOfServiceUrl)());
maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_AGREE', (0, input_params_1.getBuildScanTermsOfServiceAgree)());
}
@@ -139310,21 +139369,21 @@ class CacheCleaner {
});
});
}
ageAllFiles(fileName = '*') {
return __awaiter(this, void 0, void 0, function* () {
ageAllFiles() {
return __awaiter(this, arguments, void 0, function* (fileName = '*') {
core.debug(`Aging all files in Gradle User Home with name ${fileName}`);
yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0));
});
}
touchAllFiles(fileName = '*') {
return __awaiter(this, void 0, void 0, function* () {
touchAllFiles() {
return __awaiter(this, arguments, void 0, function* (fileName = '*') {
core.debug(`Touching all files in Gradle User Home with name ${fileName}`);
yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date());
});
}
setUtimes(pattern, timestamp) {
var _a, e_1, _b, _c;
return __awaiter(this, void 0, void 0, function* () {
var _a, e_1, _b, _c;
const globber = yield glob.create(pattern, {
implicitDescendants: false
});
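
The CacheCleaner hunk (and the similar AbstractEntryExtractor one below) is emit-only churn: the TypeScript source is unchanged, but the rebuilt dist, presumably reflecting the TypeScript 5.4.2 / @vercel/ncc toolchain from package.json, now moves default parameters into the generator handed to __awaiter and hoists the iterator temporaries inside it. The source these lines compile from looks roughly like this (setUtimes body elided):

// Approximate source shape behind the transpiled output above; only the emit changed.
class CacheCleaner {
    constructor(private readonly gradleUserHome: string) {}

    async ageAllFiles(fileName = '*'): Promise<void> {
        // Push atime/mtime back to the epoch so unused entries look stale to cleanup.
        await this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0))
    }

    async touchAllFiles(fileName = '*'): Promise<void> {
        await this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date())
    }

    private async setUtimes(pattern: string, timestamp: Date): Promise<void> {
        // Glob over the pattern and fs.utimes each match (omitted in this sketch).
    }
}
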
@@ -139502,8 +139561,8 @@ class AbstractEntryExtractor {
});
}
saveExtractedCacheEntry(matchingFiles, artifactType, pattern, uniqueFileNames, previouslyRestoredEntries, entryListener) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
var _a;
const cacheKey = uniqueFileNames
? this.createCacheKeyFromFileNames(artifactType, matchingFiles)
: yield this.createCacheKeyFromFileContents(artifactType, pattern);
@@ -141953,7 +142012,7 @@ function firstString() {
/***/ ((module) => {
"use strict";
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.4","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
/***/ }),

File diff suppressed because one or more lines are too long

View file

@@ -2328,6 +2328,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
const promises_1 = __importDefault(__nccwpck_require__(73292));
const stream = __importStar(__nccwpck_require__(12781));
const fs_1 = __nccwpck_require__(57147);
const path = __importStar(__nccwpck_require__(71017));
const github = __importStar(__nccwpck_require__(21260));
const core = __importStar(__nccwpck_require__(42186));
const httpClient = __importStar(__nccwpck_require__(96255));
@@ -2368,6 +2371,9 @@ function streamExtract(url, directory) {
return;
}
catch (error) {
if (error.message.includes('Malformed extraction path')) {
throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
}
retryCount++;
core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
// wait 5 seconds before retrying
@@ -2390,6 +2396,8 @@ function streamExtractExternal(url, directory) {
response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
};
const timer = setTimeout(timerFn, timeout);
const createdDirectories = new Set();
createdDirectories.add(directory);
response.message
.on('data', () => {
timer.refresh();
@@ -2399,11 +2407,47 @@ function streamExtractExternal(url, directory) {
clearTimeout(timer);
reject(error);
})
.pipe(unzip_stream_1.default.Extract({ path: directory }))
.on('close', () => {
.pipe(unzip_stream_1.default.Parse())
.pipe(new stream.Transform({
objectMode: true,
transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
const fullPath = path.normalize(path.join(directory, entry.path));
if (!directory.endsWith(path.sep)) {
directory += path.sep;
}
if (!fullPath.startsWith(directory)) {
reject(new Error(`Malformed extraction path: ${fullPath}`));
}
if (entry.type === 'Directory') {
if (!createdDirectories.has(fullPath)) {
createdDirectories.add(fullPath);
yield resolveOrCreateDirectory(fullPath).then(() => {
entry.autodrain();
callback();
});
}
else {
entry.autodrain();
callback();
}
}
else {
core.info(`Extracting artifact entry: ${fullPath}`);
if (!createdDirectories.has(path.dirname(fullPath))) {
createdDirectories.add(path.dirname(fullPath));
yield resolveOrCreateDirectory(path.dirname(fullPath));
}
const writeStream = (0, fs_1.createWriteStream)(fullPath);
writeStream.on('finish', callback);
writeStream.on('error', reject);
entry.pipe(writeStream);
}
})
}))
.on('finish', () => __awaiter(this, void 0, void 0, function* () {
clearTimeout(timer);
resolve();
})
}))
.on('error', (error) => {
reject(error);
});
@@ -3024,7 +3068,10 @@ function getResultsServiceUrl() {
exports.getResultsServiceUrl = getResultsServiceUrl;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
return !isGitHubHost && !isGheHost;
}
exports.isGhes = isGhes;
function getGitHubWorkspaceDir() {
@@ -7127,7 +7174,10 @@ function assertDefined(name, value) {
exports.assertDefined = assertDefined;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
return !isGitHubHost && !isGheHost;
}
exports.isGhes = isGhes;
//# sourceMappingURL=cacheUtils.js.map
@@ -15220,7 +15270,7 @@ class HttpClient {
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
if (!useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
@@ -15252,16 +15302,12 @@ class HttpClient {
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if (this._keepAlive && !agent) {
// if tunneling agent isn't assigned create a new agent
if (!agent) {
const options = { keepAlive: this._keepAlive, maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
@@ -100251,35 +100297,43 @@ const coerce = (version, options) => {
let match = null
if (!options.rtl) {
match = version.match(re[t.COERCE])
match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
// With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL]
let next
while ((next = re[t.COERCERTL].exec(version)) &&
while ((next = coerceRtlRegex.exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
re[t.COERCERTL].lastIndex = -1
coerceRtlRegex.lastIndex = -1
}
if (match === null) {
return null
}
return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options)
const major = match[2]
const minor = match[3] || '0'
const patch = match[4] || '0'
const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''
const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''
return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options)
}
module.exports = coerce
@@ -100971,12 +101025,17 @@ createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
createToken('COERCE', `${'(^|[^\\d])' +
createToken('COERCEPLAIN', `${'(^|[^\\d])' +
'(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`)
createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`)
createToken('COERCEFULL', src[t.COERCEPLAIN] +
`(?:${src[t.PRERELEASE]})?` +
`(?:${src[t.BUILD]})?` +
`(?:$|[^\\d])`)
createToken('COERCERTL', src[t.COERCE], true)
createToken('COERCERTLFULL', src[t.COERCEFULL], true)
// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -136393,7 +136452,7 @@ function setup() {
if ((0, input_params_1.getBuildScanPublishEnabled)() && verifyTermsOfServiceAgreement()) {
maybeExportVariable('DEVELOCITY_INJECTION_ENABLED', 'true');
maybeExportVariable('DEVELOCITY_PLUGIN_VERSION', '3.16.2');
maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.12.1');
maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.13');
maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_URL', (0, input_params_1.getBuildScanTermsOfServiceUrl)());
maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_AGREE', (0, input_params_1.getBuildScanTermsOfServiceAgree)());
}
@@ -136763,21 +136822,21 @@ class CacheCleaner {
});
});
}
ageAllFiles(fileName = '*') {
return __awaiter(this, void 0, void 0, function* () {
ageAllFiles() {
return __awaiter(this, arguments, void 0, function* (fileName = '*') {
core.debug(`Aging all files in Gradle User Home with name ${fileName}`);
yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0));
});
}
touchAllFiles(fileName = '*') {
return __awaiter(this, void 0, void 0, function* () {
touchAllFiles() {
return __awaiter(this, arguments, void 0, function* (fileName = '*') {
core.debug(`Touching all files in Gradle User Home with name ${fileName}`);
yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date());
});
}
setUtimes(pattern, timestamp) {
var _a, e_1, _b, _c;
return __awaiter(this, void 0, void 0, function* () {
var _a, e_1, _b, _c;
const globber = yield glob.create(pattern, {
implicitDescendants: false
});
@@ -136955,8 +137014,8 @@ class AbstractEntryExtractor {
});
}
saveExtractedCacheEntry(matchingFiles, artifactType, pattern, uniqueFileNames, previouslyRestoredEntries, entryListener) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
var _a;
const cacheKey = uniqueFileNames
? this.createCacheKeyFromFileNames(artifactType, matchingFiles)
: yield this.createCacheKeyFromFileContents(artifactType, pattern);
@@ -139048,7 +139107,7 @@ function firstString() {
/***/ ((module) => {
"use strict";
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.4","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
/***/ }),

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

View file

@@ -28,35 +28,35 @@
],
"license": "MIT",
"dependencies": {
"@actions/artifact": "2.1.0",
"@actions/cache": "3.2.3",
"@actions/artifact": "2.1.4",
"@actions/cache": "3.2.4",
"@actions/core": "1.10.1",
"@actions/exec": "1.1.1",
"@actions/github": "6.0.0",
"@actions/glob": "0.4.0",
"@actions/http-client": "2.2.0",
"@actions/http-client": "2.2.1",
"@actions/tool-cache": "2.0.1",
"@octokit/rest": "19.0.13",
"@octokit/webhooks-types": "7.3.1",
"semver": "7.5.4",
"@octokit/rest": "20.0.2",
"@octokit/webhooks-types": "7.4.0",
"semver": "7.6.0",
"string-argv": "0.3.2"
},
"devDependencies": {
"@types/jest": "29.5.11",
"@types/node": "20.10.0",
"@types/jest": "29.5.12",
"@types/node": "20.11.26",
"@types/unzipper": "0.10.9",
"@typescript-eslint/parser": "6.19.1",
"@typescript-eslint/parser": "7.2.0",
"@vercel/ncc": "0.38.1",
"eslint": "8.56.0",
"eslint-plugin-github": "4.10.1",
"eslint-plugin-jest": "27.6.3",
"eslint": "8.57.0",
"eslint-plugin-github": "4.10.2",
"eslint-plugin-jest": "27.9.0",
"eslint-plugin-prettier": "5.1.3",
"jest": "29.7.0",
"js-yaml": "4.1.0",
"npm-run-all": "4.1.5",
"patch-package": "8.0.0",
"prettier": "3.2.4",
"prettier": "3.2.5",
"ts-jest": "29.1.2",
"typescript": "5.3.3"
"typescript": "5.4.2"
}
}

View file

@@ -9,7 +9,7 @@ export function setup(): void {
if (getBuildScanPublishEnabled() && verifyTermsOfServiceAgreement()) {
maybeExportVariable('DEVELOCITY_INJECTION_ENABLED', 'true')
maybeExportVariable('DEVELOCITY_PLUGIN_VERSION', '3.16.2')
maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.12.1')
maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.13')
maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_URL', getBuildScanTermsOfServiceUrl())
maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_AGREE', getBuildScanTermsOfServiceAgree())
}
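
This is the TypeScript source behind the two dist setup() hunks above; only the CCUD plugin default changes, from 1.12.1 to 1.13. maybeExportVariable is the action's own helper; presumably it exports a default only when the variable is not already set, along these lines (the guard is an assumption, not taken from this diff):

import * as core from '@actions/core'

// Hypothetical restatement of maybeExportVariable: only set a default when the
// user has not already provided the variable (assumed behaviour, not from the diff).
function maybeExportVariable(name: string, value: string): void {
    if (process.env[name] === undefined) {
        core.exportVariable(name, value)
    }
}
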

View file

@@ -20,7 +20,7 @@ dependencies {
testImplementation ('io.ratpack:ratpack-groovy-test:1.9.0') {
exclude group: 'org.codehaus.groovy', module: 'groovy-all'
}
testImplementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.16.1'
testImplementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.16.2'
}
test {

View file

@@ -1,6 +1,6 @@
plugins {
id "com.gradle.enterprise" version "3.16.2"
id "com.gradle.common-custom-user-data-gradle-plugin" version "1.12.1"
id "com.gradle.common-custom-user-data-gradle-plugin" version "1.13"
}
gradleEnterprise {

View file

@@ -17,7 +17,7 @@ import java.util.zip.GZIPOutputStream
class BaseInitScriptTest extends Specification {
static final String DEVELOCITY_PLUGIN_VERSION = '3.16.2'
static final String CCUD_PLUGIN_VERSION = '1.12.1'
static final String CCUD_PLUGIN_VERSION = '1.13'
static final TestGradleVersion GRADLE_3_X = new TestGradleVersion(GradleVersion.version('3.5.1'), 7, 9)
static final TestGradleVersion GRADLE_4_X = new TestGradleVersion(GradleVersion.version('4.10.3'), 7, 10)