Mirror of https://github.com/gradle/actions (synced 2024-11-23 18:02:13 +00:00)
Only process build results once (#133)
On long-lived machines, it's possible that the `.build-results` directory isn't cleared between invocations. When that happens, the job summary includes results from previous jobs. By marking each build-results file as 'processed' at the end of the job, we avoid this scenario.
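A rough TypeScript sketch of that marker-file idea (the helper names and the standalone summarizeOnce driver below are illustrative only; the actual changes are in the diffs that follow):

import * as fs from 'fs'
import * as path from 'path'

// List result files that have not yet been consumed by a previous job on this machine.
function unprocessedResults(dir: string): string[] {
    if (!fs.existsSync(dir)) return []
    return fs
        .readdirSync(dir)
        .map(file => path.resolve(dir, file))
        .filter(file => path.extname(file) === '.json' && !fs.existsSync(`${file}.processed`))
}

// Summarize each unprocessed result, then drop an empty '<file>.processed' marker
// so a later invocation on the same runner skips it.
function summarizeOnce(dir: string): void {
    for (const file of unprocessedResults(dir)) {
        const result = JSON.parse(fs.readFileSync(file, 'utf8'))
        console.log(`build result from ${path.basename(file)}:`, result)
        fs.writeFileSync(`${file}.processed`, '')
    }
}

summarizeOnce(path.resolve(process.env['RUNNER_TEMP'] ?? '.', '.build-results'))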
This commit is contained in:
parent 6232a3f503
commit e235596c88
11 changed files with 156 additions and 39 deletions
.github/workflows/demo-job-summary.yml (vendored): 4 changed lines
@@ -16,6 +16,10 @@ jobs:
         npm install
         npm run build
       working-directory: sources
+    - uses: actions/setup-java@v4
+      with:
+        distribution: 'temurin'
+        java-version: '11'
     - name: Setup Gradle
       uses: ./setup-gradle
     - name: Build kotlin-dsl project
dist/dependency-submission/main/index.js (vendored): 36 changed lines
@@ -139677,21 +139677,42 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.loadBuildResults = void 0;
+exports.markBuildResultsProcessed = exports.loadBuildResults = void 0;
 const fs = __importStar(__nccwpck_require__(57147));
 const path = __importStar(__nccwpck_require__(71017));
 function loadBuildResults() {
-    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP'], '.build-results');
-    if (!fs.existsSync(buildResultsDir)) {
-        return [];
-    }
-    return fs.readdirSync(buildResultsDir).map(file => {
-        const filePath = path.join(buildResultsDir, file);
+    return getUnprocessedResults().map(filePath => {
         const content = fs.readFileSync(filePath, 'utf8');
         return JSON.parse(content);
     });
 }
 exports.loadBuildResults = loadBuildResults;
+function markBuildResultsProcessed() {
+    getUnprocessedResults().forEach(markProcessed);
+}
+exports.markBuildResultsProcessed = markBuildResultsProcessed;
+function getUnprocessedResults() {
+    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP'], '.build-results');
+    if (!fs.existsSync(buildResultsDir)) {
+        return [];
+    }
+    return fs
+        .readdirSync(buildResultsDir)
+        .map(file => {
+        return path.resolve(buildResultsDir, file);
+    })
+        .filter(filePath => {
+        return path.extname(filePath) === '.json' && !isProcessed(filePath);
+    });
+}
+function isProcessed(resultFile) {
+    const markerFile = `${resultFile}.processed`;
+    return fs.existsSync(markerFile);
+}
+function markProcessed(resultFile) {
+    const markerFile = `${resultFile}.processed`;
+    fs.writeFileSync(markerFile, '');
+}
 
 
 /***/ }),

@@ -142420,6 +142441,7 @@ function complete(cacheConfig, summaryConfig) {
         yield caches.save(userHome, gradleUserHome, cacheListener, daemonController, cacheConfig);
         const cachingReport = (0, cache_reporting_1.generateCachingReport)(cacheListener);
         yield jobSummary.generateJobSummary(buildResults, cachingReport, summaryConfig);
+        (0, build_results_1.markBuildResultsProcessed)();
         core.info('Completed post-action step');
         return true;
     });
dist/dependency-submission/main/index.js.map (vendored): 2 changed lines
File diff suppressed because one or more lines are too long
dist/dependency-submission/post/index.js (vendored): 36 changed lines
@@ -91105,21 +91105,42 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.loadBuildResults = void 0;
+exports.markBuildResultsProcessed = exports.loadBuildResults = void 0;
 const fs = __importStar(__nccwpck_require__(7147));
 const path = __importStar(__nccwpck_require__(1017));
 function loadBuildResults() {
-    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP'], '.build-results');
-    if (!fs.existsSync(buildResultsDir)) {
-        return [];
-    }
-    return fs.readdirSync(buildResultsDir).map(file => {
-        const filePath = path.join(buildResultsDir, file);
+    return getUnprocessedResults().map(filePath => {
         const content = fs.readFileSync(filePath, 'utf8');
         return JSON.parse(content);
     });
 }
 exports.loadBuildResults = loadBuildResults;
+function markBuildResultsProcessed() {
+    getUnprocessedResults().forEach(markProcessed);
+}
+exports.markBuildResultsProcessed = markBuildResultsProcessed;
+function getUnprocessedResults() {
+    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP'], '.build-results');
+    if (!fs.existsSync(buildResultsDir)) {
+        return [];
+    }
+    return fs
+        .readdirSync(buildResultsDir)
+        .map(file => {
+        return path.resolve(buildResultsDir, file);
+    })
+        .filter(filePath => {
+        return path.extname(filePath) === '.json' && !isProcessed(filePath);
+    });
+}
+function isProcessed(resultFile) {
+    const markerFile = `${resultFile}.processed`;
+    return fs.existsSync(markerFile);
+}
+function markProcessed(resultFile) {
+    const markerFile = `${resultFile}.processed`;
+    fs.writeFileSync(markerFile, '');
+}
 
 
 /***/ }),

@@ -93188,6 +93209,7 @@ function complete(cacheConfig, summaryConfig) {
         yield caches.save(userHome, gradleUserHome, cacheListener, daemonController, cacheConfig);
         const cachingReport = (0, cache_reporting_1.generateCachingReport)(cacheListener);
         yield jobSummary.generateJobSummary(buildResults, cachingReport, summaryConfig);
+        (0, build_results_1.markBuildResultsProcessed)();
         core.info('Completed post-action step');
         return true;
     });
dist/dependency-submission/post/index.js.map (vendored): 2 changed lines
File diff suppressed because one or more lines are too long
dist/setup-gradle/main/index.js (vendored): 36 changed lines
@@ -139677,21 +139677,42 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.loadBuildResults = void 0;
+exports.markBuildResultsProcessed = exports.loadBuildResults = void 0;
 const fs = __importStar(__nccwpck_require__(57147));
 const path = __importStar(__nccwpck_require__(71017));
 function loadBuildResults() {
-    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP'], '.build-results');
-    if (!fs.existsSync(buildResultsDir)) {
-        return [];
-    }
-    return fs.readdirSync(buildResultsDir).map(file => {
-        const filePath = path.join(buildResultsDir, file);
+    return getUnprocessedResults().map(filePath => {
         const content = fs.readFileSync(filePath, 'utf8');
         return JSON.parse(content);
     });
 }
 exports.loadBuildResults = loadBuildResults;
+function markBuildResultsProcessed() {
+    getUnprocessedResults().forEach(markProcessed);
+}
+exports.markBuildResultsProcessed = markBuildResultsProcessed;
+function getUnprocessedResults() {
+    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP'], '.build-results');
+    if (!fs.existsSync(buildResultsDir)) {
+        return [];
+    }
+    return fs
+        .readdirSync(buildResultsDir)
+        .map(file => {
+        return path.resolve(buildResultsDir, file);
+    })
+        .filter(filePath => {
+        return path.extname(filePath) === '.json' && !isProcessed(filePath);
+    });
+}
+function isProcessed(resultFile) {
+    const markerFile = `${resultFile}.processed`;
+    return fs.existsSync(markerFile);
+}
+function markProcessed(resultFile) {
+    const markerFile = `${resultFile}.processed`;
+    fs.writeFileSync(markerFile, '');
+}
 
 
 /***/ }),

@@ -142339,6 +142360,7 @@ function complete(cacheConfig, summaryConfig) {
         yield caches.save(userHome, gradleUserHome, cacheListener, daemonController, cacheConfig);
         const cachingReport = (0, cache_reporting_1.generateCachingReport)(cacheListener);
         yield jobSummary.generateJobSummary(buildResults, cachingReport, summaryConfig);
+        (0, build_results_1.markBuildResultsProcessed)();
         core.info('Completed post-action step');
         return true;
     });
dist/setup-gradle/main/index.js.map (vendored): 2 changed lines
File diff suppressed because one or more lines are too long
dist/setup-gradle/post/index.js (vendored): 36 changed lines
@@ -137130,21 +137130,42 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.loadBuildResults = void 0;
+exports.markBuildResultsProcessed = exports.loadBuildResults = void 0;
 const fs = __importStar(__nccwpck_require__(57147));
 const path = __importStar(__nccwpck_require__(71017));
 function loadBuildResults() {
-    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP'], '.build-results');
-    if (!fs.existsSync(buildResultsDir)) {
-        return [];
-    }
-    return fs.readdirSync(buildResultsDir).map(file => {
-        const filePath = path.join(buildResultsDir, file);
+    return getUnprocessedResults().map(filePath => {
         const content = fs.readFileSync(filePath, 'utf8');
         return JSON.parse(content);
     });
 }
 exports.loadBuildResults = loadBuildResults;
+function markBuildResultsProcessed() {
+    getUnprocessedResults().forEach(markProcessed);
+}
+exports.markBuildResultsProcessed = markBuildResultsProcessed;
+function getUnprocessedResults() {
+    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP'], '.build-results');
+    if (!fs.existsSync(buildResultsDir)) {
+        return [];
+    }
+    return fs
+        .readdirSync(buildResultsDir)
+        .map(file => {
+        return path.resolve(buildResultsDir, file);
+    })
+        .filter(filePath => {
+        return path.extname(filePath) === '.json' && !isProcessed(filePath);
+    });
+}
+function isProcessed(resultFile) {
+    const markerFile = `${resultFile}.processed`;
+    return fs.existsSync(markerFile);
+}
+function markProcessed(resultFile) {
+    const markerFile = `${resultFile}.processed`;
+    fs.writeFileSync(markerFile, '');
+}
 
 
 /***/ }),

@@ -139412,6 +139433,7 @@ function complete(cacheConfig, summaryConfig) {
         yield caches.save(userHome, gradleUserHome, cacheListener, daemonController, cacheConfig);
         const cachingReport = (0, cache_reporting_1.generateCachingReport)(cacheListener);
         yield jobSummary.generateJobSummary(buildResults, cachingReport, summaryConfig);
+        (0, build_results_1.markBuildResultsProcessed)();
         core.info('Completed post-action step');
         return true;
     });
dist/setup-gradle/post/index.js.map (vendored): 2 changed lines
File diff suppressed because one or more lines are too long
@@ -13,15 +13,38 @@ export interface BuildResult {
 }
 
 export function loadBuildResults(): BuildResult[] {
+    return getUnprocessedResults().map(filePath => {
+        const content = fs.readFileSync(filePath, 'utf8')
+        return JSON.parse(content) as BuildResult
+    })
+}
+
+export function markBuildResultsProcessed(): void {
+    getUnprocessedResults().forEach(markProcessed)
+}
+
+function getUnprocessedResults(): string[] {
     const buildResultsDir = path.resolve(process.env['RUNNER_TEMP']!, '.build-results')
     if (!fs.existsSync(buildResultsDir)) {
        return []
     }
 
-    return fs.readdirSync(buildResultsDir).map(file => {
-        // Every file in the .build-results dir should be a BuildResults JSON
-        const filePath = path.join(buildResultsDir, file)
-        const content = fs.readFileSync(filePath, 'utf8')
-        return JSON.parse(content) as BuildResult
-    })
+    return fs
+        .readdirSync(buildResultsDir)
+        .map(file => {
+            return path.resolve(buildResultsDir, file)
+        })
+        .filter(filePath => {
+            return path.extname(filePath) === '.json' && !isProcessed(filePath)
+        })
 }
 
+function isProcessed(resultFile: string): boolean {
+    const markerFile = `${resultFile}.processed`
+    return fs.existsSync(markerFile)
+}
+
+function markProcessed(resultFile: string): void {
+    const markerFile = `${resultFile}.processed`
+    fs.writeFileSync(markerFile, '')
+}
@@ -6,7 +6,7 @@ import * as caches from './caching/caches'
 import * as jobSummary from './job-summary'
 import * as buildScan from './build-scan'
 
-import {loadBuildResults} from './build-results'
+import {loadBuildResults, markBuildResultsProcessed} from './build-results'
 import {CacheListener, generateCachingReport} from './caching/cache-reporting'
 import {DaemonController} from './daemon-controller'
 import {BuildScanConfig, CacheConfig, SummaryConfig, getWorkspaceDirectory} from './input-params'

@@ -63,6 +63,8 @@ export async function complete(cacheConfig: CacheConfig, summaryConfig: SummaryC
         const cachingReport = generateCachingReport(cacheListener)
         await jobSummary.generateJobSummary(buildResults, cachingReport, summaryConfig)
 
+        markBuildResultsProcessed()
+
         core.info('Completed post-action step')
 
         return true