Compare commits: debug-cros...master

28 commits:

16f765ae58
d4e8205d7e
9114792eb2
b4025e2cdd
970e29c7f0
de2c0eb89a
c4dd3270ce
0bd7e8b279
cd28b0f5ee
faf0426de3
50195ba7f6
fe9a9bd329
8a65c81355
44946dc88f
58fa4b7a88
b260a9f8a6
17cd0d34de
15d2aaca23
0465cdad11
69a9b03fd9
a80139913a
6034af24fb
0b1e2e4582
a3f0173fb3
2d72d869af
730b76a669
815e458579
6ecde844e8
15 changed files with 7242 additions and 4276 deletions
.github/dependabot.yml (vendored, new file, 14 changed lines)

@@ -0,0 +1,14 @@
version: 2
updates:
  - package-ecosystem: npm
    directory: "/"
    schedule:
      interval: weekly
    ignore:
      - dependency-name: node-fetch
        versions:
          - ">=3.0.0"
  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      interval: weekly
.github/workflows/main.yml (vendored, 4 changed lines)

@@ -8,7 +8,7 @@ jobs:
    steps:
      # https://github.com/actions/checkout
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Install
        run: npm ci
      - name: Build

@@ -23,4 +23,4 @@ jobs:
      # git diff --exit-code --stat -- . ':!node_modules' \
      # || (echo "##[error] found changed files after build. please 'npm run build && npm run fmt'" \
      # "and check in all changes" \
      # && exit 1)
      # && exit 1)
.nvmrc (new file, 1 changed line)

@@ -0,0 +1 @@
16.13.1
CHANGELOG.md (22 changed lines)

@@ -1,3 +1,25 @@
## 0.1.15

- Upgrade to action.yml declaration to node16 to address deprecations
- Upgrade dependencies
- Add `asset` output as a JSON array containing information about the uploaded assets

## 0.1.14

- provides an new workflow input option `generate_release_notes` which when set to true will automatically generate release notes for you based on GitHub activity [#179](https://github.com/softprops/action-gh-release/pull/179). Please see the [GitHub docs for this feature](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes) for more information

## 0.1.13

- fix issue with multiple runs concatenating release bodies [#145](https://github.com/softprops/action-gh-release/pull/145)

## 0.1.12

- fix bug leading to empty strings subsituted for inputs users don't provide breaking api calls [#144](https://github.com/softprops/action-gh-release/pull/144)

## 0.1.11

- better error message on release create failed [#143](https://github.com/softprops/action-gh-release/pull/143)

## 0.1.10

- fixed error message formatting for file uploads
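The 0.1.14 and 0.1.15 entries above describe the new `generate_release_notes` input and the `asset` output. A minimal workflow sketch that turns on automatic release notes (workflow name, trigger, and step layout are illustrative, not taken from this diff):

```yaml
name: release
on:
  push:
    tags: ["v*"]
permissions:
  contents: write
jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # Let GitHub compose the release notes from activity since the previous tag.
      - uses: softprops/action-gh-release@v1
        with:
          generate_release_notes: true
```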
README.md (72 changed lines)

@@ -41,7 +41,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Release
        uses: softprops/action-gh-release@v1
        if: startsWith(github.ref, 'refs/tags/')

@@ -62,7 +62,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Release
        uses: softprops/action-gh-release@v1
```

@@ -88,7 +88,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Build
        run: echo ${{ github.sha }} > Release.txt
      - name: Test

@@ -112,7 +112,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Build
        run: echo ${{ github.sha }} > Release.txt
      - name: Test

@@ -144,7 +144,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Generate Changelog
        run: echo "# Good things have arrived" > ${{ github.workspace }}-CHANGELOG.txt
      - name: Release

@@ -165,20 +165,22 @@ jobs:

The following are optional as `step.with` keys

| Name | Type | Description |
| --- | --- | --- |
| `body` | String | Text communicating notable changes in this release |
| `body_path` | String | Path to load text communicating notable changes in this release |
| `draft` | Boolean | Indicator of whether or not this release is a draft |
| `prerelease` | Boolean | Indicator of whether or not is a prerelease |
| `files` | String | Newline-delimited globs of paths to assets to upload for release |
| `name` | String | Name of the release. defaults to tag name |
| `tag_name` | String | Name of a tag. defaults to `github.ref` |
| `fail_on_unmatched_files` | Boolean | Indicator of whether to fail if any of the `files` globs match nothing |
| `repository` | String | Name of a target repository in `<owner>/<repo>` format. Defaults to GITHUB_REPOSITORY env variable |
| `target_commitish` | String | Commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. |
| `target_commitish` | String | Commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Defaults to repository default branch. |
| `token` | String | Secret GitHub Personal Access Token. Defaults to `${{ github.token }}` |
| `discussion_category_name` | String | If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. For more information, see ["Managing categories for discussions in your repository."](https://docs.github.com/en/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository) |
| `generate_release_notes` | Boolean | Whether to automatically generate the name and body for this release. If name is specified, the specified name will be used; otherwise, a name will be automatically generated. If body is specified, the body will be pre-pended to the automatically generated notes. See the [GitHub docs for this feature](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes) for more information |
| `append_body` | Boolean | Append to existing body instead of overwriting it |

💡 When providing a `body` and `body_path` at the same time, `body_path` will be
attempted first, then falling back on `body` if the path can not be read from.
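A small sketch of the body/body_path fallback described in the note above (the file path reuses the changelog example earlier in this README; the fallback text is illustrative):

```yaml
      - name: Release
        uses: softprops/action-gh-release@v1
        if: startsWith(github.ref, 'refs/tags/')
        with:
          # body_path is read first; body is only used if this file cannot be read.
          body_path: ${{ github.workspace }}-CHANGELOG.txt
          body: "Fallback release notes"
```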
@@ -191,11 +193,14 @@ release will retain its original info.

The following outputs can be accessed via `${{ steps.<step-id>.outputs }}` from this action

| Name | Type | Description |
| --- | --- | --- |
| `url` | String | Github.com URL for the release |
| `id` | String | Release ID |
| `upload_url` | String | URL for uploading assets to the release |
| `assets` | String | JSON array containing information about each uploaded asset, in the format given [here](https://docs.github.com/en/rest/releases/assets#get-a-release-asset) (minus the `uploader` field) |

As an example, you can use `${{ fromJSON(steps.<step-id>.outputs.assets)[0].browser_download_url }}` to get the download URL of the first asset.

#### environment variables

@@ -208,4 +213,23 @@ The following `step.env` keys are allowed as a fallback but deprecated in favor

> **⚠️ Note:** This action was previously implemented as a Docker container, limiting its use to GitHub Actions Linux virtual environments only. With recent releases, we now support cross platform usage. You'll need to remove the `docker://` prefix in these versions

### Permissions

This Action requires the following permissions on the GitHub integration token:

```yaml
permissions:
  contents: write
```

When used with `discussion_category_name`, additional permission is needed:

```yaml
permissions:
  contents: write
  discussions: write
```

[GitHub token permissions](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token) can be set for an individual job, workflow, or for Actions as a whole.

Doug Tangren (softprops) 2019
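A sketch of consuming the new `assets` output from a later step, following the `fromJSON(...)` example documented above (the step id `gh_release` and the uploaded file name are illustrative):

```yaml
      - name: Release
        id: gh_release
        uses: softprops/action-gh-release@v1
        with:
          files: Release.txt
      - name: Print first asset URL
        run: echo "${{ fromJSON(steps.gh_release.outputs.assets)[0].browser_download_url }}"
```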
__tests__/util.test.ts
@@ -5,13 +5,13 @@ import {
  parseConfig,
  parseInputFiles,
  unmatchedPatterns,
  uploadUrl
  uploadUrl,
} from "../src/util";
import * as assert from "assert";

describe("util", () => {
  describe("uploadUrl", () => {
    it("stripts template", () => {
    it("strips template", () => {
      assert.equal(
        uploadUrl(
          "https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}"

@@ -50,7 +50,8 @@ describe("util", () => {
          input_name: undefined,
          input_tag_name: undefined,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        })
      );
    });

@@ -69,7 +70,8 @@ describe("util", () => {
          input_name: undefined,
          input_tag_name: undefined,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        })
      );
    });

@@ -88,39 +90,55 @@ describe("util", () => {
          input_name: undefined,
          input_tag_name: undefined,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        })
      );
    });
  });
  describe("parseConfig", () => {
    it("parses basic config", () => {
      assert.deepStrictEqual(parseConfig({}), {
        github_ref: "",
        github_repository: "",
        github_token: "",
        input_body: undefined,
        input_body_path: undefined,
        input_draft: undefined,
        input_prerelease: undefined,
        input_files: [],
        input_name: undefined,
        input_tag_name: undefined,
        input_fail_on_unmatched_files: false,
        input_target_commitish: undefined,
        input_discussion_category_name: undefined
      });
    });

    it("parses basic config with commitish", () => {
      assert.deepStrictEqual(
        parseConfig({
          INPUT_TARGET_COMMITISH: "affa18ef97bc9db20076945705aba8c516139abd"
          // note: inputs declared in actions.yml, even when declared not required,
          // are still provided by the actions runtime env as empty strings instead of
          // the normal absent env value one would expect. this breaks things
          // as an empty string !== undefined in terms of what we pass to the api
          // so we cover that in a test case here to ensure undefined values are actually
          // resolved as undefined and not empty strings
          INPUT_TARGET_COMMITISH: "",
          INPUT_DISCUSSION_CATEGORY_NAME: "",
        }),
        {
          github_ref: "",
          github_repository: "",
          github_token: "",
          input_append_body: false,
          input_body: undefined,
          input_body_path: undefined,
          input_draft: undefined,
          input_prerelease: undefined,
          input_files: [],
          input_name: undefined,
          input_tag_name: undefined,
          input_fail_on_unmatched_files: false,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        }
      );
    });

    it("parses basic config with commitish", () => {
      assert.deepStrictEqual(
        parseConfig({
          INPUT_TARGET_COMMITISH: "affa18ef97bc9db20076945705aba8c516139abd",
        }),
        {
          github_ref: "",
          github_repository: "",
          github_token: "",
          input_append_body: false,
          input_body: undefined,
          input_body_path: undefined,
          input_draft: undefined,

@@ -130,19 +148,21 @@ describe("util", () => {
          input_tag_name: undefined,
          input_fail_on_unmatched_files: false,
          input_target_commitish: "affa18ef97bc9db20076945705aba8c516139abd",
          input_discussion_category_name: undefined
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        }
      );
    });
    it("supports discussion category names", () => {
      assert.deepStrictEqual(
        parseConfig({
          INPUT_DISCUSSION_CATEGORY_NAME: "releases"
          INPUT_DISCUSSION_CATEGORY_NAME: "releases",
        }),
        {
          github_ref: "",
          github_repository: "",
          github_token: "",
          input_append_body: false,
          input_body: undefined,
          input_body_path: undefined,
          input_draft: undefined,

@@ -152,22 +172,50 @@ describe("util", () => {
          input_tag_name: undefined,
          input_fail_on_unmatched_files: false,
          input_target_commitish: undefined,
          input_discussion_category_name: "releases"
          input_discussion_category_name: "releases",
          input_generate_release_notes: false,
        }
      );
    });

    it("supports generating release notes", () => {
      assert.deepStrictEqual(
        parseConfig({
          INPUT_GENERATE_RELEASE_NOTES: "true",
        }),
        {
          github_ref: "",
          github_repository: "",
          github_token: "",
          input_append_body: false,
          input_body: undefined,
          input_body_path: undefined,
          input_draft: undefined,
          input_prerelease: undefined,
          input_files: [],
          input_name: undefined,
          input_tag_name: undefined,
          input_fail_on_unmatched_files: false,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined,
          input_generate_release_notes: true,
        }
      );
    });

    it("prefers GITHUB_TOKEN over token input for backwards compatibility", () => {
      assert.deepStrictEqual(
        parseConfig({
          INPUT_DRAFT: "false",
          INPUT_PRERELEASE: "true",
          GITHUB_TOKEN: "env-token",
          INPUT_TOKEN: "input-token"
          INPUT_TOKEN: "input-token",
        }),
        {
          github_ref: "",
          github_repository: "",
          github_token: "env-token",
          input_append_body: false,
          input_body: undefined,
          input_body_path: undefined,
          input_draft: false,

@@ -177,7 +225,8 @@ describe("util", () => {
          input_tag_name: undefined,
          input_fail_on_unmatched_files: false,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        }
      );
    });

@@ -186,12 +235,13 @@ describe("util", () => {
        parseConfig({
          INPUT_DRAFT: "false",
          INPUT_PRERELEASE: "true",
          INPUT_TOKEN: "input-token"
          INPUT_TOKEN: "input-token",
        }),
        {
          github_ref: "",
          github_repository: "",
          github_token: "input-token",
          input_append_body: false,
          input_body: undefined,
          input_body_path: undefined,
          input_draft: false,

@@ -201,7 +251,8 @@ describe("util", () => {
          input_tag_name: undefined,
          input_fail_on_unmatched_files: false,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        }
      );
    });

@@ -209,12 +260,13 @@ describe("util", () => {
      assert.deepStrictEqual(
        parseConfig({
          INPUT_DRAFT: "false",
          INPUT_PRERELEASE: "true"
          INPUT_PRERELEASE: "true",
        }),
        {
          github_ref: "",
          github_repository: "",
          github_token: "",
          input_append_body: false,
          input_body: undefined,
          input_body_path: undefined,
          input_draft: false,

@@ -224,7 +276,32 @@ describe("util", () => {
          input_tag_name: undefined,
          input_fail_on_unmatched_files: false,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        }
      );
    });
    it("parses basic config with append_body", () => {
      assert.deepStrictEqual(
        parseConfig({
          INPUT_APPEND_BODY: "true",
        }),
        {
          github_ref: "",
          github_repository: "",
          github_token: "",
          input_append_body: true,
          input_body: undefined,
          input_body_path: undefined,
          input_draft: undefined,
          input_prerelease: undefined,
          input_files: [],
          input_name: undefined,
          input_tag_name: undefined,
          input_fail_on_unmatched_files: false,
          input_target_commitish: undefined,
          input_discussion_category_name: undefined,
          input_generate_release_notes: false,
        }
      );
    });
action.yml (10 changed lines)

@@ -40,6 +40,12 @@ inputs:
  discussion_category_name:
    description: "If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. If there is already a discussion linked to the release, this parameter is ignored."
    required: false
  generate_release_notes:
    description: "Whether to automatically generate the name and body for this release. If name is specified, the specified name will be used; otherwise, a name will be automatically generated. If body is specified, the body will be pre-pended to the automatically generated notes."
    required: false
  append_body:
    description: "Append to existing body instead of overwriting it. Default is false."
    required: false
env:
  "GITHUB_TOKEN": "As provided by Github Actions"
outputs:

@@ -49,8 +55,10 @@ outputs:
    description: "Release ID"
  upload_url:
    description: "URL for uploading assets to the release"
  assets:
    description: "JSON array containing information about each uploaded asset, in the format given [here](https://docs.github.com/en/rest/reference/repos#upload-a-release-asset--code-samples) (minus the `uploader` field)"
runs:
  using: "node12"
  using: "node16"
  main: "dist/index.js"
branding:
  color: "green"
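A brief usage sketch for the `append_body` input declared above (the tag and body text are illustrative): when the release for the tag already exists, the step appends the new body text instead of overwriting the existing body.

```yaml
      - uses: softprops/action-gh-release@v1
        with:
          tag_name: v1.2.3
          append_body: true
          body: "Additional notes appended on re-run"
```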
dist/37.index.js (vendored, new file, 452 changed lines)

@@ -0,0 +1,452 @@
"use strict";
exports.id = 37;
exports.ids = [37];
exports.modules = {

/***/ 4037:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */   "toFormData": () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2777);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8010);


let s = 0;
const S = {
  START_BOUNDARY: s++,
  HEADER_FIELD_START: s++,
  HEADER_FIELD: s++,
  HEADER_VALUE_START: s++,
  HEADER_VALUE: s++,
  HEADER_VALUE_ALMOST_DONE: s++,
  HEADERS_ALMOST_DONE: s++,
  PART_DATA_START: s++,
  PART_DATA: s++,
  END: s++
};

let f = 1;
const F = {
  PART_BOUNDARY: f,
  LAST_BOUNDARY: f *= 2
};

const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;

const lower = c => c | 0x20;

const noop = () => {};

class MultipartParser {
  /**
   * @param {string} boundary
   */
  constructor(boundary) {
    this.index = 0;
    this.flags = 0;

    this.onHeaderEnd = noop;
    this.onHeaderField = noop;
    this.onHeadersEnd = noop;
    this.onHeaderValue = noop;
    this.onPartBegin = noop;
    this.onPartData = noop;
    this.onPartEnd = noop;

    this.boundaryChars = {};

    boundary = '\r\n--' + boundary;
    const ui8a = new Uint8Array(boundary.length);
    for (let i = 0; i < boundary.length; i++) {
      ui8a[i] = boundary.charCodeAt(i);
      this.boundaryChars[ui8a[i]] = true;
    }

    this.boundary = ui8a;
    this.lookbehind = new Uint8Array(this.boundary.length + 8);
    this.state = S.START_BOUNDARY;
  }

  /**
   * @param {Uint8Array} data
   */
  write(data) {
    let i = 0;
    const length_ = data.length;
    let previousIndex = this.index;
    let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
    const boundaryLength = this.boundary.length;
    const boundaryEnd = boundaryLength - 1;
    const bufferLength = data.length;
    let c;
    let cl;

    const mark = name => {
      this[name + 'Mark'] = i;
    };

    const clear = name => {
      delete this[name + 'Mark'];
    };

    const callback = (callbackSymbol, start, end, ui8a) => {
      if (start === undefined || start !== end) {
        this[callbackSymbol](ui8a && ui8a.subarray(start, end));
      }
    };

    const dataCallback = (name, clear) => {
      const markSymbol = name + 'Mark';
      if (!(markSymbol in this)) {
        return;
      }

      if (clear) {
        callback(name, this[markSymbol], i, data);
        delete this[markSymbol];
      } else {
        callback(name, this[markSymbol], data.length, data);
        this[markSymbol] = 0;
      }
    };

    for (i = 0; i < length_; i++) {
      c = data[i];

      switch (state) {
        case S.START_BOUNDARY:
          if (index === boundary.length - 2) {
            if (c === HYPHEN) {
              flags |= F.LAST_BOUNDARY;
            } else if (c !== CR) {
              return;
            }

            index++;
            break;
          } else if (index - 1 === boundary.length - 2) {
            if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
              state = S.END;
              flags = 0;
            } else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
              index = 0;
              callback('onPartBegin');
              state = S.HEADER_FIELD_START;
            } else {
              return;
            }

            break;
          }

          if (c !== boundary[index + 2]) {
            index = -2;
          }

          if (c === boundary[index + 2]) {
            index++;
          }

          break;
        case S.HEADER_FIELD_START:
          state = S.HEADER_FIELD;
          mark('onHeaderField');
          index = 0;
          // falls through
        case S.HEADER_FIELD:
          if (c === CR) {
            clear('onHeaderField');
            state = S.HEADERS_ALMOST_DONE;
            break;
          }

          index++;
          if (c === HYPHEN) {
            break;
          }

          if (c === COLON) {
            if (index === 1) {
              // empty header field
              return;
            }

            dataCallback('onHeaderField', true);
            state = S.HEADER_VALUE_START;
            break;
          }

          cl = lower(c);
          if (cl < A || cl > Z) {
            return;
          }

          break;
        case S.HEADER_VALUE_START:
          if (c === SPACE) {
            break;
          }

          mark('onHeaderValue');
          state = S.HEADER_VALUE;
          // falls through
        case S.HEADER_VALUE:
          if (c === CR) {
            dataCallback('onHeaderValue', true);
            callback('onHeaderEnd');
            state = S.HEADER_VALUE_ALMOST_DONE;
          }

          break;
        case S.HEADER_VALUE_ALMOST_DONE:
          if (c !== LF) {
            return;
          }

          state = S.HEADER_FIELD_START;
          break;
        case S.HEADERS_ALMOST_DONE:
          if (c !== LF) {
            return;
          }

          callback('onHeadersEnd');
          state = S.PART_DATA_START;
          break;
        case S.PART_DATA_START:
          state = S.PART_DATA;
          mark('onPartData');
          // falls through
        case S.PART_DATA:
          previousIndex = index;

          if (index === 0) {
            // boyer-moore derrived algorithm to safely skip non-boundary data
            i += boundaryEnd;
            while (i < bufferLength && !(data[i] in boundaryChars)) {
              i += boundaryLength;
            }

            i -= boundaryEnd;
            c = data[i];
          }

          if (index < boundary.length) {
            if (boundary[index] === c) {
              if (index === 0) {
                dataCallback('onPartData', true);
              }

              index++;
            } else {
              index = 0;
            }
          } else if (index === boundary.length) {
            index++;
            if (c === CR) {
              // CR = part boundary
              flags |= F.PART_BOUNDARY;
            } else if (c === HYPHEN) {
              // HYPHEN = end boundary
              flags |= F.LAST_BOUNDARY;
            } else {
              index = 0;
            }
          } else if (index - 1 === boundary.length) {
            if (flags & F.PART_BOUNDARY) {
              index = 0;
              if (c === LF) {
                // unset the PART_BOUNDARY flag
                flags &= ~F.PART_BOUNDARY;
                callback('onPartEnd');
                callback('onPartBegin');
                state = S.HEADER_FIELD_START;
                break;
              }
            } else if (flags & F.LAST_BOUNDARY) {
              if (c === HYPHEN) {
                callback('onPartEnd');
                state = S.END;
                flags = 0;
              } else {
                index = 0;
              }
            } else {
              index = 0;
            }
          }

          if (index > 0) {
            // when matching a possible boundary, keep a lookbehind reference
            // in case it turns out to be a false lead
            lookbehind[index - 1] = c;
          } else if (previousIndex > 0) {
            // if our boundary turned out to be rubbish, the captured lookbehind
            // belongs to partData
            const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
            callback('onPartData', 0, previousIndex, _lookbehind);
            previousIndex = 0;
            mark('onPartData');

            // reconsider the current character even so it interrupted the sequence
            // it could be the beginning of a new sequence
            i--;
          }

          break;
        case S.END:
          break;
        default:
          throw new Error(`Unexpected state entered: ${state}`);
      }
    }

    dataCallback('onHeaderField');
    dataCallback('onHeaderValue');
    dataCallback('onPartData');

    // Update properties for the next call
    this.index = index;
    this.state = state;
    this.flags = flags;
  }

  end() {
    if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
      (this.state === S.PART_DATA && this.index === this.boundary.length)) {
      this.onPartEnd();
    } else if (this.state !== S.END) {
      throw new Error('MultipartParser.end(): stream ended unexpectedly');
    }
  }
}

function _fileName(headerValue) {
  // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
  const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
  if (!m) {
    return;
  }

  const match = m[2] || m[3] || '';
  let filename = match.slice(match.lastIndexOf('\\') + 1);
  filename = filename.replace(/%22/g, '"');
  filename = filename.replace(/&#(\d{4});/g, (m, code) => {
    return String.fromCharCode(code);
  });
  return filename;
}

async function toFormData(Body, ct) {
  if (!/multipart/i.test(ct)) {
    throw new TypeError('Failed to fetch');
  }

  const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);

  if (!m) {
    throw new TypeError('no or bad content-type header, no multipart boundary');
  }

  const parser = new MultipartParser(m[1] || m[2]);

  let headerField;
  let headerValue;
  let entryValue;
  let entryName;
  let contentType;
  let filename;
  const entryChunks = [];
  const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();

  const onPartData = ui8a => {
    entryValue += decoder.decode(ui8a, {stream: true});
  };

  const appendToFile = ui8a => {
    entryChunks.push(ui8a);
  };

  const appendFileToFormData = () => {
    const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
    formData.append(entryName, file);
  };

  const appendEntryToFormData = () => {
    formData.append(entryName, entryValue);
  };

  const decoder = new TextDecoder('utf-8');
  decoder.decode();

  parser.onPartBegin = function () {
    parser.onPartData = onPartData;
    parser.onPartEnd = appendEntryToFormData;

    headerField = '';
    headerValue = '';
    entryValue = '';
    entryName = '';
    contentType = '';
    filename = null;
    entryChunks.length = 0;
  };

  parser.onHeaderField = function (ui8a) {
    headerField += decoder.decode(ui8a, {stream: true});
  };

  parser.onHeaderValue = function (ui8a) {
    headerValue += decoder.decode(ui8a, {stream: true});
  };

  parser.onHeaderEnd = function () {
    headerValue += decoder.decode();
    headerField = headerField.toLowerCase();

    if (headerField === 'content-disposition') {
      // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
      const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);

      if (m) {
        entryName = m[2] || m[3] || '';
      }

      filename = _fileName(headerValue);

      if (filename) {
        parser.onPartData = appendToFile;
        parser.onPartEnd = appendFileToFormData;
      }
    } else if (headerField === 'content-type') {
      contentType = headerValue;
    }

    headerValue = '';
    headerField = '';
  };

  for await (const chunk of Body) {
    parser.write(chunk);
  }

  parser.end();

  return formData;
}


/***/ })

};
;
dist/index.js (vendored, 8 changed lines)

File diff suppressed because one or more lines are too long
package-lock.json (generated, 10657 changed lines)

File diff suppressed because it is too large
package.json (39 changed lines)

@@ -1,6 +1,6 @@
{
  "name": "action-gh-release",
  "version": "0.1.10",
  "version": "0.1.15",
  "private": true,
  "description": "GitHub Action for creating GitHub Releases",
  "main": "lib/main.js",

@@ -12,34 +12,33 @@
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/softprops/action-gh-template.git"
    "url": "git+https://github.com/softprops/action-gh-release.git"
  },
  "keywords": [
    "actions"
  ],
  "author": "softprops",
  "license": "MIT",
  "dependencies": {
    "@actions/core": "^1.4.0",
    "@actions/github": "^5.0.0",
    "@octokit/plugin-retry": "^3.0.9",
    "@octokit/plugin-throttling": "^3.5.1",
    "glob": "^7.1.6",
    "mime": "^2.4.4",
    "node-fetch": "^2.6.1"
    "@actions/core": "^1.10.0",
    "@actions/github": "^5.1.1",
    "@octokit/plugin-retry": "^4.0.3",
    "@octokit/plugin-throttling": "^4.3.2",
    "glob": "^8.0.3",
    "mime": "^3.0.0",
    "node-fetch": "^2.6.7"
  },
  "devDependencies": {
    "@types/glob": "^7.1.1",
    "@types/jest": "^24.0.25",
    "@types/mime": "^2.0.1",
    "@types/node": "^12.12.24",
    "@types/glob": "^8.0.0",
    "@types/jest": "^29.2.3",
    "@types/mime": "^3.0.1",
    "@types/node": "^18.11.9",
    "@types/node-fetch": "^2.5.12",
    "@zeit/ncc": "^0.21.0",
    "jest": "^24.9.0",
    "jest-circus": "^24.9.0",
    "prettier": "1.19.1",
    "ts-jest": "^24.2.0",
    "typescript": "^3.7.4",
    "@vercel/ncc": "^0.34.0",
    "jest": "^29.3.1",
    "jest-circus": "^29.3.1",
    "prettier": "2.8.0",
    "ts-jest": "^29.0.3",
    "typescript": "^4.9.3",
    "typescript-formatter": "^7.2.2"
  }
}

src/github.ts
@@ -1,7 +1,7 @@
import fetch from "node-fetch";
import { GitHub } from "@actions/github/lib/utils";
import { Config, isTag, releaseBody } from "./util";
import { lstatSync, readFileSync } from "fs";
import { statSync, readFileSync } from "fs";
import { getType } from "mime";
import { basename } from "path";

@@ -44,6 +44,7 @@ export interface Releaser {
    prerelease: boolean | undefined;
    target_commitish: string | undefined;
    discussion_category_name: string | undefined;
    generate_release_notes: boolean | undefined;
  }): Promise<{ data: Release }>;

  updateRelease(params: {

@@ -57,6 +58,7 @@ export interface Releaser {
    draft: boolean | undefined;
    prerelease: boolean | undefined;
    discussion_category_name: string | undefined;
    generate_release_notes: boolean | undefined;
  }): Promise<{ data: Release }>;

  allReleases(params: {

@@ -89,6 +91,7 @@ export class GitHubReleaser implements Releaser {
    prerelease: boolean | undefined;
    target_commitish: string | undefined;
    discussion_category_name: string | undefined;
    generate_release_notes: boolean | undefined;
  }): Promise<{ data: Release }> {
    return this.github.rest.repos.createRelease(params);
  }

@@ -104,6 +107,7 @@ export class GitHubReleaser implements Releaser {
    draft: boolean | undefined;
    prerelease: boolean | undefined;
    discussion_category_name: string | undefined;
    generate_release_notes: boolean | undefined;
  }): Promise<{ data: Release }> {
    return this.github.rest.repos.updateRelease(params);
  }

@@ -123,8 +127,8 @@ export const asset = (path: string): ReleaseAsset => {
  return {
    name: basename(path),
    mime: mimeOrDefault(path),
    size: lstatSync(path).size,
    data: readFileSync(path)
    size: statSync(path).size,
    data: readFileSync(path),
  };
};

@@ -149,7 +153,7 @@ export const upload = async (
      await github.rest.repos.deleteReleaseAsset({
        asset_id: currentAsset.id || 1,
        owner,
        repo
        repo,
      });
    }
    console.log(`⬆️ Uploading ${name}...`);

@@ -159,15 +163,15 @@
    headers: {
      "content-length": `${size}`,
      "content-type": mime,
      authorization: `token ${config.github_token}`
      authorization: `token ${config.github_token}`,
    },
    method: "POST",
    body
    body,
  });
  const json = await resp.json();
  if (resp.status !== 201) {
    throw new Error(
      `Failed to upload release asset ${name}. recieved status code ${
      `Failed to upload release asset ${name}. received status code ${
        resp.status
      }\n${json.message}\n${JSON.stringify(json.errors)}`
    );

@@ -193,15 +197,16 @@ export const release = async (
      : "");

  const discussion_category_name = config.input_discussion_category_name;
  const generate_release_notes = config.input_generate_release_notes;
  try {
    // you can't get a an existing draft by tag
    // so we must find one in the list of all releases
    if (config.input_draft) {
      for await (const response of releaser.allReleases({
        owner,
        repo
        repo,
      })) {
        let release = response.data.find(release => release.tag_name === tag);
        let release = response.data.find((release) => release.tag_name === tag);
        if (release) {
          return release;
        }

@@ -210,7 +215,7 @@
    let existingRelease = await releaser.getReleaseByTag({
      owner,
      repo,
      tag
      tag,
    });

    const release_id = existingRelease.data.id;

@@ -229,12 +234,18 @@

    const tag_name = tag;
    const name = config.input_name || existingRelease.data.name || tag;

    let body: string = "";
    if (existingRelease.data.body) body += existingRelease.data.body;
    let workflowBody = releaseBody(config);
    if (existingRelease.data.body && workflowBody) body += "\n";
    if (workflowBody) body += workflowBody;
    // revisit: support a new body-concat-strategy input for accumulating
    // body parts as a release gets updated. some users will likely want this while
    // others won't previously this was duplicating content for most which
    // no one wants
    const workflowBody = releaseBody(config) || "";
    const existingReleaseBody = existingRelease.data.body || "";
    let body: string;
    if (config.input_append_body && workflowBody && existingReleaseBody) {
      body = existingReleaseBody + "\n" + workflowBody;
    } else {
      body = workflowBody || existingReleaseBody;
    }

    const draft =
      config.input_draft !== undefined

@@ -255,7 +266,8 @@
        body,
        draft,
        prerelease,
        discussion_category_name
        discussion_category_name,
        generate_release_notes,
      });
      return release.data;
    } catch (error) {

@@ -283,7 +295,8 @@
        draft,
        prerelease,
        target_commitish,
        discussion_category_name
        discussion_category_name,
        generate_release_notes,
      });
      return release.data;
    } catch (error) {

@@ -291,7 +304,9 @@
      console.log(
        `⚠️ GitHub release failed with status: ${
          error.status
        }, retrying... (${maxRetries - 1} retries remaining)`
        }\n${JSON.stringify(error.response.data.errors)}\nretrying... (${
          maxRetries - 1
        } retries remaining)`
      );
      return release(config, releaser, maxRetries - 1);
    }
src/main.ts (23 changed lines)

@@ -3,7 +3,7 @@ import {
  parseConfig,
  isTag,
  unmatchedPatterns,
  uploadUrl
  uploadUrl,
} from "./util";
import { release, upload, GitHubReleaser } from "./github";
import { getOctokit } from "@actions/github";

@@ -24,7 +24,7 @@ async function run() {
    }
    if (config.input_files) {
      const patterns = unmatchedPatterns(config.input_files);
      patterns.forEach(pattern =>
      patterns.forEach((pattern) =>
        console.warn(`🤔 Pattern '${pattern}' does not match any files.`)
      );
      if (patterns.length > 0 && config.input_fail_on_unmatched_files) {

@@ -55,8 +55,8 @@ async function run() {
            console.warn(
              `Abuse detected for request ${options.method} ${options.url}`
            );
          }
        }
      },
      },
    });
    //);
    const rel = await release(config, new GitHubReleaser(gh));

@@ -65,20 +65,23 @@ async function run() {
      if (files.length == 0) {
        console.warn(`🤔 ${config.input_files} not include valid file.`);
      }
      const currentAsserts = rel.assets;
      await Promise.all(
        files.map(async path => {
          await upload(
      const currentAssets = rel.assets;
      const assets = await Promise.all(
        files.map(async (path) => {
          const json = await upload(
            config,
            gh,
            uploadUrl(rel.upload_url),
            path,
            currentAsserts
            currentAssets
          );
          delete json.uploader;
          return json;
        })
      ).catch(error => {
      ).catch((error) => {
        throw error;
      });
      setOutput("assets", assets);
    }
    console.log(`🎉 Release ready at ${rel.html_url}`);
    setOutput("url", rel.html_url);
src/util.ts (19 changed lines)

@@ -1,5 +1,5 @@
import * as glob from "glob";
import { lstatSync, readFileSync } from "fs";
import { statSync, readFileSync } from "fs";

export interface Config {
  github_token: string;

@@ -17,6 +17,8 @@ export interface Config {
  input_fail_on_unmatched_files?: boolean;
  input_target_commitish?: string;
  input_discussion_category_name?: string;
  input_generate_release_notes?: boolean;
  input_append_body?: boolean;
}

export const uploadUrl = (url: string): string => {

@@ -42,8 +44,8 @@ export const parseInputFiles = (files: string): string[] => {
    (acc, line) =>
      acc
        .concat(line.split(","))
        .filter(pat => pat)
        .map(pat => pat.trim()),
        .filter((pat) => pat)
        .map((pat) => pat.trim()),
    []
  );
};

@@ -63,15 +65,18 @@ export const parseConfig = (env: Env): Config => {
      ? env.INPUT_PRERELEASE == "true"
      : undefined,
    input_fail_on_unmatched_files: env.INPUT_FAIL_ON_UNMATCHED_FILES == "true",
    input_target_commitish: env.INPUT_TARGET_COMMITISH,
    input_discussion_category_name: env.INPUT_DISCUSSION_CATEGORY_NAME
    input_target_commitish: env.INPUT_TARGET_COMMITISH || undefined,
    input_discussion_category_name:
      env.INPUT_DISCUSSION_CATEGORY_NAME || undefined,
    input_generate_release_notes: env.INPUT_GENERATE_RELEASE_NOTES == "true",
    input_append_body: env.INPUT_APPEND_BODY == "true",
  };
};

export const paths = (patterns: string[]): string[] => {
  return patterns.reduce((acc: string[], pattern: string): string[] => {
    return acc.concat(
      glob.sync(pattern).filter(path => lstatSync(path).isFile())
      glob.sync(pattern).filter((path) => statSync(path).isFile())
    );
  }, []);
};

@@ -79,7 +84,7 @@ export const paths = (patterns: string[]): string[] => {
export const unmatchedPatterns = (patterns: string[]): string[] => {
  return patterns.reduce((acc: string[], pattern: string): string[] => {
    return acc.concat(
      glob.sync(pattern).filter(path => lstatSync(path).isFile()).length == 0
      glob.sync(pattern).filter((path) => statSync(path).isFile()).length == 0
        ? [pattern]
        : []
    );

tsconfig.json
@@ -1,5 +1,6 @@
{
  "compilerOptions": {
    "useUnknownInCatchVariables": false,
    /* Basic Options */
    // "incremental": true, /* Enable incremental compilation */
    "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */