Compare commits


7 commits

Author     SHA1        Message                        Date
softprops  e6268c631a  error is of unknown structure  2021-08-09 09:19:25 -04:00
softprops  8988630456  error is of unknown structure  2021-08-09 00:20:24 -04:00
softprops  c1b107442c  error is of unknown structure  2021-08-09 00:18:15 -04:00
softprops  4c8c431191  error is of unknown structure  2021-08-09 00:16:19 -04:00
softprops  1505034bb0  error is of unknown structure  2021-08-09 00:14:58 -04:00
softprops  21e9098c3b  more error info                2021-08-09 00:12:44 -04:00
softprops  26941a6e6b  debug cross repo               2021-08-09 00:05:07 -04:00
15 changed files with 4297 additions and 7261 deletions


@@ -1,14 +0,0 @@
-version: 2
-updates:
-- package-ecosystem: npm
-directory: "/"
-schedule:
-interval: weekly
-ignore:
-- dependency-name: node-fetch
-versions:
-- ">=3.0.0"
-- package-ecosystem: github-actions
-directory: "/"
-schedule:
-interval: weekly


@@ -8,7 +8,7 @@ jobs:
 steps:
 # https://github.com/actions/checkout
 - name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v2
 - name: Install
 run: npm ci
 - name: Build
@@ -23,4 +23,4 @@ jobs:
 # git diff --exit-code --stat -- . ':!node_modules' \
 # || (echo "##[error] found changed files after build. please 'npm run build && npm run fmt'" \
 # "and check in all changes" \
 # && exit 1)

.nvmrc (1 changed line)

@@ -1 +0,0 @@
-16.13.1


@@ -1,25 +1,3 @@
-## 0.1.15
-- Upgrade to action.yml declaration to node16 to address deprecations
-- Upgrade dependencies
-- Add `asset` output as a JSON array containing information about the uploaded assets
-## 0.1.14
-- provides an new workflow input option `generate_release_notes` which when set to true will automatically generate release notes for you based on GitHub activity [#179](https://github.com/softprops/action-gh-release/pull/179). Please see the [GitHub docs for this feature](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes) for more information
-## 0.1.13
-- fix issue with multiple runs concatenating release bodies [#145](https://github.com/softprops/action-gh-release/pull/145)
-## 0.1.12
-- fix bug leading to empty strings subsituted for inputs users don't provide breaking api calls [#144](https://github.com/softprops/action-gh-release/pull/144)
-## 0.1.11
-- better error message on release create failed [#143](https://github.com/softprops/action-gh-release/pull/143)
 ## 0.1.10
 - fixed error message formatting for file uploads


@@ -41,7 +41,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v2
 - name: Release
 uses: softprops/action-gh-release@v1
 if: startsWith(github.ref, 'refs/tags/')
@@ -62,7 +62,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v2
 - name: Release
 uses: softprops/action-gh-release@v1
 ```
@@ -88,7 +88,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v2
 - name: Build
 run: echo ${{ github.sha }} > Release.txt
 - name: Test
@@ -112,7 +112,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v2
 - name: Build
 run: echo ${{ github.sha }} > Release.txt
 - name: Test
@@ -144,7 +144,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout
-uses: actions/checkout@v3
+uses: actions/checkout@v2
 - name: Generate Changelog
 run: echo "# Good things have arrived" > ${{ github.workspace }}-CHANGELOG.txt
 - name: Release
@@ -165,22 +165,20 @@ jobs:
 The following are optional as `step.with` keys
 | Name | Type | Description |
-| -------------------------- | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| -------------------------- | ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
 | `body` | String | Text communicating notable changes in this release |
 | `body_path` | String | Path to load text communicating notable changes in this release |
 | `draft` | Boolean | Indicator of whether or not this release is a draft |
 | `prerelease` | Boolean | Indicator of whether or not is a prerelease |
 | `files` | String | Newline-delimited globs of paths to assets to upload for release |
 | `name` | String | Name of the release. defaults to tag name |
 | `tag_name` | String | Name of a tag. defaults to `github.ref` |
 | `fail_on_unmatched_files` | Boolean | Indicator of whether to fail if any of the `files` globs match nothing |
 | `repository` | String | Name of a target repository in `<owner>/<repo>` format. Defaults to GITHUB_REPOSITORY env variable |
-| `target_commitish` | String | Commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Defaults to repository default branch. |
+| `target_commitish` | String | Commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. |
 | `token` | String | Secret GitHub Personal Access Token. Defaults to `${{ github.token }}` |
 | `discussion_category_name` | String | If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. For more information, see ["Managing categories for discussions in your repository."](https://docs.github.com/en/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository) |
-| `generate_release_notes` | Boolean | Whether to automatically generate the name and body for this release. If name is specified, the specified name will be used; otherwise, a name will be automatically generated. If body is specified, the body will be pre-pended to the automatically generated notes. See the [GitHub docs for this feature](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes) for more information |
-| `append_body` | Boolean | Append to existing body instead of overwriting it |
 💡 When providing a `body` and `body_path` at the same time, `body_path` will be
 attempted first, then falling back on `body` if the path can not be read from.
@@ -193,14 +191,11 @@ release will retain its original info.
 The following outputs can be accessed via `${{ steps.<step-id>.outputs }}` from this action
 | Name | Type | Description |
-| ------------ | ------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| ------------ | ------ | --------------------------------------- |
 | `url` | String | Github.com URL for the release |
 | `id` | String | Release ID |
 | `upload_url` | String | URL for uploading assets to the release |
-| `assets` | String | JSON array containing information about each uploaded asset, in the format given [here](https://docs.github.com/en/rest/releases/assets#get-a-release-asset) (minus the `uploader` field) |
-As an example, you can use `${{ fromJSON(steps.<step-id>.outputs.assets)[0].browser_download_url }}` to get the download URL of the first asset.
 #### environment variables
@@ -213,23 +208,4 @@ The following `step.env` keys are allowed as a fallback but deprecated in favor
 > **⚠️ Note:** This action was previously implemented as a Docker container, limiting its use to GitHub Actions Linux virtual environments only. With recent releases, we now support cross platform usage. You'll need to remove the `docker://` prefix in these versions
-### Permissions
-This Action requires the following permissions on the GitHub integration token:
-```yaml
-permissions:
-contents: write
-```
-When used with `discussion_category_name`, additional permission is needed:
-```yaml
-permissions:
-contents: write
-discussions: write
-```
-[GitHub token permissions](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token) can be set for an individual job, workflow, or for Actions as a whole.
 Doug Tangren (softprops) 2019


@ -5,13 +5,13 @@ import {
parseConfig, parseConfig,
parseInputFiles, parseInputFiles,
unmatchedPatterns, unmatchedPatterns,
uploadUrl, uploadUrl
} from "../src/util"; } from "../src/util";
import * as assert from "assert"; import * as assert from "assert";
describe("util", () => { describe("util", () => {
describe("uploadUrl", () => { describe("uploadUrl", () => {
it("strips template", () => { it("stripts template", () => {
assert.equal( assert.equal(
uploadUrl( uploadUrl(
"https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}" "https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}"
@ -50,8 +50,7 @@ describe("util", () => {
input_name: undefined, input_name: undefined,
input_tag_name: undefined, input_tag_name: undefined,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined
input_generate_release_notes: false,
}) })
); );
}); });
@ -70,8 +69,7 @@ describe("util", () => {
input_name: undefined, input_name: undefined,
input_tag_name: undefined, input_tag_name: undefined,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined
input_generate_release_notes: false,
}) })
); );
}); });
@ -90,55 +88,39 @@ describe("util", () => {
input_name: undefined, input_name: undefined,
input_tag_name: undefined, input_tag_name: undefined,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined
input_generate_release_notes: false,
}) })
); );
}); });
}); });
describe("parseConfig", () => { describe("parseConfig", () => {
it("parses basic config", () => { it("parses basic config", () => {
assert.deepStrictEqual( assert.deepStrictEqual(parseConfig({}), {
parseConfig({ github_ref: "",
// note: inputs declared in actions.yml, even when declared not required, github_repository: "",
// are still provided by the actions runtime env as empty strings instead of github_token: "",
// the normal absent env value one would expect. this breaks things input_body: undefined,
// as an empty string !== undefined in terms of what we pass to the api input_body_path: undefined,
// so we cover that in a test case here to ensure undefined values are actually input_draft: undefined,
// resolved as undefined and not empty strings input_prerelease: undefined,
INPUT_TARGET_COMMITISH: "", input_files: [],
INPUT_DISCUSSION_CATEGORY_NAME: "", input_name: undefined,
}), input_tag_name: undefined,
{ input_fail_on_unmatched_files: false,
github_ref: "", input_target_commitish: undefined,
github_repository: "", input_discussion_category_name: undefined
github_token: "", });
input_append_body: false,
input_body: undefined,
input_body_path: undefined,
input_draft: undefined,
input_prerelease: undefined,
input_files: [],
input_name: undefined,
input_tag_name: undefined,
input_fail_on_unmatched_files: false,
input_target_commitish: undefined,
input_discussion_category_name: undefined,
input_generate_release_notes: false,
}
);
}); });
it("parses basic config with commitish", () => { it("parses basic config with commitish", () => {
assert.deepStrictEqual( assert.deepStrictEqual(
parseConfig({ parseConfig({
INPUT_TARGET_COMMITISH: "affa18ef97bc9db20076945705aba8c516139abd", INPUT_TARGET_COMMITISH: "affa18ef97bc9db20076945705aba8c516139abd"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "", github_token: "",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: undefined, input_draft: undefined,
@ -148,21 +130,19 @@ describe("util", () => {
input_tag_name: undefined, input_tag_name: undefined,
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: "affa18ef97bc9db20076945705aba8c516139abd", input_target_commitish: "affa18ef97bc9db20076945705aba8c516139abd",
input_discussion_category_name: undefined, input_discussion_category_name: undefined
input_generate_release_notes: false,
} }
); );
}); });
it("supports discussion category names", () => { it("supports discussion category names", () => {
assert.deepStrictEqual( assert.deepStrictEqual(
parseConfig({ parseConfig({
INPUT_DISCUSSION_CATEGORY_NAME: "releases", INPUT_DISCUSSION_CATEGORY_NAME: "releases"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "", github_token: "",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: undefined, input_draft: undefined,
@ -172,50 +152,22 @@ describe("util", () => {
input_tag_name: undefined, input_tag_name: undefined,
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: "releases", input_discussion_category_name: "releases"
input_generate_release_notes: false,
} }
); );
}); });
it("supports generating release notes", () => {
assert.deepStrictEqual(
parseConfig({
INPUT_GENERATE_RELEASE_NOTES: "true",
}),
{
github_ref: "",
github_repository: "",
github_token: "",
input_append_body: false,
input_body: undefined,
input_body_path: undefined,
input_draft: undefined,
input_prerelease: undefined,
input_files: [],
input_name: undefined,
input_tag_name: undefined,
input_fail_on_unmatched_files: false,
input_target_commitish: undefined,
input_discussion_category_name: undefined,
input_generate_release_notes: true,
}
);
});
it("prefers GITHUB_TOKEN over token input for backwards compatibility", () => { it("prefers GITHUB_TOKEN over token input for backwards compatibility", () => {
assert.deepStrictEqual( assert.deepStrictEqual(
parseConfig({ parseConfig({
INPUT_DRAFT: "false", INPUT_DRAFT: "false",
INPUT_PRERELEASE: "true", INPUT_PRERELEASE: "true",
GITHUB_TOKEN: "env-token", GITHUB_TOKEN: "env-token",
INPUT_TOKEN: "input-token", INPUT_TOKEN: "input-token"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "env-token", github_token: "env-token",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: false, input_draft: false,
@ -225,8 +177,7 @@ describe("util", () => {
input_tag_name: undefined, input_tag_name: undefined,
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined
input_generate_release_notes: false,
} }
); );
}); });
@ -235,13 +186,12 @@ describe("util", () => {
parseConfig({ parseConfig({
INPUT_DRAFT: "false", INPUT_DRAFT: "false",
INPUT_PRERELEASE: "true", INPUT_PRERELEASE: "true",
INPUT_TOKEN: "input-token", INPUT_TOKEN: "input-token"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "input-token", github_token: "input-token",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: false, input_draft: false,
@ -251,8 +201,7 @@ describe("util", () => {
input_tag_name: undefined, input_tag_name: undefined,
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined
input_generate_release_notes: false,
} }
); );
}); });
@ -260,13 +209,12 @@ describe("util", () => {
assert.deepStrictEqual( assert.deepStrictEqual(
parseConfig({ parseConfig({
INPUT_DRAFT: "false", INPUT_DRAFT: "false",
INPUT_PRERELEASE: "true", INPUT_PRERELEASE: "true"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "", github_token: "",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: false, input_draft: false,
@ -276,32 +224,7 @@ describe("util", () => {
input_tag_name: undefined, input_tag_name: undefined,
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined
input_generate_release_notes: false,
}
);
});
it("parses basic config with append_body", () => {
assert.deepStrictEqual(
parseConfig({
INPUT_APPEND_BODY: "true",
}),
{
github_ref: "",
github_repository: "",
github_token: "",
input_append_body: true,
input_body: undefined,
input_body_path: undefined,
input_draft: undefined,
input_prerelease: undefined,
input_files: [],
input_name: undefined,
input_tag_name: undefined,
input_fail_on_unmatched_files: false,
input_target_commitish: undefined,
input_discussion_category_name: undefined,
input_generate_release_notes: false,
} }
); );
}); });


@@ -40,12 +40,6 @@ inputs:
 discussion_category_name:
 description: "If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. If there is already a discussion linked to the release, this parameter is ignored."
 required: false
-generate_release_notes:
-description: "Whether to automatically generate the name and body for this release. If name is specified, the specified name will be used; otherwise, a name will be automatically generated. If body is specified, the body will be pre-pended to the automatically generated notes."
-required: false
-append_body:
-description: "Append to existing body instead of overwriting it. Default is false."
-required: false
 env:
 "GITHUB_TOKEN": "As provided by Github Actions"
 outputs:
@@ -55,10 +49,8 @@ outputs:
 description: "Release ID"
 upload_url:
 description: "URL for uploading assets to the release"
-assets:
-description: "JSON array containing information about each uploaded asset, in the format given [here](https://docs.github.com/en/rest/reference/repos#upload-a-release-asset--code-samples) (minus the `uploader` field)"
 runs:
-using: "node16"
+using: "node12"
 main: "dist/index.js"
 branding:
 color: "green"

dist/37.index.js vendored (452 changed lines)

@ -1,452 +0,0 @@
"use strict";
exports.id = 37;
exports.ids = [37];
exports.modules = {
/***/ 4037:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2777);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8010);
let s = 0;
const S = {
START_BOUNDARY: s++,
HEADER_FIELD_START: s++,
HEADER_FIELD: s++,
HEADER_VALUE_START: s++,
HEADER_VALUE: s++,
HEADER_VALUE_ALMOST_DONE: s++,
HEADERS_ALMOST_DONE: s++,
PART_DATA_START: s++,
PART_DATA: s++,
END: s++
};
let f = 1;
const F = {
PART_BOUNDARY: f,
LAST_BOUNDARY: f *= 2
};
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;
const lower = c => c | 0x20;
const noop = () => {};
class MultipartParser {
/**
* @param {string} boundary
*/
constructor(boundary) {
this.index = 0;
this.flags = 0;
this.onHeaderEnd = noop;
this.onHeaderField = noop;
this.onHeadersEnd = noop;
this.onHeaderValue = noop;
this.onPartBegin = noop;
this.onPartData = noop;
this.onPartEnd = noop;
this.boundaryChars = {};
boundary = '\r\n--' + boundary;
const ui8a = new Uint8Array(boundary.length);
for (let i = 0; i < boundary.length; i++) {
ui8a[i] = boundary.charCodeAt(i);
this.boundaryChars[ui8a[i]] = true;
}
this.boundary = ui8a;
this.lookbehind = new Uint8Array(this.boundary.length + 8);
this.state = S.START_BOUNDARY;
}
/**
* @param {Uint8Array} data
*/
write(data) {
let i = 0;
const length_ = data.length;
let previousIndex = this.index;
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
const boundaryLength = this.boundary.length;
const boundaryEnd = boundaryLength - 1;
const bufferLength = data.length;
let c;
let cl;
const mark = name => {
this[name + 'Mark'] = i;
};
const clear = name => {
delete this[name + 'Mark'];
};
const callback = (callbackSymbol, start, end, ui8a) => {
if (start === undefined || start !== end) {
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
}
};
const dataCallback = (name, clear) => {
const markSymbol = name + 'Mark';
if (!(markSymbol in this)) {
return;
}
if (clear) {
callback(name, this[markSymbol], i, data);
delete this[markSymbol];
} else {
callback(name, this[markSymbol], data.length, data);
this[markSymbol] = 0;
}
};
for (i = 0; i < length_; i++) {
c = data[i];
switch (state) {
case S.START_BOUNDARY:
if (index === boundary.length - 2) {
if (c === HYPHEN) {
flags |= F.LAST_BOUNDARY;
} else if (c !== CR) {
return;
}
index++;
break;
} else if (index - 1 === boundary.length - 2) {
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
state = S.END;
flags = 0;
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
index = 0;
callback('onPartBegin');
state = S.HEADER_FIELD_START;
} else {
return;
}
break;
}
if (c !== boundary[index + 2]) {
index = -2;
}
if (c === boundary[index + 2]) {
index++;
}
break;
case S.HEADER_FIELD_START:
state = S.HEADER_FIELD;
mark('onHeaderField');
index = 0;
// falls through
case S.HEADER_FIELD:
if (c === CR) {
clear('onHeaderField');
state = S.HEADERS_ALMOST_DONE;
break;
}
index++;
if (c === HYPHEN) {
break;
}
if (c === COLON) {
if (index === 1) {
// empty header field
return;
}
dataCallback('onHeaderField', true);
state = S.HEADER_VALUE_START;
break;
}
cl = lower(c);
if (cl < A || cl > Z) {
return;
}
break;
case S.HEADER_VALUE_START:
if (c === SPACE) {
break;
}
mark('onHeaderValue');
state = S.HEADER_VALUE;
// falls through
case S.HEADER_VALUE:
if (c === CR) {
dataCallback('onHeaderValue', true);
callback('onHeaderEnd');
state = S.HEADER_VALUE_ALMOST_DONE;
}
break;
case S.HEADER_VALUE_ALMOST_DONE:
if (c !== LF) {
return;
}
state = S.HEADER_FIELD_START;
break;
case S.HEADERS_ALMOST_DONE:
if (c !== LF) {
return;
}
callback('onHeadersEnd');
state = S.PART_DATA_START;
break;
case S.PART_DATA_START:
state = S.PART_DATA;
mark('onPartData');
// falls through
case S.PART_DATA:
previousIndex = index;
if (index === 0) {
// boyer-moore derrived algorithm to safely skip non-boundary data
i += boundaryEnd;
while (i < bufferLength && !(data[i] in boundaryChars)) {
i += boundaryLength;
}
i -= boundaryEnd;
c = data[i];
}
if (index < boundary.length) {
if (boundary[index] === c) {
if (index === 0) {
dataCallback('onPartData', true);
}
index++;
} else {
index = 0;
}
} else if (index === boundary.length) {
index++;
if (c === CR) {
// CR = part boundary
flags |= F.PART_BOUNDARY;
} else if (c === HYPHEN) {
// HYPHEN = end boundary
flags |= F.LAST_BOUNDARY;
} else {
index = 0;
}
} else if (index - 1 === boundary.length) {
if (flags & F.PART_BOUNDARY) {
index = 0;
if (c === LF) {
// unset the PART_BOUNDARY flag
flags &= ~F.PART_BOUNDARY;
callback('onPartEnd');
callback('onPartBegin');
state = S.HEADER_FIELD_START;
break;
}
} else if (flags & F.LAST_BOUNDARY) {
if (c === HYPHEN) {
callback('onPartEnd');
state = S.END;
flags = 0;
} else {
index = 0;
}
} else {
index = 0;
}
}
if (index > 0) {
// when matching a possible boundary, keep a lookbehind reference
// in case it turns out to be a false lead
lookbehind[index - 1] = c;
} else if (previousIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
callback('onPartData', 0, previousIndex, _lookbehind);
previousIndex = 0;
mark('onPartData');
// reconsider the current character even so it interrupted the sequence
// it could be the beginning of a new sequence
i--;
}
break;
case S.END:
break;
default:
throw new Error(`Unexpected state entered: ${state}`);
}
}
dataCallback('onHeaderField');
dataCallback('onHeaderValue');
dataCallback('onPartData');
// Update properties for the next call
this.index = index;
this.state = state;
this.flags = flags;
}
end() {
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
this.onPartEnd();
} else if (this.state !== S.END) {
throw new Error('MultipartParser.end(): stream ended unexpectedly');
}
}
}
function _fileName(headerValue) {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
if (!m) {
return;
}
const match = m[2] || m[3] || '';
let filename = match.slice(match.lastIndexOf('\\') + 1);
filename = filename.replace(/%22/g, '"');
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
return String.fromCharCode(code);
});
return filename;
}
async function toFormData(Body, ct) {
if (!/multipart/i.test(ct)) {
throw new TypeError('Failed to fetch');
}
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
if (!m) {
throw new TypeError('no or bad content-type header, no multipart boundary');
}
const parser = new MultipartParser(m[1] || m[2]);
let headerField;
let headerValue;
let entryValue;
let entryName;
let contentType;
let filename;
const entryChunks = [];
const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();
const onPartData = ui8a => {
entryValue += decoder.decode(ui8a, {stream: true});
};
const appendToFile = ui8a => {
entryChunks.push(ui8a);
};
const appendFileToFormData = () => {
const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
formData.append(entryName, file);
};
const appendEntryToFormData = () => {
formData.append(entryName, entryValue);
};
const decoder = new TextDecoder('utf-8');
decoder.decode();
parser.onPartBegin = function () {
parser.onPartData = onPartData;
parser.onPartEnd = appendEntryToFormData;
headerField = '';
headerValue = '';
entryValue = '';
entryName = '';
contentType = '';
filename = null;
entryChunks.length = 0;
};
parser.onHeaderField = function (ui8a) {
headerField += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderValue = function (ui8a) {
headerValue += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderEnd = function () {
headerValue += decoder.decode();
headerField = headerField.toLowerCase();
if (headerField === 'content-disposition') {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
if (m) {
entryName = m[2] || m[3] || '';
}
filename = _fileName(headerValue);
if (filename) {
parser.onPartData = appendToFile;
parser.onPartEnd = appendFileToFormData;
}
} else if (headerField === 'content-type') {
contentType = headerValue;
}
headerValue = '';
headerField = '';
};
for await (const chunk of Body) {
parser.write(chunk);
}
parser.end();
return formData;
}
/***/ })
};
;

dist/index.js vendored (8 changed lines)

File diff suppressed because one or more lines are too long

package-lock.json generated (10699 changed lines)

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
 {
 "name": "action-gh-release",
-"version": "0.1.15",
+"version": "0.1.10",
 "private": true,
 "description": "GitHub Action for creating GitHub Releases",
 "main": "lib/main.js",
@@ -12,33 +12,34 @@
 },
 "repository": {
 "type": "git",
-"url": "git+https://github.com/softprops/action-gh-release.git"
+"url": "git+https://github.com/softprops/action-gh-template.git"
 },
 "keywords": [
 "actions"
 ],
 "author": "softprops",
+"license": "MIT",
 "dependencies": {
-"@actions/core": "^1.10.0",
-"@actions/github": "^5.1.1",
-"@octokit/plugin-retry": "^4.0.3",
-"@octokit/plugin-throttling": "^4.3.2",
-"glob": "^8.0.3",
-"mime": "^3.0.0",
-"node-fetch": "^2.6.7"
+"@actions/core": "^1.4.0",
+"@actions/github": "^5.0.0",
+"@octokit/plugin-retry": "^3.0.9",
+"@octokit/plugin-throttling": "^3.5.1",
+"glob": "^7.1.6",
+"mime": "^2.4.4",
+"node-fetch": "^2.6.1"
 },
 "devDependencies": {
-"@types/glob": "^8.0.0",
-"@types/jest": "^29.2.3",
-"@types/mime": "^3.0.1",
-"@types/node": "^18.11.9",
+"@types/glob": "^7.1.1",
+"@types/jest": "^24.0.25",
+"@types/mime": "^2.0.1",
+"@types/node": "^12.12.24",
 "@types/node-fetch": "^2.5.12",
-"@vercel/ncc": "^0.34.0",
-"jest": "^29.3.1",
-"jest-circus": "^29.3.1",
-"prettier": "2.8.0",
-"ts-jest": "^29.0.3",
-"typescript": "^4.9.3",
+"@zeit/ncc": "^0.21.0",
+"jest": "^24.9.0",
+"jest-circus": "^24.9.0",
+"prettier": "1.19.1",
+"ts-jest": "^24.2.0",
+"typescript": "^3.7.4",
 "typescript-formatter": "^7.2.2"
 }
 }


@ -1,7 +1,7 @@
import fetch from "node-fetch"; import fetch from "node-fetch";
import { GitHub } from "@actions/github/lib/utils"; import { GitHub } from "@actions/github/lib/utils";
import { Config, isTag, releaseBody } from "./util"; import { Config, isTag, releaseBody } from "./util";
import { statSync, readFileSync } from "fs"; import { lstatSync, readFileSync } from "fs";
import { getType } from "mime"; import { getType } from "mime";
import { basename } from "path"; import { basename } from "path";
@ -44,7 +44,6 @@ export interface Releaser {
prerelease: boolean | undefined; prerelease: boolean | undefined;
target_commitish: string | undefined; target_commitish: string | undefined;
discussion_category_name: string | undefined; discussion_category_name: string | undefined;
generate_release_notes: boolean | undefined;
}): Promise<{ data: Release }>; }): Promise<{ data: Release }>;
updateRelease(params: { updateRelease(params: {
@ -58,7 +57,6 @@ export interface Releaser {
draft: boolean | undefined; draft: boolean | undefined;
prerelease: boolean | undefined; prerelease: boolean | undefined;
discussion_category_name: string | undefined; discussion_category_name: string | undefined;
generate_release_notes: boolean | undefined;
}): Promise<{ data: Release }>; }): Promise<{ data: Release }>;
allReleases(params: { allReleases(params: {
@ -91,7 +89,6 @@ export class GitHubReleaser implements Releaser {
prerelease: boolean | undefined; prerelease: boolean | undefined;
target_commitish: string | undefined; target_commitish: string | undefined;
discussion_category_name: string | undefined; discussion_category_name: string | undefined;
generate_release_notes: boolean | undefined;
}): Promise<{ data: Release }> { }): Promise<{ data: Release }> {
return this.github.rest.repos.createRelease(params); return this.github.rest.repos.createRelease(params);
} }
@ -107,7 +104,6 @@ export class GitHubReleaser implements Releaser {
draft: boolean | undefined; draft: boolean | undefined;
prerelease: boolean | undefined; prerelease: boolean | undefined;
discussion_category_name: string | undefined; discussion_category_name: string | undefined;
generate_release_notes: boolean | undefined;
}): Promise<{ data: Release }> { }): Promise<{ data: Release }> {
return this.github.rest.repos.updateRelease(params); return this.github.rest.repos.updateRelease(params);
} }
@ -127,8 +123,8 @@ export const asset = (path: string): ReleaseAsset => {
return { return {
name: basename(path), name: basename(path),
mime: mimeOrDefault(path), mime: mimeOrDefault(path),
size: statSync(path).size, size: lstatSync(path).size,
data: readFileSync(path), data: readFileSync(path)
}; };
}; };
@ -153,7 +149,7 @@ export const upload = async (
await github.rest.repos.deleteReleaseAsset({ await github.rest.repos.deleteReleaseAsset({
asset_id: currentAsset.id || 1, asset_id: currentAsset.id || 1,
owner, owner,
repo, repo
}); });
} }
console.log(`⬆️ Uploading ${name}...`); console.log(`⬆️ Uploading ${name}...`);
@ -163,15 +159,15 @@ export const upload = async (
headers: { headers: {
"content-length": `${size}`, "content-length": `${size}`,
"content-type": mime, "content-type": mime,
authorization: `token ${config.github_token}`, authorization: `token ${config.github_token}`
}, },
method: "POST", method: "POST",
body, body
}); });
const json = await resp.json(); const json = await resp.json();
if (resp.status !== 201) { if (resp.status !== 201) {
throw new Error( throw new Error(
`Failed to upload release asset ${name}. received status code ${ `Failed to upload release asset ${name}. recieved status code ${
resp.status resp.status
}\n${json.message}\n${JSON.stringify(json.errors)}` }\n${json.message}\n${JSON.stringify(json.errors)}`
); );
@ -197,16 +193,15 @@ export const release = async (
: ""); : "");
const discussion_category_name = config.input_discussion_category_name; const discussion_category_name = config.input_discussion_category_name;
const generate_release_notes = config.input_generate_release_notes;
try { try {
// you can't get a an existing draft by tag // you can't get a an existing draft by tag
// so we must find one in the list of all releases // so we must find one in the list of all releases
if (config.input_draft) { if (config.input_draft) {
for await (const response of releaser.allReleases({ for await (const response of releaser.allReleases({
owner, owner,
repo, repo
})) { })) {
let release = response.data.find((release) => release.tag_name === tag); let release = response.data.find(release => release.tag_name === tag);
if (release) { if (release) {
return release; return release;
} }
@ -215,7 +210,7 @@ export const release = async (
let existingRelease = await releaser.getReleaseByTag({ let existingRelease = await releaser.getReleaseByTag({
owner, owner,
repo, repo,
tag, tag
}); });
const release_id = existingRelease.data.id; const release_id = existingRelease.data.id;
@ -234,18 +229,12 @@ export const release = async (
const tag_name = tag; const tag_name = tag;
const name = config.input_name || existingRelease.data.name || tag; const name = config.input_name || existingRelease.data.name || tag;
// revisit: support a new body-concat-strategy input for accumulating
// body parts as a release gets updated. some users will likely want this while let body: string = "";
// others won't previously this was duplicating content for most which if (existingRelease.data.body) body += existingRelease.data.body;
// no one wants let workflowBody = releaseBody(config);
const workflowBody = releaseBody(config) || ""; if (existingRelease.data.body && workflowBody) body += "\n";
const existingReleaseBody = existingRelease.data.body || ""; if (workflowBody) body += workflowBody;
let body: string;
if (config.input_append_body && workflowBody && existingReleaseBody) {
body = existingReleaseBody + "\n" + workflowBody;
} else {
body = workflowBody || existingReleaseBody;
}
const draft = const draft =
config.input_draft !== undefined config.input_draft !== undefined
@ -266,8 +255,7 @@ export const release = async (
body, body,
draft, draft,
prerelease, prerelease,
discussion_category_name, discussion_category_name
generate_release_notes,
}); });
return release.data; return release.data;
} catch (error) { } catch (error) {
@ -283,7 +271,7 @@ export const release = async (
commitMessage = ` using commit "${target_commitish}"`; commitMessage = ` using commit "${target_commitish}"`;
} }
console.log( console.log(
`👩‍🏭 Creating new GitHub release for tag ${tag_name}${commitMessage}...` `👩‍🏭 Creating new GitHub release in ${owner}/${repo} for tag ${tag_name}${commitMessage}...`
); );
try { try {
let release = await releaser.createRelease({ let release = await releaser.createRelease({
@ -295,8 +283,7 @@ export const release = async (
draft, draft,
prerelease, prerelease,
target_commitish, target_commitish,
discussion_category_name, discussion_category_name
generate_release_notes,
}); });
return release.data; return release.data;
} catch (error) { } catch (error) {
@ -304,9 +291,9 @@ export const release = async (
console.log( console.log(
`⚠️ GitHub release failed with status: ${ `⚠️ GitHub release failed with status: ${
error.status error.status
}\n${JSON.stringify(error.response.data.errors)}\nretrying... (${ }\n${JSON.stringify(
maxRetries - 1 error.response.data.errors
} retries remaining)` )}\nretrying... (${maxRetries - 1} retries remaining)`
); );
return release(config, releaser, maxRetries - 1); return release(config, releaser, maxRetries - 1);
} }


@ -3,7 +3,7 @@ import {
parseConfig, parseConfig,
isTag, isTag,
unmatchedPatterns, unmatchedPatterns,
uploadUrl, uploadUrl
} from "./util"; } from "./util";
import { release, upload, GitHubReleaser } from "./github"; import { release, upload, GitHubReleaser } from "./github";
import { getOctokit } from "@actions/github"; import { getOctokit } from "@actions/github";
@ -24,7 +24,7 @@ async function run() {
} }
if (config.input_files) { if (config.input_files) {
const patterns = unmatchedPatterns(config.input_files); const patterns = unmatchedPatterns(config.input_files);
patterns.forEach((pattern) => patterns.forEach(pattern =>
console.warn(`🤔 Pattern '${pattern}' does not match any files.`) console.warn(`🤔 Pattern '${pattern}' does not match any files.`)
); );
if (patterns.length > 0 && config.input_fail_on_unmatched_files) { if (patterns.length > 0 && config.input_fail_on_unmatched_files) {
@ -55,8 +55,8 @@ async function run() {
console.warn( console.warn(
`Abuse detected for request ${options.method} ${options.url}` `Abuse detected for request ${options.method} ${options.url}`
); );
}, }
}, }
}); });
//); //);
const rel = await release(config, new GitHubReleaser(gh)); const rel = await release(config, new GitHubReleaser(gh));
@ -65,23 +65,20 @@ async function run() {
if (files.length == 0) { if (files.length == 0) {
console.warn(`🤔 ${config.input_files} not include valid file.`); console.warn(`🤔 ${config.input_files} not include valid file.`);
} }
const currentAssets = rel.assets; const currentAsserts = rel.assets;
const assets = await Promise.all( await Promise.all(
files.map(async (path) => { files.map(async path => {
const json = await upload( await upload(
config, config,
gh, gh,
uploadUrl(rel.upload_url), uploadUrl(rel.upload_url),
path, path,
currentAssets currentAsserts
); );
delete json.uploader;
return json;
}) })
).catch((error) => { ).catch(error => {
throw error; throw error;
}); });
setOutput("assets", assets);
} }
console.log(`🎉 Release ready at ${rel.html_url}`); console.log(`🎉 Release ready at ${rel.html_url}`);
setOutput("url", rel.html_url); setOutput("url", rel.html_url);


@ -1,5 +1,5 @@
import * as glob from "glob"; import * as glob from "glob";
import { statSync, readFileSync } from "fs"; import { lstatSync, readFileSync } from "fs";
export interface Config { export interface Config {
github_token: string; github_token: string;
@ -17,8 +17,6 @@ export interface Config {
input_fail_on_unmatched_files?: boolean; input_fail_on_unmatched_files?: boolean;
input_target_commitish?: string; input_target_commitish?: string;
input_discussion_category_name?: string; input_discussion_category_name?: string;
input_generate_release_notes?: boolean;
input_append_body?: boolean;
} }
export const uploadUrl = (url: string): string => { export const uploadUrl = (url: string): string => {
@ -44,8 +42,8 @@ export const parseInputFiles = (files: string): string[] => {
(acc, line) => (acc, line) =>
acc acc
.concat(line.split(",")) .concat(line.split(","))
.filter((pat) => pat) .filter(pat => pat)
.map((pat) => pat.trim()), .map(pat => pat.trim()),
[] []
); );
}; };
@ -65,18 +63,15 @@ export const parseConfig = (env: Env): Config => {
? env.INPUT_PRERELEASE == "true" ? env.INPUT_PRERELEASE == "true"
: undefined, : undefined,
input_fail_on_unmatched_files: env.INPUT_FAIL_ON_UNMATCHED_FILES == "true", input_fail_on_unmatched_files: env.INPUT_FAIL_ON_UNMATCHED_FILES == "true",
input_target_commitish: env.INPUT_TARGET_COMMITISH || undefined, input_target_commitish: env.INPUT_TARGET_COMMITISH,
input_discussion_category_name: input_discussion_category_name: env.INPUT_DISCUSSION_CATEGORY_NAME
env.INPUT_DISCUSSION_CATEGORY_NAME || undefined,
input_generate_release_notes: env.INPUT_GENERATE_RELEASE_NOTES == "true",
input_append_body: env.INPUT_APPEND_BODY == "true",
}; };
}; };
export const paths = (patterns: string[]): string[] => { export const paths = (patterns: string[]): string[] => {
return patterns.reduce((acc: string[], pattern: string): string[] => { return patterns.reduce((acc: string[], pattern: string): string[] => {
return acc.concat( return acc.concat(
glob.sync(pattern).filter((path) => statSync(path).isFile()) glob.sync(pattern).filter(path => lstatSync(path).isFile())
); );
}, []); }, []);
}; };
@ -84,7 +79,7 @@ export const paths = (patterns: string[]): string[] => {
export const unmatchedPatterns = (patterns: string[]): string[] => { export const unmatchedPatterns = (patterns: string[]): string[] => {
return patterns.reduce((acc: string[], pattern: string): string[] => { return patterns.reduce((acc: string[], pattern: string): string[] => {
return acc.concat( return acc.concat(
glob.sync(pattern).filter((path) => statSync(path).isFile()).length == 0 glob.sync(pattern).filter(path => lstatSync(path).isFile()).length == 0
? [pattern] ? [pattern]
: [] : []
); );


@@ -1,6 +1,5 @@
 {
 "compilerOptions": {
-"useUnknownInCatchVariables": false,
 /* Basic Options */
 // "incremental": true, /* Enable incremental compilation */
 "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */