Update dependencies and rebuild

Vilius Sutkus '89 2024-04-05 13:59:39 +03:00
parent 0d0e8c3af6
commit 00854ea68c
3 changed files with 1960 additions and 2357 deletions

dist/index.js vendored

@ -2971,7 +2971,7 @@ class HttpClient {
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
if (!useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
@ -3003,16 +3003,12 @@ class HttpClient {
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if (this._keepAlive && !agent) {
// if tunneling agent isn't assigned create a new agent
if (!agent) {
const options = { keepAlive: this._keepAlive, maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
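
Illustrative sketch, not part of the diff: the hunk above simplifies agent selection in the vendored @actions/http-client code. A per-client agent is now created whenever the request does not go through a proxy, regardless of keep-alive, and the old fallback to Node's global agent is gone. A minimal standalone sketch of that selection logic (field names mirror the diff, tunnel-agent creation omitted):

const http = require('http')
const https = require('https')

// Pick (and cache) an agent the way the updated client does:
// proxy agent when tunneling with keep-alive, otherwise a dedicated agent.
function getAgent (client, useProxy, usingSsl, maxSockets) {
  if (client._keepAlive && useProxy && client._proxyAgent) return client._proxyAgent
  if (!useProxy && client._agent) return client._agent

  const options = { keepAlive: client._keepAlive, maxSockets }
  const agent = usingSsl ? new https.Agent(options) : new http.Agent(options)
  client._agent = agent
  return agent
}

// Usage with a stand-in for the client's private state.
const client = { _keepAlive: false, _agent: null, _proxyAgent: null }
console.log(getAgent(client, false, true, 50) === getAgent(client, false, true, 50)) // true: reused
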
@ -7482,6 +7478,7 @@ function request (opts, callback) {
}
module.exports = request
module.exports.RequestHandler = RequestHandler
/***/ }),
@ -8028,7 +8025,7 @@ module.exports = class BodyReadable extends Readable {
this
.on('close', function () {
signalListenerCleanup()
if (signal?.aborted) {
if (signal && signal.aborted) {
reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))
} else {
resolve(null)
@ -9422,13 +9419,13 @@ module.exports = {
/***/ }),
/***/ 9174:
/***/ ((module) => {
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
module.exports = {
kConstruct: Symbol('constructable')
kConstruct: (__nccwpck_require__(2785).kConstruct)
}
@ -10414,11 +10411,9 @@ class Parser {
socket[kReset] = true
}
let pause
try {
pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false
} catch (err) {
util.destroy(socket, err)
const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false
if (request.aborted) {
return -1
}
@ -10465,14 +10460,9 @@ class Parser {
this.bytesRead += buf.length
try {
if (request.onData(buf) === false) {
return constants.ERROR.PAUSED
}
} catch (err) {
util.destroy(socket, err)
return -1
}
}
onMessageComplete () {
@ -10512,11 +10502,7 @@ class Parser {
return -1
}
try {
request.onComplete(headers)
} catch (err) {
errorRequest(client, request, err)
}
client[kQueue][client[kRunningIdx]++] = null
@ -11302,13 +11288,17 @@ function writeH2 (client, session, request) {
})
stream.on('data', (chunk) => {
if (request.onData(chunk) === false) stream.pause()
if (request.onData(chunk) === false) {
stream.pause()
}
})
stream.once('close', () => {
h2State.openStreams -= 1
// TODO(HTTP/2): unref only if current streams count is 0
if (h2State.openStreams === 0) session.unref()
if (h2State.openStreams === 0) {
session.unref()
}
})
stream.once('error', function (err) {
@ -12876,6 +12866,132 @@ function onConnectTimeout (socket) {
module.exports = buildConnector
/***/ }),
/***/ 4462:
/***/ ((module) => {
"use strict";
/** @type {Record<string, string | undefined>} */
const headerNameLowerCasedRecord = {}
// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
'Accept',
'Accept-Encoding',
'Accept-Language',
'Accept-Ranges',
'Access-Control-Allow-Credentials',
'Access-Control-Allow-Headers',
'Access-Control-Allow-Methods',
'Access-Control-Allow-Origin',
'Access-Control-Expose-Headers',
'Access-Control-Max-Age',
'Access-Control-Request-Headers',
'Access-Control-Request-Method',
'Age',
'Allow',
'Alt-Svc',
'Alt-Used',
'Authorization',
'Cache-Control',
'Clear-Site-Data',
'Connection',
'Content-Disposition',
'Content-Encoding',
'Content-Language',
'Content-Length',
'Content-Location',
'Content-Range',
'Content-Security-Policy',
'Content-Security-Policy-Report-Only',
'Content-Type',
'Cookie',
'Cross-Origin-Embedder-Policy',
'Cross-Origin-Opener-Policy',
'Cross-Origin-Resource-Policy',
'Date',
'Device-Memory',
'Downlink',
'ECT',
'ETag',
'Expect',
'Expect-CT',
'Expires',
'Forwarded',
'From',
'Host',
'If-Match',
'If-Modified-Since',
'If-None-Match',
'If-Range',
'If-Unmodified-Since',
'Keep-Alive',
'Last-Modified',
'Link',
'Location',
'Max-Forwards',
'Origin',
'Permissions-Policy',
'Pragma',
'Proxy-Authenticate',
'Proxy-Authorization',
'RTT',
'Range',
'Referer',
'Referrer-Policy',
'Refresh',
'Retry-After',
'Sec-WebSocket-Accept',
'Sec-WebSocket-Extensions',
'Sec-WebSocket-Key',
'Sec-WebSocket-Protocol',
'Sec-WebSocket-Version',
'Server',
'Server-Timing',
'Service-Worker-Allowed',
'Service-Worker-Navigation-Preload',
'Set-Cookie',
'SourceMap',
'Strict-Transport-Security',
'Supports-Loading-Mode',
'TE',
'Timing-Allow-Origin',
'Trailer',
'Transfer-Encoding',
'Upgrade',
'Upgrade-Insecure-Requests',
'User-Agent',
'Vary',
'Via',
'WWW-Authenticate',
'X-Content-Type-Options',
'X-DNS-Prefetch-Control',
'X-Frame-Options',
'X-Permitted-Cross-Domain-Policies',
'X-Powered-By',
'X-Requested-With',
'X-XSS-Protection'
]
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = wellknownHeaderNames[i]
const lowerCasedKey = key.toLowerCase()
headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
lowerCasedKey
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)
module.exports = {
wellknownHeaderNames,
headerNameLowerCasedRecord
}
/***/ }),
/***/ 8045:
@ -13351,7 +13467,11 @@ class Request {
onBodySent (chunk) {
if (this[kHandler].onBodySent) {
try {
return this[kHandler].onBodySent(chunk)
} catch (err) {
this.abort(err)
}
}
}
@ -13361,7 +13481,11 @@ class Request {
}
if (this[kHandler].onRequestSent) {
try {
return this[kHandler].onRequestSent()
} catch (err) {
this.abort(err)
}
}
}
@ -13385,14 +13509,23 @@ class Request {
channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
}
try {
return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
} catch (err) {
this.abort(err)
}
}
onData (chunk) {
assert(!this.aborted)
assert(!this.completed)
try {
return this[kHandler].onData(chunk)
} catch (err) {
this.abort(err)
return false
}
}
onUpgrade (statusCode, headers, socket) {
@ -13411,7 +13544,13 @@ class Request {
if (channels.trailers.hasSubscribers) {
channels.trailers.publish({ request: this, trailers })
}
try {
return this[kHandler].onComplete(trailers)
} catch (err) {
// TODO (fix): This might be a bad idea?
this.onError(err)
}
}
onError (error) {
@ -13425,6 +13564,7 @@ class Request {
return
}
this.aborted = true
return this[kHandler].onError(error)
}
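
Illustrative sketch, not part of the diff: the hunks above wrap every downstream handler callback (onBodySent, onRequestSent, onHeaders, onData, onComplete) in try/catch so that a throwing handler aborts the request instead of blowing up inside the parser. A small sketch of the same guard pattern, with hypothetical names rather than undici's actual class:

class GuardedRequest {
  constructor (handler, abort) {
    this.handler = handler
    this.abort = abort
  }

  // Mirrors the pattern added above: a throwing handler is converted into
  // an abort, and onData additionally reports back-pressure via `false`.
  onData (chunk) {
    try {
      return this.handler.onData(chunk)
    } catch (err) {
      this.abort(err)
      return false
    }
  }
}

// Usage: the faulty handler no longer propagates its exception upward.
const req = new GuardedRequest(
  { onData () { throw new Error('boom') } },
  (err) => console.log('aborted:', err.message)
)
console.log(req.onData(Buffer.from('x'))) // logs "aborted: boom", then prints false
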
@ -13662,7 +13802,8 @@ module.exports = {
kHTTP1BuildRequest: Symbol('http1 build request'),
kHTTP2CopyHeaders: Symbol('http2 copy headers'),
kHTTPConnVersion: Symbol('http connection version'),
kRetryHandlerDefaultRetry: Symbol('retry agent default retry')
kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
kConstruct: Symbol('constructable')
}
@ -13683,6 +13824,7 @@ const { InvalidArgumentError } = __nccwpck_require__(8045)
const { Blob } = __nccwpck_require__(4300)
const nodeUtil = __nccwpck_require__(3837)
const { stringify } = __nccwpck_require__(3477)
const { headerNameLowerCasedRecord } = __nccwpck_require__(4462)
const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
@ -13892,6 +14034,15 @@ function parseKeepAliveTimeout (val) {
return m ? parseInt(m[1], 10) * 1000 : null
}
/**
* Retrieves a header name and returns its lowercase value.
* @param {string | Buffer} value Header name
* @returns {string}
*/
function headerNameToString (value) {
return headerNameLowerCasedRecord[value] || value.toLowerCase()
}
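
Illustrative sketch, not part of the diff: headerNameToString avoids a toLowerCase() call for well-known header names by consulting the null-prototype record built in module 4462 above. A reduced, self-contained version of the same trick (three names instead of the full list):

// Precomputed lowercase lookup, keyed by both the canonical and the lowercase spelling.
const headerNameLowerCasedRecord = Object.create(null)
for (const name of ['Content-Type', 'Authorization', 'User-Agent']) {
  const lower = name.toLowerCase()
  headerNameLowerCasedRecord[name] = headerNameLowerCasedRecord[lower] = lower
}

function headerNameToString (value) {
  // Fast path for known names; anything unknown falls back to toLowerCase().
  return headerNameLowerCasedRecord[value] || value.toLowerCase()
}

console.log(headerNameToString('Content-Type'))   // 'content-type' (record hit)
console.log(headerNameToString('X-Custom-Thing')) // 'x-custom-thing' (fallback)
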
function parseHeaders (headers, obj = {}) {
// For H2 support
if (!Array.isArray(headers)) return headers
@ -14163,6 +14314,7 @@ module.exports = {
isIterable,
isAsyncIterable,
isDestroyed,
headerNameToString,
parseRawHeaders,
parseHeaders,
parseKeepAliveTimeout,
@ -15310,17 +15462,14 @@ function dataURLProcessor (dataURL) {
* @param {boolean} excludeFragment
*/
function URLSerializer (url, excludeFragment = false) {
const href = url.href
if (!excludeFragment) {
return href
return url.href
}
const hash = href.lastIndexOf('#')
if (hash === -1) {
return href
}
return href.slice(0, hash)
const href = url.href
const hashLength = url.hash.length
return hashLength === 0 ? href : href.substring(0, href.length - hashLength)
}
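
Illustrative sketch, not part of the diff: the rewritten URLSerializer strips the fragment arithmetically. Because url.href always ends with the serialized url.hash, slicing off hash.length characters is equivalent to searching for '#'. A quick check of that equivalence on a plain URL:

function serializeWithoutFragment (url) {
  const href = url.href
  const hashLength = url.hash.length
  // No hash means nothing to strip; otherwise drop exactly the hash's length.
  return hashLength === 0 ? href : href.substring(0, href.length - hashLength)
}

const u = new URL('https://example.com/path?q=1#section-2')
console.log(serializeWithoutFragment(u))                               // 'https://example.com/path?q=1'
console.log(serializeWithoutFragment(new URL('https://example.com/'))) // unchanged
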
// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
@ -16504,7 +16653,7 @@ module.exports = {
const { kHeadersList } = __nccwpck_require__(2785)
const { kHeadersList, kConstruct } = __nccwpck_require__(2785)
const { kGuard } = __nccwpck_require__(5861)
const { kEnumerableProperty } = __nccwpck_require__(3983)
const {
@ -16742,6 +16891,9 @@ class HeadersList {
// https://fetch.spec.whatwg.org/#headers-class
class Headers {
constructor (init = undefined) {
if (init === kConstruct) {
return
}
this[kHeadersList] = new HeadersList()
// The new Headers(init) constructor steps are:
@ -17382,7 +17534,7 @@ function finalizeAndReportTiming (response, initiatorType = 'other') {
}
// 8. If response's timing allow passed flag is not set, then:
if (!timingInfo.timingAllowPassed) {
if (!response.timingAllowPassed) {
// 1. Set timingInfo to the result of creating an opaque timing info for timingInfo.
timingInfo = createOpaqueTimingInfo({
startTime: timingInfo.startTime
@ -18299,6 +18451,9 @@ function httpRedirectFetch (fetchParams, response) {
// https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
request.headersList.delete('authorization')
// https://fetch.spec.whatwg.org/#authentication-entries
request.headersList.delete('proxy-authorization', true)
// "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
request.headersList.delete('cookie')
request.headersList.delete('host')
@ -19259,7 +19414,8 @@ const {
isValidHTTPToken,
sameOrigin,
normalizeMethod,
makePolicyContainer
makePolicyContainer,
normalizeMethodRecord
} = __nccwpck_require__(2538)
const {
forbiddenMethodsSet,
@ -19276,13 +19432,12 @@ const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(5861)
const { webidl } = __nccwpck_require__(1744)
const { getGlobalOrigin } = __nccwpck_require__(1246)
const { URLSerializer } = __nccwpck_require__(685)
const { kHeadersList } = __nccwpck_require__(2785)
const { kHeadersList, kConstruct } = __nccwpck_require__(2785)
const assert = __nccwpck_require__(9491)
const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(2361)
let TransformStream = globalThis.TransformStream
const kInit = Symbol('init')
const kAbortController = Symbol('abortController')
const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {
@ -19293,7 +19448,7 @@ const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {
class Request {
// https://fetch.spec.whatwg.org/#dom-request
constructor (input, init = {}) {
if (input === kInit) {
if (input === kConstruct) {
return
}
@ -19432,8 +19587,10 @@ class Request {
urlList: [...request.urlList]
})
const initHasKey = Object.keys(init).length !== 0
// 13. If init is not empty, then:
if (Object.keys(init).length > 0) {
if (initHasKey) {
// 1. If request's mode is "navigate", then set it to "same-origin".
if (request.mode === 'navigate') {
request.mode = 'same-origin'
@ -19548,7 +19705,7 @@ class Request {
}
// 23. If init["integrity"] exists, then set request's integrity metadata to it.
if (init.integrity !== undefined && init.integrity != null) {
if (init.integrity != null) {
request.integrity = String(init.integrity)
}
@ -19564,16 +19721,16 @@ class Request {
// 2. If method is not a method or method is a forbidden method, then
// throw a TypeError.
if (!isValidHTTPToken(init.method)) {
throw new TypeError(`'${init.method}' is not a valid HTTP method.`)
if (!isValidHTTPToken(method)) {
throw new TypeError(`'${method}' is not a valid HTTP method.`)
}
if (forbiddenMethodsSet.has(method.toUpperCase())) {
throw new TypeError(`'${init.method}' HTTP method is unsupported.`)
throw new TypeError(`'${method}' HTTP method is unsupported.`)
}
// 3. Normalize method.
method = normalizeMethod(init.method)
method = normalizeMethodRecord[method] ?? normalizeMethod(method)
// 4. Set request's method to method.
request.method = method
@ -19644,7 +19801,7 @@ class Request {
// 30. Set this's headers to a new Headers object with this's relevant
// Realm, whose header list is request's header list and guard is
// "request".
this[kHeaders] = new Headers()
this[kHeaders] = new Headers(kConstruct)
this[kHeaders][kHeadersList] = request.headersList
this[kHeaders][kGuard] = 'request'
this[kHeaders][kRealm] = this[kRealm]
@ -19664,25 +19821,25 @@ class Request {
}
// 32. If init is not empty, then:
if (Object.keys(init).length !== 0) {
if (initHasKey) {
/** @type {HeadersList} */
const headersList = this[kHeaders][kHeadersList]
// 1. Let headers be a copy of this's headers and its associated header
// list.
let headers = new Headers(this[kHeaders])
// 2. If init["headers"] exists, then set headers to init["headers"].
if (init.headers !== undefined) {
headers = init.headers
}
const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList)
// 3. Empty this's headers's header list.
this[kHeaders][kHeadersList].clear()
headersList.clear()
// 4. If headers is a Headers object, then for each header in its header
// list, append header's name/header's value to this's headers.
if (headers.constructor.name === 'Headers') {
if (headers instanceof HeadersList) {
for (const [key, val] of headers) {
this[kHeaders].append(key, val)
headersList.append(key, val)
}
// Note: Copy the `set-cookie` meta-data.
headersList.cookies = headers.cookies
} else {
// 5. Otherwise, fill this's headers with headers.
fillHeaders(this[kHeaders], headers)
@ -19971,10 +20128,10 @@ class Request {
// 3. Let clonedRequestObject be the result of creating a Request object,
// given clonedRequest, this's headers's guard, and this's relevant Realm.
const clonedRequestObject = new Request(kInit)
const clonedRequestObject = new Request(kConstruct)
clonedRequestObject[kState] = clonedRequest
clonedRequestObject[kRealm] = this[kRealm]
clonedRequestObject[kHeaders] = new Headers()
clonedRequestObject[kHeaders] = new Headers(kConstruct)
clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList
clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]
clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]
@ -20224,7 +20381,7 @@ const { webidl } = __nccwpck_require__(1744)
const { FormData } = __nccwpck_require__(2015)
const { getGlobalOrigin } = __nccwpck_require__(1246)
const { URLSerializer } = __nccwpck_require__(685)
const { kHeadersList } = __nccwpck_require__(2785)
const { kHeadersList, kConstruct } = __nccwpck_require__(2785)
const assert = __nccwpck_require__(9491)
const { types } = __nccwpck_require__(3837)
@ -20345,7 +20502,7 @@ class Response {
// 2. Set this's headers to a new Headers object with this's relevant
// Realm, whose header list is this's response's header list and guard
// is "response".
this[kHeaders] = new Headers()
this[kHeaders] = new Headers(kConstruct)
this[kHeaders][kGuard] = 'response'
this[kHeaders][kHeadersList] = this[kState].headersList
this[kHeaders][kRealm] = this[kRealm]
@ -20715,11 +20872,7 @@ webidl.converters.XMLHttpRequestBodyInit = function (V) {
return webidl.converters.Blob(V, { strict: false })
}
if (
types.isAnyArrayBuffer(V) ||
types.isTypedArray(V) ||
types.isDataView(V)
) {
if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) {
return webidl.converters.BufferSource(V)
}
@ -20809,14 +20962,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983
const assert = __nccwpck_require__(9491)
const { isUint8Array } = __nccwpck_require__(9830)
let supportedHashes = []
// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')|undefined} */
let crypto
try {
crypto = __nccwpck_require__(6113)
const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
/* c8 ignore next 3 */
} catch {
}
function responseURL (response) {
@ -21344,66 +21501,56 @@ function bytesMatch (bytes, metadataList) {
return true
}
// 3. If parsedMetadata is the empty set, return true.
// 3. If response is not eligible for integrity validation, return false.
// TODO
// 4. If parsedMetadata is the empty set, return true.
if (parsedMetadata.length === 0) {
return true
}
// 4. Let metadata be the result of getting the strongest
// 5. Let metadata be the result of getting the strongest
// metadata from parsedMetadata.
const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
// get the strongest algorithm
const strongest = list[0].algo
// get all entries that use the strongest algorithm; ignore weaker
const metadata = list.filter((item) => item.algo === strongest)
const strongest = getStrongestMetadata(parsedMetadata)
const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
// 5. For each item in metadata:
// 6. For each item in metadata:
for (const item of metadata) {
// 1. Let algorithm be the alg component of item.
const algorithm = item.algo
// 2. Let expectedValue be the val component of item.
let expectedValue = item.hash
const expectedValue = item.hash
// See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
// "be liberal with padding". This is annoying, and it's not even in the spec.
if (expectedValue.endsWith('==')) {
expectedValue = expectedValue.slice(0, -2)
}
// 3. Let actualValue be the result of applying algorithm to bytes.
let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
if (actualValue.endsWith('==')) {
if (actualValue[actualValue.length - 1] === '=') {
if (actualValue[actualValue.length - 2] === '=') {
actualValue = actualValue.slice(0, -2)
} else {
actualValue = actualValue.slice(0, -1)
}
}
// 4. If actualValue is a case-sensitive match for expectedValue,
// return true.
if (actualValue === expectedValue) {
return true
}
let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
if (actualBase64URL.endsWith('==')) {
actualBase64URL = actualBase64URL.slice(0, -2)
}
if (actualBase64URL === expectedValue) {
if (compareBase64Mixed(actualValue, expectedValue)) {
return true
}
}
// 6. Return false.
// 7. Return false.
return false
}
// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
/**
* @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@ -21417,8 +21564,6 @@ function parseMetadata (metadata) {
// 2. Let empty be equal to true.
let empty = true
const supportedHashes = crypto.getHashes()
// 3. For each token returned by splitting metadata on spaces:
for (const token of metadata.split(' ')) {
// 1. Set empty to false.
@ -21428,7 +21573,11 @@ function parseMetadata (metadata) {
const parsedToken = parseHashWithOptions.exec(token)
// 3. If token does not parse, continue to the next token.
if (parsedToken === null || parsedToken.groups === undefined) {
if (
parsedToken === null ||
parsedToken.groups === undefined ||
parsedToken.groups.algo === undefined
) {
// Note: Chromium blocks the request at this point, but Firefox
// gives a warning that an invalid integrity was given. The
// correct behavior is to ignore these, and subsequently not
@ -21437,11 +21586,11 @@ function parseMetadata (metadata) {
}
// 4. Let algorithm be the hash-algo component of token.
const algorithm = parsedToken.groups.algo
const algorithm = parsedToken.groups.algo.toLowerCase()
// 5. If algorithm is a hash function recognized by the user
// agent, add the parsed token to result.
if (supportedHashes.includes(algorithm.toLowerCase())) {
if (supportedHashes.includes(algorithm)) {
result.push(parsedToken.groups)
}
}
@ -21454,6 +21603,82 @@ function parseMetadata (metadata) {
return result
}
/**
* @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
*/
function getStrongestMetadata (metadataList) {
// Let algorithm be the algo component of the first item in metadataList.
// Can be sha256
let algorithm = metadataList[0].algo
// If the algorithm is sha512, then it is the strongest
// and we can return immediately
if (algorithm[3] === '5') {
return algorithm
}
for (let i = 1; i < metadataList.length; ++i) {
const metadata = metadataList[i]
// If the algorithm is sha512, then it is the strongest
// and we can break the loop immediately
if (metadata.algo[3] === '5') {
algorithm = 'sha512'
break
// If the algorithm is sha384, then a potential sha256 or sha384 is ignored
} else if (algorithm[3] === '3') {
continue
// algorithm is sha256, check if algorithm is sha384 and if so, set it as
// the strongest
} else if (metadata.algo[3] === '3') {
algorithm = 'sha384'
}
}
return algorithm
}
function filterMetadataListByAlgorithm (metadataList, algorithm) {
if (metadataList.length === 1) {
return metadataList
}
let pos = 0
for (let i = 0; i < metadataList.length; ++i) {
if (metadataList[i].algo === algorithm) {
metadataList[pos++] = metadataList[i]
}
}
metadataList.length = pos
return metadataList
}
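
Illustrative sketch, not part of the diff: getStrongestMetadata ranks algorithms by the fourth character of their name ('sha256'[3] === '2', 'sha384'[3] === '3', 'sha512'[3] === '5'), so no full string comparison is needed, and filterMetadataListByAlgorithm then compacts the list in place. An equivalent formulation over a hypothetical metadata list, just to show the intended result:

// Hypothetical SRI entries; hashes shortened for readability.
const parsed = [
  { algo: 'sha256', hash: 'aaa' },
  { algo: 'sha512', hash: 'bbb' },
  { algo: 'sha384', hash: 'ccc' }
]

// Rank by the fourth character, exactly the ordering the functions above rely on.
const rank = (algo) => algo[3] // '2' < '3' < '5'

const strongest = parsed.reduce((a, b) => (rank(b.algo) > rank(a.algo) ? b : a)).algo
const metadata = parsed.filter((item) => item.algo === strongest)

console.log(strongest) // 'sha512'
console.log(metadata)  // [ { algo: 'sha512', hash: 'bbb' } ]
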
/**
* Compares two base64 strings, allowing for base64url
* in the second string.
*
* @param {string} actualValue always base64
* @param {string} expectedValue base64 or base64url
* @returns {boolean}
*/
function compareBase64Mixed (actualValue, expectedValue) {
if (actualValue.length !== expectedValue.length) {
return false
}
for (let i = 0; i < actualValue.length; ++i) {
if (actualValue[i] !== expectedValue[i]) {
if (
(actualValue[i] === '+' && expectedValue[i] === '-') ||
(actualValue[i] === '/' && expectedValue[i] === '_')
) {
continue
}
return false
}
}
return true
}
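
Illustrative sketch, not part of the diff: compareBase64Mixed exists because the same digest serialized as base64 and as base64url differs only in '+' vs '-' and '/' vs '_' (plus padding, which the caller strips beforehand). A short demonstration of that relationship using Node's crypto module, with a hypothetical payload:

const crypto = require('crypto')

const bytes = Buffer.from('hello world')
const b64 = crypto.createHash('sha512').update(bytes).digest('base64')
const b64url = crypto.createHash('sha512').update(bytes).digest('base64url')

console.log(b64 === b64url) // false: padding and charset differ

// Strip padding and map the two base64url characters back, and they agree,
// which is the per-character tolerance compareBase64Mixed implements.
const normalized = b64url.replace(/-/g, '+').replace(/_/g, '/')
console.log(b64.replace(/=+$/, '') === normalized) // true
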
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
// TODO
@ -21500,11 +21725,30 @@ function isCancelled (fetchParams) {
fetchParams.controller.state === 'terminated'
}
// https://fetch.spec.whatwg.org/#concept-method-normalize
const normalizeMethodRecord = {
delete: 'DELETE',
DELETE: 'DELETE',
get: 'GET',
GET: 'GET',
head: 'HEAD',
HEAD: 'HEAD',
options: 'OPTIONS',
OPTIONS: 'OPTIONS',
post: 'POST',
POST: 'POST',
put: 'PUT',
PUT: 'PUT'
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(normalizeMethodRecord, null)
/**
* @see https://fetch.spec.whatwg.org/#concept-method-normalize
* @param {string} method
*/
function normalizeMethod (method) {
return /^(DELETE|GET|HEAD|OPTIONS|POST|PUT)$/i.test(method)
? method.toUpperCase()
: method
return normalizeMethodRecord[method.toLowerCase()] ?? method
}
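
Illustrative sketch, not part of the diff: normalizeMethod now consults the null-prototype normalizeMethodRecord first, so the common spellings of the six normalizable methods skip the regex-and-toUpperCase path. A standalone restatement with a couple of calls (not the vendored module itself):

const normalizeMethodRecord = Object.assign(Object.create(null), {
  delete: 'DELETE', DELETE: 'DELETE',
  get: 'GET', GET: 'GET',
  head: 'HEAD', HEAD: 'HEAD',
  options: 'OPTIONS', OPTIONS: 'OPTIONS',
  post: 'POST', POST: 'POST',
  put: 'PUT', PUT: 'PUT'
})

const normalizeMethod = (method) => normalizeMethodRecord[method.toLowerCase()] ?? method

console.log(normalizeMethod('post'))  // 'POST'
console.log(normalizeMethod('PaTcH')) // 'PaTcH' -- PATCH is deliberately not normalized
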
// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string
@ -21849,7 +22093,9 @@ module.exports = {
urlIsLocal,
urlHasHttpsScheme,
urlIsHttpHttpsScheme,
readAllBytes
readAllBytes,
normalizeMethodRecord,
parseMetadata
}
@ -23936,12 +24182,17 @@ function parseLocation (statusCode, headers) {
// https://tools.ietf.org/html/rfc7231#section-6.4.4
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
return (
(header.length === 4 && header.toString().toLowerCase() === 'host') ||
(removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
(unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
(unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
)
if (header.length === 4) {
return util.headerNameToString(header) === 'host'
}
if (removeContent && util.headerNameToString(header).startsWith('content-')) {
return true
}
if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
const name = util.headerNameToString(header)
return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
}
return false
}
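
Illustrative sketch, not part of the diff: the new shouldRemoveHeader checks the header's byte length first (host = 4, cookie = 6, authorization = 13, proxy-authorization = 19) before lowercasing via util.headerNameToString, and it adds proxy-authorization to the headers dropped on cross-origin redirects. A standalone version, simplified to lowercase up front where the vendored code defers that until a length matches:

function shouldRemoveHeader (header, removeContent, unknownOrigin) {
  const name = header.toString().toLowerCase()
  if (header.length === 4) return name === 'host'
  if (removeContent && name.startsWith('content-')) return true
  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
  }
  return false
}

console.log(shouldRemoveHeader('Authorization', false, true))       // true on cross-origin redirect
console.log(shouldRemoveHeader('Proxy-Authorization', false, true)) // true, newly covered (length 19)
console.log(shouldRemoveHeader('Accept', false, true))              // false
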
// https://tools.ietf.org/html/rfc7231#section-6.4
@ -23973,7 +24224,7 @@ module.exports = RedirectHandler
/***/ 2286:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
const assert = __nccwpck_require__(8061)
const assert = __nccwpck_require__(9491)
const { kRetryHandlerDefaultRetry } = __nccwpck_require__(2785)
const { RequestRetryError } = __nccwpck_require__(8045)
@ -24070,7 +24321,7 @@ class RetryHandler {
}
onBodySent (chunk) {
return this.handler.onBodySent(chunk)
if (this.handler.onBodySent) return this.handler.onBodySent(chunk)
}
static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
@ -26233,6 +26484,9 @@ class ProxyAgent extends DispatcherBase {
this[kProxyTls] = opts.proxyTls
this[kProxyHeaders] = opts.headers || {}
const resolvedUrl = new URL(opts.uri)
const { origin, port, host, username, password } = resolvedUrl
if (opts.auth && opts.token) {
throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
} else if (opts.auth) {
@ -26240,11 +26494,10 @@ class ProxyAgent extends DispatcherBase {
this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
} else if (opts.token) {
this[kProxyHeaders]['proxy-authorization'] = opts.token
} else if (username && password) {
this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
}
const resolvedUrl = new URL(opts.uri)
const { origin, port, host } = resolvedUrl
const connect = buildConnector({ ...opts.proxyTls })
this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
this[kClient] = clientFactory(resolvedUrl, { connect })
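
Illustrative sketch, not part of the diff: ProxyAgent now also reads credentials embedded in the proxy URI and, when neither opts.auth nor opts.token is supplied, sends them as a Basic proxy-authorization header. A small sketch of just that header construction, using a hypothetical proxy address rather than the full dispatcher wiring:

function proxyAuthHeader (uri) {
  const { username, password } = new URL(uri)
  if (!(username && password)) return undefined
  const decoded = `${decodeURIComponent(username)}:${decodeURIComponent(password)}`
  return `Basic ${Buffer.from(decoded).toString('base64')}`
}

console.log(proxyAuthHeader('http://alice:s3cret%21@proxy.internal:8080'))
// 'Basic YWxpY2U6czNjcmV0IQ=='
console.log(proxyAuthHeader('http://proxy.internal:8080')) // undefined: no credentials in the URI
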
@ -26268,7 +26521,7 @@ class ProxyAgent extends DispatcherBase {
})
if (statusCode !== 200) {
socket.on('error', () => {}).destroy()
callback(new RequestAbortedError('Proxy response !== 200 when HTTP Tunneling'))
callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
}
if (opts.protocol !== 'https:') {
callback(null, socket)
@ -28598,8 +28851,8 @@ const COMMANDLINE_TOOLS_MAC_URL = `https://dl.google.com/android/repository/comm
const COMMANDLINE_TOOLS_LIN_URL = `https://dl.google.com/android/repository/commandlinetools-linux-${VERSION_LONG}_latest.zip`;
const ANDROID_HOME_SDK_DIR = path.join(os.homedir(), '.android', 'sdk');
let ANDROID_SDK_ROOT = process.env['ANDROID_SDK_ROOT'] || ANDROID_HOME_SDK_DIR;
function callSdkManager(sdkManager, arg, printOutput = true) {
return __awaiter(this, void 0, void 0, function* () {
function callSdkManager(sdkManager_1, arg_1) {
return __awaiter(this, arguments, void 0, function* (sdkManager, arg, printOutput = true) {
const acceptBuffer = Buffer.from(Array(10).fill('y').join('\n'), 'utf8');
yield exec.exec(sdkManager, [arg], {
input: acceptBuffer,
@ -28816,14 +29069,6 @@ module.exports = require("net");
/***/ }),
/***/ 8061:
/***/ ((module) => {
"use strict";
module.exports = require("node:assert");
/***/ }),
/***/ 5673:
/***/ ((module) => {
@ -29044,7 +29289,7 @@ Dicer.prototype._write = function (data, encoding, cb) {
if (this._headerFirst && this._isPreamble) {
if (!this._part) {
this._part = new PartStream(this._partOpts)
if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() }
}
const r = this._hparser.push(data)
if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
@ -29101,7 +29346,7 @@ Dicer.prototype._oninfo = function (isMatch, data, start, end) {
}
}
if (this._dashes === 2) {
if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) }
this.reset()
this._finished = true
// no more parts will be added
@ -29119,7 +29364,13 @@ Dicer.prototype._oninfo = function (isMatch, data, start, end) {
this._part._read = function (n) {
self._unpause()
}
if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
if (this._isPreamble && this.listenerCount('preamble') !== 0) {
this.emit('preamble', this._part)
} else if (this._isPreamble !== true && this.listenerCount('part') !== 0) {
this.emit('part', this._part)
} else {
this._ignore()
}
if (!this._isPreamble) { this._inHeader = true }
}
if (data && start < end && !this._ignoreData) {
@ -29802,7 +30053,7 @@ function Multipart (boy, cfg) {
++nfiles
if (!boy._events.file) {
if (boy.listenerCount('file') === 0) {
self.parser._ignore()
return
}
@ -30331,7 +30582,7 @@ const decoders = {
if (textDecoders.has(this.toString())) {
try {
return textDecoders.get(this).decode(data)
} catch (e) { }
} catch {}
}
return typeof data === 'string'
? data

package-lock.json generated

File diff suppressed because it is too large.

package.json

@ -32,8 +32,8 @@
"devDependencies": {
"@types/jest": "^29.5.5",
"@types/node": "^20.6.5",
"@typescript-eslint/eslint-plugin": "^6.7.2",
"@typescript-eslint/parser": "^6.7.2",
"@typescript-eslint/eslint-plugin": "^7.5.0",
"@typescript-eslint/parser": "^7.5.0",
"@vercel/ncc": "^0.38.0",
"eslint": "^8.50.0",
"eslint-plugin-github": "^4.10.2",